fix(tools): updated memory block to throw better errors, removed deprecated posthog route, remove deprecated templates & console helpers (#2753)

* fix(tools): updated memory block to throw better errors, removed deprecated posthog route, remove deprecated templates & console helpers

* remove isDeployed in favor of deploymentStatus

* ack PR comments
This commit is contained in:
Waleed
2026-01-09 16:53:37 -08:00
committed by GitHub
parent 860610b4c2
commit d3d6012d5c
34 changed files with 71 additions and 666 deletions

View File

@@ -4575,3 +4575,22 @@ export function FirefliesIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
/**
 * AWS Bedrock brand icon rendered as an inline SVG.
 *
 * Spreads all incoming SVG props onto the root `<svg>` element, so callers
 * can supply `className`, `width`/`height`, `aria-*` attributes, etc.
 * The glyph is a single path filled with a three-stop linear gradient.
 */
export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'>
      <defs>
        {/* NOTE(review): fixed gradient id can collide if this icon is mounted
            more than once in the same document — confirm single-use or switch
            to a generated id. */}
        <linearGradient id='bedrock_gradient' x1='80%' x2='20%' y1='20%' y2='80%'>
          <stop offset='0%' stopColor='#6350FB' />
          <stop offset='50%' stopColor='#3D8FFF' />
          <stop offset='100%' stopColor='#9AD8F8' />
        </linearGradient>
      </defs>
      <path
        d='M13.05 15.513h3.08c.214 0 .389.177.389.394v1.82a1.704 1.704 0 011.296 1.661c0 .943-.755 1.708-1.685 1.708-.931 0-1.686-.765-1.686-1.708 0-.807.554-1.484 1.297-1.662v-1.425h-2.69v4.663a.395.395 0 01-.188.338l-2.69 1.641a.385.385 0 01-.405-.002l-4.926-3.086a.395.395 0 01-.185-.336V16.3L2.196 14.87A.395.395 0 012 14.555L2 14.528V9.406c0-.14.073-.27.192-.34l2.465-1.462V4.448c0-.129.062-.249.165-.322l.021-.014L9.77 1.058a.385.385 0 01.407 0l2.69 1.675a.395.395 0 01.185.336V7.6h3.856V5.683a1.704 1.704 0 01-1.296-1.662c0-.943.755-1.708 1.685-1.708.931 0 1.685.765 1.685 1.708 0 .807-.553 1.484-1.296 1.662v2.311a.391.391 0 01-.389.394h-4.245v1.806h6.624a1.69 1.69 0 011.64-1.313c.93 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708a1.69 1.69 0 01-1.64-1.314H13.05v1.937h4.953l.915 1.18a1.66 1.66 0 01.84-.227c.931 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708-.93 0-1.685-.765-1.685-1.708 0-.346.102-.668.276-.937l-.724-.935H13.05v1.806zM9.973 1.856L7.93 3.122V6.09h-.778V3.604L5.435 4.669v2.945l2.11 1.36L9.712 7.61V5.334h.778V7.83c0 .136-.07.263-.184.335L7.963 9.638v2.081l1.422 1.009-.446.646-1.406-.998-1.53 1.005-.423-.66 1.605-1.055v-1.99L5.038 8.29l-2.26 1.34v1.676l1.972-1.189.398.677-2.37 1.429V14.3l2.166 1.258 2.27-1.368.397.677-2.176 1.311V19.3l1.876 1.175 2.365-1.426.398.678-2.017 1.216 1.918 1.201 2.298-1.403v-5.78l-4.758 2.893-.4-.675 5.158-3.136V3.289L9.972 1.856zM16.13 18.47a.913.913 0 00-.908.92c0 .507.406.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zm3.63-3.81a.913.913 0 00-.908.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92zm1.555-4.99a.913.913 0 00-.908.92c0 .507.407.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zM17.296 3.1a.913.913 0 00-.907.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92z'
        fill='url(#bedrock_gradient)'
        fillRule='nonzero'
      />
    </svg>
  )
}

View File

@@ -162,6 +162,7 @@ Create a webhook to receive recording events
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Grain API key \(Personal Access Token\) |
| `hookUrl` | string | Yes | Webhook endpoint URL \(must respond 2xx\) |
| `hookType` | string | Yes | Type of webhook: "recording_added" or "upload_status" |
| `filterBeforeDatetime` | string | No | Filter: recordings before this date |
| `filterAfterDatetime` | string | No | Filter: recordings after this date |
| `filterParticipantScope` | string | No | Filter: "internal" or "external" |
@@ -178,6 +179,7 @@ Create a webhook to receive recording events
| `id` | string | Hook UUID |
| `enabled` | boolean | Whether hook is active |
| `hook_url` | string | The webhook URL |
| `hook_type` | string | Type of hook: recording_added or upload_status |
| `filter` | object | Applied filters |
| `include` | object | Included fields |
| `inserted_at` | string | ISO8601 creation timestamp |

View File

@@ -851,24 +851,6 @@ List all status updates for a project in Linear
| --------- | ---- | ----------- |
| `updates` | array | Array of project updates |
### `linear_create_project_link`
Add an external link to a project in Linear
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | Project ID to add link to |
| `url` | string | Yes | URL of the external link |
| `label` | string | No | Link label/title |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `link` | object | The created project link |
### `linear_list_notifications`
List notifications for the current user in Linear
@@ -1246,7 +1228,6 @@ Create a new project label in Linear
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | The project for this label |
| `name` | string | Yes | Project label name |
| `color` | string | No | Label color \(hex code\) |
| `description` | string | No | Label description |
@@ -1424,12 +1405,12 @@ Create a new project status in Linear
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | The project to create the status for |
| `name` | string | Yes | Project status name |
| `type` | string | Yes | Status type: "backlog", "planned", "started", "paused", "completed", or "canceled" |
| `color` | string | Yes | Status color \(hex code\) |
| `position` | number | Yes | Position in status list \(e.g. 0, 1, 2...\) |
| `description` | string | No | Status description |
| `indefinite` | boolean | No | Whether the status is indefinite |
| `position` | number | No | Position in status list |
#### Output

View File

@@ -79,30 +79,6 @@ Capture multiple events at once in PostHog. Use this for bulk event ingestion to
| `status` | string | Status message indicating whether the batch was captured successfully |
| `eventsProcessed` | number | Number of events processed in the batch |
### `posthog_list_events`
List events in PostHog. Note: This endpoint is deprecated but kept for backwards compatibility. For production use, prefer the Query endpoint with HogQL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `personalApiKey` | string | Yes | PostHog Personal API Key \(for authenticated API access\) |
| `region` | string | No | PostHog region: us \(default\) or eu |
| `projectId` | string | Yes | PostHog Project ID |
| `limit` | number | No | Number of events to return \(default: 100, max: 100\) |
| `offset` | number | No | Number of events to skip for pagination |
| `event` | string | No | Filter by specific event name |
| `distinctId` | string | No | Filter by specific distinct_id |
| `before` | string | No | ISO 8601 timestamp - only return events before this time |
| `after` | string | No | ISO 8601 timestamp - only return events after this time |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `events` | array | List of events with their properties and metadata |
### `posthog_list_persons`
List persons (users) in PostHog. Returns user profiles with their properties and distinct IDs.

View File

@@ -53,6 +53,9 @@ Send a chat completion request to any supported LLM provider
| `vertexProject` | string | No | Google Cloud project ID for Vertex AI |
| `vertexLocation` | string | No | Google Cloud location for Vertex AI \(defaults to us-central1\) |
| `vertexCredential` | string | No | Google Cloud OAuth credential ID for Vertex AI |
| `bedrockAccessKeyId` | string | No | AWS Access Key ID for Bedrock |
| `bedrockSecretKey` | string | No | AWS Secret Access Key for Bedrock |
| `bedrockRegion` | string | No | AWS region for Bedrock \(defaults to us-east-1\) |
#### Output

View File

@@ -1,50 +0,0 @@
/**
* @deprecated This route is not currently in use
* @remarks Kept for reference - may be removed in future cleanup
*/
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
// Route-scoped logger for this handler.
const logger = createLogger('UpdateChatTitleAPI')

// Expected request body: the chat to rename and its new title.
const UpdateTitleSchema = z.object({
  chatId: z.string(),
  title: z.string(),
})
/**
 * Updates the title of a copilot chat.
 *
 * Requires an authenticated session; validates the JSON body against
 * `UpdateTitleSchema` and persists the new title with a fresh `updatedAt`.
 *
 * @param request - Incoming request whose JSON body carries `chatId` and `title`.
 * @returns 200 `{ success: true }` on success, 401 when unauthenticated,
 *          500 on validation or database failure.
 */
export async function POST(request: NextRequest) {
  try {
    const session = await getSession()
    const userId = session?.user?.id
    if (!userId) {
      return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
    }

    // Validate the body before touching the database; a bad payload throws
    // and is reported via the catch below.
    const payload = UpdateTitleSchema.parse(await request.json())

    await db
      .update(copilotChats)
      .set({ title: payload.title, updatedAt: new Date() })
      .where(eq(copilotChats.id, payload.chatId))

    logger.info('Chat title updated', { chatId: payload.chatId, title: payload.title })
    return NextResponse.json({ success: true })
  } catch (error) {
    // Covers auth lookup errors, Zod validation failures, and DB errors alike.
    logger.error('Error updating chat title:', error)
    return NextResponse.json(
      { success: false, error: 'Failed to update chat title' },
      { status: 500 }
    )
  }
}

View File

@@ -74,8 +74,6 @@ export async function POST(
loops: deployedState.loops || {},
parallels: deployedState.parallels || {},
lastSaved: Date.now(),
isDeployed: true,
deployedAt: new Date(),
deploymentStatuses: deployedState.deploymentStatuses || {},
})
@@ -88,7 +86,6 @@ export async function POST(
.set({ lastSynced: new Date(), updatedAt: new Date() })
.where(eq(workflow.id, id))
// Sync MCP tools with the reverted version's parameter schema
await syncMcpToolsForWorkflow({
workflowId: id,
requestId,

View File

@@ -106,8 +106,6 @@ function normalizeWorkflowState(input?: any): WorkflowState | null {
lastUpdate: input.lastUpdate,
metadata: input.metadata,
variables: input.variables,
isDeployed: input.isDeployed,
deployedAt: input.deployedAt,
deploymentStatuses: input.deploymentStatuses,
needsRedeployment: input.needsRedeployment,
dragStartPosition: input.dragStartPosition ?? null,

View File

@@ -109,8 +109,6 @@ function normalizeWorkflowState(input?: any): WorkflowState | null {
lastUpdate: input.lastUpdate,
metadata: input.metadata,
variables: input.variables,
isDeployed: input.isDeployed,
deployedAt: input.deployedAt,
deploymentStatuses: input.deploymentStatuses,
needsRedeployment: input.needsRedeployment,
dragStartPosition: input.dragStartPosition ?? null,

View File

@@ -13,7 +13,7 @@ import { useDebounce } from '@/hooks/use-debounce'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
/**
* Template data structure with support for both new and legacy fields
* Template data structure
*/
export interface Template {
/** Unique identifier for the template */
@@ -59,16 +59,6 @@ export interface Template {
isStarred: boolean
/** Whether the current user is a super user */
isSuperUser?: boolean
/** @deprecated Legacy field - use creator.referenceId instead */
userId?: string
/** @deprecated Legacy field - use details.tagline instead */
description?: string | null
/** @deprecated Legacy field - use creator.name instead */
author?: string
/** @deprecated Legacy field - use creator.referenceType instead */
authorType?: 'user' | 'organization'
/** @deprecated Legacy field - use creator.referenceId when referenceType is 'organization' */
organizationId?: string | null
/** Display color for the template card */
color?: string
/** Display icon for the template card */
@@ -107,7 +97,6 @@ export default function Templates({
/**
* Filter templates based on active tab and search query
* Memoized to prevent unnecessary recalculations on render
*/
const filteredTemplates = useMemo(() => {
const query = debouncedSearchQuery.toLowerCase()
@@ -115,7 +104,7 @@ export default function Templates({
return templates.filter((template) => {
const tabMatch =
activeTab === 'your'
? template.userId === currentUserId || template.isStarred
? template.creator?.referenceId === currentUserId || template.isStarred
: activeTab === 'gallery'
? template.status === 'approved'
: template.status === 'pending'
@@ -124,13 +113,7 @@ export default function Templates({
if (!query) return true
const searchableText = [
template.name,
template.description,
template.details?.tagline,
template.author,
template.creator?.name,
]
const searchableText = [template.name, template.details?.tagline, template.creator?.name]
.filter(Boolean)
.join(' ')
.toLowerCase()
@@ -141,7 +124,6 @@ export default function Templates({
/**
* Get empty state message based on current filters
* Memoized to prevent unnecessary recalculations on render
*/
const emptyState = useMemo(() => {
if (debouncedSearchQuery) {
@@ -235,25 +217,20 @@ export default function Templates({
</div>
</div>
) : (
filteredTemplates.map((template) => {
const author = template.author || template.creator?.name || 'Unknown'
const authorImageUrl = template.creator?.profileImageUrl || null
return (
<TemplateCard
key={template.id}
id={template.id}
title={template.name}
author={author}
authorImageUrl={authorImageUrl}
usageCount={template.views.toString()}
stars={template.stars}
state={template.state}
isStarred={template.isStarred}
isVerified={template.creator?.verified || false}
/>
)
})
filteredTemplates.map((template) => (
<TemplateCard
key={template.id}
id={template.id}
title={template.name}
author={template.creator?.name || 'Unknown'}
authorImageUrl={template.creator?.profileImageUrl || null}
usageCount={template.views.toString()}
stars={template.stars}
state={template.state}
isStarred={template.isStarred}
isVerified={template.creator?.verified || false}
/>
))
)}
</div>
</div>

View File

@@ -109,12 +109,7 @@ export const DiffControls = memo(function DiffControls() {
loops: rawState.loops || {},
parallels: rawState.parallels || {},
lastSaved: rawState.lastSaved || Date.now(),
isDeployed: rawState.isDeployed || false,
deploymentStatuses: rawState.deploymentStatuses || {},
// Only include deployedAt if it's a valid date, never include null/undefined
...(rawState.deployedAt && rawState.deployedAt instanceof Date
? { deployedAt: rawState.deployedAt }
: {}),
}
logger.info('Prepared complete workflow state for checkpoint', {

View File

@@ -16,8 +16,6 @@ export interface CurrentWorkflow {
loops: Record<string, Loop>
parallels: Record<string, Parallel>
lastSaved?: number
isDeployed?: boolean
deployedAt?: Date
deploymentStatuses?: Record<string, DeploymentStatus>
needsRedeployment?: boolean
@@ -50,8 +48,6 @@ export function useCurrentWorkflow(): CurrentWorkflow {
loops: state.loops,
parallels: state.parallels,
lastSaved: state.lastSaved,
isDeployed: state.isDeployed,
deployedAt: state.deployedAt,
deploymentStatuses: state.deploymentStatuses,
needsRedeployment: state.needsRedeployment,
}))
@@ -82,8 +78,6 @@ export function useCurrentWorkflow(): CurrentWorkflow {
loops: activeWorkflow.loops || {},
parallels: activeWorkflow.parallels || {},
lastSaved: activeWorkflow.lastSaved,
isDeployed: activeWorkflow.isDeployed,
deployedAt: activeWorkflow.deployedAt,
deploymentStatuses: activeWorkflow.deploymentStatuses,
needsRedeployment: activeWorkflow.needsRedeployment,

View File

@@ -117,7 +117,6 @@ export async function applyAutoLayoutAndUpdateStore(
const cleanedWorkflowState = {
...stateToSave,
deployedAt: stateToSave.deployedAt ? new Date(stateToSave.deployedAt) : undefined,
loops: stateToSave.loops || {},
parallels: stateToSave.parallels || {},
edges: (stateToSave.edges || []).map((edge: any) => {

View File

@@ -369,8 +369,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
loops: workflowState.loops || {},
parallels: workflowState.parallels || {},
lastSaved: workflowState.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed ?? false,
deployedAt: workflowState.deployedAt,
deploymentStatuses: workflowState.deploymentStatuses || {},
})

View File

@@ -121,12 +121,6 @@ export const MemoryBlock: BlockConfig = {
if (!conversationId) {
errors.push('Conversation ID is required for add operation')
}
if (!params.role) {
errors.push('Role is required for agent memory')
}
if (!params.content) {
errors.push('Content is required for agent memory')
}
}
if (params.operation === 'get' || params.operation === 'delete') {

View File

@@ -23,7 +23,6 @@ export const PostHogBlock: BlockConfig<PostHogResponse> = {
// Core Data Operations
{ label: 'Capture Event', id: 'posthog_capture_event' },
{ label: 'Batch Events', id: 'posthog_batch_events' },
{ label: 'List Events', id: 'posthog_list_events' },
{ label: 'List Persons', id: 'posthog_list_persons' },
{ label: 'Get Person', id: 'posthog_get_person' },
{ label: 'Delete Person', id: 'posthog_delete_person' },
@@ -105,7 +104,6 @@ export const PostHogBlock: BlockConfig<PostHogResponse> = {
'posthog_get_event_definition',
'posthog_update_event_definition',
// Core Operations (with personalApiKey)
'posthog_list_events',
'posthog_list_persons',
'posthog_get_person',
'posthog_delete_person',
@@ -403,54 +401,6 @@ Return ONLY the JSON array.`,
condition: { field: 'operation', value: 'posthog_query' },
},
// List Events filters
{
id: 'eventFilter',
title: 'Event Name Filter',
type: 'short-input',
placeholder: 'e.g., page_view, button_clicked',
condition: { field: 'operation', value: 'posthog_list_events' },
},
{
id: 'before',
title: 'Before (ISO 8601)',
type: 'short-input',
placeholder: '2024-01-01T12:00:00Z',
condition: { field: 'operation', value: 'posthog_list_events' },
wandConfig: {
enabled: true,
prompt: `Generate an ISO 8601 timestamp based on the user's description.
The timestamp should be in the format: YYYY-MM-DDTHH:MM:SSZ (UTC timezone).
Examples:
- "today" -> Today's date at 00:00:00Z
- "this week" -> The start of this week at 00:00:00Z
- "last month" -> The 1st of last month at 00:00:00Z
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
placeholder: 'Describe the cutoff date (e.g., "today", "this week")...',
generationType: 'timestamp',
},
},
{
id: 'after',
title: 'After (ISO 8601)',
type: 'short-input',
placeholder: '2024-01-01T00:00:00Z',
condition: { field: 'operation', value: 'posthog_list_events' },
wandConfig: {
enabled: true,
prompt: `Generate an ISO 8601 timestamp based on the user's description.
The timestamp should be in the format: YYYY-MM-DDTHH:MM:SSZ (UTC timezone).
Examples:
- "yesterday" -> Yesterday's date at 00:00:00Z
- "last week" -> 7 days ago at 00:00:00Z
- "beginning of this month" -> The 1st of this month at 00:00:00Z
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
placeholder: 'Describe the start date (e.g., "yesterday", "last week")...',
generationType: 'timestamp',
},
},
{
id: 'distinctIdFilter',
title: 'Distinct ID Filter',
@@ -458,7 +408,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
placeholder: 'user123',
condition: {
field: 'operation',
value: ['posthog_list_events', 'posthog_list_persons'],
value: 'posthog_list_persons',
},
},
@@ -1079,7 +1029,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
condition: {
field: 'operation',
value: [
'posthog_list_events',
'posthog_list_persons',
'posthog_list_insights',
'posthog_list_dashboards',
@@ -1104,7 +1053,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
condition: {
field: 'operation',
value: [
'posthog_list_events',
'posthog_list_persons',
'posthog_list_insights',
'posthog_list_dashboards',
@@ -1188,7 +1136,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
// Core Data
'posthog_capture_event',
'posthog_batch_events',
'posthog_list_events',
'posthog_list_persons',
'posthog_get_person',
'posthog_delete_person',
@@ -1297,17 +1244,8 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
params.tags = params.insightTags
}
// Map eventFilter to event for list_events
if (params.operation === 'posthog_list_events' && params.eventFilter) {
params.event = params.eventFilter
}
// Map distinctIdFilter to distinctId for list operations
if (
(params.operation === 'posthog_list_events' ||
params.operation === 'posthog_list_persons') &&
params.distinctIdFilter
) {
// Map distinctIdFilter to distinctId for list_persons
if (params.operation === 'posthog_list_persons' && params.distinctIdFilter) {
params.distinctId = params.distinctIdFilter
}

View File

@@ -34,14 +34,8 @@ export class GenericBlockHandler implements BlockHandler {
if (blockType) {
const blockConfig = getBlock(blockType)
if (blockConfig?.tools?.config?.params) {
try {
const transformedParams = blockConfig.tools.config.params(inputs)
finalInputs = { ...inputs, ...transformedParams }
} catch (error) {
logger.warn(`Failed to apply parameter transformation for block type ${blockType}:`, {
error: error instanceof Error ? error.message : String(error),
})
}
const transformedParams = blockConfig.tools.config.params(inputs)
finalInputs = { ...inputs, ...transformedParams }
}
if (blockConfig?.inputs) {

View File

@@ -615,8 +615,6 @@ export function useCollaborativeWorkflow() {
edges: workflowData.state.edges || [],
loops: workflowData.state.loops || {},
parallels: workflowData.state.parallels || {},
isDeployed: workflowData.state.isDeployed || false,
deployedAt: workflowData.state.deployedAt,
lastSaved: workflowData.state.lastSaved || Date.now(),
deploymentStatuses: workflowData.state.deploymentStatuses || {},
})

View File

@@ -119,8 +119,6 @@ export function buildDefaultWorkflowArtifacts(): DefaultWorkflowArtifacts {
loops: {},
parallels: {},
lastSaved: Date.now(),
isDeployed: false,
deployedAt: undefined,
deploymentStatuses: {},
needsRedeployment: false,
}

View File

@@ -2483,8 +2483,6 @@ export const useCopilotStore = create<CopilotStore>()(
loops: reverted.loops || {},
parallels: reverted.parallels || {},
lastSaved: reverted.lastSaved || Date.now(),
isDeployed: !!reverted.isDeployed,
...(reverted.deployedAt ? { deployedAt: new Date(reverted.deployedAt) } : {}),
deploymentStatuses: reverted.deploymentStatuses || {},
})

View File

@@ -35,7 +35,6 @@ const isStreamingOutput = (output: any): boolean => {
return false
}
// Check for streaming indicators
return (
output.isStreaming === true ||
('executionData' in output &&
@@ -53,12 +52,10 @@ const shouldSkipEntry = (output: any): boolean => {
return false
}
// Skip raw streaming objects with both stream and executionData
if ('stream' in output && 'executionData' in output) {
return true
}
// Skip raw StreamingExecution objects
if ('stream' in output && 'execution' in output) {
return true
}
@@ -75,12 +72,10 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => {
set((state) => {
// Skip duplicate streaming entries
if (shouldSkipEntry(entry.output)) {
return { entries: state.entries }
}
// Redact API keys from output and input
const redactedEntry = { ...entry }
if (
!isStreamingOutput(entry.output) &&
@@ -93,7 +88,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
redactedEntry.input = redactApiKeys(redactedEntry.input)
}
// Create new entry with ID and timestamp
const newEntry: ConsoleEntry = {
...redactedEntry,
id: crypto.randomUUID(),
@@ -105,8 +99,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
const newEntry = get().entries[0]
// Surface error notifications immediately when error entries are added
// Only show if error notifications are enabled in settings
if (newEntry?.error) {
const { isErrorNotificationsEnabled } = useGeneralStore.getState()
@@ -115,7 +107,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
const errorMessage = String(newEntry.error)
const blockName = newEntry.blockName || 'Unknown Block'
// Copilot message includes block name for better debugging context
const copilotMessage = `${errorMessage}\n\nError in ${blockName}.\n\nPlease fix this.`
useNotificationStore.getState().addNotification({
@@ -147,22 +138,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
set((state) => ({
entries: state.entries.filter((entry) => entry.workflowId !== workflowId),
}))
// Clear run path indicators when console is cleared
useExecutionStore.getState().clearRunPath()
},
/**
* Clears all console entries or entries for a specific workflow and clears the run path
* @param workflowId - The workflow ID to clear entries for, or null to clear all
* @deprecated Use clearWorkflowConsole for clearing specific workflows
*/
clearConsole: (workflowId: string | null) => {
set((state) => ({
entries: workflowId
? state.entries.filter((entry) => entry.workflowId !== workflowId)
: [],
}))
// Clear run path indicators when console is cleared
useExecutionStore.getState().clearRunPath()
},
@@ -183,7 +158,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
let stringValue = typeof value === 'object' ? JSON.stringify(value) : String(value)
// Escape quotes and wrap in quotes if contains special characters
if (
stringValue.includes('"') ||
stringValue.includes(',') ||
@@ -232,7 +206,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19)
const filename = `terminal-console-${workflowId}-${timestamp}.csv`
// Create and trigger download
const blob = new Blob([csvContent], { type: 'text/csv;charset=utf-8;' })
const link = document.createElement('a')
@@ -259,18 +232,15 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
updateConsole: (blockId: string, update: string | ConsoleUpdate, executionId?: string) => {
set((state) => {
const updatedEntries = state.entries.map((entry) => {
// Only update if both blockId and executionId match
if (entry.blockId !== blockId || entry.executionId !== executionId) {
return entry
}
// Handle simple string update
if (typeof update === 'string') {
const newOutput = updateBlockOutput(entry.output, update)
return { ...entry, output: newOutput }
}
// Handle complex update
const updatedEntry = { ...entry }
if (update.content !== undefined) {

View File

@@ -48,7 +48,6 @@ export interface ConsoleStore {
isOpen: boolean
addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
clearWorkflowConsole: (workflowId: string) => void
clearConsole: (workflowId: string | null) => void
exportConsoleCSV: (workflowId: string) => void
getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
toggleConsole: () => void

View File

@@ -34,13 +34,7 @@ export function getWorkflowWithValues(workflowId: string) {
const deploymentStatus = useWorkflowRegistry.getState().getWorkflowDeploymentStatus(workflowId)
// Use the current state from the store (only available for active workflow)
const workflowState: WorkflowState = {
// Use the main store's method to get the base workflow state
...useWorkflowStore.getState().getWorkflowState(),
// Override deployment fields with registry-specific deployment status
isDeployed: deploymentStatus?.isDeployed || false,
deployedAt: deploymentStatus?.deployedAt,
}
const workflowState: WorkflowState = useWorkflowStore.getState().getWorkflowState()
// Merge the subblock values for this specific workflow
const mergedBlocks = mergeSubblockState(workflowState.blocks, workflowId)
@@ -58,8 +52,9 @@ export function getWorkflowWithValues(workflowId: string) {
loops: workflowState.loops,
parallels: workflowState.parallels,
lastSaved: workflowState.lastSaved,
isDeployed: workflowState.isDeployed,
deployedAt: workflowState.deployedAt,
// Get deployment fields from registry for API compatibility
isDeployed: deploymentStatus?.isDeployed || false,
deployedAt: deploymentStatus?.deployedAt,
},
}
}
@@ -101,7 +96,6 @@ export function getAllWorkflowsWithValues() {
// Ensure state has all required fields for Zod validation
const workflowState: WorkflowState = {
// Use the main store's method to get the base workflow state with fallback values
...useWorkflowStore.getState().getWorkflowState(),
// Ensure fallback values for safer handling
blocks: currentState.blocks || {},
@@ -109,9 +103,6 @@ export function getAllWorkflowsWithValues() {
loops: currentState.loops || {},
parallels: currentState.parallels || {},
lastSaved: currentState.lastSaved || Date.now(),
// Override deployment fields with registry-specific deployment status
isDeployed: deploymentStatus?.isDeployed || false,
deployedAt: deploymentStatus?.deployedAt,
}
// Merge the subblock values for this specific workflow
@@ -132,8 +123,9 @@ export function getAllWorkflowsWithValues() {
loops: workflowState.loops,
parallels: workflowState.parallels,
lastSaved: workflowState.lastSaved,
isDeployed: workflowState.isDeployed,
deployedAt: workflowState.deployedAt,
// Get deployment fields from registry for API compatibility
isDeployed: deploymentStatus?.isDeployed || false,
deployedAt: deploymentStatus?.deployedAt,
},
// Include API key if available
apiKey,

View File

@@ -40,9 +40,7 @@ function resetWorkflowStores() {
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
deploymentStatuses: {}, // Reset deployment statuses map
deploymentStatuses: {},
lastSaved: Date.now(),
})
@@ -227,31 +225,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
},
},
}))
// Also update the workflow store if this is the active workflow
const { activeWorkflowId } = get()
if (workflowId === activeWorkflowId) {
// Update the workflow store for backward compatibility
useWorkflowStore.setState((state) => ({
isDeployed,
deployedAt: deployedAt || (isDeployed ? new Date() : undefined),
needsRedeployment: isDeployed ? false : state.needsRedeployment,
deploymentStatuses: {
...state.deploymentStatuses,
[workflowId as string]: {
isDeployed,
deployedAt: deployedAt || (isDeployed ? new Date() : undefined),
apiKey,
needsRedeployment: isDeployed
? false
: ((state.deploymentStatuses?.[workflowId as string] as any)?.needsRedeployment ??
false),
},
},
}))
}
// Note: Socket.IO handles real-time sync automatically
},
// Method to set the needsRedeployment flag for a specific workflow
@@ -322,9 +295,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
edges: workflowData.state.edges || [],
loops: workflowData.state.loops || {},
parallels: workflowData.state.parallels || {},
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt ? new Date(workflowData.deployedAt) : undefined,
apiKey: workflowData.apiKey,
lastSaved: Date.now(),
deploymentStatuses: {},
}
@@ -334,8 +304,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
deploymentStatuses: {},
lastSaved: Date.now(),
}
@@ -543,8 +511,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
edges: sourceState.edges,
loops: sourceState.loops,
parallels: sourceState.parallels,
isDeployed: false,
deployedAt: undefined,
workspaceId,
deploymentStatuses: {},
lastSaved: Date.now(),
@@ -622,8 +588,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
edges: [...useWorkflowStore.getState().edges],
loops: { ...useWorkflowStore.getState().loops },
parallels: { ...useWorkflowStore.getState().parallels },
isDeployed: useWorkflowStore.getState().isDeployed,
deployedAt: useWorkflowStore.getState().deployedAt,
lastSaved: useWorkflowStore.getState().lastSaved,
}
: null,
@@ -646,8 +610,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
lastSaved: Date.now(),
})

View File

@@ -97,10 +97,6 @@ const initialState = {
loops: {},
parallels: {},
lastSaved: undefined,
// Legacy deployment fields (keeping for compatibility but they will be deprecated)
isDeployed: false,
deployedAt: undefined,
// New field for per-workflow deployment tracking
deploymentStatuses: {},
needsRedeployment: false,
}
@@ -174,7 +170,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
// #endregion
const subBlocks: Record<string, SubBlockState> = {}
const subBlockStore = useSubBlockStore.getState()
@@ -506,8 +501,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
loops: state.loops,
parallels: state.parallels,
lastSaved: state.lastSaved,
isDeployed: state.isDeployed,
deployedAt: state.deployedAt,
deploymentStatuses: state.deploymentStatuses,
needsRedeployment: state.needsRedeployment,
}
@@ -534,9 +527,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
edges: nextEdges,
loops: nextLoops,
parallels: nextParallels,
isDeployed:
workflowState.isDeployed !== undefined ? workflowState.isDeployed : state.isDeployed,
deployedAt: workflowState.deployedAt ?? state.deployedAt,
deploymentStatuses: workflowState.deploymentStatuses || state.deploymentStatuses,
needsRedeployment:
workflowState.needsRedeployment !== undefined
@@ -1043,7 +1033,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
edges: deployedState.edges,
loops: deployedState.loops || {},
parallels: deployedState.parallels || {},
isDeployed: true,
needsRedeployment: false,
// Keep existing deployment statuses and update for the active workflow if needed
deploymentStatuses: {

View File

@@ -164,8 +164,6 @@ export interface WorkflowState {
exportedAt?: string
}
variables?: Record<string, Variable>
isDeployed?: boolean
deployedAt?: Date
deploymentStatuses?: Record<string, DeploymentStatus>
needsRedeployment?: boolean
dragStartPosition?: DragStartPosition | null

View File

@@ -1,4 +1,3 @@
import { buildMemoryKey } from '@/tools/memory/helpers'
import type { MemoryResponse } from '@/tools/memory/types'
import type { ToolConfig } from '@/tools/types'
@@ -24,11 +23,13 @@ export const memoryAddTool: ToolConfig<any, MemoryResponse> = {
role: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Role for agent memory (user, assistant, or system)',
},
content: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Content for agent memory',
},
},
@@ -41,52 +42,15 @@ export const memoryAddTool: ToolConfig<any, MemoryResponse> = {
}),
body: (params) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'workspaceId is required and must be provided in execution context',
},
},
},
}
throw new Error('workspaceId is required in execution context')
}
const conversationId = params.conversationId || params.id
if (!conversationId || conversationId.trim() === '') {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'conversationId or id is required',
},
},
},
}
if (!conversationId) {
throw new Error('conversationId or id is required')
}
if (!params.role || !params.content) {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'Role and content are required for agent memory',
},
},
},
}
}
const key = buildMemoryKey(conversationId)
const key = conversationId
const body: Record<string, any> = {
key,

View File

@@ -23,37 +23,15 @@ export const memoryDeleteTool: ToolConfig<any, MemoryResponse> = {
},
request: {
url: (params): any => {
url: (params) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'workspaceId is required and must be provided in execution context',
},
},
},
}
throw new Error('workspaceId is required in execution context')
}
const conversationId = params.conversationId || params.id
if (!conversationId) {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'conversationId or id must be provided',
},
},
},
}
throw new Error('conversationId or id is required')
}
const url = new URL('/api/memory', 'http://dummy')

View File

@@ -1,4 +1,3 @@
import { buildMemoryKey } from '@/tools/memory/helpers'
import type { MemoryResponse } from '@/tools/memory/types'
import type { ToolConfig } from '@/tools/types'
@@ -24,40 +23,17 @@ export const memoryGetTool: ToolConfig<any, MemoryResponse> = {
},
request: {
url: (params): any => {
url: (params) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'workspaceId is required and must be provided in execution context',
},
},
},
}
throw new Error('workspaceId is required in execution context')
}
const conversationId = params.conversationId || params.id
if (!conversationId) {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'conversationId or id must be provided',
},
},
},
}
throw new Error('conversationId or id is required')
}
const query = buildMemoryKey(conversationId)
const query = conversationId
const url = new URL('/api/memory', 'http://dummy')
url.searchParams.set('workspaceId', workspaceId)

View File

@@ -10,21 +10,10 @@ export const memoryGetAllTool: ToolConfig<any, MemoryResponse> = {
params: {},
request: {
url: (params): any => {
url: (params) => {
const workspaceId = params._context?.workspaceId
if (!workspaceId) {
return {
_errorResponse: {
status: 400,
data: {
success: false,
error: {
message: 'workspaceId is required and must be provided in execution context',
},
},
},
}
throw new Error('workspaceId is required in execution context')
}
return `/api/memory?workspaceId=${encodeURIComponent(workspaceId)}`

View File

@@ -1,25 +0,0 @@
/**
 * Extract the conversationId carried by a memory key.
 * Memory is thread-scoped, so the key itself is the conversationId.
 * @param key The memory key (conversationId)
 * @returns Object holding the conversationId, or null for an empty/invalid key
 */
export function parseMemoryKey(key: string): { conversationId: string } | null {
  return key ? { conversationId: key } : null
}
/**
 * Derive the memory key for a conversation.
 * Memory is thread-scoped, so the key is exactly the conversationId.
 * @param conversationId The conversation ID
 * @returns The memory key (identical to conversationId)
 */
export const buildMemoryKey = (conversationId: string): string => conversationId

View File

@@ -29,7 +29,6 @@ import { listCohortsTool } from '@/tools/posthog/list_cohorts'
import { listDashboardsTool } from '@/tools/posthog/list_dashboards'
// Data Management
import { listEventDefinitionsTool } from '@/tools/posthog/list_event_definitions'
import { listEventsTool } from '@/tools/posthog/list_events'
import { listExperimentsTool } from '@/tools/posthog/list_experiments'
// Feature Management
import { listFeatureFlagsTool } from '@/tools/posthog/list_feature_flags'
@@ -53,7 +52,6 @@ import { updateSurveyTool } from '@/tools/posthog/update_survey'
// Export all tools with posthog prefix
export const posthogCaptureEventTool = captureEventTool
export const posthogBatchEventsTool = batchEventsTool
export const posthogListEventsTool = listEventsTool
export const posthogListPersonsTool = listPersonsTool
export const posthogGetPersonTool = getPersonTool
export const posthogDeletePersonTool = deletePersonTool

View File

@@ -1,190 +0,0 @@
import type { ToolConfig } from '@/tools/types'
/** Input parameters for the PostHog "list events" tool. */
export interface PostHogListEventsParams {
  /** PostHog Personal API Key used as the Bearer token for API access. */
  personalApiKey: string
  /** PostHog cloud region; selects the us (default) or eu API host. */
  region?: 'us' | 'eu'
  /** PostHog project whose events are listed. */
  projectId: string
  /** Maximum number of events to return (API default/max: 100). */
  limit?: number
  /** Number of events to skip, for pagination. */
  offset?: number
  /** Filter results to a specific event name. */
  event?: string
  /** Filter results to a specific distinct_id. */
  distinctId?: string
  /** ISO 8601 timestamp; only events before this time are returned. */
  before?: string
  /** ISO 8601 timestamp; only events after this time are returned. */
  after?: string
}
/** A single PostHog event as normalized by this tool's response transform. */
export interface PostHogEvent {
  /** Unique event ID. */
  id: string
  /** Event name. */
  event: string
  /** User or device identifier that produced the event. */
  distinct_id: string
  /** Arbitrary event properties (defaults to an empty object). */
  properties: Record<string, any>
  /** When the event occurred. */
  timestamp: string
  /** Associated person data, present only when PostHog returns one. */
  person?: {
    id: string
    distinct_ids: string[]
    properties: Record<string, any>
  }
}
/** Response shape produced by the PostHog "list events" tool. */
export interface PostHogListEventsResponse {
  /** Whether the API call succeeded (HTTP errors yield false, not a throw). */
  success: boolean
  output: {
    /** Events returned for the current page. */
    events: PostHogEvent[]
    /** URL of the next page of results, if any. */
    next?: string
  }
}
/**
 * PostHog "List Events" tool configuration.
 * Wraps the REST `/api/projects/:id/events/` endpoint of a PostHog project.
 * The endpoint is deprecated upstream but kept here for backwards
 * compatibility (see description below).
 */
export const listEventsTool: ToolConfig<PostHogListEventsParams, PostHogListEventsResponse> = {
  id: 'posthog_list_events',
  name: 'PostHog List Events',
  description:
    'List events in PostHog. Note: This endpoint is deprecated but kept for backwards compatibility. For production use, prefer the Query endpoint with HogQL.',
  version: '1.0.0',
  params: {
    personalApiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'PostHog Personal API Key (for authenticated API access)',
    },
    region: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'PostHog region: us (default) or eu',
      default: 'us',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'PostHog Project ID',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-only',
      description: 'Number of events to return (default: 100, max: 100)',
      default: 100,
    },
    offset: {
      type: 'number',
      required: false,
      visibility: 'user-only',
      description: 'Number of events to skip for pagination',
    },
    event: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Filter by specific event name',
    },
    distinctId: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Filter by specific distinct_id',
    },
    before: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'ISO 8601 timestamp - only return events before this time',
    },
    after: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'ISO 8601 timestamp - only return events after this time',
    },
  },
  request: {
    // Builds the region-specific events URL; each optional filter becomes a
    // query parameter.
    // NOTE(review): these truthiness checks skip falsy values, so an explicit
    // offset of 0 is never sent — harmless since 0 is the API default.
    url: (params) => {
      const baseUrl = params.region === 'eu' ? 'https://eu.posthog.com' : 'https://us.posthog.com'
      const url = new URL(`${baseUrl}/api/projects/${params.projectId}/events/`)
      if (params.limit) url.searchParams.append('limit', params.limit.toString())
      if (params.offset) url.searchParams.append('offset', params.offset.toString())
      if (params.event) url.searchParams.append('event', params.event)
      if (params.distinctId) url.searchParams.append('distinct_id', params.distinctId)
      if (params.before) url.searchParams.append('before', params.before)
      if (params.after) url.searchParams.append('after', params.after)
      return url.toString()
    },
    method: 'GET',
    headers: (params) => ({
      Authorization: `Bearer ${params.personalApiKey}`,
      'Content-Type': 'application/json',
    }),
  },
  // Normalizes the raw API payload into PostHogListEventsResponse.
  // HTTP errors are surfaced via success: false rather than thrown.
  transformResponse: async (response: Response) => {
    if (!response.ok) {
      const error = await response.text()
      return {
        success: false,
        output: {
          events: [],
        },
        // Fall back to a generic message when the error body is empty.
        error: error || 'Failed to list events',
      }
    }
    const data = await response.json()
    return {
      success: true,
      output: {
        // Map each raw result into the PostHogEvent shape, defaulting
        // missing collections to empty values.
        events:
          data.results?.map((event: any) => ({
            id: event.id,
            event: event.event,
            distinct_id: event.distinct_id,
            properties: event.properties || {},
            timestamp: event.timestamp,
            person: event.person
              ? {
                  id: event.person.id,
                  distinct_ids: event.person.distinct_ids || [],
                  properties: event.person.properties || {},
                }
              : undefined,
          })) || [],
        // Cursor URL for the next page, when the API provides one.
        next: data.next || undefined,
      },
    }
  },
  // Declarative schema describing the tool's output for downstream consumers.
  outputs: {
    events: {
      type: 'array',
      description: 'List of events with their properties and metadata',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Unique event ID' },
          event: { type: 'string', description: 'Event name' },
          distinct_id: { type: 'string', description: 'User or device identifier' },
          properties: { type: 'object', description: 'Event properties' },
          timestamp: { type: 'string', description: 'When the event occurred' },
          person: {
            type: 'object',
            description: 'Associated person data',
            properties: {
              id: { type: 'string', description: 'Person ID' },
              distinct_ids: { type: 'array', description: 'All distinct IDs for this person' },
              properties: { type: 'object', description: 'Person properties' },
            },
          },
        },
      },
    },
    next: {
      type: 'string',
      description: 'URL for the next page of results (if available)',
      optional: true,
    },
  },
}

View File

@@ -891,7 +891,6 @@ import {
posthogListCohortsTool,
posthogListDashboardsTool,
posthogListEventDefinitionsTool,
posthogListEventsTool,
posthogListExperimentsTool,
posthogListFeatureFlagsTool,
posthogListInsightsTool,
@@ -1903,7 +1902,6 @@ export const tools: Record<string, ToolConfig> = {
perplexity_search: perplexitySearchTool,
posthog_capture_event: posthogCaptureEventTool,
posthog_batch_events: posthogBatchEventsTool,
posthog_list_events: posthogListEventsTool,
posthog_list_persons: posthogListPersonsTool,
posthog_get_person: posthogGetPersonTool,
posthog_delete_person: posthogDeletePersonTool,