Compare commits

32 Commits

Author SHA1 Message Date
Waleed
67cfb21d08 v0.5.34: servicenow, code cleanup, prevent cyclic edge connections, custom tool fixes 2025-12-17 23:39:10 -08:00
Waleed
1d6975db49 v0.5.33: loops, chat fixes, subflow resizing refactor, terminal updates 2025-12-17 15:45:39 -08:00
Waleed
837aabca5e v0.5.32: google sheets fix, schedule input format 2025-12-16 15:41:04 -08:00
Vikhyath Mondreti
f9cfca92bf v0.5.31: add zod as direct dep 2025-12-15 20:40:02 -08:00
Waleed
25afacb25e v0.5.30: vllm fixes, permissions fixes, isolated vms for code execution, tool fixes 2025-12-15 19:38:01 -08:00
Gaurav Chadha
fcf52ac4d5 fix(landing): prevent url encoding for spaces for footer links (#2376) 2025-12-15 10:59:12 -08:00
Shivam
842200bcf2 fix(docs): clarify working directory for drizzle migration (#2375) 2025-12-15 10:58:27 -08:00
Waleed
a0fb889644 v0.5.29: chat voice mode, opengraph for docs, option to disable auth 2025-12-13 19:50:06 -08:00
Waleed
f526c36fc0 v0.5.28: tool fixes, sqs, spotify, nextjs update, component playground 2025-12-12 21:05:57 -08:00
Waleed
e24f31cbce v0.5.27: sidebar updates, ssrf patches, gpt-5.2, stagehand fixes 2025-12-11 14:45:25 -08:00
Waleed
3fbd57caf1 v0.5.26: tool fixes, templates and knowledgebase fixes, deployment versions in logs 2025-12-11 00:52:13 -08:00
Vikhyath Mondreti
b5da61377c v0.5.25: minor ui improvements, copilot billing fix 2025-12-10 18:32:27 -08:00
Waleed
18b7032494 v0.5.24: agent tool and UX improvements, redis service overhaul (#2291)
* feat(folders): add the ability to create a folder within a folder in popover (#2287)

* fix(agent): filter out empty params to ensure LLM can set tool params at runtime (#2288)

* fix(mcp): added backfill effect to add missing descriptions for mcp tools (#2290)

* fix(redis): cleanup access pattern across callsites (#2289)

* fix(redis): cleanup access pattern across callsites

* swap redis command to be non-blocking

* improvement(log-details): polling, trace spans (#2292)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
2025-12-10 13:09:21 -08:00
Waleed
b7bbef8620 v0.5.23: kb, logs, general ui improvements, token bucket rate limits, docs, mcp, autolayout improvements (#2286)
* fix(mcp): prevent redundant MCP server discovery calls at runtime, use cached tool schema instead (#2273)

* fix(mcp): prevent redundant MCP server discovery calls at runtime, use cached tool schema instead

* added backfill, added loading state for tools in settings > mcp

* fix tool input

* feat(rate-limiter): token bucket algorithm (#2270)

* fix(ratelimit): make deployed chat rate limited

* improvement(rate-limiter): use token bucket algo

* update docs

* fix

* fix type

* fix db rate limiter

* address greptile comments

* feat(i18n): update translations (#2275)

Co-authored-by: icecrasher321 <icecrasher321@users.noreply.github.com>

* fix(tools): updated kalshi and polymarket tools to accurately reflect outputs (#2274)

* feat(i18n): update translations (#2276)

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* fix(autolayout): align by handle (#2277)

* fix(autolayout): align by handle

* use shared constants everywhere

* cleanup

* fix(copilot): fix custom tools (#2278)

* Fix title custom tool

* Checkpoint (broken)

* Fix custom tool flash

* Edit workflow returns null fix

* Works

* Fix lint

* fix(ime): prevent form submission during IME composition steps (#2279)

* fix(ui): prevent form submission during IME composition steps

* chore(gitignore): add IntelliJ IDE files to .gitignore

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Waleed <walif6@gmail.com>
Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* feat(ui): logs, kb, emcn (#2207)

* feat(kb): emcn alignment; sidebar: popover primary; settings-modal: expand

* feat: EMCN breadcrumb; improvement(KB): UI

* fix: hydration error

* improvement(KB): UI

* feat: emcn modal sizing, KB tags; refactor: deleted old sidebar

* feat(logs): UI

* fix: add documents modal name

* feat: logs, emcn, cursorrules; refactor: logs

* feat: dashboard

* feat: notifications; improvement: logs details

* fixed random rectangle on canvas

* fixed the name of the file to align

* fix build

---------

Co-authored-by: waleed <walif6@gmail.com>

* fix(creds): glitch allowing multiple credentials in an integration (#2282)

* improvement: custom tools modal, logs-details (#2283)

* fix(docs): fix copy page button and header hook (#2284)

* improvement(chat): add the ability to download files from the deployed chat (#2280)

* added teams download and chat download file

* Removed comments

* removed comments

* component structure and download all

* removed comments

* cleanup code

* fix empty files case

* small fix

* fix(container): resize heuristic improvement (#2285)

* estimate block height for resize based on subblocks

* fix hydration error

* make more conservative

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: icecrasher321 <icecrasher321@users.noreply.github.com>
Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>
Co-authored-by: Siddharth Ganesan <33737564+Sg312@users.noreply.github.com>
Co-authored-by: mosa <mosaxiv@gmail.com>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com>
2025-12-10 00:57:58 -08:00
Waleed
52edbea659 v0.5.22: rss feed trigger, sftp tool, billing fixes, 413 surfacing, copilot improvements 2025-12-09 10:27:36 -08:00
Vikhyath Mondreti
d480057fd3 fix(migration): migration got removed by force push (#2253) 2025-12-08 14:08:12 -08:00
Waleed
c27c233da0 v0.5.21: google groups, virtualized code viewer, ui, autolayout, docs improvements 2025-12-08 13:10:50 -08:00
Waleed
ebef5f3a27 v0.5.20: google slides, ui fixes, subflow resizing improvements 2025-12-06 15:36:09 -08:00
Vikhyath Mondreti
12c4c2d44f v0.5.19: copilot fix 2025-12-05 15:27:31 -08:00
Vikhyath Mondreti
929a352edb fix(build): added trigger.dev sdk mock to tests (#2216) 2025-12-05 14:26:50 -08:00
Vikhyath Mondreti
6cd078b0fe v0.5.18: ui fixes, nextjs16, workspace notifications, admin APIs, loading improvements, new slack tools 2025-12-05 14:03:09 -08:00
Waleed
31874939ee v0.5.17: modals, billing fixes, bun update, zoom, dropbox, kalshi, polymarket, datadog, ahrefs, gitlab, shopify, ssh, wordpress integrations 2025-12-04 13:29:46 -08:00
Waleed
e157ce5fbc v0.5.16: MCP fixes, code refactors, jira fixes, new mistral models 2025-12-02 22:02:11 -08:00
Vikhyath Mondreti
774e5d585c v0.5.15: add tools, revert subblock prop change 2025-12-01 13:52:12 -08:00
Vikhyath Mondreti
54cc93743f v0.5.14: fix issue with teams, google selectors + cleanup code 2025-12-01 12:39:39 -08:00
Waleed
8c32ad4c0d v0.5.13: polling fixes, generic agent search tool, status page, smtp, sendgrid, linkedin, more tools (#2148)
* feat(tools): added smtp, sendgrid, mailgun, linkedin, fixed permissions in context menu (#2133)

* feat(tools): added twilio sendgrid integration

* feat(tools): added smtp, sendgrid, mailgun, fixed permissions in context menu

* added top level mocks for sporadically failing tests

* increase type safety

* fix(team-plans): track departed member usage so value not lost (#2118)

* fix(team-plans): track departed member usage so value not lost

* reset usage to 0 when they leave team

* prep merge with staging

* regen migrations

* fix org invite + ws selection

---------

Co-authored-by: Waleed <walif6@gmail.com>

* feat(i18n): update translations (#2134)

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* feat(creators): add verification for creators (#2135)

* feat(tools): added apify block/tools  (#2136)

* feat(tools): added apify

* cleanup

* feat(i18n): update translations (#2137)

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* feat(env): added more optional env var examples (#2138)

* feat(statuspage): added statuspage, updated list of tools in footer, renamed routes (#2139)

* feat(statuspage): added statuspage, updated list of tools in footer, renamed routes

* ack PR comments

* feat(tools): add generic search tool (#2140)

* feat(i18n): update translations (#2141)

* fix(sdks): bump sdk versions (#2142)

* fix(webhooks): count test webhooks towards usage limit (#2143)

* fix(bill): add requestId to webhook processing (#2144)

* improvement(subflow): remove all associated edges when moving a block into a subflow (#2145)

* improvement(subflow): remove all associated edges when moving a block into a subflow

* ack PR comments

* fix(polling): mark webhook failed on webhook trigger errors (#2146)

* fix(deps): declare core transient deps explicitly (#2147)

* fix(deps): declare core transient deps explicitly

* ack PR comments

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>
2025-12-01 10:15:36 -08:00
Waleed
1d08796853 v0.5.12: memory optimizations, sentry, incidentio, posthog, zendesk, pylon, intercom, mailchimp, loading optimizations (#2132)
* fix(memory-util): fixed unbounded array of gmail/outlook pollers causing high memory util, added missing db indexes/removed unused ones, auto-disable schedules/webhooks after 10 consecutive failures (#2115)

* fix(memory-util): fixed unbounded array of gmail/outlook pollers causing high memory util, added missing db indexes/removed unused ones, auto-disable schedules/webhooks after 10 consecutive failures

* ack PR comments

* ack

* improvement(teams-plan): seats increase simplification + not triggering checkout session (#2117)

* improvement(teams-plan): seats increase simplification + not triggering checkout session

* cleanup via helper

* feat(tools): added sentry, incidentio, and posthog tools (#2116)

* feat(tools): added sentry, incidentio, and posthog tools

* update docs

* fixed docs to use native fumadocs for llms.txt and copy markdown, fixed tool issues

* cleanup

* enhance error extractor, fixed posthog tools

* docs enhancements, cleanup

* added more incident io ops, remove zustand/shallow in favor of zustand/react/shallow

* fix type errors

* remove unnecessary comments

* added vllm to docs

* feat(i18n): update translations (#2120)

* feat(i18n): update translations

* fix build

---------

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* improvement(workflow-execution): perf improvements to passing workflow state + decrypted env vars (#2119)

* improvement(execution): load workflow state once instead of 2-3 times

* decrypt only in get helper

* remove comments

* remove comments

* feat(models): host google gemini models (#2122)

* feat(models): host google gemini models

* remove unused primary key

* feat(i18n): update translations (#2123)

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* feat(tools): added zendesk, pylon, intercom, & mailchimp (#2126)

* feat(tools): added zendesk, pylon, intercom, & mailchimp

* finish zendesk and pylon

* updated docs

* feat(i18n): update translations (#2129)

* feat(i18n): update translations

* fixed build

---------

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* fix(permissions): add client-side permissions validation to prevent unauthorized actions, upgraded custom tool modal (#2130)

* fix(permissions): add client-side permissions validation to prevent unauthorized actions, upgraded custom tool modal

* fix failing test

* fix test

* cleanup

* fix(custom-tools): add composite index on custom tool names & workspace id (#2131)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>
2025-11-28 16:08:06 -08:00
Waleed
ebcd243942 v0.5.11: stt, videogen, vllm, billing fixes, new models 2025-11-25 01:14:12 -08:00
Waleed
b7e814b721 v0.5.10: copilot upgrade, preprocessor, logs search, UI, code hygiene 2025-11-21 12:04:34 -08:00
Waleed
842ef27ed9 v0.5.9: add backwards compatibility for agent messages array 2025-11-20 11:19:42 -08:00
Vikhyath Mondreti
31c34b2ea3 v0.5.8: notifications, billing, ui changes, store loading state machine 2025-11-20 01:32:32 -08:00
Vikhyath Mondreti
8f0ef58056 v0.5.7: combobox selectors, usage indicator, workflow loading race condition, other improvements 2025-11-17 21:25:51 -08:00
13 changed files with 109 additions and 710 deletions

View File

@@ -188,6 +188,7 @@ DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
Then run the migrations:
```bash
cd apps/sim # Required so drizzle picks correct .env file
bunx drizzle-kit migrate --config=./drizzle.config.ts
```

View File

@@ -109,7 +109,7 @@ export default function Footer({ fullWidth = false }: FooterProps) {
{FOOTER_BLOCKS.map((block) => (
<Link
key={block}
href={`https://docs.sim.ai/blocks/${block.toLowerCase().replace(' ', '-')}`}
href={`https://docs.sim.ai/blocks/${block.toLowerCase().replaceAll(' ', '-')}`}
target='_blank'
rel='noopener noreferrer'
className='text-[14px] text-muted-foreground transition-colors hover:text-foreground'

View File

@@ -1,111 +0,0 @@
import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { loadBulkWorkflowsFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WorkspaceExportAPI')
/**
* GET /api/workspaces/[id]/export
* Export all workspace data (workflows with states, folders) in a single request.
* Much more efficient than fetching each workflow individually.
*/
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const startTime = Date.now()
const { id: workspaceId } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check if user has access to this workspace
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
if (!userPermission) {
return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
}
// Fetch all workflows and folders in parallel (2 queries)
const [workflows, folders] = await Promise.all([
db.select().from(workflow).where(eq(workflow.workspaceId, workspaceId)),
db.select().from(workflowFolder).where(eq(workflowFolder.workspaceId, workspaceId)),
])
const workflowIds = workflows.map((w) => w.id)
// Bulk load all workflow states (3 queries total via inArray)
const workflowStates = await loadBulkWorkflowsFromNormalizedTables(workflowIds)
// Build export data
const workflowsExport = workflows.map((w) => {
const state = workflowStates.get(w.id)
// Build the workflow state with defaults if no normalized data
const workflowState = state
? {
blocks: state.blocks,
edges: state.edges,
loops: state.loops,
parallels: state.parallels,
lastSaved: Date.now(),
isDeployed: w.isDeployed || false,
deployedAt: w.deployedAt,
}
: {
blocks: {},
edges: [],
loops: {},
parallels: {},
lastSaved: Date.now(),
isDeployed: w.isDeployed || false,
deployedAt: w.deployedAt,
}
// Extract variables from workflow record
const variables = Object.values((w.variables as Record<string, any>) || {}).map((v: any) => ({
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}))
return {
workflow: {
id: w.id,
name: w.name,
description: w.description,
color: w.color,
folderId: w.folderId,
},
state: workflowState,
variables,
}
})
const foldersExport = folders.map((f) => ({
id: f.id,
name: f.name,
parentId: f.parentId,
}))
const elapsed = Date.now() - startTime
logger.info(`Exported workspace ${workspaceId} in ${elapsed}ms`, {
workflowsCount: workflowsExport.length,
foldersCount: foldersExport.length,
})
return NextResponse.json({
workflows: workflowsExport,
folders: foldersExport,
})
} catch (error) {
const elapsed = Date.now() - startTime
logger.error(`Error exporting workspace ${workspaceId} after ${elapsed}ms:`, error)
return NextResponse.json({ error: 'Failed to export workspace' }, { status: 500 })
}
}

View File

@@ -101,9 +101,6 @@ const ACTION_VERBS = [
'Generated',
'Rendering',
'Rendered',
'Sleeping',
'Slept',
'Resumed',
] as const
/**
@@ -583,11 +580,6 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
(toolCall.state === (ClientToolCallState.executing as any) ||
toolCall.state === ('executing' as any))
const showWake =
toolCall.name === 'sleep' &&
(toolCall.state === (ClientToolCallState.executing as any) ||
toolCall.state === ('executing' as any))
const handleStateChange = (state: any) => {
forceUpdate({})
onStateChange?.(state)
@@ -1110,37 +1102,6 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
Move to Background
</Button>
</div>
) : showWake ? (
<div className='mt-[8px]'>
<Button
onClick={async () => {
try {
const instance = getClientTool(toolCall.id)
// Get elapsed seconds before waking
const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
// Transition to background state locally so UI updates immediately
// Pass elapsed seconds in the result so dynamic text can use it
instance?.setState?.((ClientToolCallState as any).background, {
result: { _elapsedSeconds: elapsedSeconds },
})
// Update the tool call params in the store to include elapsed time for display
const { updateToolCallParams } = useCopilotStore.getState()
updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
await instance?.markToolComplete?.(
200,
`User woke you up after ${Math.round(elapsedSeconds)} seconds`
)
// Optionally force a re-render; store should sync state from server
forceUpdate({})
onStateChange?.('background')
} catch {}
}}
variant='primary'
title='Wake'
>
Wake
</Button>
</div>
) : null}
</div>
)

View File

@@ -1,6 +1,9 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import { exportWorkspaceToZip } from '@/lib/workflows/operations/import-export'
import {
exportWorkspaceToZip,
type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'
const logger = createLogger('useExportWorkspace')
@@ -15,7 +18,8 @@ interface UseExportWorkspaceProps {
* Hook for managing workspace export to ZIP.
*
* Handles:
* - Fetching all workflows and folders from workspace via bulk export endpoint
* - Fetching all workflows and folders from workspace
* - Fetching workflow states and variables
* - Creating ZIP file with all workspace data
* - Downloading the ZIP file
* - Loading state management
@@ -38,13 +42,74 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
try {
logger.info('Exporting workspace', { workspaceId })
// Single API call to get all workspace data (workflows with states + folders)
const response = await fetch(`/api/workspaces/${workspaceId}/export`)
if (!response.ok) {
throw new Error('Failed to export workspace')
// Fetch all workflows in workspace
const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
if (!workflowsResponse.ok) {
throw new Error('Failed to fetch workflows')
}
const { data: workflows } = await workflowsResponse.json()
// Fetch all folders in workspace
const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
if (!foldersResponse.ok) {
throw new Error('Failed to fetch folders')
}
const foldersData = await foldersResponse.json()
// Export each workflow
const workflowsToExport: WorkflowExportData[] = []
for (const workflow of workflows) {
try {
const workflowResponse = await fetch(`/api/workflows/${workflow.id}`)
if (!workflowResponse.ok) {
logger.warn(`Failed to fetch workflow ${workflow.id}`)
continue
}
const { data: workflowData } = await workflowResponse.json()
if (!workflowData?.state) {
logger.warn(`Workflow ${workflow.id} has no state`)
continue
}
const variablesResponse = await fetch(`/api/workflows/${workflow.id}/variables`)
let workflowVariables: any[] = []
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}))
}
workflowsToExport.push({
workflow: {
id: workflow.id,
name: workflow.name,
description: workflow.description,
color: workflow.color,
folderId: workflow.folderId,
},
state: workflowData.state,
variables: workflowVariables,
})
} catch (error) {
logger.error(`Failed to export workflow ${workflow.id}:`, error)
}
}
const { workflows: workflowsToExport, folders: foldersToExport } = await response.json()
const foldersToExport: Array<{
id: string
name: string
parentId: string | null
}> = (foldersData.folders || []).map((folder: any) => ({
id: folder.id,
name: folder.name,
parentId: folder.parentId,
}))
const zipBlob = await exportWorkspaceToZip(
workspaceName,

View File

@@ -33,7 +33,6 @@ export const ToolIds = z.enum([
'knowledge_base',
'manage_custom_tool',
'manage_mcp_tool',
'sleep',
])
export type ToolId = z.infer<typeof ToolIds>
@@ -253,14 +252,6 @@ export const ToolArgSchemas = {
.optional()
.describe('Required for add and edit operations. The MCP server configuration.'),
}),
sleep: z.object({
seconds: z
.number()
.min(0)
.max(180)
.describe('The number of seconds to sleep (0-180, max 3 minutes)'),
}),
} as const
export type ToolArgSchemaMap = typeof ToolArgSchemas
@@ -327,7 +318,6 @@ export const ToolSSESchemas = {
knowledge_base: toolCallSSEFor('knowledge_base', ToolArgSchemas.knowledge_base),
manage_custom_tool: toolCallSSEFor('manage_custom_tool', ToolArgSchemas.manage_custom_tool),
manage_mcp_tool: toolCallSSEFor('manage_mcp_tool', ToolArgSchemas.manage_mcp_tool),
sleep: toolCallSSEFor('sleep', ToolArgSchemas.sleep),
} as const
export type ToolSSESchemaMap = typeof ToolSSESchemas
@@ -562,11 +552,6 @@ export const ToolResultSchemas = {
serverName: z.string().optional(),
message: z.string().optional(),
}),
sleep: z.object({
success: z.boolean(),
seconds: z.number(),
message: z.string().optional(),
}),
} as const
export type ToolResultSchemaMap = typeof ToolResultSchemas

View File

@@ -1,144 +0,0 @@
import { Loader2, MinusCircle, Moon, XCircle } from 'lucide-react'
import {
BaseClientTool,
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { createLogger } from '@/lib/logs/console/logger'
/** Maximum sleep duration in seconds (3 minutes) */
const MAX_SLEEP_SECONDS = 180
/** Track sleep start times for calculating elapsed time on wake */
const sleepStartTimes: Record<string, number> = {}
interface SleepArgs {
seconds?: number
}
/**
* Format seconds into a human-readable duration string
*/
function formatDuration(seconds: number): string {
if (seconds >= 60) {
return `${Math.round(seconds / 60)} minute${seconds >= 120 ? 's' : ''}`
}
return `${seconds} second${seconds !== 1 ? 's' : ''}`
}
export class SleepClientTool extends BaseClientTool {
static readonly id = 'sleep'
constructor(toolCallId: string) {
super(toolCallId, SleepClientTool.id, SleepClientTool.metadata)
}
static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Preparing to sleep', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Sleeping', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Sleeping', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Finished sleeping', icon: Moon },
[ClientToolCallState.error]: { text: 'Sleep interrupted', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Sleep skipped', icon: MinusCircle },
[ClientToolCallState.aborted]: { text: 'Sleep aborted', icon: MinusCircle },
[ClientToolCallState.background]: { text: 'Resumed', icon: Moon },
},
// No interrupt - auto-execute immediately
getDynamicText: (params, state) => {
const seconds = params?.seconds
if (typeof seconds === 'number' && seconds > 0) {
const displayTime = formatDuration(seconds)
switch (state) {
case ClientToolCallState.success:
return `Slept for ${displayTime}`
case ClientToolCallState.executing:
case ClientToolCallState.pending:
return `Sleeping for ${displayTime}`
case ClientToolCallState.generating:
return `Preparing to sleep for ${displayTime}`
case ClientToolCallState.error:
return `Failed to sleep for ${displayTime}`
case ClientToolCallState.rejected:
return `Skipped sleeping for ${displayTime}`
case ClientToolCallState.aborted:
return `Aborted sleeping for ${displayTime}`
case ClientToolCallState.background: {
// Calculate elapsed time from when sleep started
const elapsedSeconds = params?._elapsedSeconds
if (typeof elapsedSeconds === 'number' && elapsedSeconds > 0) {
return `Resumed after ${formatDuration(Math.round(elapsedSeconds))}`
}
return 'Resumed early'
}
}
}
return undefined
},
}
/**
* Get elapsed seconds since sleep started
*/
getElapsedSeconds(): number {
const startTime = sleepStartTimes[this.toolCallId]
if (!startTime) return 0
return (Date.now() - startTime) / 1000
}
async handleReject(): Promise<void> {
await super.handleReject()
this.setState(ClientToolCallState.rejected)
}
async handleAccept(args?: SleepArgs): Promise<void> {
const logger = createLogger('SleepClientTool')
// Use a timeout slightly longer than max sleep (3 minutes + buffer)
const timeoutMs = (MAX_SLEEP_SECONDS + 30) * 1000
await this.executeWithTimeout(async () => {
const params = args || {}
logger.debug('handleAccept() called', {
toolCallId: this.toolCallId,
state: this.getState(),
hasArgs: !!args,
seconds: params.seconds,
})
// Validate and clamp seconds
let seconds = typeof params.seconds === 'number' ? params.seconds : 0
if (seconds < 0) seconds = 0
if (seconds > MAX_SLEEP_SECONDS) seconds = MAX_SLEEP_SECONDS
logger.debug('Starting sleep', { seconds })
// Track start time for elapsed calculation
sleepStartTimes[this.toolCallId] = Date.now()
this.setState(ClientToolCallState.executing)
try {
// Sleep for the specified duration
await new Promise((resolve) => setTimeout(resolve, seconds * 1000))
logger.debug('Sleep completed successfully')
this.setState(ClientToolCallState.success)
await this.markToolComplete(200, `Slept for ${seconds} seconds`)
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
logger.error('Sleep failed', { error: message })
this.setState(ClientToolCallState.error)
await this.markToolComplete(500, message)
} finally {
// Clean up start time tracking
delete sleepStartTimes[this.toolCallId]
}
}, timeoutMs)
}
async execute(args?: SleepArgs): Promise<void> {
// Auto-execute without confirmation - go straight to executing
await this.handleAccept(args)
}
}

View File

@@ -8,7 +8,6 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
@@ -851,18 +850,13 @@ function applyOperationsToWorkflowState(
* Reorder operations to ensure correct execution sequence:
* 1. delete - Remove blocks first to free up IDs and clean state
* 2. extract_from_subflow - Extract blocks from subflows before modifications
* 3. add - Create new blocks (sorted by connection dependencies)
* 3. add - Create new blocks so they exist before being referenced
* 4. insert_into_subflow - Insert blocks into subflows (sorted by parent dependency)
* 5. edit - Edit existing blocks last, so connections to newly added blocks work
*
* This ordering is CRITICAL: operations may reference blocks being added/inserted
* in the same batch. Without proper ordering, target blocks wouldn't exist yet.
*
* For add operations, we use a two-pass approach:
* - Pass 1: Create all blocks (without connections)
* - Pass 2: Add all connections (now all blocks exist)
* This ensures that if block A connects to block B, and both are being added,
* B will exist when we try to create the edge from A to B.
* This ordering is CRITICAL: edit operations may reference blocks being added
* in the same batch (e.g., connecting block A to newly added block B).
* Without proper ordering, the target block wouldn't exist yet.
*/
const deletes = operations.filter((op) => op.operation_type === 'delete')
const extracts = operations.filter((op) => op.operation_type === 'extract_from_subflow')
@@ -874,8 +868,6 @@ function applyOperationsToWorkflowState(
// This handles cases where a loop/parallel is being added along with its children
const sortedInserts = topologicalSortInserts(inserts, adds)
// We'll process add operations in two passes (handled in the switch statement below)
// This is tracked via a separate flag to know which pass we're in
const orderedOperations: EditWorkflowOperation[] = [
...deletes,
...extracts,
@@ -885,46 +877,15 @@ function applyOperationsToWorkflowState(
]
logger.info('Operations after reordering:', {
totalOperations: orderedOperations.length,
deleteCount: deletes.length,
extractCount: extracts.length,
addCount: adds.length,
insertCount: sortedInserts.length,
editCount: edits.length,
operationOrder: orderedOperations.map(
order: orderedOperations.map(
(op) =>
`${op.operation_type}:${op.block_id}${op.params?.subflowId ? `(parent:${op.params.subflowId})` : ''}`
),
})
// Two-pass processing for add operations:
// Pass 1: Create all blocks (without connections)
// Pass 2: Add all connections (all blocks now exist)
const addOperationsWithConnections: Array<{
blockId: string
connections: Record<string, any>
}> = []
for (const operation of orderedOperations) {
const { operation_type, block_id, params } = operation
// CRITICAL: Validate block_id is a valid string and not "undefined"
// This prevents undefined keys from being set in the workflow state
if (!isValidKey(block_id)) {
logSkippedItem(skippedItems, {
type: 'missing_required_params',
operationType: operation_type,
blockId: String(block_id || 'invalid'),
reason: `Invalid block_id "${block_id}" (type: ${typeof block_id}) - operation skipped. Block IDs must be valid non-empty strings.`,
})
logger.error('Invalid block_id detected in operation', {
operation_type,
block_id,
block_id_type: typeof block_id,
})
continue
}
logger.debug(`Executing operation: ${operation_type} for block ${block_id}`, {
params: params ? Object.keys(params) : [],
currentBlockCount: Object.keys(modifiedState.blocks).length,
@@ -1167,22 +1128,6 @@ function applyOperationsToWorkflowState(
// Add new nested blocks
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
// Validate childId is a valid string
if (!isValidKey(childId)) {
logSkippedItem(skippedItems, {
type: 'missing_required_params',
operationType: 'add_nested_node',
blockId: String(childId || 'invalid'),
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
})
logger.error('Invalid childId detected in nestedNodes', {
parentBlockId: block_id,
childId,
childId_type: typeof childId,
})
return
}
const childBlockState = createBlockFromParams(
childId,
childBlock,
@@ -1415,22 +1360,6 @@ function applyOperationsToWorkflowState(
// Handle nested nodes (for loops/parallels created from scratch)
if (params.nestedNodes) {
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
// Validate childId is a valid string
if (!isValidKey(childId)) {
logSkippedItem(skippedItems, {
type: 'missing_required_params',
operationType: 'add_nested_node',
blockId: String(childId || 'invalid'),
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
})
logger.error('Invalid childId detected in nestedNodes', {
parentBlockId: block_id,
childId,
childId_type: typeof childId,
})
return
}
const childBlockState = createBlockFromParams(
childId,
childBlock,
@@ -1439,22 +1368,21 @@ function applyOperationsToWorkflowState(
)
modifiedState.blocks[childId] = childBlockState
// Defer connection processing to ensure all blocks exist first
if (childBlock.connections) {
addOperationsWithConnections.push({
blockId: childId,
connections: childBlock.connections,
})
addConnectionsAsEdges(
modifiedState,
childId,
childBlock.connections,
logger,
skippedItems
)
}
})
}
// Defer connection processing to ensure all blocks exist first (pass 2)
// Add connections as edges
if (params.connections) {
addOperationsWithConnections.push({
blockId: block_id,
connections: params.connections,
})
addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
}
break
}
@@ -1578,18 +1506,13 @@ function applyOperationsToWorkflowState(
modifiedState.blocks[block_id] = newBlock
}
// Defer connection processing to ensure all blocks exist first
// This is particularly important when multiple blocks are being inserted
// and they have connections to each other
// Add/update connections as edges
if (params.connections) {
// Remove existing edges from this block first
// Remove existing edges from this block
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
// Add to deferred connections list
addOperationsWithConnections.push({
blockId: block_id,
connections: params.connections,
})
// Add new connections
addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
}
break
}
@@ -1639,34 +1562,6 @@ function applyOperationsToWorkflowState(
}
}
// Pass 2: Add all deferred connections from add/insert operations
// Now all blocks exist (from add, insert, and edit operations), so connections can be safely created
// This ensures that if block A connects to block B, and both are being added/inserted,
// B will exist when we create the edge from A to B
if (addOperationsWithConnections.length > 0) {
logger.info('Processing deferred connections from add/insert operations', {
deferredConnectionCount: addOperationsWithConnections.length,
totalBlocks: Object.keys(modifiedState.blocks).length,
})
for (const { blockId, connections } of addOperationsWithConnections) {
// Verify the source block still exists (it might have been deleted by a later operation)
if (!modifiedState.blocks[blockId]) {
logger.warn('Source block no longer exists for deferred connection', {
blockId,
availableBlocks: Object.keys(modifiedState.blocks),
})
continue
}
addConnectionsAsEdges(modifiedState, blockId, connections, logger, skippedItems)
}
logger.info('Finished processing deferred connections', {
totalEdges: modifiedState.edges.length,
})
}
// Regenerate loops and parallels after modifications
modifiedState.loops = generateLoopBlocks(modifiedState.blocks)
modifiedState.parallels = generateParallelBlocks(modifiedState.blocks)

View File

@@ -2,7 +2,6 @@ import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockWithDiff } from '@/lib/workflows/diff/types'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { mergeSubblockState } from '@/stores/workflows/utils'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
@@ -538,17 +537,6 @@ export class WorkflowDiffEngine {
// First pass: build ID mappings
for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
// CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
if (!isValidKey(proposedId)) {
logger.error('Invalid proposedId detected in proposed state', {
proposedId,
proposedId_type: typeof proposedId,
blockType: proposedBlock?.type,
blockName: proposedBlock?.name,
})
continue
}
const key = `${proposedBlock.type}:${proposedBlock.name}`
// Check if this block exists in current state by type:name
@@ -564,31 +552,7 @@ export class WorkflowDiffEngine {
// Second pass: build final blocks with mapped IDs
for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
// CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
if (!isValidKey(proposedId)) {
logger.error('Invalid proposedId detected in proposed state (second pass)', {
proposedId,
proposedId_type: typeof proposedId,
blockType: proposedBlock?.type,
blockName: proposedBlock?.name,
})
continue
}
const finalId = idMap[proposedId]
// CRITICAL: Validate finalId before using as key
if (!isValidKey(finalId)) {
logger.error('Invalid finalId generated from idMap', {
proposedId,
finalId,
finalId_type: typeof finalId,
blockType: proposedBlock?.type,
blockName: proposedBlock?.name,
})
continue
}
const key = `${proposedBlock.type}:${proposedBlock.name}`
const existingBlock = existingBlockMap[key]?.block
@@ -653,8 +617,6 @@ export class WorkflowDiffEngine {
const { generateLoopBlocks, generateParallelBlocks } = await import(
'@/stores/workflows/workflow/utils'
)
// Build the proposed state
const finalProposedState: WorkflowState = {
blocks: finalBlocks,
edges: finalEdges,
@@ -663,9 +625,6 @@ export class WorkflowDiffEngine {
lastSaved: Date.now(),
}
// Use the proposed state directly - validation happens at the source
const fullyCleanedState = finalProposedState
// Transfer block heights from baseline workflow for better measurements in diff view
// If editing on top of diff, this transfers from the diff (which already has good heights)
// Otherwise transfers from original workflow
@@ -735,7 +694,7 @@ export class WorkflowDiffEngine {
'@/lib/workflows/autolayout/constants'
)
const layoutedBlocks = applyTargetedLayout(finalBlocks, fullyCleanedState.edges, {
const layoutedBlocks = applyTargetedLayout(finalBlocks, finalProposedState.edges, {
changedBlockIds: impactedBlockArray,
horizontalSpacing: DEFAULT_HORIZONTAL_SPACING,
verticalSpacing: DEFAULT_VERTICAL_SPACING,
@@ -783,7 +742,7 @@ export class WorkflowDiffEngine {
const layoutResult = applyNativeAutoLayout(
finalBlocks,
fullyCleanedState.edges,
finalProposedState.edges,
DEFAULT_LAYOUT_OPTIONS
)
@@ -865,7 +824,7 @@ export class WorkflowDiffEngine {
})
// Create edge identifiers for proposed state
fullyCleanedState.edges.forEach((edge) => {
finalEdges.forEach((edge) => {
const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
proposedEdgeSet.add(edgeId)
})
@@ -904,21 +863,21 @@ export class WorkflowDiffEngine {
}
}
// Apply diff markers to blocks in the fully cleaned state
// Apply diff markers to blocks
if (computed) {
for (const id of computed.new_blocks || []) {
if (fullyCleanedState.blocks[id]) {
;(fullyCleanedState.blocks[id] as any).is_diff = 'new'
if (finalBlocks[id]) {
finalBlocks[id].is_diff = 'new'
}
}
for (const id of computed.edited_blocks || []) {
if (fullyCleanedState.blocks[id]) {
;(fullyCleanedState.blocks[id] as any).is_diff = 'edited'
if (finalBlocks[id]) {
finalBlocks[id].is_diff = 'edited'
// Also mark specific subblocks that changed
if (computed.field_diffs?.[id]) {
const fieldDiff = computed.field_diffs[id]
const block = fullyCleanedState.blocks[id]
const block = finalBlocks[id]
// Apply diff markers to changed subblocks
for (const changedField of fieldDiff.changed_fields) {
@@ -930,12 +889,12 @@ export class WorkflowDiffEngine {
}
}
}
// Note: We don't remove deleted blocks from fullyCleanedState, just mark them
// Note: We don't remove deleted blocks from finalBlocks, just mark them
}
// Store the diff with the fully sanitized state
// Store the diff
this.currentDiff = {
proposedState: fullyCleanedState,
proposedState: finalProposedState,
diffAnalysis: computed,
metadata: {
source: 'workflow_state',
@@ -944,10 +903,10 @@ export class WorkflowDiffEngine {
}
logger.info('Successfully created diff from workflow state', {
blockCount: Object.keys(fullyCleanedState.blocks).length,
edgeCount: fullyCleanedState.edges.length,
hasLoops: Object.keys(fullyCleanedState.loops || {}).length > 0,
hasParallels: Object.keys(fullyCleanedState.parallels || {}).length > 0,
blockCount: Object.keys(finalProposedState.blocks).length,
edgeCount: finalProposedState.edges.length,
hasLoops: Object.keys(finalProposedState.loops || {}).length > 0,
hasParallels: Object.keys(finalProposedState.parallels || {}).length > 0,
newBlocks: computed?.new_blocks?.length || 0,
editedBlocks: computed?.edited_blocks?.length || 0,
deletedBlocks: computed?.deleted_blocks?.length || 0,
@@ -1137,17 +1096,6 @@ export function stripWorkflowDiffMarkers(state: WorkflowState): WorkflowState {
const cleanBlocks: Record<string, BlockState> = {}
for (const [blockId, block] of Object.entries(state.blocks || {})) {
// Validate block ID at the source - skip invalid IDs
if (!isValidKey(blockId)) {
logger.error('Invalid blockId detected in stripWorkflowDiffMarkers', {
blockId,
blockId_type: typeof blockId,
blockType: block?.type,
blockName: block?.name,
})
continue
}
const cleanBlock: BlockState = structuredClone(block)
const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
blockWithDiff.is_diff = undefined

View File

@@ -9,7 +9,7 @@ import {
workflowSubflows,
} from '@sim/db'
import type { InferSelectModel } from 'drizzle-orm'
import { and, desc, eq, inArray, sql } from 'drizzle-orm'
import { and, desc, eq, sql } from 'drizzle-orm'
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
@@ -602,178 +602,6 @@ export async function deployWorkflow(params: {
}
}
/**
* Bulk load workflow states for multiple workflows in a single set of queries.
* Much more efficient than calling loadWorkflowFromNormalizedTables for each workflow.
*/
export async function loadBulkWorkflowsFromNormalizedTables(
workflowIds: string[]
): Promise<Map<string, NormalizedWorkflowData>> {
const result = new Map<string, NormalizedWorkflowData>()
if (workflowIds.length === 0) {
return result
}
try {
// Load all components for all workflows in parallel (just 3 queries total)
const [allBlocks, allEdges, allSubflows] = await Promise.all([
db.select().from(workflowBlocks).where(inArray(workflowBlocks.workflowId, workflowIds)),
db.select().from(workflowEdges).where(inArray(workflowEdges.workflowId, workflowIds)),
db.select().from(workflowSubflows).where(inArray(workflowSubflows.workflowId, workflowIds)),
])
// Group blocks by workflow
const blocksByWorkflow = new Map<string, typeof allBlocks>()
for (const block of allBlocks) {
const existing = blocksByWorkflow.get(block.workflowId) || []
existing.push(block)
blocksByWorkflow.set(block.workflowId, existing)
}
// Group edges by workflow
const edgesByWorkflow = new Map<string, typeof allEdges>()
for (const edge of allEdges) {
const existing = edgesByWorkflow.get(edge.workflowId) || []
existing.push(edge)
edgesByWorkflow.set(edge.workflowId, existing)
}
// Group subflows by workflow
const subflowsByWorkflow = new Map<string, typeof allSubflows>()
for (const subflow of allSubflows) {
const existing = subflowsByWorkflow.get(subflow.workflowId) || []
existing.push(subflow)
subflowsByWorkflow.set(subflow.workflowId, existing)
}
// Process each workflow
for (const workflowId of workflowIds) {
const blocks = blocksByWorkflow.get(workflowId) || []
const edges = edgesByWorkflow.get(workflowId) || []
const subflows = subflowsByWorkflow.get(workflowId) || []
// Skip workflows with no blocks (not migrated yet)
if (blocks.length === 0) {
continue
}
// Convert blocks to the expected format
const blocksMap: Record<string, BlockState> = {}
blocks.forEach((block) => {
const blockData = block.data || {}
const assembled: BlockState = {
id: block.id,
type: block.type,
name: block.name,
position: {
x: Number(block.positionX),
y: Number(block.positionY),
},
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: Number(block.height),
subBlocks: (block.subBlocks as BlockState['subBlocks']) || {},
outputs: (block.outputs as BlockState['outputs']) || {},
data: blockData,
}
blocksMap[block.id] = assembled
})
// Sanitize any invalid custom tools in agent blocks
const { blocks: sanitizedBlocks } = sanitizeAgentToolsInBlocks(blocksMap)
// Migrate old agent block format to new messages array format
const migratedBlocks = migrateAgentBlocksToMessagesFormat(sanitizedBlocks)
// Convert edges to the expected format
const edgesArray: Edge[] = edges.map((edge) => ({
id: edge.id,
source: edge.sourceBlockId,
target: edge.targetBlockId,
sourceHandle: edge.sourceHandle ?? undefined,
targetHandle: edge.targetHandle ?? undefined,
type: 'default',
data: {},
}))
// Convert subflows to loops and parallels
const loops: Record<string, Loop> = {}
const parallels: Record<string, Parallel> = {}
subflows.forEach((subflow) => {
const config = (subflow.config ?? {}) as Partial<Loop & Parallel>
if (subflow.type === SUBFLOW_TYPES.LOOP) {
const loopType =
(config as Loop).loopType === 'for' ||
(config as Loop).loopType === 'forEach' ||
(config as Loop).loopType === 'while' ||
(config as Loop).loopType === 'doWhile'
? (config as Loop).loopType
: 'for'
const loop: Loop = {
id: subflow.id,
nodes: Array.isArray((config as Loop).nodes) ? (config as Loop).nodes : [],
iterations:
typeof (config as Loop).iterations === 'number' ? (config as Loop).iterations : 1,
loopType,
forEachItems: (config as Loop).forEachItems ?? '',
whileCondition: (config as Loop).whileCondition ?? '',
doWhileCondition: (config as Loop).doWhileCondition ?? '',
}
loops[subflow.id] = loop
// Sync block.data with loop config
if (migratedBlocks[subflow.id]) {
const block = migratedBlocks[subflow.id]
migratedBlocks[subflow.id] = {
...block,
data: {
...block.data,
collection: loop.forEachItems ?? block.data?.collection ?? '',
whileCondition: loop.whileCondition ?? block.data?.whileCondition ?? '',
doWhileCondition: loop.doWhileCondition ?? block.data?.doWhileCondition ?? '',
},
}
}
} else if (subflow.type === SUBFLOW_TYPES.PARALLEL) {
const parallel: Parallel = {
id: subflow.id,
nodes: Array.isArray((config as Parallel).nodes) ? (config as Parallel).nodes : [],
count: typeof (config as Parallel).count === 'number' ? (config as Parallel).count : 5,
distribution: (config as Parallel).distribution ?? '',
parallelType:
(config as Parallel).parallelType === 'count' ||
(config as Parallel).parallelType === 'collection'
? (config as Parallel).parallelType
: 'count',
}
parallels[subflow.id] = parallel
}
})
result.set(workflowId, {
blocks: migratedBlocks,
edges: edgesArray,
loops,
parallels,
isFromNormalizedTables: true,
})
}
return result
} catch (error) {
logger.error('Error bulk loading workflows from normalized tables:', error)
return result
}
}
/**
* Regenerates all IDs in a workflow state to avoid conflicts when duplicating or using templates
* Returns a new state with all IDs regenerated and references updated

View File

@@ -1,9 +0,0 @@
/**
* Checks if a key is valid (not undefined, null, empty, or literal "undefined"/"null")
* Use this to validate BEFORE setting a dynamic key on any object.
*/
export function isValidKey(key: unknown): key is string {
return (
!!key && typeof key === 'string' && key !== 'undefined' && key !== 'null' && key.trim() !== ''
)
}

View File

@@ -32,7 +32,6 @@ import { SearchDocumentationClientTool } from '@/lib/copilot/tools/client/other/
import { SearchErrorsClientTool } from '@/lib/copilot/tools/client/other/search-errors'
import { SearchOnlineClientTool } from '@/lib/copilot/tools/client/other/search-online'
import { SearchPatternsClientTool } from '@/lib/copilot/tools/client/other/search-patterns'
import { SleepClientTool } from '@/lib/copilot/tools/client/other/sleep'
import { createExecutionContext, getTool } from '@/lib/copilot/tools/client/registry'
import { GetCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-credentials'
import { SetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/set-environment-variables'
@@ -105,7 +104,6 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
navigate_ui: (id) => new NavigateUIClientTool(id),
manage_custom_tool: (id) => new ManageCustomToolClientTool(id),
manage_mcp_tool: (id) => new ManageMcpToolClientTool(id),
sleep: (id) => new SleepClientTool(id),
}
// Read-only static metadata for class-based tools (no instances)
@@ -143,7 +141,6 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
navigate_ui: (NavigateUIClientTool as any)?.metadata,
manage_custom_tool: (ManageCustomToolClientTool as any)?.metadata,
manage_mcp_tool: (ManageMcpToolClientTool as any)?.metadata,
sleep: (SleepClientTool as any)?.metadata,
}
function ensureClientToolInstance(toolName: string | undefined, toolCallId: string | undefined) {
@@ -2263,22 +2260,6 @@ export const useCopilotStore = create<CopilotStore>()(
set({ toolCallsById: map })
} catch {}
},
updateToolCallParams: (toolCallId: string, params: Record<string, any>) => {
try {
if (!toolCallId) return
const map = { ...get().toolCallsById }
const current = map[toolCallId]
if (!current) return
const updatedParams = { ...current.params, ...params }
map[toolCallId] = {
...current,
params: updatedParams,
display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams),
}
set({ toolCallsById: map })
} catch {}
},
updatePreviewToolCallState: (
toolCallState: 'accepted' | 'rejected' | 'error',
toolCallId?: string

View File

@@ -178,7 +178,6 @@ export interface CopilotActions {
toolCallId?: string
) => void
setToolCallState: (toolCall: any, newState: ClientToolCallState, options?: any) => void
updateToolCallParams: (toolCallId: string, params: Record<string, any>) => void
sendDocsMessage: (query: string, options?: { stream?: boolean; topK?: number }) => Promise<void>
saveChatMessages: (chatId: string) => Promise<void>