Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-18 18:25:14 -05:00)

Commit: Checkpoint
@@ -62,7 +62,7 @@ describe('sse-handlers tool lifecycle', () => {
     await sseHandlers.tool_call(
       {
         type: 'tool_call',
-        data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
+        data: { id: 'tool-1', name: 'read', arguments: { workflowId: 'workflow-1' } },
       } as any,
       context,
       execContext,
@@ -90,7 +90,7 @@ describe('sse-handlers tool lifecycle', () => {

     const event = {
       type: 'tool_call',
-      data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
+      data: { id: 'tool-dup', name: 'read', arguments: { workflowId: 'workflow-1' } },
     }

     await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
@@ -24,6 +24,12 @@ import {
  executeRedeploy,
} from './deployment-tools'
import { executeIntegrationToolDirect } from './integration-tools'
import {
  executeVfsGlob,
  executeVfsGrep,
  executeVfsList,
  executeVfsRead,
} from './vfs-tools'
import type {
  CheckDeploymentStatusParams,
  CreateFolderParams,
@@ -36,11 +42,8 @@ import type {
  GetBlockOutputsParams,
  GetBlockUpstreamReferencesParams,
  GetDeployedWorkflowStateParams,
  GetUserWorkflowParams,
  GetWorkflowDataParams,
  GetWorkflowFromNameParams,
  ListFoldersParams,
  ListUserWorkflowsParams,
  ListWorkspaceMcpServersParams,
  MoveFolderParams,
  MoveWorkflowParams,
@@ -59,11 +62,8 @@ import {
  executeGetBlockOutputs,
  executeGetBlockUpstreamReferences,
  executeGetDeployedWorkflowState,
  executeGetUserWorkflow,
  executeGetWorkflowData,
  executeGetWorkflowFromName,
  executeListFolders,
  executeListUserWorkflows,
  executeListUserWorkspaces,
  executeMoveFolder,
  executeMoveWorkflow,
@@ -319,17 +319,13 @@ async function executeManageCustomTool(
}

const SERVER_TOOLS = new Set<string>([
  'get_blocks_and_tools',
  'get_blocks_metadata',
  'get_block_options',
  'get_block_config',
  'get_trigger_blocks',
  'edit_workflow',
  'get_workflow_console',
  'search_documentation',
  'search_online',
  'set_environment_variables',
  'get_credentials',
  'make_api_request',
  'knowledge_base',
])
@@ -338,9 +334,6 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
  string,
  (params: Record<string, unknown>, context: ExecutionContext) => Promise<ToolCallResult>
> = {
  get_user_workflow: (p, c) => executeGetUserWorkflow(p as GetUserWorkflowParams, c),
  get_workflow_from_name: (p, c) => executeGetWorkflowFromName(p as GetWorkflowFromNameParams, c),
  list_user_workflows: (p, c) => executeListUserWorkflows(p as ListUserWorkflowsParams, c),
  list_user_workspaces: (_p, c) => executeListUserWorkspaces(c),
  list_folders: (p, c) => executeListFolders(p as ListFoldersParams, c),
  create_workflow: (p, c) => executeCreateWorkflow(p as CreateWorkflowParams, c),
@@ -416,6 +409,11 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
    }
  },
  manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
  // VFS tools
  grep: (p, c) => executeVfsGrep(p, c),
  glob: (p, c) => executeVfsGlob(p, c),
  read: (p, c) => executeVfsRead(p, c),
  list: (p, c) => executeVfsList(p, c),
}
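Note: a minimal sketch of how a tool_call could dispatch through the handler map
above; dispatchToolCall itself is hypothetical and not part of this diff:

    async function dispatchToolCall(
      name: string,
      params: Record<string, unknown>,
      context: ExecutionContext
    ): Promise<ToolCallResult> {
      const handler = SIM_WORKFLOW_TOOL_HANDLERS[name]
      if (!handler) return { success: false, error: `Unknown tool: ${name}` }
      // e.g. name === 'read' routes to executeVfsRead via the VFS entries added above
      return handler(params, context)
    }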

/**
@@ -5,19 +5,6 @@

// === Workflow Query Params ===

export interface GetUserWorkflowParams {
  workflowId?: string
}

export interface GetWorkflowFromNameParams {
  workflow_name?: string
}

export interface ListUserWorkflowsParams {
  workspaceId?: string
  folderId?: string
}

export interface GetWorkflowDataParams {
  workflowId?: string
  data_type?: string
apps/sim/lib/copilot/orchestrator/tool-executor/vfs-tools.ts (new file, 128 lines)
@@ -0,0 +1,128 @@
import { createLogger } from '@sim/logger'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { getOrMaterializeVFS } from '@/lib/copilot/vfs'

const logger = createLogger('VfsTools')

export async function executeVfsGrep(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const pattern = params.pattern as string | undefined
  if (!pattern) {
    return { success: false, error: "Missing required parameter 'pattern'" }
  }

  const workspaceId = context.workspaceId
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
    const result = vfs.grep(
      pattern,
      params.path as string | undefined,
      {
        maxResults: (params.maxResults as number) ?? 50,
        outputMode: (params.output_mode as 'content' | 'files_with_matches' | 'count') ?? 'content',
        ignoreCase: (params.ignoreCase as boolean) ?? false,
        lineNumbers: (params.lineNumbers as boolean) ?? true,
        context: (params.context as number) ?? 0,
      }
    )
    const outputMode = (params.output_mode as string) ?? 'content'
    const key = outputMode === 'files_with_matches' ? 'files' : outputMode === 'count' ? 'counts' : 'matches'
    return { success: true, output: { [key]: result } }
  } catch (err) {
    logger.error('vfs_grep failed', {
      error: err instanceof Error ? err.message : String(err),
    })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_grep failed' }
  }
}

export async function executeVfsGlob(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const pattern = params.pattern as string | undefined
  if (!pattern) {
    return { success: false, error: "Missing required parameter 'pattern'" }
  }

  const workspaceId = context.workspaceId
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
    const files = vfs.glob(pattern)
    return { success: true, output: { files } }
  } catch (err) {
    logger.error('vfs_glob failed', {
      error: err instanceof Error ? err.message : String(err),
    })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_glob failed' }
  }
}

export async function executeVfsRead(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const path = params.path as string | undefined
  if (!path) {
    return { success: false, error: "Missing required parameter 'path'" }
  }

  const workspaceId = context.workspaceId
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
    const result = vfs.read(
      path,
      params.offset as number | undefined,
      params.limit as number | undefined
    )
    if (!result) {
      return { success: false, error: `File not found: ${path}` }
    }
    return { success: true, output: result }
  } catch (err) {
    logger.error('vfs_read failed', {
      error: err instanceof Error ? err.message : String(err),
    })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_read failed' }
  }
}

export async function executeVfsList(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const path = params.path as string | undefined
  if (!path) {
    return { success: false, error: "Missing required parameter 'path'" }
  }

  const workspaceId = context.workspaceId
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
    const entries = vfs.list(path)
    return { success: true, output: { entries } }
  } catch (err) {
    logger.error('vfs_list failed', {
      error: err instanceof Error ? err.message : String(err),
    })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_list failed' }
  }
}
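Note: a usage sketch for the new executor, assuming only the code above. The params
mirror the option mapping inside executeVfsGrep; context is an ExecutionContext with
workspaceId and userId set:

    const res = await executeVfsGrep(
      { pattern: 'starter', path: '/workflows', output_mode: 'count', ignoreCase: true },
      context
    )
    // on success, res.output is { counts: ... } because output_mode is 'count';
    // 'files_with_matches' yields { files: ... } and the default mode yields { matches: ... }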
@@ -2,10 +2,7 @@ import { db } from '@sim/db'
 import { customTools, permissions, workflow, workflowFolder, workspace } from '@sim/db/schema'
 import { and, asc, desc, eq, isNull, or } from 'drizzle-orm'
 import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
-import {
-  formatNormalizedWorkflowForCopilot,
-  normalizeWorkflowName,
-} from '@/lib/copilot/tools/shared/workflow-utils'
+import { formatNormalizedWorkflowForCopilot } from '@/lib/copilot/tools/shared/workflow-utils'
 import { mcpService } from '@/lib/mcp/service'
 import { listWorkspaceFiles } from '@/lib/uploads/contexts/workspace'
 import { getEffectiveBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
@@ -22,116 +19,16 @@ import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
  ensureWorkflowAccess,
  ensureWorkspaceAccess,
  getAccessibleWorkflowsForUser,
  getDefaultWorkspaceId,
} from '../access'
import type {
  GetBlockOutputsParams,
  GetBlockUpstreamReferencesParams,
  GetDeployedWorkflowStateParams,
  GetUserWorkflowParams,
  GetWorkflowDataParams,
  GetWorkflowFromNameParams,
  ListFoldersParams,
  ListUserWorkflowsParams,
} from '../param-types'

export async function executeGetUserWorkflow(
  params: GetUserWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }

    const { workflow: workflowRecord, workspaceId } = await ensureWorkflowAccess(
      workflowId,
      context.userId
    )

    const normalized = await loadWorkflowFromNormalizedTables(workflowId)
    const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
    if (!userWorkflow) {
      return { success: false, error: 'Workflow has no normalized data' }
    }

    return {
      success: true,
      output: {
        workflowId,
        workflowName: workflowRecord.name || '',
        workspaceId,
        userWorkflow,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeGetWorkflowFromName(
  params: GetWorkflowFromNameParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowName = typeof params.workflow_name === 'string' ? params.workflow_name.trim() : ''
    if (!workflowName) {
      return { success: false, error: 'workflow_name is required' }
    }

    const workflows = await getAccessibleWorkflowsForUser(context.userId)

    const targetName = normalizeWorkflowName(workflowName)
    const match = workflows.find((w) => normalizeWorkflowName(w.name) === targetName)
    if (!match) {
      return { success: false, error: `Workflow not found: ${workflowName}` }
    }

    const normalized = await loadWorkflowFromNormalizedTables(match.id)
    const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
    if (!userWorkflow) {
      return { success: false, error: 'Workflow has no normalized data' }
    }

    return {
      success: true,
      output: {
        workflowId: match.id,
        workflowName: match.name || '',
        workspaceId: match.workspaceId,
        userWorkflow,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeListUserWorkflows(
  params: ListUserWorkflowsParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workspaceId = params?.workspaceId as string | undefined
    const folderId = params?.folderId as string | undefined

    const workflows = await getAccessibleWorkflowsForUser(context.userId, { workspaceId, folderId })

    const workflowList = workflows.map((w) => ({
      workflowId: w.id,
      workflowName: w.name || '',
      workspaceId: w.workspaceId,
      folderId: w.folderId,
    }))

    return { success: true, output: { workflows: workflowList } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeListUserWorkspaces(
  context: ExecutionContext
): Promise<ToolCallResult> {
@@ -1,6 +1,5 @@
import type { LucideIcon } from 'lucide-react'
import {
  Blocks,
  BookOpen,
  Bug,
  Check,
@@ -9,7 +8,6 @@ import {
  ClipboardCheck,
  Compass,
  Database,
  FileCode,
  FileText,
  FlaskConical,
  GitBranch,
@@ -19,9 +17,7 @@ import {
  Grid2x2Check,
  Grid2x2X,
  Info,
  Key,
  KeyRound,
  ListChecks,
  ListFilter,
  ListTodo,
  Loader2,
@@ -41,13 +37,11 @@ import {
  Sparkles,
  Tag,
  TerminalSquare,
  WorkflowIcon,
  Wrench,
  X,
  XCircle,
  Zap,
} from 'lucide-react'
import { getLatestBlock } from '@/blocks/registry'
import { getCustomTool } from '@/hooks/queries/custom-tools'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -629,86 +623,6 @@ const META_evaluate: ToolMetadata = {
  },
}

const META_get_block_config: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Getting block config', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Getting block config', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Getting block config', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Retrieved block config', icon: FileCode },
    [ClientToolCallState.error]: { text: 'Failed to get block config', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted getting block config', icon: XCircle },
    [ClientToolCallState.rejected]: {
      text: 'Skipped getting block config',
      icon: MinusCircle,
    },
  },
  getDynamicText: (params, state) => {
    if (params?.blockType && typeof params.blockType === 'string') {
      const blockConfig = getLatestBlock(params.blockType)
      const blockName = (blockConfig?.name ?? params.blockType.replace(/_/g, ' ')).toLowerCase()
      const opSuffix = params.operation ? ` (${params.operation})` : ''

      switch (state) {
        case ClientToolCallState.success:
          return `Retrieved ${blockName}${opSuffix} config`
        case ClientToolCallState.executing:
        case ClientToolCallState.generating:
        case ClientToolCallState.pending:
          return `Retrieving ${blockName}${opSuffix} config`
        case ClientToolCallState.error:
          return `Failed to retrieve ${blockName}${opSuffix} config`
        case ClientToolCallState.aborted:
          return `Aborted retrieving ${blockName}${opSuffix} config`
        case ClientToolCallState.rejected:
          return `Skipped retrieving ${blockName}${opSuffix} config`
      }
    }
    return undefined
  },
}

const META_get_block_options: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Getting block operations', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Getting block operations', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Getting block operations', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Retrieved block operations', icon: ListFilter },
    [ClientToolCallState.error]: { text: 'Failed to get block operations', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted getting block operations', icon: XCircle },
    [ClientToolCallState.rejected]: {
      text: 'Skipped getting block operations',
      icon: MinusCircle,
    },
  },
  getDynamicText: (params, state) => {
    const blockId =
      (params as any)?.blockId ||
      (params as any)?.blockType ||
      (params as any)?.block_id ||
      (params as any)?.block_type
    if (typeof blockId === 'string') {
      const blockConfig = getLatestBlock(blockId)
      const blockName = (blockConfig?.name ?? blockId.replace(/_/g, ' ')).toLowerCase()

      switch (state) {
        case ClientToolCallState.success:
          return `Retrieved ${blockName} operations`
        case ClientToolCallState.executing:
        case ClientToolCallState.generating:
        case ClientToolCallState.pending:
          return `Retrieving ${blockName} operations`
        case ClientToolCallState.error:
          return `Failed to retrieve ${blockName} operations`
        case ClientToolCallState.aborted:
          return `Aborted retrieving ${blockName} operations`
        case ClientToolCallState.rejected:
          return `Skipped retrieving ${blockName} operations`
      }
    }
    return undefined
  },
}

const META_get_block_outputs: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Getting block outputs', icon: Loader2 },
@@ -767,19 +681,6 @@ const META_get_block_upstream_references: ToolMetadata = {
  },
}

const META_get_blocks_and_tools: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Exploring available options', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Exploring available options', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Exploring available options', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Explored available options', icon: Blocks },
    [ClientToolCallState.error]: { text: 'Failed to explore options', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted exploring options', icon: MinusCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped exploring options', icon: MinusCircle },
  },
  interrupt: undefined,
}

const META_get_blocks_metadata: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Searching block choices', icon: Loader2 },
@@ -821,27 +722,6 @@ const META_get_blocks_metadata: ToolMetadata = {
  },
}

const META_get_credentials: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Fetching connected integrations', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Fetching connected integrations', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Fetching connected integrations', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Fetched connected integrations', icon: Key },
    [ClientToolCallState.error]: {
      text: 'Failed to fetch connected integrations',
      icon: XCircle,
    },
    [ClientToolCallState.aborted]: {
      text: 'Aborted fetching connected integrations',
      icon: MinusCircle,
    },
    [ClientToolCallState.rejected]: {
      text: 'Skipped fetching connected integrations',
      icon: MinusCircle,
    },
  },
}

const META_get_examples_rag: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Fetching examples', icon: Loader2 },
@@ -989,41 +869,6 @@ const META_get_trigger_examples: ToolMetadata = {
  interrupt: undefined,
}

const META_get_user_workflow: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Reading your workflow', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Reading your workflow', icon: WorkflowIcon },
    [ClientToolCallState.executing]: { text: 'Reading your workflow', icon: Loader2 },
    [ClientToolCallState.aborted]: { text: 'Aborted reading your workflow', icon: XCircle },
    [ClientToolCallState.success]: { text: 'Read your workflow', icon: WorkflowIcon },
    [ClientToolCallState.error]: { text: 'Failed to read your workflow', icon: X },
    [ClientToolCallState.rejected]: { text: 'Skipped reading your workflow', icon: XCircle },
  },
  getDynamicText: (params, state) => {
    const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
    if (workflowId) {
      const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name
      if (workflowName) {
        switch (state) {
          case ClientToolCallState.success:
            return `Read ${workflowName}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Reading ${workflowName}`
          case ClientToolCallState.error:
            return `Failed to read ${workflowName}`
          case ClientToolCallState.aborted:
            return `Aborted reading ${workflowName}`
          case ClientToolCallState.rejected:
            return `Skipped reading ${workflowName}`
        }
      }
    }
    return undefined
  },
}

const META_get_workflow_console: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Fetching execution logs', icon: Loader2 },
@@ -1106,39 +951,6 @@ const META_get_workflow_data: ToolMetadata = {
  },
}

const META_get_workflow_from_name: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Reading workflow', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Reading workflow', icon: FileText },
    [ClientToolCallState.executing]: { text: 'Reading workflow', icon: Loader2 },
    [ClientToolCallState.aborted]: { text: 'Aborted reading workflow', icon: XCircle },
    [ClientToolCallState.success]: { text: 'Read workflow', icon: FileText },
    [ClientToolCallState.error]: { text: 'Failed to read workflow', icon: X },
    [ClientToolCallState.rejected]: { text: 'Skipped reading workflow', icon: XCircle },
  },
  getDynamicText: (params, state) => {
    if (params?.workflow_name && typeof params.workflow_name === 'string') {
      const workflowName = params.workflow_name

      switch (state) {
        case ClientToolCallState.success:
          return `Read ${workflowName}`
        case ClientToolCallState.executing:
        case ClientToolCallState.generating:
        case ClientToolCallState.pending:
          return `Reading ${workflowName}`
        case ClientToolCallState.error:
          return `Failed to read ${workflowName}`
        case ClientToolCallState.aborted:
          return `Aborted reading ${workflowName}`
        case ClientToolCallState.rejected:
          return `Skipped reading ${workflowName}`
      }
    }
    return undefined
  },
}

const META_info: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Getting info', icon: Loader2 },
@@ -1230,18 +1042,6 @@ const META_knowledge_base: ToolMetadata = {
  },
}

const META_list_user_workflows: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Listing your workflows', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Listing your workflows', icon: ListChecks },
    [ClientToolCallState.executing]: { text: 'Listing your workflows', icon: Loader2 },
    [ClientToolCallState.aborted]: { text: 'Aborted listing workflows', icon: XCircle },
    [ClientToolCallState.success]: { text: 'Listed your workflows', icon: ListChecks },
    [ClientToolCallState.error]: { text: 'Failed to list workflows', icon: X },
    [ClientToolCallState.rejected]: { text: 'Skipped listing workflows', icon: XCircle },
  },
}

const META_list_workspace_mcp_servers: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: {
@@ -2543,13 +2343,9 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
  edit: META_edit,
  edit_workflow: META_edit_workflow,
  evaluate: META_evaluate,
  get_block_config: META_get_block_config,
  get_block_options: META_get_block_options,
  get_block_outputs: META_get_block_outputs,
  get_block_upstream_references: META_get_block_upstream_references,
  get_blocks_and_tools: META_get_blocks_and_tools,
  get_blocks_metadata: META_get_blocks_metadata,
  get_credentials: META_get_credentials,
  generate_api_key: META_generate_api_key,
  get_examples_rag: META_get_examples_rag,
  get_operations_examples: META_get_operations_examples,
@@ -2557,14 +2353,11 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
  get_platform_actions: META_get_platform_actions,
  get_trigger_blocks: META_get_trigger_blocks,
  get_trigger_examples: META_get_trigger_examples,
  get_user_workflow: META_get_user_workflow,
  get_workflow_console: META_get_workflow_console,
  get_workflow_data: META_get_workflow_data,
  get_workflow_from_name: META_get_workflow_from_name,
  info: META_info,
  knowledge: META_knowledge,
  knowledge_base: META_knowledge_base,
  list_user_workflows: META_list_user_workflows,
  list_workspace_mcp_servers: META_list_workspace_mcp_servers,
  make_api_request: META_make_api_request,
  manage_custom_tool: META_manage_custom_tool,
@@ -27,25 +27,6 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
      properties: {},
    },
  },
  {
    name: 'list_workflows',
    toolId: 'list_user_workflows',
    description:
      'List all workflows the user has access to. Returns workflow IDs, names, workspace, and folder info. Use workspaceId/folderId to scope results.',
    inputSchema: {
      type: 'object',
      properties: {
        workspaceId: {
          type: 'string',
          description: 'Optional workspace ID to filter workflows.',
        },
        folderId: {
          type: 'string',
          description: 'Optional folder ID to filter workflows.',
        },
      },
    },
  },
  {
    name: 'list_folders',
    toolId: 'list_folders',
@@ -62,22 +43,6 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
      required: ['workspaceId'],
    },
  },
  {
    name: 'get_workflow',
    toolId: 'get_user_workflow',
    description:
      'Get a workflow by ID. Returns the full workflow definition including all blocks, connections, and configuration.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'Workflow ID to retrieve.',
        },
      },
      required: ['workflowId'],
    },
  },
  {
    name: 'create_workflow',
    toolId: 'create_workflow',
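Note: the removed entries above show the DirectToolDef shape (name, toolId,
description, JSON-schema input). A hypothetical def for the new VFS read tool,
sketched only for illustration; this exact def is not part of the diff:

    {
      name: 'read',
      toolId: 'read',
      description: 'Read a file from the workspace VFS.',
      inputSchema: {
        type: 'object',
        properties: {
          path: { type: 'string', description: 'VFS path to read.' },
          offset: { type: 'number', description: 'Optional starting line.' },
          limit: { type: 'number', description: 'Optional maximum number of lines.' },
        },
        required: ['path'],
      },
    },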
@@ -1,493 +0,0 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
  GetBlockConfigInput,
  type GetBlockConfigInputType,
  GetBlockConfigResult,
  type GetBlockConfigResultType,
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
import { isHiddenFromDisplay, type SubBlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { PROVIDER_DEFINITIONS } from '@/providers/models'
import { tools as toolsRegistry } from '@/tools/registry'
import { getTrigger, isTriggerValid } from '@/triggers'

interface InputFieldSchema {
  type: string
  description?: string
  placeholder?: string
  required?: boolean
  options?: string[]
  default?: any
  min?: number
  max?: number
}

/**
 * Gets all available models from PROVIDER_DEFINITIONS as static options.
 * This provides fallback data when store state is not available server-side.
 */
function getStaticModelOptions(): string[] {
  const models: string[] = []

  for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
    // Skip providers with dynamic/fetched models
    if (provider.id === 'ollama' || provider.id === 'vllm' || provider.id === 'openrouter') {
      continue
    }
    if (provider?.models) {
      for (const model of provider.models) {
        models.push(model.id)
      }
    }
  }

  return models
}

/**
 * Attempts to call a dynamic options function with fallback data injected.
 */
function callOptionsWithFallback(optionsFn: () => any[]): any[] | undefined {
  const staticModels = getStaticModelOptions()

  const mockProvidersState = {
    providers: {
      base: { models: staticModels },
      ollama: { models: [] },
      vllm: { models: [] },
      openrouter: { models: [] },
    },
  }

  let originalGetState: (() => any) | undefined
  let store: any

  try {
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    store = require('@/stores/providers')
    if (store?.useProvidersStore?.getState) {
      originalGetState = store.useProvidersStore.getState
      store.useProvidersStore.getState = () => mockProvidersState
    }
  } catch {
    // Store module not available
  }

  try {
    return optionsFn()
  } finally {
    if (store?.useProvidersStore && originalGetState) {
      store.useProvidersStore.getState = originalGetState
    }
  }
}

/**
 * Resolves options from a subBlock, handling both static arrays and dynamic functions
 */
function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
  // Skip if subblock uses fetchOptions (async network calls)
  if (sb.fetchOptions) {
    return undefined
  }

  let rawOptions: any[] | undefined

  try {
    if (typeof sb.options === 'function') {
      rawOptions = callOptionsWithFallback(sb.options)
    } else {
      rawOptions = sb.options
    }
  } catch {
    return undefined
  }

  if (!Array.isArray(rawOptions) || rawOptions.length === 0) {
    return undefined
  }

  // Return the actual option ID/value that edit_workflow expects, not the display label
  return rawOptions
    .map((opt: any) => {
      if (!opt) return undefined
      if (typeof opt === 'object') {
        return opt.id || opt.label // Prefer id (actual value) over label (display name)
      }
      return String(opt)
    })
    .filter((o): o is string => o !== undefined)
}

interface OutputFieldSchema {
  type: string
  description?: string
  properties?: Record<string, OutputFieldSchema>
  items?: { type: string }
}

/**
 * Resolves the condition to check if it matches the given operation
 */
function matchesOperation(condition: any, operation: string): boolean {
  if (!condition) return false

  const cond = typeof condition === 'function' ? condition() : condition
  if (!cond) return false

  if (cond.field === 'operation' && !cond.not) {
    const values = Array.isArray(cond.value) ? cond.value : [cond.value]
    return values.includes(operation)
  }

  return false
}

/**
 * Extracts input schema from subBlocks
 */
function extractInputsFromSubBlocks(
  subBlocks: SubBlockConfig[],
  operation?: string,
  triggerMode?: boolean
): Record<string, InputFieldSchema> {
  const inputs: Record<string, InputFieldSchema> = {}

  for (const sb of subBlocks) {
    // Handle trigger vs non-trigger mode filtering
    if (triggerMode) {
      // In trigger mode, only include subBlocks with mode: 'trigger'
      if (sb.mode !== 'trigger') continue
    } else {
      // In non-trigger mode, skip trigger-mode subBlocks
      if (sb.mode === 'trigger') continue
    }

    // Skip hidden subBlocks
    if (sb.hidden) continue

    // If operation is specified, only include subBlocks that:
    // 1. Have no condition (common parameters)
    // 2. Have a condition matching the operation
    if (operation) {
      const condition = typeof sb.condition === 'function' ? sb.condition() : sb.condition
      if (condition) {
        if (condition.field === 'operation' && !condition.not) {
          // This is an operation-specific field
          const values = Array.isArray(condition.value) ? condition.value : [condition.value]
          if (!values.includes(operation)) {
            continue // Skip if doesn't match our operation
          }
        } else if (!matchesOperation(condition, operation)) {
          // Other condition that doesn't match
          continue
        }
      }
    }

    const field: InputFieldSchema = {
      type: mapSubBlockTypeToSchemaType(sb.type),
    }

    if (sb.description) field.description = sb.description
    if (sb.title && !sb.description) field.description = sb.title
    if (sb.placeholder) field.placeholder = sb.placeholder

    // Handle required
    if (typeof sb.required === 'boolean') {
      field.required = sb.required
    } else if (typeof sb.required === 'object') {
      field.required = true // Has conditional requirement
    }

    // Handle options using the resolver that handles dynamic model lists
    const resolvedOptions = resolveSubBlockOptions(sb)
    if (resolvedOptions && resolvedOptions.length > 0) {
      field.options = resolvedOptions
    }

    // Handle default value
    if (sb.defaultValue !== undefined) {
      field.default = sb.defaultValue
    }

    // Handle numeric constraints
    if (sb.min !== undefined) field.min = sb.min
    if (sb.max !== undefined) field.max = sb.max

    inputs[sb.id] = field
  }

  return inputs
}

/**
 * Maps subBlock type to a simplified schema type
 */
function mapSubBlockTypeToSchemaType(type: string): string {
  const typeMap: Record<string, string> = {
    'short-input': 'string',
    'long-input': 'string',
    code: 'string',
    dropdown: 'string',
    combobox: 'string',
    slider: 'number',
    switch: 'boolean',
    'tool-input': 'json',
    'checkbox-list': 'array',
    'grouped-checkbox-list': 'array',
    'condition-input': 'json',
    'eval-input': 'json',
    'time-input': 'string',
    'oauth-input': 'credential',
    'file-selector': 'string',
    'project-selector': 'string',
    'channel-selector': 'string',
    'user-selector': 'string',
    'folder-selector': 'string',
    'knowledge-base-selector': 'string',
    'document-selector': 'string',
    'mcp-server-selector': 'string',
    'mcp-tool-selector': 'string',
    table: 'json',
    'file-upload': 'file',
    'messages-input': 'array',
  }

  return typeMap[type] || 'string'
}

/**
 * Extracts a single output field schema, including nested properties
 */
function extractOutputField(def: any): OutputFieldSchema {
  if (typeof def === 'string') {
    return { type: def }
  }

  if (typeof def !== 'object' || def === null) {
    return { type: 'any' }
  }

  const field: OutputFieldSchema = {
    type: def.type || 'any',
  }

  if (def.description) {
    field.description = def.description
  }

  // Include nested properties if present
  if (def.properties && typeof def.properties === 'object') {
    field.properties = {}
    for (const [propKey, propDef] of Object.entries(def.properties)) {
      field.properties[propKey] = extractOutputField(propDef)
    }
  }

  // Include items schema for arrays
  if (def.items && typeof def.items === 'object') {
    field.items = { type: def.items.type || 'any' }
  }

  return field
}

/**
 * Extracts trigger outputs from the first available trigger
 */
function extractTriggerOutputs(blockConfig: any): Record<string, OutputFieldSchema> {
  const outputs: Record<string, OutputFieldSchema> = {}

  if (!blockConfig.triggers?.enabled || !blockConfig.triggers?.available?.length) {
    return outputs
  }

  // Get the first available trigger's outputs as a baseline
  const triggerId = blockConfig.triggers.available[0]
  if (triggerId && isTriggerValid(triggerId)) {
    const trigger = getTrigger(triggerId)
    if (trigger.outputs) {
      for (const [key, def] of Object.entries(trigger.outputs)) {
        if (isHiddenFromDisplay(def)) continue
        outputs[key] = extractOutputField(def)
      }
    }
  }

  return outputs
}

/**
 * Extracts output schema from block config or tool
 */
function extractOutputs(
  blockConfig: any,
  operation?: string,
  triggerMode?: boolean
): Record<string, OutputFieldSchema> {
  const outputs: Record<string, OutputFieldSchema> = {}

  // In trigger mode, return trigger outputs
  if (triggerMode && blockConfig.triggers?.enabled) {
    return extractTriggerOutputs(blockConfig)
  }

  // If operation is specified, try to get outputs from the specific tool
  if (operation) {
    try {
      const toolSelector = blockConfig.tools?.config?.tool
      if (typeof toolSelector === 'function') {
        const toolId = toolSelector({ operation })
        const tool = toolsRegistry[toolId]
        if (tool?.outputs) {
          for (const [key, def] of Object.entries(tool.outputs)) {
            if (isHiddenFromDisplay(def)) continue
            outputs[key] = extractOutputField(def)
          }
          return outputs
        }
      }
    } catch {
      // Fall through to block-level outputs
    }
  }

  // Use block-level outputs
  if (blockConfig.outputs) {
    for (const [key, def] of Object.entries(blockConfig.outputs)) {
      if (isHiddenFromDisplay(def)) continue
      outputs[key] = extractOutputField(def)
    }
  }

  return outputs
}

export const getBlockConfigServerTool: BaseServerTool<
  GetBlockConfigInputType,
  GetBlockConfigResultType
> = {
  name: 'get_block_config',
  inputSchema: GetBlockConfigInput,
  outputSchema: GetBlockConfigResult,
  async execute(
    { blockType, operation, trigger }: GetBlockConfigInputType,
    context?: { userId: string }
  ): Promise<GetBlockConfigResultType> {
    const logger = createLogger('GetBlockConfigServerTool')
    logger.debug('Executing get_block_config', { blockType, operation, trigger })

    if (blockType === 'loop') {
      const result = {
        blockType,
        blockName: 'Loop',
        operation,
        trigger,
        inputs: {
          loopType: {
            type: 'string',
            description: 'Loop type',
            options: ['for', 'forEach', 'while', 'doWhile'],
            default: 'for',
          },
          iterations: {
            type: 'number',
            description: 'Number of iterations (for loop type "for")',
          },
          collection: {
            type: 'string',
            description: 'Collection to iterate (for loop type "forEach")',
          },
          condition: {
            type: 'string',
            description: 'Loop condition (for loop types "while" and "doWhile")',
          },
        },
        outputs: {},
      }
      return GetBlockConfigResult.parse(result)
    }

    if (blockType === 'parallel') {
      const result = {
        blockType,
        blockName: 'Parallel',
        operation,
        trigger,
        inputs: {
          parallelType: {
            type: 'string',
            description: 'Parallel type',
            options: ['count', 'collection'],
            default: 'count',
          },
          count: {
            type: 'number',
            description: 'Number of parallel branches (for parallel type "count")',
          },
          collection: {
            type: 'string',
            description: 'Collection to branch over (for parallel type "collection")',
          },
        },
        outputs: {},
      }
      return GetBlockConfigResult.parse(result)
    }

    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
    const allowedIntegrations = permissionConfig?.allowedIntegrations

    if (allowedIntegrations != null && !allowedIntegrations.includes(blockType)) {
      throw new Error(`Block "${blockType}" is not available`)
    }

    const blockConfig = blockRegistry[blockType]
    if (!blockConfig) {
      throw new Error(`Block not found: ${blockType}`)
    }

    // Validate trigger mode is supported for this block
    if (trigger && !blockConfig.triggers?.enabled && !blockConfig.triggerAllowed) {
      throw new Error(
        `Block "${blockType}" does not support trigger mode. Only blocks with triggers.enabled or triggerAllowed can be used in trigger mode.`
      )
    }

    // If operation is specified, validate it exists
    if (operation) {
      const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
      if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
        const validOperations = operationSubBlock.options.map((o) =>
          typeof o === 'object' ? o.id : o
        )
        if (!validOperations.includes(operation)) {
          throw new Error(
            `Invalid operation "${operation}" for block "${blockType}". Valid operations: ${validOperations.join(', ')}`
          )
        }
      }
    }

    const subBlocks = Array.isArray(blockConfig.subBlocks) ? blockConfig.subBlocks : []
    const inputs = extractInputsFromSubBlocks(subBlocks, operation, trigger)
    const outputs = extractOutputs(blockConfig, operation, trigger)

    const latestBlock = getLatestBlock(blockType)
    const displayName = latestBlock?.name ?? blockConfig.name

    const result = {
      blockType,
      blockName: displayName,
      operation,
      trigger,
      inputs,
      outputs,
    }

    return GetBlockConfigResult.parse(result)
  },
}
@@ -1,130 +0,0 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
  GetBlockOptionsInput,
  type GetBlockOptionsInputType,
  GetBlockOptionsResult,
  type GetBlockOptionsResultType,
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { tools as toolsRegistry } from '@/tools/registry'

export const getBlockOptionsServerTool: BaseServerTool<
  GetBlockOptionsInputType,
  GetBlockOptionsResultType
> = {
  name: 'get_block_options',
  inputSchema: GetBlockOptionsInput,
  outputSchema: GetBlockOptionsResult,
  async execute(
    { blockId }: GetBlockOptionsInputType,
    context?: { userId: string }
  ): Promise<GetBlockOptionsResultType> {
    const logger = createLogger('GetBlockOptionsServerTool')
    logger.debug('Executing get_block_options', { blockId })

    if (blockId === 'loop') {
      const result = {
        blockId,
        blockName: 'Loop',
        operations: [
          { id: 'for', name: 'For', description: 'Run a fixed number of iterations.' },
          { id: 'forEach', name: 'For each', description: 'Iterate over a collection.' },
          { id: 'while', name: 'While', description: 'Repeat while a condition is true.' },
          {
            id: 'doWhile',
            name: 'Do while',
            description: 'Run once, then repeat while a condition is true.',
          },
        ],
      }
      return GetBlockOptionsResult.parse(result)
    }

    if (blockId === 'parallel') {
      const result = {
        blockId,
        blockName: 'Parallel',
        operations: [
          { id: 'count', name: 'Count', description: 'Run a fixed number of parallel branches.' },
          {
            id: 'collection',
            name: 'Collection',
            description: 'Run one branch per collection item.',
          },
        ],
      }
      return GetBlockOptionsResult.parse(result)
    }

    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
    const allowedIntegrations = permissionConfig?.allowedIntegrations

    if (allowedIntegrations != null && !allowedIntegrations.includes(blockId)) {
      throw new Error(`Block "${blockId}" is not available`)
    }

    const blockConfig = blockRegistry[blockId]
    if (!blockConfig) {
      throw new Error(`Block not found: ${blockId}`)
    }

    const operations: { id: string; name: string; description?: string }[] = []

    // Check if block has an operation dropdown to determine available operations
    const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
    if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
      // Block has operations - get tool info for each operation
      for (const option of operationSubBlock.options) {
        const opId = typeof option === 'object' ? option.id : option
        const opLabel = typeof option === 'object' ? option.label : option

        // Try to resolve the tool for this operation
        let toolDescription: string | undefined
        try {
          const toolSelector = blockConfig.tools?.config?.tool
          if (typeof toolSelector === 'function') {
            const toolId = toolSelector({ operation: opId })
            const tool = toolsRegistry[toolId]
            if (tool) {
              toolDescription = tool.description
            }
          }
        } catch {
          // Tool resolution failed, continue without description
        }

        operations.push({
          id: opId,
          name: opLabel || opId,
          description: toolDescription,
        })
      }
    } else {
      // No operation dropdown - list all accessible tools
      const accessibleTools = blockConfig.tools?.access || []
      for (const toolId of accessibleTools) {
        const tool = toolsRegistry[toolId]
        if (tool) {
          operations.push({
            id: toolId,
            name: tool.name || toolId,
            description: tool.description,
          })
        }
      }
    }

    const latestBlock = getLatestBlock(blockId)
    const displayName = latestBlock?.name ?? blockConfig.name

    const result = {
      blockId,
      blockName: displayName,
      operations,
    }

    return GetBlockOptionsResult.parse(result)
  },
}
@@ -1,68 +0,0 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { GetBlocksAndToolsInput, GetBlocksAndToolsResult } from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'

export const getBlocksAndToolsServerTool: BaseServerTool<
  ReturnType<typeof GetBlocksAndToolsInput.parse>,
  ReturnType<typeof GetBlocksAndToolsResult.parse>
> = {
  name: 'get_blocks_and_tools',
  inputSchema: GetBlocksAndToolsInput,
  outputSchema: GetBlocksAndToolsResult,
  async execute(_args: unknown, context?: { userId: string }) {
    const logger = createLogger('GetBlocksAndToolsServerTool')
    logger.debug('Executing get_blocks_and_tools')

    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
    const allowedIntegrations = permissionConfig?.allowedIntegrations

    type BlockListItem = {
      type: string
      name: string
      description?: string
      triggerAllowed?: boolean
    }
    const blocks: BlockListItem[] = []

    Object.entries(blockRegistry)
      .filter(([blockType, blockConfig]: [string, BlockConfig]) => {
        if (blockConfig.hideFromToolbar) return false
        if (allowedIntegrations != null && !allowedIntegrations.includes(blockType)) return false
        return true
      })
      .forEach(([blockType, blockConfig]: [string, BlockConfig]) => {
        blocks.push({
          type: blockType,
          name: blockConfig.name,
          description: blockConfig.longDescription,
          triggerAllowed: 'triggerAllowed' in blockConfig ? !!blockConfig.triggerAllowed : false,
        })
      })

    const specialBlocks: Record<string, { name: string; description: string }> = {
      loop: {
        name: 'Loop',
        description:
          'Control flow block for iterating over collections or repeating actions in a loop',
      },
      parallel: {
        name: 'Parallel',
        description: 'Control flow block for executing multiple branches simultaneously',
      },
    }
    Object.entries(specialBlocks).forEach(([blockType, info]) => {
      if (!blocks.some((b) => b.type === blockType)) {
        blocks.push({
          type: blockType,
          name: info.name,
          description: info.description,
        })
      }
    })

    return GetBlocksAndToolsResult.parse({ blocks })
  },
}
@@ -1,8 +1,5 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool, ServerToolContext } from '@/lib/copilot/tools/server/base-tool'
import { getBlockConfigServerTool } from '@/lib/copilot/tools/server/blocks/get-block-config'
import { getBlockOptionsServerTool } from '@/lib/copilot/tools/server/blocks/get-block-options'
import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-and-tools'
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
@@ -22,10 +19,7 @@ const logger = createLogger('ServerToolRouter')

/** Registry of all server tools. Tools self-declare their validation schemas. */
const serverToolRegistry: Record<string, BaseServerTool> = {
  [getBlocksAndToolsServerTool.name]: getBlocksAndToolsServerTool,
  [getBlocksMetadataServerTool.name]: getBlocksMetadataServerTool,
  [getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
  [getBlockConfigServerTool.name]: getBlockConfigServerTool,
  [getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
  [editWorkflowServerTool.name]: editWorkflowServerTool,
  [getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
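Note: a hedged sketch of how a name can be routed through serverToolRegistry;
runServerTool is hypothetical and not shown in this diff, but inputSchema.parse
matches how the tools above declare their zod schemas:

    async function runServerTool(name: string, args: unknown, ctx?: ServerToolContext) {
      const tool = serverToolRegistry[name]
      if (!tool) throw new Error(`Unknown server tool: ${name}`)
      // validate args with the tool's self-declared zod schema, if any
      const input = tool.inputSchema ? tool.inputSchema.parse(args) : args
      return tool.execute(input, ctx)
    }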
@@ -7,22 +7,6 @@ export const ExecuteResponseSuccessSchema = z.object({
})
export type ExecuteResponseSuccess = z.infer<typeof ExecuteResponseSuccessSchema>

// get_blocks_and_tools
export const GetBlocksAndToolsInput = z.object({})
export const GetBlocksAndToolsResult = z.object({
  blocks: z.array(
    z
      .object({
        type: z.string(),
        name: z.string(),
        triggerAllowed: z.boolean().optional(),
        longDescription: z.string().optional(),
      })
      .passthrough()
  ),
})
export type GetBlocksAndToolsResultType = z.infer<typeof GetBlocksAndToolsResult>

// get_blocks_metadata
export const GetBlocksMetadataInput = z.object({ blockIds: z.array(z.string()).min(1) })
export const GetBlocksMetadataResult = z.object({ metadata: z.record(z.any()) })
@@ -35,41 +19,6 @@ export const GetTriggerBlocksResult = z.object({
})
export type GetTriggerBlocksResultType = z.infer<typeof GetTriggerBlocksResult>

// get_block_options
export const GetBlockOptionsInput = z.object({
  blockId: z.string(),
})
export const GetBlockOptionsResult = z.object({
  blockId: z.string(),
  blockName: z.string(),
  operations: z.array(
    z.object({
      id: z.string(),
      name: z.string(),
      description: z.string().optional(),
    })
  ),
})
export type GetBlockOptionsInputType = z.infer<typeof GetBlockOptionsInput>
export type GetBlockOptionsResultType = z.infer<typeof GetBlockOptionsResult>

// get_block_config
export const GetBlockConfigInput = z.object({
  blockType: z.string(),
  operation: z.string().optional(),
  trigger: z.boolean().optional(),
})
export const GetBlockConfigResult = z.object({
  blockType: z.string(),
  blockName: z.string(),
  operation: z.string().optional(),
  trigger: z.boolean().optional(),
  inputs: z.record(z.any()),
  outputs: z.record(z.any()),
})
export type GetBlockConfigInputType = z.infer<typeof GetBlockConfigInput>
export type GetBlockConfigResultType = z.infer<typeof GetBlockConfigResult>

// knowledge_base - shared schema used by client tool, server tool, and registry
export const KnowledgeBaseArgsSchema = z.object({
  operation: z.enum([
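Note: the removed GetBlocksAndToolsResult wrapped its block object in zod's
.passthrough(), which keeps unrecognized keys instead of stripping them; a minimal
illustration, assuming only zod itself:

    const Block = z.object({ type: z.string(), name: z.string() }).passthrough()
    Block.parse({ type: 'loop', name: 'Loop', extra: 1 })
    // => { type: 'loop', name: 'Loop', extra: 1 }; 'extra' survives parsing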
apps/sim/lib/copilot/vfs/index.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
export { WorkspaceVFS, getOrMaterializeVFS } from '@/lib/copilot/vfs/workspace-vfs'
export type {
  GrepMatch,
  GrepOptions,
  GrepOutputMode,
  GrepCountEntry,
  ReadResult,
  DirEntry,
} from '@/lib/copilot/vfs/operations'
export {
  serializeBlockSchema,
  serializeDocuments,
  serializeIntegrationSchema,
  serializeKBMeta,
  serializeRecentExecutions,
  serializeWorkflowMeta,
} from '@/lib/copilot/vfs/serializers'
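Note: with this barrel in place, consumers can import from '@/lib/copilot/vfs'
directly, as vfs-tools.ts above already does:

    import { getOrMaterializeVFS, type GrepOptions } from '@/lib/copilot/vfs'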
apps/sim/lib/copilot/vfs/operations.ts (new file, 237 lines)
@@ -0,0 +1,237 @@
export interface GrepMatch {
  path: string
  line: number
  content: string
}

export type GrepOutputMode = 'content' | 'files_with_matches' | 'count'

export interface GrepOptions {
  maxResults?: number
  outputMode?: GrepOutputMode
  ignoreCase?: boolean
  lineNumbers?: boolean
  context?: number
}

export interface GrepCountEntry {
  path: string
  count: number
}

export interface ReadResult {
  content: string
  totalLines: number
}

export interface DirEntry {
  name: string
  type: 'file' | 'dir'
}

/**
 * Regex search over VFS file contents.
 * Supports multiple output modes: content (default), files_with_matches, count.
 */
export function grep(
  files: Map<string, string>,
  pattern: string,
  path?: string,
  opts?: GrepOptions
): GrepMatch[] | string[] | GrepCountEntry[] {
  const maxResults = opts?.maxResults ?? 100
  const outputMode = opts?.outputMode ?? 'content'
  const ignoreCase = opts?.ignoreCase ?? false
  const showLineNumbers = opts?.lineNumbers ?? true
  const contextLines = opts?.context ?? 0

  const flags = ignoreCase ? 'gi' : 'g'
  let regex: RegExp
  try {
    regex = new RegExp(pattern, flags)
  } catch {
    return []
  }

  if (outputMode === 'files_with_matches') {
    const matchingFiles: string[] = []
    for (const [filePath, content] of files) {
      if (path && !filePath.startsWith(path)) continue
      regex.lastIndex = 0
      if (regex.test(content)) {
        matchingFiles.push(filePath)
        if (matchingFiles.length >= maxResults) break
      }
    }
    return matchingFiles
  }

  if (outputMode === 'count') {
    const counts: GrepCountEntry[] = []
    for (const [filePath, content] of files) {
      if (path && !filePath.startsWith(path)) continue
      const lines = content.split('\n')
      let count = 0
      for (const line of lines) {
        regex.lastIndex = 0
        if (regex.test(line)) count++
      }
      if (count > 0) {
        counts.push({ path: filePath, count })
        if (counts.length >= maxResults) break
      }
    }
    return counts
  }

  // Default: 'content' mode
  const matches: GrepMatch[] = []
  for (const [filePath, content] of files) {
    if (path && !filePath.startsWith(path)) continue

    const lines = content.split('\n')
    for (let i = 0; i < lines.length; i++) {
      regex.lastIndex = 0
      if (regex.test(lines[i])) {
        if (contextLines > 0) {
          const start = Math.max(0, i - contextLines)
          const end = Math.min(lines.length - 1, i + contextLines)
          for (let j = start; j <= end; j++) {
            matches.push({
              path: filePath,
              line: showLineNumbers ? j + 1 : 0,
              content: lines[j],
            })
          }
        } else {
          matches.push({
            path: filePath,
            line: showLineNumbers ? i + 1 : 0,
            content: lines[i],
          })
        }
        if (matches.length >= maxResults) return matches
      }
    }
  }

  return matches
}
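// Illustrative calls for the three output modes (the file map below is a
// hypothetical example, not part of the committed module):
//   const demo = new Map([
//     ['workflows/a/meta.json', '{"name":"a"}'],
//     ['workflows/b/meta.json', '{"name":"b"}'],
//   ])
//   grep(demo, 'name')                                      // GrepMatch[] with 1-based line numbers
//   grep(demo, 'name', 'workflows/a/')                      // restricted to a path prefix
//   grep(demo, 'name', undefined, { outputMode: 'count' })  // [{ path, count }] per matching file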

/**
 * Convert a glob pattern to a RegExp.
 * Supports *, **, and ? wildcards.
 */
function globToRegExp(pattern: string): RegExp {
  let regexStr = '^'
  let i = 0
  while (i < pattern.length) {
    const ch = pattern[i]
    if (ch === '*') {
      if (pattern[i + 1] === '*') {
        // ** matches any number of path segments
        if (pattern[i + 2] === '/') {
          regexStr += '(?:.+/)?'
          i += 3
        } else {
          regexStr += '.*'
          i += 2
        }
      } else {
        // * matches anything except /
        regexStr += '[^/]*'
        i++
      }
    } else if (ch === '?') {
      regexStr += '[^/]'
      i++
    } else if ('.+^${}()|[]\\'.includes(ch)) {
      regexStr += '\\' + ch
      i++
    } else {
      regexStr += ch
      i++
    }
  }
  regexStr += '$'
  return new RegExp(regexStr)
}

/**
 * Glob pattern matching against VFS file paths.
 * Returns matching file paths.
 */
export function glob(files: Map<string, string>, pattern: string): string[] {
  const regex = globToRegExp(pattern)
  const result: string[] = []
  for (const filePath of files.keys()) {
    if (regex.test(filePath)) {
      result.push(filePath)
    }
  }
  return result.sort()
}
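// Illustrative pattern translations (inputs are hypothetical examples):
//   glob(files, 'workflows/*/meta.json')     // meta.json exactly one level under workflows/
//   glob(files, 'workflows/**/*.json')       // any .json at any depth under workflows/
//   glob(files, 'components/blocks/a?.json') // '?' matches a single non-slash character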

/**
 * Read a VFS file's content, optionally with offset and limit.
 * Returns null if the file does not exist.
 */
export function read(
  files: Map<string, string>,
  path: string,
  offset?: number,
  limit?: number
): ReadResult | null {
  const content = files.get(path)
  if (content === undefined) return null

  const lines = content.split('\n')
  const totalLines = lines.length

  if (offset !== undefined || limit !== undefined) {
    const start = offset ?? 0
    const end = limit !== undefined ? start + limit : lines.length
    return {
      content: lines.slice(start, end).join('\n'),
      totalLines,
    }
  }

  return { content, totalLines }
}
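// Illustrative reads (hypothetical file map, not part of the committed module):
//   const demo = new Map([['a.txt', 'l1\nl2\nl3\nl4']])
//   read(demo, 'a.txt')        // { content: 'l1\nl2\nl3\nl4', totalLines: 4 }
//   read(demo, 'a.txt', 1, 2)  // { content: 'l2\nl3', totalLines: 4 } (offset is 0-based)
//   read(demo, 'missing.txt')  // null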

/**
 * List entries in a VFS directory path.
 * Returns files and subdirectories at the given path level.
 */
export function list(files: Map<string, string>, path: string): DirEntry[] {
  const normalizedPath = path.endsWith('/') ? path : path + '/'
  const seen = new Set<string>()
  const entries: DirEntry[] = []

  for (const filePath of files.keys()) {
    if (!filePath.startsWith(normalizedPath)) continue

    const remainder = filePath.slice(normalizedPath.length)
    if (!remainder) continue

    const slashIndex = remainder.indexOf('/')
    if (slashIndex === -1) {
      if (!seen.has(remainder)) {
        seen.add(remainder)
        entries.push({ name: remainder, type: 'file' })
      }
    } else {
      const dirName = remainder.slice(0, slashIndex)
      if (!seen.has(dirName)) {
        seen.add(dirName)
        entries.push({ name: dirName, type: 'dir' })
      }
    }
  }

  return entries.sort((a, b) => {
    if (a.type !== b.type) return a.type === 'dir' ? -1 : 1
    return a.name.localeCompare(b.name)
  })
}
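A quick sanity sketch for `list` under an assumed file map (paths invented): entries collapse to one path level, deduplicated, with directories sorted ahead of files.

const demo = new Map<string, string>([
  ['workflows/alpha/meta.json', '{}'],
  ['workflows/alpha/blocks.json', '{}'],
  ['workflows/readme.txt', 'hi'],
])
list(demo, 'workflows')
// -> [{ name: 'alpha', type: 'dir' }, { name: 'readme.txt', type: 'file' }]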
282 apps/sim/lib/copilot/vfs/serializers.ts Normal file
@@ -0,0 +1,282 @@
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
import type { ToolConfig } from '@/tools/types'

/**
 * Serialize workflow metadata for VFS meta.json
 */
export function serializeWorkflowMeta(wf: {
  id: string
  name: string
  description?: string | null
  isDeployed: boolean
  deployedAt?: Date | null
  runCount: number
  lastRunAt?: Date | null
  createdAt: Date
  updatedAt: Date
}): string {
  return JSON.stringify(
    {
      id: wf.id,
      name: wf.name,
      description: wf.description || undefined,
      isDeployed: wf.isDeployed,
      deployedAt: wf.deployedAt?.toISOString(),
      runCount: wf.runCount,
      lastRunAt: wf.lastRunAt?.toISOString(),
      createdAt: wf.createdAt.toISOString(),
      updatedAt: wf.updatedAt.toISOString(),
    },
    null,
    2
  )
}

/**
 * Serialize execution logs for VFS executions.json.
 * Takes recent execution log rows and produces a summary.
 */
export function serializeRecentExecutions(
  executions: Array<{
    id: string
    executionId: string
    status: string
    trigger: string
    startedAt: Date
    endedAt?: Date | null
    totalDurationMs?: number | null
  }>
): string {
  return JSON.stringify(
    executions.map((e) => ({
      executionId: e.executionId,
      status: e.status,
      trigger: e.trigger,
      startedAt: e.startedAt.toISOString(),
      endedAt: e.endedAt?.toISOString(),
      durationMs: e.totalDurationMs,
    })),
    null,
    2
  )
}

/**
 * Serialize knowledge base metadata for VFS meta.json
 */
export function serializeKBMeta(kb: {
  id: string
  name: string
  description?: string | null
  embeddingModel: string
  embeddingDimension: number
  tokenCount: number
  createdAt: Date
  updatedAt: Date
  documentCount: number
}): string {
  return JSON.stringify(
    {
      id: kb.id,
      name: kb.name,
      description: kb.description || undefined,
      embeddingModel: kb.embeddingModel,
      embeddingDimension: kb.embeddingDimension,
      tokenCount: kb.tokenCount,
      documentCount: kb.documentCount,
      createdAt: kb.createdAt.toISOString(),
      updatedAt: kb.updatedAt.toISOString(),
    },
    null,
    2
  )
}

/**
 * Serialize documents list for VFS documents.json (metadata only, no content)
 */
export function serializeDocuments(
  docs: Array<{
    id: string
    filename: string
    fileSize: number
    mimeType: string
    chunkCount: number
    tokenCount: number
    processingStatus: string
    enabled: boolean
    uploadedAt: Date
  }>
): string {
  return JSON.stringify(
    docs.map((d) => ({
      id: d.id,
      filename: d.filename,
      fileSize: d.fileSize,
      mimeType: d.mimeType,
      chunkCount: d.chunkCount,
      tokenCount: d.tokenCount,
      processingStatus: d.processingStatus,
      enabled: d.enabled,
      uploadedAt: d.uploadedAt.toISOString(),
    })),
    null,
    2
  )
}

/**
 * Serialize a SubBlockConfig for the VFS component schema.
 * Strips functions and UI-only fields.
 */
function serializeSubBlock(sb: SubBlockConfig): Record<string, unknown> {
  const result: Record<string, unknown> = {
    id: sb.id,
    type: sb.type,
  }
  if (sb.title) result.title = sb.title
  if (sb.required === true) result.required = true
  if (sb.defaultValue !== undefined) result.defaultValue = sb.defaultValue
  if (sb.mode) result.mode = sb.mode
  if (sb.canonicalParamId) result.canonicalParamId = sb.canonicalParamId
  return result
}

/**
 * Serialize a block schema for VFS components/blocks/{type}.json
 */
export function serializeBlockSchema(block: BlockConfig): string {
  return JSON.stringify(
    {
      type: block.type,
      name: block.name,
      description: block.description,
      category: block.category,
      longDescription: block.longDescription || undefined,
      bestPractices: block.bestPractices || undefined,
      triggerAllowed: block.triggerAllowed || undefined,
      singleInstance: block.singleInstance || undefined,
      tools: block.tools.access,
      subBlocks: block.subBlocks.map(serializeSubBlock),
      inputs: block.inputs,
      outputs: Object.fromEntries(
        Object.entries(block.outputs)
          .filter(([key]) => key !== 'visualization')
          .map(([key, val]) => [
            key,
            typeof val === 'string'
              ? { type: val }
              : { type: val.type, description: (val as { description?: string }).description },
          ])
      ),
    },
    null,
    2
  )
}

/**
 * Serialize OAuth credentials for VFS environment/credentials.json.
 * Shows which integrations are connected — IDs and scopes, NOT tokens.
 */
export function serializeCredentials(
  accounts: Array<{
    providerId: string
    scope: string | null
    createdAt: Date
  }>
): string {
  return JSON.stringify(
    accounts.map((a) => ({
      provider: a.providerId,
      scope: a.scope || undefined,
      connectedAt: a.createdAt.toISOString(),
    })),
    null,
    2
  )
}

/**
 * Serialize API keys for VFS environment/api-keys.json.
 * Shows key names and types — NOT the actual key values.
 */
export function serializeApiKeys(
  keys: Array<{
    id: string
    name: string
    type: string
    lastUsed: Date | null
    createdAt: Date
    expiresAt: Date | null
  }>
): string {
  return JSON.stringify(
    keys.map((k) => ({
      id: k.id,
      name: k.name,
      type: k.type,
      lastUsed: k.lastUsed?.toISOString(),
      createdAt: k.createdAt.toISOString(),
      expiresAt: k.expiresAt?.toISOString(),
    })),
    null,
    2
  )
}

/**
 * Serialize environment variables for VFS environment/variables.json.
 * Shows variable NAMES only — NOT values.
 */
export function serializeEnvironmentVariables(
  personalVarNames: string[],
  workspaceVarNames: string[]
): string {
  return JSON.stringify(
    {
      personal: personalVarNames,
      workspace: workspaceVarNames,
    },
    null,
    2
  )
}

/**
 * Serialize an integration/tool schema for VFS components/integrations/{service}/{operation}.json
 */
export function serializeIntegrationSchema(tool: ToolConfig): string {
  return JSON.stringify(
    {
      id: tool.id,
      name: tool.name,
      description: tool.description,
      version: tool.version,
      oauth: tool.oauth
        ? { required: tool.oauth.required, provider: tool.oauth.provider }
        : undefined,
      params: Object.fromEntries(
        Object.entries(tool.params).map(([key, val]) => [
          key,
          {
            type: val.type,
            required: val.required,
            description: val.description,
            default: val.default,
          },
        ])
      ),
      outputs: tool.outputs
        ? Object.fromEntries(
            Object.entries(tool.outputs).map(([key, val]) => [
              key,
              { type: val.type, description: val.description },
            ])
          )
        : undefined,
    },
    null,
    2
  )
}
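To make the serialized shapes concrete, the simplest serializer above behaves as follows (variable names invented for illustration):

serializeEnvironmentVariables(['OPENAI_API_KEY'], ['SHARED_WEBHOOK_SECRET'])
// returns:
// {
//   "personal": [
//     "OPENAI_API_KEY"
//   ],
//   "workspace": [
//     "SHARED_WEBHOOK_SECRET"
//   ]
// }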
396 apps/sim/lib/copilot/vfs/workspace-vfs.ts Normal file
@@ -0,0 +1,396 @@
import { db } from '@sim/db'
import {
  account,
  apiKey,
  document,
  environment,
  knowledgeBase,
  workflow,
  workspaceEnvironment,
  workflowExecutionLogs,
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, count, desc, eq, isNull } from 'drizzle-orm'
import { getAllBlocks } from '@/blocks/registry'
import { getLatestVersionTools } from '@/tools/utils'
import { tools as toolRegistry } from '@/tools/registry'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
import type { GrepMatch, GrepOptions, ReadResult, DirEntry } from '@/lib/copilot/vfs/operations'
import * as ops from '@/lib/copilot/vfs/operations'
import {
  serializeApiKeys,
  serializeBlockSchema,
  serializeCredentials,
  serializeDocuments,
  serializeEnvironmentVariables,
  serializeIntegrationSchema,
  serializeKBMeta,
  serializeRecentExecutions,
  serializeWorkflowMeta,
} from '@/lib/copilot/vfs/serializers'

const logger = createLogger('WorkspaceVFS')

/** Cache entry for a materialized VFS */
interface VFSCacheEntry {
  vfs: WorkspaceVFS
  expiresAt: number
}

/** Module-level VFS cache keyed by workspaceId */
const vfsCache = new Map<string, VFSCacheEntry>()

/** Cache TTL in milliseconds (30 seconds) */
const VFS_CACHE_TTL_MS = 30_000

/** Static component files, computed once and shared across all VFS instances */
let staticComponentFiles: Map<string, string> | null = null

/**
 * Build the static component files from block and tool registries.
 * This only needs to happen once per process.
 */
function getStaticComponentFiles(): Map<string, string> {
  if (staticComponentFiles) return staticComponentFiles
  staticComponentFiles = new Map()

  const allBlocks = getAllBlocks()
  for (const block of allBlocks) {
    const path = `components/blocks/${block.type}.json`
    staticComponentFiles.set(path, serializeBlockSchema(block))
  }

  const latestTools = getLatestVersionTools(toolRegistry)
  for (const [toolId, tool] of Object.entries(latestTools)) {
    const parts = toolId.split('_')
    const service = parts[0]
    const operation = parts.slice(1).join('_') || 'default'
    const path = `components/integrations/${service}/${operation}.json`
    staticComponentFiles.set(path, serializeIntegrationSchema(tool))
  }

  logger.info('Static component files built', {
    blocks: allBlocks.length,
    integrations: Object.keys(latestTools).length,
  })

  return staticComponentFiles
}

/**
 * Virtual Filesystem that materializes workspace data into an in-memory Map.
 *
 * Structure:
 * workflows/{name}/meta.json
 * workflows/{name}/blocks.json
 * workflows/{name}/edges.json
 * workflows/{name}/executions.json
 * knowledgebases/{name}/meta.json
 * knowledgebases/{name}/documents.json
 * environment/credentials.json
 * environment/api-keys.json
 * environment/variables.json
 * components/blocks/{type}.json
 * components/integrations/{service}/{operation}.json
 */
export class WorkspaceVFS {
  private files: Map<string, string> = new Map()

  /**
   * Materialize workspace data from DB into the VFS.
   * Queries workflows, knowledge bases, and merges static component schemas.
   */
  async materialize(workspaceId: string, userId: string): Promise<void> {
    const start = Date.now()
    this.files = new Map()

    await Promise.all([
      this.materializeWorkflows(workspaceId, userId),
      this.materializeKnowledgeBases(workspaceId),
      this.materializeEnvironment(workspaceId, userId),
    ])

    // Merge static component files
    for (const [path, content] of getStaticComponentFiles()) {
      this.files.set(path, content)
    }

    logger.info('VFS materialized', {
      workspaceId,
      fileCount: this.files.size,
      durationMs: Date.now() - start,
    })
  }

  grep(
    pattern: string,
    path?: string,
    options?: GrepOptions
  ): GrepMatch[] | string[] | ops.GrepCountEntry[] {
    return ops.grep(this.files, pattern, path, options)
  }

  glob(pattern: string): string[] {
    return ops.glob(this.files, pattern)
  }

  read(path: string, offset?: number, limit?: number): ReadResult | null {
    return ops.read(this.files, path, offset, limit)
  }

  list(path: string): DirEntry[] {
    return ops.list(this.files, path)
  }

  /**
   * Materialize all workflows in the workspace.
   */
  private async materializeWorkflows(workspaceId: string, userId: string): Promise<void> {
    const workflowRows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        isDeployed: workflow.isDeployed,
        deployedAt: workflow.deployedAt,
        runCount: workflow.runCount,
        lastRunAt: workflow.lastRunAt,
        createdAt: workflow.createdAt,
        updatedAt: workflow.updatedAt,
      })
      .from(workflow)
      .where(eq(workflow.workspaceId, workspaceId))

    // Load normalized data + executions in parallel for all workflows
    await Promise.all(
      workflowRows.map(async (wf) => {
        const safeName = sanitizeName(wf.name)
        const prefix = `workflows/${safeName}/`

        // Meta
        this.files.set(`${prefix}meta.json`, serializeWorkflowMeta(wf))

        // Blocks + edges from normalized tables
        try {
          const normalized = await loadWorkflowFromNormalizedTables(wf.id)
          if (normalized) {
            const sanitized = sanitizeForCopilot({
              blocks: normalized.blocks,
              edges: normalized.edges,
              loops: normalized.loops,
              parallels: normalized.parallels,
            } as any)
            this.files.set(`${prefix}blocks.json`, JSON.stringify(sanitized, null, 2))

            // Edges as simple source->target list
            const edges = normalized.edges.map((e) => ({
              source: e.source,
              target: e.target,
              sourceHandle: e.sourceHandle || undefined,
              targetHandle: e.targetHandle || undefined,
            }))
            this.files.set(`${prefix}edges.json`, JSON.stringify(edges, null, 2))
          }
        } catch (err) {
          logger.warn('Failed to load workflow blocks', {
            workflowId: wf.id,
            error: err instanceof Error ? err.message : String(err),
          })
        }

        // Recent executions (last 5)
        try {
          const execRows = await db
            .select({
              id: workflowExecutionLogs.id,
              executionId: workflowExecutionLogs.executionId,
              status: workflowExecutionLogs.status,
              trigger: workflowExecutionLogs.trigger,
              startedAt: workflowExecutionLogs.startedAt,
              endedAt: workflowExecutionLogs.endedAt,
              totalDurationMs: workflowExecutionLogs.totalDurationMs,
            })
            .from(workflowExecutionLogs)
            .where(eq(workflowExecutionLogs.workflowId, wf.id))
            .orderBy(desc(workflowExecutionLogs.startedAt))
            .limit(5)

          if (execRows.length > 0) {
            this.files.set(`${prefix}executions.json`, serializeRecentExecutions(execRows))
          }
        } catch (err) {
          logger.warn('Failed to load execution logs', {
            workflowId: wf.id,
            error: err instanceof Error ? err.message : String(err),
          })
        }
      })
    )
  }

  /**
   * Materialize all knowledge bases in the workspace.
   */
  private async materializeKnowledgeBases(workspaceId: string): Promise<void> {
    const kbRows = await db
      .select({
        id: knowledgeBase.id,
        name: knowledgeBase.name,
        description: knowledgeBase.description,
        embeddingModel: knowledgeBase.embeddingModel,
        embeddingDimension: knowledgeBase.embeddingDimension,
        tokenCount: knowledgeBase.tokenCount,
        createdAt: knowledgeBase.createdAt,
        updatedAt: knowledgeBase.updatedAt,
      })
      .from(knowledgeBase)
      .where(and(eq(knowledgeBase.workspaceId, workspaceId), isNull(knowledgeBase.deletedAt)))

    await Promise.all(
      kbRows.map(async (kb) => {
        const safeName = sanitizeName(kb.name)
        const prefix = `knowledgebases/${safeName}/`

        // Get document count
        const [docCountRow] = await db
          .select({ count: count() })
          .from(document)
          .where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))

        this.files.set(
          `${prefix}meta.json`,
          serializeKBMeta({
            ...kb,
            documentCount: docCountRow?.count ?? 0,
          })
        )

        // Documents metadata
        const docRows = await db
          .select({
            id: document.id,
            filename: document.filename,
            fileSize: document.fileSize,
            mimeType: document.mimeType,
            chunkCount: document.chunkCount,
            tokenCount: document.tokenCount,
            processingStatus: document.processingStatus,
            enabled: document.enabled,
            uploadedAt: document.uploadedAt,
          })
          .from(document)
          .where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))

        if (docRows.length > 0) {
          this.files.set(`${prefix}documents.json`, serializeDocuments(docRows))
        }
      })
    )
  }

  /**
   * Materialize environment data: credentials, API keys, env variable names.
   */
  private async materializeEnvironment(workspaceId: string, userId: string): Promise<void> {
    try {
      // OAuth credentials — which integrations are connected (no tokens)
      const oauthRows = await db
        .select({
          providerId: account.providerId,
          scope: account.scope,
          createdAt: account.createdAt,
        })
        .from(account)
        .where(eq(account.userId, userId))

      this.files.set('environment/credentials.json', serializeCredentials(oauthRows))

      // API keys — names and types (no key values)
      const apiKeyRows = await db
        .select({
          id: apiKey.id,
          name: apiKey.name,
          type: apiKey.type,
          lastUsed: apiKey.lastUsed,
          createdAt: apiKey.createdAt,
          expiresAt: apiKey.expiresAt,
        })
        .from(apiKey)
        .where(eq(apiKey.workspaceId, workspaceId))

      this.files.set('environment/api-keys.json', serializeApiKeys(apiKeyRows))

      // Environment variables — names only (no values)
      let personalVarNames: string[] = []
      let workspaceVarNames: string[] = []

      const [personalEnv] = await db
        .select({ variables: environment.variables })
        .from(environment)
        .where(eq(environment.userId, userId))

      if (personalEnv?.variables && typeof personalEnv.variables === 'object') {
        personalVarNames = Object.keys(personalEnv.variables as Record<string, unknown>)
      }

      const [workspaceEnv] = await db
        .select({ variables: workspaceEnvironment.variables })
        .from(workspaceEnvironment)
        .where(eq(workspaceEnvironment.workspaceId, workspaceId))

      if (workspaceEnv?.variables && typeof workspaceEnv.variables === 'object') {
        workspaceVarNames = Object.keys(workspaceEnv.variables as Record<string, unknown>)
      }

      this.files.set(
        'environment/variables.json',
        serializeEnvironmentVariables(personalVarNames, workspaceVarNames)
      )
    } catch (err) {
      logger.warn('Failed to materialize environment data', {
        workspaceId,
        error: err instanceof Error ? err.message : String(err),
      })
    }
  }
}

/**
 * Get or create a cached VFS for a workspace.
 * Re-materializes if the cache is expired.
 */
export async function getOrMaterializeVFS(
  workspaceId: string,
  userId: string
): Promise<WorkspaceVFS> {
  const now = Date.now()
  const cached = vfsCache.get(workspaceId)

  if (cached && cached.expiresAt > now) {
    return cached.vfs
  }

  const vfs = new WorkspaceVFS()
  await vfs.materialize(workspaceId, userId)

  vfsCache.set(workspaceId, {
    vfs,
    expiresAt: now + VFS_CACHE_TTL_MS,
  })

  return vfs
}

/**
 * Sanitize a name for use as a VFS path segment.
 * Converts to lowercase, replaces spaces/special chars with hyphens.
 */
function sanitizeName(name: string): string {
  return name
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-|-$/g, '')
    .slice(0, 64)
}
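End to end, the intended call path looks roughly like this (IDs invented; the grep/read/glob/list tool handlers in vfs-tools.ts presumably wrap these methods):

const vfs = await getOrMaterializeVFS('workspace-123', 'user-456')

vfs.list('workflows')                  // top-level workflow folders
vfs.glob('workflows/*/meta.json')      // every workflow's metadata file
vfs.read('environment/variables.json') // env var names, never values
vfs.grep('webhook', 'workflows/', { outputMode: 'files_with_matches' })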
@@ -43,7 +43,6 @@ You have access to these specialized subagents. Call them by name to delegate ta

## Direct Tools

- **get_user_workflow(workflowId)** — Get workflow structure and blocks. Requires the workflow ID.
- **search_online** — Search the web for information.
- **memory_file_read(file_path)** — Read a persistent memory file.
- **memory_file_write(file_path, content)** — Write/update a persistent memory file.
