improvement(copilot): v0.2 (#2086)
* Edit workflow returns workflow * Condense workflkow conosle * Limit console results to 2 * mark checkoff v1 * Mark and checkoff todo * Fixes * Plan mode * fix agent output * broken * Fixes * Fix diff mode persist * Fix diff coloring * Undo/redo * Checkpoint udno redo * fix(templates): fix templates details page (#1942) * Fix template details * Fix deps * fix(templates-details): restore approval feature, and keep details UI consistent, smoothen out creation of profile (#1943) * fix(templates): view current ui * update UI to be less cluttered * make state management for creating user profile smoother * fix autoselect logic * fix lint * fix(landing): need to propagate landing page copilot prompt (#1944) * fix(wand): subblocks should not be overwritten after wand gen (#1946) * fix(settings): fix broken api keys, help modal, logs, workflow renaming (#1945) * fix(settings): fix broken api keys, help modal, logs, workflow renaming * fix build * cleanup * use emcn * fix(files): changed file input value sample from string -> object (#1947) * improvement: usage-indicator UI (#1948) * fix(deploy): fix button (#1949) * fix(executor): consolidate execution hooks (#1950) * fix(autoconnect): should check if triggermode is set from the toolbar drag event directly (#1951) * improvement: templates styling (#1952) * improvement: template use button (#1954) * feat(newgifs): added new gifs (#1953) * new gifs * changed wording * changed wording * lowercase * changed wording * remove blog stuff --------- Co-authored-by: aadamgough <adam@sim.ai> Co-authored-by: waleed <walif6@gmail.com> * feat(drizzle): added ods for analytics from drizzle (#1956) * feat(drizzle): added ods for analytics from drizzle * clean * fix(sheets): file selector had incorrect provider (#1958) * feat(docs): added docs analytics drizzle ods (#1957) * feat(docs): added docs analytics drizzle ods * fix build * fix provider for docs selector (#1959) * fix(custom-tools): updates to legacy + copilot generated custom tools (#1960) * fix(custom-tools): updates to existing tools * don't reorder custom tools in modal based on edit time * restructure custom tools to persist copilot generated tools * fix tests * fix(slack): remove update message incorrect canonical param (#1964) * fix(slack): send message tool * add logging * add more logs * add more logs * fix canonical param * improvement(templates): add share button, serve public templates routes for unauthenticated users and workspace one for authenticated users, improve settings style and organization (#1962) * improvement(templates): add share button, serve public templates routes for unauthenticated users and workspace one for authenticated users, improve settings style and organization * fix lint --------- Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai> * fix(onedrive): incorrect canonical param (#1966) * fix(onedrive): incorrect canonical param * fix download file * fix * fix * Revert "fix" This reverts commitf68ccd75fd. * Revert "fix" This reverts commitf3d8acee7d. 
* fix(executor): streaming after tool calls (#1963) * Provider changes * Fix lint * fix(code): readd wand to code subblock (#1969) * fix(cmd-k): z-index + reoder tools, triggers (#1970) * fix(cmd-k): z-index + reoder tools, triggers * fix more z-index styling * fix(executor): streaming response format (#1972) * fix(workflow-block): fix redeploy header to not repeatedly show redeploy when redeploy is not necessary (#1973) * fix(workflow-block): fix redeploy header to not repeatedly show redeploy when redeploy is not necessary * cleanup * improvement(platform): chat, emcn, terminal, usage-limit (#1974) * improvement(usage-indicator): layout * improvement: expand default terminal height * fix: swap workflow block ports * improvement: chat initial positioning * improvement(chat): display; improvement(emcn): popover attributes * fix lint * feat(settings): added reactquery for settings, removed zustand stores, added apollo, added workflow block selector dropdown search, added add environment variable option to empty env var dropdown (#1971) * feat(settings): added reactquery for settings, removed zustand stores, added apollo, added workflow block selector dropdown search, added add environment variable option to empty env var dropdown * fix delete dialog for copilot keys * simplify combobox * fix more z indices * consolidated duplicate hooks --------- Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai> * fix(copilot-subflows): copilot-added subflows id mismatch (#1977) * feat(i18n): update translations (#1978) * feat(i18n): update translations * fix build --------- Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com> * fix(logs): show block inputs (#1979) * Fix executor lgos block inputs * Fix Comment * fix(onedrive): parse array values correctly (#1981) * fix(onedrive): parse array values correctly * fix onedrive * fix * fix onedrive input parsing by reusing code subblock * fix type * feat(files): add presigned URL generation support for execution files (#1980) * fix(popovers): billed account + async example command (#1982) * fix(settings): update usage data in settings > subs to use reactquery hooks (#1983) * fix(settings): update usage data in settings > subs to use reactquery hooks * standardize usage pills calculation * fix(output-selector): z-index in chat deploy modal (#1984) * improvement(logs): improved logs search (#1985) * improvement(logs): improved logs search * more * ack PR comments * feat(slack): added slack full message object in response (#1987) * feat(slack): add better error messages, reminder to add bot to app (#1990) * feat(i18n): update translations (#1989) Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com> * fix(landing): hero stripe icon (#1988) * Temp * Update * Update * Update * Progress * Diff store fixes * simplify sockets ops for diff store * Search patterns tool * Better tool calls * Fix sanitizationg * Context window display * fix(variables): fix double stringification (#1991) * improvement(variables): support dot notation for nested objects (#1992) * improvement(tanstack): migrate multiple stores (#1994) * improvement(tanstack): migrate folders, knowledge to tanstack * fix types * fix(folders): duplicate (#1996) * fix(variables): fix variables block json resolution (#1997) * Improvement(ui/ux): signup, command-list, cursors, search modal, workflow runs, usage indicator (#1998) * improvement: signup loading, command-list, cursors, search modal ordering * improvement: workflow runs, search modal * 
improvement(usage-indicator): ui/ux * test(pr): hackathon (#1999) * test(pr): github trigger (#2000) * fix(usage-indicator): conditional rendering, upgrade, and ui/ux (#2001) * fix: usage-limit indicator and render conditonally on is billing enabled * fix: upgrade render * fix(notes): fix notes, tighten spacing, update deprecated zustand function, update use mention data to ignore block positon (#2002) * fix(pdfs): use unpdf instead of pdf-parse (#2004) * fix(modals): fix z-index for various modals and output selector and variables (#2005) * fix(condition): treat condition input the same as the code subblock (#2006) * feat(models): added gpt-5.1 (#2007) * improvement: runpath edges, blocks, active (#2008) * feat(i18n): update translations (#2009) * fix(triggers): check triggermode and consolidate block type (#2011) * fix(triggers): disabled trigger shouldn't be added to dag (#2012) * Fix disabled blocks * Comments * Fix api/chat trigger not found message * fix(tags): only show start block upstream if is ancestor (#2013) * fix(variables): Fix resolution on double < (#2016) * Fix variable <> * Ling * Clean * feat(billing): add notif for first failed payment, added upgrade email from free, updated providers that supported granular tool control to support them, fixed envvar popover, fixed redirect to wrong workspace after oauth connect (#2015) * feat(billing): add notif for first failed payment, added upgrade email from free, updated providers that supported granular tool control to support them, fixed envvar popover, fixed redirect to wrong workspace after oauth connect * fix build * ack PR comments * feat(performance): added reactquery hooks for workflow operations, for logs, fixed logs reloading, fix subscription UI (#2017) * feat(performance): added reactquery hooks for workflow operations, for logs, fixed logs reloading, fix subscription UI * use useInfiniteQuery for logs fetching * fix(copilot): run workflow supports input format and fix run id (#2018) * fix(router): fix error edge in router block + fix source handle problem (#2019) * Fix router block error port handling * Remove comment * Fix edge execution * improvement: code subblock, action bar, connections (#2024) * improvement: action bar, connections * fix: code block draggable resize * fix(response): fix response block http format (#2027) * Fix response block * Lint * fix(notes): fix notes block spacing, additional logs for billing transfer route (#2029) * fix(usage-data): refetch on usage limit update in settings (#2032) * fix(overage): fix pill calculation in the usage indicator to be consistent across views (#2034) * fix(workflows): fixed workflow loading in without start block, added templates RQ hook, cleaned up unused templates code (#2035) * fix(triggers): dedup + not surfacing deployment status log (#2033) * fix(triggers): dedup + not surfacing deployment status log * fix ms teams * change to microsoftteams * Revert "change to microsoftteams" This reverts commit217f808641. 
* fix * fix * fix provider name * fix oauth for msteams * improvement(undo-redo): expand undo-redo store to store 100 ops instead of 15 (#2036) * improvement(undo-redo): expand undo-redo store to store 100 ops instead of 15 * prevent undo-redo from interfering with subblock browser text undo * improvement(docs): remove copy page from mobile view on docs (#2037) * improvement(docs): remove copy page from mobile view on docs * bring title and pagenav lower on mobile * added cursor pointer to clickable components in docs * fix(workflow-block): clearing child workflow input format field must lazy cascade parent workflow state deletion (#2038) * Error tools * Ui tools * Navigate ui tool * Combine env vars and oauth tools * Plan mode section * Plan mode v1 * Plan mode v1 * Plan mode improvements * Build plan button * Make workflow plan editable * Tool improvements * Fix build plan button * Lint * Fix * Fix lint * Fix plan * Remove migrations * fix undo/redo settling of ops * Add migratinos back * Smoothen out diff store * Hide plan mode * Fix lint * Edit run workflow params * Fix lint * Fix tests * Fix typing * Fix build errors and lint * Fix build * Fix tests --------- Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com> Co-authored-by: Waleed <walif6@gmail.com> Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com> Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com> Co-authored-by: aadamgough <adam@sim.ai> Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai> Co-authored-by: Emir Karabeg <emirkarabeg@berkeley.edu> Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>
Committed via GitHub
Parent: 67bd5bd8fa
Commit: ddd3219126
@@ -19,6 +19,7 @@ describe('Chat Edit API Route', () => {
const mockCreateErrorResponse = vi.fn()
const mockEncryptSecret = vi.fn()
const mockCheckChatAccess = vi.fn()
const mockGetSession = vi.fn()

beforeEach(() => {
vi.resetModules()
@@ -42,6 +43,10 @@ describe('Chat Edit API Route', () => {
chat: { id: 'id', identifier: 'identifier', userId: 'userId' },
}))

vi.doMock('@/lib/auth', () => ({
getSession: mockGetSession,
}))

vi.doMock('@/lib/logs/console/logger', () => ({
createLogger: vi.fn().mockReturnValue({
info: vi.fn(),
@@ -89,9 +94,7 @@ describe('Chat Edit API Route', () => {

describe('GET', () => {
it('should return 401 when user is not authenticated', async () => {
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue(null),
}))
mockGetSession.mockResolvedValueOnce(null)

const req = new NextRequest('http://localhost:3000/api/chat/manage/chat-123')
const { GET } = await import('@/app/api/chat/manage/[id]/route')
@@ -102,11 +105,9 @@ describe('Chat Edit API Route', () => {
})

it('should return 404 when chat not found or access denied', async () => {
vi.doMock('@/lib/auth', () => ({
getSession: vi.fn().mockResolvedValue({
user: { id: 'user-id' },
}),
}))
mockGetSession.mockResolvedValueOnce({
user: { id: 'user-id' },
})

mockCheckChatAccess.mockResolvedValue({ hasAccess: false })
@@ -563,6 +563,8 @@ describe('Copilot Chat API Route', () => {
],
messageCount: 4,
previewYaml: null,
config: null,
planArtifact: null,
createdAt: '2024-01-01T00:00:00.000Z',
updatedAt: '2024-01-02T00:00:00.000Z',
},
@@ -576,6 +578,8 @@ describe('Copilot Chat API Route', () => {
],
messageCount: 2,
previewYaml: null,
config: null,
planArtifact: null,
createdAt: '2024-01-03T00:00:00.000Z',
updatedAt: '2024-01-04T00:00:00.000Z',
},
@@ -53,7 +53,7 @@ const ChatMessageSchema = z.object({
])
.optional()
.default('claude-4.5-sonnet'),
mode: z.enum(['ask', 'agent']).optional().default('agent'),
mode: z.enum(['ask', 'agent', 'plan']).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
stream: z.boolean().optional().default(true),
@@ -880,6 +880,8 @@ export async function GET(req: NextRequest) {
title: copilotChats.title,
model: copilotChats.model,
messages: copilotChats.messages,
planArtifact: copilotChats.planArtifact,
config: copilotChats.config,
createdAt: copilotChats.createdAt,
updatedAt: copilotChats.updatedAt,
})
@@ -897,6 +899,8 @@ export async function GET(req: NextRequest) {
messages: Array.isArray(chat.messages) ? chat.messages : [],
messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0,
previewYaml: null, // Not needed for chat list
planArtifact: chat.planArtifact || null,
config: chat.config || null,
createdAt: chat.createdAt,
updatedAt: chat.updatedAt,
}))
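For context, a request that exercises the newly allowed plan mode would look roughly like the sketch below. It is a minimal example against the ChatMessageSchema changes shown above; the endpoint path, the message field name, and the surrounding fetch call are assumptions, not part of this diff.

// Hypothetical client call illustrating mode: 'plan' (path and field names assumed).
const res = await fetch('/api/copilot/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    message: 'Draft a plan for a Slack digest workflow', // field name assumed
    mode: 'plan', // newly permitted by the enum above
    createNewChat: true,
    stream: true,
  }),
})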
@@ -37,6 +37,14 @@ const UpdateMessagesSchema = z.object({
.optional(),
})
),
planArtifact: z.string().nullable().optional(),
config: z
.object({
mode: z.enum(['ask', 'build', 'plan']).optional(),
model: z.string().optional(),
})
.nullable()
.optional(),
})

export async function POST(req: NextRequest) {
@@ -49,7 +57,7 @@ export async function POST(req: NextRequest) {
}

const body = await req.json()
const { chatId, messages } = UpdateMessagesSchema.parse(body)
const { chatId, messages, planArtifact, config } = UpdateMessagesSchema.parse(body)

// Verify that the chat belongs to the user
const [chat] = await db
@@ -62,18 +70,27 @@ export async function POST(req: NextRequest) {
return createNotFoundResponse('Chat not found or unauthorized')
}

// Update chat with new messages
await db
.update(copilotChats)
.set({
messages: messages,
updatedAt: new Date(),
})
.where(eq(copilotChats.id, chatId))
// Update chat with new messages, plan artifact, and config
const updateData: Record<string, any> = {
messages: messages,
updatedAt: new Date(),
}

logger.info(`[${tracker.requestId}] Successfully updated chat messages`, {
if (planArtifact !== undefined) {
updateData.planArtifact = planArtifact
}

if (config !== undefined) {
updateData.config = config
}

await db.update(copilotChats).set(updateData).where(eq(copilotChats.id, chatId))

logger.info(`[${tracker.requestId}] Successfully updated chat`, {
chatId,
newMessageCount: messages.length,
hasPlanArtifact: !!planArtifact,
hasConfig: !!config,
})

return NextResponse.json({
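A minimal sketch of a payload the updated POST handler would accept, given the UpdateMessagesSchema above. The endpoint path is an assumption and the message shape is abbreviated.

// Hypothetical request body persisting a plan artifact and config alongside messages.
const body = {
  chatId: 'chat-123',
  messages: [{ role: 'user', content: 'Build me a workflow' }], // abbreviated shape
  planArtifact: '# Plan\n1. Add a Slack trigger', // stored only when present
  config: { mode: 'plan', model: 'claude-4.5-sonnet' }, // matches the nullable config object
}
// Omitting planArtifact/config leaves the stored values untouched,
// since the handler only copies fields that are !== undefined.
await fetch('/api/copilot/chat/update-messages', { // path assumed
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body),
})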
apps/sim/app/api/templates/approved/sanitized/route.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'

const logger = createLogger('TemplatesSanitizedAPI')

export const revalidate = 0

/**
* GET /api/templates/approved/sanitized
* Returns all approved templates with their sanitized JSONs, names, and descriptions
* Requires internal API secret authentication via X-API-Key header
*/
export async function GET(request: NextRequest) {
const requestId = generateRequestId()

try {
const url = new URL(request.url)
const hasApiKey = !!request.headers.get('x-api-key')

// Check internal API key authentication
const authResult = checkInternalApiKey(request)
if (!authResult.success) {
logger.warn(`[${requestId}] Authentication failed for approved sanitized templates`, {
error: authResult.error,
hasApiKey,
howToUse: 'Add header: X-API-Key: <INTERNAL_API_SECRET>',
})
return NextResponse.json(
{
error: authResult.error,
hint: 'Include X-API-Key header with INTERNAL_API_SECRET value',
},
{ status: 401 }
)
}

// Fetch all approved templates
const approvedTemplates = await db
.select({
id: templates.id,
name: templates.name,
details: templates.details,
state: templates.state,
tags: templates.tags,
requiredCredentials: templates.requiredCredentials,
})
.from(templates)
.where(eq(templates.status, 'approved'))

// Process each template to sanitize for copilot
const sanitizedTemplates = approvedTemplates
.map((template) => {
try {
const copilotSanitized = sanitizeForCopilot(template.state as any)

if (copilotSanitized?.blocks) {
Object.values(copilotSanitized.blocks).forEach((block: any) => {
if (block && typeof block === 'object') {
block.outputs = undefined
block.position = undefined
block.height = undefined
block.layout = undefined
block.horizontalHandles = undefined

// Also clean nested nodes recursively
if (block.nestedNodes) {
Object.values(block.nestedNodes).forEach((nestedBlock: any) => {
if (nestedBlock && typeof nestedBlock === 'object') {
nestedBlock.outputs = undefined
nestedBlock.position = undefined
nestedBlock.height = undefined
nestedBlock.layout = undefined
nestedBlock.horizontalHandles = undefined
}
})
}
}
})
}

const details = template.details as { tagline?: string; about?: string } | null
const description = details?.tagline || details?.about || ''

return {
id: template.id,
name: template.name,
description,
tags: template.tags,
requiredCredentials: template.requiredCredentials,
sanitizedJson: copilotSanitized,
}
} catch (error) {
logger.error(`[${requestId}] Error sanitizing template ${template.id}`, {
error: error instanceof Error ? error.message : String(error),
})
return null
}
})
.filter((t): t is NonNullable<typeof t> => t !== null)

const response = {
templates: sanitizedTemplates,
count: sanitizedTemplates.length,
}

return NextResponse.json(response)
} catch (error) {
logger.error(`[${requestId}] Error fetching approved sanitized templates`, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})
return NextResponse.json(
{
error: 'Internal server error',
requestId,
},
{ status: 500 }
)
}
}

// Add a helpful OPTIONS handler for CORS preflight
export async function OPTIONS(request: NextRequest) {
const requestId = generateRequestId()
logger.info(`[${requestId}] OPTIONS request received for /api/templates/approved/sanitized`)

return new NextResponse(null, {
status: 200,
headers: {
'Access-Control-Allow-Methods': 'GET, OPTIONS',
'Access-Control-Allow-Headers': 'X-API-Key, Content-Type',
},
})
}
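As a usage sketch, an internal consumer could call the new route as below. The header name and env var name follow the route's own hint strings; the base URL is an assumption.

// Hypothetical internal consumer of GET /api/templates/approved/sanitized.
const res = await fetch('https://sim.example.internal/api/templates/approved/sanitized', {
  headers: { 'X-API-Key': process.env.INTERNAL_API_SECRET ?? '' },
})
if (res.ok) {
  const { templates, count } = await res.json()
  console.log(`Fetched ${count} sanitized templates`, templates[0]?.name)
} else {
  console.error('Auth failed or server error', res.status) // 401 without a valid key
}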
@@ -4,6 +4,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { extractAndPersistCustomTools } from '@/lib/workflows/custom-tools-persistence'
@@ -248,6 +249,26 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully saved workflow ${workflowId} state in ${elapsed}ms`)

try {
const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
const notifyResponse = await fetch(`${socketUrl}/api/workflow-updated`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ workflowId }),
})

if (!notifyResponse.ok) {
logger.warn(
`[${requestId}] Failed to notify Socket.IO server about workflow ${workflowId} update`
)
}
} catch (notificationError) {
logger.warn(
`[${requestId}] Error notifying Socket.IO server about workflow ${workflowId} update`,
notificationError
)
}

return NextResponse.json({ success: true, warnings }, { status: 200 })
} catch (error: any) {
const elapsed = Date.now() - startTime
@@ -70,7 +70,7 @@ export function OutputSelect({
const popoverRef = useRef<HTMLDivElement>(null)
const contentRef = useRef<HTMLDivElement>(null)
const blocks = useWorkflowStore((state) => state.blocks)
const { isShowingDiff, isDiffReady, diffWorkflow } = useWorkflowDiffStore()
const { isShowingDiff, isDiffReady, hasActiveDiff, baselineWorkflow } = useWorkflowDiffStore()
const subBlockValues = useSubBlockStore((state) =>
workflowId ? state.workflowValues[workflowId] : null
)
@@ -78,7 +78,9 @@ export function OutputSelect({
/**
* Uses diff blocks when in diff mode, otherwise main blocks
*/
const workflowBlocks = isShowingDiff && isDiffReady && diffWorkflow ? diffWorkflow.blocks : blocks
const shouldUseBaseline = hasActiveDiff && isDiffReady && !isShowingDiff && baselineWorkflow
const workflowBlocks =
shouldUseBaseline && baselineWorkflow ? baselineWorkflow.blocks : (blocks as any)

/**
* Extracts all available workflow outputs for the dropdown
@@ -100,7 +102,7 @@ export function OutputSelect({
const blockArray = Object.values(workflowBlocks)
if (blockArray.length === 0) return outputs

blockArray.forEach((block) => {
blockArray.forEach((block: any) => {
if (block.type === 'starter' || !block?.id || !block?.type) return

const blockName =
@@ -110,8 +112,8 @@ export function OutputSelect({

const blockConfig = getBlock(block.type)
const responseFormatValue =
isShowingDiff && isDiffReady && diffWorkflow
? diffWorkflow.blocks[block.id]?.subBlocks?.responseFormat?.value
shouldUseBaseline && baselineWorkflow
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.responseFormat?.value
: subBlockValues?.[block.id]?.responseFormat
const responseFormat = parseResponseFormatSafely(responseFormatValue, block.id)

@@ -164,7 +166,16 @@ export function OutputSelect({
})

return outputs
}, [workflowBlocks, workflowId, isShowingDiff, isDiffReady, diffWorkflow, blocks, subBlockValues])
}, [
workflowBlocks,
workflowId,
isShowingDiff,
isDiffReady,
baselineWorkflow,
blocks,
subBlockValues,
shouldUseBaseline,
])

/**
* Checks if an output is currently selected by comparing both ID and label
@@ -12,20 +12,28 @@ const logger = createLogger('DiffControls')

export const DiffControls = memo(function DiffControls() {
// Optimized: Single diff store subscription
const { isShowingDiff, isDiffReady, diffWorkflow, toggleDiffView, acceptChanges, rejectChanges } =
useWorkflowDiffStore(
useCallback(
(state) => ({
isShowingDiff: state.isShowingDiff,
isDiffReady: state.isDiffReady,
diffWorkflow: state.diffWorkflow,
toggleDiffView: state.toggleDiffView,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
}),
[]
)
const {
isShowingDiff,
isDiffReady,
hasActiveDiff,
toggleDiffView,
acceptChanges,
rejectChanges,
baselineWorkflow,
} = useWorkflowDiffStore(
useCallback(
(state) => ({
isShowingDiff: state.isShowingDiff,
isDiffReady: state.isDiffReady,
hasActiveDiff: state.hasActiveDiff,
toggleDiffView: state.toggleDiffView,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
baselineWorkflow: state.baselineWorkflow,
}),
[]
)
)

// Optimized: Single copilot store subscription for needed values
const { updatePreviewToolCallState, clearPreviewYaml, currentChat, messages } = useCopilotStore(
@@ -61,10 +69,11 @@ export const DiffControls = memo(function DiffControls() {
try {
logger.info('Creating checkpoint before accepting changes')

// Get current workflow state from the store and ensure it's complete
const rawState = useWorkflowStore.getState().getWorkflowState()
// Use the baseline workflow (state before diff) instead of current state
// This ensures reverting to the checkpoint restores the pre-diff state
const rawState = baselineWorkflow || useWorkflowStore.getState().getWorkflowState()

// Merge subblock values from the SubBlockStore to get complete state
// The baseline already has merged subblock values, but we'll merge again to be safe
// This ensures all user inputs and subblock data are captured
const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, activeWorkflowId)

@@ -199,7 +208,7 @@ export const DiffControls = memo(function DiffControls() {
logger.error('Failed to create checkpoint:', error)
return false
}
}, [activeWorkflowId, currentChat, messages])
}, [activeWorkflowId, currentChat, messages, baselineWorkflow])

const handleAccept = useCallback(async () => {
logger.info('Accepting proposed changes with backup protection')
@@ -297,7 +306,7 @@ export const DiffControls = memo(function DiffControls() {
}, [clearPreviewYaml, updatePreviewToolCallState, rejectChanges])

// Don't show anything if no diff is available or diff is not ready
if (!diffWorkflow || !isDiffReady) {
if (!hasActiveDiff || !isDiffReady) {
return null
}
@@ -1,5 +1,6 @@
export * from './copilot-message/copilot-message'
export * from './inline-tool-call/inline-tool-call'
export * from './plan-mode-section/plan-mode-section'
export * from './todo-list/todo-list'
export * from './user-input/user-input'
export * from './welcome/welcome'
@@ -1,6 +1,6 @@
'use client'

import { useState } from 'react'
import { useEffect, useRef, useState } from 'react'
import { Loader2 } from 'lucide-react'
import useDrivePicker from 'react-google-drive-picker'
import { Button } from '@/components/emcn'
@@ -52,8 +52,17 @@ const ACTION_VERBS = [
'Editing',
'Edited',
'Running',
'Ran',
'Designing',
'Designed',
'Searching',
'Searched',
'Debugging',
'Debugged',
'Validating',
'Validated',
'Adjusting',
'Adjusted',
'Summarizing',
'Summarized',
'Marking',
@@ -70,6 +79,27 @@ const ACTION_VERBS = [
'Evaluating',
'Evaluated',
'Finished',
'Setting',
'Set',
'Applied',
'Applying',
'Rejected',
'Deploy',
'Deploying',
'Deployed',
'Redeploying',
'Redeployed',
'Redeploy',
'Undeploy',
'Undeploying',
'Undeployed',
'Checking',
'Checked',
'Opening',
'Opened',
'Create',
'Creating',
'Created',
] as const

/**
@@ -198,10 +228,15 @@ function ShimmerOverlayText({

/**
* Determines if a tool call is "special" and should display with gradient styling.
* Only workflow operation tools (edit, build, run) get the purple gradient.
* Only workflow operation tools (edit, build, run, deploy) get the purple gradient.
*/
function isSpecialToolCall(toolCall: CopilotToolCall): boolean {
const workflowOperationTools = ['edit_workflow', 'build_workflow', 'run_workflow']
const workflowOperationTools = [
'edit_workflow',
'build_workflow',
'run_workflow',
'deploy_workflow',
]

return workflowOperationTools.includes(toolCall.name)
}
@@ -223,12 +258,21 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
return hasInterrupt && toolCall.state === 'pending'
}

async function handleRun(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
async function handleRun(
toolCall: CopilotToolCall,
setToolCallState: any,
onStateChange?: any,
editedParams?: any
) {
const instance = getClientTool(toolCall.id)
if (!instance) return
try {
const mergedParams =
(toolCall as any).params || (toolCall as any).parameters || (toolCall as any).input || {}
editedParams ||
(toolCall as any).params ||
(toolCall as any).parameters ||
(toolCall as any).input ||
{}
await instance.handleAccept?.(mergedParams)
onStateChange?.('executing')
} catch (e) {
@@ -262,9 +306,11 @@ function getDisplayName(toolCall: CopilotToolCall): string {
function RunSkipButtons({
toolCall,
onStateChange,
editedParams,
}: {
toolCall: CopilotToolCall
onStateChange?: (state: any) => void
editedParams?: any
}) {
const [isProcessing, setIsProcessing] = useState(false)
const [buttonsHidden, setButtonsHidden] = useState(false)
@@ -280,7 +326,7 @@ function RunSkipButtons({
setIsProcessing(true)
setButtonsHidden(true)
try {
await handleRun(toolCall, setToolCallState, onStateChange)
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
} finally {
setIsProcessing(false)
}
@@ -418,14 +464,29 @@ export function InlineToolCall({
)
const toolCall = liveToolCall || toolCallProp

// Guard: nothing to render without a toolCall
if (!toolCall) return null

const isExpandablePending =
toolCall?.state === 'pending' &&
(toolCall.name === 'make_api_request' || toolCall.name === 'set_global_workflow_variables')
(toolCall.name === 'make_api_request' ||
toolCall.name === 'set_global_workflow_variables' ||
toolCall.name === 'run_workflow')

const [expanded, setExpanded] = useState(isExpandablePending)

// Guard: nothing to render without a toolCall
if (!toolCall) return null
// State for editable parameters
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
const [editedParams, setEditedParams] = useState(params)
const paramsRef = useRef(params)

// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
useEffect(() => {
if (JSON.stringify(params) !== JSON.stringify(paramsRef.current)) {
setEditedParams(params)
paramsRef.current = params
}
}, [params])

// Skip rendering tools that are not in the registry or are explicitly omitted
try {
@@ -436,7 +497,9 @@ export function InlineToolCall({
return null
}
const isExpandableTool =
toolCall.name === 'make_api_request' || toolCall.name === 'set_global_workflow_variables'
toolCall.name === 'make_api_request' ||
toolCall.name === 'set_global_workflow_variables' ||
toolCall.name === 'run_workflow'

const showButtons = shouldShowRunSkipButtons(toolCall)
const showMoveToBackground =
@@ -450,7 +513,6 @@ export function InlineToolCall({
}

const displayName = getDisplayName(toolCall)
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}

const isLoadingState =
toolCall.state === ClientToolCallState.pending ||
@@ -460,8 +522,8 @@ export function InlineToolCall({

const renderPendingDetails = () => {
if (toolCall.name === 'make_api_request') {
const url = params.url || ''
const method = (params.method || '').toUpperCase()
const url = editedParams.url || ''
const method = (editedParams.method || '').toUpperCase()
return (
<div className='w-full overflow-hidden rounded-[4px] border border-[var(--border-strong)] bg-[#1F1F1F]'>
<table className='w-full table-fixed bg-transparent'>
@@ -479,19 +541,23 @@ export function InlineToolCall({
<tr className='group relative border-[var(--border-strong)] border-t bg-transparent'>
<td className='relative w-[26%] border-[var(--border-strong)] border-r bg-transparent p-0'>
<div className='px-[10px] py-[8px]'>
<span className='font-mono text-muted-foreground text-xs'>
{method || 'GET'}
</span>
<input
type='text'
value={method || 'GET'}
onChange={(e) => setEditedParams({ ...editedParams, method: e.target.value })}
className='w-full bg-transparent font-mono text-muted-foreground text-xs outline-none focus:text-foreground'
/>
</div>
</td>
<td className='relative w-[74%] bg-transparent p-0'>
<div className='min-w-0 px-[10px] py-[8px]'>
<span
className='block break-all font-mono text-muted-foreground text-xs'
title={url}
>
{url || 'URL not provided'}
</span>
<input
type='text'
value={url || ''}
onChange={(e) => setEditedParams({ ...editedParams, url: e.target.value })}
placeholder='URL not provided'
className='w-full bg-transparent font-mono text-muted-foreground text-xs outline-none focus:text-foreground'
/>
</div>
</td>
</tr>
@@ -503,17 +569,20 @@ export function InlineToolCall({

if (toolCall.name === 'set_environment_variables') {
const variables =
params.variables && typeof params.variables === 'object' ? params.variables : {}
editedParams.variables && typeof editedParams.variables === 'object'
? editedParams.variables
: {}

// Normalize variables - handle both direct key-value and nested {name, value} format
const normalizedEntries: Array<[string, string]> = []
// Store [originalKey, displayName, displayValue]
const normalizedEntries: Array<[string, string, string]> = []
Object.entries(variables).forEach(([key, value]) => {
if (typeof value === 'object' && value !== null && 'name' in value && 'value' in value) {
// Handle { name: "KEY", value: "VAL" } format
normalizedEntries.push([String((value as any).name), String((value as any).value)])
// Handle { name: "KEY", value: "VAL" } format (common in arrays or structured objects)
normalizedEntries.push([key, String((value as any).name), String((value as any).value)])
} else {
// Handle direct key-value format
normalizedEntries.push([key, String(value)])
normalizedEntries.push([key, key, String(value)])
}
})

@@ -538,21 +607,75 @@ export function InlineToolCall({
</td>
</tr>
) : (
normalizedEntries.map(([name, value]) => (
normalizedEntries.map(([originalKey, name, value]) => (
<tr
key={name}
key={originalKey}
className='group relative border-[var(--border-strong)] border-t bg-transparent'
>
<td className='relative w-[36%] border-[var(--border-strong)] border-r bg-transparent p-0'>
<div className='px-[10px] py-[8px]'>
<span className='truncate font-medium text-foreground text-xs'>{name}</span>
<input
type='text'
value={name}
onChange={(e) => {
const newName = e.target.value
const newVariables = Array.isArray(variables)
? [...variables]
: { ...variables }

if (Array.isArray(newVariables)) {
// Array format: update .name property
const idx = Number(originalKey)
const item = newVariables[idx]
if (typeof item === 'object' && item !== null && 'name' in item) {
newVariables[idx] = { ...item, name: newName }
}
} else {
// Object format: rename key
// We need to preserve the value but change the key
const value = newVariables[originalKey as keyof typeof newVariables]
delete newVariables[originalKey as keyof typeof newVariables]
newVariables[newName as keyof typeof newVariables] = value
}
setEditedParams({ ...editedParams, variables: newVariables })
}}
className='w-full bg-transparent font-medium text-foreground text-xs outline-none'
/>
</div>
</td>
<td className='relative w-[64%] bg-transparent p-0'>
<div className='min-w-0 px-[10px] py-[8px]'>
<span className='block overflow-x-auto whitespace-nowrap font-mono text-muted-foreground text-xs'>
{value}
</span>
<input
type='text'
value={value}
onChange={(e) => {
// Clone the variables container (works for both Array and Object)
const newVariables = Array.isArray(variables)
? [...variables]
: { ...variables }

const currentVal =
newVariables[originalKey as keyof typeof newVariables]

if (
typeof currentVal === 'object' &&
currentVal !== null &&
'value' in currentVal
) {
// Update value in object structure
newVariables[originalKey as keyof typeof newVariables] = {
...(currentVal as any),
value: e.target.value,
}
} else {
// Update direct value
newVariables[originalKey as keyof typeof newVariables] = e.target
.value as any
}
setEditedParams({ ...editedParams, variables: newVariables })
}}
className='w-full bg-transparent font-mono text-muted-foreground text-xs outline-none focus:text-foreground'
/>
</div>
</td>
</tr>
@@ -565,7 +688,7 @@ export function InlineToolCall({
}

if (toolCall.name === 'set_global_workflow_variables') {
const ops = Array.isArray(params.operations) ? (params.operations as any[]) : []
const ops = Array.isArray(editedParams.operations) ? (editedParams.operations as any[]) : []
return (
<div className='w-full overflow-hidden rounded border border-muted bg-card'>
<div className='grid grid-cols-3 gap-0 border-muted/60 border-b bg-muted/40 py-1.5'>
@@ -588,9 +711,16 @@ export function InlineToolCall({
{ops.map((op, idx) => (
<div key={idx} className='grid grid-cols-3 gap-0 py-1.5'>
<div className='min-w-0 self-start px-2'>
<span className='truncate font-season text-amber-800 text-xs dark:text-amber-200'>
{String(op.name || '')}
</span>
<input
type='text'
value={String(op.name || '')}
onChange={(e) => {
const newOps = [...ops]
newOps[idx] = { ...op, name: e.target.value }
setEditedParams({ ...editedParams, operations: newOps })
}}
className='w-full bg-transparent font-season text-amber-800 text-xs outline-none dark:text-amber-200'
/>
</div>
<div className='self-start px-2'>
<span className='rounded border px-1 py-0.5 font-[470] font-season text-[#707070] text-[10px] dark:text-[#E8E8E8]'>
@@ -599,9 +729,16 @@ export function InlineToolCall({
</div>
<div className='min-w-0 self-start px-2'>
{op.value !== undefined ? (
<span className='block overflow-x-auto whitespace-nowrap font-[470] font-mono text-amber-700 text-xs dark:text-amber-300'>
{String(op.value)}
</span>
<input
type='text'
value={String(op.value)}
onChange={(e) => {
const newOps = [...ops]
newOps[idx] = { ...op, value: e.target.value }
setEditedParams({ ...editedParams, operations: newOps })
}}
className='w-full bg-transparent font-[470] font-mono text-amber-700 text-xs outline-none focus:text-amber-800 dark:text-amber-300 dark:focus:text-amber-200'
/>
) : (
<span className='font-[470] font-season text-[#707070] text-xs dark:text-[#E8E8E8]'>
—
@@ -616,6 +753,111 @@ export function InlineToolCall({
)
}

if (toolCall.name === 'run_workflow') {
// Get inputs - could be in multiple locations
let inputs = editedParams.input || editedParams.inputs || editedParams.workflow_input
let isNestedInWorkflowInput = false

// If input is a JSON string, parse it
if (typeof inputs === 'string') {
try {
inputs = JSON.parse(inputs)
} catch {
inputs = {}
}
}

// Check if workflow_input exists and contains the actual inputs
if (editedParams.workflow_input && typeof editedParams.workflow_input === 'object') {
inputs = editedParams.workflow_input
isNestedInWorkflowInput = true
}

// If no inputs object found, treat base editedParams as inputs (excluding system fields)
if (!inputs || typeof inputs !== 'object') {
const { workflowId, workflow_input, ...rest } = editedParams
inputs = rest
}

const safeInputs = inputs && typeof inputs === 'object' ? inputs : {}
const inputEntries = Object.entries(safeInputs)

return (
<div className='w-full overflow-hidden rounded-[4px] border border-[var(--border-strong)] bg-[#1F1F1F]'>
<table className='w-full table-fixed bg-transparent'>
<thead className='bg-transparent'>
<tr className='border-[var(--border-strong)] border-b bg-transparent'>
<th className='w-[36%] border-[var(--border-strong)] border-r bg-transparent px-[10px] py-[5px] text-left font-medium text-[14px] text-[var(--text-tertiary)]'>
Input
</th>
<th className='w-[64%] bg-transparent px-[10px] py-[5px] text-left font-medium text-[14px] text-[var(--text-tertiary)]'>
Value
</th>
</tr>
</thead>
<tbody className='bg-transparent'>
{inputEntries.length === 0 ? (
<tr className='border-[var(--border-strong)] border-t bg-transparent'>
<td colSpan={2} className='px-[10px] py-[8px] text-muted-foreground text-xs'>
No inputs provided
</td>
</tr>
) : (
inputEntries.map(([key, value]) => (
<tr
key={key}
className='group relative border-[var(--border-strong)] border-t bg-transparent'
>
<td className='relative w-[36%] border-[var(--border-strong)] border-r bg-transparent p-0'>
<div className='px-[10px] py-[8px]'>
<span className='truncate font-medium text-foreground text-xs'>{key}</span>
</div>
</td>
<td className='relative w-[64%] bg-transparent p-0'>
<div className='min-w-0 px-[10px] py-[8px]'>
<input
type='text'
value={String(value)}
onChange={(e) => {
const newInputs = { ...safeInputs, [key]: e.target.value }

// Determine how to update based on original structure
if (isNestedInWorkflowInput) {
// Update workflow_input
setEditedParams({ ...editedParams, workflow_input: newInputs })
} else if (typeof editedParams.input === 'string') {
// Input was a JSON string, serialize back
setEditedParams({ ...editedParams, input: JSON.stringify(newInputs) })
} else if (
editedParams.input &&
typeof editedParams.input === 'object'
) {
// Input is an object
setEditedParams({ ...editedParams, input: newInputs })
} else if (
editedParams.inputs &&
typeof editedParams.inputs === 'object'
) {
// Inputs is an object
setEditedParams({ ...editedParams, inputs: newInputs })
} else {
// Flat structure - update at base level
setEditedParams({ ...editedParams, [key]: e.target.value })
}
}}
className='w-full bg-transparent font-mono text-muted-foreground text-xs outline-none focus:text-foreground'
/>
</div>
</td>
</tr>
))
)}
</tbody>
</table>
</div>
)
}

return null
}

@@ -630,7 +872,13 @@ export function InlineToolCall({
className='font-[470] font-season text-[#939393] text-sm dark:text-[#939393]'
/>
<div className='mt-[8px]'>{renderPendingDetails()}</div>
{showButtons && <RunSkipButtons toolCall={toolCall} onStateChange={handleStateChange} />}
{showButtons && (
<RunSkipButtons
toolCall={toolCall}
onStateChange={handleStateChange}
editedParams={editedParams}
/>
)}
</div>
)
}
@@ -652,7 +900,11 @@ export function InlineToolCall({
</div>
{isExpandableTool && expanded && <div>{renderPendingDetails()}</div>}
{showButtons ? (
<RunSkipButtons toolCall={toolCall} onStateChange={handleStateChange} />
<RunSkipButtons
toolCall={toolCall}
onStateChange={handleStateChange}
editedParams={editedParams}
/>
) : showMoveToBackground ? (
<div className='mt-[8px]'>
<Button
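To summarize the editable-parameter flow added in this file: the pending-detail tables write user edits into editedParams, RunSkipButtons forwards that object, and handleRun prefers it over the params that arrived with the tool call. A condensed sketch of the merge step, with types loosened to any as in the component:

// editedParams (user edits) wins over the original tool-call params.
const mergedParams =
  editedParams ||
  (toolCall as any).params ||
  (toolCall as any).parameters ||
  (toolCall as any).input ||
  {}
await instance.handleAccept?.(mergedParams) // edited values reach the client tool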
@@ -0,0 +1,284 @@
|
||||
/**
|
||||
* Plan Mode Section component with resizable markdown content display.
|
||||
* Displays markdown content in a separate section at the top of the copilot panel.
|
||||
* Follows emcn design principles with consistent spacing, typography, and color scheme.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* import { PlanModeSection } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components'
|
||||
*
|
||||
* function CopilotPanel() {
|
||||
* const plan = "# My Plan\n\nThis is a plan description..."
|
||||
*
|
||||
* return (
|
||||
* <PlanModeSection
|
||||
* content={plan}
|
||||
* initialHeight={200}
|
||||
* minHeight={100}
|
||||
* maxHeight={600}
|
||||
* />
|
||||
* )
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
|
||||
'use client'
|
||||
|
||||
import * as React from 'react'
|
||||
import { Check, GripHorizontal, Pencil, X } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { Trash } from '@/components/emcn/icons/trash'
|
||||
import { Textarea } from '@/components/ui'
|
||||
import { cn } from '@/lib/utils'
|
||||
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||
|
||||
/**
|
||||
* Shared border and background styles
|
||||
*/
|
||||
const SURFACE_5 = 'bg-[var(--surface-5)] dark:bg-[var(--surface-5)]'
|
||||
const SURFACE_9 = 'bg-[var(--surface-9)] dark:bg-[var(--surface-9)]'
|
||||
const BORDER_STRONG = 'border-[var(--border-strong)] dark:border-[var(--border-strong)]'
|
||||
|
||||
export interface PlanModeSectionProps {
|
||||
/**
|
||||
* Markdown content to display
|
||||
*/
|
||||
content: string
|
||||
/**
|
||||
* Optional class name for additional styling
|
||||
*/
|
||||
className?: string
|
||||
/**
|
||||
* Initial height of the section in pixels
|
||||
* @default 180
|
||||
*/
|
||||
initialHeight?: number
|
||||
/**
|
||||
* Minimum height in pixels
|
||||
* @default 80
|
||||
*/
|
||||
minHeight?: number
|
||||
/**
|
||||
* Maximum height in pixels
|
||||
* @default 600
|
||||
*/
|
||||
maxHeight?: number
|
||||
/**
|
||||
* Callback function when clear button is clicked
|
||||
*/
|
||||
onClear?: () => void
|
||||
/**
|
||||
* Callback function when save button is clicked
|
||||
* Receives the current content as parameter
|
||||
*/
|
||||
onSave?: (content: string) => void
|
||||
/**
|
||||
* Callback when Build Plan button is clicked
|
||||
*/
|
||||
onBuildPlan?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Plan Mode Section component for displaying markdown content with resizable height.
|
||||
* Features: pinned position, resizable height with drag handle, internal scrolling.
|
||||
*/
|
||||
const PlanModeSection: React.FC<PlanModeSectionProps> = ({
|
||||
content,
|
||||
className,
|
||||
initialHeight,
|
||||
minHeight = 80,
|
||||
maxHeight = 600,
|
||||
onClear,
|
||||
onSave,
|
||||
onBuildPlan,
|
||||
}) => {
|
||||
// Default to 75% of max height
|
||||
const defaultHeight = initialHeight ?? Math.floor(maxHeight * 0.75)
|
||||
const [height, setHeight] = React.useState(defaultHeight)
|
||||
const [isResizing, setIsResizing] = React.useState(false)
|
||||
const [isEditing, setIsEditing] = React.useState(false)
|
||||
const [editedContent, setEditedContent] = React.useState(content)
|
||||
const resizeStartRef = React.useRef({ y: 0, startHeight: 0 })
|
||||
const textareaRef = React.useRef<HTMLTextAreaElement>(null)
|
||||
|
||||
// Update edited content when content prop changes
|
||||
React.useEffect(() => {
|
||||
if (!isEditing) {
|
||||
setEditedContent(content)
|
||||
}
|
||||
}, [content, isEditing])
|
||||
|
||||
const handleResizeStart = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.preventDefault()
|
||||
setIsResizing(true)
|
||||
resizeStartRef.current = {
|
||||
y: e.clientY,
|
||||
startHeight: height,
|
||||
}
|
||||
},
|
||||
[height]
|
||||
)
|
||||
|
||||
const handleResizeMove = React.useCallback(
|
||||
(e: MouseEvent) => {
|
||||
if (!isResizing) return
|
||||
|
||||
const deltaY = e.clientY - resizeStartRef.current.y
|
||||
const newHeight = Math.max(
|
||||
minHeight,
|
||||
Math.min(maxHeight, resizeStartRef.current.startHeight + deltaY)
|
||||
)
|
||||
setHeight(newHeight)
|
||||
},
|
||||
[isResizing, minHeight, maxHeight]
|
||||
)
|
||||
|
||||
const handleResizeEnd = React.useCallback(() => {
|
||||
setIsResizing(false)
|
||||
}, [])
|
||||
|
||||
React.useEffect(() => {
|
||||
if (isResizing) {
|
||||
document.addEventListener('mousemove', handleResizeMove)
|
||||
document.addEventListener('mouseup', handleResizeEnd)
|
||||
document.body.style.cursor = 'ns-resize'
|
||||
document.body.style.userSelect = 'none'
|
||||
|
||||
      return () => {
        document.removeEventListener('mousemove', handleResizeMove)
        document.removeEventListener('mouseup', handleResizeEnd)
        document.body.style.cursor = ''
        document.body.style.userSelect = ''
      }
    }
  }, [isResizing, handleResizeMove, handleResizeEnd])

  const handleEdit = React.useCallback(() => {
    setIsEditing(true)
    setEditedContent(content)
    setTimeout(() => {
      textareaRef.current?.focus()
    }, 50)
  }, [content])

  const handleSave = React.useCallback(() => {
    if (onSave && editedContent.trim() !== content.trim()) {
      onSave(editedContent.trim())
    }
    setIsEditing(false)
  }, [editedContent, content, onSave])

  const handleCancel = React.useCallback(() => {
    setEditedContent(content)
    setIsEditing(false)
  }, [content])

  if (!content || !content.trim()) {
    return null
  }

  return (
    <div
      className={cn('relative flex flex-col rounded-[4px]', SURFACE_5, className)}
      style={{ height: `${height}px` }}
    >
      {/* Header with build/edit/save/clear buttons */}
      <div className='flex flex-shrink-0 items-center justify-between border-[var(--border-strong)] border-b py-[6px] pr-[2px] pl-[12px] dark:border-[var(--border-strong)]'>
        <span className='font-[500] text-[11px] text-[var(--text-secondary)] uppercase tracking-wide dark:text-[var(--text-secondary)]'>
          Workflow Plan
        </span>
        <div className='ml-auto flex items-center gap-[4px]'>
          {isEditing ? (
            <>
              <Button
                variant='ghost'
                className='h-[18px] w-[18px] p-0 hover:text-[var(--text-primary)]'
                onClick={handleCancel}
                aria-label='Cancel editing'
              >
                <X className='h-[11px] w-[11px]' />
              </Button>
              <Button
                variant='ghost'
                className='h-[18px] w-[18px] p-0 hover:text-[var(--text-primary)]'
                onClick={handleSave}
                aria-label='Save changes'
              >
                <Check className='h-[12px] w-[12px]' />
              </Button>
            </>
          ) : (
            <>
              {onBuildPlan && (
                <Button
                  variant='default'
                  onClick={onBuildPlan}
                  className='h-[22px] px-[10px] text-[11px]'
                  title='Build workflow from plan'
                >
                  Build Plan
                </Button>
              )}
              {onSave && (
                <Button
                  variant='ghost'
                  className='h-[18px] w-[18px] p-0 hover:text-[var(--text-primary)]'
                  onClick={handleEdit}
                  aria-label='Edit workflow plan'
                >
                  <Pencil className='h-[10px] w-[10px]' />
                </Button>
              )}
              {onClear && (
                <Button
                  variant='ghost'
                  className='h-[18px] w-[18px] p-0 hover:text-[var(--text-primary)]'
                  onClick={onClear}
                  aria-label='Clear workflow plan'
                >
                  <Trash className='h-[11px] w-[11px]' />
                </Button>
              )}
            </>
          )}
        </div>
      </div>

      {/* Scrollable content area */}
      <div className='flex-1 overflow-y-auto overflow-x-hidden px-[12px] py-[10px]'>
        {isEditing ? (
          <Textarea
            ref={textareaRef}
            value={editedContent}
            onChange={(e) => setEditedContent(e.target.value)}
            className='h-full min-h-full w-full resize-none border-0 bg-transparent p-0 font-[470] font-season text-[13px] text-[var(--text-primary)] leading-[1.4rem] outline-none ring-0 focus-visible:ring-0 focus-visible:ring-offset-0 dark:text-[var(--text-primary)]'
            placeholder='Enter your workflow plan...'
          />
        ) : (
          <CopilotMarkdownRenderer content={content.trim()} />
        )}
      </div>

      {/* Resize handle */}
      <div
        className={cn(
          'group flex h-[20px] w-full cursor-ns-resize items-center justify-center border-t',
          BORDER_STRONG,
          'transition-colors hover:bg-[var(--surface-9)] dark:hover:bg-[var(--surface-9)]',
          isResizing && SURFACE_9
        )}
        onMouseDown={handleResizeStart}
        role='separator'
        aria-orientation='horizontal'
        aria-label='Resize plan section'
      >
        <GripHorizontal className='h-3 w-3 text-[var(--text-secondary)] transition-colors group-hover:text-[var(--text-primary)] dark:text-[var(--text-secondary)] dark:group-hover:text-[var(--text-primary)]' />
      </div>
    </div>
  )
}

PlanModeSection.displayName = 'PlanModeSection'

export { PlanModeSection }
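A minimal usage sketch for the section above, mirroring how the copilot panel renders it later in this diff; `designDocumentContent`, `clearPlanArtifact`, and `savePlanArtifact` are the names used there, and the fragment assumes it sits inside that panel's JSX rather than standing alone:

{designDocumentContent && (
  <div className='flex-shrink-0 px-[8px] pt-[8px]'>
    <PlanModeSection
      content={designDocumentContent}
      onClear={clearPlanArtifact}
      onSave={savePlanArtifact}
    />
  </div>
)}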
@@ -0,0 +1,76 @@
'use client'

import { useMemo } from 'react'
import { Tooltip } from '@/components/emcn'

interface ContextUsageIndicatorProps {
  /** Usage percentage (0-100) */
  percentage: number
  /** Size of the indicator in pixels */
  size?: number
  /** Stroke width in pixels */
  strokeWidth?: number
}

/**
 * Circular context usage indicator showing percentage of context window used.
 * Displays a progress ring that changes color based on usage level.
 *
 * @param props - Component props
 * @returns Rendered context usage indicator
 */
export function ContextUsageIndicator({
  percentage,
  size = 20,
  strokeWidth = 2,
}: ContextUsageIndicatorProps) {
  const radius = (size - strokeWidth) / 2
  const circumference = radius * 2 * Math.PI
  const offset = circumference - (percentage / 100) * circumference

  const color = useMemo(() => {
    if (percentage >= 90) return '#dc2626'
    if (percentage >= 75) return '#d97706'
    return '#6b7280'
  }, [percentage])

  const displayPercentage = useMemo(() => {
    return Math.round(percentage)
  }, [percentage])

  return (
    <Tooltip.Root delayDuration={100}>
      <Tooltip.Trigger asChild>
        <div
          className='flex cursor-pointer items-center justify-center transition-opacity hover:opacity-80'
          style={{ width: size, height: size }}
        >
          <svg width={size} height={size} className='rotate-[-90deg]'>
            <circle
              cx={size / 2}
              cy={size / 2}
              r={radius}
              stroke='currentColor'
              strokeWidth={strokeWidth}
              fill='none'
              className='text-muted-foreground/20'
            />
            <circle
              cx={size / 2}
              cy={size / 2}
              r={radius}
              stroke={color}
              strokeWidth={strokeWidth}
              fill='none'
              strokeDasharray={circumference}
              strokeDashoffset={offset}
              className='transition-all duration-300 ease-in-out'
              strokeLinecap='round'
            />
          </svg>
        </div>
      </Tooltip.Trigger>
      <Tooltip.Content side='top'>{displayPercentage}% context used</Tooltip.Content>
    </Tooltip.Root>
  )
}
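The progress ring above uses the standard stroke-dashoffset technique. A quick worked check with the component's defaults (size 20, strokeWidth 2) and 50% usage, kept as plain numbers for illustration:

const size = 20
const strokeWidth = 2
const percentage = 50

const radius = (size - strokeWidth) / 2 // 9
const circumference = radius * 2 * Math.PI // ~56.55
const offset = circumference - (percentage / 100) * circumference // ~28.27, so half the ring is drawn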
@@ -1,5 +1,6 @@
export { AttachedFilesDisplay } from './attached-files-display/attached-files-display'
export { ContextPills } from './context-pills/context-pills'
export { ContextUsageIndicator } from './context-usage-indicator/context-usage-indicator'
export { MentionMenu } from './mention-menu/mention-menu'
export { ModeSelector } from './mode-selector/mode-selector'
export { ModelSelector } from './model-selector/model-selector'
@@ -1,7 +1,7 @@
'use client'

import { useEffect, useRef, useState } from 'react'
import { MessageSquare, Package } from 'lucide-react'
import { ListTree, MessageSquare, Package } from 'lucide-react'
import {
  Badge,
  Popover,
@@ -13,10 +13,10 @@ import {
import { cn } from '@/lib/utils'

interface ModeSelectorProps {
  /** Current mode - 'ask' or 'build' */
  mode: 'ask' | 'build'
  /** Current mode - 'ask', 'build', or 'plan' */
  mode: 'ask' | 'build' | 'plan'
  /** Callback when mode changes */
  onModeChange?: (mode: 'ask' | 'build') => void
  onModeChange?: (mode: 'ask' | 'build' | 'plan') => void
  /** Whether the input is near the top of viewport (affects dropdown direction) */
  isNearTop: boolean
  /** Whether the selector is disabled */
@@ -24,7 +24,7 @@ interface ModeSelectorProps {
}

/**
 * Mode selector dropdown for switching between Ask and Build modes.
 * Mode selector dropdown for switching between Ask, Build, and Plan modes.
 * Displays appropriate icon and label, with tooltips explaining each mode.
 *
 * @param props - Component props
@@ -39,6 +39,9 @@ export function ModeSelector({ mode, onModeChange, isNearTop, disabled }: ModeSe
    if (mode === 'ask') {
      return <MessageSquare className='h-3 w-3' />
    }
    if (mode === 'plan') {
      return <ListTree className='h-3 w-3' />
    }
    return <Package className='h-3 w-3' />
  }

@@ -46,10 +49,13 @@ export function ModeSelector({ mode, onModeChange, isNearTop, disabled }: ModeSe
    if (mode === 'ask') {
      return 'Ask'
    }
    if (mode === 'plan') {
      return 'Plan'
    }
    return 'Build'
  }

  const handleSelect = (selectedMode: 'ask' | 'build') => {
  const handleSelect = (selectedMode: 'ask' | 'build' | 'plan') => {
    onModeChange?.(selectedMode)
    setOpen(false)
  }
@@ -122,6 +128,10 @@ export function ModeSelector({ mode, onModeChange, isNearTop, disabled }: ModeSe
            <MessageSquare className='h-3.5 w-3.5' />
            <span>Ask</span>
          </PopoverItem>
          {/* <PopoverItem active={mode === 'plan'} onClick={() => handleSelect('plan')}>
            <ListTree className='h-3.5 w-3.5' />
            <span>Plan</span>
          </PopoverItem> */}
          <PopoverItem active={mode === 'build'} onClick={() => handleSelect('build')}>
            <Package className='h-3.5 w-3.5' />
            <span>Build</span>
@@ -19,6 +19,7 @@ import { cn } from '@/lib/utils'
|
||||
import {
|
||||
AttachedFilesDisplay,
|
||||
ContextPills,
|
||||
ContextUsageIndicator,
|
||||
MentionMenu,
|
||||
ModelSelector,
|
||||
ModeSelector,
|
||||
@@ -52,12 +53,13 @@ interface UserInputProps {
|
||||
isAborting?: boolean
|
||||
placeholder?: string
|
||||
className?: string
|
||||
mode?: 'ask' | 'build'
|
||||
onModeChange?: (mode: 'ask' | 'build') => void
|
||||
mode?: 'ask' | 'build' | 'plan'
|
||||
onModeChange?: (mode: 'ask' | 'build' | 'plan') => void
|
||||
value?: string
|
||||
onChange?: (value: string) => void
|
||||
panelWidth?: number
|
||||
clearOnSubmit?: boolean
|
||||
hasPlanArtifact?: boolean
|
||||
}
|
||||
|
||||
interface UserInputRef {
|
||||
@@ -88,6 +90,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
onChange: onControlledChange,
|
||||
panelWidth = 308,
|
||||
clearOnSubmit = true,
|
||||
hasPlanArtifact = false,
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
@@ -97,7 +100,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
// Store hooks
|
||||
const { workflowId, selectedModel, setSelectedModel } = useCopilotStore()
|
||||
const { workflowId, selectedModel, setSelectedModel, contextUsage } = useCopilotStore()
|
||||
|
||||
// Internal state
|
||||
const [internalMessage, setInternalMessage] = useState('')
|
||||
@@ -112,7 +115,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
|
||||
// Effective placeholder
|
||||
const effectivePlaceholder =
|
||||
placeholder || (mode === 'ask' ? 'Ask about your workflow' : 'Plan, search, build anything')
|
||||
placeholder ||
|
||||
(mode === 'ask'
|
||||
? 'Ask about your workflow'
|
||||
: mode === 'plan'
|
||||
? 'Plan your workflow'
|
||||
: 'Plan, search, build anything')
|
||||
|
||||
// Custom hooks - order matters for ref sharing
|
||||
// Context management (manages selectedContexts state)
|
||||
@@ -271,51 +279,73 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}, [mentionMenu.openSubmenuFor])
|
||||
|
||||
// Handlers
|
||||
const handleSubmit = useCallback(async () => {
|
||||
const trimmedMessage = message.trim()
|
||||
if (!trimmedMessage || disabled || isLoading) return
|
||||
const handleSubmit = useCallback(
|
||||
async (overrideMessage?: string, options: { preserveInput?: boolean } = {}) => {
|
||||
const targetMessage = overrideMessage ?? message
|
||||
const trimmedMessage = targetMessage.trim()
|
||||
if (!trimmedMessage || disabled || isLoading) return
|
||||
|
||||
const failedUploads = fileAttachments.attachedFiles.filter((f) => !f.uploading && !f.key)
|
||||
if (failedUploads.length > 0) {
|
||||
logger.error(`Some files failed to upload: ${failedUploads.map((f) => f.name).join(', ')}`)
|
||||
const failedUploads = fileAttachments.attachedFiles.filter((f) => !f.uploading && !f.key)
|
||||
if (failedUploads.length > 0) {
|
||||
logger.error(
|
||||
`Some files failed to upload: ${failedUploads.map((f) => f.name).join(', ')}`
|
||||
)
|
||||
}
|
||||
|
||||
const fileAttachmentsForApi = fileAttachments.attachedFiles
|
||||
.filter((f) => !f.uploading && f.key)
|
||||
.map((f) => ({
|
||||
id: f.id,
|
||||
key: f.key!,
|
||||
filename: f.name,
|
||||
media_type: f.type,
|
||||
size: f.size,
|
||||
}))
|
||||
|
||||
onSubmit(trimmedMessage, fileAttachmentsForApi, contextManagement.selectedContexts as any)
|
||||
|
||||
const shouldClearInput = clearOnSubmit && !options.preserveInput && !overrideMessage
|
||||
if (shouldClearInput) {
|
||||
fileAttachments.attachedFiles.forEach((f) => {
|
||||
if (f.previewUrl) {
|
||||
URL.revokeObjectURL(f.previewUrl)
|
||||
}
|
||||
})
|
||||
|
||||
setMessage('')
|
||||
fileAttachments.clearAttachedFiles()
|
||||
contextManagement.clearContexts()
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
} else {
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
}
|
||||
|
||||
mentionMenu.setShowMentionMenu(false)
|
||||
},
|
||||
[
|
||||
message,
|
||||
disabled,
|
||||
isLoading,
|
||||
fileAttachments,
|
||||
onSubmit,
|
||||
contextManagement,
|
||||
clearOnSubmit,
|
||||
setMessage,
|
||||
mentionMenu,
|
||||
]
|
||||
)
|
||||
|
||||
const handleBuildWorkflow = useCallback(() => {
|
||||
if (!hasPlanArtifact || !onModeChange) {
|
||||
return
|
||||
}
|
||||
if (disabled || isLoading) {
|
||||
return
|
||||
}
|
||||
|
||||
const fileAttachmentsForApi = fileAttachments.attachedFiles
|
||||
.filter((f) => !f.uploading && f.key)
|
||||
.map((f) => ({
|
||||
id: f.id,
|
||||
key: f.key!,
|
||||
filename: f.name,
|
||||
media_type: f.type,
|
||||
size: f.size,
|
||||
}))
|
||||
|
||||
onSubmit(trimmedMessage, fileAttachmentsForApi, contextManagement.selectedContexts as any)
|
||||
|
||||
if (clearOnSubmit) {
|
||||
fileAttachments.attachedFiles.forEach((f) => {
|
||||
if (f.previewUrl) {
|
||||
URL.revokeObjectURL(f.previewUrl)
|
||||
}
|
||||
})
|
||||
|
||||
setMessage('')
|
||||
fileAttachments.clearAttachedFiles()
|
||||
contextManagement.clearContexts()
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
}
|
||||
mentionMenu.setShowMentionMenu(false)
|
||||
}, [
|
||||
message,
|
||||
disabled,
|
||||
isLoading,
|
||||
fileAttachments,
|
||||
onSubmit,
|
||||
contextManagement,
|
||||
clearOnSubmit,
|
||||
setMessage,
|
||||
mentionMenu,
|
||||
])
|
||||
onModeChange('build')
|
||||
void handleSubmit('build the workflow according to the design plan', { preserveInput: true })
|
||||
}, [hasPlanArtifact, onModeChange, disabled, isLoading, handleSubmit])
|
||||
|
||||
const handleAbort = useCallback(() => {
|
||||
if (onAbort && isLoading) {
|
||||
@@ -576,25 +606,47 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
onDragOver={fileAttachments.handleDragOver}
|
||||
onDrop={fileAttachments.handleDrop}
|
||||
>
|
||||
{/* Top Row: @ Button + Context Pills + Context Usage Pill */}
|
||||
<div className='mb-[6px] flex flex-wrap items-center gap-[6px]'>
|
||||
<Badge
|
||||
variant='outline'
|
||||
onClick={handleOpenMentionMenuWithAt}
|
||||
title='Insert @'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<AtSign className='h-3 w-3' strokeWidth={1.75} />
|
||||
</Badge>
|
||||
{/* Top Row: Context controls + Build Workflow button */}
|
||||
<div className='mb-[6px] flex flex-wrap items-center justify-between gap-[6px]'>
|
||||
<div className='flex flex-wrap items-center gap-[6px]'>
|
||||
<Badge
|
||||
variant='outline'
|
||||
onClick={handleOpenMentionMenuWithAt}
|
||||
title='Insert @'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<AtSign className='h-3 w-3' strokeWidth={1.75} />
|
||||
</Badge>
|
||||
|
||||
{/* Selected Context Pills */}
|
||||
<ContextPills
|
||||
contexts={contextManagement.selectedContexts}
|
||||
onRemoveContext={contextManagement.removeContext}
|
||||
/>
|
||||
{/* Context Usage Indicator */}
|
||||
{contextUsage && contextUsage.percentage > 0 && (
|
||||
<ContextUsageIndicator
|
||||
percentage={contextUsage.percentage}
|
||||
size={18}
|
||||
strokeWidth={2.5}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Selected Context Pills */}
|
||||
<ContextPills
|
||||
contexts={contextManagement.selectedContexts}
|
||||
onRemoveContext={contextManagement.removeContext}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{hasPlanArtifact && (
|
||||
<Button
|
||||
type='button'
|
||||
variant='outline'
|
||||
onClick={handleBuildWorkflow}
|
||||
disabled={disabled || isLoading}
|
||||
>
|
||||
Build Plan
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Attached Files Display */}
|
||||
@@ -709,7 +761,9 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
onClick={handleSubmit}
|
||||
onClick={() => {
|
||||
void handleSubmit()
|
||||
}}
|
||||
disabled={!canSubmit}
|
||||
className={cn(
|
||||
'h-[22px] w-[22px] rounded-full p-0 transition-colors',
|
||||
|
||||
@@ -8,8 +8,8 @@ import { Button } from '@/components/emcn'
interface WelcomeProps {
  /** Callback when a suggested question is clicked */
  onQuestionClick?: (question: string) => void
  /** Current copilot mode ('ask' for Q&A, 'build' for workflow building) */
  mode?: 'ask' | 'build'
  /** Current copilot mode ('ask' for Q&A, 'plan' for planning, 'build' for workflow building) */
  mode?: 'ask' | 'build' | 'plan'
}

/**
@@ -1,7 +1,15 @@
|
||||
'use client'
|
||||
|
||||
import type { MouseEvent as ReactMouseEvent } from 'react'
|
||||
import { forwardRef, useCallback, useEffect, useImperativeHandle, useRef, useState } from 'react'
|
||||
import {
|
||||
forwardRef,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useImperativeHandle,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react'
|
||||
import { History, Plus } from 'lucide-react'
|
||||
import {
|
||||
Button,
|
||||
@@ -16,6 +24,7 @@ import { Trash } from '@/components/emcn/icons/trash'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
CopilotMessage,
|
||||
PlanModeSection,
|
||||
TodoList,
|
||||
UserInput,
|
||||
Welcome,
|
||||
@@ -79,6 +88,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
inputValue,
|
||||
planTodos,
|
||||
showPlanTodos,
|
||||
streamingPlanContent,
|
||||
sendMessage,
|
||||
abortMessage,
|
||||
createNewChat,
|
||||
@@ -95,6 +105,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
areChatsFresh,
|
||||
workflowId: copilotWorkflowId,
|
||||
setPlanTodos,
|
||||
clearPlanArtifact,
|
||||
savePlanArtifact,
|
||||
} = useCopilotStore()
|
||||
|
||||
// Initialize copilot
|
||||
@@ -132,6 +144,22 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
setPlanTodos,
|
||||
})
|
||||
|
||||
/**
|
||||
* Get markdown content for design document section
|
||||
* Available in all modes once created
|
||||
*/
|
||||
const designDocumentContent = useMemo(() => {
|
||||
// Use streaming content if available
|
||||
if (streamingPlanContent) {
|
||||
logger.info('[DesignDocument] Using streaming plan content', {
|
||||
contentLength: streamingPlanContent.length,
|
||||
})
|
||||
return streamingPlanContent
|
||||
}
|
||||
|
||||
return ''
|
||||
}, [streamingPlanContent])
|
||||
|
||||
/**
|
||||
* Helper function to focus the copilot input
|
||||
*/
|
||||
@@ -454,15 +482,27 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
value={inputValue}
|
||||
onChange={setInputValue}
|
||||
panelWidth={panelWidth}
|
||||
hasPlanArtifact={Boolean(designDocumentContent)}
|
||||
/>
|
||||
</div>
|
||||
<div className='flex-shrink-0 pt-[8px]'>
|
||||
<Welcome onQuestionClick={handleSubmit} mode={mode === 'ask' ? 'ask' : 'build'} />
|
||||
<Welcome onQuestionClick={handleSubmit} mode={mode} />
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
/* Normal messages view */
|
||||
<div className='relative flex flex-1 flex-col overflow-hidden'>
|
||||
{/* Design Document Section - Pinned at top, shown in all modes when available */}
|
||||
{designDocumentContent && (
|
||||
<div className='flex-shrink-0 px-[8px] pt-[8px]'>
|
||||
<PlanModeSection
|
||||
content={designDocumentContent}
|
||||
onClear={clearPlanArtifact}
|
||||
onSave={savePlanArtifact}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='relative flex-1 overflow-hidden'>
|
||||
<div
|
||||
ref={scrollAreaRef}
|
||||
@@ -549,6 +589,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
value={inputValue}
|
||||
onChange={setInputValue}
|
||||
panelWidth={panelWidth}
|
||||
hasPlanArtifact={Boolean(designDocumentContent)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -14,13 +14,12 @@ import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/u
|
||||
import { getEnv } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/db-helpers'
|
||||
import { resolveStartCandidates, StartBlockPath } from '@/lib/workflows/triggers'
|
||||
import { getInputFormatExample as getInputFormatExampleUtil } from '@/lib/workflows/deployment-utils'
|
||||
import { ChatDeploy } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/chat-deploy'
|
||||
import { DeployedWorkflowModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/deployed-workflow-modal'
|
||||
import { DeploymentInfo } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/deployment-info'
|
||||
import { TemplateDeploy } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/components/deploy-modal/components/template-deploy'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
@@ -110,117 +109,7 @@ export function DeployModal({
|
||||
workflowWorkspaceId ? 'YOUR_WORKSPACE_API_KEY' : 'YOUR_PERSONAL_API_KEY'
|
||||
|
||||
const getInputFormatExample = (includeStreaming = false) => {
|
||||
let inputFormatExample = ''
|
||||
try {
|
||||
const blocks = Object.values(useWorkflowStore.getState().blocks)
|
||||
const candidates = resolveStartCandidates(useWorkflowStore.getState().blocks, {
|
||||
execution: 'api',
|
||||
})
|
||||
|
||||
const targetCandidate =
|
||||
candidates.find((candidate) => candidate.path === StartBlockPath.UNIFIED) ||
|
||||
candidates.find((candidate) => candidate.path === StartBlockPath.SPLIT_API) ||
|
||||
candidates.find((candidate) => candidate.path === StartBlockPath.SPLIT_INPUT) ||
|
||||
candidates.find((candidate) => candidate.path === StartBlockPath.LEGACY_STARTER)
|
||||
|
||||
const targetBlock = targetCandidate?.block
|
||||
|
||||
if (targetBlock) {
|
||||
const inputFormat = useSubBlockStore.getState().getValue(targetBlock.id, 'inputFormat')
|
||||
|
||||
const exampleData: Record<string, any> = {}
|
||||
|
||||
if (inputFormat && Array.isArray(inputFormat) && inputFormat.length > 0) {
|
||||
inputFormat.forEach((field: any) => {
|
||||
if (field.name) {
|
||||
switch (field.type) {
|
||||
case 'string':
|
||||
exampleData[field.name] = 'example'
|
||||
break
|
||||
case 'number':
|
||||
exampleData[field.name] = 42
|
||||
break
|
||||
case 'boolean':
|
||||
exampleData[field.name] = true
|
||||
break
|
||||
case 'object':
|
||||
exampleData[field.name] = { key: 'value' }
|
||||
break
|
||||
case 'array':
|
||||
exampleData[field.name] = [1, 2, 3]
|
||||
break
|
||||
case 'files':
|
||||
exampleData[field.name] = [
|
||||
{
|
||||
data: 'data:application/pdf;base64,...',
|
||||
type: 'file',
|
||||
name: 'document.pdf',
|
||||
mime: 'application/pdf',
|
||||
},
|
||||
]
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Add streaming parameters if enabled and outputs are selected
|
||||
if (includeStreaming && selectedStreamingOutputs.length > 0) {
|
||||
exampleData.stream = true
|
||||
// Convert blockId_attribute format to blockName.attribute format for display
|
||||
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/i
|
||||
|
||||
const convertedOutputs = selectedStreamingOutputs
|
||||
.map((outputId) => {
|
||||
// If it starts with a UUID, convert to blockName.attribute format
|
||||
if (UUID_REGEX.test(outputId)) {
|
||||
const underscoreIndex = outputId.indexOf('_')
|
||||
if (underscoreIndex === -1) return null
|
||||
|
||||
const blockId = outputId.substring(0, underscoreIndex)
|
||||
const attribute = outputId.substring(underscoreIndex + 1)
|
||||
|
||||
// Find the block by ID and get its name
|
||||
const block = blocks.find((b) => b.id === blockId)
|
||||
if (block?.name) {
|
||||
// Normalize block name: lowercase and remove spaces
|
||||
const normalizedBlockName = block.name.toLowerCase().replace(/\s+/g, '')
|
||||
return `${normalizedBlockName}.${attribute}`
|
||||
}
|
||||
// Block not found (deleted), return null to filter out
|
||||
return null
|
||||
}
|
||||
|
||||
// Already in blockName.attribute format, verify the block exists
|
||||
const parts = outputId.split('.')
|
||||
if (parts.length >= 2) {
|
||||
const blockName = parts[0]
|
||||
// Check if a block with this name exists
|
||||
const block = blocks.find(
|
||||
(b) => b.name?.toLowerCase().replace(/\s+/g, '') === blockName.toLowerCase()
|
||||
)
|
||||
if (!block) {
|
||||
// Block not found (deleted), return null to filter out
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
return outputId
|
||||
})
|
||||
.filter((output): output is string => output !== null)
|
||||
|
||||
exampleData.selectedOutputs = convertedOutputs
|
||||
}
|
||||
|
||||
if (Object.keys(exampleData).length > 0) {
|
||||
inputFormatExample = ` -d '${JSON.stringify(exampleData)}'`
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error generating input format example:', error)
|
||||
}
|
||||
|
||||
return inputFormatExample
|
||||
return getInputFormatExampleUtil(includeStreaming, selectedStreamingOutputs)
|
||||
}
|
||||
|
||||
const fetchChatDeploymentInfo = async () => {
|
||||
|
||||
@@ -74,11 +74,14 @@ export function useSubBlockValue<T = any>(
  )

  // Check if we're in diff mode and get diff value if available
  const { isShowingDiff, diffWorkflow } = useWorkflowDiffStore()
  const diffValue =
    isShowingDiff && diffWorkflow
      ? (diffWorkflow.blocks?.[blockId]?.subBlocks?.[subBlockId]?.value ?? null)
      : null
  const { isShowingDiff, hasActiveDiff, baselineWorkflow } = useWorkflowDiffStore()
  const isBaselineView = hasActiveDiff && !isShowingDiff
  const snapshotSubBlock =
    isBaselineView && baselineWorkflow
      ? baselineWorkflow.blocks?.[blockId]?.subBlocks?.[subBlockId]
      : undefined
  const hasSnapshotValue = snapshotSubBlock !== undefined
  const snapshotValue = hasSnapshotValue ? ((snapshotSubBlock as any)?.value ?? null) : null

  // Check if this is an API key field that could be auto-filled
  const isApiKey =
@@ -122,9 +125,9 @@ export function useSubBlockValue<T = any>(
  // Hook to set a value in the subblock store
  const setValue = useCallback(
    (newValue: T) => {
      // Don't allow updates when in diff mode (readonly preview)
      if (isShowingDiff) {
        logger.debug('Ignoring setValue in diff mode', { blockId, subBlockId })
      // Don't allow updates when showing the baseline snapshot (readonly preview)
      if (isBaselineView) {
        logger.debug('Ignoring setValue while viewing baseline diff', { blockId, subBlockId })
        return
      }

@@ -202,17 +205,16 @@ export function useSubBlockValue<T = any>(
      modelValue,
      isStreaming,
      emitValue,
      isShowingDiff,
      isBaselineView,
    ]
  )

  // Determine the effective value: diff value takes precedence if in diff mode
  const effectiveValue =
    isShowingDiff && diffValue !== null
      ? diffValue
      : storeValue !== undefined
        ? storeValue
        : initialValue
  const effectiveValue = hasSnapshotValue
    ? snapshotValue
    : storeValue !== undefined
      ? storeValue
      : initialValue

  // Initialize valueRef on first render
  useEffect(() => {
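A small sketch of the value-resolution order the hook ends up with after this change, written as a standalone helper for illustration (the helper name is hypothetical; the precedence itself comes from the hunk above): baseline snapshot value when viewing the pre-diff workflow, otherwise the live store value, otherwise the initial value.

// Hypothetical helper illustrating the resolution order used above.
function resolveEffectiveValue<T>(
  hasSnapshotValue: boolean,
  snapshotValue: T | null,
  storeValue: T | undefined,
  initialValue: T | null
): T | null {
  if (hasSnapshotValue) return snapshotValue // baseline (pre-diff) view is read-only
  if (storeValue !== undefined) return storeValue // live collaborative store value
  return initialValue // fallback for first render
}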
@@ -72,7 +72,7 @@ export function Editor() {
  // Get block properties (advanced/trigger modes)
  const { advancedMode, triggerMode } = useEditorBlockProperties(
    currentBlockId,
    currentWorkflow.isDiffMode
    currentWorkflow.isSnapshotView
  )

  // Subscribe to block's subblock values
@@ -94,7 +94,7 @@ export function Editor() {
    triggerMode,
    activeWorkflowId,
    blockSubBlockValues,
    currentWorkflow.isDiffMode
    currentWorkflow.isSnapshotView
  )

  // Get block connections
@@ -7,13 +7,14 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
 * Provides access to advanced mode and trigger mode states.
 *
 * @param blockId - The ID of the block being edited
 * @param isDiffMode - Whether we're currently viewing a diff
 * @param isSnapshotView - Whether we're currently viewing the baseline snapshot
 * @returns Block display properties (advanced mode, trigger mode)
 */
export function useEditorBlockProperties(blockId: string | null, isDiffMode: boolean) {
  // Get blocks from appropriate source
export function useEditorBlockProperties(blockId: string | null, isSnapshotView: boolean) {
  const normalBlocks = useWorkflowStore(useCallback((state) => state.blocks, []))
  const diffWorkflow = useWorkflowDiffStore(useCallback((state) => state.diffWorkflow, []))
  const baselineBlocks = useWorkflowDiffStore(
    useCallback((state) => state.baselineWorkflow?.blocks || {}, [])
  )

  const blockProperties = useMemo(() => {
    if (!blockId) {
@@ -23,15 +24,14 @@ export function useEditorBlockProperties(blockId: string | null, isDiffMode: boo
      }
    }

    // Get block from appropriate source based on mode
    const blocks = isDiffMode ? (diffWorkflow as any)?.blocks || {} : normalBlocks
    const block = blocks[blockId]
    const blocks = isSnapshotView ? baselineBlocks : normalBlocks
    const block = blocks?.[blockId]

    return {
      advancedMode: block?.advancedMode ?? false,
      triggerMode: block?.triggerMode ?? false,
    }
  }, [blockId, isDiffMode, normalBlocks, diffWorkflow])
  }, [blockId, isSnapshotView, normalBlocks, baselineBlocks])

  return blockProperties
}
@@ -25,7 +25,7 @@ export function useEditorSubblockLayout(
|
||||
displayTriggerMode: boolean,
|
||||
activeWorkflowId: string | null,
|
||||
blockSubBlockValues: Record<string, any>,
|
||||
isDiffMode: boolean
|
||||
isSnapshotView: boolean
|
||||
) {
|
||||
return useMemo(() => {
|
||||
// Guard against missing config or block selection
|
||||
@@ -33,41 +33,34 @@ export function useEditorSubblockLayout(
|
||||
return { subBlocks: [] as SubBlockConfig[], stateToUse: {} }
|
||||
}
|
||||
|
||||
// Get the appropriate state for conditional evaluation
|
||||
let stateToUse: Record<string, any> = {}
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
const workflowBlocks = useWorkflowStore.getState().blocks || {}
|
||||
|
||||
// Get blocks based on whether we're in diff mode
|
||||
let blocks: Record<string, any>
|
||||
if (isDiffMode) {
|
||||
// In diff mode, get blocks from diff workflow
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
const diffWorkflow = diffStore.diffWorkflow
|
||||
blocks = (diffWorkflow as any)?.blocks || {}
|
||||
} else {
|
||||
// In normal mode, get blocks from workflow store
|
||||
blocks = useWorkflowStore.getState().blocks || {}
|
||||
}
|
||||
const sourceBlocks = isSnapshotView
|
||||
? (diffStore.baselineWorkflow?.blocks as Record<string, any>) || {}
|
||||
: workflowBlocks
|
||||
|
||||
const mergedMap = isSnapshotView
|
||||
? { [blockId]: structuredClone(sourceBlocks[blockId]) }
|
||||
: mergeSubblockState(sourceBlocks, activeWorkflowId || undefined, blockId)
|
||||
|
||||
const mergedMap = mergeSubblockState(blocks, activeWorkflowId || undefined, blockId)
|
||||
const mergedState = mergedMap ? mergedMap[blockId] : undefined
|
||||
const mergedSubBlocks = mergedState?.subBlocks || {}
|
||||
|
||||
// In diff mode, prioritize diff workflow values; in normal mode, prioritize live store values
|
||||
stateToUse = Object.keys(mergedSubBlocks).reduce(
|
||||
const stateToUse = Object.keys(mergedSubBlocks).reduce(
|
||||
(acc, key) => {
|
||||
const value = isDiffMode
|
||||
? (mergedSubBlocks[key]?.value ?? null)
|
||||
: blockSubBlockValues[key] !== undefined
|
||||
? blockSubBlockValues[key]
|
||||
: (mergedSubBlocks[key]?.value ?? null)
|
||||
acc[key] = { value }
|
||||
const baselineValue = mergedSubBlocks[key]?.value ?? null
|
||||
const liveValue =
|
||||
blockSubBlockValues[key] !== undefined ? blockSubBlockValues[key] : baselineValue
|
||||
acc[key] = {
|
||||
value: isSnapshotView ? baselineValue : liveValue,
|
||||
}
|
||||
return acc
|
||||
},
|
||||
{} as Record<string, { value: unknown }>
|
||||
)
|
||||
|
||||
// Only add live store values if not in diff mode
|
||||
if (!isDiffMode) {
|
||||
if (!isSnapshotView) {
|
||||
Object.keys(blockSubBlockValues).forEach((key) => {
|
||||
if (!(key in stateToUse)) {
|
||||
stateToUse[key] = { value: blockSubBlockValues[key] }
|
||||
@@ -153,6 +146,6 @@ export function useEditorSubblockLayout(
|
||||
displayTriggerMode,
|
||||
blockSubBlockValues,
|
||||
activeWorkflowId,
|
||||
isDiffMode,
|
||||
isSnapshotView,
|
||||
])
|
||||
}
|
||||
|
||||
@@ -556,21 +556,16 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
* Uses preview values in preview mode, diff workflow values in diff mode,
|
||||
* or the current block's subblock values otherwise.
|
||||
*/
|
||||
let stateToUse: Record<string, { value: unknown }> = {}
|
||||
|
||||
if (data.isPreview && data.subBlockValues) {
|
||||
stateToUse = data.subBlockValues
|
||||
} else if (currentWorkflow.isDiffMode && currentBlock) {
|
||||
stateToUse = currentBlock.subBlocks || {}
|
||||
} else {
|
||||
stateToUse = Object.entries(blockSubBlockValues).reduce(
|
||||
(acc, [key, value]) => {
|
||||
acc[key] = { value }
|
||||
return acc
|
||||
},
|
||||
{} as Record<string, { value: unknown }>
|
||||
)
|
||||
}
|
||||
const stateToUse: Record<string, { value: unknown }> =
|
||||
data.isPreview && data.subBlockValues
|
||||
? data.subBlockValues
|
||||
: Object.entries(blockSubBlockValues).reduce(
|
||||
(acc, [key, value]) => {
|
||||
acc[key] = { value }
|
||||
return acc
|
||||
},
|
||||
{} as Record<string, { value: unknown }>
|
||||
)
|
||||
|
||||
const effectiveAdvanced = displayAdvancedMode
|
||||
const effectiveTrigger = displayTriggerMode
|
||||
|
||||
@@ -24,6 +24,7 @@ export interface CurrentWorkflow {
|
||||
// Mode information
|
||||
isDiffMode: boolean
|
||||
isNormalMode: boolean
|
||||
isSnapshotView: boolean
|
||||
|
||||
// Full workflow state (for cases that need the complete object)
|
||||
workflowState: WorkflowState
|
||||
@@ -59,15 +60,15 @@ export function useCurrentWorkflow(): CurrentWorkflow {
|
||||
}, shallow)
|
||||
|
||||
// Get diff state - now including isDiffReady
|
||||
const { isShowingDiff, isDiffReady, diffWorkflow } = useWorkflowDiffStore()
|
||||
const { isShowingDiff, isDiffReady, hasActiveDiff, baselineWorkflow } = useWorkflowDiffStore()
|
||||
|
||||
// Create the abstracted interface - optimized to prevent unnecessary re-renders
|
||||
const currentWorkflow = useMemo((): CurrentWorkflow => {
|
||||
// Determine which workflow to use - only use diff if it's ready
|
||||
const hasDiffBlocks =
|
||||
!!diffWorkflow && Object.keys((diffWorkflow as any).blocks || {}).length > 0
|
||||
const shouldUseDiff = isShowingDiff && isDiffReady && hasDiffBlocks
|
||||
const activeWorkflow = shouldUseDiff ? diffWorkflow : normalWorkflow
|
||||
// Determine which workflow to use
|
||||
const isSnapshotView =
|
||||
Boolean(baselineWorkflow) && hasActiveDiff && isDiffReady && !isShowingDiff
|
||||
|
||||
const activeWorkflow = isSnapshotView ? (baselineWorkflow as WorkflowState) : normalWorkflow
|
||||
|
||||
return {
|
||||
// Current workflow state
|
||||
@@ -82,8 +83,9 @@ export function useCurrentWorkflow(): CurrentWorkflow {
|
||||
needsRedeployment: activeWorkflow.needsRedeployment,
|
||||
|
||||
// Mode information - update to reflect ready state
|
||||
isDiffMode: shouldUseDiff,
|
||||
isNormalMode: !shouldUseDiff,
|
||||
isDiffMode: hasActiveDiff && isShowingDiff,
|
||||
isNormalMode: !hasActiveDiff || (!isShowingDiff && !isSnapshotView),
|
||||
isSnapshotView: Boolean(isSnapshotView),
|
||||
|
||||
// Full workflow state (for cases that need the complete object)
|
||||
workflowState: activeWorkflow as WorkflowState,
|
||||
@@ -95,7 +97,7 @@ export function useCurrentWorkflow(): CurrentWorkflow {
|
||||
hasBlocks: () => Object.keys(activeWorkflow.blocks || {}).length > 0,
|
||||
hasEdges: () => (activeWorkflow.edges || []).length > 0,
|
||||
}
|
||||
}, [normalWorkflow, isShowingDiff, isDiffReady, diffWorkflow])
|
||||
}, [normalWorkflow, isShowingDiff, isDiffReady, hasActiveDiff, baselineWorkflow])
|
||||
|
||||
return currentWorkflow
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { shallow } from 'zustand/shallow'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
|
||||
import { processStreamingBlockLogs } from '@/lib/tokenization'
|
||||
@@ -107,26 +106,7 @@ export function useWorkflowExecution() {
|
||||
} = useExecutionStore()
|
||||
const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
|
||||
const executionStream = useExecutionStream()
|
||||
const {
|
||||
diffWorkflow: executionDiffWorkflow,
|
||||
isDiffReady: isDiffWorkflowReady,
|
||||
isShowingDiff: isViewingDiff,
|
||||
} = useWorkflowDiffStore(
|
||||
useCallback(
|
||||
(state) => ({
|
||||
diffWorkflow: state.diffWorkflow,
|
||||
isDiffReady: state.isDiffReady,
|
||||
isShowingDiff: state.isShowingDiff,
|
||||
}),
|
||||
[]
|
||||
),
|
||||
shallow
|
||||
)
|
||||
const hasActiveDiffWorkflow =
|
||||
isDiffWorkflowReady &&
|
||||
isViewingDiff &&
|
||||
!!executionDiffWorkflow &&
|
||||
Object.keys(executionDiffWorkflow.blocks || {}).length > 0
|
||||
const isViewingDiff = useWorkflowDiffStore((state) => state.isShowingDiff)
|
||||
|
||||
/**
|
||||
* Validates debug state before performing debug operations
|
||||
@@ -681,9 +661,13 @@ export function useWorkflowExecution() {
|
||||
overrideTriggerType?: 'chat' | 'manual' | 'api'
|
||||
): Promise<ExecutionResult | StreamingExecution> => {
|
||||
// Use diff workflow for execution when available, regardless of canvas view state
|
||||
const executionWorkflowState =
|
||||
hasActiveDiffWorkflow && executionDiffWorkflow ? executionDiffWorkflow : null
|
||||
const usingDiffForExecution = executionWorkflowState !== null
|
||||
const executionWorkflowState = null as {
|
||||
blocks?: any
|
||||
edges?: any
|
||||
loops?: any
|
||||
parallels?: any
|
||||
} | null
|
||||
const usingDiffForExecution = false
|
||||
|
||||
// Read blocks and edges directly from store to ensure we get the latest state,
|
||||
// even if React hasn't re-rendered yet after adding blocks/edges
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import type { ExecutionResult, StreamingExecution } from '@/executor/types'
|
||||
import { useTerminalConsoleStore } from '@/stores/terminal'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
export interface WorkflowExecutionOptions {
|
||||
@@ -25,14 +24,6 @@ export async function executeWorkflowWithFullLogging(
|
||||
throw new Error('No active workflow')
|
||||
}
|
||||
|
||||
// Check if there's an active diff workflow to execute
|
||||
const { diffWorkflow, isDiffReady, isShowingDiff } = useWorkflowDiffStore.getState()
|
||||
const hasActiveDiffWorkflow =
|
||||
isDiffReady &&
|
||||
isShowingDiff &&
|
||||
!!diffWorkflow &&
|
||||
Object.keys(diffWorkflow.blocks || {}).length > 0
|
||||
|
||||
const executionId = options.executionId || uuidv4()
|
||||
const { addConsole } = useTerminalConsoleStore.getState()
|
||||
|
||||
@@ -44,16 +35,6 @@ export async function executeWorkflowWithFullLogging(
|
||||
useDraftState: true,
|
||||
}
|
||||
|
||||
// Add diff workflow override if active
|
||||
if (hasActiveDiffWorkflow) {
|
||||
payload.workflowStateOverride = {
|
||||
blocks: diffWorkflow.blocks,
|
||||
edges: diffWorkflow.edges,
|
||||
loops: diffWorkflow.loops,
|
||||
parallels: diffWorkflow.parallels,
|
||||
}
|
||||
}
|
||||
|
||||
const response = await fetch(`/api/workflows/${activeWorkflowId}/execute`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
||||
@@ -164,7 +164,20 @@ const WorkflowContent = React.memo(() => {
|
||||
}, [blocks])
|
||||
|
||||
// Get diff analysis for edge reconstruction
|
||||
const { diffAnalysis, isShowingDiff, isDiffReady } = useWorkflowDiffStore()
|
||||
const { diffAnalysis, isShowingDiff, isDiffReady, reapplyDiffMarkers, hasActiveDiff } =
|
||||
useWorkflowDiffStore()
|
||||
|
||||
// Re-apply diff markers when blocks change (e.g., after socket rehydration)
|
||||
const blocksRef = useRef(blocks)
|
||||
useEffect(() => {
|
||||
if (hasActiveDiff && isDiffReady && blocks !== blocksRef.current) {
|
||||
blocksRef.current = blocks
|
||||
// Use setTimeout to ensure the store update has settled
|
||||
setTimeout(() => {
|
||||
reapplyDiffMarkers()
|
||||
}, 0)
|
||||
}
|
||||
}, [blocks, hasActiveDiff, isDiffReady, reapplyDiffMarkers])
|
||||
|
||||
// Reconstruct deleted edges when viewing original workflow and filter out invalid edges
|
||||
const edgesForDisplay = useMemo(() => {
|
||||
@@ -255,18 +268,17 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
// Create diff-aware permissions that disable editing when in diff mode
|
||||
const effectivePermissions = useMemo(() => {
|
||||
if (isDiffMode) {
|
||||
// In diff mode, disable all editing regardless of user permissions
|
||||
if (currentWorkflow.isSnapshotView) {
|
||||
// Snapshot view is read-only
|
||||
return {
|
||||
...userPermissions,
|
||||
canEdit: false,
|
||||
canAdmin: false,
|
||||
// Keep canRead true so users can still view content
|
||||
canRead: userPermissions.canRead,
|
||||
}
|
||||
}
|
||||
return userPermissions
|
||||
}, [userPermissions, isDiffMode])
|
||||
}, [userPermissions, currentWorkflow.isSnapshotView])
|
||||
|
||||
// Workspace permissions - get all users and their permissions for this workspace
|
||||
const { permissions: workspacePermissions, error: permissionsError } = useWorkspacePermissions(
|
||||
|
||||
@@ -319,15 +319,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
eventHandlers.current.workflowReverted?.(data)
|
||||
})
|
||||
|
||||
// Workflow update events (external changes like LLM edits)
|
||||
socketInstance.on('workflow-updated', (data) => {
|
||||
logger.info(`Workflow ${data.workflowId} has been updated externally - requesting sync`)
|
||||
// Request fresh workflow state to sync with external changes
|
||||
if (data.workflowId === urlWorkflowId) {
|
||||
socketInstance.emit('request-sync', { workflowId: data.workflowId })
|
||||
}
|
||||
})
|
||||
|
||||
// Shared function to rehydrate workflow stores
|
||||
const rehydrateWorkflowStores = async (
|
||||
workflowId: string,
|
||||
@@ -340,11 +331,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
{ useWorkflowRegistry },
|
||||
{ useWorkflowStore },
|
||||
{ useSubBlockStore },
|
||||
{ useWorkflowDiffStore },
|
||||
] = await Promise.all([
|
||||
import('@/stores/operation-queue/store'),
|
||||
import('@/stores/workflows/registry/store'),
|
||||
import('@/stores/workflows/workflow/store'),
|
||||
import('@/stores/workflows/subblock/store'),
|
||||
import('@/stores/workflow-diff/store'),
|
||||
])
|
||||
|
||||
// Only proceed if this is the active workflow
|
||||
|
||||
@@ -26,6 +26,7 @@ export function useCollaborativeWorkflow() {
|
||||
const undoRedo = useUndoRedo()
|
||||
const isUndoRedoInProgress = useRef(false)
|
||||
const skipEdgeRecording = useRef(false)
|
||||
const lastDiffOperationId = useRef<string | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
const moveHandler = (e: any) => {
|
||||
@@ -55,13 +56,58 @@ export function useCollaborativeWorkflow() {
|
||||
skipEdgeRecording.current = skip
|
||||
}
|
||||
|
||||
const diffOperationHandler = (e: any) => {
|
||||
const {
|
||||
type,
|
||||
baselineSnapshot,
|
||||
proposedState,
|
||||
diffAnalysis,
|
||||
beforeAccept,
|
||||
afterAccept,
|
||||
beforeReject,
|
||||
afterReject,
|
||||
} = e.detail || {}
|
||||
// Don't record during undo/redo operations
|
||||
if (isUndoRedoInProgress.current) return
|
||||
|
||||
// Generate a unique ID for this diff operation to prevent duplicates
|
||||
// Use block keys from the relevant states for each operation type
|
||||
let stateForId
|
||||
if (type === 'apply-diff') {
|
||||
stateForId = proposedState
|
||||
} else if (type === 'accept-diff') {
|
||||
stateForId = afterAccept
|
||||
} else if (type === 'reject-diff') {
|
||||
stateForId = afterReject
|
||||
}
|
||||
|
||||
const blockKeys = stateForId?.blocks ? Object.keys(stateForId.blocks).sort().join(',') : ''
|
||||
const operationId = `${type}-${blockKeys}`
|
||||
|
||||
if (lastDiffOperationId.current === operationId) {
|
||||
logger.debug('Skipping duplicate diff operation', { type, operationId })
|
||||
return // Skip duplicate
|
||||
}
|
||||
lastDiffOperationId.current = operationId
|
||||
|
||||
if (type === 'apply-diff' && baselineSnapshot && proposedState) {
|
||||
undoRedo.recordApplyDiff(baselineSnapshot, proposedState, diffAnalysis)
|
||||
} else if (type === 'accept-diff' && beforeAccept && afterAccept) {
|
||||
undoRedo.recordAcceptDiff(beforeAccept, afterAccept, diffAnalysis, baselineSnapshot)
|
||||
} else if (type === 'reject-diff' && beforeReject && afterReject) {
|
||||
undoRedo.recordRejectDiff(beforeReject, afterReject, diffAnalysis, baselineSnapshot)
|
||||
}
|
||||
}
|
||||
|
||||
window.addEventListener('workflow-record-move', moveHandler)
|
||||
window.addEventListener('workflow-record-parent-update', parentUpdateHandler)
|
||||
window.addEventListener('skip-edge-recording', skipEdgeHandler)
|
||||
window.addEventListener('record-diff-operation', diffOperationHandler)
|
||||
return () => {
|
||||
window.removeEventListener('workflow-record-move', moveHandler)
|
||||
window.removeEventListener('workflow-record-parent-update', parentUpdateHandler)
|
||||
window.removeEventListener('skip-edge-recording', skipEdgeHandler)
|
||||
window.removeEventListener('record-diff-operation', diffOperationHandler)
|
||||
}
|
||||
}, [undoRedo])
|
||||
const {
|
||||
@@ -89,7 +135,8 @@ export function useCollaborativeWorkflow() {
|
||||
const subBlockStore = useSubBlockStore()
|
||||
const variablesStore = useVariablesStore()
|
||||
const { data: session } = useSession()
|
||||
const { isShowingDiff } = useWorkflowDiffStore()
|
||||
const { hasActiveDiff, isShowingDiff } = useWorkflowDiffStore()
|
||||
const isBaselineDiffView = hasActiveDiff && !isShowingDiff
|
||||
|
||||
// Track if we're applying remote changes to avoid infinite loops
|
||||
const isApplyingRemoteChange = useRef(false)
|
||||
@@ -394,6 +441,39 @@ export function useCollaborativeWorkflow() {
|
||||
variablesStore.duplicateVariable(payload.sourceVariableId, payload.id)
|
||||
break
|
||||
}
|
||||
} else if (target === 'workflow') {
|
||||
switch (operation) {
|
||||
case 'replace-state':
|
||||
if (payload.state) {
|
||||
logger.info('Received workflow state replacement from remote user', {
|
||||
userId,
|
||||
blockCount: Object.keys(payload.state.blocks || {}).length,
|
||||
edgeCount: (payload.state.edges || []).length,
|
||||
hasActiveDiff,
|
||||
isShowingDiff,
|
||||
})
|
||||
workflowStore.replaceWorkflowState(payload.state)
|
||||
|
||||
// Extract and apply subblock values
|
||||
const subBlockValues: Record<string, Record<string, any>> = {}
|
||||
Object.entries(payload.state.blocks || {}).forEach(
|
||||
([blockId, block]: [string, any]) => {
|
||||
subBlockValues[blockId] = {}
|
||||
Object.entries(block.subBlocks || {}).forEach(
|
||||
([subBlockId, subBlock]: [string, any]) => {
|
||||
subBlockValues[blockId][subBlockId] = subBlock.value
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
if (activeWorkflowId) {
|
||||
subBlockStore.setWorkflowValues(activeWorkflowId, subBlockValues)
|
||||
}
|
||||
|
||||
logger.info('Successfully applied remote workflow state replacement')
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error applying remote operation:', error)
|
||||
@@ -603,9 +683,9 @@ export function useCollaborativeWorkflow() {
|
||||
return
|
||||
}
|
||||
|
||||
// Skip socket operations when in diff mode
|
||||
if (isShowingDiff) {
|
||||
logger.debug('Skipping socket operation in diff mode:', operation)
|
||||
// Skip socket operations when viewing baseline diff (readonly)
|
||||
if (isBaselineDiffView) {
|
||||
logger.debug('Skipping socket operation while viewing baseline diff:', operation)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -637,7 +717,7 @@ export function useCollaborativeWorkflow() {
|
||||
[
|
||||
addToQueue,
|
||||
session?.user?.id,
|
||||
isShowingDiff,
|
||||
isBaselineDiffView,
|
||||
activeWorkflowId,
|
||||
isInActiveRoom,
|
||||
currentWorkflowId,
|
||||
@@ -648,8 +728,8 @@ export function useCollaborativeWorkflow() {
|
||||
(operation: string, target: string, payload: any, localAction: () => void) => {
|
||||
if (isApplyingRemoteChange.current) return
|
||||
|
||||
if (isShowingDiff) {
|
||||
logger.debug('Skipping debounced socket operation in diff mode:', operation)
|
||||
if (isBaselineDiffView) {
|
||||
logger.debug('Skipping debounced socket operation while viewing baseline diff:', operation)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -667,7 +747,7 @@ export function useCollaborativeWorkflow() {
|
||||
|
||||
emitWorkflowOperation(operation, target, payload)
|
||||
},
|
||||
[emitWorkflowOperation, isShowingDiff, isInActiveRoom, currentWorkflowId, activeWorkflowId]
|
||||
[emitWorkflowOperation, isBaselineDiffView, isInActiveRoom, currentWorkflowId, activeWorkflowId]
|
||||
)
|
||||
|
||||
const collaborativeAddBlock = useCallback(
|
||||
@@ -682,9 +762,9 @@ export function useCollaborativeWorkflow() {
|
||||
autoConnectEdge?: Edge,
|
||||
triggerMode?: boolean
|
||||
) => {
|
||||
// Skip socket operations when in diff mode
|
||||
if (isShowingDiff) {
|
||||
logger.debug('Skipping collaborative add block in diff mode')
|
||||
// Skip socket operations when viewing baseline diff
|
||||
if (isBaselineDiffView) {
|
||||
logger.debug('Skipping collaborative add block while viewing baseline diff')
|
||||
return
|
||||
}
|
||||
|
||||
@@ -866,7 +946,7 @@ export function useCollaborativeWorkflow() {
|
||||
activeWorkflowId,
|
||||
addToQueue,
|
||||
session?.user?.id,
|
||||
isShowingDiff,
|
||||
isBaselineDiffView,
|
||||
isInActiveRoom,
|
||||
currentWorkflowId,
|
||||
undoRedo,
|
||||
@@ -1108,9 +1188,9 @@ export function useCollaborativeWorkflow() {
|
||||
(blockId: string, subblockId: string, value: any, options?: { _visited?: Set<string> }) => {
|
||||
if (isApplyingRemoteChange.current) return
|
||||
|
||||
// Skip socket operations when in diff mode
|
||||
if (isShowingDiff) {
|
||||
logger.debug('Skipping collaborative subblock update in diff mode')
|
||||
// Skip socket operations when viewing baseline diff
|
||||
if (isBaselineDiffView) {
|
||||
logger.debug('Skipping collaborative subblock update while viewing baseline diff')
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1167,7 +1247,15 @@ export function useCollaborativeWorkflow() {
|
||||
// Best-effort; do not block on clearing
|
||||
}
|
||||
},
|
||||
[subBlockStore, currentWorkflowId, addToQueue, session?.user?.id, isShowingDiff, isInActiveRoom]
|
||||
[
|
||||
subBlockStore,
|
||||
currentWorkflowId,
|
||||
activeWorkflowId,
|
||||
addToQueue,
|
||||
session?.user?.id,
|
||||
isBaselineDiffView,
|
||||
isInActiveRoom,
|
||||
]
|
||||
)
|
||||
|
||||
// Immediate tag selection (uses queue but processes immediately, no debouncing)
|
||||
@@ -1659,19 +1747,21 @@ export function useCollaborativeWorkflow() {
|
||||
subBlockStore,
|
||||
|
||||
// Undo/Redo operations (wrapped to prevent recording moves during undo/redo)
|
||||
undo: useCallback(() => {
|
||||
undo: useCallback(async () => {
|
||||
isUndoRedoInProgress.current = true
|
||||
undoRedo.undo()
|
||||
queueMicrotask(() => {
|
||||
await undoRedo.undo()
|
||||
// Use a longer delay to ensure all async operations complete
|
||||
setTimeout(() => {
|
||||
isUndoRedoInProgress.current = false
|
||||
})
|
||||
}, 100)
|
||||
}, [undoRedo]),
|
||||
redo: useCallback(() => {
|
||||
redo: useCallback(async () => {
|
||||
isUndoRedoInProgress.current = true
|
||||
undoRedo.redo()
|
||||
queueMicrotask(() => {
|
||||
await undoRedo.redo()
|
||||
// Use a longer delay to ensure all async operations complete
|
||||
setTimeout(() => {
|
||||
isUndoRedoInProgress.current = false
|
||||
})
|
||||
}, 100)
|
||||
}, [undoRedo]),
|
||||
getUndoRedoSizes: undoRedo.getStackSizes,
|
||||
clearUndoRedo: undoRedo.clearStacks,
|
||||
|
||||
File diff suppressed because it is too large
@@ -23,6 +23,14 @@ export interface CopilotMessage {
  citations?: Citation[]
}

/**
 * Chat config stored in database
 */
export interface CopilotChatConfig {
  mode?: 'ask' | 'build' | 'plan'
  model?: string
}

/**
 * Chat interface for copilot conversations
 */
@@ -33,6 +41,8 @@ export interface CopilotChat {
  messages: CopilotMessage[]
  messageCount: number
  previewYaml: string | null
  planArtifact: string | null
  config: CopilotChatConfig | null
  createdAt: Date
  updatedAt: Date
}
@@ -56,7 +66,7 @@ export interface SendMessageRequest {
  userMessageId?: string // ID from frontend for the user message
  chatId?: string
  workflowId?: string
  mode?: 'ask' | 'agent'
  mode?: 'ask' | 'agent' | 'plan'
  model?:
    | 'gpt-5-fast'
    | 'gpt-5'
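A minimal example of the new config shape, typed against the interfaces above; the concrete values are illustrative rather than taken from the diff:

const exampleConfig: CopilotChatConfig = {
  mode: 'plan', // 'ask' | 'build' | 'plan'
  model: 'gpt-5', // one of the model identifiers accepted by SendMessageRequest
}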
@@ -13,10 +13,12 @@ export const ToolIds = z.enum([
|
||||
'get_operations_examples',
|
||||
'search_documentation',
|
||||
'search_online',
|
||||
'search_patterns',
|
||||
'search_errors',
|
||||
'remember_debug',
|
||||
'make_api_request',
|
||||
'get_environment_variables',
|
||||
'set_environment_variables',
|
||||
'get_oauth_credentials',
|
||||
'get_credentials',
|
||||
'gdrive_request_access',
|
||||
'list_gdrive_files',
|
||||
'read_gdrive_file',
|
||||
@@ -27,6 +29,9 @@ export const ToolIds = z.enum([
|
||||
'set_global_workflow_variables',
|
||||
'oauth_request_access',
|
||||
'get_trigger_blocks',
|
||||
'deploy_workflow',
|
||||
'check_deployment_status',
|
||||
'navigate_ui',
|
||||
])
|
||||
export type ToolId = z.infer<typeof ToolIds>
|
||||
|
||||
@@ -68,6 +73,20 @@ export const ToolArgSchemas = {
|
||||
// New
|
||||
oauth_request_access: z.object({}),
|
||||
|
||||
deploy_workflow: z.object({
|
||||
action: z.enum(['deploy', 'undeploy']).optional().default('deploy'),
|
||||
deployType: z.enum(['api', 'chat']).optional().default('api'),
|
||||
}),
|
||||
|
||||
check_deployment_status: z.object({
|
||||
workflowId: z.string().optional(),
|
||||
}),
|
||||
|
||||
navigate_ui: z.object({
|
||||
destination: z.enum(['workflow', 'logs', 'templates', 'vector_db', 'settings']),
|
||||
workflowName: z.string().optional(),
|
||||
}),
|
||||
|
||||
edit_workflow: z.object({
|
||||
operations: z
|
||||
.array(
|
||||
@@ -128,6 +147,24 @@ export const ToolArgSchemas = {
|
||||
hl: z.string().optional(),
|
||||
}),
|
||||
|
||||
search_patterns: z.object({
|
||||
queries: z.array(z.string()).min(1).max(3),
|
||||
limit: z.number().optional().default(3),
|
||||
}),
|
||||
|
||||
search_errors: z.object({
|
||||
query: z.string(),
|
||||
limit: z.number().optional().default(5),
|
||||
}),
|
||||
|
||||
remember_debug: z.object({
|
||||
operation: z.enum(['add', 'edit', 'delete']),
|
||||
id: z.string().optional(),
|
||||
problem: z.string().optional(),
|
||||
solution: z.string().optional(),
|
||||
description: z.string().optional(),
|
||||
}),
|
||||
|
||||
make_api_request: z.object({
|
||||
url: z.string(),
|
||||
method: z.enum(['GET', 'POST', 'PUT']),
|
||||
@@ -136,13 +173,11 @@ export const ToolArgSchemas = {
|
||||
body: z.union([z.record(z.any()), z.string()]).optional(),
|
||||
}),
|
||||
|
||||
get_environment_variables: z.object({}),
|
||||
|
||||
set_environment_variables: z.object({
|
||||
variables: z.record(z.string()),
|
||||
}),
|
||||
|
||||
get_oauth_credentials: z.object({}),
|
||||
get_credentials: z.object({}),
|
||||
|
||||
gdrive_request_access: z.object({}),
|
||||
|
||||
@@ -208,19 +243,15 @@ export const ToolSSESchemas = {
|
||||
),
|
||||
search_documentation: toolCallSSEFor('search_documentation', ToolArgSchemas.search_documentation),
|
||||
search_online: toolCallSSEFor('search_online', ToolArgSchemas.search_online),
|
||||
search_patterns: toolCallSSEFor('search_patterns', ToolArgSchemas.search_patterns),
|
||||
search_errors: toolCallSSEFor('search_errors', ToolArgSchemas.search_errors),
|
||||
remember_debug: toolCallSSEFor('remember_debug', ToolArgSchemas.remember_debug),
|
||||
make_api_request: toolCallSSEFor('make_api_request', ToolArgSchemas.make_api_request),
|
||||
get_environment_variables: toolCallSSEFor(
|
||||
'get_environment_variables',
|
||||
ToolArgSchemas.get_environment_variables
|
||||
),
|
||||
set_environment_variables: toolCallSSEFor(
|
||||
'set_environment_variables',
|
||||
ToolArgSchemas.set_environment_variables
|
||||
),
|
||||
get_oauth_credentials: toolCallSSEFor(
|
||||
'get_oauth_credentials',
|
||||
ToolArgSchemas.get_oauth_credentials
|
||||
),
|
||||
get_credentials: toolCallSSEFor('get_credentials', ToolArgSchemas.get_credentials),
|
||||
gdrive_request_access: toolCallSSEFor(
|
||||
'gdrive_request_access',
|
||||
ToolArgSchemas.gdrive_request_access as any
|
||||
@@ -230,6 +261,12 @@ export const ToolSSESchemas = {
|
||||
reason: toolCallSSEFor('reason', ToolArgSchemas.reason),
|
||||
// New
|
||||
oauth_request_access: toolCallSSEFor('oauth_request_access', ToolArgSchemas.oauth_request_access),
|
||||
deploy_workflow: toolCallSSEFor('deploy_workflow', ToolArgSchemas.deploy_workflow),
|
||||
check_deployment_status: toolCallSSEFor(
|
||||
'check_deployment_status',
|
||||
ToolArgSchemas.check_deployment_status
|
||||
),
|
||||
navigate_ui: toolCallSSEFor('navigate_ui', ToolArgSchemas.navigate_ui),
|
||||
} as const
|
||||
export type ToolSSESchemaMap = typeof ToolSSESchemas
|
||||
|
||||
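The `toolCallSSEFor` helper itself is not part of this diff; as a rough sketch of what such a factory might look like (the event shape and names below are assumptions, not the project's actual implementation), it would pair a tool-name literal with its argument schema so each SSE entry only accepts matching arguments:

```ts
import { z } from 'zod'

// Hypothetical SSE tool-call event shape; the real helper may differ.
const toolCallSSEFor = <N extends string, A extends z.ZodTypeAny>(name: N, args: A) =>
  z.object({
    type: z.literal('tool_call'),
    name: z.literal(name),
    arguments: args,
  })

// Example: a navigate_ui event schema built from its argument schema.
const navigateUiEvent = toolCallSSEFor(
  'navigate_ui',
  z.object({ destination: z.enum(['workflow', 'logs']), workflowName: z.string().optional() })
)
type NavigateUiEvent = z.infer<typeof navigateUiEvent>
```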
@@ -339,20 +376,53 @@ export const ToolResultSchemas = {
|
||||
}),
|
||||
search_documentation: z.object({ results: z.array(z.any()) }),
|
||||
search_online: z.object({ results: z.array(z.any()) }),
|
||||
search_patterns: z.object({
|
||||
patterns: z.array(
|
||||
z.object({
|
||||
blocks_involved: z.array(z.string()).optional(),
|
||||
description: z.string().optional(),
|
||||
pattern_category: z.string().optional(),
|
||||
pattern_name: z.string().optional(),
|
||||
use_cases: z.array(z.string()).optional(),
|
||||
workflow_json: z.any().optional(),
|
||||
})
|
||||
),
|
||||
}),
|
||||
search_errors: z.object({
|
||||
results: z.array(
|
||||
z.object({
|
||||
problem: z.string().optional(),
|
||||
solution: z.string().optional(),
|
||||
context: z.string().optional(),
|
||||
similarity: z.number().optional(),
|
||||
})
|
||||
),
|
||||
}),
|
||||
remember_debug: z.object({
|
||||
success: z.boolean(),
|
||||
message: z.string().optional(),
|
||||
id: z.string().optional(),
|
||||
}),
|
||||
make_api_request: z.object({
|
||||
status: z.number(),
|
||||
statusText: z.string().optional(),
|
||||
headers: z.record(z.string()).optional(),
|
||||
body: z.any().optional(),
|
||||
}),
|
||||
get_environment_variables: z.object({ variables: z.record(z.string()) }),
|
||||
set_environment_variables: z
|
||||
.object({ variables: z.record(z.string()) })
|
||||
.or(z.object({ message: z.any().optional(), data: z.any().optional() })),
|
||||
get_oauth_credentials: z.object({
|
||||
credentials: z.array(
|
||||
z.object({ id: z.string(), provider: z.string(), isDefault: z.boolean().optional() })
|
||||
),
|
||||
get_credentials: z.object({
|
||||
oauth: z.object({
|
||||
credentials: z.array(
|
||||
z.object({ id: z.string(), provider: z.string(), isDefault: z.boolean().optional() })
|
||||
),
|
||||
total: z.number(),
|
||||
}),
|
||||
environment: z.object({
|
||||
variableNames: z.array(z.string()),
|
||||
count: z.number(),
|
||||
}),
|
||||
}),
|
||||
gdrive_request_access: z.object({
|
||||
granted: z.boolean().optional(),
|
||||
@@ -370,6 +440,30 @@ export const ToolResultSchemas = {
|
||||
}),
|
||||
read_gdrive_file: z.object({ content: z.string().optional(), data: z.any().optional() }),
|
||||
reason: z.object({ reasoning: z.string() }),
|
||||
deploy_workflow: z.object({
|
||||
action: z.enum(['deploy', 'undeploy']).optional(),
|
||||
deployType: z.enum(['api', 'chat']).optional(),
|
||||
isDeployed: z.boolean().optional(),
|
||||
deployedAt: z.string().optional(),
|
||||
needsApiKey: z.boolean().optional(),
|
||||
message: z.string().optional(),
|
||||
endpoint: z.string().optional(),
|
||||
curlCommand: z.string().optional(),
|
||||
apiKeyPlaceholder: z.string().optional(),
|
||||
openedModal: z.boolean().optional(),
|
||||
}),
|
||||
check_deployment_status: z.object({
|
||||
isDeployed: z.boolean(),
|
||||
deploymentTypes: z.array(z.string()),
|
||||
apiDeployed: z.boolean(),
|
||||
chatDeployed: z.boolean(),
|
||||
deployedAt: z.string().nullable(),
|
||||
}),
|
||||
navigate_ui: z.object({
|
||||
destination: z.enum(['workflow', 'logs', 'templates', 'vector_db', 'settings']),
|
||||
workflowName: z.string().optional(),
|
||||
navigated: z.boolean(),
|
||||
}),
|
||||
} as const
|
||||
export type ToolResultSchemaMap = typeof ToolResultSchemas
|
||||
|
||||
|
||||
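A minimal usage sketch for the result schemas (again assuming `ToolResultSchemas` is exported from the shared schemas module), validating a server tool result before trusting its fields:

```ts
import { ToolResultSchemas } from '@/lib/copilot/tools/shared/schemas' // path assumed

// Hypothetical raw result for a check_deployment_status call.
const raw: unknown = {
  isDeployed: true,
  deploymentTypes: ['api'],
  apiDeployed: true,
  chatDeployed: false,
  deployedAt: '2024-01-01T00:00:00Z',
}

const result = ToolResultSchemas.check_deployment_status.safeParse(raw)
if (result.success && result.data.isDeployed) {
  console.log(`Deployed as: ${result.data.deploymentTypes.join(', ')}`)
}
```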
@@ -23,12 +23,28 @@ export interface ClientToolDisplay {
|
||||
icon: LucideIcon
|
||||
}
|
||||
|
||||
/**
|
||||
* Function to generate dynamic display text based on tool parameters and state
|
||||
* @param params - The tool call parameters
|
||||
* @param state - The current tool call state
|
||||
* @returns The dynamic text to display, or undefined to use the default text
|
||||
*/
|
||||
export type DynamicTextFormatter = (
|
||||
params: Record<string, any>,
|
||||
state: ClientToolCallState
|
||||
) => string | undefined
|
||||
|
||||
export interface BaseClientToolMetadata {
|
||||
displayNames: Partial<Record<ClientToolCallState, ClientToolDisplay>>
|
||||
interrupt?: {
|
||||
accept: ClientToolDisplay
|
||||
reject: ClientToolDisplay
|
||||
}
|
||||
/**
|
||||
* Optional function to generate dynamic display text based on parameters
|
||||
* If provided, this will override the default text in displayNames
|
||||
*/
|
||||
getDynamicText?: DynamicTextFormatter
|
||||
}
|
||||
|
||||
export class BaseClientTool {
|
||||
|
||||
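A sketch of how a tool-call renderer might consume this metadata, preferring the dynamic text and falling back to the static per-state label (the function name below is illustrative, not taken from the codebase):

```ts
import {
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

// Illustrative only: resolves the label shown for a tool call in the copilot panel.
function resolveDisplayText(
  metadata: BaseClientToolMetadata,
  params: Record<string, any>,
  state: ClientToolCallState
): string | undefined {
  // getDynamicText may return undefined, in which case the static label is used.
  const dynamic = metadata.getDynamicText?.(params, state)
  return dynamic ?? metadata.displayNames[state]?.text
}
```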
@@ -24,17 +24,43 @@ export class GetBlocksMetadataClientTool extends BaseClientTool {
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Evaluating block choices', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Evaluating block choices', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Evaluating block choices', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Evaluated block choices', icon: ListFilter },
|
||||
[ClientToolCallState.error]: { text: 'Failed to evaluate block choices', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted evaluating block choices', icon: XCircle },
|
||||
[ClientToolCallState.generating]: { text: 'Searching block choices', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Searching block choices', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Searching block choices', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Searched block choices', icon: ListFilter },
|
||||
[ClientToolCallState.error]: { text: 'Failed to search block choices', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted searching block choices', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped evaluating block choices',
|
||||
text: 'Skipped searching block choices',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.blockIds && Array.isArray(params.blockIds) && params.blockIds.length > 0) {
|
||||
const blockList = params.blockIds
|
||||
.slice(0, 3)
|
||||
.map((blockId) => blockId.replace(/_/g, ' '))
|
||||
.join(', ')
|
||||
const more = params.blockIds.length > 3 ? '...' : ''
|
||||
const blocks = `${blockList}${more}`
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Searched ${blocks}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Searching ${blocks}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to search ${blocks}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted searching ${blocks}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped searching ${blocks}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: GetBlocksMetadataArgs): Promise<void> {
|
||||
|
||||
@@ -23,6 +23,28 @@ export class GetExamplesRagClientTool extends BaseClientTool {
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped getting examples', icon: MinusCircle },
|
||||
},
|
||||
interrupt: undefined,
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.query && typeof params.query === 'string') {
|
||||
const query = params.query
|
||||
const truncated = query.length > 40 ? `${query.slice(0, 40)}...` : query
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Found examples for ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Searching examples for ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to find examples for ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted searching examples for ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped searching examples for ${truncated}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(): Promise<void> {
|
||||
|
||||
@@ -29,6 +29,28 @@ export class GetOperationsExamplesClientTool extends BaseClientTool {
|
||||
},
|
||||
},
|
||||
interrupt: undefined,
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.query && typeof params.query === 'string') {
|
||||
const query = params.query
|
||||
const truncated = query.length > 40 ? `${query.slice(0, 40)}...` : query
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Designed ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Designing ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to design ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted designing ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped designing ${truncated}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(): Promise<void> {
|
||||
|
||||
@@ -32,6 +32,27 @@ export class ListGDriveFilesClientTool extends BaseClientTool {
|
||||
[ClientToolCallState.error]: { text: 'Failed to list GDrive files', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped listing GDrive files', icon: MinusCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const searchQuery = params?.search_query || params?.searchQuery
|
||||
if (searchQuery && typeof searchQuery === 'string') {
|
||||
const query = searchQuery
|
||||
const truncated = query.length > 40 ? `${query.slice(0, 40)}...` : query
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Listed files matching ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Listing files matching ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to list files matching ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped listing files matching ${truncated}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: ListGDriveFilesArgs): Promise<void> {
|
||||
|
||||
@@ -34,6 +34,28 @@ export class ReadGDriveFileClientTool extends BaseClientTool {
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.fileId && typeof params.fileId === 'string') {
|
||||
const fileId = params.fileId
|
||||
const fileType = params?.type ? ` (${params.type})` : ''
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Read file ${fileId}${fileType}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Reading file ${fileId}${fileType}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to read file ${fileId}${fileType}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted reading file ${fileId}${fileType}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped reading file ${fileId}${fileType}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: ReadGDriveFileArgs): Promise<void> {
|
||||
|
||||
apps/sim/lib/copilot/tools/client/navigation/navigate-ui.ts (new file, 241 lines)
@@ -0,0 +1,241 @@
|
||||
import { Loader2, Navigation, X, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useCopilotStore } from '@/stores/panel/copilot/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
type NavigationDestination = 'workflow' | 'logs' | 'templates' | 'vector_db' | 'settings'
|
||||
|
||||
interface NavigateUIArgs {
|
||||
destination: NavigationDestination
|
||||
workflowName?: string
|
||||
}
|
||||
|
||||
export class NavigateUIClientTool extends BaseClientTool {
|
||||
static readonly id = 'navigate_ui'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, NavigateUIClientTool.id, NavigateUIClientTool.metadata)
|
||||
}
|
||||
|
||||
/**
|
||||
* Override to provide dynamic button text based on destination
|
||||
*/
|
||||
getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
|
||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
||||
const toolCall = toolCallsById[this.toolCallId]
|
||||
const params = toolCall?.params as NavigateUIArgs | undefined
|
||||
|
||||
const destination = params?.destination
|
||||
const workflowName = params?.workflowName
|
||||
|
||||
let buttonText = 'Navigate'
|
||||
|
||||
if (destination === 'workflow' && workflowName) {
|
||||
buttonText = 'Open workflow'
|
||||
} else if (destination === 'logs') {
|
||||
buttonText = 'Open logs'
|
||||
} else if (destination === 'templates') {
|
||||
buttonText = 'Open templates'
|
||||
} else if (destination === 'vector_db') {
|
||||
buttonText = 'Open vector DB'
|
||||
} else if (destination === 'settings') {
|
||||
buttonText = 'Open settings'
|
||||
}
|
||||
|
||||
return {
|
||||
accept: { text: buttonText, icon: Navigation },
|
||||
reject: { text: 'Skip', icon: XCircle },
|
||||
}
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
text: 'Preparing to open',
|
||||
icon: Loader2,
|
||||
},
|
||||
[ClientToolCallState.pending]: { text: 'Open?', icon: Navigation },
|
||||
[ClientToolCallState.executing]: { text: 'Opening', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Opened', icon: Navigation },
|
||||
[ClientToolCallState.error]: { text: 'Failed to open', icon: X },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted opening',
|
||||
icon: XCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped opening',
|
||||
icon: XCircle,
|
||||
},
|
||||
},
|
||||
interrupt: {
|
||||
accept: { text: 'Open', icon: Navigation },
|
||||
reject: { text: 'Skip', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const destination = params?.destination as NavigationDestination | undefined
|
||||
const workflowName = params?.workflowName
|
||||
|
||||
const action = 'open'
|
||||
const actionCapitalized = 'Open'
|
||||
const actionPast = 'opened'
|
||||
const actionIng = 'opening'
|
||||
let target = ''
|
||||
|
||||
if (destination === 'workflow' && workflowName) {
|
||||
target = ` workflow "${workflowName}"`
|
||||
} else if (destination === 'workflow') {
|
||||
target = ' workflows'
|
||||
} else if (destination === 'logs') {
|
||||
target = ' logs'
|
||||
} else if (destination === 'templates') {
|
||||
target = ' templates'
|
||||
} else if (destination === 'vector_db') {
|
||||
target = ' vector database'
|
||||
} else if (destination === 'settings') {
|
||||
target = ' settings'
|
||||
}
|
||||
|
||||
const fullAction = `${action}${target}`
|
||||
const fullActionCapitalized = `${actionCapitalized}${target}`
|
||||
const fullActionPast = `${actionPast}${target}`
|
||||
const fullActionIng = `${actionIng}${target}`
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return fullActionPast.charAt(0).toUpperCase() + fullActionPast.slice(1)
|
||||
case ClientToolCallState.executing:
|
||||
return fullActionIng.charAt(0).toUpperCase() + fullActionIng.slice(1)
|
||||
case ClientToolCallState.generating:
|
||||
return `Preparing to ${fullAction}`
|
||||
case ClientToolCallState.pending:
|
||||
return `${fullActionCapitalized}?`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to ${fullAction}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted ${fullAction}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped ${fullAction}`
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
await super.handleReject()
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
async handleAccept(args?: NavigateUIArgs): Promise<void> {
|
||||
const logger = createLogger('NavigateUIClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
// Get params from copilot store if not provided directly
|
||||
let destination = args?.destination
|
||||
let workflowName = args?.workflowName
|
||||
|
||||
if (!destination) {
|
||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
||||
const toolCall = toolCallsById[this.toolCallId]
|
||||
const params = toolCall?.params as NavigateUIArgs | undefined
|
||||
destination = params?.destination
|
||||
workflowName = params?.workflowName
|
||||
}
|
||||
|
||||
if (!destination) {
|
||||
throw new Error('No destination provided')
|
||||
}
|
||||
|
||||
let navigationUrl = ''
|
||||
let successMessage = ''
|
||||
|
||||
// Get current workspace ID from URL
|
||||
const workspaceId = window.location.pathname.split('/')[2]
|
||||
|
||||
switch (destination) {
|
||||
case 'workflow':
|
||||
if (workflowName) {
|
||||
// Find workflow by name
|
||||
const { workflows } = useWorkflowRegistry.getState()
|
||||
const workflow = Object.values(workflows).find(
|
||||
(w) => w.name.toLowerCase() === workflowName.toLowerCase()
|
||||
)
|
||||
|
||||
if (!workflow) {
|
||||
throw new Error(`Workflow "${workflowName}" not found`)
|
||||
}
|
||||
|
||||
navigationUrl = `/workspace/${workspaceId}/w/${workflow.id}`
|
||||
successMessage = `Navigated to workflow "${workflowName}"`
|
||||
} else {
|
||||
navigationUrl = `/workspace/${workspaceId}/w`
|
||||
successMessage = 'Navigated to workflows'
|
||||
}
|
||||
break
|
||||
|
||||
case 'logs':
|
||||
navigationUrl = `/workspace/${workspaceId}/logs`
|
||||
successMessage = 'Navigated to logs'
|
||||
break
|
||||
|
||||
case 'templates':
|
||||
navigationUrl = `/workspace/${workspaceId}/templates`
|
||||
successMessage = 'Navigated to templates'
|
||||
break
|
||||
|
||||
case 'vector_db':
|
||||
navigationUrl = `/workspace/${workspaceId}/vector-db`
|
||||
successMessage = 'Navigated to vector database'
|
||||
break
|
||||
|
||||
case 'settings':
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'general' } }))
|
||||
successMessage = 'Opened settings'
|
||||
break
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown destination: ${destination}`)
|
||||
}
|
||||
|
||||
// Navigate if URL was set
|
||||
if (navigationUrl) {
|
||||
window.location.href = navigationUrl
|
||||
}
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, successMessage, {
|
||||
destination,
|
||||
workflowName,
|
||||
navigated: true,
|
||||
})
|
||||
} catch (e: any) {
|
||||
logger.error('Navigation failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
|
||||
// Get destination info for better error message
|
||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
||||
const toolCall = toolCallsById[this.toolCallId]
|
||||
const params = toolCall?.params as NavigateUIArgs | undefined
|
||||
const dest = params?.destination
|
||||
const wfName = params?.workflowName
|
||||
|
||||
let errorMessage = e?.message || 'Failed to navigate'
|
||||
if (dest === 'workflow' && wfName) {
|
||||
errorMessage = `Failed to navigate to workflow "${wfName}": ${e?.message || 'Unknown error'}`
|
||||
} else if (dest) {
|
||||
errorMessage = `Failed to navigate to ${dest}: ${e?.message || 'Unknown error'}`
|
||||
}
|
||||
|
||||
await this.markToolComplete(500, errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
async execute(args?: NavigateUIArgs): Promise<void> {
|
||||
await this.handleAccept(args)
|
||||
}
|
||||
}
|
||||
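Unlike the other destinations, the settings case dispatches an `open-settings` CustomEvent rather than changing the URL. A minimal sketch of the listening side (the handler below is illustrative; the actual settings-modal wiring is not part of this diff):

```ts
// Hypothetical consumer; the real app decides how the settings modal opens.
declare function openSettingsModal(tab: string): void

// Illustrative listener reacting to the event dispatched by NavigateUIClientTool.
window.addEventListener('open-settings', (event) => {
  const tab = (event as CustomEvent<{ tab?: string }>).detail?.tab ?? 'general'
  openSettingsModal(tab)
})
```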
@@ -36,6 +36,44 @@ export class MakeApiRequestClientTool extends BaseClientTool {
|
||||
accept: { text: 'Execute', icon: Globe2 },
|
||||
reject: { text: 'Skip', icon: MinusCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.url && typeof params.url === 'string') {
|
||||
const method = params.method || 'GET'
|
||||
let url = params.url
|
||||
|
||||
// Extract domain from URL for cleaner display
|
||||
try {
|
||||
const urlObj = new URL(url)
|
||||
url = urlObj.hostname + urlObj.pathname
|
||||
if (url.length > 40) {
|
||||
url = `${url.slice(0, 40)}...`
|
||||
}
|
||||
} catch {
|
||||
// If URL parsing fails, just truncate
|
||||
if (url.length > 40) {
|
||||
url = `${url.slice(0, 40)}...`
|
||||
}
|
||||
}
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `${method} ${url} complete`
|
||||
case ClientToolCallState.executing:
|
||||
return `${method} ${url}`
|
||||
case ClientToolCallState.generating:
|
||||
return `Preparing ${method} ${url}`
|
||||
case ClientToolCallState.pending:
|
||||
return `Review ${method} ${url}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed ${method} ${url}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped ${method} ${url}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted ${method} ${url}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
|
||||
apps/sim/lib/copilot/tools/client/other/remember-debug.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
|
||||
import { CheckCircle2, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
|
||||
export class RememberDebugClientTool extends BaseClientTool {
|
||||
static readonly id = 'remember_debug'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, RememberDebugClientTool.id, RememberDebugClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Validating fix', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Validating fix', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Validating fix', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Validated fix', icon: CheckCircle2 },
|
||||
[ClientToolCallState.error]: { text: 'Failed to validate', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted validation', icon: MinusCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped validation', icon: MinusCircle },
|
||||
},
|
||||
interrupt: undefined,
|
||||
getDynamicText: (params, state) => {
|
||||
const operation = params?.operation
|
||||
|
||||
if (operation === 'add' || operation === 'edit') {
|
||||
// For add/edit, show from problem or solution
|
||||
const text = params?.problem || params?.solution
|
||||
if (text && typeof text === 'string') {
|
||||
const truncated = text.length > 40 ? `${text.slice(0, 40)}...` : text
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Validated fix ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Validating fix ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to validate fix ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted validating fix ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped validating fix ${truncated}`
|
||||
}
|
||||
}
|
||||
} else if (operation === 'delete') {
|
||||
// For delete, show from problem or solution (or id as fallback)
|
||||
const text = params?.problem || params?.solution || params?.id
|
||||
if (text && typeof text === 'string') {
|
||||
const truncated = text.length > 40 ? `${text.slice(0, 40)}...` : text
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Adjusted fix ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Adjusting fix ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to adjust fix ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted adjusting fix ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped adjusting fix ${truncated}`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(): Promise<void> {
|
||||
return
|
||||
}
|
||||
}
|
||||
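Several of these `getDynamicText` formatters repeat the same `length > 40 ? slice : text` truncation inline; a small shared helper would remove the duplication (a suggestion sketch only, not something this PR adds):

```ts
// Possible shared helper for the repeated truncation logic in getDynamicText formatters.
const truncate = (text: string, max = 40): string =>
  text.length > max ? `${text.slice(0, max)}...` : text

// Usage inside a formatter:
// return `Searching docs for ${truncate(params.query, 50)}`
```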
@@ -30,6 +30,28 @@ export class SearchDocumentationClientTool extends BaseClientTool {
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted documentation search', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped documentation search', icon: MinusCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.query && typeof params.query === 'string') {
|
||||
const query = params.query
|
||||
const truncated = query.length > 50 ? `${query.slice(0, 50)}...` : query
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Searched docs for ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Searching docs for ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to search docs for ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted searching docs for ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped searching docs for ${truncated}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: SearchDocumentationArgs): Promise<void> {
|
||||
|
||||
apps/sim/lib/copilot/tools/client/other/search-errors.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
import { Bug, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
|
||||
export class SearchErrorsClientTool extends BaseClientTool {
|
||||
static readonly id = 'search_errors'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, SearchErrorsClientTool.id, SearchErrorsClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Debugging', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Debugging', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Debugging', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Debugged', icon: Bug },
|
||||
[ClientToolCallState.error]: { text: 'Failed to debug', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted debugging', icon: MinusCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped debugging', icon: MinusCircle },
|
||||
},
|
||||
interrupt: undefined,
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.query && typeof params.query === 'string') {
|
||||
const query = params.query
|
||||
const truncated = query.length > 50 ? `${query.slice(0, 50)}...` : query
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Debugged ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Debugging ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to debug ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted debugging ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped debugging ${truncated}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(): Promise<void> {
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -32,6 +32,28 @@ export class SearchOnlineClientTool extends BaseClientTool {
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped online search', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted online search', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.query && typeof params.query === 'string') {
|
||||
const query = params.query
|
||||
const truncated = query.length > 50 ? `${query.slice(0, 50)}...` : query
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Searched online for ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Searching online for ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to search online for ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted searching online for ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped searching online for ${truncated}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: SearchOnlineArgs): Promise<void> {
|
||||
|
||||
apps/sim/lib/copilot/tools/client/other/search-patterns.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
import { Loader2, MinusCircle, Search, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
|
||||
export class SearchPatternsClientTool extends BaseClientTool {
|
||||
static readonly id = 'search_patterns'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, SearchPatternsClientTool.id, SearchPatternsClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Searching workflow patterns', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Searching workflow patterns', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Searching workflow patterns', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Found workflow patterns', icon: Search },
|
||||
[ClientToolCallState.error]: { text: 'Failed to search patterns', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted pattern search', icon: MinusCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped pattern search', icon: MinusCircle },
|
||||
},
|
||||
interrupt: undefined,
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.queries && Array.isArray(params.queries) && params.queries.length > 0) {
|
||||
const firstQuery = String(params.queries[0])
|
||||
const truncated = firstQuery.length > 50 ? `${firstQuery.slice(0, 50)}...` : firstQuery
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Searched ${truncated}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Searching ${truncated}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to search ${truncated}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted searching ${truncated}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped searching ${truncated}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async execute(): Promise<void> {
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -8,41 +8,44 @@ import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface GetOAuthCredentialsArgs {
|
||||
interface GetCredentialsArgs {
|
||||
userId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export class GetOAuthCredentialsClientTool extends BaseClientTool {
|
||||
static readonly id = 'get_oauth_credentials'
|
||||
export class GetCredentialsClientTool extends BaseClientTool {
|
||||
static readonly id = 'get_credentials'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, GetOAuthCredentialsClientTool.id, GetOAuthCredentialsClientTool.metadata)
|
||||
super(toolCallId, GetCredentialsClientTool.id, GetCredentialsClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Fetching OAuth credentials', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Fetching OAuth credentials', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Retrieving login IDs', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved login IDs', icon: Key },
|
||||
[ClientToolCallState.error]: { text: 'Failed to retrieve login IDs', icon: XCircle },
|
||||
[ClientToolCallState.generating]: { text: 'Fetching connected integrations', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Fetching connected integrations', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Fetching connected integrations', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Fetched connected integrations', icon: Key },
|
||||
[ClientToolCallState.error]: {
|
||||
text: 'Failed to fetch connected integrations',
|
||||
icon: XCircle,
|
||||
},
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted fetching OAuth credentials',
|
||||
text: 'Aborted fetching connected integrations',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped fetching OAuth credentials',
|
||||
text: 'Skipped fetching connected integrations',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: GetOAuthCredentialsArgs): Promise<void> {
|
||||
const logger = createLogger('GetOAuthCredentialsClientTool')
|
||||
async execute(args?: GetCredentialsArgs): Promise<void> {
|
||||
const logger = createLogger('GetCredentialsClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const payload: GetOAuthCredentialsArgs = { ...(args || {}) }
|
||||
const payload: GetCredentialsArgs = { ...(args || {}) }
|
||||
if (!payload.workflowId && !payload.userId) {
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (activeWorkflowId) payload.workflowId = activeWorkflowId
|
||||
@@ -50,7 +53,7 @@ export class GetOAuthCredentialsClientTool extends BaseClientTool {
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'get_oauth_credentials', payload }),
|
||||
body: JSON.stringify({ toolName: 'get_credentials', payload }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
@@ -59,12 +62,12 @@ export class GetOAuthCredentialsClientTool extends BaseClientTool {
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Retrieved login IDs', parsed.result)
|
||||
await this.markToolComplete(200, 'Connected integrations fetched', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to retrieve login IDs')
|
||||
await this.markToolComplete(500, e?.message || 'Failed to fetch connected integrations')
|
||||
}
|
||||
}
|
||||
}
|
||||
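The fetch-and-parse sequence against `/api/copilot/execute-copilot-server-tool` recurs across these client tools; a hedged sketch of a generic helper capturing that pattern (the helper name is an assumption, the calls mirror what the tools above do):

```ts
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

// Hypothetical helper: POSTs a server tool invocation and validates the response envelope.
async function executeServerTool(toolName: string, payload: Record<string, unknown>) {
  const res = await fetch('/api/copilot/execute-copilot-server-tool', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolName, payload }),
  })
  if (!res.ok) {
    const txt = await res.text().catch(() => '')
    throw new Error(txt || `Server error (${res.status})`)
  }
  return ExecuteResponseSuccessSchema.parse(await res.json())
}

// e.g. const parsed = await executeServerTool('get_credentials', { workflowId })
```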
@@ -1,77 +0,0 @@
|
||||
import { KeyRound, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface GetEnvArgs {
|
||||
userId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export class GetEnvironmentVariablesClientTool extends BaseClientTool {
|
||||
static readonly id = 'get_environment_variables'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(
|
||||
toolCallId,
|
||||
GetEnvironmentVariablesClientTool.id,
|
||||
GetEnvironmentVariablesClientTool.metadata
|
||||
)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
text: 'Reading environment variables',
|
||||
icon: Loader2,
|
||||
},
|
||||
[ClientToolCallState.pending]: { text: 'Reading environment variables', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Reading environment variables', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Read environment variables', icon: KeyRound },
|
||||
[ClientToolCallState.error]: { text: 'Failed to read environment variables', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted reading environment variables',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped reading environment variables',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
async execute(args?: GetEnvArgs): Promise<void> {
|
||||
const logger = createLogger('GetEnvironmentVariablesClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
const payload: GetEnvArgs = { ...(args || {}) }
|
||||
if (!payload.workflowId) {
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (activeWorkflowId) payload.workflowId = activeWorkflowId
|
||||
}
|
||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolName: 'get_environment_variables', payload }),
|
||||
})
|
||||
if (!res.ok) {
|
||||
const txt = await res.text().catch(() => '')
|
||||
throw new Error(txt || `Server error (${res.status})`)
|
||||
}
|
||||
const json = await res.json()
|
||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Environment variables fetched', parsed.result)
|
||||
this.setState(ClientToolCallState.success)
|
||||
} catch (e: any) {
|
||||
logger.error('execute failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to get environment variables')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -48,6 +48,30 @@ export class SetEnvironmentVariablesClientTool extends BaseClientTool {
|
||||
accept: { text: 'Apply', icon: Settings2 },
|
||||
reject: { text: 'Skip', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.variables && typeof params.variables === 'object') {
|
||||
const count = Object.keys(params.variables).length
|
||||
const varText = count === 1 ? 'variable' : 'variables'
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Set ${count} ${varText}`
|
||||
case ClientToolCallState.executing:
|
||||
return `Setting ${count} ${varText}`
|
||||
case ClientToolCallState.generating:
|
||||
return `Preparing to set ${count} ${varText}`
|
||||
case ClientToolCallState.pending:
|
||||
return `Set ${count} ${varText}?`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to set ${count} ${varText}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted setting ${count} ${varText}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped setting ${count} ${varText}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
|
||||
@@ -0,0 +1,100 @@
|
||||
import { Loader2, Rocket, X, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface CheckDeploymentStatusArgs {
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export class CheckDeploymentStatusClientTool extends BaseClientTool {
|
||||
static readonly id = 'check_deployment_status'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, CheckDeploymentStatusClientTool.id, CheckDeploymentStatusClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
text: 'Checking deployment status',
|
||||
icon: Loader2,
|
||||
},
|
||||
[ClientToolCallState.pending]: { text: 'Checking deployment status', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Checking deployment status', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Checked deployment status', icon: Rocket },
|
||||
[ClientToolCallState.error]: { text: 'Failed to check deployment status', icon: X },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted checking deployment status',
|
||||
icon: XCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped checking deployment status',
|
||||
icon: XCircle,
|
||||
},
|
||||
},
|
||||
interrupt: undefined,
|
||||
}
|
||||
|
||||
async execute(args?: CheckDeploymentStatusArgs): Promise<void> {
|
||||
const logger = createLogger('CheckDeploymentStatusClientTool')
|
||||
try {
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
const workflowId = args?.workflowId || activeWorkflowId
|
||||
|
||||
if (!workflowId) {
|
||||
throw new Error('No workflow ID provided')
|
||||
}
|
||||
|
||||
// Fetch deployment status from API
|
||||
const [apiDeployRes, chatDeployRes] = await Promise.all([
|
||||
fetch(`/api/workflows/${workflowId}/deploy`),
|
||||
fetch(`/api/workflows/${workflowId}/chat/status`),
|
||||
])
|
||||
|
||||
const apiDeploy = apiDeployRes.ok ? await apiDeployRes.json() : null
|
||||
const chatDeploy = chatDeployRes.ok ? await chatDeployRes.json() : null
|
||||
|
||||
const isApiDeployed = apiDeploy?.isDeployed || false
|
||||
const isChatDeployed = !!(chatDeploy?.isDeployed && chatDeploy?.deployment)
|
||||
|
||||
const deploymentTypes: string[] = []
|
||||
|
||||
if (isApiDeployed) {
|
||||
// Default to sync API, could be extended to detect streaming/async
|
||||
deploymentTypes.push('api')
|
||||
}
|
||||
|
||||
if (isChatDeployed) {
|
||||
deploymentTypes.push('chat')
|
||||
}
|
||||
|
||||
const isDeployed = isApiDeployed || isChatDeployed
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(
|
||||
200,
|
||||
isDeployed
|
||||
? `Workflow is deployed as: ${deploymentTypes.join(', ')}`
|
||||
: 'Workflow is not deployed',
|
||||
{
|
||||
isDeployed,
|
||||
deploymentTypes,
|
||||
apiDeployed: isApiDeployed,
|
||||
chatDeployed: isChatDeployed,
|
||||
deployedAt: apiDeploy?.deployedAt || null,
|
||||
}
|
||||
)
|
||||
} catch (e: any) {
|
||||
logger.error('Check deployment status failed', { message: e?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, e?.message || 'Failed to check deployment status')
|
||||
}
|
||||
}
|
||||
}
|
||||
apps/sim/lib/copilot/tools/client/workflow/deploy-workflow.ts (new file, 333 lines)
@@ -0,0 +1,333 @@
|
||||
import { Loader2, Rocket, X, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getInputFormatExample } from '@/lib/workflows/deployment-utils'
|
||||
import { useCopilotStore } from '@/stores/panel/copilot/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface DeployWorkflowArgs {
|
||||
action: 'deploy' | 'undeploy'
|
||||
deployType?: 'api' | 'chat'
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
interface ApiKeysData {
|
||||
workspaceKeys: Array<{ id: string; name: string }>
|
||||
personalKeys: Array<{ id: string; name: string }>
|
||||
}
|
||||
|
||||
export class DeployWorkflowClientTool extends BaseClientTool {
|
||||
static readonly id = 'deploy_workflow'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, DeployWorkflowClientTool.id, DeployWorkflowClientTool.metadata)
|
||||
}
|
||||
|
||||
/**
|
||||
* Override to provide dynamic button text based on action and deployType
|
||||
*/
|
||||
getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
|
||||
// Get params from the copilot store
|
||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
||||
const toolCall = toolCallsById[this.toolCallId]
|
||||
const params = toolCall?.params as DeployWorkflowArgs | undefined
|
||||
|
||||
const action = params?.action || 'deploy'
|
||||
const deployType = params?.deployType || 'api'
|
||||
|
||||
// Check if workflow is already deployed
|
||||
const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
|
||||
const isAlreadyDeployed = workflowId
|
||||
? useWorkflowRegistry.getState().getWorkflowDeploymentStatus(workflowId)?.isDeployed
|
||||
: false
|
||||
|
||||
let buttonText = action.charAt(0).toUpperCase() + action.slice(1)
|
||||
|
||||
// Change to "Redeploy" if already deployed
|
||||
if (action === 'deploy' && isAlreadyDeployed) {
|
||||
buttonText = 'Redeploy'
|
||||
} else if (action === 'deploy' && deployType === 'chat') {
|
||||
buttonText = 'Deploy as chat'
|
||||
}
|
||||
|
||||
return {
|
||||
accept: { text: buttonText, icon: Rocket },
|
||||
reject: { text: 'Skip', icon: XCircle },
|
||||
}
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
text: 'Preparing to deploy workflow',
|
||||
icon: Loader2,
|
||||
},
|
||||
[ClientToolCallState.pending]: { text: 'Deploy workflow?', icon: Rocket },
|
||||
[ClientToolCallState.executing]: { text: 'Deploying workflow', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Deployed workflow', icon: Rocket },
|
||||
[ClientToolCallState.error]: { text: 'Failed to deploy workflow', icon: X },
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted deploying workflow',
|
||||
icon: XCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped deploying workflow',
|
||||
icon: XCircle,
|
||||
},
|
||||
},
|
||||
interrupt: {
|
||||
accept: { text: 'Deploy', icon: Rocket },
|
||||
reject: { text: 'Skip', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const action = params?.action === 'undeploy' ? 'undeploy' : 'deploy'
|
||||
const deployType = params?.deployType || 'api'
|
||||
|
||||
// Check if workflow is already deployed
|
||||
const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
|
||||
const isAlreadyDeployed = workflowId
|
||||
? useWorkflowRegistry.getState().getWorkflowDeploymentStatus(workflowId)?.isDeployed
|
||||
: false
|
||||
|
||||
// Determine action text based on deployment status
|
||||
let actionText = action
|
||||
let actionTextIng = action === 'undeploy' ? 'undeploying' : 'deploying'
|
||||
let actionTextPast = action === 'undeploy' ? 'undeployed' : 'deployed'
|
||||
|
||||
// If already deployed and action is deploy, change to redeploy
|
||||
if (action === 'deploy' && isAlreadyDeployed) {
|
||||
actionText = 'redeploy'
|
||||
actionTextIng = 'redeploying'
|
||||
actionTextPast = 'redeployed'
|
||||
}
|
||||
|
||||
const actionCapitalized = actionText.charAt(0).toUpperCase() + actionText.slice(1)
|
||||
|
||||
// Special text for chat deployment
|
||||
const isChatDeploy = action === 'deploy' && deployType === 'chat'
|
||||
const displayAction = isChatDeploy ? 'deploy as chat' : actionText
|
||||
const displayActionCapitalized = isChatDeploy ? 'Deploy as chat' : actionCapitalized
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return isChatDeploy
|
||||
? 'Opened chat deployment settings'
|
||||
: `${actionCapitalized}ed workflow`
|
||||
case ClientToolCallState.executing:
|
||||
return isChatDeploy
|
||||
? 'Opening chat deployment settings'
|
||||
: `${actionCapitalized}ing workflow`
|
||||
case ClientToolCallState.generating:
|
||||
return `Preparing to ${displayAction} workflow`
|
||||
case ClientToolCallState.pending:
|
||||
return `${displayActionCapitalized} workflow?`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to ${displayAction} workflow`
|
||||
case ClientToolCallState.aborted:
|
||||
return isChatDeploy
|
||||
? 'Aborted opening chat deployment'
|
||||
: `Aborted ${actionTextIng} workflow`
|
||||
case ClientToolCallState.rejected:
|
||||
return isChatDeploy
|
||||
? 'Skipped opening chat deployment'
|
||||
: `Skipped ${actionTextIng} workflow`
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the user has any API keys (workspace or personal)
|
||||
*/
|
||||
private async hasApiKeys(workspaceId: string): Promise<boolean> {
|
||||
try {
|
||||
const [workspaceRes, personalRes] = await Promise.all([
|
||||
fetch(`/api/workspaces/${workspaceId}/api-keys`),
|
||||
fetch('/api/users/me/api-keys'),
|
||||
])
|
||||
|
||||
if (!workspaceRes.ok || !personalRes.ok) {
|
||||
return false
|
||||
}
|
||||
|
||||
const workspaceData = await workspaceRes.json()
|
||||
const personalData = await personalRes.json()
|
||||
|
||||
const workspaceKeys = (workspaceData?.keys || []) as Array<any>
|
||||
const personalKeys = (personalData?.keys || []) as Array<any>
|
||||
|
||||
return workspaceKeys.length > 0 || personalKeys.length > 0
|
||||
} catch (error) {
|
||||
const logger = createLogger('DeployWorkflowClientTool')
|
||||
logger.warn('Failed to check API keys:', error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the settings modal to the API keys tab
|
||||
*/
|
||||
private openApiKeysModal(): void {
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'apikeys' } }))
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the deploy modal to the chat tab
|
||||
*/
|
||||
private openDeployModal(tab: 'api' | 'chat' = 'api'): void {
|
||||
window.dispatchEvent(new CustomEvent('open-deploy-modal', { detail: { tab } }))
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
await super.handleReject()
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
async handleAccept(args?: DeployWorkflowArgs): Promise<void> {
|
||||
const logger = createLogger('DeployWorkflowClientTool')
|
||||
try {
|
||||
const action = args?.action || 'deploy'
|
||||
const deployType = args?.deployType || 'api'
|
||||
const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()
|
||||
const workflowId = args?.workflowId || activeWorkflowId
|
||||
|
||||
if (!workflowId) {
|
||||
throw new Error('No workflow ID provided')
|
||||
}
|
||||
|
||||
const workflow = workflows[workflowId]
|
||||
const workspaceId = workflow?.workspaceId
|
||||
|
||||
// For chat deployment, just open the deploy modal
|
||||
if (action === 'deploy' && deployType === 'chat') {
this.setState(ClientToolCallState.success)
this.openDeployModal('chat')
await this.markToolComplete(
200,
'Opened chat deployment settings. Configure and deploy your workflow as a chat interface.',
{
action,
deployType,
openedModal: true,
}
)
return
}

// For deploy action, check if user has API keys first
if (action === 'deploy') {
if (!workspaceId) {
throw new Error('Workflow workspace not found')
}

const hasKeys = await this.hasApiKeys(workspaceId)

if (!hasKeys) {
// Mark as rejected since we can't deploy without an API key
this.setState(ClientToolCallState.rejected)

// Open the API keys modal to help user create one
this.openApiKeysModal()

await this.markToolComplete(
200,
'Cannot deploy without an API key. Opened API key settings so you can create one. Once you have an API key, try deploying again.',
{
needsApiKey: true,
message:
'You need to create an API key before you can deploy your workflow. The API key settings have been opened for you. After creating an API key, you can deploy your workflow.',
}
)
return
}
}

this.setState(ClientToolCallState.executing)

// Perform the deploy/undeploy action
const endpoint = `/api/workflows/${workflowId}/deploy`
const method = action === 'deploy' ? 'POST' : 'DELETE'

const res = await fetch(endpoint, {
method,
headers: { 'Content-Type': 'application/json' },
body: action === 'deploy' ? JSON.stringify({ deployChatEnabled: false }) : undefined,
})

if (!res.ok) {
const txt = await res.text().catch(() => '')
throw new Error(txt || `Server error (${res.status})`)
}

const json = await res.json()

let successMessage = ''
let resultData: any = {
action,
isDeployed: action === 'deploy',
deployedAt: json.deployedAt,
}

if (action === 'deploy') {
// Generate the curl command for the deployed workflow (matching deploy modal format)
const appUrl =
typeof window !== 'undefined'
? window.location.origin
: process.env.NEXT_PUBLIC_APP_URL || 'https://app.sim.ai'
const endpoint = `${appUrl}/api/workflows/${workflowId}/execute`
const apiKeyPlaceholder = '$SIM_API_KEY'

// Get input format example (returns empty string if no inputs, or -d flag with example data)
const inputExample = getInputFormatExample(false)

// Match the exact format from deploy modal
const curlCommand = `curl -X POST -H "X-API-Key: ${apiKeyPlaceholder}" -H "Content-Type: application/json"${inputExample} ${endpoint}`

successMessage = 'Workflow deployed successfully. You can now call it via the API.'

resultData = {
...resultData,
endpoint,
curlCommand,
apiKeyPlaceholder,
}
} else {
successMessage = 'Workflow undeployed successfully.'
}

this.setState(ClientToolCallState.success)
await this.markToolComplete(200, successMessage, resultData)

// Refresh the workflow registry to update deployment status
try {
const setDeploymentStatus = useWorkflowRegistry.getState().setDeploymentStatus
if (action === 'deploy') {
setDeploymentStatus(
workflowId,
true,
json.deployedAt ? new Date(json.deployedAt) : undefined,
json.apiKey || ''
)
} else {
setDeploymentStatus(workflowId, false, undefined, '')
}
const actionPast = action === 'undeploy' ? 'undeployed' : 'deployed'
logger.info(`Workflow ${actionPast} and registry updated`)
} catch (error) {
logger.warn('Failed to update workflow registry:', error)
}
} catch (e: any) {
logger.error('Deploy/undeploy failed', { message: e?.message })
this.setState(ClientToolCallState.error)
await this.markToolComplete(500, e?.message || 'Failed to deploy/undeploy workflow')
}
}

async execute(args?: DeployWorkflowArgs): Promise<void> {
await this.handleAccept(args)
}
}
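For reference, a successful 'deploy' completion reported by this tool carries a result payload roughly like the sketch below. This is illustrative only: the values are placeholders, while the fields mirror the resultData assembled in the code above.

// Illustrative only: approximate shape of resultData after a successful deploy
// (values are invented; the endpoint and curl command are built as in the code above).
const exampleResultData = {
  action: 'deploy',
  isDeployed: true,
  deployedAt: '2025-01-01T00:00:00.000Z',
  endpoint: 'https://app.sim.ai/api/workflows/<workflow-id>/execute',
  curlCommand:
    'curl -X POST -H "X-API-Key: $SIM_API_KEY" -H "Content-Type: application/json" https://app.sim.ai/api/workflows/<workflow-id>/execute',
  apiKeyPlaceholder: '$SIM_API_KEY',
}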
@@ -6,10 +6,13 @@ import {
} from '@/lib/copilot/tools/client/base-tool'
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
import { createLogger } from '@/lib/logs/console/logger'
import { stripWorkflowDiffMarkers } from '@/lib/workflows/diff'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

interface EditWorkflowOperation {
operation_type: 'add' | 'edit' | 'delete'
@@ -28,11 +31,68 @@ export class EditWorkflowClientTool extends BaseClientTool {
private lastResult: any | undefined
private hasExecuted = false
private hasAppliedDiff = false
private workflowId: string | undefined

constructor(toolCallId: string) {
super(toolCallId, EditWorkflowClientTool.id, EditWorkflowClientTool.metadata)
}

/**
* Get sanitized workflow JSON from a workflow state, merge subblocks, and sanitize for copilot
* This matches what get_user_workflow returns
*/
private getSanitizedWorkflowJson(workflowState: any): string | undefined {
const logger = createLogger('EditWorkflowClientTool')

if (!this.workflowId) {
logger.warn('No workflowId available for getting sanitized workflow JSON')
return undefined
}

if (!workflowState) {
logger.warn('No workflow state provided')
return undefined
}

try {
// Normalize required properties
if (!workflowState.loops) workflowState.loops = {}
if (!workflowState.parallels) workflowState.parallels = {}
if (!workflowState.edges) workflowState.edges = []
if (!workflowState.blocks) workflowState.blocks = {}

// Merge latest subblock values so edits are reflected
let mergedState = workflowState
if (workflowState.blocks) {
mergedState = {
...workflowState,
blocks: mergeSubblockState(workflowState.blocks, this.workflowId as any),
}
logger.info('Merged subblock values into workflow state', {
workflowId: this.workflowId,
blockCount: Object.keys(mergedState.blocks || {}).length,
})
}

// Sanitize workflow state for copilot (remove UI-specific data)
const sanitizedState = sanitizeForCopilot(mergedState)

// Convert to JSON string for transport
const workflowJson = JSON.stringify(sanitizedState, null, 2)
logger.info('Successfully created sanitized workflow JSON', {
workflowId: this.workflowId,
jsonLength: workflowJson.length,
})

return workflowJson
} catch (error) {
logger.error('Failed to get sanitized workflow JSON', {
error: error instanceof Error ? error.message : String(error),
})
return undefined
}
}

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
@@ -44,6 +104,31 @@ export class EditWorkflowClientTool extends BaseClientTool {
[ClientToolCallState.aborted]: { text: 'Aborted editing your workflow', icon: MinusCircle },
[ClientToolCallState.pending]: { text: 'Editing your workflow', icon: Loader2 },
},
getDynamicText: (params, state) => {
const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
if (workflowId) {
const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name
if (workflowName) {
switch (state) {
case ClientToolCallState.success:
return `Edited ${workflowName}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Editing ${workflowName}`
case ClientToolCallState.error:
return `Failed to edit ${workflowName}`
case ClientToolCallState.review:
return `Review changes to ${workflowName}`
case ClientToolCallState.rejected:
return `Rejected changes to ${workflowName}`
case ClientToolCallState.aborted:
return `Aborted editing ${workflowName}`
}
}
}
return undefined
},
}

async handleAccept(): Promise<void> {
@@ -54,7 +139,17 @@ export class EditWorkflowClientTool extends BaseClientTool {
hasResult: this.lastResult !== undefined,
})
this.setState(ClientToolCallState.success)
await this.markToolComplete(200, 'Workflow edits accepted', this.lastResult)

// Read from the workflow store to get the actual state with diff applied
const workflowStore = useWorkflowStore.getState()
const currentState = workflowStore.getWorkflowState()

// Get the workflow state that was applied, merge subblocks, and sanitize
// This matches what get_user_workflow would return
const workflowJson = this.getSanitizedWorkflowJson(currentState)
const sanitizedData = workflowJson ? { userWorkflow: workflowJson } : undefined

await this.markToolComplete(200, 'Workflow edits accepted', sanitizedData)
this.setState(ClientToolCallState.success)
}

@@ -88,6 +183,9 @@ export class EditWorkflowClientTool extends BaseClientTool {
return
}

// Store workflowId for later use
this.workflowId = workflowId

// Validate operations
const operations = args?.operations || []
if (!operations.length) {
@@ -98,54 +196,24 @@ export class EditWorkflowClientTool extends BaseClientTool {

// Prepare currentUserWorkflow JSON from stores to preserve block IDs
let currentUserWorkflow = args?.currentUserWorkflow
const diffStoreState = useWorkflowDiffStore.getState()
let usedDiffWorkflow = false

if (!currentUserWorkflow && diffStoreState.isDiffReady && diffStoreState.diffWorkflow) {
try {
const diffWorkflow = diffStoreState.diffWorkflow
const normalizedDiffWorkflow = {
...diffWorkflow,
blocks: diffWorkflow.blocks || {},
edges: diffWorkflow.edges || [],
loops: diffWorkflow.loops || {},
parallels: diffWorkflow.parallels || {},
}
currentUserWorkflow = JSON.stringify(normalizedDiffWorkflow)
usedDiffWorkflow = true
logger.info('Using diff workflow state as base for edit_workflow operations', {
toolCallId: this.toolCallId,
blocksCount: Object.keys(normalizedDiffWorkflow.blocks).length,
edgesCount: normalizedDiffWorkflow.edges.length,
})
} catch (e) {
logger.warn(
'Failed to serialize diff workflow state; falling back to active workflow',
e as any
)
}
}

if (!currentUserWorkflow && !usedDiffWorkflow) {
if (!currentUserWorkflow) {
try {
const workflowStore = useWorkflowStore.getState()
const fullState = workflowStore.getWorkflowState()
let merged = fullState
if (merged?.blocks) {
merged = { ...merged, blocks: mergeSubblockState(merged.blocks, workflowId as any) }
}
if (merged) {
if (!merged.loops) merged.loops = {}
if (!merged.parallels) merged.parallels = {}
if (!merged.edges) merged.edges = []
if (!merged.blocks) merged.blocks = {}
currentUserWorkflow = JSON.stringify(merged)
}
} catch (e) {
logger.warn(
'Failed to build currentUserWorkflow from stores; proceeding without it',
e as any
)
const mergedBlocks = mergeSubblockState(fullState.blocks, workflowId as any)
const payloadState = stripWorkflowDiffMarkers({
...fullState,
blocks: mergedBlocks,
edges: fullState.edges || [],
loops: fullState.loops || {},
parallels: fullState.parallels || {},
})
currentUserWorkflow = JSON.stringify(payloadState)
} catch (error) {
logger.warn('Failed to build currentUserWorkflow from stores; proceeding without it', {
error,
})
}
}

@@ -183,15 +251,27 @@ export class EditWorkflowClientTool extends BaseClientTool {
})

// Update diff directly with workflow state - no YAML conversion needed!
// The diff engine may transform the workflow state (e.g., assign new IDs), so we must use
// the returned proposedState rather than the original result.workflowState
let actualDiffWorkflow: WorkflowState | null = null

if (result.workflowState) {
try {
if (!this.hasAppliedDiff) {
const diffStore = useWorkflowDiffStore.getState()
// setProposedChanges applies the state directly to the workflow store
await diffStore.setProposedChanges(result.workflowState)
logger.info('diff proposed changes set for edit_workflow with direct workflow state')
this.hasAppliedDiff = true

// Read back the applied state from the workflow store
const workflowStore = useWorkflowStore.getState()
actualDiffWorkflow = workflowStore.getWorkflowState()
} else {
logger.info('skipping diff apply (already applied)')
// If we already applied, read from workflow store
const workflowStore = useWorkflowStore.getState()
actualDiffWorkflow = workflowStore.getWorkflowState()
}
} catch (e) {
logger.warn('Failed to set proposed changes in diff store', e as any)
@@ -201,8 +281,17 @@ export class EditWorkflowClientTool extends BaseClientTool {
throw new Error('No workflow state returned from server')
}

if (!actualDiffWorkflow) {
throw new Error('Failed to retrieve workflow from diff store after setting changes')
}

// Get the workflow state that was just applied, merge subblocks, and sanitize
// This matches what get_user_workflow would return (the true state after edits were applied)
const workflowJson = this.getSanitizedWorkflowJson(actualDiffWorkflow)
const sanitizedData = workflowJson ? { userWorkflow: workflowJson } : undefined

// Mark complete early to unblock LLM stream
await this.markToolComplete(200, 'Workflow diff ready for review', result)
await this.markToolComplete(200, 'Workflow diff ready for review', sanitizedData)

// Move into review state
this.setState(ClientToolCallState.review, { result })

@@ -5,8 +5,8 @@ import {
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { createLogger } from '@/lib/logs/console/logger'
import { stripWorkflowDiffMarkers } from '@/lib/workflows/diff'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -27,13 +27,36 @@ export class GetUserWorkflowClientTool extends BaseClientTool {

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Analyzing your workflow', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Analyzing your workflow', icon: WorkflowIcon },
[ClientToolCallState.executing]: { text: 'Analyzing your workflow', icon: Loader2 },
[ClientToolCallState.aborted]: { text: 'Aborted analyzing your workflow', icon: XCircle },
[ClientToolCallState.success]: { text: 'Analyzed your workflow', icon: WorkflowIcon },
[ClientToolCallState.error]: { text: 'Failed to analyze your workflow', icon: X },
[ClientToolCallState.rejected]: { text: 'Skipped analyzing your workflow', icon: XCircle },
[ClientToolCallState.generating]: { text: 'Reading your workflow', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Reading your workflow', icon: WorkflowIcon },
[ClientToolCallState.executing]: { text: 'Reading your workflow', icon: Loader2 },
[ClientToolCallState.aborted]: { text: 'Aborted reading your workflow', icon: XCircle },
[ClientToolCallState.success]: { text: 'Read your workflow', icon: WorkflowIcon },
[ClientToolCallState.error]: { text: 'Failed to read your workflow', icon: X },
[ClientToolCallState.rejected]: { text: 'Skipped reading your workflow', icon: XCircle },
},
getDynamicText: (params, state) => {
const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
if (workflowId) {
const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name
if (workflowName) {
switch (state) {
case ClientToolCallState.success:
return `Read ${workflowName}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Reading ${workflowName}`
case ClientToolCallState.error:
return `Failed to read ${workflowName}`
case ClientToolCallState.aborted:
return `Aborted reading ${workflowName}`
case ClientToolCallState.rejected:
return `Skipped reading ${workflowName}`
}
}
}
return undefined
},
}

@@ -58,37 +81,31 @@ export class GetUserWorkflowClientTool extends BaseClientTool {
includeMetadata: args?.includeMetadata,
})

// Prefer diff/preview store if available; otherwise use main workflow store
// Always use main workflow store as the source of truth
const workflowStore = useWorkflowStore.getState()
const fullWorkflowState = workflowStore.getWorkflowState()

let workflowState: any = null

const diffStore = useWorkflowDiffStore.getState()
if (diffStore.diffWorkflow && Object.keys(diffStore.diffWorkflow.blocks || {}).length > 0) {
workflowState = diffStore.diffWorkflow
logger.info('Using workflow from diff/preview store', { workflowId })
} else {
const workflowStore = useWorkflowStore.getState()
const fullWorkflowState = workflowStore.getWorkflowState()
if (!fullWorkflowState || !fullWorkflowState.blocks) {
const workflowRegistry = useWorkflowRegistry.getState()
const wfKey = String(workflowId)
const workflow = (workflowRegistry as any).workflows?.[wfKey]

if (!fullWorkflowState || !fullWorkflowState.blocks) {
const workflowRegistry = useWorkflowRegistry.getState()
const wfKey = String(workflowId)
const workflow = (workflowRegistry as any).workflows?.[wfKey]

if (!workflow) {
await this.markToolComplete(404, `Workflow ${workflowId} not found in any store`)
this.setState(ClientToolCallState.error)
return
}

logger.warn('No workflow state found, using workflow metadata only', { workflowId })
workflowState = workflow
} else {
workflowState = fullWorkflowState
logger.info('Using workflow state from workflow store', {
workflowId,
blockCount: Object.keys(fullWorkflowState.blocks || {}).length,
})
if (!workflow) {
await this.markToolComplete(404, `Workflow ${workflowId} not found in any store`)
this.setState(ClientToolCallState.error)
return
}

logger.warn('No workflow state found, using workflow metadata only', { workflowId })
workflowState = workflow
} else {
workflowState = stripWorkflowDiffMarkers(fullWorkflowState)
logger.info('Using workflow state from workflow store', {
workflowId,
blockCount: Object.keys(fullWorkflowState.blocks || {}).length,
})
}

// Normalize required properties

@@ -23,19 +23,41 @@ export class GetWorkflowConsoleClientTool extends BaseClientTool {

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Fetching workflow console', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Fetching workflow console', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Workflow console fetched', icon: TerminalSquare },
[ClientToolCallState.error]: { text: 'Failed to read workflow console', icon: XCircle },
[ClientToolCallState.generating]: { text: 'Fetching execution logs', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Fetching execution logs', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Fetched execution logs', icon: TerminalSquare },
[ClientToolCallState.error]: { text: 'Failed to fetch execution logs', icon: XCircle },
[ClientToolCallState.rejected]: {
text: 'Skipped reading workflow console',
text: 'Skipped fetching execution logs',
icon: MinusCircle,
},
[ClientToolCallState.aborted]: {
text: 'Aborted reading workflow console',
text: 'Aborted fetching execution logs',
icon: MinusCircle,
},
[ClientToolCallState.pending]: { text: 'Fetching workflow console', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Fetching execution logs', icon: Loader2 },
},
getDynamicText: (params, state) => {
const limit = params?.limit
if (limit && typeof limit === 'number') {
const logText = limit === 1 ? 'execution log' : 'execution logs'

switch (state) {
case ClientToolCallState.success:
return `Fetched last ${limit} ${logText}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Fetching last ${limit} ${logText}`
case ClientToolCallState.error:
return `Failed to fetch last ${limit} ${logText}`
case ClientToolCallState.rejected:
return `Skipped fetching last ${limit} ${logText}`
case ClientToolCallState.aborted:
return `Aborted fetching last ${limit} ${logText}`
}
}
return undefined
},
}

@@ -23,13 +23,34 @@ export class GetWorkflowFromNameClientTool extends BaseClientTool {

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Retrieving workflow', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Retrieving workflow', icon: FileText },
[ClientToolCallState.executing]: { text: 'Retrieving workflow', icon: Loader2 },
[ClientToolCallState.aborted]: { text: 'Aborted retrieving workflow', icon: XCircle },
[ClientToolCallState.success]: { text: 'Retrieved workflow', icon: FileText },
[ClientToolCallState.error]: { text: 'Failed to retrieve workflow', icon: X },
[ClientToolCallState.rejected]: { text: 'Skipped retrieving workflow', icon: XCircle },
[ClientToolCallState.generating]: { text: 'Reading workflow', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Reading workflow', icon: FileText },
[ClientToolCallState.executing]: { text: 'Reading workflow', icon: Loader2 },
[ClientToolCallState.aborted]: { text: 'Aborted reading workflow', icon: XCircle },
[ClientToolCallState.success]: { text: 'Read workflow', icon: FileText },
[ClientToolCallState.error]: { text: 'Failed to read workflow', icon: X },
[ClientToolCallState.rejected]: { text: 'Skipped reading workflow', icon: XCircle },
},
getDynamicText: (params, state) => {
if (params?.workflow_name && typeof params.workflow_name === 'string') {
const workflowName = params.workflow_name

switch (state) {
case ClientToolCallState.success:
return `Read ${workflowName}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Reading ${workflowName}`
case ClientToolCallState.error:
return `Failed to read ${workflowName}`
case ClientToolCallState.aborted:
return `Aborted reading ${workflowName}`
case ClientToolCallState.rejected:
return `Skipped reading ${workflowName}`
}
}
return undefined
},
}

@@ -38,6 +38,33 @@ export class RunWorkflowClientTool extends BaseClientTool {
accept: { text: 'Run', icon: Play },
reject: { text: 'Skip', icon: MinusCircle },
},
getDynamicText: (params, state) => {
const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
if (workflowId) {
const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name
if (workflowName) {
switch (state) {
case ClientToolCallState.success:
return `Ran ${workflowName}`
case ClientToolCallState.executing:
return `Running ${workflowName}`
case ClientToolCallState.generating:
return `Preparing to run ${workflowName}`
case ClientToolCallState.pending:
return `Run ${workflowName}?`
case ClientToolCallState.error:
return `Failed to run ${workflowName}`
case ClientToolCallState.rejected:
return `Skipped running ${workflowName}`
case ClientToolCallState.aborted:
return `Aborted running ${workflowName}`
case ClientToolCallState.background:
return `Running ${workflowName} in background`
}
}
}
return undefined
},
}

async handleReject(): Promise<void> {

@@ -48,6 +48,38 @@ export class SetGlobalWorkflowVariablesClientTool extends BaseClientTool {
accept: { text: 'Apply', icon: Settings2 },
reject: { text: 'Skip', icon: XCircle },
},
getDynamicText: (params, state) => {
if (params?.operations && Array.isArray(params.operations)) {
const varNames = params.operations
.slice(0, 2)
.map((op: any) => op.name)
.filter(Boolean)

if (varNames.length > 0) {
const varList = varNames.join(', ')
const more = params.operations.length > 2 ? '...' : ''
const displayText = `${varList}${more}`

switch (state) {
case ClientToolCallState.success:
return `Set ${displayText}`
case ClientToolCallState.executing:
return `Setting ${displayText}`
case ClientToolCallState.generating:
return `Preparing to set ${displayText}`
case ClientToolCallState.pending:
return `Set ${displayText}?`
case ClientToolCallState.error:
return `Failed to set ${displayText}`
case ClientToolCallState.aborted:
return `Aborted setting ${displayText}`
case ClientToolCallState.rejected:
return `Skipped setting ${displayText}`
}
}
}
return undefined
},
}

async handleReject(): Promise<void> {

@@ -7,8 +7,7 @@ import { listGDriveFilesServerTool } from '@/lib/copilot/tools/server/gdrive/lis
import { readGDriveFileServerTool } from '@/lib/copilot/tools/server/gdrive/read-file'
import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-api-request'
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
import { getEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/get-environment-variables'
import { getOAuthCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-oauth-credentials'
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
@@ -39,11 +38,10 @@ serverToolRegistry[editWorkflowServerTool.name] = editWorkflowServerTool
serverToolRegistry[getWorkflowConsoleServerTool.name] = getWorkflowConsoleServerTool
serverToolRegistry[searchDocumentationServerTool.name] = searchDocumentationServerTool
serverToolRegistry[searchOnlineServerTool.name] = searchOnlineServerTool
serverToolRegistry[getEnvironmentVariablesServerTool.name] = getEnvironmentVariablesServerTool
serverToolRegistry[setEnvironmentVariablesServerTool.name] = setEnvironmentVariablesServerTool
serverToolRegistry[listGDriveFilesServerTool.name] = listGDriveFilesServerTool
serverToolRegistry[readGDriveFileServerTool.name] = readGDriveFileServerTool
serverToolRegistry[getOAuthCredentialsServerTool.name] = getOAuthCredentialsServerTool
serverToolRegistry[getCredentialsServerTool.name] = getCredentialsServerTool
serverToolRegistry[makeApiRequestServerTool.name] = makeApiRequestServerTool

export async function routeExecution(

@@ -4,24 +4,23 @@ import { eq } from 'drizzle-orm'
import { jwtDecode } from 'jwt-decode'
import { createPermissionError, verifyWorkflowAccess } from '@/lib/copilot/auth/permissions'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { getEnvironmentVariableKeys } from '@/lib/environment/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'

interface GetOAuthCredentialsParams {
interface GetCredentialsParams {
userId?: string
workflowId?: string
}

export const getOAuthCredentialsServerTool: BaseServerTool<GetOAuthCredentialsParams, any> = {
name: 'get_oauth_credentials',
async execute(params: GetOAuthCredentialsParams, context?: { userId: string }): Promise<any> {
const logger = createLogger('GetOAuthCredentialsServerTool')
export const getCredentialsServerTool: BaseServerTool<GetCredentialsParams, any> = {
name: 'get_credentials',
async execute(params: GetCredentialsParams, context?: { userId: string }): Promise<any> {
const logger = createLogger('GetCredentialsServerTool')

if (!context?.userId) {
logger.error(
'Unauthorized attempt to access OAuth credentials - no authenticated user context'
)
logger.error('Unauthorized attempt to access credentials - no authenticated user context')
throw new Error('Authentication required')
}

@@ -32,7 +31,7 @@ export const getOAuthCredentialsServerTool: BaseServerTool<GetOAuthCredentialsPa

if (!hasAccess) {
const errorMessage = createPermissionError('access credentials in')
logger.error('Unauthorized attempt to access OAuth credentials', {
logger.error('Unauthorized attempt to access credentials', {
workflowId: params.workflowId,
authenticatedUserId,
})
@@ -42,10 +41,12 @@ export const getOAuthCredentialsServerTool: BaseServerTool<GetOAuthCredentialsPa

const userId = authenticatedUserId

logger.info('Fetching OAuth credentials for authenticated user', {
logger.info('Fetching credentials for authenticated user', {
userId,
hasWorkflowId: !!params?.workflowId,
})

// Fetch OAuth credentials
const accounts = await db.select().from(account).where(eq(account.userId, userId))
const userRecord = await db
.select({ email: user.email })
@@ -54,7 +55,7 @@ export const getOAuthCredentialsServerTool: BaseServerTool<GetOAuthCredentialsPa
.limit(1)
const userEmail = userRecord.length > 0 ? userRecord[0]?.email : null

const credentials: Array<{
const oauthCredentials: Array<{
id: string
name: string
provider: string
@@ -85,7 +86,7 @@ export const getOAuthCredentialsServerTool: BaseServerTool<GetOAuthCredentialsPa
)
accessToken = refreshedToken || accessToken
} catch {}
credentials.push({
oauthCredentials.push({
id: acc.id,
name: displayName,
provider: providerId,
@@ -94,7 +95,25 @@ export const getOAuthCredentialsServerTool: BaseServerTool<GetOAuthCredentialsPa
accessToken,
})
}
logger.info('Fetched OAuth credentials', { userId, count: credentials.length })
return { credentials, total: credentials.length }

// Fetch environment variables
const envResult = await getEnvironmentVariableKeys(userId)

logger.info('Fetched credentials', {
userId,
oauthCount: oauthCredentials.length,
envVarCount: envResult.count,
})

return {
oauth: {
credentials: oauthCredentials,
total: oauthCredentials.length,
},
environment: {
variableNames: envResult.variableNames,
count: envResult.count,
},
}
},
}
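For orientation, the consolidated get_credentials tool now returns OAuth credentials and environment-variable keys together. A response would look roughly like the sketch below; the field names follow the return statement above, while the values are invented and some per-credential fields are elided.

// Hypothetical example of a get_credentials result (illustrative values only).
const exampleCredentialsResult = {
  oauth: {
    credentials: [
      { id: 'acc_123', name: 'user@example.com', provider: 'google', accessToken: '...' }, // other fields elided
    ],
    total: 1,
  },
  environment: {
    variableNames: ['OPENAI_API_KEY', 'SLACK_BOT_TOKEN'],
    count: 2,
  },
}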

@@ -1,56 +0,0 @@
import { createPermissionError, verifyWorkflowAccess } from '@/lib/copilot/auth/permissions'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { getEnvironmentVariableKeys } from '@/lib/environment/utils'
import { createLogger } from '@/lib/logs/console/logger'

interface GetEnvironmentVariablesParams {
userId?: string
workflowId?: string
}

export const getEnvironmentVariablesServerTool: BaseServerTool<GetEnvironmentVariablesParams, any> =
{
name: 'get_environment_variables',
async execute(
params: GetEnvironmentVariablesParams,
context?: { userId: string }
): Promise<any> {
const logger = createLogger('GetEnvironmentVariablesServerTool')

if (!context?.userId) {
logger.error(
'Unauthorized attempt to access environment variables - no authenticated user context'
)
throw new Error('Authentication required')
}

const authenticatedUserId = context.userId

if (params?.workflowId) {
const { hasAccess } = await verifyWorkflowAccess(authenticatedUserId, params.workflowId)

if (!hasAccess) {
const errorMessage = createPermissionError('access environment variables in')
logger.error('Unauthorized attempt to access environment variables', {
workflowId: params.workflowId,
authenticatedUserId,
})
throw new Error(errorMessage)
}
}

const userId = authenticatedUserId

logger.info('Getting environment variables for authenticated user', {
userId,
hasWorkflowId: !!params?.workflowId,
})

const result = await getEnvironmentVariableKeys(userId)
logger.info('Environment variable keys retrieved', { userId, variableCount: result.count })
return {
variableNames: result.variableNames,
count: result.count,
}
},
}
@@ -220,8 +220,8 @@ export const getWorkflowConsoleServerTool: BaseServerTool<GetWorkflowConsoleArgs
const logger = createLogger('GetWorkflowConsoleServerTool')
const {
workflowId,
limit = 3,
includeDetails = true,
limit = 2,
includeDetails = false,
} = rawArgs || ({} as GetWorkflowConsoleArgs)

if (!workflowId || typeof workflowId !== 'string') {
@@ -247,61 +247,36 @@ export const getWorkflowConsoleServerTool: BaseServerTool<GetWorkflowConsoleArgs
.orderBy(desc(workflowExecutionLogs.startedAt))
.limit(limit)

const formattedEntries: ExecutionEntry[] = executionLogs.map((log) => {
// Simplify data for copilot - only essential block execution details
const simplifiedExecutions = executionLogs.map((log) => {
const executionData = log.executionData as any
const traceSpans = executionData?.traceSpans || []
const blockExecutions = includeDetails ? extractBlockExecutionsFromTraceSpans(traceSpans) : []

let finalOutput: any
if (blockExecutions.length > 0) {
const sortedBlocks = [...blockExecutions].sort(
(a, b) => new Date(b.endedAt).getTime() - new Date(a.endedAt).getTime()
)
const outputBlock = sortedBlocks.find(
(block) =>
block.status === 'success' &&
block.outputData &&
Object.keys(block.outputData).length > 0
)
if (outputBlock) finalOutput = outputBlock.outputData
}

const { message: errorMessage, block: errorBlock } = deriveExecutionErrorSummary({
blockExecutions,
traceSpans,
executionData,
})
// Simplify block executions to only essential fields
const simplifiedBlocks = blockExecutions.map((block) => ({
id: block.blockId,
name: block.blockName,
startedAt: block.startedAt,
endedAt: block.endedAt,
durationMs: block.durationMs,
output: block.outputData,
error: block.status === 'error' ? block.errorMessage : undefined,
}))

return {
id: log.id,
executionId: log.executionId,
level: log.level,
trigger: log.trigger,
startedAt: log.startedAt.toISOString(),
endedAt: log.endedAt?.toISOString() || null,
durationMs: log.totalDurationMs,
totalCost: (log.cost as any)?.total ?? null,
totalTokens: (log.cost as any)?.tokens?.total ?? null,
blockExecutions,
output: finalOutput,
errorMessage: errorMessage,
errorBlock: errorBlock,
blocks: simplifiedBlocks,
}
})

const resultSize = JSON.stringify(formattedEntries).length
const resultSize = JSON.stringify(simplifiedExecutions).length
logger.info('Workflow console result prepared', {
entryCount: formattedEntries.length,
executionCount: simplifiedExecutions.length,
resultSizeKB: Math.round(resultSize / 1024),
hasBlockDetails: includeDetails,
})

return {
entries: formattedEntries,
totalEntries: formattedEntries.length,
workflowId,
retrievedAt: new Date().toISOString(),
hasBlockDetails: includeDetails,
}
return simplifiedExecutions
},
}

@@ -76,11 +76,12 @@ describe('OAuth Token Refresh', () => {
endpoint: 'https://auth.atlassian.com/oauth/token',
},
{ name: 'Jira', providerId: 'jira', endpoint: 'https://auth.atlassian.com/oauth/token' },
{
name: 'Discord',
providerId: 'discord',
endpoint: 'https://discord.com/api/v10/oauth2/token',
},
// Discord is currently disabled
// {
//   name: 'Discord',
//   providerId: 'discord',
//   endpoint: 'https://discord.com/api/v10/oauth2/token',
// },
{ name: 'Linear', providerId: 'linear', endpoint: 'https://api.linear.app/oauth/token' },
{
name: 'Reddit',
@@ -154,11 +155,12 @@ describe('OAuth Token Refresh', () => {
providerId: 'outlook',
endpoint: 'https://login.microsoftonline.com/common/oauth2/v2.0/token',
},
{
name: 'Supabase',
providerId: 'supabase',
endpoint: 'https://api.supabase.com/v1/oauth/token',
},
// Supabase is currently disabled
// {
//   name: 'Supabase',
//   providerId: 'supabase',
//   endpoint: 'https://api.supabase.com/v1/oauth/token',
// },
{ name: 'Notion', providerId: 'notion', endpoint: 'https://api.notion.com/v1/oauth/token' },
{ name: 'Slack', providerId: 'slack', endpoint: 'https://slack.com/api/oauth.v2.access' },
]

apps/sim/lib/workflows/deployment-utils.ts (new file, 131 lines)
@@ -0,0 +1,131 @@
import { createLogger } from '@/lib/logs/console/logger'
import { resolveStartCandidates, StartBlockPath } from '@/lib/workflows/triggers'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('DeploymentUtils')

/**
* Gets the input format example for a workflow's API deployment
* Returns the -d flag with example data if inputs exist, empty string otherwise
*
* @param includeStreaming - Whether to include streaming parameters in the example
* @param selectedStreamingOutputs - Array of output IDs to stream
* @returns A string containing the curl -d flag with example data, or empty string if no inputs
*/
export function getInputFormatExample(
includeStreaming = false,
selectedStreamingOutputs: string[] = []
): string {
let inputFormatExample = ''
try {
const blocks = Object.values(useWorkflowStore.getState().blocks)
const candidates = resolveStartCandidates(useWorkflowStore.getState().blocks, {
execution: 'api',
})

const targetCandidate =
candidates.find((candidate) => candidate.path === StartBlockPath.UNIFIED) ||
candidates.find((candidate) => candidate.path === StartBlockPath.SPLIT_API) ||
candidates.find((candidate) => candidate.path === StartBlockPath.SPLIT_INPUT) ||
candidates.find((candidate) => candidate.path === StartBlockPath.LEGACY_STARTER)

const targetBlock = targetCandidate?.block

if (targetBlock) {
const inputFormat = useSubBlockStore.getState().getValue(targetBlock.id, 'inputFormat')

const exampleData: Record<string, any> = {}

if (inputFormat && Array.isArray(inputFormat) && inputFormat.length > 0) {
inputFormat.forEach((field: any) => {
if (field.name) {
switch (field.type) {
case 'string':
exampleData[field.name] = 'example'
break
case 'number':
exampleData[field.name] = 42
break
case 'boolean':
exampleData[field.name] = true
break
case 'object':
exampleData[field.name] = { key: 'value' }
break
case 'array':
exampleData[field.name] = [1, 2, 3]
break
case 'files':
exampleData[field.name] = [
{
data: 'data:application/pdf;base64,...',
type: 'file',
name: 'document.pdf',
mime: 'application/pdf',
},
]
break
}
}
})
}

// Add streaming parameters if enabled and outputs are selected
if (includeStreaming && selectedStreamingOutputs.length > 0) {
exampleData.stream = true
// Convert blockId_attribute format to blockName.attribute format for display
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/i

const convertedOutputs = selectedStreamingOutputs
.map((outputId) => {
// If it starts with a UUID, convert to blockName.attribute format
if (UUID_REGEX.test(outputId)) {
const underscoreIndex = outputId.indexOf('_')
if (underscoreIndex === -1) return null

const blockId = outputId.substring(0, underscoreIndex)
const attribute = outputId.substring(underscoreIndex + 1)

// Find the block by ID and get its name
const block = blocks.find((b) => b.id === blockId)
if (block?.name) {
// Normalize block name: lowercase and remove spaces
const normalizedBlockName = block.name.toLowerCase().replace(/\s+/g, '')
return `${normalizedBlockName}.${attribute}`
}
// Block not found (deleted), return null to filter out
return null
}

// Already in blockName.attribute format, verify the block exists
const parts = outputId.split('.')
if (parts.length >= 2) {
const blockName = parts[0]
// Check if a block with this name exists
const block = blocks.find(
(b) => b.name?.toLowerCase().replace(/\s+/g, '') === blockName.toLowerCase()
)
if (!block) {
// Block not found (deleted), return null to filter out
return null
}
}

return outputId
})
.filter((output): output is string => output !== null)

exampleData.selectedOutputs = convertedOutputs
}

if (Object.keys(exampleData).length > 0) {
inputFormatExample = ` -d '${JSON.stringify(exampleData)}'`
}
}
} catch (error) {
logger.warn('Error generating input format example:', error)
}

return inputFormatExample
}
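As a rough usage sketch, mirroring how the deploy client tool earlier in this change consumes the helper, the returned fragment is spliced directly into a curl command string. The endpoint and API key values below are illustrative placeholders.

// Minimal sketch of how getInputFormatExample feeds a generated curl command.
// The '-d' fragment is empty when the start block declares no input format.
import { getInputFormatExample } from '@/lib/workflows/deployment-utils'

const inputExample = getInputFormatExample(false) // e.g. ` -d '{"query":"example"}'` or ''
const curl = `curl -X POST -H "X-API-Key: $SIM_API_KEY" -H "Content-Type: application/json"${inputExample} https://app.sim.ai/api/workflows/<workflow-id>/execute`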
@@ -474,7 +474,8 @@ export class WorkflowDiffEngine {
*/
async createDiffFromWorkflowState(
proposedState: WorkflowState,
diffAnalysis?: DiffAnalysis
diffAnalysis?: DiffAnalysis,
baselineOverride?: WorkflowState
): Promise<DiffResult & { diff?: WorkflowDiff }> {
try {
logger.info('WorkflowDiffEngine.createDiffFromWorkflowState called with:', {
@@ -483,15 +484,14 @@ export class WorkflowDiffEngine {
hasDiffAnalysis: !!diffAnalysis,
})

// Get baseline for comparison
// If we already have a diff, use it as baseline (editing on top of diff)
// Otherwise use the current workflow state
// Determine baseline for comparison
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
const currentWorkflowState = useWorkflowStore.getState().getWorkflowState()

// Check if we're editing on top of an existing diff
const baselineForComparison = this.currentDiff?.proposedState || currentWorkflowState
const isEditingOnTopOfDiff = !!this.currentDiff
const hasBaselineOverride = !!baselineOverride
const baselineForComparison =
baselineOverride ?? this.currentDiff?.proposedState ?? currentWorkflowState
const isEditingOnTopOfDiff = !baselineOverride && !!this.currentDiff

if (isEditingOnTopOfDiff) {
logger.info('Editing on top of existing diff - using diff as baseline for comparison', {
@@ -503,8 +503,8 @@ export class WorkflowDiffEngine {
let mergedBaseline: WorkflowState = baselineForComparison

// Only merge subblock values if we're comparing against original workflow
// If editing on top of diff, use the diff state as-is
if (!isEditingOnTopOfDiff) {
// If editing on top of diff or using an explicit override, trust provided values
if (!isEditingOnTopOfDiff && !hasBaselineOverride) {
try {
mergedBaseline = {
...baselineForComparison,
@@ -1082,7 +1082,7 @@ export class WorkflowDiffEngine {

try {
// Clean up the proposed state by removing diff markers
const cleanState = this.cleanDiffMarkers(this.currentDiff.proposedState)
const cleanState = stripWorkflowDiffMarkers(this.currentDiff.proposedState)

logger.info('Diff accepted', {
blocksCount: Object.keys(cleanState.blocks).length,
@@ -1098,35 +1098,40 @@ export class WorkflowDiffEngine {
return null
}
}
}

/**
* Clean diff markers from a workflow state
*/
private cleanDiffMarkers(state: WorkflowState): WorkflowState {
const cleanBlocks: Record<string, BlockState> = {}
/**
* Removes diff metadata from a workflow state so it can be persisted or re-used safely.
*/
export function stripWorkflowDiffMarkers(state: WorkflowState): WorkflowState {
const cleanBlocks: Record<string, BlockState> = {}

// Remove diff markers from each block
for (const [blockId, block] of Object.entries(state.blocks)) {
const cleanBlock: BlockState = { ...block }
for (const [blockId, block] of Object.entries(state.blocks || {})) {
const cleanBlock: BlockState = structuredClone(block)
const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
blockWithDiff.is_diff = undefined
blockWithDiff.field_diffs = undefined

// Remove diff markers using proper typing
const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
blockWithDiff.is_diff = undefined
blockWithDiff.field_diffs = undefined

// Ensure outputs is never null/undefined
if (cleanBlock.outputs === undefined || cleanBlock.outputs === null) {
cleanBlock.outputs = {}
}

cleanBlocks[blockId] = cleanBlock
if (cleanBlock.subBlocks) {
Object.values(cleanBlock.subBlocks).forEach((subBlock) => {
if (subBlock && typeof subBlock === 'object') {
;(subBlock as any).is_diff = undefined
}
})
}

return {
blocks: cleanBlocks,
edges: state.edges || [],
loops: state.loops || {},
parallels: state.parallels || {},
if (cleanBlock.outputs === undefined || cleanBlock.outputs === null) {
cleanBlock.outputs = {}
}

cleanBlocks[blockId] = cleanBlock
}

return {
...state,
blocks: cleanBlocks,
edges: structuredClone(state.edges || []),
loops: structuredClone(state.loops || {}),
parallels: structuredClone(state.parallels || {}),
}
}
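A minimal usage sketch, consistent with the call sites shown elsewhere in this change: strip diff markers from the live workflow state before persisting it or handing it to the copilot. The import path matches the re-export added below.

// Sketch: clean a workflow state before persisting or re-serializing it.
import { stripWorkflowDiffMarkers } from '@/lib/workflows/diff'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const current = useWorkflowStore.getState().getWorkflowState()
const cleanState = stripWorkflowDiffMarkers(current) // is_diff / field_diffs removed, outputs normalized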

@@ -1,2 +1,2 @@
export type { DiffAnalysis, DiffMetadata, DiffResult, WorkflowDiff } from './diff-engine'
export { WorkflowDiffEngine } from './diff-engine'
export { stripWorkflowDiffMarkers, WorkflowDiffEngine } from './diff-engine'

@@ -22,7 +22,6 @@ export interface CopilotBlockState {
type: string
name: string
inputs?: Record<string, string | number | string[][] | object>
outputs: BlockState['outputs']
connections?: Record<string, string | string[]>
nestedNodes?: Record<string, CopilotBlockState>
enabled: boolean
@@ -350,10 +349,10 @@ export function sanitizeForCopilot(state: WorkflowState): CopilotWorkflowState {
})
}

// Create clean result without runtime data (outputs, positions, layout, etc.)
const result: CopilotBlockState = {
type: block.type,
name: block.name,
outputs: block.outputs,
enabled: block.enabled,
}

@@ -363,6 +362,9 @@ export function sanitizeForCopilot(state: WorkflowState): CopilotWorkflowState {
if (block.advancedMode !== undefined) result.advancedMode = block.advancedMode
if (block.triggerMode !== undefined) result.triggerMode = block.triggerMode

// Note: outputs, position, height, layout, horizontalHandles are intentionally excluded
// These are runtime/UI-specific fields not needed for copilot understanding

return result
}

apps/sim/lib/workflows/socket-operations.ts (new file, 93 lines)
@@ -0,0 +1,93 @@
import { client } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
import { useOperationQueueStore } from '@/stores/operation-queue/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

const logger = createLogger('WorkflowSocketOperations')

async function resolveUserId(): Promise<string> {
try {
const sessionResult = await client.getSession()
const userId = sessionResult.data?.user?.id
if (userId) {
return userId
}
} catch (error) {
logger.warn('Failed to resolve session user id for workflow operation', { error })
}

return 'unknown'
}

interface EnqueueWorkflowOperationArgs {
operation: string
target: string
payload: any
workflowId: string
immediate?: boolean
operationId?: string
}

/**
* Queues a workflow socket operation so it flows through the standard operation queue,
* ensuring consistent retries, confirmations, and telemetry.
*/
export async function enqueueWorkflowOperation({
operation,
target,
payload,
workflowId,
immediate = false,
operationId,
}: EnqueueWorkflowOperationArgs): Promise<string> {
const userId = await resolveUserId()
const opId = operationId ?? crypto.randomUUID()

useOperationQueueStore.getState().addToQueue({
id: opId,
operation: {
operation,
target,
payload,
},
workflowId,
userId,
immediate,
})

logger.debug('Queued workflow operation', {
workflowId,
operation,
target,
operationId: opId,
immediate,
})

return opId
}

interface EnqueueReplaceStateArgs {
workflowId: string
state: WorkflowState
immediate?: boolean
operationId?: string
}

/**
* Convenience wrapper for broadcasting a full workflow state replacement via the queue.
*/
export async function enqueueReplaceWorkflowState({
workflowId,
state,
immediate,
operationId,
}: EnqueueReplaceStateArgs): Promise<string> {
return enqueueWorkflowOperation({
workflowId,
operation: 'replace-state',
target: 'workflow',
payload: { state },
immediate,
operationId,
})
}
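A minimal usage sketch of the new queue helpers; the workflow id is a placeholder and the call is assumed to run inside an async context.

// Sketch: broadcast a full state replacement through the operation queue.
import { enqueueReplaceWorkflowState } from '@/lib/workflows/socket-operations'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const state = useWorkflowStore.getState().getWorkflowState()
const operationId = await enqueueReplaceWorkflowState({
  workflowId: '<workflow-id>', // placeholder
  state,
  immediate: true,
})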
apps/sim/scripts/export-workflow.ts (new executable file, 125 lines)
@@ -0,0 +1,125 @@
#!/usr/bin/env bun

/**
* Export workflow JSON from database
*
* Usage:
*   bun apps/sim/scripts/export-workflow.ts <workflow-id>
*
* This script exports a workflow in the same format as the export API route.
* It fetches the workflow state from normalized tables, combines it with metadata
* and variables, sanitizes it, and outputs the JSON.
*
* Make sure DATABASE_URL or POSTGRES_URL is set in your environment.
*/

// Suppress console logs from imported modules - only JSON should go to stdout
const originalConsole = {
log: console.log,
warn: console.warn,
error: console.error,
}
console.log = () => {}
console.warn = () => {}
console.error = () => {}

import { writeFileSync } from 'fs'
import { eq } from 'drizzle-orm'
import { db, workflow } from '../../../packages/db/index.js'
import { loadWorkflowFromNormalizedTables } from '../lib/workflows/db-helpers.js'
import { sanitizeForExport } from '../lib/workflows/json-sanitizer.js'

// ---------- CLI argument parsing ----------
const args = process.argv.slice(2)
const workflowId = args[0]
const outputFile = args[1] // Optional output filename

if (!workflowId) {
process.stderr.write(
'Usage: bun apps/sim/scripts/export-workflow.ts <workflow-id> [output-file]\n'
)
process.stderr.write('\n')
process.stderr.write('Examples:\n')
process.stderr.write('  bun apps/sim/scripts/export-workflow.ts abc123\n')
process.stderr.write('  bun apps/sim/scripts/export-workflow.ts abc123 workflow.json\n')
process.stderr.write('\n')
process.stderr.write('Make sure DATABASE_URL or POSTGRES_URL is set in your environment.\n')
process.exit(1)
}

// ---------- Main export function ----------
async function exportWorkflow(workflowId: string, outputFile?: string): Promise<void> {
try {
// Fetch workflow metadata
const [workflowData] = await db
.select()
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)

if (!workflowData) {
process.stderr.write(`Error: Workflow ${workflowId} not found\n`)
process.exit(1)
}

// Load workflow from normalized tables
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

if (!normalizedData) {
process.stderr.write(`Error: Workflow ${workflowId} has no normalized data\n`)
process.exit(1)
}

// Convert variables to array format
let workflowVariables: any[] = []
if (workflowData.variables && typeof workflowData.variables === 'object') {
workflowVariables = Object.values(workflowData.variables).map((v: any) => ({
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}))
}

// Prepare export state - match the exact format from the UI
const workflowState = {
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
metadata: {
name: workflowData.name,
description: workflowData.description ?? undefined,
color: workflowData.color ?? undefined,
exportedAt: new Date().toISOString(),
},
variables: workflowVariables,
}

// Sanitize and export - this returns { version, exportedAt, state }
const exportState = sanitizeForExport(workflowState)
const jsonString = JSON.stringify(exportState, null, 2)

// Write to file or stdout
if (outputFile) {
writeFileSync(outputFile, jsonString, 'utf-8')
process.stderr.write(`Workflow exported to ${outputFile}\n`)
} else {
// Output the JSON to stdout only
process.stdout.write(`${jsonString}\n`)
}
} catch (error) {
process.stderr.write(`Error exporting workflow: ${error}\n`)
process.exit(1)
}
}

// ---------- Execute ----------
exportWorkflow(workflowId, outputFile)
.then(() => {
process.exit(0)
})
.catch((error) => {
process.stderr.write(`Unexpected error: ${error}\n`)
process.exit(1)
})

@@ -182,6 +182,9 @@ export async function persistWorkflowOperation(workflowId: string, operation: an
case 'variable':
await handleVariableOperationTx(tx, workflowId, op, payload)
break
case 'workflow':
await handleWorkflowOperationTx(tx, workflowId, op, payload)
break
default:
throw new Error(`Unknown operation target: ${target}`)
}
@@ -1062,3 +1065,101 @@ async function handleVariableOperationTx(
throw new Error(`Unsupported variable operation: ${operation}`)
}
}

// Workflow operations - handles complete state replacement
async function handleWorkflowOperationTx(
tx: any,
workflowId: string,
operation: string,
payload: any
) {
switch (operation) {
case 'replace-state': {
if (!payload.state) {
throw new Error('Missing state for replace-state operation')
}

const { blocks, edges, loops, parallels } = payload.state

logger.info(`Replacing workflow state for ${workflowId}`, {
blockCount: Object.keys(blocks || {}).length,
edgeCount: (edges || []).length,
loopCount: Object.keys(loops || {}).length,
parallelCount: Object.keys(parallels || {}).length,
})

// Delete all existing blocks (this will cascade delete edges via ON DELETE CASCADE)
await tx.delete(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId))

// Delete all existing subflows
await tx.delete(workflowSubflows).where(eq(workflowSubflows.workflowId, workflowId))

// Insert all blocks from the new state
if (blocks && Object.keys(blocks).length > 0) {
const blockValues = Object.values(blocks).map((block: any) => ({
id: block.id,
workflowId,
type: block.type,
name: block.name,
positionX: block.position.x,
positionY: block.position.y,
data: block.data || {},
subBlocks: block.subBlocks || {},
outputs: block.outputs || {},
enabled: block.enabled ?? true,
horizontalHandles: block.horizontalHandles ?? true,
advancedMode: block.advancedMode ?? false,
triggerMode: block.triggerMode ?? false,
height: block.height || 0,
}))

await tx.insert(workflowBlocks).values(blockValues)
}

// Insert all edges from the new state
if (edges && edges.length > 0) {
const edgeValues = edges.map((edge: any) => ({
id: edge.id,
workflowId,
sourceBlockId: edge.source,
targetBlockId: edge.target,
sourceHandle: edge.sourceHandle || null,
targetHandle: edge.targetHandle || null,
}))

await tx.insert(workflowEdges).values(edgeValues)
}

// Insert all loops from the new state
if (loops && Object.keys(loops).length > 0) {
const loopValues = Object.entries(loops).map(([id, loop]: [string, any]) => ({
id,
workflowId,
type: 'loop',
config: loop,
}))

await tx.insert(workflowSubflows).values(loopValues)
}

// Insert all parallels from the new state
if (parallels && Object.keys(parallels).length > 0) {
const parallelValues = Object.entries(parallels).map(([id, parallel]: [string, any]) => ({
id,
workflowId,
type: 'parallel',
config: parallel,
}))

await tx.insert(workflowSubflows).values(parallelValues)
}

logger.info(`Successfully replaced workflow state for ${workflowId}`)
break
}

default:
logger.warn(`Unknown workflow operation: ${operation}`)
throw new Error(`Unsupported workflow operation: ${operation}`)
}
}
|
||||
|
||||
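Editor's note: to illustrate the shape handleWorkflowOperationTx expects, a sketch of a replace-state operation. The block/edge fields mirror what the handler reads above; the ids and values are made up.

const exampleReplaceState = {
  operation: 'replace-state',
  target: 'workflow',
  payload: {
    state: {
      blocks: {
        'block-1': {
          id: 'block-1',
          type: 'starter',
          name: 'Start',
          position: { x: 0, y: 0 },
          data: {},
          subBlocks: {},
          outputs: {},
          enabled: true,
        },
      },
      edges: [],
      loops: {},
      parallels: {},
    },
  },
  timestamp: Date.now(),
}

// await persistWorkflowOperation(workflowId, exampleReplaceState)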
@@ -183,6 +183,44 @@ export function setupOperationsHandlers(
        return
      }

      if (target === 'workflow' && operation === 'replace-state') {
        // Persist the workflow state replacement to database first
        await persistWorkflowOperation(workflowId, {
          operation,
          target,
          payload,
          timestamp: operationTimestamp,
          userId: session.userId,
        })

        room.lastModified = Date.now()

        const broadcastData = {
          operation,
          target,
          payload,
          timestamp: operationTimestamp,
          senderId: socket.id,
          userId: session.userId,
          userName: session.userName,
          metadata: {
            workflowId,
            operationId: crypto.randomUUID(),
          },
        }

        socket.to(workflowId).emit('workflow-operation', broadcastData)

        if (operationId) {
          socket.emit('operation-confirmed', {
            operationId,
            serverTimestamp: Date.now(),
          })
        }

        return
      }

      // For non-position operations, persist first then broadcast
      await persistWorkflowOperation(workflowId, {
        operation,
@@ -132,29 +132,6 @@ export function setupWorkflowHandlers(
    }
  })

  socket.on('request-sync', async ({ workflowId }) => {
    try {
      if (!socket.userId) {
        socket.emit('error', { type: 'NOT_AUTHENTICATED', message: 'Not authenticated' })
        return
      }

      const accessInfo = await verifyWorkflowAccess(socket.userId, workflowId)
      if (!accessInfo.hasAccess) {
        socket.emit('error', { type: 'ACCESS_DENIED', message: 'Access denied' })
        return
      }

      const workflowState = await getWorkflowState(workflowId)
      socket.emit('workflow-state', workflowState)

      logger.info(`Sent sync data to ${socket.userId} for workflow ${workflowId}`)
    } catch (error) {
      logger.error('Error handling sync request:', error)
      socket.emit('error', { type: 'SYNC_FAILED', message: 'Failed to sync workflow state' })
    }
  })

  socket.on('leave-workflow', () => {
    const workflowId = roomManager.getWorkflowIdForSocket(socket.id)
    const session = roomManager.getUserSession(socket.id)
@@ -21,6 +21,7 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
    'update-trigger-mode',
    'toggle-handles',
    'duplicate',
    'replace-state',
  ],
  write: [
    'add',
@@ -35,6 +36,7 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
    'update-trigger-mode',
    'toggle-handles',
    'duplicate',
    'replace-state',
  ],
  read: ['update-position'],
}
@@ -114,11 +114,22 @@ export const VariableOperationSchema = z.union([
  }),
])

export const WorkflowStateOperationSchema = z.object({
  operation: z.literal('replace-state'),
  target: z.literal('workflow'),
  payload: z.object({
    state: z.any(), // Full workflow state
  }),
  timestamp: z.number(),
  operationId: z.string().optional(),
})

export const WorkflowOperationSchema = z.union([
  BlockOperationSchema,
  EdgeOperationSchema,
  SubflowOperationSchema,
  VariableOperationSchema,
  WorkflowStateOperationSchema,
])

export { PositionSchema, AutoConnectEdgeSchema }
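Editor's note: a quick sanity-check sketch of the new schema using zod's standard safeParse; the payload values are illustrative.

const parsed = WorkflowOperationSchema.safeParse({
  operation: 'replace-state',
  target: 'workflow',
  payload: { state: { blocks: {}, edges: [], loops: {}, parallels: {} } },
  timestamp: Date.now(),
})
// parsed.success === true for a well-formed replace-state message;
// a message whose operation/target literals match no member of the union fails to parse.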
@@ -142,7 +142,10 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
          JSON.stringify(op.operation.payload) === JSON.stringify(operation.operation.payload)))
    )

    if (duplicateContent) {
    const isReplaceStateWorkflowOp =
      operation.operation.target === 'workflow' && operation.operation.operation === 'replace-state'

    if (duplicateContent && !isReplaceStateWorkflowOp) {
      logger.debug('Skipping duplicate operation content', {
        operationId: operation.id,
        existingOperationId: duplicateContent.id,
@@ -23,17 +23,22 @@ import {
  registerClientTool,
  registerToolStateSync,
} from '@/lib/copilot/tools/client/manager'
import { NavigateUIClientTool } from '@/lib/copilot/tools/client/navigation/navigate-ui'
import { CheckoffTodoClientTool } from '@/lib/copilot/tools/client/other/checkoff-todo'
import { MakeApiRequestClientTool } from '@/lib/copilot/tools/client/other/make-api-request'
import { MarkTodoInProgressClientTool } from '@/lib/copilot/tools/client/other/mark-todo-in-progress'
import { OAuthRequestAccessClientTool } from '@/lib/copilot/tools/client/other/oauth-request-access'
import { PlanClientTool } from '@/lib/copilot/tools/client/other/plan'
import { RememberDebugClientTool } from '@/lib/copilot/tools/client/other/remember-debug'
import { SearchDocumentationClientTool } from '@/lib/copilot/tools/client/other/search-documentation'
import { SearchErrorsClientTool } from '@/lib/copilot/tools/client/other/search-errors'
import { SearchOnlineClientTool } from '@/lib/copilot/tools/client/other/search-online'
import { SearchPatternsClientTool } from '@/lib/copilot/tools/client/other/search-patterns'
import { createExecutionContext, getTool } from '@/lib/copilot/tools/client/registry'
import { GetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/get-environment-variables'
import { GetOAuthCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-oauth-credentials'
import { GetCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-credentials'
import { SetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/set-environment-variables'
import { CheckDeploymentStatusClientTool } from '@/lib/copilot/tools/client/workflow/check-deployment-status'
import { DeployWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/deploy-workflow'
import { EditWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/edit-workflow'
import { GetGlobalWorkflowVariablesClientTool } from '@/lib/copilot/tools/client/workflow/get-global-workflow-variables'
import { GetUserWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/get-user-workflow'
@@ -59,7 +64,7 @@ const logger = createLogger('CopilotStore')
// On module load, clear any lingering diff preview (fresh page refresh)
try {
  const diffStore = useWorkflowDiffStore.getState()
  if (diffStore?.isShowingDiff || diffStore?.diffWorkflow) {
  if (diffStore?.hasActiveDiff) {
    diffStore.clearDiff()
  }
} catch {}
@@ -73,11 +78,13 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
  get_trigger_blocks: (id) => new GetTriggerBlocksClientTool(id),
  search_online: (id) => new SearchOnlineClientTool(id),
  search_documentation: (id) => new SearchDocumentationClientTool(id),
  get_environment_variables: (id) => new GetEnvironmentVariablesClientTool(id),
  search_patterns: (id) => new SearchPatternsClientTool(id),
  search_errors: (id) => new SearchErrorsClientTool(id),
  remember_debug: (id) => new RememberDebugClientTool(id),
  set_environment_variables: (id) => new SetEnvironmentVariablesClientTool(id),
  list_gdrive_files: (id) => new ListGDriveFilesClientTool(id),
  read_gdrive_file: (id) => new ReadGDriveFileClientTool(id),
  get_oauth_credentials: (id) => new GetOAuthCredentialsClientTool(id),
  get_credentials: (id) => new GetCredentialsClientTool(id),
  make_api_request: (id) => new MakeApiRequestClientTool(id),
  plan: (id) => new PlanClientTool(id),
  checkoff_todo: (id) => new CheckoffTodoClientTool(id),
@@ -94,6 +101,9 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
  get_examples_rag: (id) => new GetExamplesRagClientTool(id),
  get_operations_examples: (id) => new GetOperationsExamplesClientTool(id),
  summarize_conversation: (id) => new SummarizeClientTool(id),
  deploy_workflow: (id) => new DeployWorkflowClientTool(id),
  check_deployment_status: (id) => new CheckDeploymentStatusClientTool(id),
  navigate_ui: (id) => new NavigateUIClientTool(id),
}

// Read-only static metadata for class-based tools (no instances)
@@ -105,11 +115,13 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
  get_trigger_blocks: (GetTriggerBlocksClientTool as any)?.metadata,
  search_online: (SearchOnlineClientTool as any)?.metadata,
  search_documentation: (SearchDocumentationClientTool as any)?.metadata,
  get_environment_variables: (GetEnvironmentVariablesClientTool as any)?.metadata,
  search_patterns: (SearchPatternsClientTool as any)?.metadata,
  search_errors: (SearchErrorsClientTool as any)?.metadata,
  remember_debug: (RememberDebugClientTool as any)?.metadata,
  set_environment_variables: (SetEnvironmentVariablesClientTool as any)?.metadata,
  list_gdrive_files: (ListGDriveFilesClientTool as any)?.metadata,
  read_gdrive_file: (ReadGDriveFileClientTool as any)?.metadata,
  get_oauth_credentials: (GetOAuthCredentialsClientTool as any)?.metadata,
  get_credentials: (GetCredentialsClientTool as any)?.metadata,
  make_api_request: (MakeApiRequestClientTool as any)?.metadata,
  plan: (PlanClientTool as any)?.metadata,
  checkoff_todo: (CheckoffTodoClientTool as any)?.metadata,
@@ -126,6 +138,9 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
  oauth_request_access: (OAuthRequestAccessClientTool as any)?.metadata,
  get_operations_examples: (GetOperationsExamplesClientTool as any)?.metadata,
  summarize_conversation: (SummarizeClientTool as any)?.metadata,
  deploy_workflow: (DeployWorkflowClientTool as any)?.metadata,
  check_deployment_status: (CheckDeploymentStatusClientTool as any)?.metadata,
  navigate_ui: (NavigateUIClientTool as any)?.metadata,
}

function ensureClientToolInstance(toolName: string | undefined, toolCallId: string | undefined) {
@@ -156,10 +171,27 @@ function resolveToolDisplay(
  try {
    if (!toolName) return undefined
    const def = getTool(toolName) as any
    const meta = def?.metadata?.displayNames || CLASS_TOOL_METADATA[toolName]?.displayNames || {}
    const toolMetadata = def?.metadata || CLASS_TOOL_METADATA[toolName]
    const meta = toolMetadata?.displayNames || {}

    // Exact state first
    const ds = meta?.[state]
    if (ds?.text || ds?.icon) return { text: ds.text, icon: ds.icon }
    if (ds?.text || ds?.icon) {
      // Check if tool has a dynamic text formatter
      const getDynamicText = toolMetadata?.getDynamicText
      if (getDynamicText && params) {
        try {
          const dynamicText = getDynamicText(params, state)
          if (dynamicText) {
            return { text: dynamicText, icon: ds.icon }
          }
        } catch (e) {
          // Fall back to static text if formatter fails
        }
      }
      return { text: ds.text, icon: ds.icon }
    }

    // Fallback order (prefer pre-execution states for unknown states like pending)
    const fallbackOrder: ClientToolCallState[] = [
      (ClientToolCallState as any).generating,
@@ -328,6 +360,12 @@ function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
            },
          }
        }
        if (b?.type === TEXT_BLOCK_TYPE && typeof b.content === 'string') {
          return {
            ...b,
            content: stripTodoTags(b.content),
          }
        }
        return b
      })
    : []
@@ -366,13 +404,20 @@ function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
        })
      : (message as any).toolCalls

    const sanitizedContent = stripTodoTags(message.content || '')

    return {
      ...message,
      content: sanitizedContent,
      ...(updatedToolCalls && { toolCalls: updatedToolCalls }),
      ...(blocks.length > 0
        ? { contentBlocks: blocks }
        : message.content?.trim()
          ? { contentBlocks: [{ type: 'text', content: message.content, timestamp: Date.now() }] }
        : sanitizedContent.trim()
          ? {
              contentBlocks: [
                { type: TEXT_BLOCK_TYPE, content: sanitizedContent, timestamp: Date.now() },
              ],
            }
          : {}),
    }
  })
@@ -487,6 +532,16 @@ function createErrorMessage(messageId: string, content: string): CopilotMessage
  }
}

function stripTodoTags(text: string): string {
  if (!text) return text
  return text
    .replace(/<marktodo>[\s\S]*?<\/marktodo>/g, '')
    .replace(/<checkofftodo>[\s\S]*?<\/checkofftodo>/g, '')
    .replace(/<design_workflow>[\s\S]*?<\/design_workflow>/g, '')
    .replace(/[ \t]+\n/g, '\n')
    .replace(/\n{2,}/g, '\n')
}
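Editor's note: for example, stripTodoTags turns a streamed chunk like the following into plain prose (illustrative input and output, traced through the regexes above).

const raw = 'Added the Slack block.\n\n<checkofftodo>todo-2</checkofftodo>\n\nNext, configure the trigger.'
stripTodoTags(raw)
// => 'Added the Slack block.\nNext, configure the trigger.'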
function validateMessagesForLLM(messages: CopilotMessage[]): any[] {
  return messages
    .map((msg) => {
@@ -500,9 +555,13 @@ function validateMessagesForLLM(messages: CopilotMessage[]): any[] {
          .trim()
      }

      // Strip thinking tags from content
      // Strip thinking, design_workflow, and todo tags from content
      if (content) {
        content = content.replace(/<thinking>[\s\S]*?<\/thinking>/g, '').trim()
        content = stripTodoTags(
          content
            .replace(/<thinking>[\s\S]*?<\/thinking>/g, '')
            .replace(/<design_workflow>[\s\S]*?<\/design_workflow>/g, '')
        ).trim()
      }

      return {
@@ -549,6 +608,8 @@ interface StreamingContext {
  currentTextBlock: any | null
  isInThinkingBlock: boolean
  currentThinkingBlock: any | null
  isInDesignWorkflowBlock: boolean
  designWorkflowContent: string
  pendingContent: string
  newChatId?: string
  doneEventCount: number
@@ -772,6 +833,7 @@ const sseHandlers: Record<string, SSEHandler> = {
    const name: string | undefined = toolData.name || data?.toolName
    if (!id) return
    const args = toolData.arguments
    const isPartial = toolData.partial === true
    const { toolCallsById } = get()

    // Ensure class-based client tool instances are registered (for interrupts/display)
@@ -1003,7 +1065,7 @@ const sseHandlers: Record<string, SSEHandler> = {
    context.currentTextBlock = null
    updateStreamingMessage(set, context)
  },
  content: (data, context, _get, set) => {
  content: (data, context, get, set) => {
    if (!data.data) return
    context.pendingContent += data.data
@@ -1012,8 +1074,149 @@ const sseHandlers: Record<string, SSEHandler> = {

    const thinkingStartRegex = /<thinking>/
    const thinkingEndRegex = /<\/thinking>/
    const designWorkflowStartRegex = /<design_workflow>/
    const designWorkflowEndRegex = /<\/design_workflow>/

    const appendTextToContent = (text: string) => {
      if (!text) return
      context.accumulatedContent.append(text)
      if (context.currentTextBlock && context.contentBlocks.length > 0) {
        const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
        if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
          lastBlock.content += text
          return
        }
      }
      context.currentTextBlock = contentBlockPool.get()
      context.currentTextBlock.type = TEXT_BLOCK_TYPE
      context.currentTextBlock.content = text
      context.currentTextBlock.timestamp = Date.now()
      context.contentBlocks.push(context.currentTextBlock)
    }

    while (contentToProcess.length > 0) {
      // Handle design_workflow tags (takes priority over other content processing)
      if (context.isInDesignWorkflowBlock) {
        const endMatch = designWorkflowEndRegex.exec(contentToProcess)
        if (endMatch) {
          const designContent = contentToProcess.substring(0, endMatch.index)
          context.designWorkflowContent += designContent
          context.isInDesignWorkflowBlock = false

          // Update store with complete design workflow content (available in all modes)
          logger.info('[design_workflow] Tag complete, setting plan content', {
            contentLength: context.designWorkflowContent.length,
          })
          set({ streamingPlanContent: context.designWorkflowContent })

          contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
          hasProcessedContent = true
        } else {
          // Still in design_workflow block, accumulate content
          context.designWorkflowContent += contentToProcess

          // Update store with partial content for streaming effect (available in all modes)
          set({ streamingPlanContent: context.designWorkflowContent })

          contentToProcess = ''
          hasProcessedContent = true
        }
        continue
      }

      if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) {
        // Check for design_workflow start tag first
        const designStartMatch = designWorkflowStartRegex.exec(contentToProcess)
        if (designStartMatch) {
          const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index)
          if (textBeforeDesign) {
            appendTextToContent(textBeforeDesign)
            hasProcessedContent = true
          }
          context.isInDesignWorkflowBlock = true
          context.designWorkflowContent = ''
          contentToProcess = contentToProcess.substring(
            designStartMatch.index + designStartMatch[0].length
          )
          hasProcessedContent = true
          continue
        }

        const nextMarkIndex = contentToProcess.indexOf('<marktodo>')
        const nextCheckIndex = contentToProcess.indexOf('<checkofftodo>')
        const hasMark = nextMarkIndex >= 0
        const hasCheck = nextCheckIndex >= 0

        const nextTagIndex =
          hasMark && hasCheck
            ? Math.min(nextMarkIndex, nextCheckIndex)
            : hasMark
              ? nextMarkIndex
              : hasCheck
                ? nextCheckIndex
                : -1

        if (nextTagIndex >= 0) {
          const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex
          const tagStart = isMarkTodo ? '<marktodo>' : '<checkofftodo>'
          const tagEnd = isMarkTodo ? '</marktodo>' : '</checkofftodo>'
          const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length)

          if (closingIndex === -1) {
            // Partial tag; wait for additional content
            break
          }

          const todoId = contentToProcess
            .substring(nextTagIndex + tagStart.length, closingIndex)
            .trim()
          logger.info(
            isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag',
            { todoId }
          )

          if (todoId) {
            try {
              get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed')
              logger.info(
                isMarkTodo
                  ? '[TODO] Successfully marked todo in progress'
                  : '[TODO] Successfully checked off todo',
                { todoId }
              )
            } catch (e) {
              logger.error(
                isMarkTodo
                  ? '[TODO] Failed to mark todo in progress'
                  : '[TODO] Failed to checkoff todo',
                { todoId, error: e }
              )
            }
          } else {
            logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart })
          }

          // Remove the tag AND newlines around it, but preserve ONE newline if both sides had them
          let beforeTag = contentToProcess.substring(0, nextTagIndex)
          let afterTag = contentToProcess.substring(closingIndex + tagEnd.length)

          const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag)
          const hadNewlineAfter = /^(\r?\n)+/.test(afterTag)

          // Strip trailing newlines before the tag
          beforeTag = beforeTag.replace(/(\r?\n)+$/, '')
          // Strip leading newlines after the tag
          afterTag = afterTag.replace(/^(\r?\n)+/, '')

          // If there were newlines on both sides, add back ONE to preserve paragraph breaks
          contentToProcess =
            beforeTag + (hadNewlineBefore && hadNewlineAfter ? '\n' : '') + afterTag
          context.currentTextBlock = null
          hasProcessedContent = true
          continue
        }
      }

      if (context.isInThinkingBlock) {
        const endMatch = thinkingEndRegex.exec(contentToProcess)
        if (endMatch) {
@@ -1082,10 +1285,23 @@ const sseHandlers: Record<string, SSEHandler> = {
        contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length)
        hasProcessedContent = true
      } else {
        const partialTagIndex = contentToProcess.lastIndexOf('<')
        // Check if content might contain partial todo tags and hold them back
        let partialTagIndex = contentToProcess.lastIndexOf('<')

        // Also check for partial marktodo or checkofftodo tags
        const partialMarkTodo = contentToProcess.lastIndexOf('<marktodo')
        const partialCheckoffTodo = contentToProcess.lastIndexOf('<checkofftodo')

        if (partialMarkTodo > partialTagIndex) {
          partialTagIndex = partialMarkTodo
        }
        if (partialCheckoffTodo > partialTagIndex) {
          partialTagIndex = partialCheckoffTodo
        }

        let textToAdd = contentToProcess
        let remaining = ''
        if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 10) {
        if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) {
          textToAdd = contentToProcess.substring(0, partialTagIndex)
          remaining = contentToProcess.substring(partialTagIndex)
        }
@@ -1315,6 +1531,7 @@ const initialState = {
  inputValue: '',
  planTodos: [] as Array<{ id: string; content: string; completed?: boolean; executing?: boolean }>,
  showPlanTodos: false,
  streamingPlanContent: '',
  toolCallsById: {} as Record<string, CopilotToolCall>,
  suppressAutoSelect: false,
  contextUsage: null,
@@ -1327,7 +1544,7 @@ export const useCopilotStore = create<CopilotStore>()(
    // Basic mode controls
    setMode: (mode) => set({ mode }),

    // Clear messages
    // Clear messages (don't clear streamingPlanContent - let it persist)
    clearMessages: () => set({ messages: [], contextUsage: null }),

    // Workflow selection
@@ -1377,9 +1594,24 @@ export const useCopilotStore = create<CopilotStore>()(
        useWorkflowDiffStore.getState().clearDiff()
      } catch {}

      // Restore plan content and config (mode/model) from selected chat
      const planArtifact = chat.planArtifact || ''
      const chatConfig = chat.config || {}
      const chatMode = chatConfig.mode || get().mode
      const chatModel = chatConfig.model || get().selectedModel

      logger.info('[Chat] Restoring chat config', {
        chatId: chat.id,
        mode: chatMode,
        model: chatModel,
        hasPlanArtifact: !!planArtifact,
      })

      // Capture previous chat/messages for optimistic background save
      const previousChat = currentChat
      const previousMessages = get().messages
      const previousMode = get().mode
      const previousModel = get().selectedModel

      // Optimistically set selected chat and normalize messages for UI
      set({
@@ -1387,18 +1619,30 @@ export const useCopilotStore = create<CopilotStore>()(
        messages: normalizeMessagesForUI(chat.messages || []),
        planTodos: [],
        showPlanTodos: false,
        streamingPlanContent: planArtifact,
        mode: chatMode,
        selectedModel: chatModel as CopilotStore['selectedModel'],
        suppressAutoSelect: false,
        contextUsage: null,
      })

      // Background-save the previous chat's latest messages before switching (optimistic)
      // Background-save the previous chat's latest messages, plan artifact, and config before switching (optimistic)
      try {
        if (previousChat && previousChat.id !== chat.id) {
          const dbMessages = validateMessagesForLLM(previousMessages)
          const previousPlanArtifact = get().streamingPlanContent
          fetch('/api/copilot/chat/update-messages', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ chatId: previousChat.id, messages: dbMessages }),
            body: JSON.stringify({
              chatId: previousChat.id,
              messages: dbMessages,
              planArtifact: previousPlanArtifact || null,
              config: {
                mode: previousMode,
                model: previousModel,
              },
            }),
          }).catch(() => {})
        }
      } catch {}
@@ -1457,14 +1701,22 @@ export const useCopilotStore = create<CopilotStore>()(

      // Background-save the current chat before clearing (optimistic)
      try {
        const { currentChat } = get()
        const { currentChat, streamingPlanContent, mode, selectedModel } = get()
        if (currentChat) {
          const currentMessages = get().messages
          const dbMessages = validateMessagesForLLM(currentMessages)
          fetch('/api/copilot/chat/update-messages', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ chatId: currentChat.id, messages: dbMessages }),
            body: JSON.stringify({
              chatId: currentChat.id,
              messages: dbMessages,
              planArtifact: streamingPlanContent || null,
              config: {
                mode,
                model: selectedModel,
              },
            }),
          }).catch(() => {})
        }
      } catch {}
@@ -1476,6 +1728,7 @@ export const useCopilotStore = create<CopilotStore>()(
        messageCheckpoints: {},
        planTodos: [],
        showPlanTodos: false,
        streamingPlanContent: '',
        suppressAutoSelect: true,
        contextUsage: null,
      })
@@ -1549,6 +1802,12 @@ export const useCopilotStore = create<CopilotStore>()(
      } else {
        const normalizedMessages = normalizeMessagesForUI(updatedCurrentChat.messages || [])

        // Restore plan artifact and config from refreshed chat
        const refreshedPlanArtifact = updatedCurrentChat.planArtifact || ''
        const refreshedConfig = updatedCurrentChat.config || {}
        const refreshedMode = refreshedConfig.mode || get().mode
        const refreshedModel = refreshedConfig.model || get().selectedModel

        // Build toolCallsById map from all tool calls in normalized messages
        const toolCallsById: Record<string, CopilotToolCall> = {}
        for (const msg of normalizedMessages) {
@@ -1565,6 +1824,9 @@ export const useCopilotStore = create<CopilotStore>()(
          currentChat: updatedCurrentChat,
          messages: normalizedMessages,
          toolCallsById,
          streamingPlanContent: refreshedPlanArtifact,
          mode: refreshedMode,
          selectedModel: refreshedModel as CopilotStore['selectedModel'],
        })
      }
      try {
@@ -1574,6 +1836,19 @@ export const useCopilotStore = create<CopilotStore>()(
        const mostRecentChat: CopilotChat = data.chats[0]
        const normalizedMessages = normalizeMessagesForUI(mostRecentChat.messages || [])

        // Restore plan artifact and config from most recent chat
        const planArtifact = mostRecentChat.planArtifact || ''
        const chatConfig = mostRecentChat.config || {}
        const chatMode = chatConfig.mode || get().mode
        const chatModel = chatConfig.model || get().selectedModel

        logger.info('[Chat] Auto-selecting most recent chat with config', {
          chatId: mostRecentChat.id,
          mode: chatMode,
          model: chatModel,
          hasPlanArtifact: !!planArtifact,
        })

        // Build toolCallsById map from all tool calls in normalized messages
        const toolCallsById: Record<string, CopilotToolCall> = {}
        for (const msg of normalizedMessages) {
@@ -1590,6 +1865,9 @@ export const useCopilotStore = create<CopilotStore>()(
          currentChat: mostRecentChat,
          messages: normalizedMessages,
          toolCallsById,
          streamingPlanContent: planArtifact,
          mode: chatMode,
          selectedModel: chatModel as CopilotStore['selectedModel'],
        })
        try {
          await get().loadMessageCheckpoints(mostRecentChat.id)
@@ -1682,12 +1960,26 @@ export const useCopilotStore = create<CopilotStore>()(
        })
      } catch {}

      // Prepend design document to message if available
      const { streamingPlanContent } = get()
      let messageToSend = message
      if (streamingPlanContent?.trim()) {
        messageToSend = `Design Document:\n\n${streamingPlanContent}\n\n==============\n\nUser Query:\n\n${message}`
        logger.info('[DesignDocument] Prepending plan content to message', {
          planLength: streamingPlanContent.length,
          originalMessageLength: message.length,
          finalMessageLength: messageToSend.length,
        })
      }

      const apiMode: 'ask' | 'agent' | 'plan' =
        mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'
      const result = await sendStreamingMessage({
        message,
        message: messageToSend,
        userMessageId: userMessage.id,
        chatId: currentChat?.id,
        workflowId,
        mode: mode === 'ask' ? 'ask' : 'agent',
        mode: apiMode,
        model: get().selectedModel,
        prefetch: get().agentPrefetch,
        createNewChat: !currentChat,
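Editor's note: when a plan artifact exists, the outgoing message is wrapped as sketched below (values are illustrative; the wrapper string is the template literal shown above).

// streamingPlanContent = '1. Add a Schedule trigger\n2. Add an Agent block'
// message              = 'Build it'
// messageToSend        = 'Design Document:\n\n1. Add a Schedule trigger\n2. Add an Agent block\n\n' +
//                        '==============\n\nUser Query:\n\nBuild it'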
@@ -1776,14 +2068,18 @@ export const useCopilotStore = create<CopilotStore>()(
          abortController: null,
        }))
      } else {
        set({ isSendingMessage: false, isAborting: false, abortController: null })
        set({
          isSendingMessage: false,
          isAborting: false,
          abortController: null,
        })
      }

      // Immediately put all in-progress tools into aborted state
      abortAllInProgressTools(set, get)

      // Persist whatever contentBlocks/text we have to keep ordering for reloads
      const { currentChat } = get()
      const { currentChat, streamingPlanContent, mode, selectedModel } = get()
      if (currentChat) {
        try {
          const currentMessages = get().messages
@@ -1791,7 +2087,15 @@ export const useCopilotStore = create<CopilotStore>()(
          fetch('/api/copilot/chat/update-messages', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ chatId: currentChat.id, messages: dbMessages }),
            body: JSON.stringify({
              chatId: currentChat.id,
              messages: dbMessages,
              planArtifact: streamingPlanContent || null,
              config: {
                mode,
                model: selectedModel,
              },
            }),
          }).catch(() => {})
        } catch {}
      }
@@ -1817,11 +2121,13 @@ export const useCopilotStore = create<CopilotStore>()(
      const newAssistantMessage = createStreamingMessage()
      set((state) => ({ messages: [...state.messages, newAssistantMessage] }))
      try {
        const apiMode: 'ask' | 'agent' | 'plan' =
          mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'
        const result = await sendStreamingMessage({
          message: 'Please continue your response.',
          chatId: currentChat?.id,
          workflowId,
          mode: mode === 'ask' ? 'ask' : 'agent',
          mode: apiMode,
          model: selectedModel,
          prefetch: get().agentPrefetch,
          createNewChat: !currentChat,
@@ -2131,6 +2437,8 @@ export const useCopilotStore = create<CopilotStore>()(
      currentTextBlock: null,
      isInThinkingBlock: false,
      currentThinkingBlock: null,
      isInDesignWorkflowBlock: false,
      designWorkflowContent: '',
      pendingContent: '',
      doneEventCount: 0,
    }
@@ -2169,6 +2477,16 @@ export const useCopilotStore = create<CopilotStore>()(
      }
      streamingUpdateQueue.clear()

      let sanitizedContentBlocks: any[] = []
      if (context.contentBlocks && context.contentBlocks.length > 0) {
        const optimizedBlocks = createOptimizedContentBlocks(context.contentBlocks)
        sanitizedContentBlocks = optimizedBlocks.map((block: any) =>
          block.type === TEXT_BLOCK_TYPE && typeof block.content === 'string'
            ? { ...block, content: stripTodoTags(block.content) }
            : block
        )
      }

      if (context.contentBlocks) {
        context.contentBlocks.forEach((block) => {
          if (block.type === TEXT_BLOCK_TYPE || block.type === THINKING_BLOCK_TYPE) {
@@ -2177,14 +2495,14 @@ export const useCopilotStore = create<CopilotStore>()(
        })
      }

      const finalContent = context.accumulatedContent.toString()
      const finalContent = stripTodoTags(context.accumulatedContent.toString())
      set((state) => ({
        messages: state.messages.map((msg) =>
          msg.id === assistantMessageId
            ? {
                ...msg,
                content: finalContent,
                contentBlocks: context.contentBlocks,
                contentBlocks: sanitizedContentBlocks,
              }
            : msg
        ),
@@ -2197,16 +2515,35 @@ export const useCopilotStore = create<CopilotStore>()(
        await get().handleNewChatCreation(context.newChatId)
      }

      // Persist full message state (including contentBlocks) to database
      const { currentChat } = get()
      // Persist full message state (including contentBlocks), plan artifact, and config to database
      const { currentChat, streamingPlanContent, mode, selectedModel } = get()
      if (currentChat) {
        try {
          const currentMessages = get().messages
          const dbMessages = validateMessagesForLLM(currentMessages)
          const config = {
            mode,
            model: selectedModel,
          }

          await fetch('/api/copilot/chat/update-messages', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ chatId: currentChat.id, messages: dbMessages }),
            body: JSON.stringify({
              chatId: currentChat.id,
              messages: dbMessages,
              planArtifact: streamingPlanContent || null,
              config,
            }),
          })

          // Update local chat object with plan artifact and config
          set({
            currentChat: {
              ...currentChat,
              planArtifact: streamingPlanContent || null,
              config,
            },
          })
        } catch {}
      }
@@ -2226,13 +2563,19 @@ export const useCopilotStore = create<CopilotStore>()(

    // Handle new chat creation from stream
    handleNewChatCreation: async (newChatId: string) => {
      const { mode, selectedModel, streamingPlanContent } = get()
      const newChat: CopilotChat = {
        id: newChatId,
        title: null,
        model: 'gpt-4',
        model: selectedModel,
        messages: get().messages,
        messageCount: get().messages.length,
        previewYaml: null,
        planArtifact: streamingPlanContent || null,
        config: {
          mode,
          model: selectedModel,
        },
        createdAt: new Date(),
        updatedAt: new Date(),
      }
@@ -2298,6 +2641,93 @@ export const useCopilotStore = create<CopilotStore>()(
    },
    closePlanTodos: () => set({ showPlanTodos: false }),

    clearPlanArtifact: async () => {
      const { currentChat } = get()

      // Clear from local state
      set({ streamingPlanContent: '' })

      // Update database if we have a current chat
      if (currentChat) {
        try {
          const currentMessages = get().messages
          const dbMessages = validateMessagesForLLM(currentMessages)
          const { mode, selectedModel } = get()

          await fetch('/api/copilot/chat/update-messages', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
              chatId: currentChat.id,
              messages: dbMessages,
              planArtifact: null,
              config: {
                mode,
                model: selectedModel,
              },
            }),
          })

          // Update local chat object
          set({
            currentChat: {
              ...currentChat,
              planArtifact: null,
            },
          })

          logger.info('[PlanArtifact] Cleared plan artifact', { chatId: currentChat.id })
        } catch (error) {
          logger.error('[PlanArtifact] Failed to clear plan artifact', error)
        }
      }
    },

    savePlanArtifact: async (content: string) => {
      const { currentChat } = get()

      // Update local state
      set({ streamingPlanContent: content })

      // Update database if we have a current chat
      if (currentChat) {
        try {
          const currentMessages = get().messages
          const dbMessages = validateMessagesForLLM(currentMessages)
          const { mode, selectedModel } = get()

          await fetch('/api/copilot/chat/update-messages', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
              chatId: currentChat.id,
              messages: dbMessages,
              planArtifact: content,
              config: {
                mode,
                model: selectedModel,
              },
            }),
          })

          // Update local chat object
          set({
            currentChat: {
              ...currentChat,
              planArtifact: content,
            },
          })

          logger.info('[PlanArtifact] Saved plan artifact', {
            chatId: currentChat.id,
            contentLength: content.length,
          })
        } catch (error) {
          logger.error('[PlanArtifact] Failed to save plan artifact', error)
        }
      }
    },

    // Diff updates are out of scope for minimal store
    updateDiffStore: async (_yamlContent: string) => {},
    updateDiffStoreWithWorkflowState: async (_workflowState: any) => {},
@@ -53,18 +53,11 @@ export type ChatContext =
  | { kind: 'templates'; templateId?: string; label: string }
  | { kind: 'docs'; label: string }

export interface CopilotChat {
  id: string
  title: string | null
  model: string
  messages: CopilotMessage[]
  messageCount: number
  previewYaml: string | null
  createdAt: Date
  updatedAt: Date
}
import type { CopilotChat as ApiCopilotChat } from '@/lib/copilot/api'

export type CopilotMode = 'ask' | 'build'
export type CopilotChat = ApiCopilotChat

export type CopilotMode = 'ask' | 'build' | 'plan'

export interface CopilotState {
  mode: CopilotMode
@@ -115,6 +108,9 @@ export interface CopilotState {
  planTodos: Array<{ id: string; content: string; completed?: boolean; executing?: boolean }>
  showPlanTodos: boolean

  // Streaming plan content from design_workflow tool (for plan mode section)
  streamingPlanContent: string

  // Map of toolCallId -> CopilotToolCall for quick access during streaming
  toolCallsById: Record<string, CopilotToolCall>

@@ -198,6 +194,8 @@ export interface CopilotActions {
  ) => void
  updatePlanTodoStatus: (id: string, status: 'executing' | 'completed') => void
  closePlanTodos: () => void
  clearPlanArtifact: () => Promise<void>
  savePlanArtifact: (content: string) => Promise<void>

  handleStreamingResponse: (
    stream: ReadableStream,
@@ -1,3 +1,3 @@
export { useUndoRedoStore } from './store'
export { runWithUndoRedoRecordingSuspended, useUndoRedoStore } from './store'
export * from './types'
export * from './utils'

@@ -16,6 +16,29 @@ const logger = createLogger('UndoRedoStore')
const DEFAULT_CAPACITY = 100
const MAX_STACKS = 5

let recordingSuspendDepth = 0

function isRecordingSuspended(): boolean {
  return recordingSuspendDepth > 0
}

/**
 * Temporarily suspends undo/redo recording while the provided callback runs.
 *
 * @param callback - Function to execute while recording is disabled.
 * @returns The callback result.
 */
export async function runWithUndoRedoRecordingSuspended<T>(
  callback: () => Promise<T> | T
): Promise<T> {
  recordingSuspendDepth += 1
  try {
    return await Promise.resolve(callback())
  } finally {
    recordingSuspendDepth = Math.max(0, recordingSuspendDepth - 1)
  }
}

function getStackKey(workflowId: string, userId: string): string {
  return `${workflowId}:${userId}`
}
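Editor's note: typical use of the new helper, e.g. when programmatically applying state without polluting the user's undo stack. `applyWorkflowState` is a stand-in for whatever mutation the caller performs, not an API from this PR.

await runWithUndoRedoRecordingSuspended(async () => {
  // any push() calls triggered inside this callback are skipped
  applyWorkflowState(proposedState)
})
// recording resumes here, even if the callback threw, thanks to the finally block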
@@ -106,6 +129,15 @@ export const useUndoRedoStore = create<UndoRedoState>()(
  capacity: DEFAULT_CAPACITY,

  push: (workflowId: string, userId: string, entry: OperationEntry) => {
    if (isRecordingSuspended()) {
      logger.debug('Skipped push while undo/redo recording suspended', {
        workflowId,
        userId,
        operationType: entry.operation.type,
      })
      return
    }

    const key = getStackKey(workflowId, userId)
    const state = get()
    const currentStacks = { ...state.stacks }
@@ -131,6 +163,43 @@ export const useUndoRedoStore = create<UndoRedoState>()(

    const stack = currentStacks[key] || { undo: [], redo: [] }

    // Prevent duplicate diff operations (apply-diff, accept-diff, reject-diff)
    if (['apply-diff', 'accept-diff', 'reject-diff'].includes(entry.operation.type)) {
      const lastEntry = stack.undo[stack.undo.length - 1]
      if (lastEntry && lastEntry.operation.type === entry.operation.type) {
        // Check if it's a duplicate by comparing the relevant state data
        const lastData = lastEntry.operation.data as any
        const newData = entry.operation.data as any

        // For each diff operation type, check the relevant state
        let isDuplicate = false
        if (entry.operation.type === 'apply-diff') {
          isDuplicate =
            JSON.stringify(lastData.baselineSnapshot?.blocks) ===
              JSON.stringify(newData.baselineSnapshot?.blocks) &&
            JSON.stringify(lastData.proposedState?.blocks) ===
              JSON.stringify(newData.proposedState?.blocks)
        } else if (entry.operation.type === 'accept-diff') {
          isDuplicate =
            JSON.stringify(lastData.afterAccept?.blocks) ===
            JSON.stringify(newData.afterAccept?.blocks)
        } else if (entry.operation.type === 'reject-diff') {
          isDuplicate =
            JSON.stringify(lastData.afterReject?.blocks) ===
            JSON.stringify(newData.afterReject?.blocks)
        }

        if (isDuplicate) {
          logger.debug('Skipping duplicate diff operation', {
            type: entry.operation.type,
            workflowId,
            userId,
          })
          return
        }
      }
    }

    // Coalesce consecutive move-block operations for the same block
    if (entry.operation.type === 'move-block') {
      const incoming = entry.operation as MoveBlockOperation
@@ -12,6 +12,9 @@ export type OperationType =
  | 'move-subflow'
  | 'duplicate-block'
  | 'update-parent'
  | 'apply-diff'
  | 'accept-diff'
  | 'reject-diff'

export interface BaseOperation {
  id: string
@@ -122,6 +125,35 @@ export interface UpdateParentOperation extends BaseOperation {
  }
}

export interface ApplyDiffOperation extends BaseOperation {
  type: 'apply-diff'
  data: {
    baselineSnapshot: any // WorkflowState snapshot before diff
    proposedState: any // WorkflowState with diff applied
    diffAnalysis: any // DiffAnalysis for re-applying markers
  }
}

export interface AcceptDiffOperation extends BaseOperation {
  type: 'accept-diff'
  data: {
    beforeAccept: any // WorkflowState with diff markers
    afterAccept: any // WorkflowState without diff markers
    diffAnalysis: any // DiffAnalysis to restore markers on undo
    baselineSnapshot: any // Baseline workflow state
  }
}

export interface RejectDiffOperation extends BaseOperation {
  type: 'reject-diff'
  data: {
    beforeReject: any // WorkflowState with diff markers
    afterReject: any // WorkflowState baseline (after reject)
    diffAnalysis: any // DiffAnalysis to restore markers on undo
    baselineSnapshot: any // Baseline workflow state
  }
}

export type Operation =
  | AddBlockOperation
  | RemoveBlockOperation
@@ -133,6 +165,9 @@ export type Operation =
  | MoveSubflowOperation
  | DuplicateBlockOperation
  | UpdateParentOperation
  | ApplyDiffOperation
  | AcceptDiffOperation
  | RejectDiffOperation

export interface OperationEntry {
  id: string
@@ -113,6 +113,38 @@ export function createInverseOperation(operation: Operation): Operation {
      },
    }

    case 'apply-diff':
      return {
        ...operation,
        data: {
          baselineSnapshot: operation.data.proposedState,
          proposedState: operation.data.baselineSnapshot,
          diffAnalysis: operation.data.diffAnalysis,
        },
      }

    case 'accept-diff':
      return {
        ...operation,
        data: {
          beforeAccept: operation.data.afterAccept,
          afterAccept: operation.data.beforeAccept,
          diffAnalysis: operation.data.diffAnalysis,
          baselineSnapshot: operation.data.baselineSnapshot,
        },
      }

    case 'reject-diff':
      return {
        ...operation,
        data: {
          beforeReject: operation.data.afterReject,
          afterReject: operation.data.beforeReject,
          diffAnalysis: operation.data.diffAnalysis,
          baselineSnapshot: operation.data.baselineSnapshot,
        },
      }

    default: {
      const exhaustiveCheck: never = operation
      throw new Error(`Unhandled operation type: ${(exhaustiveCheck as any).type}`)
@@ -213,6 +245,33 @@ export function operationToCollaborativePayload(operation: Operation): {
      },
    }

    case 'apply-diff':
      return {
        operation: 'apply-diff',
        target: 'workflow',
        payload: {
          diffAnalysis: operation.data.diffAnalysis,
        },
      }

    case 'accept-diff':
      return {
        operation: 'accept-diff',
        target: 'workflow',
        payload: {
          diffAnalysis: operation.data.diffAnalysis,
        },
      }

    case 'reject-diff':
      return {
        operation: 'reject-diff',
        target: 'workflow',
        payload: {
          diffAnalysis: operation.data.diffAnalysis,
        },
      }

    default: {
      const exhaustiveCheck: never = operation
      throw new Error(`Unhandled operation type: ${(exhaustiveCheck as any).type}`)
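Editor's note: the inverse of an inverse restores the original payload, which is what lets one code path serve both undo and redo. A sketch, with placeholder values (`before`, `after`, `analysis`) standing in for real snapshots:

const applyDiff = {
  id: 'op-1',
  type: 'apply-diff',
  data: { baselineSnapshot: before, proposedState: after, diffAnalysis: analysis },
} as ApplyDiffOperation

const undoOp = createInverseOperation(applyDiff)
// undoOp.data.baselineSnapshot === after, undoOp.data.proposedState === before
const redoOp = createInverseOperation(undoOp)
// redoOp.data swaps back to the original baseline/proposed pairing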
File diff suppressed because it is too large
@@ -157,5 +157,13 @@ export const useSubBlockStore = create<SubBlockStore>()(
        }
      })
    },
    setWorkflowValues: (workflowId: string, values: Record<string, Record<string, any>>) => {
      set((state) => ({
        workflowValues: {
          ...state.workflowValues,
          [workflowId]: values,
        },
      }))
    },
  }))
)

@@ -11,4 +11,5 @@ export interface SubBlockStore extends SubBlockState {
  getValue: (blockId: string, subBlockId: string) => any
  clear: () => void
  initializeFromWorkflow: (workflowId: string, blocks: Record<string, any>) => void
  setWorkflowValues: (workflowId: string, values: Record<string, Record<string, any>>) => void
}
@@ -522,6 +522,43 @@ export const useWorkflowStore = create<WorkflowStore>()(
        needsRedeployment: state.needsRedeployment,
      }
    },
    replaceWorkflowState: (
      workflowState: WorkflowState,
      options?: { updateLastSaved?: boolean }
    ) => {
      set((state) => {
        const nextBlocks = workflowState.blocks || {}
        const nextEdges = workflowState.edges || []
        const nextLoops =
          Object.keys(workflowState.loops || {}).length > 0
            ? workflowState.loops
            : generateLoopBlocks(nextBlocks)
        const nextParallels =
          Object.keys(workflowState.parallels || {}).length > 0
            ? workflowState.parallels
            : generateParallelBlocks(nextBlocks)

        return {
          ...state,
          blocks: nextBlocks,
          edges: nextEdges,
          loops: nextLoops,
          parallels: nextParallels,
          isDeployed:
            workflowState.isDeployed !== undefined ? workflowState.isDeployed : state.isDeployed,
          deployedAt: workflowState.deployedAt ?? state.deployedAt,
          deploymentStatuses: workflowState.deploymentStatuses || state.deploymentStatuses,
          needsRedeployment:
            workflowState.needsRedeployment !== undefined
              ? workflowState.needsRedeployment
              : state.needsRedeployment,
          lastSaved:
            options?.updateLastSaved === true
              ? Date.now()
              : (workflowState.lastSaved ?? state.lastSaved),
        }
      })
    },

    toggleBlockEnabled: (id: string) => {
      const newState = {

@@ -226,6 +226,10 @@ export interface WorkflowActions {
  setDragStartPosition: (position: DragStartPosition | null) => void
  getDragStartPosition: () => DragStartPosition | null
  getWorkflowState: () => WorkflowState
  replaceWorkflowState: (
    workflowState: WorkflowState,
    options?: { updateLastSaved?: boolean }
  ) => void
}

export type WorkflowStore = WorkflowState & WorkflowActions
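Editor's note: a typical call site for the new action, e.g. after receiving a replace-state broadcast from the socket layer. How the state arrives is up to the caller; this is a sketch, not code from the PR.

const incomingState = broadcastData.payload.state as WorkflowState

useWorkflowStore.getState().replaceWorkflowState(incomingState, { updateLastSaved: false })
// loops/parallels are regenerated from the blocks when the incoming state omits them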
packages/db/migrations/0111_solid_dreadnoughts.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE "copilot_chats" ADD COLUMN "plan_artifact" text;--> statement-breakpoint
ALTER TABLE "copilot_chats" ADD COLUMN "config" jsonb;
packages/db/migrations/meta/0111_snapshot.json (new file, 7684 lines; diff suppressed because it is too large)
@@ -771,6 +771,13 @@
      "when": 1763511770301,
      "tag": "0110_broken_paladin",
      "breakpoints": true
    },
    {
      "idx": 111,
      "version": "7",
      "when": 1763667488537,
      "tag": "0111_solid_dreadnoughts",
      "breakpoints": true
    }
  ]
}

@@ -1250,6 +1250,8 @@ export const copilotChats = pgTable(
    model: text('model').notNull().default('claude-3-7-sonnet-latest'),
    conversationId: text('conversation_id'),
    previewYaml: text('preview_yaml'), // YAML content for pending workflow preview
    planArtifact: text('plan_artifact'), // Plan/design document artifact for the chat
    config: jsonb('config'), // JSON config storing model and mode settings { model, mode }
    createdAt: timestamp('created_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
  },