Merge pull request #646 from simstudioai/feat/ask-docs

feat(yaml workflow): yaml workflow representation + doc embeddings
This commit is contained in:
Vikhyath Mondreti
2025-07-09 13:27:10 -07:00
committed by GitHub
49 changed files with 18151 additions and 459 deletions

View File

@@ -0,0 +1,281 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
type CopilotChat,
type CopilotMessage,
createChat,
generateChatTitle,
generateDocsResponse,
getChat,
updateChat,
} from '@/lib/copilot/service'
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('CopilotDocsAPI')
// Schema for docs-RAG queries.
// `topK` bounds the number of retrieved documentation chunks (1-20, default 5);
// `chatId` loads an existing conversation, while `createNewChat` + `workflowId`
// starts a fresh one; `stream` selects SSE streaming over a JSON response.
const DocsQuerySchema = z.object({
  query: z.string().min(1, 'Query is required'),
  topK: z.number().min(1).max(20).default(5),
  provider: z.string().optional(),
  model: z.string().optional(),
  stream: z.boolean().optional().default(false),
  chatId: z.string().optional(),
  workflowId: z.string().optional(),
  createNewChat: z.boolean().optional().default(false),
})
/**
 * POST /api/copilot/docs
 * Ask questions about documentation using RAG (retrieval-augmented generation).
 *
 * Optionally streams the answer as server-sent events (a `metadata` event with
 * sources/citations, `content` chunks, then a `done` marker) and persists the
 * conversation to an existing chat (`chatId`) or a newly created one
 * (`createNewChat` + `workflowId`).
 */
export async function POST(req: NextRequest) {
  const requestId = crypto.randomUUID()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = await req.json()
    const { query, topK, provider, model, stream, chatId, workflowId, createNewChat } =
      DocsQuerySchema.parse(body)

    logger.info(`[${requestId}] Docs RAG query: "${query}"`, {
      provider,
      model,
      topK,
      chatId,
      workflowId,
      createNewChat,
      userId: session.user.id,
    })

    // Resolve chat context: load an existing chat, or create a new one.
    let currentChat: CopilotChat | null = null
    let conversationHistory: CopilotMessage[] = []

    if (chatId) {
      currentChat = await getChat(chatId, session.user.id)
      if (currentChat) {
        conversationHistory = currentChat.messages
      }
    } else if (createNewChat && workflowId) {
      currentChat = await createChat(session.user.id, workflowId)
    }

    // Generate docs response
    const result = await generateDocsResponse(query, conversationHistory, {
      topK,
      provider,
      model,
      stream,
      workflowId,
      requestId,
    })

    // Numbered citations derived from the retrieved sources. Built once and
    // shared by the SSE metadata event, the persisted assistant message, and
    // the non-streaming path (previously duplicated three times).
    const citations = result.sources.map((source, index) => ({
      id: index + 1,
      title: source.title,
      url: source.url,
    }))

    if (stream && result.response instanceof ReadableStream) {
      // Handle streaming response with docs sources
      logger.info(`[${requestId}] Returning streaming docs response`)

      const encoder = new TextEncoder()
      // One stateful decoder for the whole stream. `stream: true` (below)
      // buffers partial multi-byte UTF-8 sequences across chunk boundaries;
      // the previous per-chunk `new TextDecoder().decode(value)` could emit
      // replacement characters when a code point was split between chunks.
      const decoder = new TextDecoder()

      return new Response(
        new ReadableStream({
          async start(controller) {
            const reader = (result.response as ReadableStream).getReader()
            let accumulatedResponse = ''

            try {
              // Send initial metadata including sources
              const metadata = {
                type: 'metadata',
                chatId: currentChat?.id,
                sources: result.sources,
                citations,
                metadata: {
                  requestId,
                  chunksFound: result.sources.length,
                  query,
                  topSimilarity: result.sources[0]?.similarity,
                  provider,
                  model,
                },
              }
              controller.enqueue(encoder.encode(`data: ${JSON.stringify(metadata)}\n\n`))

              while (true) {
                const { done, value } = await reader.read()
                if (done) break

                const chunk = decoder.decode(value, { stream: true })
                // Clean up any object serialization artifacts in streaming content
                const cleanedChunk = chunk.replace(/\[object Object\],?/g, '')
                accumulatedResponse += cleanedChunk

                const contentChunk = {
                  type: 'content',
                  content: cleanedChunk,
                }
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(contentChunk)}\n\n`))
              }

              // Send completion marker first to unblock the user
              controller.enqueue(encoder.encode(`data: {"type":"done"}\n\n`))

              // Save conversation to database asynchronously (non-blocking)
              if (currentChat) {
                // Fire-and-forget database save to avoid blocking stream completion
                Promise.resolve()
                  .then(async () => {
                    try {
                      const userMessage: CopilotMessage = {
                        id: crypto.randomUUID(),
                        role: 'user',
                        content: query,
                        timestamp: new Date().toISOString(),
                      }

                      const assistantMessage: CopilotMessage = {
                        id: crypto.randomUUID(),
                        role: 'assistant',
                        content: accumulatedResponse,
                        timestamp: new Date().toISOString(),
                        citations,
                      }

                      const updatedMessages = [
                        ...conversationHistory,
                        userMessage,
                        assistantMessage,
                      ]

                      // Generate title if this is the first message
                      let updatedTitle = currentChat.title ?? undefined
                      if (!updatedTitle && conversationHistory.length === 0) {
                        updatedTitle = await generateChatTitle(query)
                      }

                      // Update the chat in database
                      await updateChat(currentChat.id, session.user.id, {
                        title: updatedTitle,
                        messages: updatedMessages,
                      })

                      logger.info(
                        `[${requestId}] Updated chat ${currentChat.id} with new docs messages`
                      )
                    } catch (dbError) {
                      logger.error(`[${requestId}] Failed to save chat to database:`, dbError)
                      // Database errors don't affect the user's streaming experience
                    }
                  })
                  .catch((error) => {
                    logger.error(`[${requestId}] Unexpected error in async database save:`, error)
                  })
              }
            } catch (error) {
              logger.error(`[${requestId}] Docs streaming error:`, error)
              try {
                const errorChunk = {
                  type: 'error',
                  error: 'Streaming failed',
                }
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
              } catch (enqueueError) {
                logger.error(`[${requestId}] Failed to enqueue error response:`, enqueueError)
              }
            } finally {
              controller.close()
            }
          },
        }),
        {
          headers: {
            'Content-Type': 'text/event-stream',
            'Cache-Control': 'no-cache',
            Connection: 'keep-alive',
          },
        }
      )
    }

    // Handle non-streaming response
    logger.info(`[${requestId}] Docs RAG response generated successfully`)

    // Save conversation to database if we have a chat
    if (currentChat) {
      const userMessage: CopilotMessage = {
        id: crypto.randomUUID(),
        role: 'user',
        content: query,
        timestamp: new Date().toISOString(),
      }

      const assistantMessage: CopilotMessage = {
        id: crypto.randomUUID(),
        role: 'assistant',
        content: typeof result.response === 'string' ? result.response : '[Streaming Response]',
        timestamp: new Date().toISOString(),
        citations,
      }

      const updatedMessages = [...conversationHistory, userMessage, assistantMessage]

      // Generate title if this is the first message
      let updatedTitle = currentChat.title ?? undefined
      if (!updatedTitle && conversationHistory.length === 0) {
        updatedTitle = await generateChatTitle(query)
      }

      // Update the chat in database
      await updateChat(currentChat.id, session.user.id, {
        title: updatedTitle,
        messages: updatedMessages,
      })

      logger.info(`[${requestId}] Updated chat ${currentChat.id} with new docs messages`)
    }

    return NextResponse.json({
      success: true,
      response: result.response,
      sources: result.sources,
      chatId: currentChat?.id,
      metadata: {
        requestId,
        chunksFound: result.sources.length,
        query,
        topSimilarity: result.sources[0]?.similarity,
        provider,
        model,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Copilot docs error:`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

View File

@@ -1,214 +1,425 @@
import { NextResponse } from 'next/server'
import { OpenAI } from 'openai'
import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
createChat,
deleteChat,
generateChatTitle,
getChat,
listChats,
sendMessage,
updateChat,
} from '@/lib/copilot/service'
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('CopilotAPI')
const MessageSchema = z.object({
role: z.enum(['user', 'assistant', 'system']),
content: z.string(),
// Shape returned by providers that stream a response while also executing
// tool calls: the readable stream for SSE plus a promise for the final
// execution result (see the StreamingExecution handling in POST below).
interface StreamingExecution {
  stream: ReadableStream
  execution: Promise<any>
}

// Schema for sending messages via POST /api/copilot. Either an existing
// `chatId` or `createNewChat` + `workflowId` selects the conversation.
const SendMessageSchema = z.object({
  message: z.string().min(1, 'Message is required'),
  chatId: z.string().optional(),
  workflowId: z.string().optional(),
  createNewChat: z.boolean().optional().default(false),
  stream: z.boolean().optional().default(false),
})
const RequestSchema = z.object({
messages: z.array(MessageSchema),
workflowState: z.object({
blocks: z.record(z.any()),
edges: z.array(z.any()),
}),
// Schema for docs queries
const DocsQuerySchema = z.object({
query: z.string().min(1, 'Query is required'),
topK: z.number().min(1).max(20).default(5),
provider: z.string().optional(),
model: z.string().optional(),
stream: z.boolean().optional().default(false),
chatId: z.string().optional(),
workflowId: z.string().optional(),
createNewChat: z.boolean().optional().default(false),
})
const workflowActions = {
addBlock: {
description: 'Add one new block to the workflow',
parameters: {
type: 'object',
required: ['type'],
properties: {
type: {
type: 'string',
enum: ['agent', 'api', 'condition', 'function', 'router'],
description: 'The type of block to add',
},
name: {
type: 'string',
description:
'Optional custom name for the block. Do not provide a name unless the user has specified it.',
},
position: {
type: 'object',
description:
'Optional position for the block. Do not provide a position unless the user has specified it.',
properties: {
x: { type: 'number' },
y: { type: 'number' },
},
},
},
},
},
addEdge: {
description: 'Create a connection (edge) between two blocks',
parameters: {
type: 'object',
required: ['sourceId', 'targetId'],
properties: {
sourceId: {
type: 'string',
description: 'ID of the source block',
},
targetId: {
type: 'string',
description: 'ID of the target block',
},
sourceHandle: {
type: 'string',
description: 'Optional handle identifier for the source connection point',
},
targetHandle: {
type: 'string',
description: 'Optional handle identifier for the target connection point',
},
},
},
},
removeBlock: {
description: 'Remove a block from the workflow',
parameters: {
type: 'object',
required: ['id'],
properties: {
id: { type: 'string', description: 'ID of the block to remove' },
},
},
},
removeEdge: {
description: 'Remove a connection (edge) between blocks',
parameters: {
type: 'object',
required: ['id'],
properties: {
id: { type: 'string', description: 'ID of the edge to remove' },
},
},
},
}
// Schema for creating chats (PUT /api/copilot). `title` and `initialMessage`
// are optional; a title may be generated later from the first user message.
const CreateChatSchema = z.object({
  workflowId: z.string().min(1, 'Workflow ID is required'),
  title: z.string().optional(),
  initialMessage: z.string().optional(),
})
// System prompt that references workflow state
const getSystemPrompt = (workflowState: any) => {
const blockCount = Object.keys(workflowState.blocks).length
const edgeCount = workflowState.edges.length
// Schema for updating chats (PATCH /api/copilot). `messages` replaces the
// stored message list; each message may carry numbered doc citations.
const UpdateChatSchema = z.object({
  chatId: z.string().min(1, 'Chat ID is required'),
  messages: z
    .array(
      z.object({
        id: z.string(),
        role: z.enum(['user', 'assistant', 'system']),
        content: z.string(),
        timestamp: z.string(),
        citations: z
          .array(
            z.object({
              id: z.number(),
              title: z.string(),
              url: z.string(),
              similarity: z.number().optional(),
            })
          )
          .optional(),
      })
    )
    .optional(),
  title: z.string().optional(),
})
// Create a summary of existing blocks
const blockSummary = Object.values(workflowState.blocks)
.map((block: any) => `- ${block.type} block named "${block.name}" with id ${block.id}`)
.join('\n')
// Schema for listing chats (GET /api/copilot): pagination bounded to
// 1-100 items per page, offset >= 0.
const ListChatsSchema = z.object({
  workflowId: z.string().min(1, 'Workflow ID is required'),
  limit: z.number().min(1).max(100).optional().default(50),
  offset: z.number().min(0).optional().default(0),
})
// Create a summary of existing edges
const edgeSummary = workflowState.edges
.map((edge: any) => `- ${edge.source} -> ${edge.target} with id ${edge.id}`)
.join('\n')
return `You are a workflow assistant that helps users modify their workflow by adding/removing blocks and connections.
Current Workflow State:
${
blockCount === 0
? 'The workflow is empty.'
: `${blockSummary}
Connections:
${edgeCount === 0 ? 'No connections between blocks.' : edgeSummary}`
}
When users request changes:
- Consider existing blocks when suggesting connections
- Provide clear feedback about what actions you've taken
Use the following functions to modify the workflow:
1. Use the addBlock function to create a new block
2. Use the addEdge function to connect one block to another
3. Use the removeBlock function to remove a block
4. Use the removeEdge function to remove a connection
Only use the provided functions and respond naturally to the user's requests.`
}
export async function POST(request: Request) {
const requestId = crypto.randomUUID().slice(0, 8)
/**
* POST /api/copilot
* Send a message to the copilot
*/
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID()
try {
// Validate API key
const apiKey = request.headers.get('X-OpenAI-Key')
if (!apiKey) {
return NextResponse.json({ error: 'OpenAI API key is required' }, { status: 401 })
const body = await req.json()
const { message, chatId, workflowId, createNewChat, stream } = SendMessageSchema.parse(body)
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Parse and validate request body
const body = await request.json()
const validatedData = RequestSchema.parse(body)
const { messages, workflowState } = validatedData
// Initialize OpenAI client
const openai = new OpenAI({ apiKey })
// Create message history with workflow context
const messageHistory = [
{ role: 'system', content: getSystemPrompt(workflowState) },
...messages,
]
// Make OpenAI API call with workflow context
const completion = await openai.chat.completions.create({
model: 'gpt-4o',
messages: messageHistory as ChatCompletionMessageParam[],
tools: Object.entries(workflowActions).map(([name, config]) => ({
type: 'function',
function: {
name,
description: config.description,
parameters: config.parameters,
},
})),
tool_choice: 'auto',
logger.info(`[${requestId}] Copilot message: "${message}"`, {
chatId,
workflowId,
createNewChat,
stream,
userId: session.user.id,
})
const message = completion.choices[0].message
// Send message using the service
const result = await sendMessage({
message,
chatId,
workflowId,
createNewChat,
stream,
userId: session.user.id,
})
// Process tool calls if present
if (message.tool_calls) {
logger.debug(`[${requestId}] Tool calls:`, {
toolCalls: message.tool_calls,
})
const actions = message.tool_calls.map((call) => ({
name: call.function.name,
parameters: JSON.parse(call.function.arguments),
}))
// Handle streaming response (ReadableStream or StreamingExecution)
let streamToRead: ReadableStream | null = null
return NextResponse.json({
message: message.content || "I've updated the workflow based on your request.",
actions,
})
// Debug logging to see what we actually got
logger.info(`[${requestId}] Response type analysis:`, {
responseType: typeof result.response,
isReadableStream: result.response instanceof ReadableStream,
hasStreamProperty:
typeof result.response === 'object' && result.response && 'stream' in result.response,
hasExecutionProperty:
typeof result.response === 'object' && result.response && 'execution' in result.response,
responseKeys:
typeof result.response === 'object' && result.response ? Object.keys(result.response) : [],
})
if (result.response instanceof ReadableStream) {
logger.info(`[${requestId}] Direct ReadableStream detected`)
streamToRead = result.response
} else if (
typeof result.response === 'object' &&
result.response &&
'stream' in result.response &&
'execution' in result.response
) {
// Handle StreamingExecution (from providers with tool calls)
logger.info(`[${requestId}] StreamingExecution detected`)
const streamingExecution = result.response as StreamingExecution
streamToRead = streamingExecution.stream
// No need to extract citations - LLM generates direct markdown links
}
// Return response with no actions
if (streamToRead) {
logger.info(`[${requestId}] Returning streaming response`)
const encoder = new TextEncoder()
return new Response(
new ReadableStream({
async start(controller) {
const reader = streamToRead!.getReader()
let accumulatedResponse = ''
// Send initial metadata
const metadata = {
type: 'metadata',
chatId: result.chatId,
metadata: {
requestId,
message,
},
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(metadata)}\n\n`))
try {
while (true) {
const { done, value } = await reader.read()
if (done) break
const chunkText = new TextDecoder().decode(value)
accumulatedResponse += chunkText
const contentChunk = {
type: 'content',
content: chunkText,
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(contentChunk)}\n\n`))
}
// Send completion signal
const completion = {
type: 'complete',
finalContent: accumulatedResponse,
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(completion)}\n\n`))
controller.close()
} catch (error) {
logger.error(`[${requestId}] Streaming error:`, error)
const errorChunk = {
type: 'error',
error: 'Streaming failed',
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
controller.close()
}
},
}),
{
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
},
}
)
}
// Handle non-streaming response
logger.info(`[${requestId}] Chat response generated successfully`)
return NextResponse.json({
message:
message.content ||
"I'm not sure what changes to make to the workflow. Can you please provide more specific instructions?",
success: true,
response: result.response,
chatId: result.chatId,
metadata: {
requestId,
message,
},
})
} catch (error) {
logger.error(`[${requestId}] Copilot API error:`, { error })
// Handle specific error types
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Invalid request format', details: error.errors },
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
return NextResponse.json({ error: 'Failed to process copilot message' }, { status: 500 })
logger.error(`[${requestId}] Copilot error:`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
 * GET /api/copilot
 * List chats for a workflow, or fetch a single chat when `chatId` is given.
 *
 * Pagination params (`limit`, `offset`) are clamped to the same bounds
 * declared by ListChatsSchema (1-100 / >= 0); non-numeric values fall back
 * to the defaults instead of propagating NaN to the service layer.
 */
export async function GET(req: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(req.url)
    const chatId = searchParams.get('chatId')

    // If chatId is provided, get the specific chat (scoped to this user).
    if (chatId) {
      const chat = await getChat(chatId, session.user.id)
      if (!chat) {
        return NextResponse.json({ error: 'Chat not found' }, { status: 404 })
      }
      return NextResponse.json({
        success: true,
        chat,
      })
    }

    // Otherwise, list chats for the given workflow.
    const workflowId = searchParams.get('workflowId')
    if (!workflowId) {
      return NextResponse.json(
        { error: 'workflowId is required for listing chats' },
        { status: 400 }
      )
    }

    // Fix: `parseInt(... || '50')` previously produced NaN for non-numeric
    // params and accepted negative/oversized values.
    const parsedLimit = Number.parseInt(searchParams.get('limit') ?? '', 10)
    const parsedOffset = Number.parseInt(searchParams.get('offset') ?? '', 10)
    const limit = Number.isNaN(parsedLimit) ? 50 : Math.min(Math.max(parsedLimit, 1), 100)
    const offset = Number.isNaN(parsedOffset) ? 0 : Math.max(parsedOffset, 0)

    const chats = await listChats(session.user.id, workflowId, { limit, offset })

    return NextResponse.json({
      success: true,
      chats,
    })
  } catch (error) {
    logger.error('Failed to handle GET request:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
/**
 * PUT /api/copilot
 * Create a new copilot chat for a workflow, optionally seeded with a title
 * and an initial message.
 */
export async function PUT(req: NextRequest) {
  try {
    const session = await getSession()
    const userId = session?.user?.id
    if (!userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const payload = CreateChatSchema.parse(await req.json())
    const { workflowId, title, initialMessage } = payload

    logger.info(`Creating new chat for user ${userId}, workflow ${workflowId}`)

    const chat = await createChat(userId, workflowId, { title, initialMessage })

    logger.info(`Created chat ${chat.id} for user ${userId}`)

    return NextResponse.json({ success: true, chat })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }
    logger.error('Failed to create chat:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
/**
 * PATCH /api/copilot
 * Update a chat's messages and/or title. When the stored chat has no title
 * and none is supplied, one is generated from the first user message.
 */
export async function PATCH(req: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { chatId, messages, title } = UpdateChatSchema.parse(await req.json())

    logger.info(`Updating chat ${chatId} for user ${session.user.id}`)

    // Fetch the current chat so we know whether it already has a title.
    const existingChat = await getChat(chatId, session.user.id)

    let titleToUse = title
    const needsGeneratedTitle =
      !titleToUse && existingChat && !existingChat.title && messages && messages.length > 0

    if (needsGeneratedTitle) {
      const firstUserMessage = messages.find((m) => m.role === 'user')
      if (firstUserMessage) {
        logger.info('Generating LLM-based title for chat without title')
        try {
          titleToUse = await generateChatTitle(firstUserMessage.content)
          logger.info(`Generated title: ${titleToUse}`)
        } catch (error) {
          // Title generation failures fall back to a placeholder rather
          // than failing the whole update.
          logger.error('Failed to generate chat title:', error)
          titleToUse = 'New Chat'
        }
      }
    }

    const chat = await updateChat(chatId, session.user.id, {
      messages,
      title: titleToUse,
    })

    if (!chat) {
      return NextResponse.json({ error: 'Chat not found or access denied' }, { status: 404 })
    }

    return NextResponse.json({ success: true, chat })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }
    logger.error('Failed to update chat:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
/**
 * DELETE /api/copilot
 * Delete a chat identified by the `chatId` query parameter.
 */
export async function DELETE(req: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const chatId = new URL(req.url).searchParams.get('chatId')
    if (!chatId) {
      return NextResponse.json({ error: 'chatId is required' }, { status: 400 })
    }

    const deleted = await deleteChat(chatId, session.user.id)
    if (!deleted) {
      return NextResponse.json({ error: 'Chat not found or access denied' }, { status: 404 })
    }

    return NextResponse.json({
      success: true,
      message: 'Chat deleted successfully',
    })
  } catch (error) {
    logger.error('Failed to delete chat:', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

View File

@@ -0,0 +1,76 @@
import { type NextRequest, NextResponse } from 'next/server'
import { searchDocumentation } from '@/lib/copilot/service'
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('DocsSearchAPI')
// Request and response type definitions for the docs search endpoint.

// Incoming JSON body; `topK` caps the number of results (default 5).
interface DocsSearchRequest {
  query: string
  topK?: number
}

// A single documentation hit returned by searchDocumentation.
interface DocsSearchResult {
  id: number
  title: string
  url: string
  content: string
  similarity: number
}

// Successful response envelope; `searchTime` is wall-clock milliseconds.
interface DocsSearchSuccessResponse {
  success: true
  results: DocsSearchResult[]
  query: string
  totalResults: number
  searchTime?: number
}

// Error response envelope (400 for bad input, 500 for search failures).
interface DocsSearchErrorResponse {
  success: false
  error: string
}
/**
 * POST handler: search the documentation index for the given query.
 *
 * Validates that `query` is a non-empty string and clamps `topK` to the
 * 1-20 range (the same bounds DocsQuerySchema enforces on the copilot docs
 * route) so raw client values cannot request an unbounded result set.
 */
export async function POST(
  request: NextRequest
): Promise<NextResponse<DocsSearchSuccessResponse | DocsSearchErrorResponse>> {
  try {
    const requestBody: DocsSearchRequest = await request.json()
    const { query, topK = 5 } = requestBody

    // JSON.parse output is untyped at runtime — verify the query explicitly.
    if (!query || typeof query !== 'string') {
      const errorResponse: DocsSearchErrorResponse = {
        success: false,
        error: 'Query is required',
      }
      return NextResponse.json(errorResponse, { status: 400 })
    }

    // Fix: topK was previously forwarded unvalidated (NaN, negative, or huge
    // values passed straight through). Truncate to an integer and clamp.
    const boundedTopK = Math.min(Math.max(Math.trunc(topK) || 5, 1), 20)

    logger.info('Executing documentation search', { query, topK: boundedTopK })

    const startTime = Date.now()
    const results = await searchDocumentation(query, { topK: boundedTopK })
    const searchTime = Date.now() - startTime

    logger.info(`Found ${results.length} documentation results`, { query })

    const successResponse: DocsSearchSuccessResponse = {
      success: true,
      results,
      query,
      totalResults: results.length,
      searchTime,
    }
    return NextResponse.json(successResponse)
  } catch (error) {
    logger.error('Documentation search API failed', error)

    const errorResponse: DocsSearchErrorResponse = {
      success: false,
      error: `Documentation search failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
    }
    return NextResponse.json(errorResponse, { status: 500 })
  }
}

View File

@@ -0,0 +1,213 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { generateWorkflowYaml } from '@/lib/workflows/yaml-generator'
import { getBlock } from '@/blocks'
import { db } from '@/db'
import { workflow as workflowTable } from '@/db/schema'
const logger = createLogger('GetUserWorkflowAPI')
/**
 * Collect raw subblock values keyed by block id, then subblock id.
 * Skips entries whose value is undefined so only provided values remain.
 * (Previously duplicated inline for the normalized and JSON-blob paths.)
 */
function extractSubBlockValues(blocks: Record<string, any>): Record<string, Record<string, any>> {
  const values: Record<string, Record<string, any>> = {}
  Object.entries(blocks || {}).forEach(([blockId, block]) => {
    values[blockId] = {}
    Object.entries((block as any).subBlocks || {}).forEach(([subBlockId, subBlock]) => {
      if ((subBlock as any).value !== undefined) {
        values[blockId][subBlockId] = (subBlock as any).value
      }
    })
  })
  return values
}

/**
 * POST handler: generate a YAML representation of a user's workflow plus
 * per-block schema details, intended as LLM context for the copilot.
 */
export async function POST(request: NextRequest) {
  try {
    const body = await request.json()
    const { workflowId, includeMetadata = false } = body

    if (!workflowId) {
      return NextResponse.json(
        { success: false, error: 'Workflow ID is required' },
        { status: 400 }
      )
    }

    logger.info('Fetching workflow for YAML generation', { workflowId })

    // Fetch workflow from database
    const [workflowRecord] = await db
      .select()
      .from(workflowTable)
      .where(eq(workflowTable.id, workflowId))
      .limit(1)

    if (!workflowRecord) {
      return NextResponse.json(
        { success: false, error: `Workflow ${workflowId} not found` },
        { status: 404 }
      )
    }

    // Try to load from normalized tables first, fallback to JSON blob
    let workflowState: any = null
    let subBlockValues: Record<string, Record<string, any>> = {}

    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

    if (normalizedData) {
      workflowState = {
        blocks: normalizedData.blocks,
        edges: normalizedData.edges,
        loops: normalizedData.loops,
        parallels: normalizedData.parallels,
      }
      subBlockValues = extractSubBlockValues(normalizedData.blocks)
    } else if (workflowRecord.state) {
      // Fallback: subblock values are embedded in the legacy JSON block state.
      workflowState = workflowRecord.state as any
      subBlockValues = extractSubBlockValues((workflowState.blocks as any) || {})
    }

    if (!workflowState || !workflowState.blocks) {
      return NextResponse.json(
        { success: false, error: 'Workflow state is empty or invalid' },
        { status: 400 }
      )
    }

    // Generate YAML using server-side function
    const yaml = generateWorkflowYaml(workflowState, subBlockValues)

    if (!yaml || yaml.trim() === '') {
      return NextResponse.json(
        { success: false, error: 'Generated YAML is empty' },
        { status: 400 }
      )
    }

    // Generate detailed block information with schemas from the registry.
    const blockSchemas: Record<string, any> = {}
    Object.entries(workflowState.blocks).forEach(([blockId, blockState]) => {
      const block = blockState as any
      const blockConfig = getBlock(block.type)

      if (blockConfig) {
        blockSchemas[blockId] = {
          type: block.type,
          name: block.name,
          description: blockConfig.description,
          longDescription: blockConfig.longDescription,
          category: blockConfig.category,
          docsLink: blockConfig.docsLink,
          inputs: {},
          inputRequirements: blockConfig.inputs || {},
          outputs: blockConfig.outputs || {},
          tools: blockConfig.tools,
        }

        // Describe each input from the subBlocks configuration; optional
        // attributes are spread in only when present.
        if (blockConfig.subBlocks) {
          blockConfig.subBlocks.forEach((subBlock) => {
            blockSchemas[blockId].inputs[subBlock.id] = {
              type: subBlock.type,
              title: subBlock.title,
              description: subBlock.description || '',
              layout: subBlock.layout,
              ...(subBlock.options && { options: subBlock.options }),
              ...(subBlock.placeholder && { placeholder: subBlock.placeholder }),
              ...(subBlock.min !== undefined && { min: subBlock.min }),
              ...(subBlock.max !== undefined && { max: subBlock.max }),
              ...(subBlock.columns && { columns: subBlock.columns }),
              ...(subBlock.hidden !== undefined && { hidden: subBlock.hidden }),
              ...(subBlock.condition && { condition: subBlock.condition }),
            }
          })
        }
      } else {
        // Special block types (loops, parallels) have no registry entry.
        blockSchemas[blockId] = {
          type: block.type,
          name: block.name,
          description: `${block.type.charAt(0).toUpperCase() + block.type.slice(1)} container block`,
          category: 'Control Flow',
          inputs: {},
          outputs: {},
        }
      }
    })

    // Summaries: block counts by type and schemas by category.
    const blockTypes = Object.values(workflowState.blocks).reduce(
      (acc: Record<string, number>, block: any) => {
        acc[block.type] = (acc[block.type] || 0) + 1
        return acc
      },
      {}
    )

    const categories = Object.values(blockSchemas).reduce(
      (acc: Record<string, number>, schema: any) => {
        if (schema.category) {
          acc[schema.category] = (acc[schema.category] || 0) + 1
        }
        return acc
      },
      {}
    )

    // Prepare response with clear context markers
    const response: any = {
      workflowContext: 'USER_SPECIFIC_WORKFLOW', // Clear marker for the LLM
      note: 'This data represents only the blocks and configurations that the user has actually built in their current workflow, not all available Sim Studio capabilities.',
      yaml,
      format: 'yaml',
      summary: {
        workflowName: workflowRecord.name,
        blockCount: Object.keys(workflowState.blocks).length,
        edgeCount: (workflowState.edges || []).length,
        blockTypes,
        categories,
        hasLoops: Object.keys(workflowState.loops || {}).length > 0,
        hasParallels: Object.keys(workflowState.parallels || {}).length > 0,
      },
      userBuiltBlocks: blockSchemas,
    }

    // Add metadata if requested
    if (includeMetadata) {
      response.metadata = {
        workflowId: workflowRecord.id,
        name: workflowRecord.name,
        description: workflowRecord.description,
        workspaceId: workflowRecord.workspaceId,
        createdAt: workflowRecord.createdAt,
        updatedAt: workflowRecord.updatedAt,
      }
    }

    logger.info('Successfully generated workflow YAML', {
      workflowId,
      // Fix: the count lives on response.summary — `response.blockCount`
      // does not exist and always logged undefined.
      blockCount: response.summary.blockCount,
      yamlLength: yaml.length,
    })

    return NextResponse.json({
      success: true,
      output: response,
    })
  } catch (error) {
    logger.error('Failed to get workflow YAML:', error)
    return NextResponse.json(
      {
        success: false,
        error: `Failed to get workflow YAML: ${error instanceof Error ? error.message : 'Unknown error'}`,
      },
      { status: 500 }
    )
  }
}

View File

@@ -110,6 +110,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
parallelsCount: Object.keys(normalizedData.parallels).length,
loops: normalizedData.loops,
})
// Use normalized table data - reconstruct complete state object
// First get any existing state properties, then override with normalized data
const existingState =

View File

@@ -0,0 +1,236 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
import { workflow } from '@/db/schema'
const logger = createLogger('WorkflowStateAPI')
// Zod schemas for workflow state validation

// 2D canvas position of a block.
const PositionSchema = z.object({
  x: z.number(),
  y: z.number(),
})

// Per-block layout/container data. `parentId` + `extent: 'parent'` nest a
// block inside a loop/parallel container; `loopType`/`parallelType` configure
// those containers.
const BlockDataSchema = z.object({
  parentId: z.string().optional(),
  extent: z.literal('parent').optional(),
  width: z.number().optional(),
  height: z.number().optional(),
  collection: z.unknown().optional(),
  count: z.number().optional(),
  loopType: z.enum(['for', 'forEach']).optional(),
  parallelType: z.enum(['collection', 'count']).optional(),
  type: z.string().optional(),
})

// A single subblock (input field) value; values may be strings, numbers,
// table-like string matrices, or null.
const SubBlockStateSchema = z.object({
  id: z.string(),
  type: z.string(),
  value: z.union([z.string(), z.number(), z.array(z.array(z.string())), z.null()]),
})

// Block outputs are intentionally unvalidated — their shape is block-specific.
const BlockOutputSchema = z.any()

// A workflow block: identity, canvas placement, subblock inputs, outputs,
// and UI flags (wide mode, advanced mode, handle orientation).
const BlockStateSchema = z.object({
  id: z.string(),
  type: z.string(),
  name: z.string(),
  position: PositionSchema,
  subBlocks: z.record(SubBlockStateSchema),
  outputs: z.record(BlockOutputSchema),
  enabled: z.boolean(),
  horizontalHandles: z.boolean().optional(),
  isWide: z.boolean().optional(),
  height: z.number().optional(),
  advancedMode: z.boolean().optional(),
  data: BlockDataSchema.optional(),
})

// A connection between two blocks, including optional ReactFlow-style
// presentation fields (labels, markers, styling).
const EdgeSchema = z.object({
  id: z.string(),
  source: z.string(),
  target: z.string(),
  sourceHandle: z.string().optional(),
  targetHandle: z.string().optional(),
  type: z.string().optional(),
  animated: z.boolean().optional(),
  style: z.record(z.any()).optional(),
  data: z.record(z.any()).optional(),
  label: z.string().optional(),
  labelStyle: z.record(z.any()).optional(),
  labelShowBg: z.boolean().optional(),
  labelBgStyle: z.record(z.any()).optional(),
  labelBgPadding: z.array(z.number()).optional(),
  labelBgBorderRadius: z.number().optional(),
  markerStart: z.string().optional(),
  markerEnd: z.string().optional(),
})

// A loop container: member block ids plus iteration config. `forEachItems`
// may be an array, object, or an expression string.
const LoopSchema = z.object({
  id: z.string(),
  nodes: z.array(z.string()),
  iterations: z.number(),
  loopType: z.enum(['for', 'forEach']),
  forEachItems: z.union([z.array(z.any()), z.record(z.any()), z.string()]).optional(),
})

// A parallel container: member block ids plus distribution config
// (count-based or collection-based fan-out).
const ParallelSchema = z.object({
  id: z.string(),
  nodes: z.array(z.string()),
  distribution: z.union([z.array(z.any()), z.record(z.any()), z.string()]).optional(),
  count: z.number().optional(),
  parallelType: z.enum(['count', 'collection']).optional(),
})

// Deployment lifecycle status for a workflow.
const DeploymentStatusSchema = z.object({
  id: z.string(),
  status: z.enum(['deploying', 'deployed', 'failed', 'stopping', 'stopped']),
  deployedAt: z.date().optional(),
  error: z.string().optional(),
})
const WorkflowStateSchema = z.object({
blocks: z.record(BlockStateSchema),
edges: z.array(EdgeSchema),
loops: z.record(LoopSchema).optional(),
parallels: z.record(ParallelSchema).optional(),
lastSaved: z.number().optional(),
isDeployed: z.boolean().optional(),
deployedAt: z.date().optional(),
deploymentStatuses: z.record(DeploymentStatusSchema).optional(),
hasActiveSchedule: z.boolean().optional(),
hasActiveWebhook: z.boolean().optional(),
})
/**
 * PUT /api/workflows/[id]/state
 * Save complete workflow state to normalized database tables
 *
 * Authorization: the caller must own the workflow, or hold 'write'/'admin'
 * permission on the workflow's workspace.
 * Responses: 200 with block/edge counts on success; 401/403/404 on auth or
 * lookup failures; 400 on schema validation errors; 500 otherwise.
 */
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  // Short id correlating all log lines emitted for this request.
  const requestId = crypto.randomUUID().slice(0, 8)
  const startTime = Date.now()
  const { id: workflowId } = await params

  try {
    // Get the session
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized state update attempt for workflow ${workflowId}`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }
    const userId = session.user.id

    // Parse and validate request body. A ZodError thrown here is converted
    // into a 400 response by the catch block below.
    const body = await request.json()
    const state = WorkflowStateSchema.parse(body)

    // Fetch the workflow to check ownership/access
    const workflowData = await db
      .select()
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .then((rows) => rows[0])

    if (!workflowData) {
      logger.warn(`[${requestId}] Workflow ${workflowId} not found for state update`)
      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
    }

    // Check if user has permission to update this workflow
    let canUpdate = false

    // Case 1: User owns the workflow
    if (workflowData.userId === userId) {
      canUpdate = true
    }

    // Case 2: Workflow belongs to a workspace and user has write or admin permission
    if (!canUpdate && workflowData.workspaceId) {
      const userPermission = await getUserEntityPermissions(
        userId,
        'workspace',
        workflowData.workspaceId
      )
      if (userPermission === 'write' || userPermission === 'admin') {
        canUpdate = true
      }
    }

    if (!canUpdate) {
      logger.warn(
        `[${requestId}] User ${userId} denied permission to update workflow state ${workflowId}`
      )
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }

    // Save to normalized tables
    // Ensure all required fields are present for WorkflowState type
    // (optional fields from the schema are defaulted here).
    const workflowState = {
      blocks: state.blocks,
      edges: state.edges,
      loops: state.loops || {},
      parallels: state.parallels || {},
      lastSaved: state.lastSaved || Date.now(),
      isDeployed: state.isDeployed || false,
      deployedAt: state.deployedAt,
      deploymentStatuses: state.deploymentStatuses || {},
      hasActiveSchedule: state.hasActiveSchedule || false,
      hasActiveWebhook: state.hasActiveWebhook || false,
    }

    // NOTE(review): the `as any` cast bypasses the WorkflowState type —
    // presumably the zod-inferred shape doesn't line up exactly; worth
    // aligning the types rather than casting.
    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowState as any)

    if (!saveResult.success) {
      logger.error(`[${requestId}] Failed to save workflow ${workflowId} state:`, saveResult.error)
      return NextResponse.json(
        { error: 'Failed to save workflow state', details: saveResult.error },
        { status: 500 }
      )
    }

    // Update workflow's lastSynced timestamp
    await db
      .update(workflow)
      .set({
        lastSynced: new Date(),
        updatedAt: new Date(),
        state: saveResult.jsonBlob, // Also update JSON blob for backward compatibility
      })
      .where(eq(workflow.id, workflowId))

    const elapsed = Date.now() - startTime
    logger.info(`[${requestId}] Successfully saved workflow ${workflowId} state in ${elapsed}ms`)

    return NextResponse.json(
      {
        success: true,
        blocksCount: Object.keys(state.blocks).length,
        edgesCount: state.edges.length,
      },
      { status: 200 }
    )
  } catch (error: unknown) {
    const elapsed = Date.now() - startTime

    // Validation failures become a 400 carrying the zod error details.
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid workflow state data for ${workflowId}`, {
        errors: error.errors,
      })
      return NextResponse.json(
        { error: 'Invalid state data', details: error.errors },
        { status: 400 }
      )
    }

    logger.error(
      `[${requestId}] Error saving workflow ${workflowId} state after ${elapsed}ms`,
      error
    )
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

View File

@@ -0,0 +1,44 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { generateWorkflowYaml } from '@/lib/workflows/yaml-generator'
const logger = createLogger('WorkflowYamlAPI')
/**
 * POST handler: convert a workflow state object (plus optional sub-block
 * values) into its YAML representation.
 * Responds with { success: true, yaml } on success, { success: false, error }
 * with status 400 (missing payload) or 500 (generation failure) otherwise.
 */
export async function POST(request: NextRequest) {
  // Short request-scoped id used to correlate log lines.
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    logger.info(`[${requestId}] Converting workflow JSON to YAML`)

    const { workflowState, subBlockValues, includeMetadata = false } = await request.json()

    // The workflow state payload is mandatory; everything else is optional.
    if (!workflowState) {
      return NextResponse.json(
        { success: false, error: 'workflowState is required' },
        { status: 400 }
      )
    }

    // Generate YAML using the shared utility
    const yaml = generateWorkflowYaml(workflowState, subBlockValues)

    logger.info(`[${requestId}] Successfully generated YAML`, { yamlLength: yaml.length })

    return NextResponse.json({ success: true, yaml })
  } catch (error) {
    logger.error(`[${requestId}] YAML generation failed`, error)
    return NextResponse.json(
      {
        success: false,
        error: `Failed to generate YAML: ${error instanceof Error ? error.message : 'Unknown error'}`,
      },
      { status: 500 }
    )
  }
}

View File

@@ -0,0 +1,106 @@
'use client'
import { useState } from 'react'
import { Download, FileText } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console-logger'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowYamlStore } from '@/stores/workflows/yaml/store'
const logger = createLogger('ExportControls')
interface ExportControlsProps {
  // Disables the trigger button (e.g. when the user lacks read permission).
  disabled?: boolean
}

/**
 * Control-bar dropdown for exporting the active workflow.
 * Currently offers a single format: YAML, produced by the workflow YAML store.
 */
export function ExportControls({ disabled = false }: ExportControlsProps) {
  const [isExporting, setIsExporting] = useState(false)
  const { workflows, activeWorkflowId } = useWorkflowRegistry()
  const getYaml = useWorkflowYamlStore((state) => state.getYaml)

  const currentWorkflow = activeWorkflowId ? workflows[activeWorkflowId] : null

  // Trigger a browser download via a temporary object URL and a synthetic
  // anchor click; the URL is revoked immediately after the click.
  const downloadFile = (content: string, filename: string, mimeType: string) => {
    try {
      const blob = new Blob([content], { type: mimeType })
      const url = URL.createObjectURL(blob)
      const a = document.createElement('a')
      a.href = url
      a.download = filename
      document.body.appendChild(a)
      a.click()
      document.body.removeChild(a)
      URL.revokeObjectURL(url)
    } catch (error) {
      logger.error('Failed to download file:', error)
    }
  }

  // Serialize the current workflow to YAML and download it. The filename is
  // the workflow name with non-alphanumeric characters replaced by '_'.
  const handleExportYaml = () => {
    if (!currentWorkflow || !activeWorkflowId) {
      logger.warn('No active workflow to export')
      return
    }

    setIsExporting(true)
    try {
      const yamlContent = getYaml()
      const filename = `${currentWorkflow.name.replace(/[^a-z0-9]/gi, '_')}_workflow.yaml`
      downloadFile(yamlContent, filename, 'text/yaml')
      logger.info('Workflow exported as YAML')
    } catch (error) {
      logger.error('Failed to export workflow as YAML:', error)
    } finally {
      setIsExporting(false)
    }
  }

  return (
    <DropdownMenu>
      <Tooltip>
        <TooltipTrigger asChild>
          <DropdownMenuTrigger asChild>
            <Button
              variant='ghost'
              size='icon'
              disabled={disabled || isExporting || !currentWorkflow}
              className='hover:text-foreground'
            >
              <Download className='h-5 w-5' />
              <span className='sr-only'>Export Workflow</span>
            </Button>
          </DropdownMenuTrigger>
        </TooltipTrigger>
        <TooltipContent>
          {disabled
            ? 'Export not available'
            : !currentWorkflow
              ? 'No workflow to export'
              : 'Export Workflow'}
        </TooltipContent>
      </Tooltip>
      <DropdownMenuContent align='end' className='w-48'>
        <DropdownMenuItem
          onClick={handleExportYaml}
          disabled={isExporting || !currentWorkflow}
          className='flex cursor-pointer items-center gap-2'
        >
          <FileText className='h-4 w-4' />
          <div className='flex flex-col'>
            <span>Export as YAML</span>
            <span className='text-muted-foreground text-xs'>workflow language</span>
          </div>
        </DropdownMenuItem>
      </DropdownMenuContent>
    </DropdownMenu>
  )
}

View File

@@ -56,6 +56,7 @@ import {
} from '../../../hooks/use-keyboard-shortcuts'
import { useWorkflowExecution } from '../../hooks/use-workflow-execution'
import { DeploymentControls } from './components/deployment-controls/deployment-controls'
import { ExportControls } from './components/export-controls/export-controls'
import { HistoryDropdownItem } from './components/history-dropdown-item/history-dropdown-item'
import { MarketplaceModal } from './components/marketplace-modal/marketplace-modal'
import { NotificationDropdownItem } from './components/notification-dropdown-item/notification-dropdown-item'
@@ -1297,6 +1298,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
{renderDuplicateButton()}
{renderAutoLayoutButton()}
{renderDebugModeToggle()}
<ExportControls disabled={!userPermissions.canRead} />
{/* <WorkflowTextEditorModal disabled={!userPermissions.canEdit} /> */}
{/* {renderPublishButton()} */}
{renderDeployButton()}
{renderRunButton()}

View File

@@ -1,78 +0,0 @@
'use client'
import { useState } from 'react'
import { MessageCircle, Send, X } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { useCopilotStore } from '@/stores/copilot/store'
/**
 * Minimal floating chat launcher (legacy copilot UI).
 * Collapsed: a round trigger button pinned near the bottom-right corner.
 * Expanded: a centered input bar that forwards messages to the copilot store.
 */
export function Copilot() {
  const { sendMessage } = useCopilotStore()
  const [isOpen, setIsOpen] = useState(false)
  const [message, setMessage] = useState('')

  // Ignore blank input; otherwise send the message and clear the field.
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    if (!message.trim()) return

    await sendMessage(message)
    setMessage('')
  }

  // Enter submits (single-line input — no Shift+Enter handling needed).
  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter') {
      e.preventDefault()
      handleSubmit(e as unknown as React.FormEvent)
    }
  }

  // Collapsed state: just the launcher button with a tooltip.
  if (!isOpen) {
    return (
      <Tooltip>
        <TooltipTrigger asChild>
          <button
            onClick={() => setIsOpen(true)}
            className='fixed right-16 bottom-[18px] z-10 flex h-9 w-9 items-center justify-center rounded-lg border bg-background text-muted-foreground transition-colors hover:bg-accent hover:text-foreground'
          >
            <MessageCircle className='h-5 w-5' />
            <span className='sr-only'>Open Chat</span>
          </button>
        </TooltipTrigger>
        <TooltipContent side='left'>Open Chat</TooltipContent>
      </Tooltip>
    )
  }

  // Expanded state: close button, message input, and send button in one bar.
  return (
    <div className='-translate-x-1/2 fixed bottom-16 left-1/2 z-50 w-[50%] min-w-[280px] max-w-[500px] rounded-2xl border bg-background shadow-lg'>
      <form onSubmit={handleSubmit} className='flex items-center gap-2 p-2'>
        <Button
          variant='ghost'
          size='icon'
          onClick={() => setIsOpen(false)}
          className='h-8 w-8 rounded-full text-muted-foreground hover:bg-accent/50 hover:text-foreground'
        >
          <X className='h-4 w-4' />
          <span className='sr-only'>Close Chat</span>
        </Button>
        <Input
          value={message}
          onChange={(e) => setMessage(e.target.value)}
          onKeyDown={handleKeyDown}
          placeholder='Type your message...'
          className='flex-1 rounded-xl border-0 text-foreground text-sm placeholder:text-muted-foreground/50 focus-visible:ring-0 focus-visible:ring-offset-0'
        />
        <Button
          type='submit'
          variant='ghost'
          size='icon'
          className='h-8 w-8 rounded-full text-muted-foreground hover:bg-accent/50 hover:text-foreground'
        >
          <Send className='h-4 w-4' />
          <span className='sr-only'>Send message</span>
        </Button>
      </form>
    </div>
  )
}

View File

@@ -0,0 +1,373 @@
'use client'
import { type KeyboardEvent, useEffect, useRef } from 'react'
import {
ArrowUp,
Bot,
ChevronDown,
MessageSquarePlus,
MoreHorizontal,
Trash2,
X,
} from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { Input } from '@/components/ui/input'
import type { CopilotChat } from '@/lib/copilot-api'
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('CopilotModal')
// Shape of a single chat message as rendered inside the fullscreen modal.
interface Message {
  id: string
  content: string
  type: 'user' | 'assistant'
  timestamp: Date
  // Optional documentation citations attached to assistant responses.
  citations?: Array<{
    id: number
    title: string
    url: string
  }>
}

interface CopilotModalMessage {
  message: Message
}

// Modal-specific message component: right-aligned bubble for user messages,
// left-aligned markdown-rendered prose for assistant messages.
function ModalCopilotMessage({ message }: CopilotModalMessage) {
  // Ad-hoc markdown-to-HTML conversion covering links, fenced/inline code,
  // headers, bold, bullet lists, and paragraph/line breaks.
  // NOTE(review): the result is injected via dangerouslySetInnerHTML with no
  // sanitization, so raw HTML in the message content reaches the DOM as-is —
  // confirm the content source is trusted or pass it through a sanitizer.
  const renderMarkdown = (text: string) => {
    let processedText = text

    // Process markdown links: [text](url)
    processedText = processedText.replace(
      /\[([^\]]+)\]\(([^)]+)\)/g,
      '<a href="$2" target="_blank" rel="noopener noreferrer" class="text-blue-600 hover:text-blue-800 font-semibold underline transition-colors">$1</a>'
    )

    // Handle code blocks
    processedText = processedText.replace(
      /```(\w+)?\n([\s\S]*?)\n```/g,
      '<pre class="bg-muted rounded-md p-3 my-2 overflow-x-auto"><code class="text-sm">$2</code></pre>'
    )

    // Handle inline code
    processedText = processedText.replace(
      /`([^`]+)`/g,
      '<code class="bg-muted px-1 rounded text-sm">$1</code>'
    )

    // Handle headers (### before ## before # so longer prefixes win)
    processedText = processedText.replace(
      /^### (.*$)/gm,
      '<h3 class="text-lg font-semibold mt-4 mb-2">$1</h3>'
    )
    processedText = processedText.replace(
      /^## (.*$)/gm,
      '<h2 class="text-xl font-semibold mt-4 mb-2">$1</h2>'
    )
    processedText = processedText.replace(
      /^# (.*$)/gm,
      '<h1 class="text-2xl font-bold mt-4 mb-2">$1</h1>'
    )

    // Handle bold
    processedText = processedText.replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>')

    // Handle lists
    processedText = processedText.replace(/^- (.*$)/gm, '<li class="ml-4">• $1</li>')

    // Handle line breaks (reduce spacing)
    processedText = processedText.replace(/\n\n+/g, '</p><p class="mt-2">')
    processedText = processedText.replace(/\n/g, '<br>')

    return processedText
  }

  // For user messages (on the right)
  if (message.type === 'user') {
    return (
      <div className='px-4 py-5'>
        <div className='mx-auto max-w-3xl'>
          <div className='flex justify-end'>
            <div className='max-w-[80%] rounded-3xl bg-[#F4F4F4] px-4 py-3 shadow-sm dark:bg-primary/10'>
              <div className='whitespace-pre-wrap break-words text-[#0D0D0D] text-base leading-relaxed dark:text-white'>
                {message.content}
              </div>
            </div>
          </div>
        </div>
      </div>
    )
  }

  // For assistant messages (on the left)
  return (
    <div className='px-4 py-5'>
      <div className='mx-auto max-w-3xl'>
        <div className='flex'>
          <div className='max-w-[80%]'>
            <div
              className='prose prose-sm dark:prose-invert max-w-none whitespace-pre-wrap break-words text-base leading-normal'
              dangerouslySetInnerHTML={{ __html: renderMarkdown(message.content) }}
            />
          </div>
        </div>
      </div>
    </div>
  )
}
// Props for the fullscreen copilot modal. All chat state and mutations are
// owned by the parent component and passed down — the modal is fully controlled.
interface CopilotModalProps {
  open: boolean
  onOpenChange: (open: boolean) => void
  copilotMessage: string
  setCopilotMessage: (message: string) => void
  messages: Message[]
  onSendMessage: (message: string) => Promise<void>
  isLoading: boolean
  // Chat management props
  chats: CopilotChat[]
  currentChat: CopilotChat | null
  onSelectChat: (chat: CopilotChat) => void
  onStartNewChat: () => void
  onDeleteChat: (chatId: string) => void
}

/**
 * Fullscreen documentation-copilot chat overlay.
 * Layout: header (chat picker, new-chat and close buttons), a scrolling
 * message list with an empty state and a loading indicator, and a fixed
 * input bar at the bottom. Returns null while `open` is false.
 */
export function CopilotModal({
  open,
  onOpenChange,
  copilotMessage,
  setCopilotMessage,
  messages,
  onSendMessage,
  isLoading,
  chats,
  currentChat,
  onSelectChat,
  onStartNewChat,
  onDeleteChat,
}: CopilotModalProps) {
  const messagesEndRef = useRef<HTMLDivElement>(null)
  const messagesContainerRef = useRef<HTMLDivElement>(null)
  const inputRef = useRef<HTMLInputElement>(null)

  // Auto-scroll to bottom when new messages are added
  useEffect(() => {
    if (messagesEndRef.current) {
      messagesEndRef.current.scrollIntoView({ behavior: 'smooth' })
    }
  }, [messages])

  // Focus input when modal opens
  useEffect(() => {
    if (open && inputRef.current) {
      inputRef.current.focus()
    }
  }, [open])

  // Handle send message: delegate to the parent, then clear and refocus input.
  const handleSendMessage = async () => {
    if (!copilotMessage.trim() || isLoading) return

    try {
      await onSendMessage(copilotMessage.trim())
      setCopilotMessage('')
      // Ensure input stays focused
      if (inputRef.current) {
        inputRef.current.focus()
      }
    } catch (error) {
      logger.error('Failed to send message', error)
    }
  }

  // Handle key press: plain Enter sends; Shift+Enter is left untouched.
  const handleKeyPress = (e: KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault()
      handleSendMessage()
    }
  }

  if (!open) return null

  return (
    <div className='fixed inset-0 z-[100] flex flex-col bg-background'>
      {/* Pulsing-dot animation used by the loading indicator below */}
      <style jsx>{`
        @keyframes growShrink {
          0%,
          100% {
            transform: scale(0.9);
          }
          50% {
            transform: scale(1.1);
          }
        }
        .loading-dot {
          animation: growShrink 1.5s infinite ease-in-out;
        }
      `}</style>

      {/* Header with chat title, management, and close button */}
      <div className='flex items-center justify-between border-b px-4 py-3'>
        <div className='flex flex-1 items-center gap-2'>
          {/* Chat Title Dropdown */}
          <DropdownMenu>
            <DropdownMenuTrigger asChild>
              <Button variant='ghost' className='h-8 max-w-[300px] flex-1 justify-start px-3'>
                <span className='truncate'>{currentChat?.title || 'New Chat'}</span>
                <ChevronDown className='ml-2 h-4 w-4 shrink-0' />
              </Button>
            </DropdownMenuTrigger>
            <DropdownMenuContent align='start' className='z-[110] w-64' sideOffset={8}>
              {chats.map((chat) => (
                <div key={chat.id} className='flex items-center'>
                  <DropdownMenuItem
                    onClick={() => onSelectChat(chat)}
                    className='flex-1 cursor-pointer'
                  >
                    <div className='min-w-0 flex-1'>
                      <div className='truncate font-medium text-sm'>
                        {chat.title || 'Untitled Chat'}
                      </div>
                      <div className='text-muted-foreground text-xs'>
                        {chat.messageCount} messages {' '}
                        {new Date(chat.updatedAt).toLocaleDateString()}
                      </div>
                    </div>
                  </DropdownMenuItem>
                  {/* Per-chat actions (currently: delete) in a nested menu */}
                  <DropdownMenu>
                    <DropdownMenuTrigger asChild>
                      <Button variant='ghost' size='sm' className='h-8 w-8 shrink-0 p-0'>
                        <MoreHorizontal className='h-4 w-4' />
                      </Button>
                    </DropdownMenuTrigger>
                    <DropdownMenuContent align='end' className='z-[120]'>
                      <DropdownMenuItem
                        onClick={() => onDeleteChat(chat.id)}
                        className='cursor-pointer text-destructive'
                      >
                        <Trash2 className='mr-2 h-4 w-4' />
                        Delete
                      </DropdownMenuItem>
                    </DropdownMenuContent>
                  </DropdownMenu>
                </div>
              ))}
            </DropdownMenuContent>
          </DropdownMenu>

          {/* New Chat Button */}
          <Button
            variant='ghost'
            size='sm'
            onClick={onStartNewChat}
            className='h-8 w-8 p-0'
            title='New Chat'
          >
            <MessageSquarePlus className='h-4 w-4' />
          </Button>
        </div>
        <Button
          variant='ghost'
          size='icon'
          className='h-8 w-8 rounded-md hover:bg-accent/50'
          onClick={() => onOpenChange(false)}
        >
          <X className='h-4 w-4' />
          <span className='sr-only'>Close</span>
        </Button>
      </div>

      {/* Messages container */}
      <div ref={messagesContainerRef} className='flex-1 overflow-y-auto'>
        <div className='mx-auto max-w-3xl'>
          {messages.length === 0 ? (
            <div className='flex h-full flex-col items-center justify-center px-4 py-10'>
              <div className='space-y-4 text-center'>
                <Bot className='mx-auto h-12 w-12 text-muted-foreground' />
                <div className='space-y-2'>
                  <h3 className='font-medium text-lg'>Welcome to Documentation Copilot</h3>
                  <p className='text-muted-foreground text-sm'>
                    Ask me anything about Sim Studio features, workflows, tools, or how to get
                    started.
                  </p>
                </div>
                <div className='mx-auto max-w-xs space-y-2 text-left'>
                  <div className='text-muted-foreground text-xs'>Try asking:</div>
                  <div className='space-y-1'>
                    <div className='rounded bg-muted/50 px-2 py-1 text-xs'>
                      "How do I create a workflow?"
                    </div>
                    <div className='rounded bg-muted/50 px-2 py-1 text-xs'>
                      "What tools are available?"
                    </div>
                    <div className='rounded bg-muted/50 px-2 py-1 text-xs'>
                      "How do I deploy my workflow?"
                    </div>
                  </div>
                </div>
              </div>
            </div>
          ) : (
            messages.map((message) => <ModalCopilotMessage key={message.id} message={message} />)
          )}

          {/* Loading indicator (shows only when loading) */}
          {isLoading && (
            <div className='px-4 py-5'>
              <div className='mx-auto max-w-3xl'>
                <div className='flex'>
                  <div className='max-w-[80%]'>
                    <div className='flex h-6 items-center'>
                      <div className='loading-dot h-3 w-3 rounded-full bg-black dark:bg-black' />
                    </div>
                  </div>
                </div>
              </div>
            </div>
          )}

          {/* Sentinel element the auto-scroll effect scrolls into view */}
          <div ref={messagesEndRef} className='h-1' />
        </div>
      </div>

      {/* Input area (fixed at bottom) */}
      <div className='bg-background p-4'>
        <div className='mx-auto max-w-3xl'>
          <div className='relative rounded-2xl border bg-background shadow-sm'>
            <Input
              ref={inputRef}
              value={copilotMessage}
              onChange={(e) => setCopilotMessage(e.target.value)}
              onKeyDown={handleKeyPress}
              placeholder='Ask about Sim Studio documentation...'
              className='min-h-[50px] flex-1 rounded-2xl border-0 bg-transparent py-7 pr-16 pl-6 text-base focus-visible:ring-0 focus-visible:ring-offset-0'
              disabled={isLoading}
            />
            <Button
              onClick={handleSendMessage}
              size='icon'
              disabled={!copilotMessage.trim() || isLoading}
              className='-translate-y-1/2 absolute top-1/2 right-3 h-10 w-10 rounded-xl bg-black p-0 text-white hover:bg-gray-800 dark:bg-primary dark:hover:bg-primary/80'
            >
              <ArrowUp className='h-4 w-4 dark:text-black' />
            </Button>
          </div>
          <div className='mt-2 text-center text-muted-foreground text-xs'>
            <p>Ask questions about Sim Studio documentation and features</p>
          </div>
        </div>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,419 @@
'use client'
import { forwardRef, useCallback, useEffect, useImperativeHandle, useRef } from 'react'
import {
Bot,
ChevronDown,
Loader2,
MessageSquarePlus,
MoreHorizontal,
Send,
Trash2,
User,
} from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { Input } from '@/components/ui/input'
import { ScrollArea } from '@/components/ui/scroll-area'
import { createLogger } from '@/lib/logs/console-logger'
import { useCopilotStore } from '@/stores/copilot/store'
import type { CopilotMessage } from '@/stores/copilot/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { CopilotModal } from './components/copilot-modal/copilot-modal'
const logger = createLogger('Copilot')
// Props for the panel-embedded copilot. Fullscreen state and its input value
// are lifted to the parent so the panel can coordinate the modal overlay.
interface CopilotProps {
  panelWidth: number
  isFullscreen?: boolean
  onFullscreenToggle?: (fullscreen: boolean) => void
  fullscreenInput?: string
  onFullscreenInputChange?: (input: string) => void
}

// Imperative surface exposed to the parent via ref.
interface CopilotRef {
  clearMessages: () => void
  startNewChat: () => void
}

/**
 * Documentation-copilot chat panel.
 * Reads all chat state from the copilot store, keeps the store's workflow id
 * in sync with the active workflow, and renders either the inline panel UI or
 * (via CopilotModal) a fullscreen variant of the same conversation.
 */
export const Copilot = forwardRef<CopilotRef, CopilotProps>(
  (
    {
      panelWidth,
      isFullscreen = false,
      onFullscreenToggle,
      fullscreenInput = '',
      onFullscreenInputChange,
    },
    ref
  ) => {
    const inputRef = useRef<HTMLInputElement>(null)
    const scrollAreaRef = useRef<HTMLDivElement>(null)
    const { activeWorkflowId } = useWorkflowRegistry()

    // Use the new copilot store
    const {
      currentChat,
      chats,
      messages,
      isLoading,
      isLoadingChats,
      isSendingMessage,
      error,
      workflowId,
      setWorkflowId,
      selectChat,
      createNewChat,
      deleteChat,
      sendMessage,
      clearMessages,
      clearError,
    } = useCopilotStore()

    // Sync workflow ID with store
    useEffect(() => {
      if (activeWorkflowId !== workflowId) {
        setWorkflowId(activeWorkflowId)
      }
    }, [activeWorkflowId, workflowId, setWorkflowId])

    // Auto-scroll to bottom when new messages are added.
    // Targets the Radix ScrollArea viewport rather than the root element.
    useEffect(() => {
      if (scrollAreaRef.current) {
        const scrollContainer = scrollAreaRef.current.querySelector(
          '[data-radix-scroll-area-viewport]'
        )
        if (scrollContainer) {
          scrollContainer.scrollTop = scrollContainer.scrollHeight
        }
      }
    }, [messages])

    // Handle chat deletion
    const handleDeleteChat = useCallback(
      async (chatId: string) => {
        try {
          await deleteChat(chatId)
          logger.info('Chat deleted successfully')
        } catch (error) {
          logger.error('Error deleting chat:', error)
        }
      },
      [deleteChat]
    )

    // Handle new chat creation (clears messages; the next send starts a chat)
    const handleStartNewChat = useCallback(() => {
      clearMessages()
      logger.info('Started new chat')
    }, [clearMessages])

    // Expose functions to parent
    useImperativeHandle(
      ref,
      () => ({
        clearMessages: handleStartNewChat,
        startNewChat: handleStartNewChat,
      }),
      [handleStartNewChat]
    )

    // Handle message submission. Accepts either a form event (panel input) or
    // an explicit message string (fullscreen modal path).
    const handleSubmit = useCallback(
      async (e?: React.FormEvent, message?: string) => {
        e?.preventDefault()

        const query = message || inputRef.current?.value?.trim() || ''
        if (!query || isSendingMessage || !activeWorkflowId) return

        // Clear input if using the form input
        if (!message && inputRef.current) {
          inputRef.current.value = ''
        }

        try {
          await sendMessage(query, { stream: true })
          logger.info('Sent message:', query)
        } catch (error) {
          logger.error('Failed to send message:', error)
        }
      },
      [isSendingMessage, activeWorkflowId, sendMessage]
    )

    // Format timestamp for display
    const formatTimestamp = (timestamp: string) => {
      return new Date(timestamp).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })
    }

    // Function to render content with basic markdown (including direct links from LLM).
    // NOTE(review): output is injected via dangerouslySetInnerHTML without
    // sanitization — confirm the content source is trusted.
    const renderMarkdownContent = (content: string) => {
      if (!content) return content

      let processedContent = content

      // Process markdown links: [text](url)
      processedContent = processedContent.replace(
        /\[([^\]]+)\]\(([^)]+)\)/g,
        '<a href="$2" target="_blank" rel="noopener noreferrer" class="text-blue-600 hover:text-blue-800 font-semibold underline transition-colors">$1</a>'
      )

      // Basic markdown processing
      processedContent = processedContent
        .replace(
          /```(\w+)?\n([\s\S]*?)```/g,
          '<pre class="bg-muted p-3 rounded-lg overflow-x-auto my-3 text-sm"><code>$2</code></pre>'
        )
        .replace(
          /`([^`]+)`/g,
          '<code class="bg-muted px-1.5 py-0.5 rounded text-sm font-mono">$1</code>'
        )
        .replace(/\*\*(.*?)\*\*/g, '<strong class="font-semibold">$1</strong>')
        .replace(/\*(.*?)\*/g, '<em class="italic">$1</em>')
        .replace(/^### (.*$)/gm, '<h3 class="font-semibold text-base mt-4 mb-2">$1</h3>')
        .replace(/^## (.*$)/gm, '<h2 class="font-semibold text-lg mt-4 mb-2">$1</h2>')
        .replace(/^# (.*$)/gm, '<h1 class="font-bold text-xl mt-4 mb-3">$1</h1>')
        .replace(/^\* (.*$)/gm, '<li class="ml-4">• $1</li>')
        .replace(/^- (.*$)/gm, '<li class="ml-4">• $1</li>')
        .replace(/\n\n+/g, '</p><p class="mt-2">')
        .replace(/\n/g, '<br>')

      // Wrap in paragraph tags if needed
      if (
        !processedContent.includes('<p>') &&
        !processedContent.includes('<h1>') &&
        !processedContent.includes('<h2>') &&
        !processedContent.includes('<h3>')
      ) {
        processedContent = `<p>${processedContent}</p>`
      }

      return processedContent
    }

    // Render individual message
    const renderMessage = (message: CopilotMessage) => {
      return (
        <div key={message.id} className='group flex gap-3 p-4 hover:bg-muted/30'>
          <div
            className={`flex h-8 w-8 items-center justify-center rounded-full ${
              message.role === 'user' ? 'bg-muted' : 'bg-primary'
            }`}
          >
            {message.role === 'user' ? (
              <User className='h-4 w-4 text-muted-foreground' />
            ) : (
              <Bot className='h-4 w-4 text-primary-foreground' />
            )}
          </div>
          <div className='min-w-0 flex-1'>
            <div className='mb-3 flex items-center gap-2'>
              <span className='font-medium text-sm'>
                {message.role === 'user' ? 'You' : 'Copilot'}
              </span>
              <span className='text-muted-foreground text-xs'>
                {formatTimestamp(message.timestamp)}
              </span>
            </div>

            {/* Enhanced content rendering with markdown links */}
            <div className='prose prose-sm dark:prose-invert max-w-none'>
              <div
                className='text-foreground text-sm leading-normal'
                dangerouslySetInnerHTML={{
                  __html: renderMarkdownContent(message.content),
                }}
              />
            </div>

            {/* Streaming cursor (empty content means the response is pending) */}
            {!message.content && (
              <div className='flex items-center gap-2 text-muted-foreground'>
                <Loader2 className='h-4 w-4 animate-spin' />
                <span className='text-sm'>Thinking...</span>
              </div>
            )}
          </div>
        </div>
      )
    }

    // Convert messages for modal (role -> type)
    const modalMessages = messages.map((msg) => ({
      id: msg.id,
      content: msg.content,
      type: msg.role as 'user' | 'assistant',
      timestamp: new Date(msg.timestamp),
      citations: msg.citations,
    }))

    // Handle modal message sending
    const handleModalSendMessage = useCallback(
      async (message: string) => {
        await handleSubmit(undefined, message)
      },
      [handleSubmit]
    )

    return (
      <>
        <div className='flex h-full flex-col'>
          {/* Header with Chat Title and Management */}
          <div className='border-b p-4'>
            <div className='flex items-center justify-between'>
              {/* Chat Title Dropdown */}
              <DropdownMenu>
                <DropdownMenuTrigger asChild>
                  <Button variant='ghost' className='h-8 min-w-0 flex-1 justify-start px-3'>
                    <span className='truncate'>{currentChat?.title || 'New Chat'}</span>
                    <ChevronDown className='ml-2 h-4 w-4 shrink-0' />
                  </Button>
                </DropdownMenuTrigger>
                <DropdownMenuContent align='start' className='z-[110] w-64' sideOffset={8}>
                  {chats.map((chat) => (
                    <div key={chat.id} className='flex items-center'>
                      <DropdownMenuItem
                        onClick={() => selectChat(chat)}
                        className='flex-1 cursor-pointer'
                      >
                        <div className='min-w-0 flex-1'>
                          <div className='truncate font-medium text-sm'>
                            {chat.title || 'Untitled Chat'}
                          </div>
                          <div className='text-muted-foreground text-xs'>
                            {chat.messageCount} messages {' '}
                            {new Date(chat.updatedAt).toLocaleDateString()}
                          </div>
                        </div>
                      </DropdownMenuItem>
                      {/* Per-chat actions (currently: delete) in a nested menu */}
                      <DropdownMenu>
                        <DropdownMenuTrigger asChild>
                          <Button variant='ghost' size='sm' className='h-8 w-8 shrink-0 p-0'>
                            <MoreHorizontal className='h-4 w-4' />
                          </Button>
                        </DropdownMenuTrigger>
                        <DropdownMenuContent align='end' className='z-[120]'>
                          <DropdownMenuItem
                            onClick={() => handleDeleteChat(chat.id)}
                            className='cursor-pointer text-destructive'
                          >
                            <Trash2 className='mr-2 h-4 w-4' />
                            Delete
                          </DropdownMenuItem>
                        </DropdownMenuContent>
                      </DropdownMenu>
                    </div>
                  ))}
                </DropdownMenuContent>
              </DropdownMenu>

              {/* New Chat Button */}
              <Button
                variant='ghost'
                size='sm'
                onClick={handleStartNewChat}
                className='h-8 w-8 p-0'
                title='New Chat'
              >
                <MessageSquarePlus className='h-4 w-4' />
              </Button>
            </div>

            {/* Error display */}
            {error && (
              <div className='mt-2 rounded-md bg-destructive/10 p-2 text-destructive text-sm'>
                {error}
                <Button
                  variant='ghost'
                  size='sm'
                  onClick={clearError}
                  className='ml-2 h-auto p-1 text-destructive'
                >
                  Dismiss
                </Button>
              </div>
            )}
          </div>

          {/* Messages area */}
          <ScrollArea ref={scrollAreaRef} className='flex-1'>
            {messages.length === 0 ? (
              <div className='flex h-full flex-col items-center justify-center px-4 py-10'>
                <div className='space-y-4 text-center'>
                  <Bot className='mx-auto h-12 w-12 text-muted-foreground' />
                  <div className='space-y-2'>
                    <h3 className='font-medium text-lg'>Welcome to Documentation Copilot</h3>
                    <p className='text-muted-foreground text-sm'>
                      Ask me anything about Sim Studio features, workflows, tools, or how to get
                      started.
                    </p>
                  </div>
                  <div className='mx-auto max-w-xs space-y-2 text-left'>
                    <div className='text-muted-foreground text-xs'>Try asking:</div>
                    <div className='space-y-1'>
                      <div className='rounded bg-muted/50 px-2 py-1 text-xs'>
                        "How do I create a workflow?"
                      </div>
                      <div className='rounded bg-muted/50 px-2 py-1 text-xs'>
                        "What tools are available?"
                      </div>
                      <div className='rounded bg-muted/50 px-2 py-1 text-xs'>
                        "How do I deploy my workflow?"
                      </div>
                    </div>
                  </div>
                </div>
              </div>
            ) : (
              messages.map(renderMessage)
            )}
          </ScrollArea>

          {/* Input area */}
          <div className='border-t p-4'>
            <form onSubmit={handleSubmit} className='flex gap-2'>
              <Input
                ref={inputRef}
                placeholder='Ask about Sim Studio documentation...'
                disabled={isSendingMessage}
                className='flex-1'
                autoComplete='off'
              />
              <Button type='submit' size='icon' disabled={isSendingMessage} className='h-10 w-10'>
                {isSendingMessage ? (
                  <Loader2 className='h-4 w-4 animate-spin' />
                ) : (
                  <Send className='h-4 w-4' />
                )}
              </Button>
            </form>
          </div>
        </div>

        {/* Fullscreen Modal */}
        <CopilotModal
          open={isFullscreen}
          onOpenChange={(open) => onFullscreenToggle?.(open)}
          copilotMessage={fullscreenInput}
          setCopilotMessage={(message) => onFullscreenInputChange?.(message)}
          messages={modalMessages}
          onSendMessage={handleModalSendMessage}
          isLoading={isSendingMessage}
          chats={chats}
          currentChat={currentChat}
          onSelectChat={selectChat}
          onStartNewChat={handleStartNewChat}
          onDeleteChat={handleDeleteChat}
        />
      </>
    )
  }
)
Copilot.displayName = 'Copilot'

View File

@@ -1,6 +1,6 @@
'use client'
import { useEffect, useState } from 'react'
import { useEffect, useRef, useState } from 'react'
import { Expand, PanelRight } from 'lucide-react'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { useChatStore } from '@/stores/panel/chat/store'
@@ -16,7 +16,10 @@ export function Panel() {
const [width, setWidth] = useState(336) // 84 * 4 = 336px (default width)
const [isDragging, setIsDragging] = useState(false)
const [chatMessage, setChatMessage] = useState<string>('')
const [copilotMessage, setCopilotMessage] = useState<string>('')
const [isChatModalOpen, setIsChatModalOpen] = useState(false)
const [isCopilotModalOpen, setIsCopilotModalOpen] = useState(false)
const copilotRef = useRef<{ clearMessages: () => void }>(null)
const isOpen = usePanelStore((state) => state.isOpen)
const togglePanel = usePanelStore((state) => state.togglePanel)
@@ -116,15 +119,30 @@ export function Panel() {
>
Variables
</button>
{/* <button
onClick={() => setActiveTab('copilot')}
className={`rounded-md px-3 py-1 text-sm transition-colors ${
activeTab === 'copilot'
? 'bg-accent text-foreground'
: 'text-muted-foreground hover:bg-accent/50 hover:text-foreground'
}`}
>
Copilot
</button> */}
</div>
{(activeTab === 'console' || activeTab === 'chat') && (
{(activeTab === 'console' || activeTab === 'chat') /* || activeTab === 'copilot' */ && (
<button
onClick={() =>
activeTab === 'console'
? clearConsole(activeWorkflowId)
: clearChat(activeWorkflowId)
}
onClick={() => {
if (activeTab === 'console') {
clearConsole(activeWorkflowId)
} else if (activeTab === 'chat') {
clearChat(activeWorkflowId)
}
// else if (activeTab === 'copilot') {
// copilotRef.current?.clearMessages()
// }
}}
className='rounded-md px-3 py-1 text-muted-foreground text-sm transition-colors hover:bg-accent/50 hover:text-foreground'
>
Clear
@@ -139,7 +157,16 @@ export function Panel() {
) : activeTab === 'console' ? (
<Console panelWidth={width} />
) : (
<Variables panelWidth={width} />
/* activeTab === 'copilot' ? (
<Copilot
ref={copilotRef}
panelWidth={width}
isFullscreen={isCopilotModalOpen}
onFullscreenToggle={setIsCopilotModalOpen}
fullscreenInput={copilotMessage}
onFullscreenInputChange={setCopilotMessage}
/>
) : */ <Variables panelWidth={width} />
)}
</div>
@@ -172,6 +199,21 @@ export function Panel() {
<TooltipContent side='left'>Expand Chat</TooltipContent>
</Tooltip>
)}
{/* activeTab === 'copilot' && (
<Tooltip>
<TooltipTrigger asChild>
<button
onClick={() => setIsCopilotModalOpen(true)}
className='flex h-9 w-9 items-center justify-center rounded-lg text-muted-foreground transition-colors hover:bg-accent hover:text-foreground'
>
<Expand className='h-5 w-5' />
<span className='sr-only'>Expand Copilot</span>
</button>
</TooltipTrigger>
<TooltipContent side='left'>Expand Copilot</TooltipContent>
</Tooltip>
) */}
</div>
</div>

View File

@@ -0,0 +1,272 @@
import { createLogger } from '@/lib/logs/console-logger'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { importWorkflowFromYaml } from '@/stores/workflows/yaml/importer'
import type { EditorFormat } from './workflow-text-editor'
const logger = createLogger('WorkflowApplier')
// Outcome of applying workflow text (YAML or JSON) to the active workflow.
export interface ApplyResult {
  success: boolean // True when the workflow was applied (and, for JSON, persisted)
  errors: string[] // Errors encountered while parsing, applying, or saving
  warnings: string[] // Non-fatal notices surfaced to the user
  appliedOperations: number // Count of apply operations performed (0 on failure, 1 on success here)
}
/**
 * Apply workflow changes by using the existing importer for YAML
 * or direct state replacement for JSON.
 *
 * YAML content is delegated to the YAML importer (which handles ID mapping and
 * complete state replacement, saving directly). JSON content is validated,
 * applied to the local zustand stores, then persisted to the database. If the
 * local update OR the database save fails, the local stores are rolled back to
 * their prior snapshot so the UI never shows changes that were not persisted.
 *
 * @param content - Raw YAML or JSON text describing the workflow
 * @param format - Which format `content` is written in
 * @returns ApplyResult describing success, errors, warnings, and op count
 */
export async function applyWorkflowDiff(
  content: string,
  format: EditorFormat
): Promise<ApplyResult> {
  // Shared shape for every failure exit.
  const failure = (errors: string[]): ApplyResult => ({
    success: false,
    errors,
    warnings: [],
    appliedOperations: 0,
  })

  try {
    const { activeWorkflowId } = useWorkflowRegistry.getState()
    if (!activeWorkflowId) {
      return failure(['No active workflow found'])
    }

    if (format === 'yaml') {
      // Use the existing YAML importer which handles ID mapping and complete state replacement
      const workflowActions = {
        addBlock: () => {}, // Not used in this path
        addEdge: () => {}, // Not used in this path
        applyAutoLayout: () => {
          // Trigger auto layout after import
          window.dispatchEvent(new CustomEvent('trigger-auto-layout'))
        },
        setSubBlockValue: () => {}, // Not used in this path
        getExistingBlocks: () => useWorkflowStore.getState().blocks,
      }

      const result = await importWorkflowFromYaml(content, workflowActions)
      return {
        success: result.success,
        errors: result.errors,
        warnings: result.warnings,
        appliedOperations: result.success ? 1 : 0, // One complete import operation
      }
    }

    // Handle JSON format - complete state replacement
    let parsedData: any
    try {
      parsedData = JSON.parse(content)
    } catch (error) {
      return failure([`Invalid JSON: ${error instanceof Error ? error.message : 'Parse error'}`])
    }

    // Validate JSON structure before touching any store
    if (!parsedData.state || !parsedData.state.blocks) {
      return failure(['Invalid JSON structure: missing state.blocks'])
    }

    // Extract workflow state, defaulting optional collections/flags
    const newWorkflowState = {
      blocks: parsedData.state.blocks,
      edges: parsedData.state.edges || [],
      loops: parsedData.state.loops || {},
      parallels: parsedData.state.parallels || {},
      lastSaved: Date.now(),
      isDeployed: parsedData.state.isDeployed || false,
      deployedAt: parsedData.state.deployedAt,
      deploymentStatuses: parsedData.state.deploymentStatuses || {},
      hasActiveSchedule: parsedData.state.hasActiveSchedule || false,
      hasActiveWebhook: parsedData.state.hasActiveWebhook || false,
    }

    // Snapshot current state so any failure below can restore it atomically
    const previousWorkflowState = useWorkflowStore.getState()
    const previousSubBlockState = useSubBlockStore.getState()
    const rollback = () => {
      useWorkflowStore.setState(previousWorkflowState)
      useSubBlockStore.setState(previousSubBlockState)
    }

    try {
      // Update workflow state first
      useWorkflowStore.setState(newWorkflowState)

      // Update subblock values if provided
      if (parsedData.subBlockValues) {
        useSubBlockStore.setState((state: any) => ({
          workflowValues: {
            ...state.workflowValues,
            [activeWorkflowId]: parsedData.subBlockValues,
          },
        }))
      }
    } catch (error) {
      // Rollback state changes on any failure
      logger.error('State update failed, rolling back:', error)
      rollback()
      return failure([
        `State update failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
      ])
    }

    // Update workflow metadata if provided
    if (parsedData.workflow) {
      const { updateWorkflow } = useWorkflowRegistry.getState()
      const metadata = parsedData.workflow
      updateWorkflow(activeWorkflowId, {
        name: metadata.name,
        description: metadata.description,
        color: metadata.color,
      })
    }

    // Persist to the database. On failure, roll back the local stores too:
    // previously the UI kept the new state while the function reported
    // failure, leaving UI and DB inconsistent.
    try {
      const response = await fetch(`/api/workflows/${activeWorkflowId}/state`, {
        method: 'PUT',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(newWorkflowState),
      })

      if (!response.ok) {
        const errorData = await response.json()
        logger.error('Failed to save workflow state:', errorData.error)
        rollback()
        return failure([`Database save failed: ${errorData.error || 'Unknown error'}`])
      }
    } catch (error) {
      logger.error('Failed to save workflow state:', error)
      rollback()
      return failure([
        `Failed to save workflow state: ${error instanceof Error ? error.message : 'Unknown error'}`,
      ])
    }

    // Trigger auto layout now that the new state is live and persisted
    window.dispatchEvent(new CustomEvent('trigger-auto-layout'))

    return {
      success: true,
      errors: [],
      warnings: [],
      appliedOperations: 1, // One complete state replacement
    }
  } catch (error) {
    logger.error('Failed to apply workflow changes:', error)
    return failure([`Apply failed: ${error instanceof Error ? error.message : 'Unknown error'}`])
  }
}
/**
 * Preview what changes would be applied, without mutating anything.
 *
 * YAML always maps to one complete-replacement operation. JSON is parsed and
 * inspected: each recognized top-level section (blocks, edges, subblock
 * values, metadata) contributes one operation to the preview.
 *
 * @param content - Raw workflow text
 * @param format - Format of `content` ('yaml' | 'json')
 * @returns A human-readable summary plus a list of typed operations
 */
export function previewWorkflowDiff(
  content: string,
  format: EditorFormat
): {
  summary: string
  operations: Array<{
    type: string
    description: string
  }>
} {
  try {
    // YAML path is a fixed, single full-import preview.
    if (format === 'yaml') {
      return {
        summary: 'Complete workflow replacement from YAML',
        operations: [
          {
            type: 'complete_replacement',
            description: 'Replace entire workflow with YAML content',
          },
        ],
      }
    }

    // JSON path: an unparseable document yields an empty preview.
    let parsedData: any
    try {
      parsedData = JSON.parse(content)
    } catch {
      return {
        summary: 'Invalid JSON format',
        operations: [],
      }
    }

    const operations: Array<{ type: string; description: string }> = []
    const state = parsedData.state

    if (state?.blocks) {
      const blockCount = Object.keys(state.blocks).length
      operations.push({
        type: 'replace_blocks',
        description: `Replace workflow with ${blockCount} blocks`,
      })
    }
    if (state?.edges) {
      const edgeCount = state.edges.length
      operations.push({
        type: 'replace_edges',
        description: `Replace connections with ${edgeCount} edges`,
      })
    }
    if (parsedData.subBlockValues) {
      operations.push({
        type: 'replace_values',
        description: 'Replace all input values',
      })
    }
    if (parsedData.workflow) {
      operations.push({
        type: 'update_metadata',
        description: 'Update workflow metadata',
      })
    }

    return {
      summary: 'Complete workflow state replacement from JSON',
      operations,
    }
  } catch {
    return {
      summary: 'Error analyzing changes',
      operations: [],
    }
  }
}

View File

@@ -0,0 +1,125 @@
import { dump as yamlDump, load as yamlLoad } from 'js-yaml'
import { createLogger } from '@/lib/logs/console-logger'
import { generateWorkflowYaml } from '@/lib/workflows/yaml-generator'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { EditorFormat } from './workflow-text-editor'
const logger = createLogger('WorkflowExporter')
/**
 * Collect the current subblock values for every block in the active workflow,
 * organized as blockId -> subBlockId -> value. Blocks with no stored values
 * still get an (empty) entry, matching what the exporters expect.
 *
 * Values that are `undefined` in the subblock store are omitted.
 */
function getSubBlockValues(): Record<string, Record<string, any>> {
  const workflowState = useWorkflowStore.getState()
  const subBlockStore = useSubBlockStore.getState()

  const subBlockValues: Record<string, Record<string, any>> = {}
  // Use the entry value directly instead of re-indexing blocks by key.
  for (const [blockId, block] of Object.entries(workflowState.blocks)) {
    const values: Record<string, any> = {}
    for (const subBlockId of Object.keys((block as any).subBlocks || {})) {
      const value = subBlockStore.getValue(blockId, subBlockId)
      if (value !== undefined) {
        values[subBlockId] = value
      }
    }
    subBlockValues[blockId] = values
  }
  return subBlockValues
}
/**
* Generate full workflow data including metadata and state
*/
export function generateFullWorkflowData() {
const workflowState = useWorkflowStore.getState()
const { workflows, activeWorkflowId } = useWorkflowRegistry.getState()
const currentWorkflow = activeWorkflowId ? workflows[activeWorkflowId] : null
if (!currentWorkflow || !activeWorkflowId) {
throw new Error('No active workflow found')
}
const subBlockValues = getSubBlockValues()
return {
workflow: {
id: activeWorkflowId,
name: currentWorkflow.name,
description: currentWorkflow.description,
color: currentWorkflow.color,
workspaceId: currentWorkflow.workspaceId,
folderId: currentWorkflow.folderId,
},
state: {
blocks: workflowState.blocks,
edges: workflowState.edges,
loops: workflowState.loops,
parallels: workflowState.parallels,
},
subBlockValues,
exportedAt: new Date().toISOString(),
version: '1.0',
}
}
/**
* Export workflow in the specified format
*/
export function exportWorkflow(format: EditorFormat): string {
try {
if (format === 'yaml') {
// Use the existing YAML generator for condensed format
const workflowState = useWorkflowStore.getState()
const subBlockValues = getSubBlockValues()
return generateWorkflowYaml(workflowState, subBlockValues)
}
// Generate full JSON format
const fullData = generateFullWorkflowData()
return JSON.stringify(fullData, null, 2)
} catch (error) {
logger.error(`Failed to export workflow as ${format}:`, error)
throw error
}
}
/**
 * Parse workflow text into a plain object according to its format.
 * Parse errors from js-yaml / JSON.parse propagate to the caller.
 */
export function parseWorkflowContent(content: string, format: EditorFormat): any {
  return format === 'yaml' ? yamlLoad(content) : JSON.parse(content)
}
/**
 * Convert workflow text between YAML and JSON.
 *
 * Returns the input unchanged when source and target formats match.
 * Parse/dump failures are logged and rethrown.
 */
export function convertBetweenFormats(
  content: string,
  fromFormat: EditorFormat,
  toFormat: EditorFormat
): string {
  if (fromFormat === toFormat) {
    return content
  }

  try {
    const parsed = parseWorkflowContent(content, fromFormat)
    if (toFormat !== 'yaml') {
      return JSON.stringify(parsed, null, 2)
    }
    // Dump options mirror the exporter: 2-space indent, no line wrapping,
    // no anchors/refs, keep insertion order.
    return yamlDump(parsed, {
      indent: 2,
      lineWidth: -1,
      noRefs: true,
      sortKeys: false,
    })
  } catch (error) {
    logger.error(`Failed to convert from ${fromFormat} to ${toFormat}:`, error)
    throw error
  }
}

View File

@@ -0,0 +1,175 @@
'use client'
import { useCallback, useEffect, useState } from 'react'
import { FileCode } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
DialogTrigger,
} from '@/components/ui/dialog'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console-logger'
import { cn } from '@/lib/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { applyWorkflowDiff } from './workflow-applier'
import { exportWorkflow } from './workflow-exporter'
import { type EditorFormat, WorkflowTextEditor } from './workflow-text-editor'
const logger = createLogger('WorkflowTextEditorModal')
// Props for the "Edit as Text" trigger button + dialog.
interface WorkflowTextEditorModalProps {
  disabled?: boolean // Force-disable the trigger (it is also disabled when no workflow is active)
  className?: string // Extra classes merged onto the trigger button
}
/**
 * Modal wrapper around WorkflowTextEditor: a toolbar button that opens a
 * dialog where the active workflow can be edited as YAML or JSON text.
 *
 * On open (and on format change) the current workflow is exported into the
 * editor; on save, the edited text is applied via applyWorkflowDiff and the
 * editor content is refreshed from the resulting state.
 */
export function WorkflowTextEditorModal({
  disabled = false,
  className,
}: WorkflowTextEditorModalProps) {
  const [isOpen, setIsOpen] = useState(false)
  const [format, setFormat] = useState<EditorFormat>('yaml')
  const [initialContent, setInitialContent] = useState('')
  const [isLoading, setIsLoading] = useState(false)
  const { activeWorkflowId } = useWorkflowRegistry()

  // Load initial content when modal opens.
  // NOTE(review): this also re-runs when `format` changes, re-exporting from
  // store state and resetting the editor — any unsaved edits are discarded on
  // a format switch. Confirm this is intended.
  useEffect(() => {
    if (isOpen && activeWorkflowId) {
      setIsLoading(true)
      try {
        const content = exportWorkflow(format)
        setInitialContent(content)
      } catch (error) {
        logger.error('Failed to export workflow:', error)
        setInitialContent('# Error loading workflow content')
      } finally {
        setIsLoading(false)
      }
    }
  }, [isOpen, format, activeWorkflowId])

  // Handle format changes (triggers the effect above to re-export)
  const handleFormatChange = useCallback((newFormat: EditorFormat) => {
    setFormat(newFormat)
  }, [])

  // Handle save operation: apply the edited text, then refresh the editor
  // content from the (possibly remapped) applied state.
  const handleSave = useCallback(
    async (content: string, contentFormat: EditorFormat) => {
      if (!activeWorkflowId) {
        return { success: false, errors: ['No active workflow'] }
      }
      try {
        logger.info('Applying workflow changes from text editor', { format: contentFormat })
        // Apply changes using the simplified approach
        const applyResult = await applyWorkflowDiff(content, contentFormat)
        if (applyResult.success) {
          logger.info('Successfully applied workflow changes', {
            appliedOperations: applyResult.appliedOperations,
          })
          // Update initial content to reflect current state; a refresh failure
          // is logged but does not turn a successful save into an error.
          try {
            const updatedContent = exportWorkflow(contentFormat)
            setInitialContent(updatedContent)
          } catch (error) {
            logger.error('Failed to refresh content after save:', error)
          }
        }
        return {
          success: applyResult.success,
          errors: applyResult.errors,
          warnings: applyResult.warnings,
        }
      } catch (error) {
        logger.error('Failed to save workflow changes:', error)
        return {
          success: false,
          errors: [error instanceof Error ? error.message : 'Unknown error'],
        }
      }
    },
    [activeWorkflowId]
  )

  const handleOpenChange = useCallback((open: boolean) => {
    setIsOpen(open)
    if (!open) {
      // Reset state when closing
      setInitialContent('')
    }
  }, [])

  // The trigger is unusable when explicitly disabled or without a workflow.
  const isDisabled = disabled || !activeWorkflowId

  return (
    <Dialog open={isOpen} onOpenChange={handleOpenChange}>
      <Tooltip>
        <TooltipTrigger asChild>
          <DialogTrigger asChild>
            {/* Disabled state renders a non-interactive stand-in instead of a disabled Button */}
            {isDisabled ? (
              <div className='inline-flex h-10 w-10 cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-md font-medium text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
                <FileCode className='h-5 w-5' />
              </div>
            ) : (
              <Button
                variant='ghost'
                size='icon'
                className={cn('hover:text-foreground', className)}
              >
                <FileCode className='h-5 w-5' />
                <span className='sr-only'>Edit as Text</span>
              </Button>
            )}
          </DialogTrigger>
        </TooltipTrigger>
        <TooltipContent>
          {isDisabled
            ? disabled
              ? 'Text editor not available'
              : 'No active workflow'
            : 'Edit as Text'}
        </TooltipContent>
      </Tooltip>
      <DialogContent className='flex h-[85vh] w-[90vw] max-w-6xl flex-col p-0'>
        <DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
          <DialogTitle>Workflow Text Editor</DialogTitle>
          <DialogDescription>
            Edit your workflow as YAML or JSON. Changes will completely replace the current workflow
            when you save.
          </DialogDescription>
        </DialogHeader>
        <div className='flex-1 overflow-hidden'>
          {/* Spinner while the export effect is running */}
          {isLoading ? (
            <div className='flex h-full items-center justify-center'>
              <div className='text-center'>
                <div className='mx-auto mb-4 h-8 w-8 animate-spin rounded-full border-primary border-b-2' />
                <p className='text-muted-foreground'>Loading workflow content...</p>
              </div>
            </div>
          ) : (
            <WorkflowTextEditor
              initialValue={initialContent}
              format={format}
              onSave={handleSave}
              onFormatChange={handleFormatChange}
              disabled={isDisabled}
              className='h-full rounded-none border-0'
            />
          )}
        </div>
      </DialogContent>
    </Dialog>
  )
}

View File

@@ -0,0 +1,348 @@
'use client'
import { useCallback, useEffect, useState } from 'react'
import { dump as yamlDump, load as yamlParse } from 'js-yaml'
import { AlertCircle, Check, FileCode, Save } from 'lucide-react'
import { Alert, AlertDescription } from '@/components/ui/alert'
import { Button } from '@/components/ui/button'
import { Tabs, TabsList, TabsTrigger } from '@/components/ui/tabs'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console-logger'
import { cn } from '@/lib/utils'
import { CodeEditor } from '../workflow-block/components/sub-block/components/tool-input/components/code-editor/code-editor'
const logger = createLogger('WorkflowTextEditor')
// Text formats the editor can display, validate, and save.
export type EditorFormat = 'yaml' | 'json'

// One parse failure, with best-effort position info scraped from the
// parser's error message (see validateContent).
interface ValidationError {
  line?: number
  column?: number
  message: string
}

// Props for WorkflowTextEditor.
interface WorkflowTextEditorProps {
  initialValue: string // Initial document text, already in `format`
  format: EditorFormat // Format of the initial value
  // Persist handler; resolves with success plus optional errors/warnings
  // that are rendered in the alerts section.
  onSave: (
    content: string,
    format: EditorFormat
  ) => Promise<{ success: boolean; errors?: string[]; warnings?: string[] }>
  onFormatChange?: (format: EditorFormat) => void // Notified when the user switches format tabs
  className?: string
  disabled?: boolean // Disables editing, format switching, and saving
}
/**
 * Raw-text editor for a workflow with YAML/JSON tabs, live syntax validation,
 * on-the-fly format conversion, and a save button that reports the result of
 * the parent-supplied `onSave` handler.
 *
 * Content state is local; it is re-seeded whenever `initialValue` changes.
 */
export function WorkflowTextEditor({
  initialValue,
  format,
  onSave,
  onFormatChange,
  className,
  disabled = false,
}: WorkflowTextEditorProps) {
  const [content, setContent] = useState(initialValue)
  const [currentFormat, setCurrentFormat] = useState<EditorFormat>(format)
  const [validationErrors, setValidationErrors] = useState<ValidationError[]>([])
  const [isSaving, setIsSaving] = useState(false)
  const [saveResult, setSaveResult] = useState<{
    success: boolean
    errors?: string[]
    warnings?: string[]
  } | null>(null)
  const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false)

  // Validate content based on format: syntax-only (parseability), not schema.
  const validateContent = useCallback((text: string, fmt: EditorFormat): ValidationError[] => {
    const errors: ValidationError[] = []
    if (!text.trim()) {
      return errors // Empty content is valid
    }
    try {
      if (fmt === 'yaml') {
        yamlParse(text)
      } else if (fmt === 'json') {
        JSON.parse(text)
      }
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Parse error'
      // Extract line/column info if available
      // NOTE(review): position comes from regex-matching the error text, so it
      // depends on the parser's message wording — best-effort only.
      const lineMatch = errorMessage.match(/line (\d+)/i)
      const columnMatch = errorMessage.match(/column (\d+)/i)
      errors.push({
        line: lineMatch ? Number.parseInt(lineMatch[1], 10) : undefined,
        column: columnMatch ? Number.parseInt(columnMatch[1], 10) : undefined,
        message: errorMessage,
      })
    }
    return errors
  }, [])

  // Convert between formats; on a conversion failure the original text is
  // returned unchanged (validation will then flag it under the new format).
  const convertFormat = useCallback(
    (text: string, fromFormat: EditorFormat, toFormat: EditorFormat): string => {
      if (fromFormat === toFormat || !text.trim()) {
        return text
      }
      try {
        let parsed: any
        if (fromFormat === 'yaml') {
          parsed = yamlParse(text)
        } else {
          parsed = JSON.parse(text)
        }
        if (toFormat === 'yaml') {
          return yamlDump(parsed, {
            indent: 2,
            lineWidth: -1,
            noRefs: true,
          })
        }
        return JSON.stringify(parsed, null, 2)
      } catch (error) {
        logger.warn(`Failed to convert from ${fromFormat} to ${toFormat}:`, error)
        return text // Return original if conversion fails
      }
    },
    []
  )

  // Handle content changes: track dirtiness, re-validate, clear stale result.
  const handleContentChange = useCallback(
    (newContent: string) => {
      setContent(newContent)
      setHasUnsavedChanges(newContent !== initialValue)
      // Validate on change
      const errors = validateContent(newContent, currentFormat)
      setValidationErrors(errors)
      // Clear save result when editing
      setSaveResult(null)
    },
    [initialValue, currentFormat, validateContent]
  )

  // Handle format changes: convert the buffer, re-validate, notify parent.
  const handleFormatChange = useCallback(
    (newFormat: EditorFormat) => {
      if (newFormat === currentFormat) return
      // Convert content to new format
      const convertedContent = convertFormat(content, currentFormat, newFormat)
      setCurrentFormat(newFormat)
      setContent(convertedContent)
      // Validate converted content
      const errors = validateContent(convertedContent, newFormat)
      setValidationErrors(errors)
      // Notify parent
      onFormatChange?.(newFormat)
    },
    [content, currentFormat, convertFormat, validateContent, onFormatChange]
  )

  // Handle save: refuse while invalid, surface the handler's result.
  const handleSave = useCallback(async () => {
    if (validationErrors.length > 0) {
      logger.warn('Cannot save with validation errors')
      return
    }
    setIsSaving(true)
    setSaveResult(null)
    try {
      const result = await onSave(content, currentFormat)
      setSaveResult(result)
      if (result.success) {
        setHasUnsavedChanges(false)
        logger.info('Workflow successfully updated from text editor')
      } else {
        logger.error('Failed to save workflow:', result.errors)
      }
    } catch (error) {
      logger.error('Save failed with exception:', error)
      setSaveResult({
        success: false,
        errors: [error instanceof Error ? error.message : 'Unknown error'],
      })
    } finally {
      setIsSaving(false)
    }
  }, [content, currentFormat, validationErrors, onSave])

  // Update content when initialValue changes (e.g. after parent re-exports);
  // this discards local edits and resets dirtiness.
  useEffect(() => {
    setContent(initialValue)
    setHasUnsavedChanges(false)
    setSaveResult(null)
  }, [initialValue])

  // Validation status
  const isValid = validationErrors.length === 0
  const canSave = isValid && hasUnsavedChanges && !disabled

  // Get editor language for syntax highlighting
  const editorLanguage = currentFormat === 'yaml' ? 'javascript' : 'json' // yaml highlighting not available, use js

  return (
    <div className={cn('flex h-full flex-col bg-background', className)}>
      {/* Header with controls */}
      <div className='flex-shrink-0 border-b bg-background px-6 py-4'>
        <div className='mb-3 flex items-center justify-between'>
          <div className='flex items-center gap-2'>
            <FileCode className='h-5 w-5' />
            <span className='font-semibold'>Workflow Text Editor</span>
          </div>
          <div className='flex items-center gap-2'>
            {/* Format tabs (YAML/JSON) drive live conversion of the buffer */}
            <Tabs
              value={currentFormat}
              onValueChange={(value) => handleFormatChange(value as EditorFormat)}
            >
              <TabsList className='grid w-fit grid-cols-2'>
                <TabsTrigger value='yaml' disabled={disabled}>
                  YAML
                </TabsTrigger>
                <TabsTrigger value='json' disabled={disabled}>
                  JSON
                </TabsTrigger>
              </TabsList>
            </Tabs>
            <Tooltip>
              <TooltipTrigger asChild>
                <Button
                  onClick={handleSave}
                  disabled={!canSave || isSaving}
                  size='sm'
                  className='flex items-center gap-2'
                >
                  <Save className='h-4 w-4' />
                  {isSaving ? 'Saving...' : 'Save'}
                </Button>
              </TooltipTrigger>
              <TooltipContent>
                {!isValid
                  ? 'Fix validation errors to save'
                  : !hasUnsavedChanges
                    ? 'No changes to save'
                    : disabled
                      ? 'Editor is disabled'
                      : 'Save changes to workflow'}
              </TooltipContent>
            </Tooltip>
          </div>
        </div>
        {/* Status indicators */}
        <div className='flex items-center gap-2 text-sm'>
          {isValid ? (
            <div className='flex items-center gap-1 text-green-600'>
              <Check className='h-4 w-4' />
              Valid {currentFormat.toUpperCase()}
            </div>
          ) : (
            <div className='flex items-center gap-1 text-red-600'>
              <AlertCircle className='h-4 w-4' />
              {validationErrors.length} validation error{validationErrors.length !== 1 ? 's' : ''}
            </div>
          )}
          {hasUnsavedChanges && <div className='text-orange-600'> Unsaved changes</div>}
        </div>
      </div>
      {/* Alerts section - fixed height, scrollable if needed */}
      {(validationErrors.length > 0 || saveResult) && (
        <div className='scrollbar-thin scrollbar-thumb-muted-foreground/20 scrollbar-track-transparent max-h-32 flex-shrink-0 overflow-y-auto border-b bg-muted/20'>
          <div className='space-y-2 p-4'>
            {/* Validation errors */}
            {validationErrors.length > 0 && (
              <>
                {validationErrors.map((error, index) => (
                  <Alert key={index} variant='destructive' className='py-2'>
                    <AlertCircle className='h-4 w-4' />
                    <AlertDescription className='text-sm'>
                      {error.line && error.column
                        ? `Line ${error.line}, Column ${error.column}: ${error.message}`
                        : error.message}
                    </AlertDescription>
                  </Alert>
                ))}
              </>
            )}
            {/* Save result */}
            {saveResult && (
              <Alert variant={saveResult.success ? 'default' : 'destructive'} className='py-2'>
                {saveResult.success ? (
                  <Check className='h-4 w-4' />
                ) : (
                  <AlertCircle className='h-4 w-4' />
                )}
                <AlertDescription className='text-sm'>
                  {saveResult.success ? (
                    <>
                      Workflow updated successfully!
                      {saveResult.warnings && saveResult.warnings.length > 0 && (
                        <div className='mt-2'>
                          <strong>Warnings:</strong>
                          <ul className='mt-1 list-inside list-disc text-xs'>
                            {saveResult.warnings.map((warning, index) => (
                              <li key={index}>{warning}</li>
                            ))}
                          </ul>
                        </div>
                      )}
                    </>
                  ) : (
                    <>
                      Failed to update workflow:
                      {saveResult.errors && (
                        <ul className='mt-1 list-inside list-disc text-xs'>
                          {saveResult.errors.map((error, index) => (
                            <li key={index}>{error}</li>
                          ))}
                        </ul>
                      )}
                    </>
                  )}
                </AlertDescription>
              </Alert>
            )}
          </div>
        </div>
      )}
      {/* Code editor - takes remaining space */}
      <div className='min-h-0 flex-1 overflow-hidden'>
        <div className='h-full p-4'>
          <CodeEditor
            value={content}
            onChange={handleContentChange}
            language={editorLanguage}
            placeholder={`Enter ${currentFormat.toUpperCase()} workflow definition...`}
            className={cn(
              'h-full w-full overflow-auto rounded-md border',
              !isValid && 'border-red-500',
              hasUnsavedChanges && 'border-orange-500'
            )}
            minHeight='calc(100vh - 300px)'
            disabled={disabled}
          />
        </div>
      </div>
    </div>
  )
}

View File

@@ -1,16 +1,19 @@
'use client'
import { useState } from 'react'
import { useRef, useState } from 'react'
import { logger } from '@sentry/nextjs'
import { File, Folder, Plus } from 'lucide-react'
import { ChevronRight, File, Folder, Plus, Upload } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import { Separator } from '@/components/ui/separator'
import { cn } from '@/lib/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
import { useFolderStore } from '@/stores/folders/store'
import { ImportControls, type ImportControlsRef } from './import-controls'
interface CreateMenuProps {
onCreateWorkflow: (folderId?: string) => void
@@ -27,10 +30,15 @@ export function CreateMenu({
const [folderName, setFolderName] = useState('')
const [isCreating, setIsCreating] = useState(false)
const [isHoverOpen, setIsHoverOpen] = useState(false)
const [isImportSubmenuOpen, setIsImportSubmenuOpen] = useState(false)
const params = useParams()
const workspaceId = params.workspaceId as string
const { createFolder } = useFolderStore()
const userPermissions = useUserPermissionsContext()
// Ref for the file input that will be used by ImportControls
const importControlsRef = useRef<ImportControlsRef>(null)
const handleCreateWorkflow = () => {
setIsHoverOpen(false)
@@ -42,6 +50,13 @@ export function CreateMenu({
setShowFolderDialog(true)
}
const handleUploadYaml = () => {
setIsHoverOpen(false)
setIsImportSubmenuOpen(false)
// Trigger the file upload from ImportControls component
importControlsRef.current?.triggerFileUpload()
}
const handleFolderSubmit = async (e: React.FormEvent) => {
e.preventDefault()
if (!folderName.trim() || !workspaceId) return
@@ -99,7 +114,7 @@ export function CreateMenu({
'data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2',
'z-50 animate-in overflow-hidden rounded-md border bg-popover p-1 text-popover-foreground shadow-md',
'data-[state=closed]:animate-out',
'w-40'
'w-48'
)}
onMouseEnter={() => setIsHoverOpen(true)}
onMouseLeave={() => setIsHoverOpen(false)}
@@ -119,6 +134,7 @@ export function CreateMenu({
<File className='h-4 w-4' />
{isCreatingWorkflow ? 'Creating...' : 'New Workflow'}
</button>
<button
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
onClick={handleCreateFolder}
@@ -126,9 +142,61 @@ export function CreateMenu({
<Folder className='h-4 w-4' />
New Folder
</button>
{userPermissions.canEdit && (
<>
<Separator className='my-1' />
<Popover open={isImportSubmenuOpen} onOpenChange={setIsImportSubmenuOpen}>
<PopoverTrigger asChild>
<button
className='flex w-full cursor-default select-none items-center justify-between rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
onMouseEnter={() => setIsImportSubmenuOpen(true)}
>
<div className='flex items-center gap-2'>
<Upload className='h-4 w-4' />
<span>Import Workflow</span>
</div>
<ChevronRight className='h-3 w-3' />
</button>
</PopoverTrigger>
<PopoverContent
side='right'
align='start'
sideOffset={4}
className='w-48 p-1'
onMouseEnter={() => setIsImportSubmenuOpen(true)}
onMouseLeave={() => setIsImportSubmenuOpen(false)}
onOpenAutoFocus={(e) => e.preventDefault()}
onCloseAutoFocus={(e) => e.preventDefault()}
>
<button
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
onClick={handleUploadYaml}
>
<Upload className='h-4 w-4' />
<div className='flex flex-col items-start'>
<span>YAML</span>
<span className='text-muted-foreground text-xs'>.yaml or .yml</span>
</div>
</button>
</PopoverContent>
</Popover>
</>
)}
</PopoverContent>
</Popover>
{/* Import Controls Component - handles all import functionality */}
<ImportControls
ref={importControlsRef}
disabled={!userPermissions.canEdit}
onClose={() => {
setIsHoverOpen(false)
setIsImportSubmenuOpen(false)
}}
/>
{/* Folder creation dialog */}
<Dialog open={showFolderDialog} onOpenChange={setShowFolderDialog}>
<DialogContent className='sm:max-w-[425px]'>

View File

@@ -0,0 +1,283 @@
'use client'
import { forwardRef, useImperativeHandle, useRef, useState } from 'react'
import { AlertCircle, CheckCircle } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { Alert, AlertDescription } from '@/components/ui/alert'
import { Button } from '@/components/ui/button'
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from '@/components/ui/dialog'
import { Textarea } from '@/components/ui/textarea'
import { createLogger } from '@/lib/logs/console-logger'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { importWorkflowFromYaml, parseWorkflowYaml } from '@/stores/workflows/yaml/importer'
const logger = createLogger('ImportControls')
interface ImportControlsProps {
disabled?: boolean
onClose?: () => void
}
export interface ImportControlsRef {
triggerFileUpload: () => void
}
/**
 * Hidden controller component that owns the YAML workflow import flow:
 * a hidden file input (triggered imperatively via the exposed ref), a
 * review dialog, and the import itself (validate YAML -> create a new
 * workflow -> import blocks/edges into it -> navigate to it).
 */
export const ImportControls = forwardRef<ImportControlsRef, ImportControlsProps>(
  ({ disabled = false, onClose }, ref) => {
    const [isImporting, setIsImporting] = useState(false)
    const [showYamlDialog, setShowYamlDialog] = useState(false)
    const [yamlContent, setYamlContent] = useState('')
    // Outcome of the most recent import attempt; null until one has run.
    const [importResult, setImportResult] = useState<{
      success: boolean
      errors: string[]
      warnings: string[]
      summary?: string
    } | null>(null)
    const fileInputRef = useRef<HTMLInputElement>(null)
    const router = useRouter()
    const params = useParams()
    const workspaceId = params.workspaceId as string
    // Stores and hooks
    const { createWorkflow } = useWorkflowRegistry()
    const { collaborativeAddBlock, collaborativeAddEdge, collaborativeSetSubblockValue } =
      useCollaborativeWorkflow()
    // Expose methods to parent component
    useImperativeHandle(ref, () => ({
      triggerFileUpload: () => {
        fileInputRef.current?.click()
      },
    }))
    // Reads the selected .yaml/.yml file into the review dialog's textarea.
    const handleFileUpload = async (event: React.ChangeEvent<HTMLInputElement>) => {
      const file = event.target.files?.[0]
      if (!file) return
      try {
        const content = await file.text()
        setYamlContent(content)
        setShowYamlDialog(true)
        onClose?.()
      } catch (error) {
        logger.error('Failed to read file:', error)
        setImportResult({
          success: false,
          errors: [
            `Failed to read file: ${error instanceof Error ? error.message : 'Unknown error'}`,
          ],
          warnings: [],
        })
      }
      // Reset file input
      if (fileInputRef.current) {
        fileInputRef.current.value = ''
      }
    }
    // Validates the YAML, creates a fresh workflow, imports into it, then navigates.
    const handleYamlImport = async () => {
      if (!yamlContent.trim()) {
        setImportResult({
          success: false,
          errors: ['YAML content is required'],
          warnings: [],
        })
        return
      }
      setIsImporting(true)
      setImportResult(null)
      try {
        // First validate the YAML without importing
        const { data: yamlWorkflow, errors: parseErrors } = parseWorkflowYaml(yamlContent)
        if (!yamlWorkflow || parseErrors.length > 0) {
          setImportResult({
            success: false,
            errors: parseErrors,
            warnings: [],
          })
          return
        }
        // Create a new workflow
        const newWorkflowId = await createWorkflow({
          name: `Imported Workflow - ${new Date().toLocaleString()}`,
          description: 'Workflow imported from YAML',
          workspaceId,
        })
        // Import the YAML into the new workflow BEFORE navigation (creates complete state and saves directly to DB)
        // This avoids timing issues with workflow reload during navigation
        const result = await importWorkflowFromYaml(
          yamlContent,
          {
            addBlock: collaborativeAddBlock,
            addEdge: collaborativeAddEdge,
            applyAutoLayout: () => {
              // Trigger auto layout
              window.dispatchEvent(new CustomEvent('trigger-auto-layout'))
            },
            setSubBlockValue: (blockId: string, subBlockId: string, value: unknown) => {
              // Use the collaborative function - the same one called when users type into fields
              collaborativeSetSubblockValue(blockId, subBlockId, value)
            },
            getExistingBlocks: () => {
              // For a new workflow, we'll get the starter block from the server
              return {}
            },
          },
          newWorkflowId
        ) // Pass the new workflow ID to import into
        // Navigate to the new workflow AFTER import is complete
        if (result.success) {
          logger.info('Navigating to imported workflow')
          router.push(`/workspace/${workspaceId}/w/${newWorkflowId}`)
        }
        setImportResult(result)
        if (result.success) {
          setYamlContent('')
          setShowYamlDialog(false)
          logger.info('YAML import completed successfully')
        }
      } catch (error) {
        logger.error('Failed to import YAML workflow:', error)
        setImportResult({
          success: false,
          errors: [`Import failed: ${error instanceof Error ? error.message : 'Unknown error'}`],
          warnings: [],
        })
      } finally {
        setIsImporting(false)
      }
    }
    // NOTE(review): isDisabled appears unused below — the JSX gates on
    // `isImporting` directly. Confirm before removing.
    const isDisabled = disabled || isImporting
    return (
      <>
        {/* Hidden file input */}
        <input
          ref={fileInputRef}
          type='file'
          accept='.yaml,.yml'
          onChange={handleFileUpload}
          className='hidden'
        />
        {/* YAML Import Dialog */}
        <Dialog open={showYamlDialog} onOpenChange={setShowYamlDialog}>
          <DialogContent className='flex max-h-[80vh] max-w-4xl flex-col'>
            <DialogHeader>
              <DialogTitle>Import Workflow from YAML</DialogTitle>
              <DialogDescription>
                Review the YAML content below and click "Import Workflow" to create a new workflow
                with the blocks and connections defined in the YAML.
              </DialogDescription>
            </DialogHeader>
            <div className='flex-1 space-y-4 overflow-hidden'>
              <Textarea
                placeholder={`version: "1.0"
blocks:
  start:
    type: "starter"
    name: "Start"
    inputs:
      startWorkflow: "manual"
    following:
      - "process"
  process:
    type: "agent"
    name: "Process Data"
    inputs:
      systemPrompt: "You are a helpful assistant"
      userPrompt: "Process the data"
      model: "gpt-4"
    preceding:
      - "start"`}
                value={yamlContent}
                onChange={(e) => setYamlContent(e.target.value)}
                className='min-h-[300px] font-mono text-sm'
                disabled={isImporting}
              />
              {/* Import Result */}
              {importResult && (
                <div className='space-y-2'>
                  {importResult.success ? (
                    <Alert>
                      <CheckCircle className='h-4 w-4' />
                      <AlertDescription>
                        <div className='font-medium text-green-700'>Import Successful!</div>
                        {importResult.summary && (
                          <div className='mt-1 text-sm'>{importResult.summary}</div>
                        )}
                        {importResult.warnings.length > 0 && (
                          <div className='mt-2'>
                            <div className='font-medium text-sm'>Warnings:</div>
                            <ul className='mt-1 space-y-1 text-sm'>
                              {importResult.warnings.map((warning, index) => (
                                <li key={index} className='text-yellow-700'>
                                  {warning}
                                </li>
                              ))}
                            </ul>
                          </div>
                        )}
                      </AlertDescription>
                    </Alert>
                  ) : (
                    <Alert variant='destructive'>
                      <AlertCircle className='h-4 w-4' />
                      <AlertDescription>
                        <div className='font-medium'>Import Failed</div>
                        {importResult.errors.length > 0 && (
                          <ul className='mt-2 space-y-1 text-sm'>
                            {importResult.errors.map((error, index) => (
                              <li key={index}> {error}</li>
                            ))}
                          </ul>
                        )}
                      </AlertDescription>
                    </Alert>
                  )}
                </div>
              )}
            </div>
            <DialogFooter>
              <Button
                variant='outline'
                onClick={() => setShowYamlDialog(false)}
                disabled={isImporting}
              >
                Cancel
              </Button>
              <Button onClick={handleYamlImport} disabled={isImporting || !yamlContent.trim()}>
                {isImporting ? 'Importing...' : 'Import Workflow'}
              </Button>
            </DialogFooter>
          </DialogContent>
        </Dialog>
      </>
    )
  }
)
ImportControls.displayName = 'ImportControls'

View File

@@ -0,0 +1,37 @@
-- Documentation chunk embeddings powering the copilot docs RAG search.
-- One row per chunk of a docs page, with its 1536-dim embedding and a
-- generated tsvector for full-text search.
CREATE TABLE "docs_embeddings" (
	"chunk_id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
	"chunk_text" text NOT NULL,
	"source_document" text NOT NULL,
	"source_link" text NOT NULL,
	"header_text" text NOT NULL,
	"header_level" integer NOT NULL,
	"token_count" integer NOT NULL,
	"embedding" vector(1536) NOT NULL,
	"embedding_model" text DEFAULT 'text-embedding-3-small' NOT NULL,
	"metadata" jsonb DEFAULT '{}' NOT NULL,
	"chunk_text_tsv" "tsvector" GENERATED ALWAYS AS (to_tsvector('english', "docs_embeddings"."chunk_text")) STORED,
	"created_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp DEFAULT now() NOT NULL,
	CONSTRAINT "docs_embedding_not_null_check" CHECK ("embedding" IS NOT NULL),
	CONSTRAINT "docs_header_level_check" CHECK ("header_level" >= 1 AND "header_level" <= 6)
);
--> statement-breakpoint
-- B-tree indexes for filtering/ordering on source, header level, model, recency.
CREATE INDEX "docs_emb_source_document_idx" ON "docs_embeddings" USING btree ("source_document");--> statement-breakpoint
CREATE INDEX "docs_emb_header_level_idx" ON "docs_embeddings" USING btree ("header_level");--> statement-breakpoint
CREATE INDEX "docs_emb_source_header_idx" ON "docs_embeddings" USING btree ("source_document","header_level");--> statement-breakpoint
CREATE INDEX "docs_emb_model_idx" ON "docs_embeddings" USING btree ("embedding_model");--> statement-breakpoint
CREATE INDEX "docs_emb_created_at_idx" ON "docs_embeddings" USING btree ("created_at");--> statement-breakpoint
-- pgvector HNSW index for cosine-similarity nearest-neighbor search.
CREATE INDEX "docs_embedding_vector_hnsw_idx" ON "docs_embeddings" USING hnsw ("embedding" vector_cosine_ops) WITH (m=16,ef_construction=64);--> statement-breakpoint
-- GIN indexes: JSONB metadata filtering and full-text search on the tsvector.
CREATE INDEX "docs_emb_metadata_gin_idx" ON "docs_embeddings" USING gin ("metadata");--> statement-breakpoint
CREATE INDEX "docs_emb_chunk_text_fts_idx" ON "docs_embeddings" USING gin ("chunk_text_tsv");--> statement-breakpoint
-- Keeps updated_at current on every row UPDATE.
CREATE OR REPLACE FUNCTION trigger_set_timestamp()
RETURNS TRIGGER AS $$
BEGIN
  NEW.updated_at = NOW();
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;--> statement-breakpoint
CREATE TRIGGER set_updated_at
BEFORE UPDATE ON docs_embeddings
FOR EACH ROW
EXECUTE FUNCTION trigger_set_timestamp();

View File

@@ -0,0 +1,18 @@
-- Copilot chat sessions: one row per chat, scoped to a user and a workflow.
-- Messages are stored denormalized as a JSONB array.
CREATE TABLE "copilot_chats" (
	"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
	"user_id" text NOT NULL,
	"workflow_id" text NOT NULL,
	"title" text,
	"messages" jsonb DEFAULT '[]' NOT NULL,
	"model" text DEFAULT 'claude-3-7-sonnet-latest' NOT NULL,
	"created_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
-- Cascading FKs: deleting a user or workflow removes its chats.
ALTER TABLE "copilot_chats" ADD CONSTRAINT "copilot_chats_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "copilot_chats" ADD CONSTRAINT "copilot_chats_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
-- Lookup indexes for the primary access patterns (by user, workflow, both).
CREATE INDEX "copilot_chats_user_id_idx" ON "copilot_chats" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX "copilot_chats_workflow_id_idx" ON "copilot_chats" USING btree ("workflow_id");--> statement-breakpoint
CREATE INDEX "copilot_chats_user_workflow_idx" ON "copilot_chats" USING btree ("user_id","workflow_id");--> statement-breakpoint
-- Ordering indexes for listing chats by recency.
CREATE INDEX "copilot_chats_created_at_idx" ON "copilot_chats" USING btree ("created_at");--> statement-breakpoint
CREATE INDEX "copilot_chats_updated_at_idx" ON "copilot_chats" USING btree ("updated_at");

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -351,6 +351,20 @@
"when": 1751659528896,
"tag": "0050_big_mattie_franklin",
"breakpoints": true
},
{
"idx": 51,
"version": "7",
"when": 1752014976338,
"tag": "0051_typical_expediter",
"breakpoints": true
},
{
"idx": 52,
"version": "7",
"when": 1752019053066,
"tag": "0052_fluffy_shinobi_shaw",
"breakpoints": true
}
]
}

View File

@@ -13,6 +13,7 @@ import {
text,
timestamp,
uniqueIndex,
uuid,
vector,
} from 'drizzle-orm/pg-core'
@@ -909,3 +910,100 @@ export const embedding = pgTable(
embeddingNotNullCheck: check('embedding_not_null_check', sql`"embedding" IS NOT NULL`),
})
)
/**
 * Documentation chunk embeddings backing the copilot docs RAG search.
 * One row per chunk: the text, its source document/link/header location,
 * a 1536-dim embedding (text-embedding-3-small by default), and a
 * generated tsvector column for full-text search.
 */
export const docsEmbeddings = pgTable(
  'docs_embeddings',
  {
    chunkId: uuid('chunk_id').primaryKey().defaultRandom(),
    chunkText: text('chunk_text').notNull(),
    sourceDocument: text('source_document').notNull(),
    sourceLink: text('source_link').notNull(),
    headerText: text('header_text').notNull(),
    headerLevel: integer('header_level').notNull(),
    tokenCount: integer('token_count').notNull(),
    // Vector embedding - optimized for text-embedding-3-small with HNSW support
    embedding: vector('embedding', { dimensions: 1536 }).notNull(),
    embeddingModel: text('embedding_model').notNull().default('text-embedding-3-small'),
    // Metadata for flexible filtering
    metadata: jsonb('metadata').notNull().default('{}'),
    // Full-text search support - generated tsvector column
    chunkTextTsv: tsvector('chunk_text_tsv').generatedAlwaysAs(
      (): SQL => sql`to_tsvector('english', ${docsEmbeddings.chunkText})`
    ),
    // Timestamps
    createdAt: timestamp('created_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
  },
  (table) => ({
    // Source document queries
    sourceDocumentIdx: index('docs_emb_source_document_idx').on(table.sourceDocument),
    // Header level filtering
    headerLevelIdx: index('docs_emb_header_level_idx').on(table.headerLevel),
    // Combined source and header queries
    sourceHeaderIdx: index('docs_emb_source_header_idx').on(
      table.sourceDocument,
      table.headerLevel
    ),
    // Model-specific queries
    modelIdx: index('docs_emb_model_idx').on(table.embeddingModel),
    // Timestamp queries
    createdAtIdx: index('docs_emb_created_at_idx').on(table.createdAt),
    // Vector similarity search indexes (HNSW) - optimized for documentation embeddings
    embeddingVectorHnswIdx: index('docs_embedding_vector_hnsw_idx')
      .using('hnsw', table.embedding.op('vector_cosine_ops'))
      .with({
        m: 16,
        ef_construction: 64,
      }),
    // GIN index for JSONB metadata queries
    metadataGinIdx: index('docs_emb_metadata_gin_idx').using('gin', table.metadata),
    // Full-text search index
    chunkTextFtsIdx: index('docs_emb_chunk_text_fts_idx').using('gin', table.chunkTextTsv),
    // Constraints
    embeddingNotNullCheck: check('docs_embedding_not_null_check', sql`"embedding" IS NOT NULL`),
    headerLevelCheck: check(
      'docs_header_level_check',
      sql`"header_level" >= 1 AND "header_level" <= 6`
    ),
  })
)
/**
 * Copilot chat sessions. Each chat belongs to one user and one workflow
 * (both FKs cascade on delete); the message history is stored denormalized
 * as a JSONB array in `messages`.
 */
export const copilotChats = pgTable(
  'copilot_chats',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    userId: text('user_id')
      .notNull()
      .references(() => user.id, { onDelete: 'cascade' }),
    workflowId: text('workflow_id')
      .notNull()
      .references(() => workflow.id, { onDelete: 'cascade' }),
    title: text('title'),
    messages: jsonb('messages').notNull().default('[]'),
    model: text('model').notNull().default('claude-3-7-sonnet-latest'),
    createdAt: timestamp('created_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
  },
  (table) => ({
    // Primary access patterns
    userIdIdx: index('copilot_chats_user_id_idx').on(table.userId),
    workflowIdIdx: index('copilot_chats_workflow_id_idx').on(table.workflowId),
    userWorkflowIdx: index('copilot_chats_user_workflow_idx').on(table.userId, table.workflowId),
    // Ordering indexes
    createdAtIdx: index('copilot_chats_created_at_idx').on(table.createdAt),
    updatedAtIdx: index('copilot_chats_updated_at_idx').on(table.updatedAt),
  })
)

447
apps/sim/lib/copilot-api.ts Normal file
View File

@@ -0,0 +1,447 @@
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('CopilotAPI')
/**
 * Message interface for copilot conversations
 */
export interface CopilotMessage {
  id: string
  // Author of the message within the conversation
  role: 'user' | 'assistant' | 'system'
  content: string
  // Timestamp string as returned by the server — format not enforced here
  timestamp: string
  // Documentation sources cited by an assistant message, if any
  citations?: Array<{
    id: number
    title: string
    url: string
    similarity?: number
  }>
}
/**
 * Chat interface for copilot conversations
 */
export interface CopilotChat {
  id: string
  // Null until a title has been generated or assigned
  title: string | null
  model: string
  messages: CopilotMessage[]
  messageCount: number
  createdAt: Date
  updatedAt: Date
}
/**
 * Request interface for sending messages
 */
export interface SendMessageRequest {
  message: string
  // Continue an existing chat when provided
  chatId?: string
  workflowId?: string
  // Create a new chat for this message instead of continuing one
  createNewChat?: boolean
  // Request a streaming response
  stream?: boolean
}
/**
 * Request interface for docs queries
 */
export interface DocsQueryRequest {
  query: string
  // Number of documentation chunks to retrieve
  topK?: number
  provider?: string
  model?: string
  stream?: boolean
  chatId?: string
  workflowId?: string
  createNewChat?: boolean
}
/**
 * Create a new copilot chat for a workflow.
 *
 * @param workflowId - ID of the workflow the chat belongs to
 * @param options - Optional title and initial message for the new chat
 * @returns Result object; `chat` is set on success, `error` on failure
 */
export async function createChat(
  workflowId: string,
  options: {
    title?: string
    initialMessage?: string
  } = {}
): Promise<{
  success: boolean
  chat?: CopilotChat
  error?: string
}> {
  try {
    const response = await fetch('/api/copilot', {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        workflowId,
        ...options,
      }),
    })
    if (!response.ok) {
      // Error responses may not be JSON (e.g. HTML error pages from a
      // proxy), so parse defensively and fall back to the status text
      // instead of letting response.json() throw a parse error.
      let errorMessage = 'Failed to create chat'
      try {
        const errorData = await response.json()
        errorMessage = errorData.error || errorMessage
      } catch {
        errorMessage = response.statusText || errorMessage
      }
      throw new Error(errorMessage)
    }
    const data = await response.json()
    return {
      success: true,
      chat: data.chat,
    }
  } catch (error) {
    logger.error('Failed to create chat:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * List chats for a specific workflow.
 *
 * @param workflowId - Workflow whose chats are listed
 * @param options - Optional pagination (limit defaults to 50, offset to 0)
 * @returns Result object; `chats` is always an array (empty on failure)
 */
export async function listChats(
  workflowId: string,
  options: {
    limit?: number
    offset?: number
  } = {}
): Promise<{
  success: boolean
  chats: CopilotChat[]
  error?: string
}> {
  try {
    // Use ?? so an explicit limit/offset of 0 is honored rather than
    // being replaced by the default.
    const params = new URLSearchParams({
      workflowId,
      limit: (options.limit ?? 50).toString(),
      offset: (options.offset ?? 0).toString(),
    })
    const response = await fetch(`/api/copilot?${params}`)
    if (!response.ok) {
      // Error responses may not be JSON; fall back to the status text.
      let errorMessage = 'Failed to list chats'
      try {
        const errorData = await response.json()
        errorMessage = errorData.error || errorMessage
      } catch {
        errorMessage = response.statusText || errorMessage
      }
      throw new Error(errorMessage)
    }
    const data = await response.json()
    return {
      success: true,
      chats: data.chats || [],
    }
  } catch (error) {
    logger.error('Failed to list chats:', error)
    return {
      success: false,
      chats: [],
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * Get a specific chat with full message history.
 *
 * @param chatId - ID of the chat to fetch
 * @returns Result object; `chat` is set on success, `error` on failure
 */
export async function getChat(chatId: string): Promise<{
  success: boolean
  chat?: CopilotChat
  error?: string
}> {
  try {
    // Encode the ID so unusual characters can't corrupt the query string.
    const response = await fetch(`/api/copilot?chatId=${encodeURIComponent(chatId)}`)
    if (!response.ok) {
      // Error responses may not be JSON; fall back to the status text.
      let errorMessage = 'Failed to get chat'
      try {
        const errorData = await response.json()
        errorMessage = errorData.error || errorMessage
      } catch {
        errorMessage = response.statusText || errorMessage
      }
      throw new Error(errorMessage)
    }
    const data = await response.json()
    return {
      success: true,
      chat: data.chat,
    }
  } catch (error) {
    logger.error('Failed to get chat:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * Update a chat with a new message list (full replacement).
 *
 * @param chatId - ID of the chat to update
 * @param messages - Complete message array to persist
 * @returns Result object; `chat` is the updated chat on success
 */
export async function updateChatMessages(
  chatId: string,
  messages: CopilotMessage[]
): Promise<{
  success: boolean
  chat?: CopilotChat
  error?: string
}> {
  try {
    const response = await fetch('/api/copilot', {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        chatId,
        messages,
      }),
    })
    if (!response.ok) {
      // Error responses may not be JSON; fall back to the status text.
      let errorMessage = 'Failed to update chat'
      try {
        const errorData = await response.json()
        errorMessage = errorData.error || errorMessage
      } catch {
        errorMessage = response.statusText || errorMessage
      }
      throw new Error(errorMessage)
    }
    const data = await response.json()
    return {
      success: true,
      chat: data.chat,
    }
  } catch (error) {
    logger.error('Failed to update chat messages:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * Delete a chat.
 *
 * @param chatId - ID of the chat to delete
 * @returns Result object; `error` is set on failure
 */
export async function deleteChat(chatId: string): Promise<{
  success: boolean
  error?: string
}> {
  try {
    // Encode the ID so unusual characters can't corrupt the query string.
    const response = await fetch(`/api/copilot?chatId=${encodeURIComponent(chatId)}`, {
      method: 'DELETE',
    })
    if (!response.ok) {
      // Error responses may not be JSON; fall back to the status text.
      let errorMessage = 'Failed to delete chat'
      try {
        const errorData = await response.json()
        errorMessage = errorData.error || errorMessage
      } catch {
        errorMessage = response.statusText || errorMessage
      }
      throw new Error(errorMessage)
    }
    // Don't parse the success body: a successful DELETE may return an
    // empty/non-JSON body, and the previous unconditional json() call
    // would have reported a spurious failure in that case.
    return { success: true }
  } catch (error) {
    logger.error('Failed to delete chat:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * Send a message using the unified copilot API.
 *
 * @param request - The message plus optional chat/workflow context
 * @returns The assistant response text, the chat ID, and any citations
 */
export async function sendMessage(request: SendMessageRequest): Promise<{
  success: boolean
  response?: string
  chatId?: string
  citations?: Array<{
    id: number
    title: string
    url: string
    similarity?: number
  }>
  error?: string
}> {
  try {
    const response = await fetch('/api/copilot', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(request),
    })
    if (!response.ok) {
      // Error responses may not be JSON; fall back to the status text.
      let errorMessage = 'Failed to send message'
      try {
        const errorData = await response.json()
        errorMessage = errorData.error || errorMessage
      } catch {
        errorMessage = response.statusText || errorMessage
      }
      throw new Error(errorMessage)
    }
    const data = await response.json()
    return {
      success: true,
      response: data.response,
      chatId: data.chatId,
      citations: data.citations,
    }
  } catch (error) {
    logger.error('Failed to send message:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * Send a streaming message using the unified copilot API.
 * Returns the raw response body; the caller consumes the ReadableStream.
 */
export async function sendStreamingMessage(request: SendMessageRequest): Promise<{
  success: boolean
  stream?: ReadableStream
  chatId?: string
  error?: string
}> {
  try {
    logger.debug('Sending streaming message request:', {
      message: request.message,
      stream: true,
      hasWorkflowId: !!request.workflowId,
    })
    const res = await fetch('/api/copilot', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ ...request, stream: true }),
    })
    logger.debug('Fetch response received:', {
      ok: res.ok,
      status: res.status,
      statusText: res.statusText,
      hasBody: !!res.body,
      contentType: res.headers.get('content-type'),
    })
    if (!res.ok) {
      // Build the best error message we can: prefer a JSON error field,
      // then the HTTP status text, then a generic default.
      let message = 'Failed to send streaming message'
      try {
        const payload = await res.json()
        logger.error('Error response:', payload)
        message = payload.error || message
      } catch {
        // Response is not JSON, use status text or default message
        logger.error('Non-JSON error response:', res.statusText)
        message = res.statusText || message
      }
      throw new Error(message)
    }
    if (!res.body) {
      logger.error('No response body received')
      throw new Error('No response body received')
    }
    logger.debug('Successfully received stream')
    return { success: true, stream: res.body }
  } catch (error) {
    logger.error('Failed to send streaming message:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * Send a message using the docs RAG API with chat context.
 *
 * @param request - The docs query plus optional chat/workflow context
 * @returns The response text, the chat ID, and the documentation sources used
 */
export async function sendDocsMessage(request: DocsQueryRequest): Promise<{
  success: boolean
  response?: string
  chatId?: string
  sources?: Array<{
    title: string
    document: string
    link: string
    similarity: number
  }>
  error?: string
}> {
  try {
    const response = await fetch('/api/copilot/docs', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(request),
    })
    if (!response.ok) {
      // Error responses may not be JSON; fall back to the status text.
      let errorMessage = 'Failed to send message'
      try {
        const errorData = await response.json()
        errorMessage = errorData.error || errorMessage
      } catch {
        errorMessage = response.statusText || errorMessage
      }
      throw new Error(errorMessage)
    }
    const data = await response.json()
    return {
      success: true,
      response: data.response,
      chatId: data.chatId,
      sources: data.sources,
    }
  } catch (error) {
    logger.error('Failed to send docs message:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * Send a streaming docs (RAG) message.
 * Returns the raw response body; the caller consumes the ReadableStream.
 */
export async function sendStreamingDocsMessage(request: DocsQueryRequest): Promise<{
  success: boolean
  stream?: ReadableStream
  chatId?: string
  error?: string
}> {
  try {
    logger.debug('sendStreamingDocsMessage called with:', request)
    const res = await fetch('/api/copilot/docs', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ ...request, stream: true }),
    })
    logger.debug('Fetch response received:', {
      status: res.status,
      statusText: res.statusText,
      headers: Object.fromEntries(res.headers.entries()),
      ok: res.ok,
      hasBody: !!res.body,
    })
    if (!res.ok) {
      // Prefer a JSON error field, then the HTTP status text, then a default.
      let message = 'Failed to send streaming docs message'
      try {
        const payload = await res.json()
        logger.error('API error response:', payload)
        message = payload.error || message
      } catch {
        // Response is not JSON, use status text or default message
        logger.error('Non-JSON error response:', res.statusText)
        message = res.statusText || message
      }
      throw new Error(message)
    }
    if (!res.body) {
      logger.error('No response body received')
      throw new Error('No response body received')
    }
    logger.debug('Returning successful result with stream')
    return { success: true, stream: res.body }
  } catch (error) {
    logger.error('Failed to send streaming docs message:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}

View File

@@ -0,0 +1,362 @@
import { createLogger } from '@/lib/logs/console-logger'
import { getProviderDefaultModel } from '@/providers/models'
import type { ProviderId } from '@/providers/types'
const logger = createLogger('CopilotConfig')
/**
* Valid provider IDs for validation
*/
const VALID_PROVIDER_IDS: ProviderId[] = [
'openai',
'azure-openai',
'anthropic',
'google',
'deepseek',
'xai',
'cerebras',
'groq',
'ollama',
]
/**
* Validate and return a ProviderId if valid, otherwise return null
*/
function validateProviderId(value: string | undefined): ProviderId | null {
if (!value) return null
return VALID_PROVIDER_IDS.includes(value as ProviderId) ? (value as ProviderId) : null
}
/**
* Safely parse a float from environment variable with validation
*/
function parseFloatEnv(value: string | undefined, name: string): number | null {
if (!value) return null
const parsed = Number.parseFloat(value)
if (Number.isNaN(parsed)) {
logger.warn(`Invalid ${name}: ${value}. Expected a valid number.`)
return null
}
return parsed
}
/**
* Safely parse an integer from environment variable with validation
*/
function parseIntEnv(value: string | undefined, name: string): number | null {
if (!value) return null
const parsed = Number.parseInt(value, 10)
if (Number.isNaN(parsed)) {
logger.warn(`Invalid ${name}: ${value}. Expected a valid integer.`)
return null
}
return parsed
}
/**
* Copilot configuration interface
*/
export interface CopilotConfig {
// Chat LLM configuration
chat: {
defaultProvider: ProviderId
defaultModel: string
temperature: number
maxTokens: number
systemPrompt: string
}
// RAG (documentation search) LLM configuration
rag: {
defaultProvider: ProviderId
defaultModel: string
temperature: number
maxTokens: number
embeddingModel: string
maxSources: number
similarityThreshold: number
}
// General configuration
general: {
streamingEnabled: boolean
maxConversationHistory: number
titleGenerationModel: string // Lighter model for generating chat titles
}
}
/**
* Default copilot configuration
* Uses Claude 4 Sonnet as requested
*/
export const DEFAULT_COPILOT_CONFIG: CopilotConfig = {
chat: {
defaultProvider: 'anthropic',
defaultModel: 'claude-3-7-sonnet-latest',
temperature: 0.1,
maxTokens: 4000,
systemPrompt: `You are a helpful AI assistant for Sim Studio, a powerful workflow automation platform. You can help users with questions about:
- Creating and managing workflows
- Using different tools and blocks
- Understanding features and capabilities
- Troubleshooting issues
- Best practices
IMPORTANT DISTINCTION - Two types of information:
1. **USER'S SPECIFIC WORKFLOW**: Use "Get User's Specific Workflow" tool when users ask about "my workflow", "this workflow", "what I have built", or "my current blocks"
2. **GENERAL SIM STUDIO CAPABILITIES**: Use documentation search for general questions about what's possible, how features work, or "what blocks are available"
WHEN TO USE WORKFLOW TOOL:
- "What does my workflow do?"
- "What blocks do I have?"
- "How is my workflow configured?"
- "Show me my current setup"
- "What's in this workflow?"
- "How do I add [X] to my workflow?" - ALWAYS get their workflow first to give specific advice
- "How can I improve my workflow?"
- "What's missing from my workflow?"
- "How do I connect [X] in my workflow?"
WHEN TO SEARCH DOCUMENTATION:
- "What blocks are available in Sim Studio?"
- "How do I use the Gmail block?"
- "What features does Sim Studio have?"
- "How do I create a workflow?"
WHEN NOT TO SEARCH:
- Simple greetings or casual conversation
- General programming questions unrelated to Sim Studio
- Thank you messages or small talk
DOCUMENTATION SEARCH REQUIREMENT:
Whenever you use the "Search Documentation" tool, you MUST:
1. Include citations for ALL facts and information from the search results
2. Link to relevant documentation pages using the exact URLs provided
3. Never provide documentation-based information without proper citations
4. Acknowledge the sources that helped answer the user's question
CITATION FORMAT:
MANDATORY: Whenever you use the documentation search tool, you MUST include citations in your response:
- Include direct links using markdown format: [link text](URL)
- Use descriptive link text (e.g., "workflow documentation" not "here")
- Place links naturally in context, not clustered at the end
- Cite ALL sources that contributed to your answer - don't cherry-pick
- When mentioning specific features, tools, or concepts from docs, ALWAYS link to the relevant documentation
- Add citations immediately after stating facts or information from the documentation
- IMPORTANT: Only cite each source ONCE per response - do not repeat the same URL multiple times
WORKFLOW-SPECIFIC GUIDANCE:
When users ask "How do I..." questions about their workflow:
1. **ALWAYS get their workflow first** using the workflow tool
2. **Analyze their current setup** - what blocks they have, how they're connected
3. **Give specific, actionable steps** based on their actual configuration
4. **Reference their actual block names** and current values
5. **Provide concrete next steps** they can take immediately
Example approach:
- User: "How do I add error handling to my workflow?"
- You: [Get their workflow] → "I can see your workflow has a Starter block connected to an Agent block, then an API block. Here's how to add error handling specifically for your setup: 1) Add a Condition block after your API block to check if the response was successful, 2) Connect the 'false' path to a new Agent block that handles the error..."
IMPORTANT: Always be clear about whether you're talking about the user's specific workflow or general Sim Studio capabilities. When showing workflow data, explicitly state "In your current workflow..." or "Your workflow contains..." Be actionable and specific - don't give generic advice when you can see their actual setup.`,
},
rag: {
defaultProvider: 'anthropic',
defaultModel: 'claude-3-7-sonnet-latest',
temperature: 0.1,
maxTokens: 2000,
embeddingModel: 'text-embedding-3-small',
maxSources: 10,
similarityThreshold: 0.3,
},
general: {
streamingEnabled: true,
maxConversationHistory: 10,
titleGenerationModel: 'claude-3-haiku-20240307', // Faster model for titles
},
}
/**
* Get copilot configuration with environment variable overrides
*/
export function getCopilotConfig(): CopilotConfig {
const config = { ...DEFAULT_COPILOT_CONFIG }
// Allow environment variable overrides
try {
// Chat configuration overrides
const chatProvider = validateProviderId(process.env.COPILOT_CHAT_PROVIDER)
if (chatProvider) {
config.chat.defaultProvider = chatProvider
} else if (process.env.COPILOT_CHAT_PROVIDER) {
logger.warn(
`Invalid COPILOT_CHAT_PROVIDER: ${process.env.COPILOT_CHAT_PROVIDER}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`
)
}
if (process.env.COPILOT_CHAT_MODEL) {
config.chat.defaultModel = process.env.COPILOT_CHAT_MODEL
}
const chatTemperature = parseFloatEnv(
process.env.COPILOT_CHAT_TEMPERATURE,
'COPILOT_CHAT_TEMPERATURE'
)
if (chatTemperature !== null) {
config.chat.temperature = chatTemperature
}
const chatMaxTokens = parseIntEnv(
process.env.COPILOT_CHAT_MAX_TOKENS,
'COPILOT_CHAT_MAX_TOKENS'
)
if (chatMaxTokens !== null) {
config.chat.maxTokens = chatMaxTokens
}
// RAG configuration overrides
const ragProvider = validateProviderId(process.env.COPILOT_RAG_PROVIDER)
if (ragProvider) {
config.rag.defaultProvider = ragProvider
} else if (process.env.COPILOT_RAG_PROVIDER) {
logger.warn(
`Invalid COPILOT_RAG_PROVIDER: ${process.env.COPILOT_RAG_PROVIDER}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`
)
}
if (process.env.COPILOT_RAG_MODEL) {
config.rag.defaultModel = process.env.COPILOT_RAG_MODEL
}
const ragTemperature = parseFloatEnv(
process.env.COPILOT_RAG_TEMPERATURE,
'COPILOT_RAG_TEMPERATURE'
)
if (ragTemperature !== null) {
config.rag.temperature = ragTemperature
}
const ragMaxTokens = parseIntEnv(process.env.COPILOT_RAG_MAX_TOKENS, 'COPILOT_RAG_MAX_TOKENS')
if (ragMaxTokens !== null) {
config.rag.maxTokens = ragMaxTokens
}
const ragMaxSources = parseIntEnv(
process.env.COPILOT_RAG_MAX_SOURCES,
'COPILOT_RAG_MAX_SOURCES'
)
if (ragMaxSources !== null) {
config.rag.maxSources = ragMaxSources
}
const ragSimilarityThreshold = parseFloatEnv(
process.env.COPILOT_RAG_SIMILARITY_THRESHOLD,
'COPILOT_RAG_SIMILARITY_THRESHOLD'
)
if (ragSimilarityThreshold !== null) {
config.rag.similarityThreshold = ragSimilarityThreshold
}
// General configuration overrides
if (process.env.COPILOT_STREAMING_ENABLED) {
config.general.streamingEnabled = process.env.COPILOT_STREAMING_ENABLED === 'true'
}
const maxConversationHistory = parseIntEnv(
process.env.COPILOT_MAX_CONVERSATION_HISTORY,
'COPILOT_MAX_CONVERSATION_HISTORY'
)
if (maxConversationHistory !== null) {
config.general.maxConversationHistory = maxConversationHistory
}
logger.info('Copilot configuration loaded', {
chatProvider: config.chat.defaultProvider,
chatModel: config.chat.defaultModel,
ragProvider: config.rag.defaultProvider,
ragModel: config.rag.defaultModel,
streamingEnabled: config.general.streamingEnabled,
})
} catch (error) {
logger.warn('Error applying environment variable overrides, using defaults', { error })
}
return config
}
/**
* Get the model to use for a specific copilot function
*/
export function getCopilotModel(type: 'chat' | 'rag' | 'title'): {
provider: ProviderId
model: string
} {
const config = getCopilotConfig()
switch (type) {
case 'chat':
return {
provider: config.chat.defaultProvider,
model: config.chat.defaultModel,
}
case 'rag':
return {
provider: config.rag.defaultProvider,
model: config.rag.defaultModel,
}
case 'title':
return {
provider: config.chat.defaultProvider, // Use same provider as chat
model: config.general.titleGenerationModel,
}
default:
throw new Error(`Unknown copilot model type: ${type}`)
}
}
/**
 * Validate that a provider/model combination is available
 *
 * Checks that both the chat and RAG providers resolve to a known default
 * model, and that every numeric tuning value falls inside its supported
 * range.
 *
 * @param config - The copilot configuration to validate
 * @returns Validation outcome plus human-readable errors (empty when valid)
 */
export function validateCopilotConfig(config: CopilotConfig): {
  isValid: boolean
  errors: string[]
} {
  const errors: string[] = []

  // Provider checks: a provider is considered present when it has a default model
  try {
    if (!getProviderDefaultModel(config.chat.defaultProvider)) {
      errors.push(`Chat provider '${config.chat.defaultProvider}' not found`)
    }
  } catch {
    errors.push(`Invalid chat provider: ${config.chat.defaultProvider}`)
  }
  try {
    if (!getProviderDefaultModel(config.rag.defaultProvider)) {
      errors.push(`RAG provider '${config.rag.defaultProvider}' not found`)
    }
  } catch {
    errors.push(`Invalid RAG provider: ${config.rag.defaultProvider}`)
  }

  // Numeric range checks expressed as [value, min, max, message] tuples,
  // in the same order the original checks ran so error ordering is stable
  const rangeChecks: Array<[number, number, number, string]> = [
    [config.chat.temperature, 0, 2, 'Chat temperature must be between 0 and 2'],
    [config.rag.temperature, 0, 2, 'RAG temperature must be between 0 and 2'],
    [config.chat.maxTokens, 1, 100000, 'Chat maxTokens must be between 1 and 100000'],
    [config.rag.maxTokens, 1, 100000, 'RAG maxTokens must be between 1 and 100000'],
    [config.rag.maxSources, 1, 20, 'RAG maxSources must be between 1 and 20'],
    [config.rag.similarityThreshold, 0, 1, 'RAG similarityThreshold must be between 0 and 1'],
    [
      config.general.maxConversationHistory,
      1,
      50,
      'General maxConversationHistory must be between 1 and 50',
    ],
  ]
  for (const [value, min, max, message] of rangeChecks) {
    if (value < min || value > max) {
      errors.push(message)
    }
  }

  return { isValid: errors.length === 0, errors }
}

View File

@@ -0,0 +1,784 @@
import { and, desc, eq } from 'drizzle-orm'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { copilotChats } from '@/db/schema'
import { executeProviderRequest } from '@/providers'
import type { ProviderToolConfig } from '@/providers/types'
import { getApiKey } from '@/providers/utils'
import { getCopilotConfig, getCopilotModel } from './config'
const logger = createLogger('CopilotService')
/**
 * Message interface for copilot conversations
 */
export interface CopilotMessage {
  // Unique message id (generated via crypto.randomUUID() at creation time)
  id: string
  // Author of the message
  role: 'user' | 'assistant' | 'system'
  // Markdown/plain-text body of the message
  content: string
  // ISO-8601 timestamp of when the message was created
  timestamp: string
  // Optional numbered documentation sources backing an assistant answer
  citations?: Array<{
    id: number
    title: string
    url: string
    similarity?: number
  }>
}
/**
 * Chat interface for copilot conversations
 */
export interface CopilotChat {
  id: string
  // Display title; null until generated from the first message
  title: string | null
  // Model name the chat was created with
  model: string
  // Full ordered message history
  messages: CopilotMessage[]
  // Convenience count of `messages`
  messageCount: number
  createdAt: Date
  updatedAt: Date
}
/**
 * Request interface for sending messages
 */
export interface SendMessageRequest {
  // The user's message text
  message: string
  // Existing chat to append to; omit to send without loading history
  chatId?: string
  // Workflow the chat is scoped to (required when creating a new chat)
  workflowId?: string
  // When true (and workflowId is set), a new chat is created for this message
  createNewChat?: boolean
  // Request a streaming response
  stream?: boolean
  // Authenticated user id (owner of the chat)
  userId: string
}
/**
 * Response interface for sending messages
 */
export interface SendMessageResponse {
  // Assistant response content
  content: string
  // Id of the chat the exchange was persisted to, if any
  chatId?: string
  // Optional numbered documentation sources backing the answer
  citations?: Array<{
    id: number
    title: string
    url: string
    similarity?: number
  }>
  // Free-form additional information about the response
  metadata?: Record<string, any>
}
/**
 * Generate a chat title using LLM
 *
 * Asks the configured title model for a short descriptive title based on the
 * first user message. Falls back to 'New Chat' on any failure: missing API
 * key, provider error, or an unexpected response shape.
 */
export async function generateChatTitle(userMessage: string): Promise<string> {
  const FALLBACK_TITLE = 'New Chat'
  try {
    const { provider, model } = getCopilotModel('title')

    // Resolve the API key: hosted providers use the rotating key pool,
    // everything else goes through the per-provider lookup.
    let apiKey: string
    try {
      if (provider === 'openai' || provider === 'anthropic') {
        const { getRotatingApiKey } = await import('@/lib/utils')
        apiKey = getRotatingApiKey(provider)
      } else {
        apiKey = getApiKey(provider, model)
      }
    } catch (error) {
      logger.error(`Failed to get API key for title generation (${provider} ${model}):`, error)
      return FALLBACK_TITLE
    }

    const response = await executeProviderRequest(provider, {
      model,
      systemPrompt:
        'You are a helpful assistant that generates concise, descriptive titles for chat conversations. Create a title that captures the main topic or question being discussed. Keep it under 50 characters and make it specific and clear.',
      context: `Generate a concise title for a conversation that starts with this user message: "${userMessage}"\n\nReturn only the title text, nothing else.`,
      temperature: 0.3,
      maxTokens: 50,
      apiKey,
      stream: false,
    })

    // A non-streaming request should yield an object carrying `content`
    if (typeof response === 'object' && 'content' in response) {
      return response.content?.trim() || FALLBACK_TITLE
    }
    return FALLBACK_TITLE
  } catch (error) {
    logger.error('Failed to generate chat title:', error)
    return FALLBACK_TITLE
  }
}
/**
 * Search documentation using RAG
 *
 * Embeds the query, runs a vector-similarity search over the docs embeddings
 * table, and returns the top matches above the similarity threshold. Returns
 * an empty array when embedding or querying fails.
 *
 * @param query - Natural-language search query
 * @param options.topK - Max rows fetched from the database (default from config)
 * @param options.threshold - Minimum similarity to keep a result (default from config)
 * @returns Rank-numbered results (id is 1-based) with title, url, content, similarity
 */
export async function searchDocumentation(
  query: string,
  options: {
    topK?: number
    threshold?: number
  } = {}
): Promise<
  Array<{
    id: number
    title: string
    url: string
    content: string
    similarity: number
  }>
> {
  // Dynamic imports keep these dependencies out of the module graph until needed.
  // NOTE(review): these run outside the try below, so an import failure throws
  // instead of returning [] — confirm that is intended.
  const { generateEmbeddings } = await import('@/app/api/knowledge/utils')
  const { docsEmbeddings } = await import('@/db/schema')
  const { sql } = await import('drizzle-orm')
  const config = getCopilotConfig()
  const { topK = config.rag.maxSources, threshold = config.rag.similarityThreshold } = options
  try {
    logger.info('Documentation search requested', { query, topK, threshold })
    // Generate embedding for the query
    const embeddings = await generateEmbeddings([query])
    const queryEmbedding = embeddings[0]
    if (!queryEmbedding || queryEmbedding.length === 0) {
      logger.warn('Failed to generate query embedding')
      return []
    }
    // Search docs embeddings using vector similarity; similarity is computed
    // as 1 - distance from the `<=>` operator, and rows are ordered nearest-first
    const results = await db
      .select({
        chunkId: docsEmbeddings.chunkId,
        chunkText: docsEmbeddings.chunkText,
        sourceDocument: docsEmbeddings.sourceDocument,
        sourceLink: docsEmbeddings.sourceLink,
        headerText: docsEmbeddings.headerText,
        headerLevel: docsEmbeddings.headerLevel,
        similarity: sql<number>`1 - (${docsEmbeddings.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector)`,
      })
      .from(docsEmbeddings)
      .orderBy(sql`${docsEmbeddings.embedding} <=> ${JSON.stringify(queryEmbedding)}::vector`)
      .limit(topK)
    // Filter by similarity threshold (applied after the LIMIT, so fewer than
    // topK rows may survive)
    const filteredResults = results.filter((result) => result.similarity >= threshold)
    logger.info(`Found ${filteredResults.length} relevant documentation chunks`, {
      totalResults: results.length,
      afterFiltering: filteredResults.length,
      threshold,
    })
    // Re-number results 1..n so the LLM can cite them by rank
    return filteredResults.map((result, index) => ({
      id: index + 1,
      title: String(result.headerText || 'Untitled Section'),
      url: String(result.sourceLink || '#'),
      content: String(result.chunkText || ''),
      similarity: result.similarity,
    }))
  } catch (error) {
    logger.error('Failed to search documentation:', error)
    return []
  }
}
/**
 * Generate documentation-based response using RAG
 *
 * Searches the docs embeddings for the query, builds a numbered source
 * context plus recent conversation history, and asks the configured RAG
 * model to answer with inline markdown citations.
 *
 * @param query - The user's documentation question
 * @param conversationHistory - Prior messages; truncated to the configured limit
 * @param options.stream - Request a streaming response (defaults to config)
 * @param options.topK - Max documentation chunks to retrieve
 * @param options.provider - Override for the configured RAG provider
 * @param options.model - Override for the configured RAG model
 * @param options.workflowId - Accepted but not referenced in this function
 * @param options.requestId - Accepted but not referenced in this function
 * @returns The response (string or stream) plus the numbered sources used
 * @throws Error when no API key is configured or the provider call fails
 */
export async function generateDocsResponse(
  query: string,
  conversationHistory: CopilotMessage[] = [],
  options: {
    stream?: boolean
    topK?: number
    provider?: string
    model?: string
    workflowId?: string
    requestId?: string
  } = {}
): Promise<{
  response: string | ReadableStream
  sources: Array<{
    id: number
    title: string
    url: string
    similarity: number
  }>
}> {
  const config = getCopilotConfig()
  const { provider, model } = getCopilotModel('rag')
  const {
    stream = config.general.streamingEnabled,
    topK = config.rag.maxSources,
    provider: overrideProvider,
    model: overrideModel,
  } = options
  // Explicit overrides win over the configured RAG provider/model
  const selectedProvider = overrideProvider || provider
  const selectedModel = overrideModel || model
  try {
    let apiKey: string
    try {
      // Use rotating key directly for hosted providers
      if (selectedProvider === 'openai' || selectedProvider === 'anthropic') {
        const { getRotatingApiKey } = await import('@/lib/utils')
        apiKey = getRotatingApiKey(selectedProvider)
      } else {
        apiKey = getApiKey(selectedProvider, selectedModel)
      }
    } catch (error) {
      logger.error(
        `Failed to get API key for docs response (${selectedProvider} ${selectedModel}):`,
        error
      )
      throw new Error(
        `API key not configured for ${selectedProvider}. Please set up API keys for this provider or use a different one.`
      )
    }
    // Search documentation
    const searchResults = await searchDocumentation(query, { topK })
    // No relevant docs found: return a canned fallback rather than guessing
    if (searchResults.length === 0) {
      const fallbackResponse =
        "I couldn't find any relevant documentation for your question. Please try rephrasing your query or check if you're asking about a feature that exists in Sim Studio."
      return {
        response: fallbackResponse,
        sources: [],
      }
    }
    // Format search results as context with numbered sources
    const context = searchResults
      .map((result, index) => {
        return `[${index + 1}] ${result.title}
Document: ${result.title}
URL: ${result.url}
Content: ${result.content}`
      })
      .join('\n\n')
    // Build conversation context if we have history
    let conversationContext = ''
    if (conversationHistory.length > 0) {
      conversationContext = '\n\nConversation History:\n'
      conversationHistory.slice(-config.general.maxConversationHistory).forEach((msg) => {
        const role = msg.role === 'user' ? 'Human' : 'Assistant'
        conversationContext += `${role}: ${msg.content}\n`
      })
      conversationContext += '\n'
    }
    const systemPrompt = `You are a helpful assistant that answers questions about Sim Studio documentation. You are having a conversation with the user, so refer to the conversation history when relevant.
MANDATORY CITATION REQUIREMENT: You MUST include citations for ALL information derived from the provided sources.
Citation Guidelines:
- ALWAYS cite sources when mentioning specific features, concepts, or instructions from the documentation
- Use direct links with markdown format: [link text](URL)
- Use the exact URLs provided in the source context
- Make link text descriptive (e.g., "workflow documentation" not "here")
- Place citations immediately after stating facts from the documentation
- Cite ALL relevant sources that contributed to your answer - do not omit any
- When multiple sources cover the same topic, cite the most comprehensive or relevant one
- Place links naturally in context, not clustered at the end
- IMPORTANT: Only cite each source ONCE per response - avoid repeating the same URL multiple times
Content Guidelines:
- Answer the user's question accurately using the provided documentation
- Consider the conversation history and refer to previous messages when relevant
- Format your response in clean, readable markdown
- Use bullet points, code blocks, and headers where appropriate
- If the question cannot be answered from the context, say so clearly
- Be conversational but precise
- NEVER include object representations like "[object Object]" - always use proper text
- When mentioning tool names, use their actual names from the documentation
Each source in the context below includes a URL that you can reference directly.`
    const userPrompt = `${conversationContext}Current Question: ${query}
Documentation Context:
${context}`
    logger.info(
      `Generating docs response using provider: ${selectedProvider}, model: ${selectedModel}`
    )
    const response = await executeProviderRequest(selectedProvider, {
      model: selectedModel,
      systemPrompt,
      context: userPrompt,
      temperature: config.rag.temperature,
      maxTokens: config.rag.maxTokens,
      apiKey,
      stream,
    })
    // Format sources for response (similarity rounded to 2 decimals)
    const sources = searchResults.map((result) => ({
      id: result.id,
      title: result.title,
      url: result.url,
      similarity: Math.round(result.similarity * 100) / 100,
    }))
    // Handle different response types
    if (response instanceof ReadableStream) {
      return { response, sources }
    }
    if ('stream' in response && 'execution' in response) {
      // Handle StreamingExecution for providers like Anthropic
      if (stream) {
        return { response: response.stream, sources }
      }
      throw new Error('Unexpected streaming execution response when non-streaming was requested')
    }
    // At this point, we have a ProviderResponse
    const content = response.content || 'Sorry, I could not generate a response.'
    // Clean up any object serialization artifacts
    const cleanedContent = content
      .replace(/\[object Object\],?/g, '') // Remove [object Object] artifacts
      .replace(/\s+/g, ' ') // Normalize whitespace
      .trim()
    return {
      response: cleanedContent,
      sources,
    }
  } catch (error) {
    logger.error('Failed to generate docs response:', error)
    throw new Error(
      `Failed to generate docs response: ${error instanceof Error ? error.message : 'Unknown error'}`
    )
  }
}
/**
 * Generate chat response using LLM with optional documentation search
 *
 * Builds the provider message list from recent history plus the new user
 * message, exposes the docs-search and get-user-workflow tools to the LLM,
 * and normalizes the provider's result into either a string or a
 * ReadableStream depending on the `stream` option.
 *
 * @param message - The new user message
 * @param conversationHistory - Prior messages; only the most recent
 *   `maxConversationHistory` entries are sent to the provider
 * @param options.stream - Request a streaming response (defaults to config)
 * @param options.workflowId - Forwarded to the provider request
 * @param options.requestId - Accepted but not referenced in this function
 * @returns The response text, or a ReadableStream when streaming
 * @throws Error when no API key is configured or the provider call fails
 */
export async function generateChatResponse(
  message: string,
  conversationHistory: CopilotMessage[] = [],
  options: {
    stream?: boolean
    workflowId?: string
    requestId?: string
  } = {}
): Promise<string | ReadableStream> {
  const config = getCopilotConfig()
  const { provider, model } = getCopilotModel('chat')
  const { stream = config.general.streamingEnabled } = options
  try {
    let apiKey: string
    try {
      // Use rotating key directly for hosted providers
      if (provider === 'openai' || provider === 'anthropic') {
        const { getRotatingApiKey } = await import('@/lib/utils')
        apiKey = getRotatingApiKey(provider)
      } else {
        apiKey = getApiKey(provider, model)
      }
    } catch (error) {
      logger.error(`Failed to get API key for chat (${provider} ${model}):`, error)
      throw new Error(
        `API key not configured for ${provider}. Please set up API keys for this provider or use a different one.`
      )
    }
    // Build conversation context
    const messages = []
    // Add conversation history (limited by config)
    const historyLimit = config.general.maxConversationHistory
    const recentHistory = conversationHistory.slice(-historyLimit)
    for (const msg of recentHistory) {
      messages.push({
        role: msg.role as 'user' | 'assistant' | 'system',
        content: msg.content,
      })
    }
    // Add current user message
    messages.push({
      role: 'user' as const,
      content: message,
    })
    // Define the tools available to the LLM; ids must match the copilot tool
    // registry so tool calls can be dispatched
    const tools: ProviderToolConfig[] = [
      {
        id: 'docs_search_internal',
        name: 'Search Documentation',
        description:
          'Search Sim Studio documentation for information about features, tools, workflows, and functionality',
        params: {},
        parameters: {
          type: 'object',
          properties: {
            query: {
              type: 'string',
              description: 'The search query to find relevant documentation',
            },
            topK: {
              type: 'number',
              description: 'Number of results to return (default: 5, max: 10)',
              default: 5,
            },
          },
          required: ['query'],
        },
      },
      {
        id: 'get_user_workflow',
        name: "Get User's Specific Workflow",
        description:
          'Get the user\'s current workflow - this shows ONLY the blocks they have actually built and configured in their specific workflow, not general Sim Studio capabilities. Use this when the user asks about "my workflow", "this workflow", wants to know what blocks they currently have, OR when they ask "How do I..." questions about their workflow so you can give specific, actionable advice based on their actual setup.',
        params: {},
        parameters: {
          type: 'object',
          properties: {
            includeMetadata: {
              type: 'boolean',
              description:
                'Whether to include additional metadata about the workflow (default: false)',
              default: false,
            },
          },
          required: [],
        },
      },
    ]
    const response = await executeProviderRequest(provider, {
      model,
      systemPrompt: config.chat.systemPrompt,
      messages,
      tools,
      temperature: config.chat.temperature,
      maxTokens: config.chat.maxTokens,
      apiKey,
      stream,
      workflowId: options.workflowId,
    })
    // Handle StreamingExecution (from providers with tool calls); the
    // execution part is discarded here and only the stream is returned
    if (
      typeof response === 'object' &&
      response &&
      'stream' in response &&
      'execution' in response
    ) {
      logger.info('Detected StreamingExecution from provider')
      return (response as any).stream
    }
    // Handle ProviderResponse (non-streaming with tool calls)
    if (typeof response === 'object' && 'content' in response) {
      const content = response.content || 'Sorry, I could not generate a response.'
      // If streaming was requested, wrap the content in a ReadableStream so
      // callers get a uniform interface
      if (stream) {
        return new ReadableStream({
          start(controller) {
            const encoder = new TextEncoder()
            controller.enqueue(encoder.encode(content))
            controller.close()
          },
        })
      }
      return content
    }
    // Handle direct ReadableStream response
    if (response instanceof ReadableStream) {
      return response
    }
    return 'Sorry, I could not generate a response.'
  } catch (error) {
    logger.error('Failed to generate chat response:', error)
    throw new Error(
      `Failed to generate response: ${error instanceof Error ? error.message : 'Unknown error'}`
    )
  }
}
/**
 * Create a new copilot chat
 *
 * @param userId - Owner of the chat
 * @param workflowId - Workflow the chat is scoped to
 * @param options.title - Optional title; left null to be generated later
 * @param options.initialMessage - Optional first user message to seed the chat
 * @returns The newly created chat
 * @throws Error when the insert fails or returns no row
 */
export async function createChat(
  userId: string,
  workflowId: string,
  options: {
    title?: string
    initialMessage?: string
  } = {}
): Promise<CopilotChat> {
  // Only the model name is needed at creation time; the provider is resolved
  // per request. (Previously an unused config/provider pair was also fetched.)
  const { model } = getCopilotModel('chat')
  const { title, initialMessage } = options
  try {
    // Prepare initial messages array
    const initialMessages: CopilotMessage[] = initialMessage
      ? [
          {
            id: crypto.randomUUID(),
            role: 'user',
            content: initialMessage,
            timestamp: new Date().toISOString(),
          },
        ]
      : []
    // Create the chat
    const [newChat] = await db
      .insert(copilotChats)
      .values({
        userId,
        workflowId,
        title: title || null, // Will be generated later if null
        model,
        messages: initialMessages,
      })
      .returning()
    if (!newChat) {
      throw new Error('Failed to create chat')
    }
    logger.info(`Created chat ${newChat.id} for user ${userId}`)
    return {
      id: newChat.id,
      title: newChat.title,
      model: newChat.model,
      messages: Array.isArray(newChat.messages) ? newChat.messages : [],
      messageCount: Array.isArray(newChat.messages) ? newChat.messages.length : 0,
      createdAt: newChat.createdAt,
      updatedAt: newChat.updatedAt,
    }
  } catch (error) {
    logger.error('Failed to create chat:', error)
    throw new Error(
      `Failed to create chat: ${error instanceof Error ? error.message : 'Unknown error'}`
    )
  }
}
/**
 * Get a specific chat
 *
 * Looks up a chat by id, scoped to the owning user. Returns null when the
 * chat does not exist, belongs to another user, or the query fails.
 */
export async function getChat(chatId: string, userId: string): Promise<CopilotChat | null> {
  try {
    const rows = await db
      .select()
      .from(copilotChats)
      .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
      .limit(1)

    const record = rows[0]
    if (!record) {
      return null
    }

    // messages is stored as JSON, so guard against non-array values
    const messageList = Array.isArray(record.messages) ? record.messages : []
    return {
      id: record.id,
      title: record.title,
      model: record.model,
      messages: messageList,
      messageCount: messageList.length,
      createdAt: record.createdAt,
      updatedAt: record.updatedAt,
    }
  } catch (error) {
    logger.error('Failed to get chat:', error)
    return null
  }
}
/**
 * List chats for a workflow
 *
 * Returns the user's chats for a workflow, most recently updated first, with
 * optional pagination. Returns an empty list when the query fails.
 */
export async function listChats(
  userId: string,
  workflowId: string,
  options: {
    limit?: number
    offset?: number
  } = {}
): Promise<CopilotChat[]> {
  const { limit = 50, offset = 0 } = options
  try {
    const rows = await db
      .select()
      .from(copilotChats)
      .where(and(eq(copilotChats.userId, userId), eq(copilotChats.workflowId, workflowId)))
      .orderBy(desc(copilotChats.updatedAt))
      .limit(limit)
      .offset(offset)

    return rows.map((row) => {
      // messages is stored as JSON, so guard against non-array values
      const messageList = Array.isArray(row.messages) ? row.messages : []
      return {
        id: row.id,
        title: row.title,
        model: row.model,
        messages: messageList,
        messageCount: messageList.length,
        createdAt: row.createdAt,
        updatedAt: row.updatedAt,
      }
    })
  } catch (error) {
    logger.error('Failed to list chats:', error)
    return []
  }
}
/**
 * Update a chat (add messages, update title, etc.)
 *
 * Verifies ownership before writing. Only the provided fields are updated;
 * updatedAt is always refreshed. Returns the updated chat, or null when the
 * chat is missing, not owned by the user, or the update fails.
 *
 * @param chatId - Chat to update
 * @param userId - Must be the chat owner
 * @param updates.title - New title (optional)
 * @param updates.messages - Replacement message array (optional)
 */
export async function updateChat(
  chatId: string,
  userId: string,
  updates: {
    title?: string
    messages?: CopilotMessage[]
  }
): Promise<CopilotChat | null> {
  try {
    // Verify the chat exists and belongs to the user
    const existingChat = await getChat(chatId, userId)
    if (!existingChat) {
      return null
    }
    // Precisely-typed update payload (previously `any`, which disabled checking)
    const updateData: {
      updatedAt: Date
      title?: string
      messages?: CopilotMessage[]
    } = {
      updatedAt: new Date(),
    }
    if (updates.title !== undefined) updateData.title = updates.title
    if (updates.messages !== undefined) updateData.messages = updates.messages
    // Update the chat
    const [updatedChat] = await db
      .update(copilotChats)
      .set(updateData)
      .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
      .returning()
    if (!updatedChat) {
      return null
    }
    return {
      id: updatedChat.id,
      title: updatedChat.title,
      model: updatedChat.model,
      messages: Array.isArray(updatedChat.messages) ? updatedChat.messages : [],
      messageCount: Array.isArray(updatedChat.messages) ? updatedChat.messages.length : 0,
      createdAt: updatedChat.createdAt,
      updatedAt: updatedChat.updatedAt,
    }
  } catch (error) {
    logger.error('Failed to update chat:', error)
    return null
  }
}
/**
 * Delete a chat
 *
 * Deletes only when the chat belongs to the given user. Returns true when a
 * row was removed, false otherwise (including on query errors).
 */
export async function deleteChat(chatId: string, userId: string): Promise<boolean> {
  try {
    const deletedRows = await db
      .delete(copilotChats)
      .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
      .returning({ id: copilotChats.id })
    return deletedRows.length > 0
  } catch (error) {
    logger.error('Failed to delete chat:', error)
    return false
  }
}
/**
 * Send a message and get a response
 *
 * Orchestrates one conversational turn: resolves the chat context (existing
 * chat, newly created chat, or no persistence), generates the assistant
 * response, and persists both messages for non-streaming string responses.
 * For streaming responses persistence is deferred to the API layer once the
 * stream completes.
 *
 * @param request - Message text plus chat/workflow routing and streaming flags
 * @returns The response (string, stream, or provider object) and the chat id, if any
 * @throws Re-throws any error from response generation or persistence
 */
export async function sendMessage(request: SendMessageRequest): Promise<{
  response: string | ReadableStream | any
  chatId?: string
}> {
  const { message, chatId, workflowId, createNewChat, stream, userId } = request
  try {
    // Handle chat context
    let currentChat: CopilotChat | null = null
    let conversationHistory: CopilotMessage[] = []
    if (chatId) {
      // Load existing chat (silently proceeds without history if not found)
      currentChat = await getChat(chatId, userId)
      if (currentChat) {
        conversationHistory = currentChat.messages
      }
    } else if (createNewChat && workflowId) {
      // Create new chat
      currentChat = await createChat(userId, workflowId)
    }
    // Generate chat response
    const response = await generateChatResponse(message, conversationHistory, {
      stream,
      workflowId,
    })
    // No need to extract citations - LLM generates direct markdown links
    // For non-streaming responses, save immediately
    // For streaming responses, save will be handled by the API layer after stream completes
    if (currentChat && typeof response === 'string') {
      const userMessage: CopilotMessage = {
        id: crypto.randomUUID(),
        role: 'user',
        content: message,
        timestamp: new Date().toISOString(),
      }
      const assistantMessage: CopilotMessage = {
        id: crypto.randomUUID(),
        role: 'assistant',
        content: response,
        timestamp: new Date().toISOString(),
      }
      const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
      // Generate title if this is the first message
      let updatedTitle = currentChat.title
      if (!updatedTitle && conversationHistory.length === 0) {
        updatedTitle = await generateChatTitle(message)
      }
      await updateChat(currentChat.id, userId, {
        title: updatedTitle || undefined,
        messages: updatedMessages,
      })
    }
    return {
      response,
      chatId: currentChat?.id,
    }
  } catch (error) {
    logger.error('Failed to send message:', error)
    throw error
  }
}

View File

@@ -0,0 +1,174 @@
import { createLogger } from '@/lib/logs/console-logger'
import { searchDocumentation } from './service'
const logger = createLogger('CopilotTools')
// Interface for copilot tool execution results
export interface CopilotToolResult {
  // Whether the tool ran successfully
  success: boolean
  // Tool-specific payload, present on success
  data?: any
  // Human-readable failure description, present on failure
  error?: string
}
// Interface for copilot tool definitions
export interface CopilotTool {
  // Stable tool id the LLM uses to invoke the tool
  id: string
  // Display name
  name: string
  // Description shown to the LLM when it decides whether to call the tool
  description: string
  // JSON-schema-style declaration of the tool's arguments
  parameters: {
    type: 'object'
    properties: Record<string, any>
    required: string[]
  }
  // Executes the tool with the (unvalidated) arguments supplied by the LLM
  execute: (args: Record<string, any>) => Promise<CopilotToolResult>
}
// Documentation search tool for copilot
const docsSearchTool: CopilotTool = {
  id: 'docs_search_internal',
  name: 'Search Documentation',
  description:
    'Search Sim Studio documentation for information about features, tools, workflows, and functionality',
  parameters: {
    type: 'object',
    properties: {
      query: {
        type: 'string',
        description: 'The search query to find relevant documentation',
      },
      topK: {
        type: 'number',
        description: 'Number of results to return (default: 5, max: 10)',
        default: 5,
      },
    },
    required: ['query'],
  },
  // Runs the RAG search and wraps the results in a CopilotToolResult;
  // never throws — failures are reported via success: false
  execute: async (args: Record<string, any>): Promise<CopilotToolResult> => {
    try {
      const { query } = args
      // Enforce the documented bounds (default 5, max 10) instead of trusting
      // the LLM-supplied argument verbatim — previously topK was unclamped
      const requestedTopK = typeof args.topK === 'number' ? args.topK : 5
      const topK = Math.min(Math.max(requestedTopK, 1), 10)
      logger.info('Executing documentation search', { query, topK })
      const results = await searchDocumentation(query, { topK })
      logger.info(`Found ${results.length} documentation results`, { query })
      return {
        success: true,
        data: {
          results,
          query,
          totalResults: results.length,
        },
      }
    } catch (error) {
      logger.error('Documentation search failed', error)
      return {
        success: false,
        error: `Documentation search failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
      }
    }
  },
}
// Get user workflow as YAML tool for copilot
// NOTE(review): execute() reads zustand stores, which normally hold
// client-side state — confirm this behaves as expected when the tool is
// executed in a server context.
const getUserWorkflowTool: CopilotTool = {
  id: 'get_user_workflow',
  name: 'Get User Workflow',
  description:
    'Get the current user workflow as YAML format. This shows all blocks, their configurations, inputs, and connections in the workflow.',
  parameters: {
    type: 'object',
    properties: {},
    required: [],
  },
  execute: async (args: Record<string, any>): Promise<CopilotToolResult> => {
    try {
      logger.info('Executing get user workflow')
      // Import the workflow YAML store dynamically to avoid import issues
      const { useWorkflowYamlStore } = await import('@/stores/workflows/yaml/store')
      const { useWorkflowRegistry } = await import('@/stores/workflows/registry/store')
      // Get the current workflow YAML using the same logic as export
      const yamlContent = useWorkflowYamlStore.getState().getYaml()
      // Get workflow metadata
      const registry = useWorkflowRegistry.getState()
      const activeWorkflowId = registry.activeWorkflowId
      const activeWorkflow = activeWorkflowId ? registry.workflows[activeWorkflowId] : null
      // Metadata is only attached when an active workflow is registered
      let metadata
      if (activeWorkflow) {
        metadata = {
          workflowId: activeWorkflowId,
          name: activeWorkflow.name,
          description: activeWorkflow.description,
          workspaceId: activeWorkflow.workspaceId,
        }
      }
      logger.info('Successfully retrieved user workflow YAML')
      return {
        success: true,
        data: {
          yaml: yamlContent,
          metadata: metadata,
        },
      }
    } catch (error) {
      logger.error('Get user workflow failed', error)
      return {
        success: false,
        error: `Failed to get user workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
      }
    }
  },
}
// Copilot tools registry
// Maps tool id -> implementation; the ids match the ProviderToolConfig ids
// advertised to the LLM so tool calls can be dispatched here.
const copilotTools: Record<string, CopilotTool> = {
  docs_search_internal: docsSearchTool,
  get_user_workflow: getUserWorkflowTool,
}
// Get a copilot tool by ID
// Returns undefined for unknown ids.
export function getCopilotTool(toolId: string): CopilotTool | undefined {
  return copilotTools[toolId]
}
// Execute a copilot tool
// Looks the tool up by id, runs it, and normalizes any thrown error into a
// failed CopilotToolResult so callers never need their own try/catch.
export async function executeCopilotTool(
  toolId: string,
  args: Record<string, any>
): Promise<CopilotToolResult> {
  // Small helper so every failure path builds the same result shape
  const failure = (error: string): CopilotToolResult => ({ success: false, error })

  const tool = getCopilotTool(toolId)
  if (!tool) {
    logger.error(`Copilot tool not found: ${toolId}`)
    return failure(`Tool not found: ${toolId}`)
  }

  try {
    logger.info(`Executing copilot tool: ${toolId}`, { args })
    const outcome = await tool.execute(args)
    logger.info(`Copilot tool execution completed: ${toolId}`, { success: outcome.success })
    return outcome
  } catch (error) {
    logger.error(`Copilot tool execution failed: ${toolId}`, error)
    return failure(
      `Tool execution failed: ${error instanceof Error ? error.message : 'Unknown error'}`
    )
  }
}
// Get all available copilot tools (for tool definitions in LLM requests)
export function getAllCopilotTools(): CopilotTool[] {
  return Object.keys(copilotTools).map((toolId) => copilotTools[toolId])
}

View File

@@ -0,0 +1,658 @@
import fs from 'fs/promises'
import path from 'path'
import { createLogger } from '@/lib/logs/console-logger'
import { generateEmbeddings } from '@/app/api/knowledge/utils'
import { TextChunker } from './chunker'
import type { DocChunk, DocsChunkerOptions, HeaderInfo } from './types'
// Parsed frontmatter fields from an .mdx file. Only title and description
// are read explicitly; any other keys are carried through untouched.
interface Frontmatter {
  title?: string
  description?: string
  [key: string]: any
}
const logger = createLogger('DocsChunker')
/**
* Docs-specific chunker that processes .mdx files and tracks header context
*/
export class DocsChunker {
private readonly textChunker: TextChunker
private readonly baseUrl: string
/**
 * @param options - Optional chunking overrides (chunkSize / minChunkSize /
 *   overlap) and a baseUrl override for generated documentation links.
 */
constructor(options: DocsChunkerOptions = {}) {
  // Use the existing TextChunker for chunking logic
  this.textChunker = new TextChunker({
    chunkSize: options.chunkSize ?? 300, // Max 300 tokens per chunk
    minChunkSize: options.minChunkSize ?? 100,
    overlap: options.overlap ?? 50,
  })
  // Use localhost docs in development, production docs otherwise
  const isDev = process.env.NODE_ENV === 'development'
  this.baseUrl =
    options.baseUrl ?? (isDev ? 'http://localhost:3001' : 'https://docs.simstudio.ai')
}
/**
 * Process all .mdx files in the docs directory
 *
 * Walks the docs tree, chunks every .mdx file, and concatenates the results.
 * A failure in a single file is logged and skipped; a failure walking the
 * directory tree is rethrown.
 */
async chunkAllDocs(docsPath: string): Promise<DocChunk[]> {
  try {
    const mdxFiles = await this.findMdxFiles(docsPath)
    logger.info(`Found ${mdxFiles.length} .mdx files to process`)

    const collected: DocChunk[] = []
    for (const filePath of mdxFiles) {
      try {
        const fileChunks = await this.chunkMdxFile(filePath, docsPath)
        collected.push(...fileChunks)
        logger.info(`Processed ${filePath}: ${fileChunks.length} chunks`)
      } catch (error) {
        // Skip the broken file but keep processing the rest
        logger.error(`Error processing ${filePath}:`, error)
      }
    }

    logger.info(`Total chunks generated: ${collected.length}`)
    return collected
  } catch (error) {
    logger.error('Error processing docs:', error)
    throw error
  }
}
/**
 * Process a single .mdx file
 *
 * Parses frontmatter, extracts headers, chunks the markdown body, generates
 * embeddings for all chunks in one batch, and attaches the nearest preceding
 * header (for anchor links) to each chunk.
 *
 * @param filePath - Absolute path of the .mdx file
 * @param basePath - Docs root used to compute the relative path and URL
 * @returns One DocChunk per text chunk, with embedding and header context
 */
async chunkMdxFile(filePath: string, basePath: string): Promise<DocChunk[]> {
  const content = await fs.readFile(filePath, 'utf-8')
  const relativePath = path.relative(basePath, filePath)
  // Parse frontmatter and content
  const { data: frontmatter, content: markdownContent } = this.parseFrontmatter(content)
  // Extract headers from the content
  const headers = this.extractHeaders(markdownContent)
  // Generate document URL
  const documentUrl = this.generateDocumentUrl(relativePath)
  // Split content into chunks
  const textChunks = await this.splitContent(markdownContent)
  // Generate embeddings for all chunks at once (batch processing)
  logger.info(`Generating embeddings for ${textChunks.length} chunks in ${relativePath}`)
  const embeddings = textChunks.length > 0 ? await generateEmbeddings(textChunks) : []
  const embeddingModel = 'text-embedding-3-small'
  // Convert to DocChunk objects with header context and embeddings
  const chunks: DocChunk[] = []
  // NOTE(review): positions are accumulated over the cleaned (and, per the
  // chunker's overlap setting, possibly overlapping) chunk texts, while
  // headers were located in the raw markdown — header matching is therefore
  // approximate. Confirm this is acceptable.
  let currentPosition = 0
  for (let i = 0; i < textChunks.length; i++) {
    const chunkText = textChunks[i]
    const chunkStart = currentPosition
    const chunkEnd = currentPosition + chunkText.length
    // Find the most relevant header for this chunk
    const relevantHeader = this.findRelevantHeader(headers, chunkStart)
    const chunk: DocChunk = {
      text: chunkText,
      tokenCount: Math.ceil(chunkText.length / 4), // Simple token estimation
      sourceDocument: relativePath,
      headerLink: relevantHeader ? `${documentUrl}#${relevantHeader.anchor}` : documentUrl,
      headerText: relevantHeader?.text || frontmatter.title || 'Document Root',
      headerLevel: relevantHeader?.level || 1,
      embedding: embeddings[i] || [],
      embeddingModel,
      metadata: {
        startIndex: chunkStart,
        endIndex: chunkEnd,
        hasFrontmatter: i === 0 && content.startsWith('---'),
        documentTitle: frontmatter.title,
        documentDescription: frontmatter.description,
      },
    }
    chunks.push(chunk)
    currentPosition = chunkEnd
  }
  return chunks
}
/**
 * Find all .mdx files recursively
 *
 * Depth-first walk of dirPath; returns absolute paths of every file whose
 * name ends with `.mdx`.
 */
private async findMdxFiles(dirPath: string): Promise<string[]> {
  const entries = await fs.readdir(dirPath, { withFileTypes: true })
  const found: string[] = []
  for (const entry of entries) {
    const entryPath = path.join(dirPath, entry.name)
    if (entry.isDirectory()) {
      // Recurse into subdirectories
      found.push(...(await this.findMdxFiles(entryPath)))
    } else if (entry.isFile() && entry.name.endsWith('.mdx')) {
      found.push(entryPath)
    }
  }
  return found
}
/**
 * Extract headers and their positions from markdown content
 *
 * Scans for ATX headings (# through ######) at the start of a line and
 * records each heading's text, level, slug anchor, and character offset.
 */
private extractHeaders(content: string): HeaderInfo[] {
  const headerRegex = /^(#{1,6})\s+(.+)$/gm
  const headers: HeaderInfo[] = []
  for (const match of content.matchAll(headerRegex)) {
    const text = match[2].trim()
    headers.push({
      text,
      level: match[1].length,
      anchor: this.generateAnchor(text),
      position: match.index ?? 0,
    })
  }
  return headers
}
/**
 * Generate URL-safe anchor from header text
 *
 * Lowercases the text, strips special characters, collapses whitespace and
 * repeated hyphens into single hyphens, and trims hyphens at the ends.
 */
private generateAnchor(headerText: string): string {
  const lowered = headerText.toLowerCase()
  const stripped = lowered.replace(/[^\w\s-]/g, '') // drop special characters except hyphens
  const hyphenated = stripped.replace(/\s+/g, '-') // spaces -> hyphens
  const collapsed = hyphenated.replace(/-+/g, '-') // squeeze repeated hyphens
  return collapsed.replace(/^-|-$/g, '') // trim leading/trailing hyphens
}
/**
 * Generate document URL from relative path
 *
 * e.g. "tools/knowledge.mdx" -> "<baseUrl>/tools/knowledge"
 */
private generateDocumentUrl(relativePath: string): string {
  // Strip the .mdx extension and normalize Windows path separators
  const withoutExtension = relativePath.replace(/\.mdx$/, '')
  const urlPath = withoutExtension.replace(/\\/g, '/')
  return `${this.baseUrl}/${urlPath}`
}
/**
 * Find the most relevant header for a given position
 *
 * Headers are assumed to be in document order; returns the closest header at
 * or before the position, or null when no header precedes it.
 */
private findRelevantHeader(headers: HeaderInfo[], position: number): HeaderInfo | null {
  let closestPreceding: HeaderInfo | null = null
  for (const header of headers) {
    if (header.position > position) break
    closestPreceding = header
  }
  return closestPreceding
}
/**
 * Split raw document content into chunk strings.
 *
 * Pipeline: strip MDX noise, locate table boundaries, chunk with the
 * shared TextChunker, merge chunks back together so no markdown table
 * is split across chunks, then re-split anything over the 300-token
 * cap. Order matters: table boundaries are computed on the cleaned
 * content so their character offsets match what the chunker saw.
 */
private async splitContent(content: string): Promise<string[]> {
  // Clean the content first
  const cleanedContent = this.cleanContent(content)
  // Detect table boundaries to avoid splitting them
  const tableBoundaries = this.detectTableBoundaries(cleanedContent)
  // Use the existing TextChunker
  const chunks = await this.textChunker.chunk(cleanedContent)
  // Post-process chunks to ensure tables aren't split
  const processedChunks = this.mergeTableChunks(
    chunks.map((chunk) => chunk.text),
    tableBoundaries,
    cleanedContent
  )
  // Ensure no chunk exceeds 300 tokens
  const finalChunks = this.enforceSizeLimit(processedChunks)
  return finalChunks
}
/**
 * Strip MDX-only syntax so embeddings see prose, not markup: import
 * lines, JSX/HTML tags, JSX comments, and excess whitespace.
 */
private cleanContent(content: string): string {
  const withoutImports = content.replace(/^import\s+.*$/gm, '')
  const withoutJsx = withoutImports
    .replace(/<[^>]+>/g, ' ') // JSX components (also matches inline HTML)
    .replace(/\{\/\*[\s\S]*?\*\/\}/g, ' ') // {/* ... */} React-style comments
  return withoutJsx
    .replace(/\n{3,}/g, '\n\n') // collapse blank-line runs
    .replace(/[ \t]{2,}/g, ' ') // collapse space/tab runs
    .trim()
}
/**
 * Separate YAML frontmatter from the markdown body. Only flat
 * `key: value` pairs are parsed (sufficient for title/description);
 * a single pair of surrounding quotes on a value is stripped.
 */
private parseFrontmatter(content: string): { data: Frontmatter; content: string } {
  const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n([\s\S]*)$/)
  if (match === null) {
    // No frontmatter block: the whole input is the body.
    return { data: {}, content }
  }
  const [, rawFrontmatter, body] = match
  const parsed: Frontmatter = {}
  for (const line of rawFrontmatter.split('\n')) {
    const separatorAt = line.indexOf(':')
    if (separatorAt <= 0) continue // not a key: value line
    const key = line.slice(0, separatorAt).trim()
    const rawValue = line.slice(separatorAt + 1).trim()
    parsed[key] = rawValue.replace(/^['"]|['"]$/g, '')
  }
  return { data: parsed, content: body }
}
/**
 * Partition markdown content into sections keyed by their nearest H1-H3
 * header. H4-H6 headers stay inside the enclosing section so small
 * subsections are not chunked on their own. Sections whose content is
 * empty (e.g. back-to-back headers) are dropped.
 *
 * NOTE(review): `#`-prefixed lines inside fenced code blocks would also
 * match the header regex here — confirm docs content never has them,
 * or add a fence guard.
 */
private splitByHeaders(
  content: string
): Array<{ header: string | null; content: string; level: number }> {
  const lines = content.split('\n')
  const sections: Array<{ header: string | null; content: string; level: number }> = []
  let currentHeader: string | null = null
  let currentLevel = 0
  let currentContent: string[] = []
  for (const line of lines) {
    const headerMatch = line.match(/^(#{1,3})\s+(.+)$/) // Only split on H1-H3, not H4-H6
    if (headerMatch) {
      // Save previous section
      if (currentContent.length > 0) {
        sections.push({
          header: currentHeader,
          content: currentContent.join('\n').trim(),
          level: currentLevel,
        })
      }
      // Start new section; the raw header line is kept verbatim.
      currentHeader = line
      currentLevel = headerMatch[1].length
      currentContent = []
    } else {
      currentContent.push(line)
    }
  }
  // Add final section
  if (currentContent.length > 0) {
    sections.push({
      header: currentHeader,
      content: currentContent.join('\n').trim(),
      level: currentLevel,
    })
  }
  return sections.filter((section) => section.content.trim().length > 0)
}
/**
 * Cheap token estimate used for chunk sizing: assumes roughly four
 * characters per token.
 */
private estimateTokens(text: string): number {
  const CHARS_PER_TOKEN = 4
  return Math.ceil(text.length / CHARS_PER_TOKEN)
}
/**
 * Greedily merge adjacent chunks (joined with a blank line) until
 * adding the next one would push the running estimate past 500 tokens.
 */
private mergeSmallChunks(chunks: string[]): string[] {
  const merged: string[] = []
  let pending = ''
  // Push the accumulated text, if any, onto the result.
  const flush = () => {
    if (pending.trim()) {
      merged.push(pending.trim())
    }
  }
  for (const chunk of chunks) {
    const pendingTokens = this.estimateTokens(pending)
    const wouldOverflow = pendingTokens > 0 && pendingTokens + this.estimateTokens(chunk) > 500
    if (wouldOverflow) {
      flush()
      pending = chunk
    } else {
      pending = pending ? `${pending}\n\n${chunk}` : chunk
    }
  }
  flush()
  return merged
}
/**
 * Chunk one header-delimited section. Sections containing markdown
 * tables go through the table-aware splitter so a table is never cut
 * in half; plain text goes through the shared TextChunker. Either way
 * the section's header line is prepended to the first chunk only.
 */
private async chunkSection(section: {
  header: string | null
  content: string
  level: number
}): Promise<string[]> {
  const content = section.content
  const header = section.header
  // Check if content contains tables
  const hasTable = this.containsTable(content)
  if (hasTable) {
    // Split by tables and handle each part
    return this.splitContentWithTables(content, header)
  }
  // Regular chunking for text-only content
  const chunks = await this.textChunker.chunk(content)
  return chunks.map((chunk, index) => {
    // Add header to first chunk only
    if (index === 0 && header) {
      return `${header}\n\n${chunk.text}`.trim()
    }
    return chunk.text
  })
}
/**
 * Heuristic markdown-table check: a line with at least two pipe
 * characters followed by a separator line containing pipes and dashes.
 */
private containsTable(content: string): boolean {
  const lines = content.split('\n')
  for (let i = 0; i < lines.length; i++) {
    const row = lines[i]
    if (!row.includes('|') || row.split('|').length < 3) continue
    const separator = lines[i + 1]
    if (separator?.includes('|') && separator.includes('-')) {
      return true
    }
  }
  return false
}
/**
 * Split section content that contains markdown tables, emitting each
 * table as its own chunk so it is never cut in half.
 *
 * Prose between tables accumulates into chunks that are flushed once
 * they grow past ~250 estimated tokens. The section header is prepended
 * to the first emitted chunk only, and chunks of 50 characters or fewer
 * are discarded as noise.
 *
 * Fix: the line that terminates a table is no longer copied into both
 * the table chunk and the following prose chunk (it previously appeared
 * twice in the output).
 */
private splitContentWithTables(content: string, header: string | null): string[] {
  const lines = content.split('\n')
  const chunks: string[] = []
  let currentChunk: string[] = []
  let inTable = false
  let tableLines: string[] = []
  // Prefix the section header onto the very first emitted chunk only.
  const pushChunk = (text: string) => {
    const withHeader = chunks.length === 0 && header ? `${header}\n\n${text}` : text
    chunks.push(withHeader)
  }
  // Emit the accumulated table rows as a standalone chunk.
  const flushTable = () => {
    const tableText = tableLines
      .filter((l) => l.trim())
      .join('\n')
      .trim()
    if (tableText.length > 0) {
      pushChunk(tableText)
    }
    tableLines = []
  }
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i]
    // Detect table start: a pipe row followed by a pipe/dash separator row.
    if (!inTable && line.includes('|') && line.split('|').length >= 3) {
      const nextLine = lines[i + 1]
      if (nextLine?.includes('|') && nextLine.includes('-')) {
        inTable = true
        // Flush the accumulated prose before the table begins.
        if (currentChunk.length > 0 && currentChunk.join('\n').trim().length > 50) {
          pushChunk(currentChunk.join('\n').trim())
          currentChunk = []
        }
        tableLines = [line]
        continue
      }
    }
    if (inTable) {
      // A blank or non-pipe line ends the table. That line belongs to
      // the following prose, not to the table chunk (previously it was
      // duplicated into both).
      if (!line.includes('|') || line.trim() === '') {
        inTable = false
        flushTable()
        // Start new prose chunk with the terminating line if non-blank.
        if (line.trim() !== '') {
          currentChunk = [line]
        }
      } else {
        tableLines.push(line)
      }
    } else {
      currentChunk.push(line)
      // Flush oversized prose chunks early.
      if (this.estimateTokens(currentChunk.join('\n')) > 250) {
        const chunkText = currentChunk.join('\n').trim()
        if (chunkText.length > 50) {
          pushChunk(chunkText)
        }
        currentChunk = []
      }
    }
  }
  // Handle content remaining at end of section.
  if (inTable && tableLines.length > 0) {
    flushTable()
  } else if (currentChunk.length > 0) {
    const chunkText = currentChunk.join('\n').trim()
    if (chunkText.length > 50) {
      pushChunk(chunkText)
    }
  }
  return chunks.filter((chunk) => chunk.trim().length > 50)
}
/**
 * Locate markdown tables in content and return their character ranges
 * so later chunk merging can keep each table inside a single chunk.
 *
 * A table starts at a pipe row whose next line is a pipe/dash
 * separator and ends at the first blank line, header line, or non-pipe
 * line.
 *
 * Fix: the end offset was computed as `pos + len || 0`, which binds as
 * `(pos + len) || 0` — when `len` was undefined the sum was NaN and
 * the whole expression collapsed to 0, producing an inverted range.
 * The length fallback is now applied before the addition.
 */
private detectTableBoundaries(content: string): { start: number; end: number }[] {
  const tables: { start: number; end: number }[] = []
  const lines = content.split('\n')
  let inTable = false
  let tableStart = -1
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim()
    // Detect table start (markdown table row with pipes)
    if (line.includes('|') && line.split('|').length >= 3 && !inTable) {
      // Check if next line is table separator (contains dashes and pipes)
      const nextLine = lines[i + 1]?.trim()
      if (nextLine?.includes('|') && nextLine.includes('-')) {
        inTable = true
        tableStart = i
      }
    }
    // Detect table end (empty line or non-table content)
    else if (inTable && (!line.includes('|') || line === '' || line.startsWith('#'))) {
      tables.push({
        start: this.getCharacterPosition(lines, tableStart),
        // End of the previous line: its start offset plus its length.
        end: this.getCharacterPosition(lines, i - 1) + (lines[i - 1]?.length ?? 0),
      })
      inTable = false
    }
  }
  // Handle table running to the end of the content
  if (inTable && tableStart >= 0) {
    tables.push({
      start: this.getCharacterPosition(lines, tableStart),
      end: content.length,
    })
  }
  return tables
}
/**
 * Character offset of the start of line `lineIndex`, counting one
 * newline character per preceding line.
 */
private getCharacterPosition(lines: string[], lineIndex: number): number {
  let offset = 0
  for (let i = 0; i < lineIndex; i++) {
    offset += lines[i].length + 1 // +1 for the '\n' separator
  }
  return offset
}
/**
 * Re-join chunks whose character ranges overlap a table boundary so no
 * markdown table ends up split across chunks.
 *
 * Each chunk's position is recovered with indexOf against the original
 * content, scanning forward from the previous chunk's end. A chunk that
 * overlaps one or more tables is replaced by the slice of the original
 * content spanning the chunk plus every affected table; duplicate
 * coverage is suppressed with a substring containment check.
 *
 * NOTE(review): this assumes each chunk is a verbatim, in-order slice
 * of `originalContent`; if a chunk were altered upstream, indexOf would
 * return -1 and the range arithmetic would be wrong — confirm chunks
 * are verbatim.
 */
private mergeTableChunks(
  chunks: string[],
  tableBoundaries: { start: number; end: number }[],
  originalContent: string
): string[] {
  if (tableBoundaries.length === 0) {
    return chunks
  }
  const mergedChunks: string[] = []
  let currentPosition = 0
  for (const chunk of chunks) {
    // Recover this chunk's character range in the original content.
    const chunkStart = originalContent.indexOf(chunk, currentPosition)
    const chunkEnd = chunkStart + chunk.length
    // Check if this chunk intersects with any table
    const intersectsTable = tableBoundaries.some(
      (table) =>
        (chunkStart >= table.start && chunkStart <= table.end) ||
        (chunkEnd >= table.start && chunkEnd <= table.end) ||
        (chunkStart <= table.start && chunkEnd >= table.end)
    )
    if (intersectsTable) {
      // Find which table(s) this chunk intersects with
      const affectedTables = tableBoundaries.filter(
        (table) =>
          (chunkStart >= table.start && chunkStart <= table.end) ||
          (chunkEnd >= table.start && chunkEnd <= table.end) ||
          (chunkStart <= table.start && chunkEnd >= table.end)
      )
      // Create a chunk that includes the complete table(s)
      const minStart = Math.min(chunkStart, ...affectedTables.map((t) => t.start))
      const maxEnd = Math.max(chunkEnd, ...affectedTables.map((t) => t.end))
      const completeChunk = originalContent.slice(minStart, maxEnd)
      // Only add if we haven't already included this content
      if (!mergedChunks.some((existing) => existing.includes(completeChunk.trim()))) {
        mergedChunks.push(completeChunk.trim())
      }
    } else {
      mergedChunks.push(chunk)
    }
    currentPosition = chunkEnd
  }
  return mergedChunks.filter((chunk) => chunk.length > 50) // Filter out tiny chunks
}
/**
 * Split any chunk whose token estimate exceeds 300 at line boundaries,
 * then drop fragments of 100 characters or fewer.
 */
private enforceSizeLimit(chunks: string[]): string[] {
  const MAX_TOKENS = 300
  const result: string[] = []
  for (const chunk of chunks) {
    if (this.estimateTokens(chunk) <= MAX_TOKENS) {
      // Already within the limit — keep as-is.
      result.push(chunk)
      continue
    }
    // Oversized: re-split line by line, flushing whenever the next
    // line would push the running piece over the limit.
    let piece = ''
    for (const line of chunk.split('\n')) {
      const candidate = piece ? `${piece}\n${line}` : line
      if (this.estimateTokens(candidate) <= MAX_TOKENS) {
        piece = candidate
      } else {
        if (piece.trim()) {
          result.push(piece.trim())
        }
        piece = line
      }
    }
    // Flush the trailing piece.
    if (piece.trim()) {
      result.push(piece.trim())
    }
  }
  return result.filter((chunk) => chunk.trim().length > 100)
}
}

View File

@@ -0,0 +1,53 @@
/** A single embedded chunk of documentation content, ready for storage. */
export interface DocChunk {
  /** The chunk text content */
  text: string
  /** Token count estimate for the chunk */
  tokenCount: number
  /** Source document path relative to docs/ */
  sourceDocument: string
  /** Link to the most relevant header section */
  headerLink: string
  /** The header text that this chunk belongs to */
  headerText: string
  /** Header level (1-6) */
  headerLevel: number
  /** OpenAI text embedding vector (1536 dimensions for text-embedding-3-small) */
  embedding: number[]
  /** Model used to generate the embedding */
  embeddingModel: string
  /** Metadata about the chunk */
  metadata: {
    /** Start position in the original document */
    startIndex: number
    /** End position in the original document */
    endIndex: number
    /** Whether this chunk contains the document frontmatter */
    hasFrontmatter?: boolean
    /** Document title from frontmatter */
    documentTitle?: string
    /** Document description from frontmatter */
    documentDescription?: string
  }
}
/** Tuning options for the docs chunker; all sizes are in tokens. */
export interface DocsChunkerOptions {
  /** Target chunk size in tokens */
  chunkSize?: number
  /** Minimum chunk size in tokens */
  minChunkSize?: number
  /** Overlap between chunks in tokens */
  overlap?: number
  /** Base URL for generating links */
  baseUrl?: string
}
/** A markdown header located within a document. */
export interface HeaderInfo {
  /** Header text */
  text: string
  /** Header level (1-6) */
  level: number
  /** URL-safe anchor link derived from the header text */
  anchor: string
  /** Character position in the document */
  position: number
}

View File

@@ -93,24 +93,16 @@ export async function loadWorkflowFromNormalizedTables(
id: subflow.id,
...config,
}
logger.debug(
`[DB-HELPERS] Loaded loop ${subflow.id} with iterations: ${loopConfig.iterations || 'unknown'}`
)
} else if (subflow.type === SUBFLOW_TYPES.PARALLEL) {
parallels[subflow.id] = {
id: subflow.id,
...config,
}
logger.debug(`[DB-HELPERS] Loaded parallel ${subflow.id}`)
} else {
logger.warn(`Unknown subflow type: ${subflow.type} for subflow ${subflow.id}`)
}
})
logger.info(
`Loaded workflow ${workflowId} from normalized tables: ${blocks.length} blocks, ${edges.length} edges, ${subflows.length} subflows`
)
return {
blocks: blocksMap,
edges: edgesArray,
@@ -223,8 +215,6 @@ export async function saveWorkflowToNormalizedTables(
hasActiveWebhook: state.hasActiveWebhook,
}
logger.info(`Successfully saved workflow ${workflowId} to normalized tables`)
return {
success: true,
jsonBlob,
@@ -281,7 +271,6 @@ export async function migrateWorkflowToNormalizedTables(
const result = await saveWorkflowToNormalizedTables(workflowId, workflowState)
if (result.success) {
logger.info(`Successfully migrated workflow ${workflowId} to normalized tables`)
return { success: true }
}
return { success: false, error: result.error }

View File

@@ -0,0 +1,268 @@
import { dump as yamlDump } from 'js-yaml'
import type { Edge } from 'reactflow'
import { createLogger } from '@/lib/logs/console-logger'
import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowYamlGenerator')
/** Serialized form of a single workflow block in the YAML output. */
interface YamlBlock {
  type: string
  name: string
  /** Block input values keyed by subBlock id; omitted when empty. */
  inputs?: Record<string, any>
  /** Incoming/outgoing edges; omitted when the block has none. */
  connections?: {
    incoming?: Array<{
      source: string
      sourceHandle?: string
      targetHandle?: string
    }>
    outgoing?: Array<{
      target: string
      sourceHandle?: string
      targetHandle?: string
    }>
  }
  parentId?: string // Add parentId for nested blocks
}
/** Top-level YAML document: version marker plus blocks keyed by block id. */
interface YamlWorkflow {
  version: string
  blocks: Record<string, YamlBlock>
}
/**
 * Extract input values from a block's subBlocks based on its configuration.
 * This version works without client-side stores by using the provided
 * subblock values.
 *
 * Resolution order: an explicit entry in `subBlockValues[blockId]` wins
 * over the value stored on the block state. Empty values ('' / null /
 * undefined) are always omitted from the result.
 */
function extractBlockInputs(
  blockState: BlockState,
  blockId: string,
  subBlockValues?: Record<string, Record<string, any>>
): Record<string, any> {
  // Get subblock values for this block (if provided)
  const blockSubBlockValues = subBlockValues?.[blockId] || {}
  // Loop/parallel containers keep their configuration on blockState.data,
  // not in subBlocks, so they need dedicated handling.
  if (blockState.type === 'loop' || blockState.type === 'parallel') {
    return extractLoopParallelInputs(blockState, blockSubBlockValues)
  }
  const blockConfig = getBlock(blockState.type)
  if (!blockConfig) {
    // Custom blocks without a registered config: pass through raw values.
    return extractUnconfiguredInputs(blockState, blockSubBlockValues)
  }
  return extractConfiguredInputs(blockConfig, blockState, blockSubBlockValues)
}

/** True when a raw subBlock value should be serialized at all. */
function hasInputValue(value: any): boolean {
  return value !== undefined && value !== null && value !== ''
}

/**
 * Inputs for loop/parallel blocks: whitelisted keys from blockState.data,
 * overridden by any matching subBlock value, plus any extra subBlock
 * values not already present.
 */
function extractLoopParallelInputs(
  blockState: BlockState,
  blockSubBlockValues: Record<string, any>
): Record<string, any> {
  const inputs: Record<string, any> = {}
  const configKeys = ['count', 'loopType', 'collection', 'parallelType', 'distribution']
  if (blockState.data) {
    Object.entries(blockState.data).forEach(([key, value]) => {
      // Include relevant configuration properties
      if (configKeys.includes(key) && hasInputValue(value)) {
        inputs[key] = value
      }
      // An override from subBlockValues wins for any key present in data.
      const overrideValue = blockSubBlockValues[key]
      if (hasInputValue(overrideValue)) {
        inputs[key] = overrideValue
      }
    })
  }
  // Include any additional values from subBlockValues that might not be in data
  Object.entries(blockSubBlockValues).forEach(([key, value]) => {
    if (hasInputValue(value) && !Object.hasOwn(inputs, key)) {
      inputs[key] = value
    }
  })
  return inputs
}

/**
 * Fallback for blocks with no registered config: emit every non-empty
 * subBlock value, preferring the provided override over block state.
 */
function extractUnconfiguredInputs(
  blockState: BlockState,
  blockSubBlockValues: Record<string, any>
): Record<string, any> {
  const inputs: Record<string, any> = {}
  Object.entries(blockState.subBlocks || {}).forEach(([subBlockId, subBlockState]) => {
    const value = blockSubBlockValues[subBlockId] ?? subBlockState.value
    if (hasInputValue(value)) {
      inputs[subBlockId] = value
    }
  })
  return inputs
}

/**
 * Inputs for registered blocks: walk the block's subBlock configuration
 * and coerce each value according to the subBlock's declared type.
 */
function extractConfiguredInputs(
  blockConfig: NonNullable<ReturnType<typeof getBlock>>,
  blockState: BlockState,
  blockSubBlockValues: Record<string, any>
): Record<string, any> {
  const inputs: Record<string, any> = {}
  blockConfig.subBlocks.forEach((subBlockConfig: SubBlockConfig) => {
    const subBlockId = subBlockConfig.id
    // Skip hidden or conditional fields that aren't active
    if (subBlockConfig.hidden) return
    // Get value from provided values or fallback to block state
    const value = blockSubBlockValues[subBlockId] ?? blockState.subBlocks[subBlockId]?.value
    if (!hasInputValue(value)) return
    switch (subBlockConfig.type) {
      case 'table':
      case 'checkbox-list':
        // Both are arrays of entries; skip empty arrays.
        if (Array.isArray(value) && value.length > 0) {
          inputs[subBlockId] = value
        }
        break
      case 'code':
        // Code blocks should preserve formatting; structured code passes through.
        if (typeof value === 'string' && value.trim()) {
          inputs[subBlockId] = value
        } else if (typeof value === 'object') {
          inputs[subBlockId] = value
        }
        break
      case 'switch':
        // Boolean values
        inputs[subBlockId] = Boolean(value)
        break
      case 'slider':
        // Numeric values: accept numbers or numeric strings.
        if (
          typeof value === 'number' ||
          (typeof value === 'string' && !Number.isNaN(Number(value)))
        ) {
          inputs[subBlockId] = Number(value)
        }
        break
      default:
        // Text inputs, dropdowns, etc.
        if (typeof value === 'string' && value.trim()) {
          inputs[subBlockId] = value.trim()
        } else if (
          typeof value === 'object' ||
          typeof value === 'number' ||
          typeof value === 'boolean'
        ) {
          inputs[subBlockId] = value
        }
        break
    }
  })
  return inputs
}
/**
 * Collect the connections whose target is the given block, reshaped for
 * YAML serialization (null handles are normalized to undefined).
 */
function findIncomingConnections(
  blockId: string,
  edges: Edge[]
): Array<{
  source: string
  sourceHandle?: string
  targetHandle?: string
}> {
  const incoming: Array<{ source: string; sourceHandle?: string; targetHandle?: string }> = []
  for (const edge of edges) {
    if (edge.target !== blockId) continue
    incoming.push({
      source: edge.source,
      sourceHandle: edge.sourceHandle ?? undefined,
      targetHandle: edge.targetHandle ?? undefined,
    })
  }
  return incoming
}
/**
 * Collect the connections whose source is the given block, reshaped for
 * YAML serialization (null handles are normalized to undefined).
 */
function findOutgoingConnections(
  blockId: string,
  edges: Edge[]
): Array<{
  target: string
  sourceHandle?: string
  targetHandle?: string
}> {
  const outgoing: Array<{ target: string; sourceHandle?: string; targetHandle?: string }> = []
  for (const edge of edges) {
    if (edge.source !== blockId) continue
    outgoing.push({
      target: edge.target,
      sourceHandle: edge.sourceHandle ?? undefined,
      targetHandle: edge.targetHandle ?? undefined,
    })
  }
  return outgoing
}
/**
 * Generate the YAML representation of a workflow.
 *
 * Server-compatible version of the logic originally in the client
 * store: each block is serialized with its type, name, non-empty
 * inputs, incoming/outgoing connections, and (for nested blocks) its
 * parentId. On failure a YAML comment describing the error is returned
 * instead of throwing.
 */
export function generateWorkflowYaml(
  workflowState: WorkflowState,
  subBlockValues?: Record<string, Record<string, any>>
): string {
  try {
    const serializedBlocks: Record<string, YamlBlock> = {}
    for (const [blockId, blockState] of Object.entries(workflowState.blocks)) {
      const entry: YamlBlock = {
        type: blockState.type,
        name: blockState.name,
      }
      // Only include inputs if they exist
      const inputs = extractBlockInputs(blockState, blockId, subBlockValues)
      if (Object.keys(inputs).length > 0) {
        entry.inputs = inputs
      }
      // Only include connections if they exist
      const incoming = findIncomingConnections(blockId, workflowState.edges)
      const outgoing = findOutgoingConnections(blockId, workflowState.edges)
      if (incoming.length > 0 || outgoing.length > 0) {
        entry.connections = {
          ...(incoming.length > 0 ? { incoming } : {}),
          ...(outgoing.length > 0 ? { outgoing } : {}),
        }
      }
      // Include parent-child relationship for nested blocks
      if (blockState.data?.parentId) {
        entry.parentId = blockState.data.parentId
      }
      serializedBlocks[blockId] = entry
    }
    const yamlWorkflow: YamlWorkflow = { version: '1.0', blocks: serializedBlocks }
    // Convert to YAML with clean formatting
    return yamlDump(yamlWorkflow, {
      indent: 2,
      lineWidth: -1, // Disable line wrapping
      noRefs: true,
      sortKeys: false, // preserve insertion order
    })
  } catch (error) {
    logger.error('Failed to generate workflow YAML:', error)
    return `# Error generating YAML: ${error instanceof Error ? error.message : 'Unknown error'}`
  }
}

View File

@@ -86,6 +86,7 @@
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
"jose": "6.0.11",
"js-yaml": "4.1.0",
"jwt-decode": "^4.0.0",
"lenis": "^1.2.3",
"lucide-react": "^0.479.0",
@@ -120,6 +121,7 @@
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^14.6.1",
"@types/js-yaml": "4.0.9",
"@types/jsdom": "21.1.7",
"@types/lodash": "^4.17.16",
"@types/node": "^22",

View File

@@ -0,0 +1,98 @@
#!/usr/bin/env bun
import path from 'path'
import { DocsChunker } from '@/lib/documents/docs-chunker'
import type { DocChunk } from '@/lib/documents/types'
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('ChunkDocsScript')
/**
 * Script to chunk all .mdx files in the docs directory.
 *
 * Runs the DocsChunker over apps/docs/content/docs and logs a summary:
 * per-document chunk counts, a few sample chunks, token and embedding
 * statistics, and the header-level distribution. Exits non-zero on
 * failure.
 */
async function main() {
  try {
    // Initialize the docs chunker
    const chunker = new DocsChunker({
      chunkSize: 1024,
      minChunkSize: 100,
      overlap: 200,
      baseUrl: 'https://docs.simstudio.ai',
    })
    // Path to the docs content directory (relative to this package's cwd)
    const docsPath = path.join(process.cwd(), '../../apps/docs/content/docs')
    logger.info(`Processing docs from: ${docsPath}`)
    // Process all .mdx files
    const chunks = await chunker.chunkAllDocs(docsPath)
    logger.info(`\n=== CHUNKING RESULTS ===`)
    logger.info(`Total chunks: ${chunks.length}`)
    // Group chunks by document
    const chunksByDoc = chunks.reduce<Record<string, DocChunk[]>>((acc, chunk) => {
      if (!acc[chunk.sourceDocument]) {
        acc[chunk.sourceDocument] = []
      }
      acc[chunk.sourceDocument].push(chunk)
      return acc
    }, {})
    // Display summary
    logger.info(`\n=== DOCUMENT SUMMARY ===`)
    for (const [doc, docChunks] of Object.entries(chunksByDoc)) {
      logger.info(`${doc}: ${docChunks.length} chunks`)
    }
    // Display a few sample chunks
    logger.info(`\n=== SAMPLE CHUNKS ===`)
    chunks.slice(0, 3).forEach((chunk, index) => {
      logger.info(`\nChunk ${index + 1}:`)
      logger.info(`  Source: ${chunk.sourceDocument}`)
      logger.info(`  Header: ${chunk.headerText} (Level ${chunk.headerLevel})`)
      logger.info(`  Link: ${chunk.headerLink}`)
      logger.info(`  Tokens: ${chunk.tokenCount}`)
      logger.info(`  Embedding: ${chunk.embedding.length} dimensions (${chunk.embeddingModel})`)
      logger.info(
        `  Embedding Preview: [${chunk.embedding
          .slice(0, 5)
          .map((n) => n.toFixed(4))
          .join(', ')}...]`
      )
      logger.info(`  Text Preview: ${chunk.text.slice(0, 100)}...`)
    })
    // Calculate total token count and embedding coverage
    const totalTokens = chunks.reduce((sum, chunk) => sum + chunk.tokenCount, 0)
    const chunksWithEmbeddings = chunks.filter((chunk) => chunk.embedding.length > 0).length
    logger.info(`\n=== STATISTICS ===`)
    logger.info(`Total tokens: ${totalTokens}`)
    logger.info(`Average tokens per chunk: ${Math.round(totalTokens / chunks.length)}`)
    logger.info(`Chunks with embeddings: ${chunksWithEmbeddings}/${chunks.length}`)
    if (chunks.length > 0 && chunks[0].embedding.length > 0) {
      logger.info(`Embedding model: ${chunks[0].embeddingModel}`)
      logger.info(`Embedding dimensions: ${chunks[0].embedding.length}`)
    }
    // Count chunks per header level (H1..H6)
    const headerLevels = chunks.reduce<Record<number, number>>((acc, chunk) => {
      acc[chunk.headerLevel] = (acc[chunk.headerLevel] || 0) + 1
      return acc
    }, {})
    logger.info(`Header level distribution:`)
    Object.entries(headerLevels)
      .sort(([a], [b]) => Number(a) - Number(b))
      .forEach(([level, count]) => {
        logger.info(`  H${level}: ${count} chunks`)
      })
  } catch (error) {
    logger.error('Error processing docs:', error)
    process.exit(1)
  }
}
// Run the script
main().catch(console.error)

View File

@@ -0,0 +1,229 @@
#!/usr/bin/env bun
import path from 'path'
import { sql } from 'drizzle-orm'
import { DocsChunker } from '@/lib/documents/docs-chunker'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { docsEmbeddings } from '@/db/schema'
const logger = createLogger('ProcessDocsEmbeddings')
interface ProcessingOptions {
/** Clear existing docs embeddings before processing */
clearExisting?: boolean
/** Path to docs directory */
docsPath?: string
/** Base URL for generating links */
baseUrl?: string
/** Chunk size in tokens */
chunkSize?: number
/** Minimum chunk size in tokens */
minChunkSize?: number
/** Overlap between chunks in tokens */
overlap?: number
}
/**
 * Production script to process documentation and save embeddings to the
 * database.
 *
 * Chunks every .mdx file under the configured docs path, then inserts
 * the chunks (text + embedding + metadata) into `docsEmbeddings` in
 * batches. Never throws to the caller — fatal errors are reported via
 * `{ success: false, error }`.
 *
 * @param options optional overrides for paths, URLs, and chunk sizing
 * @returns a summary of processed/failed chunk counts and timing
 */
async function processDocsEmbeddings(options: ProcessingOptions = {}) {
  const startTime = Date.now()
  let processedChunks = 0
  let failedChunks = 0
  try {
    // Configuration with sensible defaults for each option.
    const config = {
      clearExisting: options.clearExisting ?? false,
      docsPath: options.docsPath ?? path.join(process.cwd(), '../../apps/docs/content/docs'),
      // Use localhost docs in development, production docs otherwise
      baseUrl:
        options.baseUrl ??
        (process.env.NODE_ENV === 'development'
          ? 'http://localhost:3001'
          : 'https://docs.simstudio.ai'),
      chunkSize: options.chunkSize ?? 300, // Max 300 tokens per chunk
      minChunkSize: options.minChunkSize ?? 100,
      overlap: options.overlap ?? 50,
    }
    logger.info('🚀 Starting docs embedding processing...')
    logger.info(`Configuration:`, {
      docsPath: config.docsPath,
      baseUrl: config.baseUrl,
      chunkSize: config.chunkSize,
      clearExisting: config.clearExisting,
    })
    // Clear existing embeddings if requested
    if (config.clearExisting) {
      logger.info('🗑️ Clearing existing docs embeddings...')
      try {
        // Fix: result was bound to an unused `deleteResult` variable.
        await db.delete(docsEmbeddings)
        logger.info(`✅ Successfully deleted existing embeddings`)
      } catch (error) {
        logger.error('❌ Failed to delete existing embeddings:', error)
        throw new Error('Failed to clear existing embeddings')
      }
    }
    // Initialize the docs chunker
    const chunker = new DocsChunker({
      chunkSize: config.chunkSize,
      minChunkSize: config.minChunkSize,
      overlap: config.overlap,
      baseUrl: config.baseUrl,
    })
    // Process all .mdx files
    logger.info(`📚 Processing docs from: ${config.docsPath}`)
    const chunks = await chunker.chunkAllDocs(config.docsPath)
    if (chunks.length === 0) {
      logger.warn('⚠️ No chunks generated from docs')
      return { success: false, processedChunks: 0, failedChunks: 0 }
    }
    logger.info(`📊 Generated ${chunks.length} chunks with embeddings`)
    // Save chunks to database in batches for better performance
    const batchSize = 10
    logger.info(`💾 Saving chunks to database (batch size: ${batchSize})...`)
    for (let i = 0; i < chunks.length; i += batchSize) {
      const batch = chunks.slice(i, i + batchSize)
      try {
        // Map chunk objects onto the docsEmbeddings row shape
        const batchData = batch.map((chunk) => ({
          chunkText: chunk.text,
          sourceDocument: chunk.sourceDocument,
          sourceLink: chunk.headerLink,
          headerText: chunk.headerText,
          headerLevel: chunk.headerLevel,
          tokenCount: chunk.tokenCount,
          embedding: chunk.embedding,
          embeddingModel: chunk.embeddingModel,
          metadata: chunk.metadata,
        }))
        // Insert batch
        await db.insert(docsEmbeddings).values(batchData)
        processedChunks += batch.length
        // Log progress every 5 batches and on the final batch
        if (i % (batchSize * 5) === 0 || i + batchSize >= chunks.length) {
          logger.info(
            `  💾 Saved ${Math.min(i + batchSize, chunks.length)}/${chunks.length} chunks`
          )
        }
      } catch (error) {
        // A failed batch is counted but does not abort the run.
        logger.error(`❌ Failed to save batch ${Math.floor(i / batchSize) + 1}:`, error)
        failedChunks += batch.length
      }
    }
    // Verify results against the table's actual row count
    const savedCount = await db
      .select({ count: sql<number>`count(*)` })
      .from(docsEmbeddings)
      .then((result) => result[0]?.count || 0)
    const duration = Date.now() - startTime
    logger.info(`✅ Processing complete!`)
    logger.info(`📊 Results:`)
    logger.info(`  • Total chunks processed: ${chunks.length}`)
    logger.info(`  • Successfully saved: ${processedChunks}`)
    logger.info(`  • Failed: ${failedChunks}`)
    logger.info(`  • Database total: ${savedCount}`)
    logger.info(`  • Duration: ${Math.round(duration / 1000)}s`)
    // Summary by document: chunk and token counts per source file
    const documentStats = chunks.reduce(
      (acc, chunk) => {
        if (!acc[chunk.sourceDocument]) {
          acc[chunk.sourceDocument] = { chunks: 0, tokens: 0 }
        }
        acc[chunk.sourceDocument].chunks++
        acc[chunk.sourceDocument].tokens += chunk.tokenCount
        return acc
      },
      {} as Record<string, { chunks: number; tokens: number }>
    )
    logger.info(`📋 Document breakdown:`)
    Object.entries(documentStats)
      .sort(([, a], [, b]) => b.chunks - a.chunks)
      .slice(0, 10) // Top 10 documents
      .forEach(([doc, stats]) => {
        logger.info(`${doc}: ${stats.chunks} chunks, ${stats.tokens} tokens`)
      })
    if (Object.keys(documentStats).length > 10) {
      logger.info(`  • ... and ${Object.keys(documentStats).length - 10} more documents`)
    }
    return {
      success: failedChunks === 0,
      processedChunks,
      failedChunks,
      totalChunks: chunks.length,
      databaseCount: savedCount,
      duration,
    }
  } catch (error) {
    logger.error('💥 Fatal error during processing:', error)
    return {
      success: false,
      processedChunks,
      failedChunks,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}
/**
 * CLI entry point: parses command-line flags (--clear, --help/-h) and
 * runs the embedding pipeline, exiting non-zero on failure.
 */
async function main() {
  const args = process.argv.slice(2)
  const options: ProcessingOptions = {}
  // Parse command line arguments
  if (args.includes('--clear')) {
    options.clearExisting = true
  }
  // Help is checked after flag parsing but exits before any work runs.
  if (args.includes('--help') || args.includes('-h')) {
    console.log(`
Usage: bun run scripts/process-docs-embeddings.ts [options]
Options:
  --clear        Clear existing docs embeddings before processing
  --help, -h     Show this help message
Examples:
  bun run scripts/process-docs-embeddings.ts
  bun run scripts/process-docs-embeddings.ts --clear
`)
    process.exit(0)
  }
  const result = await processDocsEmbeddings(options)
  if (!result.success) {
    process.exit(1)
  }
}
// Run the script if executed directly.
// NOTE(review): matching a filename substring against import.meta.url is a
// heuristic direct-run check — confirm it does not also fire when this
// module is imported under a matching path.
if (import.meta.url.includes('process-docs-embeddings.ts')) {
  main().catch((error) => {
    console.error('Script failed:', error)
    process.exit(1)
  })
}
export { processDocsEmbeddings }

View File

@@ -0,0 +1,8 @@
/**
 * Barrel module for the copilot store: re-exports the store hook and
 * its associated state/action types.
 */
export { useCopilotStore } from './store'
export type {
  CopilotActions,
  CopilotChat,
  CopilotMessage,
  CopilotState,
  CopilotStore,
} from './types'

View File

@@ -1,152 +1,553 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import {
type CopilotChat,
type CopilotMessage,
createChat,
deleteChat as deleteApiChat,
getChat,
listChats,
sendStreamingDocsMessage,
sendStreamingMessage,
updateChatMessages,
} from '@/lib/copilot-api'
import { createLogger } from '@/lib/logs/console-logger'
import { useEnvironmentStore } from '../settings/environment/store'
import { useWorkflowStore } from '../workflows/workflow/store'
import type { CopilotMessage, CopilotStore } from './types'
import { calculateBlockPosition, getNextBlockNumber } from './utils'
import type { CopilotStore } from './types'
const logger = createLogger('CopilotStore')
/**
* Initial state for the copilot store
*/
const initialState = {
currentChat: null,
chats: [],
messages: [],
isLoading: false,
isLoadingChats: false,
isSendingMessage: false,
isSaving: false,
error: null,
saveError: null,
workflowId: null,
}
/**
* Copilot store using the new unified API
*/
export const useCopilotStore = create<CopilotStore>()(
devtools(
(set, get) => ({
messages: [],
isProcessing: false,
error: null,
...initialState,
sendMessage: async (content: string) => {
try {
set({ isProcessing: true, error: null })
const workflowStore = useWorkflowStore.getState()
const apiKey = useEnvironmentStore.getState().getVariable('OPENAI_API_KEY')
if (!apiKey) {
throw new Error(
'OpenAI API key not found. Please add it to your environment variables.'
)
}
// User message
const newMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'user',
content: content.trim(),
timestamp: Date.now(),
}
// Format messages for OpenAI API
const formattedMessages = [
...get().messages.map((msg) => ({
role: msg.role,
content: msg.content,
})),
{
role: newMessage.role,
content: newMessage.content,
},
]
// Add message to local state first
set((state) => ({
messages: [...state.messages, newMessage],
}))
const response = await fetch('/api/copilot', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-OpenAI-Key': apiKey,
},
body: JSON.stringify({
messages: formattedMessages,
workflowState: {
blocks: workflowStore.blocks,
edges: workflowStore.edges,
},
}),
})
if (!response.ok) {
throw new Error('Failed to send message')
}
const data = await response.json()
// Handle any actions returned from the API
if (data.actions) {
// Process all block additions first to properly calculate positions
const blockActions = data.actions.filter((action: any) => action.name === 'addBlock')
blockActions.forEach((action: any, index: number) => {
const { type, name } = action.parameters
const id = crypto.randomUUID()
// Calculate position based on current blocks and action index
const position = calculateBlockPosition(workflowStore.blocks, index)
// Generate name if not provided
const blockName = name || `${type} ${getNextBlockNumber(workflowStore.blocks, type)}`
workflowStore.addBlock(id, type, blockName, position)
})
// Handle other actions (edges, removals, etc.)
const otherActions = data.actions.filter((action: any) => action.name !== 'addBlock')
otherActions.forEach((action: any) => {
switch (action.name) {
case 'addEdge': {
const { sourceId, targetId, sourceHandle, targetHandle } = action.parameters
workflowStore.addEdge({
id: crypto.randomUUID(),
source: sourceId,
target: targetId,
sourceHandle,
targetHandle,
type: 'custom',
})
break
}
case 'removeBlock': {
workflowStore.removeBlock(action.parameters.id)
break
}
case 'removeEdge': {
workflowStore.removeEdge(action.parameters.id)
break
}
}
})
}
// Add assistant's response to chat
if (data.message) {
set((state) => ({
messages: [
...state.messages,
{
id: crypto.randomUUID(),
role: 'assistant',
content: data.message,
timestamp: Date.now(),
},
],
}))
}
} catch (error) {
logger.error('Copilot error:', { error })
// Set current workflow ID
setWorkflowId: (workflowId: string | null) => {
const currentWorkflowId = get().workflowId
if (currentWorkflowId !== workflowId) {
set({
error: error instanceof Error ? error.message : 'Unknown error',
workflowId,
currentChat: null,
chats: [],
messages: [],
error: null,
saveError: null,
isSaving: false,
})
} finally {
set({ isProcessing: false })
// Load chats for the new workflow
if (workflowId) {
get().loadChats()
}
}
},
clearCopilot: () => set({ messages: [], error: null }),
setError: (error) => set({ error }),
// Load chats for current workflow
loadChats: async () => {
const { workflowId } = get()
if (!workflowId) {
logger.warn('Cannot load chats: no workflow ID set')
return
}
set({ isLoadingChats: true, error: null })
try {
const result = await listChats(workflowId)
if (result.success) {
set({
chats: result.chats,
isLoadingChats: false,
})
// If no current chat and we have chats, optionally select the most recent one
const { currentChat } = get()
if (!currentChat && result.chats.length > 0) {
// Auto-select most recent chat
await get().selectChat(result.chats[0])
}
logger.info(`Loaded ${result.chats.length} chats for workflow ${workflowId}`)
} else {
throw new Error(result.error || 'Failed to load chats')
}
} catch (error) {
logger.error('Failed to load chats:', error)
set({
error: error instanceof Error ? error.message : 'Failed to load chats',
isLoadingChats: false,
})
}
},
// Select a specific chat
selectChat: async (chat: CopilotChat) => {
set({ isLoading: true, error: null })
try {
const result = await getChat(chat.id)
if (result.success && result.chat) {
set({
currentChat: result.chat,
messages: result.chat.messages,
isLoading: false,
})
logger.info(`Selected chat: ${result.chat.title || 'Untitled'}`)
} else {
throw new Error(result.error || 'Failed to load chat')
}
} catch (error) {
logger.error('Failed to select chat:', error)
set({
error: error instanceof Error ? error.message : 'Failed to load chat',
isLoading: false,
})
}
},
// Create a new chat
createNewChat: async (options = {}) => {
const { workflowId } = get()
if (!workflowId) {
logger.warn('Cannot create chat: no workflow ID set')
return
}
set({ isLoading: true, error: null })
try {
const result = await createChat(workflowId, options)
if (result.success && result.chat) {
set({
currentChat: result.chat,
messages: result.chat.messages,
isLoading: false,
})
// Reload chats to include the new one
await get().loadChats()
logger.info(`Created new chat: ${result.chat.id}`)
} else {
throw new Error(result.error || 'Failed to create chat')
}
} catch (error) {
logger.error('Failed to create new chat:', error)
set({
error: error instanceof Error ? error.message : 'Failed to create chat',
isLoading: false,
})
}
},
// Delete a chat
deleteChat: async (chatId: string) => {
try {
const result = await deleteApiChat(chatId)
if (result.success) {
const { currentChat } = get()
// Remove from chats list
set((state) => ({
chats: state.chats.filter((chat) => chat.id !== chatId),
}))
// If this was the current chat, clear it
if (currentChat?.id === chatId) {
set({
currentChat: null,
messages: [],
})
}
logger.info(`Deleted chat: ${chatId}`)
} else {
throw new Error(result.error || 'Failed to delete chat')
}
} catch (error) {
logger.error('Failed to delete chat:', error)
set({
error: error instanceof Error ? error.message : 'Failed to delete chat',
})
}
},
// Send a regular message
sendMessage: async (message: string, options = {}) => {
const { workflowId, currentChat } = get()
const { stream = true } = options
console.log('[CopilotStore] sendMessage called:', {
message,
workflowId,
hasCurrentChat: !!currentChat,
stream,
})
if (!workflowId) {
console.warn('[CopilotStore] No workflow ID set')
logger.warn('Cannot send message: no workflow ID set')
return
}
set({ isSendingMessage: true, error: null })
// Add user message immediately
const userMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'user',
content: message,
timestamp: new Date().toISOString(),
}
// Add placeholder for streaming response
const streamingMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'assistant',
content: '',
timestamp: new Date().toISOString(),
}
console.log('[CopilotStore] Adding messages to state:', {
userMessageId: userMessage.id,
streamingMessageId: streamingMessage.id,
})
set((state) => ({
messages: [...state.messages, userMessage, streamingMessage],
}))
try {
console.log('[CopilotStore] Requesting streaming response')
const result = await sendStreamingMessage({
message,
chatId: currentChat?.id,
workflowId,
createNewChat: !currentChat,
stream,
})
console.log('[CopilotStore] Streaming result:', {
success: result.success,
hasStream: !!result.stream,
error: result.error,
})
if (result.success && result.stream) {
console.log('[CopilotStore] Starting stream processing')
await get().handleStreamingResponse(result.stream, streamingMessage.id)
console.log('[CopilotStore] Stream processing completed')
} else {
console.error('[CopilotStore] Stream request failed:', result.error)
throw new Error(result.error || 'Failed to send message')
}
} catch (error) {
logger.error('Failed to send message:', error)
// Replace streaming message with error
const errorMessage: CopilotMessage = {
id: streamingMessage.id,
role: 'assistant',
content:
'Sorry, I encountered an error while processing your message. Please try again.',
timestamp: new Date().toISOString(),
}
set((state) => ({
messages: state.messages.map((msg) =>
msg.id === streamingMessage.id ? errorMessage : msg
),
error: error instanceof Error ? error.message : 'Failed to send message',
isSendingMessage: false,
}))
}
},
// Send a docs RAG message
sendDocsMessage: async (query: string, options = {}) => {
const { workflowId, currentChat } = get()
const { stream = true, topK = 5 } = options
if (!workflowId) {
logger.warn('Cannot send docs message: no workflow ID set')
return
}
set({ isSendingMessage: true, error: null })
// Add user message immediately
const userMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'user',
content: query,
timestamp: new Date().toISOString(),
}
// Add placeholder for streaming response
const streamingMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'assistant',
content: '',
timestamp: new Date().toISOString(),
}
set((state) => ({
messages: [...state.messages, userMessage, streamingMessage],
}))
try {
const result = await sendStreamingDocsMessage({
query,
topK,
chatId: currentChat?.id,
workflowId,
createNewChat: !currentChat,
stream,
})
if (result.success && result.stream) {
await get().handleStreamingResponse(result.stream, streamingMessage.id)
} else {
throw new Error(result.error || 'Failed to send docs message')
}
} catch (error) {
logger.error('Failed to send docs message:', error)
// Replace streaming message with error
const errorMessage: CopilotMessage = {
id: streamingMessage.id,
role: 'assistant',
content:
'Sorry, I encountered an error while searching the documentation. Please try again.',
timestamp: new Date().toISOString(),
}
set((state) => ({
messages: state.messages.map((msg) =>
msg.id === streamingMessage.id ? errorMessage : msg
),
error: error instanceof Error ? error.message : 'Failed to send docs message',
isSendingMessage: false,
}))
}
},
// Handle streaming response (shared by both message types)
handleStreamingResponse: async (stream: ReadableStream, messageId: string) => {
console.log('[CopilotStore] handleStreamingResponse started:', {
messageId,
hasStream: !!stream,
})
const reader = stream.getReader()
const decoder = new TextDecoder()
let accumulatedContent = ''
let newChatId: string | undefined
// Citations no longer needed - LLM generates direct markdown links
let streamComplete = false
try {
while (true) {
const { done, value } = await reader.read()
if (done || streamComplete) break
const chunk = decoder.decode(value, { stream: true })
const lines = chunk.split('\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
try {
const data = JSON.parse(line.slice(6))
if (data.type === 'metadata') {
// Get chatId from metadata
if (data.chatId) {
newChatId = data.chatId
}
// Citations no longer needed - LLM generates direct markdown links
} else if (data.type === 'content') {
console.log('[CopilotStore] Received content chunk:', data.content)
accumulatedContent += data.content
console.log(
'[CopilotStore] Accumulated content length:',
accumulatedContent.length
)
// Update the streaming message
set((state) => ({
messages: state.messages.map((msg) =>
msg.id === messageId
? {
...msg,
content: accumulatedContent,
}
: msg
),
}))
console.log('[CopilotStore] Updated message state with content')
} else if (data.type === 'done' || data.type === 'complete') {
console.log('[CopilotStore] Received completion marker:', data.type)
// Final update
set((state) => ({
messages: state.messages.map((msg) =>
msg.id === messageId
? {
...msg,
content: accumulatedContent,
}
: msg
),
isSendingMessage: false,
}))
// Save chat to database after streaming completes
const chatIdToSave = newChatId || get().currentChat?.id
if (chatIdToSave) {
console.log('[CopilotStore] Saving chat to database:', chatIdToSave)
try {
await get().saveChatMessages(chatIdToSave)
} catch (saveError) {
// Save error is already handled in saveChatMessages and reflected in store state
// Don't break the streaming flow - user gets the message but knows save failed
logger.warn(`Chat save failed after streaming completed: ${saveError}`)
}
}
// Handle new chat creation
if (newChatId && !get().currentChat) {
console.log('[CopilotStore] Reloading chats for new chat:', newChatId)
// Reload chats to get the updated list
await get().loadChats()
}
streamComplete = true
console.log('[CopilotStore] Stream marked as complete')
break
} else if (data.type === 'error') {
console.error('[CopilotStore] Received error from stream:', data.error)
throw new Error(data.error || 'Streaming error')
}
} catch (parseError) {
console.warn(
'[CopilotStore] Failed to parse SSE data:',
parseError,
'Line:',
line
)
logger.warn('Failed to parse SSE data:', parseError)
}
} else if (line.trim()) {
console.log('[CopilotStore] Non-SSE line (ignored):', line)
}
}
}
console.log('[CopilotStore] Stream processing completed successfully')
logger.info(`Completed streaming response, content length: ${accumulatedContent.length}`)
} catch (error) {
console.error('[CopilotStore] Error handling streaming response:', error)
logger.error('Error handling streaming response:', error)
throw error
}
},
// Clear current messages
clearMessages: () => {
set({
currentChat: null,
messages: [],
error: null,
})
},
// Save chat messages to database
saveChatMessages: async (chatId: string) => {
const { messages } = get()
set({ isSaving: true, saveError: null })
try {
logger.info(`Saving ${messages.length} messages for chat ${chatId}`)
// Let the API handle title generation if needed
const result = await updateChatMessages(chatId, messages)
if (result.success && result.chat) {
// Update local state with the saved chat
set({
currentChat: result.chat,
messages: result.chat.messages,
isSaving: false,
saveError: null,
})
logger.info(
`Successfully saved chat ${chatId} with ${result.chat.messages.length} messages`
)
} else {
const errorMessage = result.error || 'Failed to save chat'
logger.error(`Failed to save chat ${chatId}:`, errorMessage)
set({
isSaving: false,
saveError: errorMessage,
})
throw new Error(errorMessage)
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error saving chat'
logger.error(`Error saving chat ${chatId}:`, error)
set({
isSaving: false,
saveError: errorMessage,
})
throw error
}
},
      // Clear the general error state (does not touch saveError)
      clearError: () => {
        set({ error: null })
      },
      // Clear the save-specific error state
      clearSaveError: () => {
        set({ saveError: null })
      },
      // Retry persisting the current messages after a failed save
      retrySave: async (chatId: string) => {
        await get().saveChatMessages(chatId)
      },
      // Reset the entire store back to its initial state
      reset: () => {
        set(initialState)
      },
}),
{ name: 'copilot-store' }
)

View File

@@ -1,20 +1,89 @@
/**
* Message interface for copilot conversations
*/
export interface CopilotMessage {
id: string
role: 'user' | 'assistant' | 'system'
content: string
timestamp: number
timestamp: string
citations?: Array<{
id: number
title: string
url: string
similarity?: number
}>
}
export interface CopilotState {
/**
* Chat interface for copilot conversations
*/
export interface CopilotChat {
id: string
title: string | null
model: string
messages: CopilotMessage[]
isProcessing: boolean
messageCount: number
createdAt: Date
updatedAt: Date
}
/**
* Copilot store state
*/
export interface CopilotState {
// Current active chat
currentChat: CopilotChat | null
// List of available chats for current workflow
chats: CopilotChat[]
// Current messages (from active chat)
messages: CopilotMessage[]
// Loading states
isLoading: boolean
isLoadingChats: boolean
isSendingMessage: boolean
// Error state
error: string | null
// Save operation error (separate from general errors)
saveError: string | null
isSaving: boolean
// Current workflow ID (for chat context)
workflowId: string | null
}
/**
* Copilot store actions
*/
export interface CopilotActions {
sendMessage: (content: string) => Promise<void>
clearCopilot: () => void
setError: (error: string | null) => void
// Chat management
setWorkflowId: (workflowId: string | null) => void
loadChats: () => Promise<void>
selectChat: (chat: CopilotChat) => Promise<void>
createNewChat: (options?: { title?: string; initialMessage?: string }) => Promise<void>
deleteChat: (chatId: string) => Promise<void>
// Message handling
sendMessage: (message: string, options?: { stream?: boolean }) => Promise<void>
sendDocsMessage: (query: string, options?: { stream?: boolean; topK?: number }) => Promise<void>
saveChatMessages: (chatId: string) => Promise<void>
// Utility actions
clearMessages: () => void
clearError: () => void
clearSaveError: () => void
retrySave: (chatId: string) => Promise<void>
reset: () => void
// Internal helper (not exposed publicly)
handleStreamingResponse: (stream: ReadableStream, messageId: string) => Promise<void>
}
/**
* Combined copilot store interface
*/
export type CopilotStore = CopilotState & CopilotActions

View File

@@ -1,33 +0,0 @@
/**
 * Compute the next sequence number for a block of the given type.
 * Scans existing blocks of the same type (case-insensitive) for names like
 * "<type> <n>" and returns max(n) + 1, or 1 when no numbered block exists.
 */
export const getNextBlockNumber = (blocks: Record<string, any>, type: string) => {
  // Escape regex metacharacters so types like "c++" don't produce an invalid
  // pattern when interpolated into the RegExp below. Hoisted out of the loop
  // so the pattern is compiled once.
  const escapedType = type.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
  const namePattern = new RegExp(`${escapedType}\\s*(\\d+)`, 'i')
  const numbers = Object.values(blocks)
    .filter((block: any) => block.type.toLowerCase() === type.toLowerCase())
    .map((block: any) => {
      const match = block.name.match(namePattern)
      return match ? Number.parseInt(match[1], 10) : 0
    })
  return Math.max(0, ...numbers) + 1
}
/**
 * Place the next block on a fixed 5-column grid.
 * The slot is determined by how many blocks already exist plus the index of
 * the current action within the batch being applied.
 */
export const calculateBlockPosition = (
  existingBlocks: Record<string, any>,
  index: number,
  startX = 100,
  startY = 100,
  xSpacing = 500,
  ySpacing = 150
) => {
  const BLOCKS_PER_ROW = 5
  const slot = Object.keys(existingBlocks).length + index
  return {
    x: startX + (slot % BLOCKS_PER_ROW) * xSpacing,
    y: startY + Math.floor(slot / BLOCKS_PER_ROW) * ySpacing,
  }
}

View File

@@ -228,7 +228,7 @@ export const resetAllStores = () => {
})
useExecutionStore.getState().reset()
useConsoleStore.setState({ entries: [], isOpen: false })
useCopilotStore.setState({ messages: [], isProcessing: false, error: null })
useCopilotStore.setState({ messages: [], isSendingMessage: false, error: null })
useCustomToolsStore.setState({ tools: {} })
useVariablesStore.getState().resetLoaded() // Reset variables store tracking
}

View File

@@ -1,4 +1,4 @@
export type PanelTab = 'console' | 'variables' | 'chat'
export type PanelTab = 'console' | 'variables' | 'chat' | 'copilot'
export interface PanelStore {
isOpen: boolean

View File

@@ -0,0 +1,143 @@
# Workflow YAML Store
This store dynamically generates a condensed YAML representation of workflows from the JSON workflow state. It extracts input values, connections, and block relationships to create a clean, readable format.
## Features
- **Dynamic Input Extraction**: Automatically reads input values from block configurations and subblock stores
- **Connection Mapping**: Determines preceding and following blocks from workflow edges
- **Type-Aware Processing**: Handles different input types (text, numbers, booleans, objects) appropriately
- **Auto-Refresh**: Automatically updates when workflow state or input values change
- **Clean Format**: Generates well-formatted YAML with proper indentation
## YAML Structure
```yaml
version: "1.0"
blocks:
block-id-1:
type: "starter"
name: "Start"
inputs:
startWorkflow: "manual"
following:
- "block-id-2"
block-id-2:
type: "agent"
name: "AI Agent"
inputs:
systemPrompt: "You are a helpful assistant"
userPrompt: "Process the input data"
model: "gpt-4"
temperature: 0.7
preceding:
- "block-id-1"
following:
- "block-id-3"
```
## Usage
### Basic Usage
```typescript
import { useWorkflowYamlStore } from '@/stores/workflows/yaml/store'
function WorkflowYamlViewer() {
const yaml = useWorkflowYamlStore(state => state.getYaml())
return (
<pre>
<code>{yaml}</code>
</pre>
)
}
```
### Manual Refresh
```typescript
import { useWorkflowYamlStore } from '@/stores/workflows/yaml/store'
function WorkflowControls() {
const refreshYaml = useWorkflowYamlStore(state => state.refreshYaml)
return (
<button onClick={refreshYaml}>
Refresh YAML
</button>
)
}
```
### Advanced Usage
```typescript
import { useWorkflowYamlStore } from '@/stores/workflows/yaml/store'
function WorkflowExporter() {
const { yaml, lastGenerated, generateYaml } = useWorkflowYamlStore()
const exportToFile = () => {
const blob = new Blob([yaml], { type: 'text/yaml' })
const url = URL.createObjectURL(blob)
const a = document.createElement('a')
a.href = url
a.download = 'workflow.yaml'
a.click()
URL.revokeObjectURL(url)
}
return (
<div>
<p>Last generated: {lastGenerated ? new Date(lastGenerated).toLocaleString() : 'Never'}</p>
<button onClick={generateYaml}>Regenerate</button>
<button onClick={exportToFile}>Export YAML</button>
</div>
)
}
```
## Input Types Handled
The store intelligently processes different subblock input types:
- **Text Inputs** (`short-input`, `long-input`): Trimmed strings
- **Dropdowns/Combobox** (`dropdown`, `combobox`): Selected values
- **Tables** (`table`): Arrays of objects (only if non-empty)
- **Code Blocks** (`code`): Preserves formatting for strings and objects
- **Switches** (`switch`): Boolean values
- **Sliders** (`slider`): Numeric values
- **Checkbox Lists** (`checkbox-list`): Arrays of selected items
## Auto-Refresh Behavior
The store automatically refreshes in these scenarios:
1. **Workflow Structure Changes**: When blocks are added, removed, or connections change
2. **Input Value Changes**: When any subblock input values are modified
3. **Debounced Updates**: Rapid consecutive changes are coalesced, so a burst of edits triggers only a single regeneration
## Performance
- **Lazy Generation**: YAML is only generated when requested
- **Caching**: Results are cached and only regenerated when data changes
- **Debouncing**: Rapid changes are debounced to improve performance
- **Selective Updates**: Only regenerates when meaningful changes occur
## Error Handling
If YAML generation fails, the store returns an error message in YAML comment format:
```yaml
# Error generating YAML: [error message]
```
## Dependencies
- `js-yaml`: For YAML serialization
- `zustand`: For state management
- `@/blocks`: For block configuration access
- `@/stores/workflows/workflow/store`: For workflow state
- `@/stores/workflows/subblock/store`: For input values

View File

@@ -0,0 +1,761 @@
import { load as yamlParse } from 'js-yaml'
import type { Edge } from 'reactflow'
import { createLogger } from '@/lib/logs/console-logger'
import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('WorkflowYamlImporter')
// Shape of one block entry as it appears in the YAML document.
interface YamlBlock {
  type: string
  name: string
  inputs?: Record<string, any>
  connections?: {
    // Edges arriving at this block
    incoming?: Array<{
      source: string
      sourceHandle?: string
      targetHandle?: string
    }>
    // Edges leaving this block
    outgoing?: Array<{
      target: string
      sourceHandle?: string
      targetHandle?: string
    }>
  }
  parentId?: string // Add parentId for nested blocks
}
// Top-level YAML document: a version string plus a map of block id -> block.
interface YamlWorkflow {
  version: string
  blocks: Record<string, YamlBlock>
}
// A block after conversion, ready to be added to the workflow store.
interface ImportedBlock {
  id: string
  type: string
  name: string
  inputs: Record<string, any>
  position: { x: number; y: number }
  data?: Record<string, any>
  parentId?: string
  extent?: 'parent'
}
// An edge after conversion, matching the reactflow edge shape used by the app.
interface ImportedEdge {
  id: string
  source: string
  target: string
  sourceHandle: string
  targetHandle: string
  type: string
}
// Result of converting a YAML workflow: converted entities plus diagnostics.
interface ImportResult {
  blocks: ImportedBlock[]
  edges: ImportedEdge[]
  errors: string[]
  warnings: string[]
}
/**
 * Parse YAML content and validate its structure.
 *
 * Validates the top-level shape (version, blocks map), each block's required
 * fields, and — new — the `connections` object that the importer actually
 * consumes (previously only the legacy `preceding`/`following` fields were
 * checked, so malformed connections passed validation and failed later).
 *
 * @param yamlContent - Raw YAML text
 * @returns The parsed workflow, or null plus a list of validation errors
 */
export function parseWorkflowYaml(yamlContent: string): {
  data: YamlWorkflow | null
  errors: string[]
} {
  const errors: string[] = []
  try {
    const data = yamlParse(yamlContent) as unknown
    // Validate top-level structure
    if (!data || typeof data !== 'object') {
      errors.push('Invalid YAML: Root must be an object')
      return { data: null, errors }
    }
    const parsedData = data as Record<string, unknown>
    if (!parsedData.version) {
      errors.push('Missing required field: version')
    }
    if (!parsedData.blocks || typeof parsedData.blocks !== 'object') {
      errors.push('Missing or invalid field: blocks')
      return { data: null, errors }
    }
    // Validate each block entry
    const blocks = parsedData.blocks as Record<string, unknown>
    Object.entries(blocks).forEach(([blockId, block]: [string, unknown]) => {
      if (!block || typeof block !== 'object') {
        errors.push(`Invalid block definition for '${blockId}': must be an object`)
        return
      }
      const blockData = block as Record<string, unknown>
      if (!blockData.type || typeof blockData.type !== 'string') {
        errors.push(`Invalid block '${blockId}': missing or invalid 'type' field`)
      }
      if (!blockData.name || typeof blockData.name !== 'string') {
        errors.push(`Invalid block '${blockId}': missing or invalid 'name' field`)
      }
      if (blockData.inputs && typeof blockData.inputs !== 'object') {
        errors.push(`Invalid block '${blockId}': 'inputs' must be an object`)
      }
      // Validate the connections object consumed by convertYamlToWorkflow.
      if (blockData.connections !== undefined) {
        if (!blockData.connections || typeof blockData.connections !== 'object') {
          errors.push(`Invalid block '${blockId}': 'connections' must be an object`)
        } else {
          const connections = blockData.connections as Record<string, unknown>
          if (connections.incoming !== undefined && !Array.isArray(connections.incoming)) {
            errors.push(`Invalid block '${blockId}': 'connections.incoming' must be an array`)
          }
          if (connections.outgoing !== undefined && !Array.isArray(connections.outgoing)) {
            errors.push(`Invalid block '${blockId}': 'connections.outgoing' must be an array`)
          }
        }
      }
      // Legacy generator-format fields; kept validated for backward compatibility.
      if (blockData.preceding && !Array.isArray(blockData.preceding)) {
        errors.push(`Invalid block '${blockId}': 'preceding' must be an array`)
      }
      if (blockData.following && !Array.isArray(blockData.following)) {
        errors.push(`Invalid block '${blockId}': 'following' must be an array`)
      }
    })
    if (errors.length > 0) {
      return { data: null, errors }
    }
    return { data: parsedData as unknown as YamlWorkflow, errors: [] }
  } catch (error) {
    errors.push(`YAML parsing error: ${error instanceof Error ? error.message : 'Unknown error'}`)
    return { data: null, errors }
  }
}
/**
 * Ensure every source/target/parent id mentioned by a block's connections
 * refers to a block that actually exists in the workflow.
 */
function validateBlockReferences(yamlWorkflow: YamlWorkflow): string[] {
  const errors: string[] = []
  const knownIds = new Set(Object.keys(yamlWorkflow.blocks))
  for (const [blockId, block] of Object.entries(yamlWorkflow.blocks)) {
    // Incoming edges must name an existing source block.
    for (const connection of block.connections?.incoming ?? []) {
      if (!knownIds.has(connection.source)) {
        errors.push(
          `Block '${blockId}' references non-existent source block '${connection.source}'`
        )
      }
    }
    // Outgoing edges must name an existing target block.
    for (const connection of block.connections?.outgoing ?? []) {
      if (!knownIds.has(connection.target)) {
        errors.push(
          `Block '${blockId}' references non-existent target block '${connection.target}'`
        )
      }
    }
    // Nested blocks must point at an existing parent.
    if (block.parentId && !knownIds.has(block.parentId)) {
      errors.push(`Block '${blockId}' references non-existent parent block '${block.parentId}'`)
    }
  }
  return errors
}
/**
 * Check each block's type against the block registry and warn about inputs
 * that the registered configuration does not declare.
 */
function validateBlockTypes(yamlWorkflow: YamlWorkflow): { errors: string[]; warnings: string[] } {
  const errors: string[] = []
  const warnings: string[] = []
  for (const [blockId, block] of Object.entries(yamlWorkflow.blocks)) {
    const blockConfig = getBlock(block.type)
    // Loop/parallel are container pseudo-types with no registry entry.
    if (block.type === 'loop' || block.type === 'parallel') {
      continue
    }
    if (!blockConfig) {
      errors.push(`Unknown block type '${block.type}' for block '${blockId}'`)
      continue
    }
    // Flag inputs the block configuration does not know about.
    if (block.inputs && blockConfig.subBlocks) {
      for (const inputKey of Object.keys(block.inputs)) {
        const isKnown = blockConfig.subBlocks.some((sb) => sb.id === inputKey)
        if (!isKnown) {
          warnings.push(
            `Block '${blockId}' has unknown input '${inputKey}' for type '${block.type}'`
          )
        }
      }
    }
  }
  return { errors, warnings }
}
/**
 * Assign x/y coordinates by organizing blocks into BFS layers starting from
 * blocks with no incoming connections. Layers become columns (left to right);
 * blocks within a layer are stacked vertically, centered on the start row.
 */
function calculateBlockPositions(
  yamlWorkflow: YamlWorkflow
): Record<string, { x: number; y: number }> {
  const HORIZONTAL_SPACING = 600
  const VERTICAL_SPACING = 200
  const START_X = 150
  const START_Y = 300
  const positions: Record<string, { x: number; y: number }> = {}
  const allIds = Object.keys(yamlWorkflow.blocks)
  // Roots: blocks with no incoming connections (fall back to the first block
  // so fully-cyclic workflows still get positions).
  let roots = allIds.filter((id) => {
    const incoming = yamlWorkflow.blocks[id].connections?.incoming
    return !incoming || incoming.length === 0
  })
  if (roots.length === 0 && allIds.length > 0) {
    roots = [allIds[0]]
  }
  // BFS from the roots, collecting one layer per level.
  const layers: string[][] = []
  const visited = new Set<string>()
  const queue = [...roots]
  while (queue.length > 0) {
    const layer: string[] = []
    let pending = queue.length
    while (pending-- > 0) {
      const id = queue.shift()!
      if (visited.has(id)) continue
      visited.add(id)
      layer.push(id)
      for (const connection of yamlWorkflow.blocks[id].connections?.outgoing ?? []) {
        if (!visited.has(connection.target)) {
          queue.push(connection.target)
        }
      }
    }
    if (layer.length > 0) {
      layers.push(layer)
    }
  }
  // Anything unreachable from a root goes into one trailing layer.
  const unreachable = allIds.filter((id) => !visited.has(id))
  if (unreachable.length > 0) {
    layers.push(unreachable)
  }
  layers.forEach((layer, layerIndex) => {
    const x = START_X + layerIndex * HORIZONTAL_SPACING
    layer.forEach((id, rowIndex) => {
      positions[id] = { x, y: START_Y + (rowIndex - layer.length / 2) * VERTICAL_SPACING }
    })
  })
  return positions
}
/**
 * Order blocks so that every parent is emitted before its children, by
 * walking parent links depth-first. Cycles in the parent chain are broken
 * with a warning instead of recursing forever.
 */
function sortBlocksByParentChildOrder(blocks: ImportedBlock[]): ImportedBlock[] {
  const ordered: ImportedBlock[] = []
  const done = new Set<string>()
  const inProgress = new Set<string>() // detects cycles in the parent chain
  const byId = new Map(blocks.map((block) => [block.id, block] as const))
  const visit = (block: ImportedBlock): void => {
    if (done.has(block.id)) {
      return
    }
    if (inProgress.has(block.id)) {
      // Parent chain loops back on itself — emit the block as-is to break it.
      logger.warn(`Circular parent-child dependency detected for block ${block.id}, breaking cycle`)
      ordered.push(block)
      done.add(block.id)
      return
    }
    inProgress.add(block.id)
    // Ensure the parent (if any, and not yet emitted) comes first.
    const parent = block.parentId ? byId.get(block.parentId) : undefined
    if (parent && !done.has(parent.id)) {
      visit(parent)
    }
    inProgress.delete(block.id)
    ordered.push(block)
    done.add(block.id)
  }
  for (const block of blocks) {
    visit(block)
  }
  return ordered
}
/**
 * Convert a parsed YAML workflow into blocks and edges ready for import.
 *
 * Runs reference and type validation first; on any error, returns empty
 * blocks/edges plus the diagnostics. Otherwise computes positions, converts
 * every block (with special handling for loop/parallel containers and nested
 * blocks), builds edges from outgoing connections, and returns the blocks
 * sorted so parents precede children.
 *
 * @param yamlWorkflow - A workflow previously validated by parseWorkflowYaml
 * @returns Converted blocks/edges and accumulated errors/warnings
 */
export function convertYamlToWorkflow(yamlWorkflow: YamlWorkflow): ImportResult {
  const errors: string[] = []
  const warnings: string[] = []
  const blocks: ImportedBlock[] = []
  const edges: ImportedEdge[] = []
  // Structural validation first; bail out before doing conversion work.
  errors.push(...validateBlockReferences(yamlWorkflow))
  const { errors: typeErrors, warnings: typeWarnings } = validateBlockTypes(yamlWorkflow)
  errors.push(...typeErrors)
  warnings.push(...typeWarnings)
  if (errors.length > 0) {
    return { blocks: [], edges: [], errors, warnings }
  }
  // Layout positions derived from the connection graph.
  const positions = calculateBlockPositions(yamlWorkflow)
  // Convert blocks
  Object.entries(yamlWorkflow.blocks).forEach(([blockId, yamlBlock]) => {
    const position = positions[blockId] || { x: 100, y: 100 }
    const importedBlock: ImportedBlock = {
      id: blockId,
      type: yamlBlock.type,
      name: yamlBlock.name,
      inputs: yamlBlock.inputs || {},
      position,
    }
    // Loop/parallel containers don't use subBlocks: their YAML inputs move
    // into the node's data field instead.
    if (yamlBlock.type === 'loop' || yamlBlock.type === 'parallel') {
      importedBlock.data = {
        width: 500,
        height: 300,
        type: yamlBlock.type === 'loop' ? 'loopNode' : 'parallelNode',
        ...(yamlBlock.inputs || {}),
      }
      importedBlock.inputs = {}
    }
    // Nested blocks record their parent both at the top level and in data,
    // for consistency with how the rest of the system reads them.
    if (yamlBlock.parentId) {
      importedBlock.parentId = yamlBlock.parentId
      importedBlock.extent = 'parent'
      if (!importedBlock.data) {
        importedBlock.data = {}
      }
      importedBlock.data.parentId = yamlBlock.parentId
      importedBlock.data.extent = 'parent'
    }
    blocks.push(importedBlock)
  })
  // Convert edges from outgoing connections. The id uses the connection
  // index: the previous Date.now() suffix produced identical ids for edges
  // created in the same millisecond between the same pair of blocks.
  Object.entries(yamlWorkflow.blocks).forEach(([blockId, yamlBlock]) => {
    yamlBlock.connections?.outgoing?.forEach((connection, connectionIndex) => {
      edges.push({
        id: `${blockId}-${connection.target}-${connectionIndex}`,
        source: blockId,
        target: connection.target,
        sourceHandle: connection.sourceHandle || 'source',
        targetHandle: connection.targetHandle || 'target',
        type: 'workflowEdge',
      })
    })
  })
  // Parents must be created before their children.
  const sortedBlocks = sortBlocksByParentChildOrder(blocks)
  return { blocks: sortedBlocks, edges, errors, warnings }
}
/**
 * Import workflow from YAML by creating complete state upfront (no UI simulation)
 *
 * Parses the YAML, converts it to blocks/edges, preserves any existing starter
 * block (merging its current subblock values with YAML-provided inputs),
 * persists the resulting state via the workflow state API, and — when
 * importing into the active workflow — mirrors the new state into the local
 * stores for immediate display.
 *
 * @param yamlContent - Raw YAML text describing the workflow
 * @param workflowActions - Store actions; only applyAutoLayout and
 *        getExistingBlocks are currently used here, the remaining members are
 *        kept for interface compatibility with existing callers
 * @param targetWorkflowId - Optional workflow to import into; when omitted the
 *        active workflow from the registry is used
 * @returns Success flag plus errors/warnings and a human-readable summary
 */
export async function importWorkflowFromYaml(
  yamlContent: string,
  workflowActions: {
    addBlock: (
      id: string,
      type: string,
      name: string,
      position: { x: number; y: number },
      data?: Record<string, any>,
      parentId?: string,
      extent?: 'parent'
    ) => void
    addEdge: (edge: Edge) => void
    applyAutoLayout: () => void
    setSubBlockValue: (blockId: string, subBlockId: string, value: any) => void
    getExistingBlocks: () => Record<string, any>
  },
  targetWorkflowId?: string
): Promise<{ success: boolean; errors: string[]; warnings: string[]; summary?: string }> {
  try {
    // Parse YAML
    const { data: yamlWorkflow, errors: parseErrors } = parseWorkflowYaml(yamlContent)
    if (!yamlWorkflow || parseErrors.length > 0) {
      return { success: false, errors: parseErrors, warnings: [] }
    }
    // Convert to importable format
    const { blocks, edges, errors, warnings } = convertYamlToWorkflow(yamlWorkflow)
    if (errors.length > 0) {
      return { success: false, errors, warnings }
    }
    // Get the existing workflow state (to preserve starter blocks if they exist)
    let existingBlocks: Record<string, any> = {}
    if (targetWorkflowId) {
      // For target workflow, fetch from API
      try {
        const response = await fetch(`/api/workflows/${targetWorkflowId}`)
        if (response.ok) {
          const workflowData = await response.json()
          existingBlocks = workflowData.data?.state?.blocks || {}
        }
      } catch (error) {
        // Best-effort: a fetch failure only means we import without reusing
        // an existing starter block
        logger.warn(`Failed to fetch existing blocks for workflow ${targetWorkflowId}:`, error)
      }
    } else {
      // For active workflow, use from store
      existingBlocks = workflowActions.getExistingBlocks()
    }
    const existingStarterBlocks = Object.values(existingBlocks).filter(
      (block: any) => block.type === 'starter'
    )
    // Get current workflow state and resolve which workflow we import into
    const currentWorkflowState = useWorkflowStore.getState()
    const activeWorkflowId = targetWorkflowId || useWorkflowRegistry.getState().activeWorkflowId
    if (!activeWorkflowId) {
      return { success: false, errors: ['No active workflow'], warnings: [] }
    }
    // Build complete blocks object
    const completeBlocks: Record<string, any> = {}
    const completeSubBlockValues: Record<string, Record<string, any>> = {}
    // Maps YAML block IDs to the freshly generated UUIDs used in saved state
    const yamlIdToActualId = new Map<string, string>()
    // Handle starter block
    const starterBlock = blocks.find((block) => block.type === 'starter')
    if (starterBlock) {
      if (existingStarterBlocks.length > 0) {
        // Use existing starter block
        const existingStarter = existingStarterBlocks[0] as any
        yamlIdToActualId.set(starterBlock.id, existingStarter.id)
        // Keep existing starter but update its inputs
        completeBlocks[existingStarter.id] = {
          ...existingStarter,
          // Update name if provided in YAML
          name: starterBlock.name !== 'Start' ? starterBlock.name : existingStarter.name,
        }
        // Preserve the starter's current subblock values, then let any
        // YAML-provided inputs override them
        completeSubBlockValues[existingStarter.id] = {
          ...(currentWorkflowState.blocks[existingStarter.id]?.subBlocks
            ? Object.fromEntries(
                Object.entries(currentWorkflowState.blocks[existingStarter.id].subBlocks).map(
                  ([key, subBlock]: [string, any]) => [key, subBlock.value]
                )
              )
            : {}),
          ...starterBlock.inputs, // Override with YAML values
        }
      } else {
        // Create new starter block
        const starterBlockId = crypto.randomUUID()
        yamlIdToActualId.set(starterBlock.id, starterBlockId)
        // Create complete starter block from block config
        const blockConfig = getBlock('starter')
        if (blockConfig) {
          // Initialize every configured subblock with a null value
          const subBlocks: Record<string, any> = {}
          blockConfig.subBlocks.forEach((subBlock) => {
            subBlocks[subBlock.id] = {
              id: subBlock.id,
              type: subBlock.type,
              value: null,
            }
          })
          completeBlocks[starterBlockId] = {
            id: starterBlockId,
            type: 'starter',
            name: starterBlock.name,
            position: starterBlock.position,
            subBlocks,
            outputs: resolveOutputType(blockConfig.outputs),
            enabled: true,
            horizontalHandles: true,
            isWide: false,
            height: 0,
            data: starterBlock.data || {},
          }
          // Set starter block values
          completeSubBlockValues[starterBlockId] = { ...starterBlock.inputs }
        }
      }
    }
    // Create all other blocks
    // Note: blocks are sorted so parents come before children, but we still
    // need the two-phase parent mapping below because we generate new UUIDs
    for (const block of blocks) {
      if (block.type === 'starter') {
        continue // Already handled above
      }
      const blockId = crypto.randomUUID()
      yamlIdToActualId.set(block.id, blockId)
      // Create complete block from block config
      const blockConfig = getBlock(block.type)
      if (!blockConfig && (block.type === 'loop' || block.type === 'parallel')) {
        // Handle loop/parallel container blocks, which have no block config
        completeBlocks[blockId] = {
          id: blockId,
          type: block.type,
          name: block.name,
          position: block.position,
          subBlocks: {},
          outputs: {},
          enabled: true,
          horizontalHandles: true,
          isWide: false,
          height: 0,
          data: block.data || {}, // Configuration is already in block.data from convertYamlToWorkflow
        }
        // Loop/parallel blocks don't use subBlocks, their config is in data,
        // so there are no subblock values to record
      } else if (blockConfig) {
        // Handle regular blocks: initialize configured subblocks with nulls
        const subBlocks: Record<string, any> = {}
        blockConfig.subBlocks.forEach((subBlock) => {
          subBlocks[subBlock.id] = {
            id: subBlock.id,
            type: subBlock.type,
            value: null,
          }
        })
        completeBlocks[blockId] = {
          id: blockId,
          type: block.type,
          name: block.name,
          position: block.position,
          subBlocks,
          outputs: resolveOutputType(blockConfig.outputs),
          enabled: true,
          horizontalHandles: true,
          isWide: false,
          height: 0,
          data: block.data || {}, // This already includes parentId and extent from convertYamlToWorkflow
        }
        // Set block input values
        completeSubBlockValues[blockId] = { ...block.inputs }
      } else {
        // Unknown block type with no config: skip it (a warning was already
        // raised during validation for unknown types)
        logger.warn(`No block config found for type: ${block.type} (block: ${block.id})`)
      }
    }
    // Update parent-child relationships with mapped IDs
    // This two-phase approach is necessary because:
    // 1. We generate new UUIDs for all blocks (can't reuse YAML IDs)
    // 2. Parent references in YAML use the original IDs, need to map to new UUIDs
    // 3. All blocks must exist before we can map their parent references
    for (const blockData of Object.values(completeBlocks)) {
      if (blockData.data?.parentId) {
        const mappedParentId = yamlIdToActualId.get(blockData.data.parentId)
        if (mappedParentId) {
          blockData.data.parentId = mappedParentId
        } else {
          logger.warn(`Parent block not found for mapping: ${blockData.data.parentId}`)
          // Remove invalid parent reference
          blockData.data.parentId = undefined
          blockData.data.extent = undefined
        }
      }
    }
    // Create complete edges using the ID mapping; edges referencing blocks
    // that were skipped above are dropped with a warning
    const completeEdges: any[] = []
    for (const edge of edges) {
      const sourceId = yamlIdToActualId.get(edge.source)
      const targetId = yamlIdToActualId.get(edge.target)
      if (sourceId && targetId) {
        completeEdges.push({
          ...edge,
          source: sourceId,
          target: targetId,
        })
      } else {
        logger.warn(`Skipping edge - missing blocks: ${edge.source} -> ${edge.target}`)
      }
    }
    // Merge collected subblock values directly into each block's subBlocks
    for (const [blockId, blockData] of Object.entries(completeBlocks)) {
      const blockValues = completeSubBlockValues[blockId] || {}
      // Update subBlock values in place
      for (const [subBlockId, subBlockData] of Object.entries(blockData.subBlocks || {})) {
        if (blockValues[subBlockId] !== undefined && blockValues[subBlockId] !== null) {
          ;(subBlockData as any).value = blockValues[subBlockId]
        }
      }
    }
    // Create final workflow state
    const completeWorkflowState = {
      blocks: completeBlocks,
      edges: completeEdges,
      loops: {},
      parallels: {},
      lastSaved: Date.now(),
      isDeployed: false,
      deployedAt: undefined,
      deploymentStatuses: {},
      hasActiveSchedule: false,
      hasActiveWebhook: false,
    }
    // Save directly to database via API
    const response = await fetch(`/api/workflows/${activeWorkflowId}/state`, {
      method: 'PUT',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(completeWorkflowState),
    })
    if (!response.ok) {
      const errorData = await response.json()
      logger.error('Failed to save workflow state:', errorData.error)
      return {
        success: false,
        errors: [`Database save failed: ${errorData.error || 'Unknown error'}`],
        warnings,
      }
    }
    // Update local state for immediate UI display (only if importing into active workflow)
    if (!targetWorkflowId) {
      useWorkflowStore.setState(completeWorkflowState)
      // Set subblock values in local store
      useSubBlockStore.setState((state: any) => ({
        workflowValues: {
          ...state.workflowValues,
          [activeWorkflowId]: completeSubBlockValues,
        },
      }))
    }
    // Apply auto layout
    workflowActions.applyAutoLayout()
    // A reused starter block is an update, not a newly created block
    const totalBlocksCreated =
      Object.keys(completeBlocks).length - (existingStarterBlocks.length > 0 ? 1 : 0)
    return {
      success: true,
      errors: [],
      warnings,
      summary: `Imported ${totalBlocksCreated} new blocks and ${completeEdges.length} connections. ${
        existingStarterBlocks.length > 0
          ? 'Updated existing starter block.'
          : 'Created new starter block.'
      }`,
    }
  } catch (error) {
    logger.error('YAML import failed:', error)
    return {
      success: false,
      errors: [`Import failed: ${error instanceof Error ? error.message : 'Unknown error'}`],
      warnings: [],
    }
  }
}

View File

@@ -0,0 +1,156 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console-logger'
import { generateWorkflowYaml } from '@/lib/workflows/yaml-generator'
import { useSubBlockStore } from '../subblock/store'
import { useWorkflowStore } from '../workflow/store'
const logger = createLogger('WorkflowYamlStore')
/** Cached YAML text for the current workflow plus its generation timestamp. */
interface WorkflowYamlState {
  yaml: string
  // Epoch ms of the last generation; undefined until the first generate
  lastGenerated?: number
}
/** Actions for generating and reading the cached YAML representation. */
interface WorkflowYamlActions {
  // Regenerate the YAML from the current workflow + subblock stores
  generateYaml: () => void
  // Return the cached YAML, regenerating first if it is stale
  getYaml: () => string
  // Force a regeneration (alias used by the store subscriptions)
  refreshYaml: () => void
}
type WorkflowYamlStore = WorkflowYamlState & WorkflowYamlActions
/**
* Get subblock values organized by block for the shared utility
*/
function getSubBlockValues() {
const workflowState = useWorkflowStore.getState()
const subBlockStore = useSubBlockStore.getState()
const subBlockValues: Record<string, Record<string, any>> = {}
Object.entries(workflowState.blocks).forEach(([blockId]) => {
subBlockValues[blockId] = {}
// Get all subblock values for this block
Object.keys(workflowState.blocks[blockId].subBlocks || {}).forEach((subBlockId) => {
const value = subBlockStore.getValue(blockId, subBlockId)
if (value !== undefined) {
subBlockValues[blockId][subBlockId] = value
}
})
})
return subBlockValues
}
// Track if subscriptions have been initialized (module-level, set once)
let subscriptionsInitialized = false
// Track pending debounce timeout IDs so they can be cleared/replaced
let workflowRefreshTimeoutId: NodeJS.Timeout | null = null
let subBlockRefreshTimeoutId: NodeJS.Timeout | null = null
// Initialize subscriptions lazily (on first store use) so subscribing does
// not happen at module load time. Safe to call repeatedly; only the first
// call has any effect.
function initializeSubscriptions() {
  if (subscriptionsInitialized) return
  subscriptionsInitialized = true
  // Auto-refresh YAML when workflow state changes
  let lastWorkflowState: { blockCount: number; edgeCount: number } | null = null
  useWorkflowStore.subscribe((state) => {
    const currentState = {
      blockCount: Object.keys(state.blocks).length,
      edgeCount: state.edges.length,
    }
    // Only refresh if the structure has changed (block/edge counts). Changes
    // that keep both counts equal do not trigger a refresh here.
    if (
      !lastWorkflowState ||
      lastWorkflowState.blockCount !== currentState.blockCount ||
      lastWorkflowState.edgeCount !== currentState.edgeCount
    ) {
      lastWorkflowState = currentState
      // Clear existing timeout to properly debounce
      if (workflowRefreshTimeoutId) {
        clearTimeout(workflowRefreshTimeoutId)
      }
      // Debounce the refresh to avoid excessive updates
      const refreshYaml = useWorkflowYamlStore.getState().refreshYaml
      workflowRefreshTimeoutId = setTimeout(() => {
        refreshYaml()
        workflowRefreshTimeoutId = null
      }, 100)
    }
  })
  // Subscribe to subblock store changes
  let lastSubBlockChangeTime = 0
  useSubBlockStore.subscribe((state) => {
    const currentTime = Date.now()
    // Time-gate rapid changes: notifications within 100ms of the last
    // accepted one are skipped entirely; the already-scheduled refresh reads
    // fresh state when it fires, so skipped changes are still picked up
    if (currentTime - lastSubBlockChangeTime > 100) {
      lastSubBlockChangeTime = currentTime
      // Clear any pending refresh before scheduling a new one
      if (subBlockRefreshTimeoutId) {
        clearTimeout(subBlockRefreshTimeoutId)
      }
      const refreshYaml = useWorkflowYamlStore.getState().refreshYaml
      subBlockRefreshTimeoutId = setTimeout(() => {
        refreshYaml()
        subBlockRefreshTimeoutId = null
      }, 100)
    }
  })
}
export const useWorkflowYamlStore = create<WorkflowYamlStore>()(
devtools(
(set, get) => ({
yaml: '',
lastGenerated: undefined,
generateYaml: () => {
// Initialize subscriptions on first use
initializeSubscriptions()
const workflowState = useWorkflowStore.getState()
const subBlockValues = getSubBlockValues()
const yaml = generateWorkflowYaml(workflowState, subBlockValues)
set({
yaml,
lastGenerated: Date.now(),
})
},
getYaml: () => {
// Initialize subscriptions on first use
initializeSubscriptions()
const currentTime = Date.now()
const { yaml, lastGenerated } = get()
// Auto-refresh if data is stale (older than 1 second) or never generated
if (!lastGenerated || currentTime - lastGenerated > 1000) {
get().generateYaml()
return get().yaml
}
return yaml
},
refreshYaml: () => {
get().generateYaml()
},
}),
{
name: 'workflow-yaml-store',
}
)
)

View File

@@ -0,0 +1,103 @@
import type { ToolConfig, ToolResponse } from '../types'
/** Parameters accepted by the internal docs search tool. */
interface DocsSearchParams {
  query: string
  // Number of results to return; clamped to [1, 20], defaults to 5
  topK?: number
}

/** A single documentation search hit as returned by /api/docs/search. */
interface DocsSearchResult {
  id: string
  title: string
  content: string
  url: string
  // Relevance score as reported by the search backend; 0 when absent
  score: number
  metadata?: Record<string, any>
}

/** Tool response wrapping the normalized search results. */
interface DocsSearchResponse extends ToolResponse {
  output: {
    results: DocsSearchResult[]
    query: string
    totalResults: number
    searchTime: number
  }
}

/**
 * Internal tool that searches Sim Studio documentation via the
 * /api/docs/search endpoint and normalizes the results.
 */
export const docsSearchTool: ToolConfig<DocsSearchParams, DocsSearchResponse> = {
  id: 'docs_search_internal',
  name: 'Search Documentation',
  description:
    'Search Sim Studio documentation for information about features, tools, workflows, and functionality',
  version: '1.0.0',
  params: {
    query: {
      type: 'string',
      required: true,
      description: 'The search query to find relevant documentation',
    },
    topK: {
      type: 'number',
      required: false,
      description: 'Number of results to return (default: 5, max: 20)',
    },
  },
  request: {
    url: '/api/docs/search',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => {
      // Default with ?? (not ||) so an explicit 0 is clamped to 1 rather
      // than silently replaced by the default, then clamp into [1, 20]
      const topK = Math.min(20, Math.max(1, params.topK ?? 5))
      return {
        query: params.query,
        topK,
      }
    },
    isInternalRoute: true,
  },
  transformResponse: async (
    response: Response,
    params?: DocsSearchParams
  ): Promise<DocsSearchResponse> => {
    if (!response.ok) {
      throw new Error(`Docs search failed: ${response.status} ${response.statusText}`)
    }
    const data = await response.json()
    // Normalize the API response, filling defaults for any missing fields
    const results: DocsSearchResult[] = (data.results || []).map((result: any) => ({
      id: result.id || '',
      title: result.title || 'Untitled',
      content: result.content || '',
      url: result.url || '',
      score: typeof result.score === 'number' ? result.score : 0,
      metadata: result.metadata || {},
    }))
    return {
      success: true,
      output: {
        results,
        query: params?.query || '',
        totalResults: results.length,
        searchTime: data.searchTime || 0,
      },
    }
  },
  transformError: (error: any): string => {
    if (error instanceof Error) {
      return `Documentation search failed: ${error.message}`
    }
    return 'An unexpected error occurred while searching documentation'
  },
}

View File

@@ -2,11 +2,19 @@ import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { useCustomToolsStore } from '@/stores/custom-tools/store'
import { useEnvironmentStore } from '@/stores/settings/environment/store'
import { docsSearchTool } from './docs/search'
import { tools } from './registry'
import type { TableRow, ToolConfig, ToolResponse } from './types'
import { getUserWorkflowTool } from './workflow/get-yaml'
const logger = createLogger('ToolsUtils')
// Internal-only tools (not exposed to users in workflows)
const internalTools: Record<string, ToolConfig> = {
docs_search_internal: docsSearchTool,
get_user_workflow: getUserWorkflowTool,
}
/**
* Transforms a table from the store format to a key-value object
* @param table Array of table rows from the store
@@ -269,6 +277,10 @@ export function createCustomToolRequestBody(
// Get a tool by its ID
export function getTool(toolId: string): ToolConfig | undefined {
// Check for internal tools first
const internalTool = internalTools[toolId]
if (internalTool) return internalTool
// Check for built-in tools
const builtInTool = tools[toolId]
if (builtInTool) return builtInTool
@@ -302,6 +314,10 @@ export async function getToolAsync(
toolId: string,
workflowId?: string
): Promise<ToolConfig | undefined> {
// Check for internal tools first
const internalTool = internalTools[toolId]
if (internalTool) return internalTool
// Check for built-in tools
const builtInTool = tools[toolId]
if (builtInTool) return builtInTool

View File

@@ -0,0 +1,49 @@
import type { ToolConfig, ToolResponse } from '../types'
/** Parameters for the get-user-workflow internal tool. */
interface GetWorkflowParams {
  includeMetadata?: boolean
  // Execution context injected by the tool runner, carrying the workflow ID
  _context?: {
    workflowId: string
  }
}

/** Tool response carrying the workflow's YAML and optional metadata. */
interface GetWorkflowResponse extends ToolResponse {
  output: {
    yaml: string
    metadata?: {
      blockCount: number
      connectionCount: number
      lastModified: string
    }
  }
}

/**
 * Internal tool that returns the current user's workflow as YAML by calling
 * the /api/tools/get-user-workflow endpoint (an API route is used so no
 * Node.js-only modules are imported in the browser).
 */
export const getUserWorkflowTool: ToolConfig<GetWorkflowParams, GetWorkflowResponse> = {
  id: 'get_user_workflow',
  name: 'Get User Workflow',
  description:
    "Get the current user's specific workflow (not general Sim Studio documentation). Returns YAML format showing only the blocks that the user has actually built in their workflow, with their specific configurations, inputs, and connections.",
  version: '1.0.0',
  params: {
    includeMetadata: {
      type: 'boolean',
      required: false,
      description: 'Whether to include additional metadata about the workflow (default: false)',
    },
  },
  request: {
    url: '/api/tools/get-user-workflow',
    method: 'POST',
    headers: () => {
      return { 'Content-Type': 'application/json' }
    },
    body: (params) => {
      // Workflow ID comes from the injected execution context, if any
      const workflowId = params._context?.workflowId
      const includeMetadata = params.includeMetadata || false
      return { workflowId, includeMetadata }
    },
    isInternalRoute: true,
  },
}

View File

@@ -95,6 +95,7 @@
"@radix-ui/react-tooltip": "^1.1.6",
"@react-email/components": "^0.0.34",
"@sentry/nextjs": "^9.15.0",
"@types/js-yaml": "4.0.9",
"@types/three": "0.177.0",
"@vercel/og": "^0.6.5",
"@vercel/speed-insights": "^1.2.0",
@@ -117,6 +118,7 @@
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
"jose": "6.0.11",
"js-yaml": "4.1.0",
"jwt-decode": "^4.0.0",
"lenis": "^1.2.3",
"lucide-react": "^0.479.0",
@@ -1325,6 +1327,8 @@
"@types/jest": ["@types/jest@26.0.24", "", { "dependencies": { "jest-diff": "^26.0.0", "pretty-format": "^26.0.0" } }, "sha512-E/X5Vib8BWqZNRlDxj9vYXhsDwPYbPINqKF9BsnSoon4RQ0D9moEuLD8txgyypFLH7J4+Lho9Nr/c8H0Fi+17w=="],
"@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="],
"@types/jsdom": ["@types/jsdom@21.1.7", "", { "dependencies": { "@types/node": "*", "@types/tough-cookie": "*", "parse5": "^7.0.0" } }, "sha512-yOriVnggzrnQ3a9OKOCxaVuSug3w3/SbOj5i7VwXWZEyUNl3bLF9V3MfxGbZKuwqJOQyRfqXyROBB1CoZLFWzA=="],
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
@@ -1483,7 +1487,7 @@
"arg": ["arg@5.0.2", "", {}, "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg=="],
"argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="],
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
"aria-hidden": ["aria-hidden@1.2.6", "", { "dependencies": { "tslib": "^2.0.0" } }, "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA=="],
@@ -3475,8 +3479,6 @@
"jest-diff/pretty-format": ["pretty-format@26.6.2", "", { "dependencies": { "@jest/types": "^26.6.2", "ansi-regex": "^5.0.0", "ansi-styles": "^4.0.0", "react-is": "^17.0.1" } }, "sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg=="],
"js-yaml/argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
"jsondiffpatch/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="],
"linebreak/base64-js": ["base64-js@0.0.8", "", {}, "sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw=="],
@@ -3501,6 +3503,8 @@
"loose-envify/js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
"mammoth/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="],
"mdast-util-find-and-replace/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="],
"micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
@@ -3847,6 +3851,8 @@
"openai/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
"openapi/js-yaml/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="],
"openapi/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
"ora/cli-cursor/restore-cursor": ["restore-cursor@5.1.0", "", { "dependencies": { "onetime": "^7.0.0", "signal-exit": "^4.1.0" } }, "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="],