Greptile fixes

This commit is contained in:
Siddharth Ganesan
2025-07-09 12:54:14 -07:00
parent 5c487f59f9
commit ef681d8a04
24 changed files with 626 additions and 205 deletions

View File

@@ -2,6 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
type CopilotChat,
type CopilotMessage,
createChat,
generateChatTitle,
generateDocsResponse,
@@ -52,8 +54,8 @@ export async function POST(req: NextRequest) {
})
// Handle chat context
let currentChat: any = null
let conversationHistory: any[] = []
let currentChat: CopilotChat | null = null
let conversationHistory: CopilotMessage[] = []
if (chatId) {
// Load existing chat
@@ -126,53 +128,75 @@ export async function POST(req: NextRequest) {
controller.enqueue(encoder.encode(`data: ${JSON.stringify(contentChunk)}\n\n`))
}
// Save conversation to database after streaming completes
if (currentChat) {
const userMessage = {
id: crypto.randomUUID(),
role: 'user',
content: query,
timestamp: new Date().toISOString(),
}
const assistantMessage = {
id: crypto.randomUUID(),
role: 'assistant',
content: accumulatedResponse,
timestamp: new Date().toISOString(),
citations: result.sources.map((source, index) => ({
id: index + 1,
title: source.title,
url: source.url,
})),
}
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
// Generate title if this is the first message
let updatedTitle = currentChat.title
if (!updatedTitle && conversationHistory.length === 0) {
updatedTitle = await generateChatTitle(query)
}
// Update the chat in database
await updateChat(currentChat.id, session.user.id, {
title: updatedTitle,
messages: updatedMessages,
})
logger.info(`[${requestId}] Updated chat ${currentChat.id} with new docs messages`)
}
// Send completion marker
// Send completion marker first to unblock the user
controller.enqueue(encoder.encode(`data: {"type":"done"}\n\n`))
// Save conversation to database asynchronously (non-blocking)
if (currentChat) {
// Fire-and-forget database save to avoid blocking stream completion
Promise.resolve()
.then(async () => {
try {
const userMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'user',
content: query,
timestamp: new Date().toISOString(),
}
const assistantMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'assistant',
content: accumulatedResponse,
timestamp: new Date().toISOString(),
citations: result.sources.map((source, index) => ({
id: index + 1,
title: source.title,
url: source.url,
})),
}
const updatedMessages = [
...conversationHistory,
userMessage,
assistantMessage,
]
// Generate title if this is the first message
let updatedTitle = currentChat.title ?? undefined
if (!updatedTitle && conversationHistory.length === 0) {
updatedTitle = await generateChatTitle(query)
}
// Update the chat in database
await updateChat(currentChat.id, session.user.id, {
title: updatedTitle,
messages: updatedMessages,
})
logger.info(
`[${requestId}] Updated chat ${currentChat.id} with new docs messages`
)
} catch (dbError) {
logger.error(`[${requestId}] Failed to save chat to database:`, dbError)
// Database errors don't affect the user's streaming experience
}
})
.catch((error) => {
logger.error(`[${requestId}] Unexpected error in async database save:`, error)
})
}
} catch (error) {
logger.error(`[${requestId}] Docs streaming error:`, error)
const errorChunk = {
type: 'error',
error: 'Streaming failed',
try {
const errorChunk = {
type: 'error',
error: 'Streaming failed',
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
} catch (enqueueError) {
logger.error(`[${requestId}] Failed to enqueue error response:`, enqueueError)
}
controller.enqueue(encoder.encode(`data: ${JSON.stringify(errorChunk)}\n\n`))
} finally {
controller.close()
}
@@ -193,14 +217,14 @@ export async function POST(req: NextRequest) {
// Save conversation to database if we have a chat
if (currentChat) {
const userMessage = {
const userMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'user',
content: query,
timestamp: new Date().toISOString(),
}
const assistantMessage = {
const assistantMessage: CopilotMessage = {
id: crypto.randomUUID(),
role: 'assistant',
content: typeof result.response === 'string' ? result.response : '[Streaming Response]',
@@ -215,7 +239,7 @@ export async function POST(req: NextRequest) {
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
// Generate title if this is the first message
let updatedTitle = currentChat.title
let updatedTitle = currentChat.title ?? undefined
if (!updatedTitle && conversationHistory.length === 0) {
updatedTitle = await generateChatTitle(query)
}

View File

@@ -14,6 +14,12 @@ import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('CopilotAPI')
// Interface for StreamingExecution response
interface StreamingExecution {
stream: ReadableStream
execution: Promise<any>
}
// Schema for sending messages
const SendMessageSchema = z.object({
message: z.string().min(1, 'Message is required'),
@@ -135,7 +141,8 @@ export async function POST(req: NextRequest) {
) {
// Handle StreamingExecution (from providers with tool calls)
logger.info(`[${requestId}] StreamingExecution detected`)
streamToRead = (result.response as any).stream
const streamingExecution = result.response as StreamingExecution
streamToRead = streamingExecution.stream
// No need to extract citations - LLM generates direct markdown links
}

View File

@@ -4,34 +4,73 @@ import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('DocsSearchAPI')
export async function POST(request: NextRequest) {
// Request and response type definitions
interface DocsSearchRequest {
query: string
topK?: number
}
interface DocsSearchResult {
id: number
title: string
url: string
content: string
similarity: number
}
interface DocsSearchSuccessResponse {
success: true
results: DocsSearchResult[]
query: string
totalResults: number
searchTime?: number
}
interface DocsSearchErrorResponse {
success: false
error: string
}
export async function POST(
request: NextRequest
): Promise<NextResponse<DocsSearchSuccessResponse | DocsSearchErrorResponse>> {
try {
const { query, topK = 5 } = await request.json()
const requestBody: DocsSearchRequest = await request.json()
const { query, topK = 5 } = requestBody
if (!query) {
return NextResponse.json({ error: 'Query is required' }, { status: 400 })
const errorResponse: DocsSearchErrorResponse = {
success: false,
error: 'Query is required',
}
return NextResponse.json(errorResponse, { status: 400 })
}
logger.info('Executing documentation search', { query, topK })
const startTime = Date.now()
const results = await searchDocumentation(query, { topK })
const searchTime = Date.now() - startTime
logger.info(`Found ${results.length} documentation results`, { query })
return NextResponse.json({
const successResponse: DocsSearchSuccessResponse = {
success: true,
results,
query,
totalResults: results.length,
})
searchTime,
}
return NextResponse.json(successResponse)
} catch (error) {
logger.error('Documentation search API failed', error)
return NextResponse.json(
{
success: false,
error: `Documentation search failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
},
{ status: 500 }
)
const errorResponse: DocsSearchErrorResponse = {
success: false,
error: `Documentation search failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
}
return NextResponse.json(errorResponse, { status: 500 })
}
}

View File

@@ -7,18 +7,103 @@ import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
import { workflow } from '@/db/schema'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowStateAPI')
// Zod schemas for workflow state validation
const PositionSchema = z.object({
x: z.number(),
y: z.number(),
})
const BlockDataSchema = z.object({
parentId: z.string().optional(),
extent: z.literal('parent').optional(),
width: z.number().optional(),
height: z.number().optional(),
collection: z.unknown().optional(),
count: z.number().optional(),
loopType: z.enum(['for', 'forEach']).optional(),
parallelType: z.enum(['collection', 'count']).optional(),
type: z.string().optional(),
})
const SubBlockStateSchema = z.object({
id: z.string(),
type: z.string(),
value: z.union([z.string(), z.number(), z.array(z.array(z.string())), z.null()]),
})
const BlockOutputSchema = z.any()
const BlockStateSchema = z.object({
id: z.string(),
type: z.string(),
name: z.string(),
position: PositionSchema,
subBlocks: z.record(SubBlockStateSchema),
outputs: z.record(BlockOutputSchema),
enabled: z.boolean(),
horizontalHandles: z.boolean().optional(),
isWide: z.boolean().optional(),
height: z.number().optional(),
advancedMode: z.boolean().optional(),
data: BlockDataSchema.optional(),
})
const EdgeSchema = z.object({
id: z.string(),
source: z.string(),
target: z.string(),
sourceHandle: z.string().optional(),
targetHandle: z.string().optional(),
type: z.string().optional(),
animated: z.boolean().optional(),
style: z.record(z.any()).optional(),
data: z.record(z.any()).optional(),
label: z.string().optional(),
labelStyle: z.record(z.any()).optional(),
labelShowBg: z.boolean().optional(),
labelBgStyle: z.record(z.any()).optional(),
labelBgPadding: z.array(z.number()).optional(),
labelBgBorderRadius: z.number().optional(),
markerStart: z.string().optional(),
markerEnd: z.string().optional(),
})
const LoopSchema = z.object({
id: z.string(),
nodes: z.array(z.string()),
iterations: z.number(),
loopType: z.enum(['for', 'forEach']),
forEachItems: z.union([z.array(z.any()), z.record(z.any()), z.string()]).optional(),
})
const ParallelSchema = z.object({
id: z.string(),
nodes: z.array(z.string()),
distribution: z.union([z.array(z.any()), z.record(z.any()), z.string()]).optional(),
count: z.number().optional(),
parallelType: z.enum(['count', 'collection']).optional(),
})
const DeploymentStatusSchema = z.object({
id: z.string(),
status: z.enum(['deploying', 'deployed', 'failed', 'stopping', 'stopped']),
deployedAt: z.date().optional(),
error: z.string().optional(),
})
const WorkflowStateSchema = z.object({
blocks: z.record(z.any()),
edges: z.array(z.any()),
loops: z.record(z.any()).optional(),
parallels: z.record(z.any()).optional(),
blocks: z.record(BlockStateSchema),
edges: z.array(EdgeSchema),
loops: z.record(LoopSchema).optional(),
parallels: z.record(ParallelSchema).optional(),
lastSaved: z.number().optional(),
isDeployed: z.boolean().optional(),
deployedAt: z.date().optional(),
deploymentStatuses: z.record(z.any()).optional(),
deploymentStatuses: z.record(DeploymentStatusSchema).optional(),
hasActiveSchedule: z.boolean().optional(),
hasActiveWebhook: z.boolean().optional(),
})
@@ -100,7 +185,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
hasActiveWebhook: state.hasActiveWebhook || false,
}
const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowState)
const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowState as any)
if (!saveResult.success) {
logger.error(`[${requestId}] Failed to save workflow ${workflowId} state:`, saveResult.error)
@@ -131,7 +216,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
},
{ status: 200 }
)
} catch (error: any) {
} catch (error: unknown) {
const elapsed = Date.now() - startTime
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid workflow state data for ${workflowId}`, {

View File

@@ -43,7 +43,7 @@ export function ExportControls({ disabled = false }: ExportControlsProps) {
}
}
const handleExportYaml = async () => {
const handleExportYaml = () => {
if (!currentWorkflow || !activeWorkflowId) {
logger.warn('No active workflow to export')
return

View File

@@ -126,8 +126,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(
// Handle message submission
const handleSubmit = useCallback(
async (e: React.FormEvent, message?: string) => {
e.preventDefault()
async (e?: React.FormEvent, message?: string) => {
e?.preventDefault()
const query = message || inputRef.current?.value?.trim() || ''
if (!query || isSendingMessage || !activeWorkflowId) return
@@ -256,8 +256,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(
// Handle modal message sending
const handleModalSendMessage = useCallback(
async (message: string) => {
const mockEvent = { preventDefault: () => {} } as React.FormEvent
await handleSubmit(mockEvent, message)
await handleSubmit(undefined, message)
},
[handleSubmit]
)

View File

@@ -41,9 +41,7 @@ export async function applyWorkflowDiff(
addEdge: () => {}, // Not used in this path
applyAutoLayout: () => {
// Trigger auto layout after import
setTimeout(() => {
window.dispatchEvent(new CustomEvent('trigger-auto-layout'))
}, 100)
window.dispatchEvent(new CustomEvent('trigger-auto-layout'))
},
setSubBlockValue: () => {}, // Not used in this path
getExistingBlocks: () => useWorkflowStore.getState().blocks,
@@ -95,17 +93,37 @@ export async function applyWorkflowDiff(
hasActiveWebhook: parsedData.state.hasActiveWebhook || false,
}
// Update local workflow state
useWorkflowStore.setState(newWorkflowState)
// Atomically update local state with rollback on failure
const previousWorkflowState = useWorkflowStore.getState()
const previousSubBlockState = useSubBlockStore.getState()
// Update subblock values if provided
if (parsedData.subBlockValues) {
useSubBlockStore.setState((state: any) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: parsedData.subBlockValues,
},
}))
try {
// Update workflow state first
useWorkflowStore.setState(newWorkflowState)
// Update subblock values if provided
if (parsedData.subBlockValues) {
useSubBlockStore.setState((state: any) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: parsedData.subBlockValues,
},
}))
}
} catch (error) {
// Rollback state changes on any failure
logger.error('State update failed, rolling back:', error)
useWorkflowStore.setState(previousWorkflowState)
useSubBlockStore.setState(previousSubBlockState)
return {
success: false,
errors: [
`State update failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
],
warnings: [],
appliedOperations: 0,
}
}
// Update workflow metadata if provided
@@ -153,9 +171,7 @@ export async function applyWorkflowDiff(
}
// Trigger auto layout
setTimeout(() => {
window.dispatchEvent(new CustomEvent('trigger-auto-layout'))
}, 100)
window.dispatchEvent(new CustomEvent('trigger-auto-layout'))
return {
success: true,

View File

@@ -1,4 +1,4 @@
import { dump as yamlDump } from 'js-yaml'
import { dump as yamlDump, load as yamlLoad } from 'js-yaml'
import { createLogger } from '@/lib/logs/console-logger'
import { generateWorkflowYaml } from '@/lib/workflows/yaml-generator'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -91,8 +91,7 @@ export function exportWorkflow(format: EditorFormat): string {
*/
export function parseWorkflowContent(content: string, format: EditorFormat): any {
if (format === 'yaml') {
const { load: yamlParse } = require('js-yaml')
return yamlParse(content)
return yamlLoad(content)
}
return JSON.parse(content)
}

View File

@@ -15,7 +15,6 @@ import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip
import { createLogger } from '@/lib/logs/console-logger'
import { cn } from '@/lib/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { applyWorkflowDiff } from './workflow-applier'
import { exportWorkflow } from './workflow-exporter'
import { type EditorFormat, WorkflowTextEditor } from './workflow-text-editor'
@@ -37,7 +36,6 @@ export function WorkflowTextEditorModal({
const [isLoading, setIsLoading] = useState(false)
const { activeWorkflowId } = useWorkflowRegistry()
const workflowState = useWorkflowStore()
// Load initial content when modal opens
useEffect(() => {
@@ -79,14 +77,12 @@ export function WorkflowTextEditorModal({
})
// Update initial content to reflect current state
setTimeout(() => {
try {
const updatedContent = exportWorkflow(contentFormat)
setInitialContent(updatedContent)
} catch (error) {
logger.error('Failed to refresh content after save:', error)
}
}, 500) // Give more time for the workflow state to update
try {
const updatedContent = exportWorkflow(contentFormat)
setInitialContent(updatedContent)
} catch (error) {
logger.error('Failed to refresh content after save:', error)
}
}
return {

View File

@@ -17,7 +17,6 @@ import { Textarea } from '@/components/ui/textarea'
import { createLogger } from '@/lib/logs/console-logger'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { importWorkflowFromYaml, parseWorkflowYaml } from '@/stores/workflows/yaml/importer'
const logger = createLogger('ImportControls')
@@ -52,7 +51,6 @@ export const ImportControls = forwardRef<ImportControlsRef, ImportControlsProps>
const { createWorkflow } = useWorkflowRegistry()
const { collaborativeAddBlock, collaborativeAddEdge, collaborativeSetSubblockValue } =
useCollaborativeWorkflow()
const subBlockStore = useSubBlockStore()
// Expose methods to parent component
useImperativeHandle(ref, () => ({
@@ -131,7 +129,7 @@ export const ImportControls = forwardRef<ImportControlsRef, ImportControlsProps>
// Trigger auto layout
window.dispatchEvent(new CustomEvent('trigger-auto-layout'))
},
setSubBlockValue: (blockId: string, subBlockId: string, value: any) => {
setSubBlockValue: (blockId: string, subBlockId: string, value: unknown) => {
// Use the collaborative function - the same one called when users type into fields
collaborativeSetSubblockValue(blockId, subBlockId, value)
},

View File

@@ -23,4 +23,15 @@ CREATE INDEX "docs_emb_model_idx" ON "docs_embeddings" USING btree ("embedding_m
CREATE INDEX "docs_emb_created_at_idx" ON "docs_embeddings" USING btree ("created_at");--> statement-breakpoint
CREATE INDEX "docs_embedding_vector_hnsw_idx" ON "docs_embeddings" USING hnsw ("embedding" vector_cosine_ops) WITH (m=16,ef_construction=64);--> statement-breakpoint
CREATE INDEX "docs_emb_metadata_gin_idx" ON "docs_embeddings" USING gin ("metadata");--> statement-breakpoint
CREATE INDEX "docs_emb_chunk_text_fts_idx" ON "docs_embeddings" USING gin ("chunk_text_tsv");
CREATE INDEX "docs_emb_chunk_text_fts_idx" ON "docs_embeddings" USING gin ("chunk_text_tsv");--> statement-breakpoint
CREATE OR REPLACE FUNCTION trigger_set_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;--> statement-breakpoint
CREATE TRIGGER set_updated_at
BEFORE UPDATE ON docs_embeddings
FOR EACH ROW
EXECUTE FUNCTION trigger_set_timestamp();

View File

@@ -291,7 +291,7 @@ export async function sendStreamingMessage(request: SendMessageRequest): Promise
error?: string
}> {
try {
console.log('[CopilotAPI] Sending streaming message request:', {
logger.debug('Sending streaming message request:', {
message: request.message,
stream: true,
hasWorkflowId: !!request.workflowId,
@@ -303,7 +303,7 @@ export async function sendStreamingMessage(request: SendMessageRequest): Promise
body: JSON.stringify({ ...request, stream: true }),
})
console.log('[CopilotAPI] Fetch response received:', {
logger.debug('Fetch response received:', {
ok: response.ok,
status: response.status,
statusText: response.statusText,
@@ -312,23 +312,30 @@ export async function sendStreamingMessage(request: SendMessageRequest): Promise
})
if (!response.ok) {
const errorData = await response.json()
console.error('[CopilotAPI] Error response:', errorData)
throw new Error(errorData.error || 'Failed to send streaming message')
let errorMessage = 'Failed to send streaming message'
try {
const errorData = await response.json()
logger.error('Error response:', errorData)
errorMessage = errorData.error || errorMessage
} catch {
// Response is not JSON, use status text or default message
logger.error('Non-JSON error response:', response.statusText)
errorMessage = response.statusText || errorMessage
}
throw new Error(errorMessage)
}
if (!response.body) {
console.error('[CopilotAPI] No response body received')
logger.error('No response body received')
throw new Error('No response body received')
}
console.log('[CopilotAPI] Successfully received stream')
logger.debug('Successfully received stream')
return {
success: true,
stream: response.body,
}
} catch (error) {
console.error('[CopilotAPI] Failed to send streaming message:', error)
logger.error('Failed to send streaming message:', error)
return {
success: false,
@@ -390,7 +397,7 @@ export async function sendStreamingDocsMessage(request: DocsQueryRequest): Promi
error?: string
}> {
try {
console.log('[CopilotAPI] sendStreamingDocsMessage called with:', request)
logger.debug('sendStreamingDocsMessage called with:', request)
const response = await fetch('/api/copilot/docs', {
method: 'POST',
@@ -398,7 +405,7 @@ export async function sendStreamingDocsMessage(request: DocsQueryRequest): Promi
body: JSON.stringify({ ...request, stream: true }),
})
console.log('[CopilotAPI] Fetch response received:', {
logger.debug('Fetch response received:', {
status: response.status,
statusText: response.statusText,
headers: Object.fromEntries(response.headers.entries()),
@@ -407,23 +414,30 @@ export async function sendStreamingDocsMessage(request: DocsQueryRequest): Promi
})
if (!response.ok) {
const errorData = await response.json()
console.error('[CopilotAPI] API error response:', errorData)
throw new Error(errorData.error || 'Failed to send streaming docs message')
let errorMessage = 'Failed to send streaming docs message'
try {
const errorData = await response.json()
logger.error('API error response:', errorData)
errorMessage = errorData.error || errorMessage
} catch {
// Response is not JSON, use status text or default message
logger.error('Non-JSON error response:', response.statusText)
errorMessage = response.statusText || errorMessage
}
throw new Error(errorMessage)
}
if (!response.body) {
console.error('[CopilotAPI] No response body received')
logger.error('No response body received')
throw new Error('No response body received')
}
console.log('[CopilotAPI] Returning successful result with stream')
logger.debug('Returning successful result with stream')
return {
success: true,
stream: response.body,
}
} catch (error) {
console.error('[CopilotAPI] Error in sendStreamingDocsMessage:', error)
logger.error('Failed to send streaming docs message:', error)
return {
success: false,

View File

@@ -4,6 +4,55 @@ import type { ProviderId } from '@/providers/types'
const logger = createLogger('CopilotConfig')
/**
* Valid provider IDs for validation
*/
const VALID_PROVIDER_IDS: ProviderId[] = [
'openai',
'azure-openai',
'anthropic',
'google',
'deepseek',
'xai',
'cerebras',
'groq',
'ollama',
]
/**
* Validate and return a ProviderId if valid, otherwise return null
*/
function validateProviderId(value: string | undefined): ProviderId | null {
if (!value) return null
return VALID_PROVIDER_IDS.includes(value as ProviderId) ? (value as ProviderId) : null
}
/**
* Safely parse a float from environment variable with validation
*/
function parseFloatEnv(value: string | undefined, name: string): number | null {
if (!value) return null
const parsed = Number.parseFloat(value)
if (Number.isNaN(parsed)) {
logger.warn(`Invalid ${name}: ${value}. Expected a valid number.`)
return null
}
return parsed
}
/**
* Safely parse an integer from environment variable with validation
*/
function parseIntEnv(value: string | undefined, name: string): number | null {
if (!value) return null
const parsed = Number.parseInt(value, 10)
if (Number.isNaN(parsed)) {
logger.warn(`Invalid ${name}: ${value}. Expected a valid integer.`)
return null
}
return parsed
}
/**
* Copilot configuration interface
*/
@@ -134,49 +183,80 @@ export function getCopilotConfig(): CopilotConfig {
// Allow environment variable overrides
try {
// Chat configuration overrides
if (process.env.COPILOT_CHAT_PROVIDER) {
config.chat.defaultProvider = process.env.COPILOT_CHAT_PROVIDER as ProviderId
const chatProvider = validateProviderId(process.env.COPILOT_CHAT_PROVIDER)
if (chatProvider) {
config.chat.defaultProvider = chatProvider
} else if (process.env.COPILOT_CHAT_PROVIDER) {
logger.warn(
`Invalid COPILOT_CHAT_PROVIDER: ${process.env.COPILOT_CHAT_PROVIDER}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`
)
}
if (process.env.COPILOT_CHAT_MODEL) {
config.chat.defaultModel = process.env.COPILOT_CHAT_MODEL
}
if (process.env.COPILOT_CHAT_TEMPERATURE) {
config.chat.temperature = Number.parseFloat(process.env.COPILOT_CHAT_TEMPERATURE)
const chatTemperature = parseFloatEnv(
process.env.COPILOT_CHAT_TEMPERATURE,
'COPILOT_CHAT_TEMPERATURE'
)
if (chatTemperature !== null) {
config.chat.temperature = chatTemperature
}
if (process.env.COPILOT_CHAT_MAX_TOKENS) {
config.chat.maxTokens = Number.parseInt(process.env.COPILOT_CHAT_MAX_TOKENS)
const chatMaxTokens = parseIntEnv(
process.env.COPILOT_CHAT_MAX_TOKENS,
'COPILOT_CHAT_MAX_TOKENS'
)
if (chatMaxTokens !== null) {
config.chat.maxTokens = chatMaxTokens
}
// RAG configuration overrides
if (process.env.COPILOT_RAG_PROVIDER) {
config.rag.defaultProvider = process.env.COPILOT_RAG_PROVIDER as ProviderId
const ragProvider = validateProviderId(process.env.COPILOT_RAG_PROVIDER)
if (ragProvider) {
config.rag.defaultProvider = ragProvider
} else if (process.env.COPILOT_RAG_PROVIDER) {
logger.warn(
`Invalid COPILOT_RAG_PROVIDER: ${process.env.COPILOT_RAG_PROVIDER}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`
)
}
if (process.env.COPILOT_RAG_MODEL) {
config.rag.defaultModel = process.env.COPILOT_RAG_MODEL
}
if (process.env.COPILOT_RAG_TEMPERATURE) {
config.rag.temperature = Number.parseFloat(process.env.COPILOT_RAG_TEMPERATURE)
const ragTemperature = parseFloatEnv(
process.env.COPILOT_RAG_TEMPERATURE,
'COPILOT_RAG_TEMPERATURE'
)
if (ragTemperature !== null) {
config.rag.temperature = ragTemperature
}
if (process.env.COPILOT_RAG_MAX_TOKENS) {
config.rag.maxTokens = Number.parseInt(process.env.COPILOT_RAG_MAX_TOKENS)
const ragMaxTokens = parseIntEnv(process.env.COPILOT_RAG_MAX_TOKENS, 'COPILOT_RAG_MAX_TOKENS')
if (ragMaxTokens !== null) {
config.rag.maxTokens = ragMaxTokens
}
if (process.env.COPILOT_RAG_MAX_SOURCES) {
config.rag.maxSources = Number.parseInt(process.env.COPILOT_RAG_MAX_SOURCES)
const ragMaxSources = parseIntEnv(
process.env.COPILOT_RAG_MAX_SOURCES,
'COPILOT_RAG_MAX_SOURCES'
)
if (ragMaxSources !== null) {
config.rag.maxSources = ragMaxSources
}
if (process.env.COPILOT_RAG_SIMILARITY_THRESHOLD) {
config.rag.similarityThreshold = Number.parseFloat(
process.env.COPILOT_RAG_SIMILARITY_THRESHOLD
)
const ragSimilarityThreshold = parseFloatEnv(
process.env.COPILOT_RAG_SIMILARITY_THRESHOLD,
'COPILOT_RAG_SIMILARITY_THRESHOLD'
)
if (ragSimilarityThreshold !== null) {
config.rag.similarityThreshold = ragSimilarityThreshold
}
// General configuration overrides
if (process.env.COPILOT_STREAMING_ENABLED) {
config.general.streamingEnabled = process.env.COPILOT_STREAMING_ENABLED === 'true'
}
if (process.env.COPILOT_MAX_CONVERSATION_HISTORY) {
config.general.maxConversationHistory = Number.parseInt(
process.env.COPILOT_MAX_CONVERSATION_HISTORY
)
const maxConversationHistory = parseIntEnv(
process.env.COPILOT_MAX_CONVERSATION_HISTORY,
'COPILOT_MAX_CONVERSATION_HISTORY'
)
if (maxConversationHistory !== null) {
config.general.maxConversationHistory = maxConversationHistory
}
logger.info('Copilot configuration loaded', {

View File

@@ -1,4 +1,5 @@
import { dump as yamlDump } from 'js-yaml'
import type { Edge } from 'reactflow'
import { createLogger } from '@/lib/logs/console-logger'
import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
@@ -167,7 +168,7 @@ function extractBlockInputs(
*/
function findIncomingConnections(
blockId: string,
edges: any[]
edges: Edge[]
): Array<{
source: string
sourceHandle?: string
@@ -177,8 +178,8 @@ function findIncomingConnections(
.filter((edge) => edge.target === blockId)
.map((edge) => ({
source: edge.source,
sourceHandle: edge.sourceHandle,
targetHandle: edge.targetHandle,
sourceHandle: edge.sourceHandle ?? undefined,
targetHandle: edge.targetHandle ?? undefined,
}))
}
@@ -187,7 +188,7 @@ function findIncomingConnections(
*/
function findOutgoingConnections(
blockId: string,
edges: any[]
edges: Edge[]
): Array<{
target: string
sourceHandle?: string
@@ -197,8 +198,8 @@ function findOutgoingConnections(
.filter((edge) => edge.source === blockId)
.map((edge) => ({
target: edge.target,
sourceHandle: edge.sourceHandle,
targetHandle: edge.targetHandle,
sourceHandle: edge.sourceHandle ?? undefined,
targetHandle: edge.targetHandle ?? undefined,
}))
}

View File

@@ -65,7 +65,6 @@
"@radix-ui/react-tooltip": "^1.1.6",
"@react-email/components": "^0.0.34",
"@sentry/nextjs": "^9.15.0",
"@types/js-yaml": "4.0.9",
"@types/three": "0.177.0",
"@vercel/og": "^0.6.5",
"@vercel/speed-insights": "^1.2.0",
@@ -122,6 +121,7 @@
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^14.6.1",
"@types/js-yaml": "4.0.9",
"@types/jsdom": "21.1.7",
"@types/lodash": "^4.17.16",
"@types/node": "^22",

View File

@@ -2,6 +2,7 @@
import path from 'path'
import { DocsChunker } from '@/lib/documents/docs-chunker'
import type { DocChunk } from '@/lib/documents/types'
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('ChunkDocsScript')
@@ -31,16 +32,13 @@ async function main() {
logger.info(`Total chunks: ${chunks.length}`)
// Group chunks by document
const chunksByDoc = chunks.reduce(
(acc, chunk) => {
if (!acc[chunk.sourceDocument]) {
acc[chunk.sourceDocument] = []
}
acc[chunk.sourceDocument].push(chunk)
return acc
},
{} as Record<string, typeof chunks>
)
const chunksByDoc = chunks.reduce<Record<string, DocChunk[]>>((acc, chunk) => {
if (!acc[chunk.sourceDocument]) {
acc[chunk.sourceDocument] = []
}
acc[chunk.sourceDocument].push(chunk)
return acc
}, {})
// Display summary
logger.info(`\n=== DOCUMENT SUMMARY ===`)
@@ -79,13 +77,10 @@ async function main() {
logger.info(`Embedding dimensions: ${chunks[0].embedding.length}`)
}
const headerLevels = chunks.reduce(
(acc, chunk) => {
acc[chunk.headerLevel] = (acc[chunk.headerLevel] || 0) + 1
return acc
},
{} as Record<number, number>
)
const headerLevels = chunks.reduce<Record<number, number>>((acc, chunk) => {
acc[chunk.headerLevel] = (acc[chunk.headerLevel] || 0) + 1
return acc
}, {})
logger.info(`Header level distribution:`)
Object.entries(headerLevels)

View File

@@ -59,8 +59,13 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
// Clear existing embeddings if requested
if (config.clearExisting) {
logger.info('🗑️ Clearing existing docs embeddings...')
const deleteResult = await db.delete(docsEmbeddings)
logger.info(`Deleted existing embeddings`)
try {
const deleteResult = await db.delete(docsEmbeddings)
logger.info(`✅ Successfully deleted existing embeddings`)
} catch (error) {
logger.error('❌ Failed to delete existing embeddings:', error)
throw new Error('Failed to clear existing embeddings')
}
}
// Initialize the docs chunker
@@ -213,8 +218,8 @@ Examples:
}
}
// Run the script
if (process.argv[1]?.includes('process-docs-embeddings')) {
// Run the script if executed directly
if (import.meta.url.includes('process-docs-embeddings.ts')) {
main().catch((error) => {
console.error('Script failed:', error)
process.exit(1)

View File

@@ -26,7 +26,9 @@ const initialState = {
isLoading: false,
isLoadingChats: false,
isSendingMessage: false,
isSaving: false,
error: null,
saveError: null,
workflowId: null,
}
@@ -48,6 +50,8 @@ export const useCopilotStore = create<CopilotStore>()(
chats: [],
messages: [],
error: null,
saveError: null,
isSaving: false,
})
// Load chats for the new workflow
@@ -425,7 +429,13 @@ export const useCopilotStore = create<CopilotStore>()(
const chatIdToSave = newChatId || get().currentChat?.id
if (chatIdToSave) {
console.log('[CopilotStore] Saving chat to database:', chatIdToSave)
await get().saveChatMessages(chatIdToSave)
try {
await get().saveChatMessages(chatIdToSave)
} catch (saveError) {
// Save error is already handled in saveChatMessages and reflected in store state
// Don't break the streaming flow - user gets the message but knows save failed
logger.warn(`Chat save failed after streaming completed: ${saveError}`)
}
}
// Handle new chat creation
@@ -477,9 +487,11 @@ export const useCopilotStore = create<CopilotStore>()(
// Save chat messages to database
saveChatMessages: async (chatId: string) => {
try {
const { messages, currentChat } = get()
const { messages } = get()
set({ isSaving: true, saveError: null })
try {
logger.info(`Saving ${messages.length} messages for chat ${chatId}`)
// Let the API handle title generation if needed
@@ -490,16 +502,30 @@ export const useCopilotStore = create<CopilotStore>()(
set({
currentChat: result.chat,
messages: result.chat.messages,
isSaving: false,
saveError: null,
})
logger.info(
`Successfully saved chat ${chatId} with ${result.chat.messages.length} messages`
)
} else {
logger.error(`Failed to save chat ${chatId}:`, result.error)
const errorMessage = result.error || 'Failed to save chat'
logger.error(`Failed to save chat ${chatId}:`, errorMessage)
set({
isSaving: false,
saveError: errorMessage,
})
throw new Error(errorMessage)
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error saving chat'
logger.error(`Error saving chat ${chatId}:`, error)
set({
isSaving: false,
saveError: errorMessage,
})
throw error
}
},
@@ -508,6 +534,16 @@ export const useCopilotStore = create<CopilotStore>()(
set({ error: null })
},
// Clear save error state
clearSaveError: () => {
set({ saveError: null })
},
// Retry saving chat messages
retrySave: async (chatId: string) => {
await get().saveChatMessages(chatId)
},
// Reset entire store
reset: () => {
set(initialState)

View File

@@ -48,6 +48,10 @@ export interface CopilotState {
// Error state
error: string | null
// Save operation error (separate from general errors)
saveError: string | null
isSaving: boolean
// Current workflow ID (for chat context)
workflowId: string | null
}
@@ -71,6 +75,8 @@ export interface CopilotActions {
// Utility actions
clearMessages: () => void
clearError: () => void
clearSaveError: () => void
retrySave: (chatId: string) => Promise<void>
reset: () => void
// Internal helper (not exposed publicly)

View File

@@ -228,7 +228,7 @@ export const resetAllStores = () => {
})
useExecutionStore.getState().reset()
useConsoleStore.setState({ entries: [], isOpen: false })
useCopilotStore.setState({ messages: [], isProcessing: false, error: null })
useCopilotStore.setState({ messages: [], isSendingMessage: false, error: null })
useCustomToolsStore.setState({ tools: {} })
useVariablesStore.getState().resetLoaded() // Reset variables store tracking
}

View File

@@ -1,7 +1,11 @@
import { load as yamlParse } from 'js-yaml'
import type { Edge } from 'reactflow'
import { createLogger } from '@/lib/logs/console-logger'
import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('WorkflowYamlImporter')
@@ -66,7 +70,7 @@ export function parseWorkflowYaml(yamlContent: string): {
const errors: string[] = []
try {
const data = yamlParse(yamlContent) as any
const data = yamlParse(yamlContent) as unknown
// Validate top-level structure
if (!data || typeof data !== 'object') {
@@ -74,39 +78,45 @@ export function parseWorkflowYaml(yamlContent: string): {
return { data: null, errors }
}
if (!data.version) {
// Type guard to check if data has the expected structure
const parsedData = data as Record<string, unknown>
if (!parsedData.version) {
errors.push('Missing required field: version')
}
if (!data.blocks || typeof data.blocks !== 'object') {
if (!parsedData.blocks || typeof parsedData.blocks !== 'object') {
errors.push('Missing or invalid field: blocks')
return { data: null, errors }
}
// Validate blocks structure
Object.entries(data.blocks).forEach(([blockId, block]: [string, any]) => {
const blocks = parsedData.blocks as Record<string, unknown>
Object.entries(blocks).forEach(([blockId, block]: [string, unknown]) => {
if (!block || typeof block !== 'object') {
errors.push(`Invalid block definition for '${blockId}': must be an object`)
return
}
if (!block.type || typeof block.type !== 'string') {
const blockData = block as Record<string, unknown>
if (!blockData.type || typeof blockData.type !== 'string') {
errors.push(`Invalid block '${blockId}': missing or invalid 'type' field`)
}
if (!block.name || typeof block.name !== 'string') {
if (!blockData.name || typeof blockData.name !== 'string') {
errors.push(`Invalid block '${blockId}': missing or invalid 'name' field`)
}
if (block.inputs && typeof block.inputs !== 'object') {
if (blockData.inputs && typeof blockData.inputs !== 'object') {
errors.push(`Invalid block '${blockId}': 'inputs' must be an object`)
}
if (block.preceding && !Array.isArray(block.preceding)) {
if (blockData.preceding && !Array.isArray(blockData.preceding)) {
errors.push(`Invalid block '${blockId}': 'preceding' must be an array`)
}
if (block.following && !Array.isArray(block.following)) {
if (blockData.following && !Array.isArray(blockData.following)) {
errors.push(`Invalid block '${blockId}': 'following' must be an array`)
}
})
@@ -115,7 +125,7 @@ export function parseWorkflowYaml(yamlContent: string): {
return { data: null, errors }
}
return { data: data as YamlWorkflow, errors: [] }
return { data: parsedData as unknown as YamlWorkflow, errors: [] }
} catch (error) {
errors.push(`YAML parsing error: ${error instanceof Error ? error.message : 'Unknown error'}`)
return { data: null, errors }
@@ -433,7 +443,7 @@ export async function importWorkflowFromYaml(
parentId?: string,
extent?: 'parent'
) => void
addEdge: (edge: any) => void
addEdge: (edge: Edge) => void
applyAutoLayout: () => void
setSubBlockValue: (blockId: string, subBlockId: string, value: any) => void
getExistingBlocks: () => Record<string, any>
@@ -479,9 +489,6 @@ export async function importWorkflowFromYaml(
)
// Get stores and current workflow info
const { useWorkflowStore } = require('@/stores/workflows/workflow/store')
const { useSubBlockStore } = require('@/stores/workflows/subblock/store')
const { useWorkflowRegistry } = require('@/stores/workflows/registry/store')
// Get current workflow state
const currentWorkflowState = useWorkflowStore.getState()
@@ -727,9 +734,6 @@ export async function importWorkflowFromYaml(
}))
}
// Brief delay for UI to update
await new Promise((resolve) => setTimeout(resolve, 100))
// Apply auto layout
workflowActions.applyAutoLayout()

View File

@@ -45,6 +45,10 @@ function getSubBlockValues() {
// Track if subscriptions have been initialized
let subscriptionsInitialized = false
// Track timeout IDs for cleanup
let workflowRefreshTimeoutId: NodeJS.Timeout | null = null
let subBlockRefreshTimeoutId: NodeJS.Timeout | null = null
// Initialize subscriptions lazily
function initializeSubscriptions() {
if (subscriptionsInitialized) return
@@ -67,9 +71,17 @@ function initializeSubscriptions() {
) {
lastWorkflowState = currentState
// Clear existing timeout to properly debounce
if (workflowRefreshTimeoutId) {
clearTimeout(workflowRefreshTimeoutId)
}
// Debounce the refresh to avoid excessive updates
const refreshYaml = useWorkflowYamlStore.getState().refreshYaml
setTimeout(refreshYaml, 100)
workflowRefreshTimeoutId = setTimeout(() => {
refreshYaml()
workflowRefreshTimeoutId = null
}, 100)
}
})
@@ -83,8 +95,16 @@ function initializeSubscriptions() {
if (currentTime - lastSubBlockChangeTime > 100) {
lastSubBlockChangeTime = currentTime
// Clear existing timeout to properly debounce
if (subBlockRefreshTimeoutId) {
clearTimeout(subBlockRefreshTimeoutId)
}
const refreshYaml = useWorkflowYamlStore.getState().refreshYaml
setTimeout(refreshYaml, 100)
subBlockRefreshTimeoutId = setTimeout(() => {
refreshYaml()
subBlockRefreshTimeoutId = null
}, 100)
}
})
}

View File

@@ -1,6 +1,29 @@
import type { ToolConfig } from '../types'
import type { ToolConfig, ToolResponse } from '../types'
export const docsSearchTool: ToolConfig = {
/** Parameters accepted by the internal docs-search tool. */
interface DocsSearchParams {
/** Free-text query to run against the documentation index. */
query: string
/** Number of results to return; defaults to 5 and is clamped to [1, 20] by the request builder. */
topK?: number
}
/** A single normalized hit returned by the docs-search API. */
interface DocsSearchResult {
/** Result identifier; '' when missing from the raw response. */
id: string
/** Document title; falls back to 'Untitled' when absent. */
title: string
/** Matched chunk content; '' when absent. */
content: string
/** Link to the source document; '' when absent. */
url: string
/** Relevance score; coerced to 0 when the API returns a non-numeric value. */
score: number
/** Provider-specific extra fields; shape not fixed here — defaults to {}. */
metadata?: Record<string, any>
}
/** Tool response envelope produced by the docs-search transformResponse. */
interface DocsSearchResponse extends ToolResponse {
output: {
/** Normalized search hits. */
results: DocsSearchResult[]
/** The query that was executed (echoed back; '' when params were absent). */
query: string
/** Convenience count — always results.length. */
totalResults: number
/** Search latency as reported by the API; 0 when not provided. */
searchTime: number
}
}
export const docsSearchTool: ToolConfig<DocsSearchParams, DocsSearchResponse> = {
id: 'docs_search_internal',
name: 'Search Documentation',
description:
@@ -16,7 +39,7 @@ export const docsSearchTool: ToolConfig = {
topK: {
type: 'number',
required: false,
description: 'Number of results to return (default: 5, max: 10)',
description: 'Number of results to return (default: 5, max: 20)',
},
},
@@ -26,10 +49,55 @@ export const docsSearchTool: ToolConfig = {
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
query: params.query,
topK: params.topK || 5,
}),
// Build the POST body, clamping topK into the supported [1, 20] range.
// A falsy/absent topK falls back to the default of 5 before clamping.
body: (params) => {
  const requested = params.topK || 5
  const clampedTopK = Math.min(20, Math.max(1, requested))
  return { query: params.query, topK: clampedTopK }
},
isInternalRoute: true,
},
// Normalize the raw docs-search API payload into a DocsSearchResponse.
// Throws on non-2xx responses so the tool framework surfaces the failure.
transformResponse: async (
  response: Response,
  params?: DocsSearchParams
): Promise<DocsSearchResponse> => {
  if (!response.ok) {
    throw new Error(`Docs search failed: ${response.status} ${response.statusText}`)
  }
  const data = await response.json()
  // Guard against a malformed payload: only map when `results` is actually an
  // array — the previous `data.results || []` would crash .map() on any
  // truthy non-array value (e.g. an error object in the results slot).
  const rawResults: any[] = Array.isArray(data.results) ? data.results : []
  // Fill defensive defaults so downstream consumers always see a complete shape.
  const results: DocsSearchResult[] = rawResults.map((result: any) => ({
    id: result.id || '',
    title: result.title || 'Untitled',
    content: result.content || '',
    url: result.url || '',
    score: typeof result.score === 'number' ? result.score : 0,
    metadata: result.metadata || {},
  }))
  return {
    success: true,
    output: {
      results,
      query: params?.query || '',
      totalResults: results.length,
      searchTime: data.searchTime || 0,
    },
  }
},
// Map any thrown value to a user-facing message. Typed `unknown` rather than
// `any` so the body is forced to narrow before touching the value; a
// (unknown) => string implementation remains assignable to an (any) => string
// slot, so the ToolConfig contract is unchanged.
transformError: (error: unknown): string => {
  if (error instanceof Error) {
    return `Documentation search failed: ${error.message}`
  }
  return 'An unexpected error occurred while searching documentation'
},
}

View File

@@ -1,6 +1,24 @@
import type { ToolConfig } from '../types'
import type { ToolConfig, ToolResponse } from '../types'
export const getUserWorkflowTool: ToolConfig = {
/** Parameters for the get-user-workflow tool. */
interface GetWorkflowParams {
/** When true, the response also carries block/connection metadata. */
includeMetadata?: boolean
// NOTE(review): presumably injected by the tool framework rather than the
// caller (underscore prefix) — verify against the tool runner.
_context?: {
/** ID of the workflow whose YAML should be fetched. */
workflowId: string
}
}
/** Response shape for the get-user-workflow tool. */
interface GetWorkflowResponse extends ToolResponse {
output: {
/** The workflow serialized as YAML. */
yaml: string
// Optional summary stats; presumably present only when includeMetadata
// was requested — implementation not visible here, confirm.
metadata?: {
blockCount: number
connectionCount: number
/** Timestamp of last modification — format (ISO vs. epoch) not shown here. */
lastModified: string
}
}
}
export const getUserWorkflowTool: ToolConfig<GetWorkflowParams, GetWorkflowResponse> = {
id: 'get_user_workflow',
name: 'Get User Workflow',
description: