Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-22 21:38:05 -05:00)

Compare commits: v0.5.67...improvemen (9 commits)
| SHA1 |
|---|
| 3aedf4137b |
| 748793e07d |
| 91da7e183a |
| ab09a5ad23 |
| fcd0240db6 |
| 4e4149792a |
| 9a8b591257 |
| f3ae3f8442 |
| 66dfe2c6b2 |
@@ -313,7 +313,7 @@ describe('Function Execute API Route', () => {
         'block-2': 'world',
       },
       blockNameMapping: {
-        validVar: 'block-1',
+        validvar: 'block-1',
         another_valid: 'block-2',
       },
     })
@@ -539,7 +539,7 @@ describe('Function Execute API Route', () => {
         'block-complex': complexData,
       },
       blockNameMapping: {
-        complexData: 'block-complex',
+        complexdata: 'block-complex',
      },
    })

@@ -6,11 +6,11 @@ import { executeInE2B } from '@/lib/execution/e2b'
 import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
 import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
 import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
+import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
 import {
   createEnvVarPattern,
   createWorkflowVariablePattern,
 } from '@/executor/utils/reference-validation'
-import { navigatePath } from '@/executor/variables/resolvers/reference'

 export const dynamic = 'force-dynamic'
 export const runtime = 'nodejs'
@@ -470,14 +470,17 @@ function resolveEnvironmentVariables(

 function resolveTagVariables(
   code: string,
-  blockData: Record<string, any>,
+  blockData: Record<string, unknown>,
   blockNameMapping: Record<string, string>,
-  contextVariables: Record<string, any>
+  blockOutputSchemas: Record<string, OutputSchema>,
+  contextVariables: Record<string, unknown>,
+  language = 'javascript'
 ): string {
   let resolvedCode = code
+  const undefinedLiteral = language === 'python' ? 'None' : 'undefined'

   const tagPattern = new RegExp(
-    `${REFERENCE.START}([a-zA-Z_][a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])${REFERENCE.END}`,
+    `${REFERENCE.START}([a-zA-Z_](?:[a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])?)${REFERENCE.END}`,
     'g'
   )
   const tagMatches = resolvedCode.match(tagPattern) || []
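The old pattern required a tag name of at least two characters: the leading `[a-zA-Z_]` and the trailing `[a-zA-Z0-9_]` each consume one character. Wrapping the tail in an optional non-capturing group lets single-character references match as well. A minimal standalone sketch of the difference, assuming `REFERENCE.START`/`REFERENCE.END` are `<`/`>` and the path delimiter is `.` (those values are not shown in this diff):

```ts
// Hypothetical repro; the delimiter characters are assumptions.
const oldPattern = /<([a-zA-Z_][a-zA-Z0-9_.]*[a-zA-Z0-9_])>/g
const newPattern = /<([a-zA-Z_](?:[a-zA-Z0-9_.]*[a-zA-Z0-9_])?)>/g

console.log('<a>'.match(oldPattern)) // null: old pattern needs two or more characters
console.log('<a>'.match(newPattern)) // ['<a>']: optional tail admits one-character names
console.log('<block.field>'.match(newPattern)) // ['<block.field>'] still matches
```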
@@ -486,41 +489,37 @@ function resolveTagVariables(
     const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
     const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
     const blockName = pathParts[0]
+    const fieldPath = pathParts.slice(1)

-    const blockId = blockNameMapping[blockName]
-    if (!blockId) {
+    const result = resolveBlockReference(blockName, fieldPath, {
+      blockNameMapping,
+      blockData,
+      blockOutputSchemas,
+    })
+
+    if (!result) {
       continue
     }

-    const blockOutput = blockData[blockId]
-    if (blockOutput === undefined) {
-      continue
-    }
-
-    let tagValue: any
-    if (pathParts.length === 1) {
-      tagValue = blockOutput
-    } else {
-      tagValue = navigatePath(blockOutput, pathParts.slice(1))
-    }
+    let tagValue = result.value

+    if (tagValue === undefined) {
+      resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), undefinedLiteral)
+      continue
+    }

-    if (
-      typeof tagValue === 'string' &&
-      tagValue.length > 100 &&
-      (tagValue.startsWith('{') || tagValue.startsWith('['))
-    ) {
-      try {
-        tagValue = JSON.parse(tagValue)
-      } catch {
-        // Keep as-is
+    if (typeof tagValue === 'string') {
+      const trimmed = tagValue.trimStart()
+      if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
+        try {
+          tagValue = JSON.parse(tagValue)
+        } catch {
+          // Keep as string if not valid JSON
+        }
       }
     }

-    const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
+    const safeVarName = `__tag_${tagName.replace(/_/g, '_1').replace(/\./g, '_0')}`
     contextVariables[safeVarName] = tagValue
     resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
   }
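The `safeVarName` change fixes a collision: the old sanitizer collapsed every non-alphanumeric character to `_`, so distinct tags such as `a_b.c` and `a.b_c` produced the same variable and clobbered each other in `contextVariables`. The new scheme is an injective escape: literal `_` is rewritten to `_1` first, then `.` to `_0`, so every tag name maps to a unique, decodable identifier. A small sketch of both encodings:

```ts
// Illustrative only; mirrors the two replace chains in the diff above.
const oldSafe = (tag: string) => `__tag_${tag.replace(/[^a-zA-Z0-9_]/g, '_')}`
const newSafe = (tag: string) => `__tag_${tag.replace(/_/g, '_1').replace(/\./g, '_0')}`

console.log(oldSafe('a_b.c')) // __tag_a_b_c
console.log(oldSafe('a.b_c')) // __tag_a_b_c   <- collision
console.log(newSafe('a_b.c')) // __tag_a_1b_0c
console.log(newSafe('a.b_c')) // __tag_a_0b_1c <- distinct
```

Escaping `_` before `.` matters: the `_0` sequences introduced for dots must not themselves be re-escaped, which is exactly what this ordering guarantees.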
@@ -537,18 +536,27 @@ function resolveTagVariables(
  */
 function resolveCodeVariables(
   code: string,
-  params: Record<string, any>,
+  params: Record<string, unknown>,
   envVars: Record<string, string> = {},
-  blockData: Record<string, any> = {},
+  blockData: Record<string, unknown> = {},
   blockNameMapping: Record<string, string> = {},
-  workflowVariables: Record<string, any> = {}
-): { resolvedCode: string; contextVariables: Record<string, any> } {
+  blockOutputSchemas: Record<string, OutputSchema> = {},
+  workflowVariables: Record<string, unknown> = {},
+  language = 'javascript'
+): { resolvedCode: string; contextVariables: Record<string, unknown> } {
   let resolvedCode = code
-  const contextVariables: Record<string, any> = {}
+  const contextVariables: Record<string, unknown> = {}

   resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
   resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
-  resolvedCode = resolveTagVariables(resolvedCode, blockData, blockNameMapping, contextVariables)
+  resolvedCode = resolveTagVariables(
+    resolvedCode,
+    blockData,
+    blockNameMapping,
+    blockOutputSchemas,
+    contextVariables,
+    language
+  )

   return { resolvedCode, contextVariables }
 }

@@ -585,6 +593,7 @@ export async function POST(req: NextRequest) {
       envVars = {},
       blockData = {},
       blockNameMapping = {},
+      blockOutputSchemas = {},
       workflowVariables = {},
       workflowId,
       isCustomTool = false,

@@ -601,20 +610,21 @@ export async function POST(req: NextRequest) {
       isCustomTool,
     })

     // Resolve variables in the code with workflow environment variables
+    const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
+
     const codeResolution = resolveCodeVariables(
       code,
       executionParams,
       envVars,
       blockData,
       blockNameMapping,
-      workflowVariables
+      blockOutputSchemas,
+      workflowVariables,
+      lang
     )
     resolvedCode = codeResolution.resolvedCode
     const contextVariables = codeResolution.contextVariables

-    const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
-
     let jsImports = ''
     let jsRemainingCode = resolvedCode
     let hasImports = false

@@ -670,7 +680,11 @@ export async function POST(req: NextRequest) {
       prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
       prologueLineCount++
       for (const [k, v] of Object.entries(contextVariables)) {
-        prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
+        if (v === undefined) {
+          prologue += `const ${k} = undefined;\n`
+        } else {
+          prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
+        }
         prologueLineCount++
       }
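The `undefined` special case exists because `JSON.stringify(undefined)` evaluates to `undefined` rather than a string, so the old template emitted an invalid line into the generated prologue. A quick illustration:

```ts
const v = undefined
// Both stringify calls yield undefined, which interpolates as the text "undefined".
const generated = `const x = JSON.parse(${JSON.stringify(JSON.stringify(v))});`
console.log(generated) // const x = JSON.parse(undefined);
// At runtime JSON.parse(undefined) coerces its argument to the string 'undefined'
// and throws a SyntaxError, so the sandboxed user code would crash before running.
```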
@@ -741,7 +755,11 @@ export async function POST(req: NextRequest) {
       prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
       prologueLineCount++
       for (const [k, v] of Object.entries(contextVariables)) {
-        prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
+        if (v === undefined) {
+          prologue += `${k} = None\n`
+        } else {
+          prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
+        }
         prologueLineCount++
       }
       const wrapped = [

@@ -5,6 +5,7 @@ import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import {
   bulkDocumentOperation,
+  bulkDocumentOperationByFilter,
   createDocumentRecords,
   createSingleDocument,
   getDocuments,
@@ -57,13 +58,20 @@ const BulkCreateDocumentsSchema = z.object({
   bulk: z.literal(true),
 })

-const BulkUpdateDocumentsSchema = z.object({
-  operation: z.enum(['enable', 'disable', 'delete']),
-  documentIds: z
-    .array(z.string())
-    .min(1, 'At least one document ID is required')
-    .max(100, 'Cannot operate on more than 100 documents at once'),
-})
+const BulkUpdateDocumentsSchema = z
+  .object({
+    operation: z.enum(['enable', 'disable', 'delete']),
+    documentIds: z
+      .array(z.string())
+      .min(1, 'At least one document ID is required')
+      .max(100, 'Cannot operate on more than 100 documents at once')
+      .optional(),
+    selectAll: z.boolean().optional(),
+    enabledFilter: z.enum(['all', 'enabled', 'disabled']).optional(),
+  })
+  .refine((data) => data.selectAll || (data.documentIds && data.documentIds.length > 0), {
+    message: 'Either selectAll must be true or documentIds must be provided',
+  })

 export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   const requestId = randomUUID().slice(0, 8)
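With the `.refine`, a payload is accepted when it either names explicit `documentIds` or sets `selectAll: true` (optionally scoped by `enabledFilter`); a payload with neither fails validation. Illustrative `safeParse` calls against the schema above:

```ts
BulkUpdateDocumentsSchema.safeParse({ operation: 'disable', documentIds: ['doc-1'] })
// -> { success: true, ... }

BulkUpdateDocumentsSchema.safeParse({
  operation: 'delete',
  selectAll: true,
  enabledFilter: 'disabled',
})
// -> { success: true, ... }

BulkUpdateDocumentsSchema.safeParse({ operation: 'enable' })
// -> { success: false, ... } 'Either selectAll must be true or documentIds must be provided'
```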
@@ -90,14 +98,17 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
   }

   const url = new URL(req.url)
-  const includeDisabled = url.searchParams.get('includeDisabled') === 'true'
+  const enabledFilter = url.searchParams.get('enabledFilter') as
+    | 'all'
+    | 'enabled'
+    | 'disabled'
+    | null
   const search = url.searchParams.get('search') || undefined
   const limit = Number.parseInt(url.searchParams.get('limit') || '50')
   const offset = Number.parseInt(url.searchParams.get('offset') || '0')
   const sortByParam = url.searchParams.get('sortBy')
   const sortOrderParam = url.searchParams.get('sortOrder')

   // Validate sort parameters
   const validSortFields: DocumentSortField[] = [
     'filename',
     'fileSize',
@@ -105,6 +116,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
     'chunkCount',
     'uploadedAt',
     'processingStatus',
+    'enabled',
   ]
   const validSortOrders: SortOrder[] = ['asc', 'desc']

@@ -120,7 +132,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
   const result = await getDocuments(
     knowledgeBaseId,
     {
-      includeDisabled,
+      enabledFilter: enabledFilter || undefined,
       search,
       limit,
       offset,

@@ -190,8 +202,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
   const createdDocuments = await createDocumentRecords(
     validatedData.documents,
     knowledgeBaseId,
-    requestId,
-    userId
+    requestId
   )

   logger.info(

@@ -250,16 +261,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       throw validationError
     }
   } else {
     // Handle single document creation
     try {
       const validatedData = CreateDocumentSchema.parse(body)

-      const newDocument = await createSingleDocument(
-        validatedData,
-        knowledgeBaseId,
-        requestId,
-        userId
-      )
+      const newDocument = await createSingleDocument(validatedData, knowledgeBaseId, requestId)

       try {
         const { PlatformEvents } = await import('@/lib/core/telemetry')

@@ -294,7 +299,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
   } catch (error) {
     logger.error(`[${requestId}] Error creating document`, error)
-
     // Check if it's a storage limit error
     const errorMessage = error instanceof Error ? error.message : 'Failed to create document'
     const isStorageLimitError =
       errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
@@ -331,16 +335,20 @@ export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id

   try {
     const validatedData = BulkUpdateDocumentsSchema.parse(body)
-    const { operation, documentIds } = validatedData
+    const { operation, documentIds, selectAll, enabledFilter } = validatedData

     try {
-      const result = await bulkDocumentOperation(
-        knowledgeBaseId,
-        operation,
-        documentIds,
-        requestId,
-        session.user.id
-      )
+      let result
+      if (selectAll) {
+        result = await bulkDocumentOperationByFilter(
+          knowledgeBaseId,
+          operation,
+          enabledFilter,
+          requestId
+        )
+      } else {
+        result = await bulkDocumentOperation(knowledgeBaseId, operation, documentIds!, requestId)
+      }

       return NextResponse.json({
         success: true,
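The PATCH handler now accepts two request shapes: the original explicit-ID form and a filter-driven select-all form that operates server-side on everything matching the current filter. Hypothetical client calls (the route path is assumed from surrounding context, not shown in this diff):

```ts
// Explicit IDs (pre-existing behavior)
await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ operation: 'disable', documentIds: ['doc-1', 'doc-2'] }),
})

// Select-all within the active filter (new behavior); no IDs are sent,
// so the operation is not capped at the 100-document client-side limit.
await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ operation: 'delete', selectAll: true, enabledFilter: 'disabled' }),
})
```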
@@ -61,6 +61,7 @@ export function EditChunkModal({
   const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
   const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
   const [tokenizerOn, setTokenizerOn] = useState(false)
+  const [hoveredTokenIndex, setHoveredTokenIndex] = useState<number | null>(null)
   const textareaRef = useRef<HTMLTextAreaElement>(null)

   const error = mutationError?.message ?? null

@@ -254,6 +255,8 @@ export function EditChunkModal({
   style={{
     backgroundColor: getTokenBgColor(index),
   }}
+  onMouseEnter={() => setHoveredTokenIndex(index)}
+  onMouseLeave={() => setHoveredTokenIndex(null)}
 >
   {token}
 </span>

@@ -281,6 +284,11 @@ export function EditChunkModal({
 <div className='flex items-center gap-[8px]'>
   <span className='text-[12px] text-[var(--text-secondary)]'>Tokenizer</span>
   <Switch checked={tokenizerOn} onCheckedChange={setTokenizerOn} />
+  {tokenizerOn && hoveredTokenIndex !== null && (
+    <span className='text-[12px] text-[var(--text-tertiary)]'>
+      Token #{hoveredTokenIndex + 1}
+    </span>
+  )}
 </div>
 <span className='text-[12px] text-[var(--text-secondary)]'>
   {tokenCount.toLocaleString()}

@@ -36,6 +36,7 @@ import {
 import { Input } from '@/components/ui/input'
 import { SearchHighlight } from '@/components/ui/search-highlight'
 import { Skeleton } from '@/components/ui/skeleton'
+import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
 import type { ChunkData } from '@/lib/knowledge/types'
 import {
   ChunkContextMenu,

@@ -58,55 +59,6 @@ import {

 const logger = createLogger('Document')

-/**
- * Formats a date string to relative time (e.g., "2h ago", "3d ago")
- */
-function formatRelativeTime(dateString: string): string {
-  const date = new Date(dateString)
-  const now = new Date()
-  const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
-
-  if (diffInSeconds < 60) {
-    return 'just now'
-  }
-  if (diffInSeconds < 3600) {
-    const minutes = Math.floor(diffInSeconds / 60)
-    return `${minutes}m ago`
-  }
-  if (diffInSeconds < 86400) {
-    const hours = Math.floor(diffInSeconds / 3600)
-    return `${hours}h ago`
-  }
-  if (diffInSeconds < 604800) {
-    const days = Math.floor(diffInSeconds / 86400)
-    return `${days}d ago`
-  }
-  if (diffInSeconds < 2592000) {
-    const weeks = Math.floor(diffInSeconds / 604800)
-    return `${weeks}w ago`
-  }
-  if (diffInSeconds < 31536000) {
-    const months = Math.floor(diffInSeconds / 2592000)
-    return `${months}mo ago`
-  }
-  const years = Math.floor(diffInSeconds / 31536000)
-  return `${years}y ago`
-}
-
-/**
- * Formats a date string to absolute format for tooltip display
- */
-function formatAbsoluteDate(dateString: string): string {
-  const date = new Date(dateString)
-  return date.toLocaleDateString('en-US', {
-    year: 'numeric',
-    month: 'short',
-    day: 'numeric',
-    hour: '2-digit',
-    minute: '2-digit',
-  })
-}
-
 interface DocumentProps {
   knowledgeBaseId: string
   documentId: string

@@ -304,7 +256,6 @@ export function Document({

   const [searchQuery, setSearchQuery] = useState('')
   const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
-  const [isSearching, setIsSearching] = useState(false)

   const {
     chunks: initialChunks,

@@ -344,7 +295,6 @@ export function Document({
     const handler = setTimeout(() => {
       startTransition(() => {
         setDebouncedSearchQuery(searchQuery)
-        setIsSearching(searchQuery.trim().length > 0)
       })
     }, 200)

@@ -353,6 +303,7 @@ export function Document({
     }
   }, [searchQuery])

+  const isSearching = debouncedSearchQuery.trim().length > 0
   const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0
   const SEARCH_PAGE_SIZE = 50
   const maxSearchPages = Math.ceil(searchResults.length / SEARCH_PAGE_SIZE)
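The `isSearching` flag moves from a mirrored `useState` (set inside the debounce timeout) to a value derived from `debouncedSearchQuery` during render, removing a state-synchronization step that could drift by a render. A minimal sketch of the debounce-then-derive pattern; the hook name here is hypothetical, not the component's actual code:

```tsx
import { startTransition, useEffect, useState } from 'react'

// Sketch: debounce the raw query, then derive the searching flag.
function useDebouncedSearch(searchQuery: string, delayMs = 200) {
  const [debounced, setDebounced] = useState('')

  useEffect(() => {
    const handler = setTimeout(() => {
      startTransition(() => setDebounced(searchQuery))
    }, delayMs)
    return () => clearTimeout(handler)
  }, [searchQuery, delayMs])

  // Derived during render: it can never disagree with the debounced query.
  const isSearching = debounced.trim().length > 0
  return { debouncedSearchQuery: debounced, isSearching }
}
```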
@@ -27,6 +27,10 @@ import {
   ModalContent,
   ModalFooter,
   ModalHeader,
+  Popover,
+  PopoverContent,
+  PopoverItem,
+  PopoverTrigger,
   Table,
   TableBody,
   TableCell,

@@ -40,8 +44,11 @@ import { Input } from '@/components/ui/input'
 import { SearchHighlight } from '@/components/ui/search-highlight'
 import { Skeleton } from '@/components/ui/skeleton'
 import { cn } from '@/lib/core/utils/cn'
+import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
+import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowledge/constants'
 import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
 import type { DocumentData } from '@/lib/knowledge/types'
+import { formatFileSize } from '@/lib/uploads/utils/file-utils'
 import {
   ActionBar,
   AddDocumentsModal,

@@ -189,8 +196,8 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
       </div>
     </div>

-    <div className='mt-[4px]'>
-      <Skeleton className='h-[21px] w-[300px] rounded-[4px]' />
+    <div>
+      <Skeleton className='mt-[4px] h-[21px] w-[300px] rounded-[4px]' />
     </div>

     <div className='mt-[16px] flex items-center gap-[8px]'>

@@ -208,9 +215,12 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
         className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
       />
     </div>
-    <Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
-      Add Documents
-    </Button>
+    <div className='flex items-center gap-[8px]'>
+      <Skeleton className='h-[32px] w-[52px] rounded-[6px]' />
+      <Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
+        Add Documents
+      </Button>
+    </div>
   </div>

   <div className='mt-[12px] flex flex-1 flex-col overflow-hidden'>

@@ -222,73 +232,11 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
   )
 }

-/**
- * Formats a date string to relative time (e.g., "2h ago", "3d ago")
- */
-function formatRelativeTime(dateString: string): string {
-  const date = new Date(dateString)
-  const now = new Date()
-  const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
-
-  if (diffInSeconds < 60) {
-    return 'just now'
-  }
-  if (diffInSeconds < 3600) {
-    const minutes = Math.floor(diffInSeconds / 60)
-    return `${minutes}m ago`
-  }
-  if (diffInSeconds < 86400) {
-    const hours = Math.floor(diffInSeconds / 3600)
-    return `${hours}h ago`
-  }
-  if (diffInSeconds < 604800) {
-    const days = Math.floor(diffInSeconds / 86400)
-    return `${days}d ago`
-  }
-  if (diffInSeconds < 2592000) {
-    const weeks = Math.floor(diffInSeconds / 604800)
-    return `${weeks}w ago`
-  }
-  if (diffInSeconds < 31536000) {
-    const months = Math.floor(diffInSeconds / 2592000)
-    return `${months}mo ago`
-  }
-  const years = Math.floor(diffInSeconds / 31536000)
-  return `${years}y ago`
-}
-
-/**
- * Formats a date string to absolute format for tooltip display
- */
-function formatAbsoluteDate(dateString: string): string {
-  const date = new Date(dateString)
-  return date.toLocaleDateString('en-US', {
-    year: 'numeric',
-    month: 'short',
-    day: 'numeric',
-    hour: '2-digit',
-    minute: '2-digit',
-  })
-}
-
 interface KnowledgeBaseProps {
   id: string
   knowledgeBaseName?: string
 }

-function getFileIcon(mimeType: string, filename: string) {
-  const IconComponent = getDocumentIcon(mimeType, filename)
-  return <IconComponent className='h-6 w-5 flex-shrink-0' />
-}
-
-function formatFileSize(bytes: number): string {
-  if (bytes === 0) return '0 Bytes'
-  const k = 1024
-  const sizes = ['Bytes', 'KB', 'MB', 'GB']
-  const i = Math.floor(Math.log(bytes) / Math.log(k))
-  return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`
-}
-
 const AnimatedLoader = ({ className }: { className?: string }) => (
   <Loader2 className={cn(className, 'animate-spin')} />
 )

@@ -336,53 +284,24 @@ const getStatusBadge = (doc: DocumentData) => {
   }
 }

-const TAG_SLOTS = [
-  'tag1',
-  'tag2',
-  'tag3',
-  'tag4',
-  'tag5',
-  'tag6',
-  'tag7',
-  'number1',
-  'number2',
-  'number3',
-  'number4',
-  'number5',
-  'date1',
-  'date2',
-  'boolean1',
-  'boolean2',
-  'boolean3',
-] as const
-
-type TagSlot = (typeof TAG_SLOTS)[number]
-
 interface TagValue {
-  slot: TagSlot
+  slot: AllTagSlot
   displayName: string
   value: string
 }

-const TAG_FIELD_TYPES: Record<string, string> = {
-  tag: 'text',
-  number: 'number',
-  date: 'date',
-  boolean: 'boolean',
-}
-
 /**
  * Computes tag values for a document
  */
 function getDocumentTags(doc: DocumentData, definitions: TagDefinition[]): TagValue[] {
   const result: TagValue[] = []

-  for (const slot of TAG_SLOTS) {
+  for (const slot of ALL_TAG_SLOTS) {
     const raw = doc[slot]
     if (raw == null) continue

     const def = definitions.find((d) => d.tagSlot === slot)
-    const fieldType = def?.fieldType || TAG_FIELD_TYPES[slot.replace(/\d+$/, '')] || 'text'
+    const fieldType = def?.fieldType || getFieldTypeForSlot(slot) || 'text'

     let value: string
     if (fieldType === 'date') {

@@ -424,6 +343,8 @@ export function KnowledgeBase({

   const [searchQuery, setSearchQuery] = useState('')
   const [showTagsModal, setShowTagsModal] = useState(false)
+  const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
+  const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)

   /**
    * Memoize the search query setter to prevent unnecessary re-renders

@@ -434,6 +355,7 @@ export function KnowledgeBase({
   }, [])

   const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
+  const [isSelectAllMode, setIsSelectAllMode] = useState(false)
   const [showDeleteDialog, setShowDeleteDialog] = useState(false)
   const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
   const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)

@@ -460,7 +382,6 @@ export function KnowledgeBase({
     error: knowledgeBaseError,
     refresh: refreshKnowledgeBase,
   } = useKnowledgeBase(id)
-  const [hasProcessingDocuments, setHasProcessingDocuments] = useState(false)

   const {
     documents,

@@ -469,6 +390,7 @@ export function KnowledgeBase({
     isFetching: isFetchingDocuments,
     isPlaceholderData: isPlaceholderDocuments,
     error: documentsError,
+    hasProcessingDocuments,
     updateDocument,
     refreshDocuments,
   } = useKnowledgeBaseDocuments(id, {

@@ -477,7 +399,14 @@ export function KnowledgeBase({
     offset: (currentPage - 1) * DOCUMENTS_PER_PAGE,
     sortBy,
     sortOrder,
-    refetchInterval: hasProcessingDocuments && !isDeleting ? 3000 : false,
+    refetchInterval: (data) => {
+      if (isDeleting) return false
+      const hasPending = data?.documents?.some(
+        (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
+      )
+      return hasPending ? 3000 : false
+    },
+    enabledFilter,
   })

   const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)
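Switching `refetchInterval` from a precomputed boolean to a callback lets the polling decision read the latest fetched page instead of a `hasProcessingDocuments` state that was updated in a separate effect and could lag one render behind. The `(data) => ...` shape matches a TanStack Query v4-style option where the callback receives the most recent data; if this hook wraps something else, treat that signature as an assumption. The decision in isolation:

```ts
// Sketch of the polling predicate used above.
type DocsPage = { documents?: { processingStatus: string }[] }

const refetchInterval = (data?: DocsPage): number | false => {
  if (isDeleting) return false // never poll while a delete is in flight
  const hasPending = data?.documents?.some(
    (d) => d.processingStatus === 'pending' || d.processingStatus === 'processing'
  )
  return hasPending ? 3000 : false // poll every 3s only while work is outstanding
}
```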
@@ -543,52 +472,52 @@ export function KnowledgeBase({
       </TableHead>
     )

-  useEffect(() => {
-    const processing = documents.some(
-      (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
-    )
-    setHasProcessingDocuments(processing)
-
-    if (processing) {
-      checkForDeadProcesses()
-    }
-  }, [documents])
-
   /**
    * Checks for documents with stale processing states and marks them as failed
    */
-  const checkForDeadProcesses = () => {
-    const now = new Date()
-    const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes
+  const checkForDeadProcesses = useCallback(
+    (docsToCheck: DocumentData[]) => {
+      const now = new Date()
+      const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes

-    const staleDocuments = documents.filter((doc) => {
-      if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
-        return false
-      }
-
-      const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
-      return processingDuration > DEAD_PROCESS_THRESHOLD_MS
-    })
-
-    if (staleDocuments.length === 0) return
-
-    logger.warn(`Found ${staleDocuments.length} documents with dead processes`)
-
-    staleDocuments.forEach((doc) => {
-      updateDocumentMutation(
-        {
-          knowledgeBaseId: id,
-          documentId: doc.id,
-          updates: { markFailedDueToTimeout: true },
-        },
-        {
-          onSuccess: () => {
-            logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
-          },
-        }
-      )
-    })
-  }
+      const staleDocuments = docsToCheck.filter((doc) => {
+        if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
+          return false
+        }
+
+        const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
+        return processingDuration > DEAD_PROCESS_THRESHOLD_MS
+      })
+
+      if (staleDocuments.length === 0) return
+
+      logger.warn(`Found ${staleDocuments.length} documents with dead processes`)
+
+      staleDocuments.forEach((doc) => {
+        updateDocumentMutation(
+          {
+            knowledgeBaseId: id,
+            documentId: doc.id,
+            updates: { markFailedDueToTimeout: true },
+          },
+          {
+            onSuccess: () => {
+              logger.info(
+                `Successfully marked dead process as failed for document: ${doc.filename}`
+              )
+            },
+          }
+        )
+      })
+    },
+    [id, updateDocumentMutation]
+  )
+
+  useEffect(() => {
+    if (hasProcessingDocuments) {
+      checkForDeadProcesses(documents)
+    }
+  }, [hasProcessingDocuments, documents, checkForDeadProcesses])

   const handleToggleEnabled = (docId: string) => {
     const document = documents.find((doc) => doc.id === docId)
@@ -748,6 +677,7 @@ export function KnowledgeBase({
       setSelectedDocuments(new Set(documents.map((doc) => doc.id)))
     } else {
       setSelectedDocuments(new Set())
+      setIsSelectAllMode(false)
     }
   }

@@ -793,6 +723,26 @@ export function KnowledgeBase({
    * Handles bulk enabling of selected documents
    */
   const handleBulkEnable = () => {
+    if (isSelectAllMode) {
+      bulkDocumentMutation(
+        {
+          knowledgeBaseId: id,
+          operation: 'enable',
+          selectAll: true,
+          enabledFilter,
+        },
+        {
+          onSuccess: (result) => {
+            logger.info(`Successfully enabled ${result.successCount} documents`)
+            setSelectedDocuments(new Set())
+            setIsSelectAllMode(false)
+            refreshDocuments()
+          },
+        }
+      )
+      return
+    }
+
     const documentsToEnable = documents.filter(
       (doc) => selectedDocuments.has(doc.id) && !doc.enabled
     )

@@ -821,6 +771,26 @@ export function KnowledgeBase({
    * Handles bulk disabling of selected documents
    */
   const handleBulkDisable = () => {
+    if (isSelectAllMode) {
+      bulkDocumentMutation(
+        {
+          knowledgeBaseId: id,
+          operation: 'disable',
+          selectAll: true,
+          enabledFilter,
+        },
+        {
+          onSuccess: (result) => {
+            logger.info(`Successfully disabled ${result.successCount} documents`)
+            setSelectedDocuments(new Set())
+            setIsSelectAllMode(false)
+            refreshDocuments()
+          },
+        }
+      )
+      return
+    }
+
     const documentsToDisable = documents.filter(
       (doc) => selectedDocuments.has(doc.id) && doc.enabled
     )

@@ -845,18 +815,35 @@ export function KnowledgeBase({
     )
   }

   /**
    * Opens the bulk delete confirmation modal
    */
   const handleBulkDelete = () => {
     if (selectedDocuments.size === 0) return
     setShowBulkDeleteModal(true)
   }

   /**
    * Confirms and executes the bulk deletion of selected documents
    */
   const confirmBulkDelete = () => {
+    if (isSelectAllMode) {
+      bulkDocumentMutation(
+        {
+          knowledgeBaseId: id,
+          operation: 'delete',
+          selectAll: true,
+          enabledFilter,
+        },
+        {
+          onSuccess: (result) => {
+            logger.info(`Successfully deleted ${result.successCount} documents`)
+            refreshDocuments()
+            setSelectedDocuments(new Set())
+            setIsSelectAllMode(false)
+          },
+          onSettled: () => {
+            setShowBulkDeleteModal(false)
+          },
+        }
+      )
+      return
+    }
+
     const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))

     if (documentsToDelete.length === 0) return

@@ -881,14 +868,17 @@ export function KnowledgeBase({
   }

   const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))
-  const enabledCount = selectedDocumentsList.filter((doc) => doc.enabled).length
-  const disabledCount = selectedDocumentsList.filter((doc) => !doc.enabled).length
+  const enabledCount = isSelectAllMode
+    ? enabledFilter === 'disabled'
+      ? 0
+      : pagination.total
+    : selectedDocumentsList.filter((doc) => doc.enabled).length
+  const disabledCount = isSelectAllMode
+    ? enabledFilter === 'enabled'
+      ? 0
+      : pagination.total
+    : selectedDocumentsList.filter((doc) => !doc.enabled).length

   /**
    * Handle right-click on a document row
    * If right-clicking on an unselected document, select only that document
    * If right-clicking on a selected document with multiple selections, keep all selections
    */
   const handleDocumentContextMenu = useCallback(
     (e: React.MouseEvent, doc: DocumentData) => {
       const isCurrentlySelected = selectedDocuments.has(doc.id)
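The count logic in the hunk above exists because, in select-all mode, the client only has one page of documents loaded and cannot count enabled/disabled rows itself. It therefore falls back to the active filter: filtering to `disabled` guarantees zero enabled documents in the selection (and vice versa); otherwise the server total is used as the count. The same decision as a standalone helper, illustrative only:

```ts
// Hypothetical helper mirroring the nested ternaries above.
function selectionCount(
  kind: 'enabled' | 'disabled',
  isSelectAllMode: boolean,
  enabledFilter: 'all' | 'enabled' | 'disabled',
  total: number,
  selected: { enabled: boolean }[]
): number {
  if (!isSelectAllMode) {
    return selected.filter((d) => (kind === 'enabled' ? d.enabled : !d.enabled)).length
  }
  // Under select-all, the filter determines what the selection can contain.
  if (kind === 'enabled') return enabledFilter === 'disabled' ? 0 : total
  return enabledFilter === 'enabled' ? 0 : total
}
```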
@@ -1005,11 +995,13 @@ export function KnowledgeBase({
       </div>
     </div>

-    {knowledgeBase?.description && (
-      <p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
-        {knowledgeBase.description}
-      </p>
-    )}
+    <div>
+      {knowledgeBase?.description && (
+        <p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
+          {knowledgeBase.description}
+        </p>
+      )}
+    </div>

     <div className='mt-[16px] flex items-center gap-[8px]'>
       <span className='text-[14px] text-[var(--text-muted)]'>

@@ -1052,21 +1044,76 @@ export function KnowledgeBase({
       ))}
     </div>

-    <Tooltip.Root>
-      <Tooltip.Trigger asChild>
-        <Button
-          onClick={handleAddDocuments}
-          disabled={userPermissions.canEdit !== true}
-          variant='tertiary'
-          className='h-[32px] rounded-[6px]'
-        >
-          Add Documents
-        </Button>
-      </Tooltip.Trigger>
-      {userPermissions.canEdit !== true && (
-        <Tooltip.Content>Write permission required to add documents</Tooltip.Content>
-      )}
-    </Tooltip.Root>
+    <div className='flex items-center gap-[8px]'>
+      <Popover open={isFilterPopoverOpen} onOpenChange={setIsFilterPopoverOpen}>
+        <PopoverTrigger asChild>
+          <Button variant='default' className='h-[32px] rounded-[6px]'>
+            {enabledFilter === 'all'
+              ? 'All'
+              : enabledFilter === 'enabled'
+                ? 'Enabled'
+                : 'Disabled'}
+            <ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
+          </Button>
+        </PopoverTrigger>
+        <PopoverContent align='end' side='bottom' sideOffset={4}>
+          <div className='flex flex-col gap-[2px]'>
+            <PopoverItem
+              active={enabledFilter === 'all'}
+              onClick={() => {
+                setEnabledFilter('all')
+                setIsFilterPopoverOpen(false)
+                setCurrentPage(1)
+                setSelectedDocuments(new Set())
+                setIsSelectAllMode(false)
+              }}
+            >
+              All
+            </PopoverItem>
+            <PopoverItem
+              active={enabledFilter === 'enabled'}
+              onClick={() => {
+                setEnabledFilter('enabled')
+                setIsFilterPopoverOpen(false)
+                setCurrentPage(1)
+                setSelectedDocuments(new Set())
+                setIsSelectAllMode(false)
+              }}
+            >
+              Enabled
+            </PopoverItem>
+            <PopoverItem
+              active={enabledFilter === 'disabled'}
+              onClick={() => {
+                setEnabledFilter('disabled')
+                setIsFilterPopoverOpen(false)
+                setCurrentPage(1)
+                setSelectedDocuments(new Set())
+                setIsSelectAllMode(false)
+              }}
+            >
+              Disabled
+            </PopoverItem>
+          </div>
+        </PopoverContent>
+      </Popover>
+
+      <Tooltip.Root>
+        <Tooltip.Trigger asChild>
+          <Button
+            onClick={handleAddDocuments}
+            disabled={userPermissions.canEdit !== true}
+            variant='tertiary'
+            className='h-[32px] rounded-[6px]'
+          >
+            Add Documents
+          </Button>
+        </Tooltip.Trigger>
+        {userPermissions.canEdit !== true && (
+          <Tooltip.Content>Write permission required to add documents</Tooltip.Content>
+        )}
+      </Tooltip.Root>
+    </div>
   </div>

   {error && !isLoadingKnowledgeBase && (
@@ -1089,14 +1136,20 @@ export function KnowledgeBase({
   <div className='mt-[10px] flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
     <div className='text-center'>
       <p className='font-medium text-[var(--text-secondary)] text-sm'>
-        {searchQuery ? 'No documents found' : 'No documents yet'}
+        {searchQuery
+          ? 'No documents found'
+          : enabledFilter !== 'all'
+            ? 'Nothing matches your filter'
+            : 'No documents yet'}
       </p>
       <p className='mt-1 text-[var(--text-muted)] text-xs'>
         {searchQuery
           ? 'Try a different search term'
-          : userPermissions.canEdit === true
-            ? 'Add documents to get started'
-            : 'Documents will appear here once added'}
+          : enabledFilter !== 'all'
+            ? 'Try changing the filter'
+            : userPermissions.canEdit === true
+              ? 'Add documents to get started'
+              : 'Documents will appear here once added'}
       </p>
     </div>
   </div>

@@ -1120,7 +1173,7 @@ export function KnowledgeBase({
   {renderSortableHeader('tokenCount', 'Tokens', 'hidden w-[8%] lg:table-cell')}
   {renderSortableHeader('chunkCount', 'Chunks', 'w-[8%]')}
   {renderSortableHeader('uploadedAt', 'Uploaded', 'w-[11%]')}
-  {renderSortableHeader('processingStatus', 'Status', 'w-[10%]')}
+  {renderSortableHeader('enabled', 'Status', 'w-[10%]')}
   <TableHead className='w-[12%] px-[12px] py-[8px] text-[12px] text-[var(--text-secondary)]'>
     Tags
   </TableHead>

@@ -1164,7 +1217,10 @@ export function KnowledgeBase({
   </TableCell>
   <TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'>
     <div className='flex min-w-0 items-center gap-[8px]'>
-      {getFileIcon(doc.mimeType, doc.filename)}
+      {(() => {
+        const IconComponent = getDocumentIcon(doc.mimeType, doc.filename)
+        return <IconComponent className='h-6 w-5 flex-shrink-0' />
+      })()}
       <Tooltip.Root>
         <Tooltip.Trigger asChild>
           <span

@@ -1508,6 +1564,14 @@ export function KnowledgeBase({
   enabledCount={enabledCount}
   disabledCount={disabledCount}
   isLoading={isBulkOperating}
+  totalCount={pagination.total}
+  isAllPageSelected={isAllSelected}
+  isAllSelected={isSelectAllMode}
+  onSelectAll={() => setIsSelectAllMode(true)}
+  onClearSelectAll={() => {
+    setIsSelectAllMode(false)
+    setSelectedDocuments(new Set())
+  }}
 />

 <DocumentContextMenu
@@ -13,6 +13,11 @@ interface ActionBarProps {
   disabledCount?: number
   isLoading?: boolean
   className?: string
+  totalCount?: number
+  isAllPageSelected?: boolean
+  isAllSelected?: boolean
+  onSelectAll?: () => void
+  onClearSelectAll?: () => void
 }

 export function ActionBar({

@@ -24,14 +29,21 @@ export function ActionBar({
   disabledCount = 0,
   isLoading = false,
   className,
+  totalCount = 0,
+  isAllPageSelected = false,
+  isAllSelected = false,
+  onSelectAll,
+  onClearSelectAll,
 }: ActionBarProps) {
   const userPermissions = useUserPermissionsContext()

-  if (selectedCount === 0) return null
+  if (selectedCount === 0 && !isAllSelected) return null

   const canEdit = userPermissions.canEdit
   const showEnableButton = disabledCount > 0 && onEnable && canEdit
   const showDisableButton = enabledCount > 0 && onDisable && canEdit
+  const showSelectAllOption =
+    isAllPageSelected && !isAllSelected && totalCount > selectedCount && onSelectAll

   return (
     <motion.div

@@ -43,7 +55,31 @@ export function ActionBar({
 >
   <div className='flex items-center gap-[8px] rounded-[10px] border border-[var(--border)] bg-[var(--surface-2)] px-[8px] py-[6px]'>
     <span className='px-[4px] text-[13px] text-[var(--text-secondary)]'>
-      {selectedCount} selected
+      {isAllSelected ? totalCount : selectedCount} selected
+      {showSelectAllOption && (
+        <>
+          {' · '}
+          <button
+            type='button'
+            onClick={onSelectAll}
+            className='text-[var(--brand-primary)] hover:underline'
+          >
+            Select all
+          </button>
+        </>
+      )}
+      {isAllSelected && onClearSelectAll && (
+        <>
+          {' · '}
+          <button
+            type='button'
+            onClick={onClearSelectAll}
+            className='text-[var(--brand-primary)] hover:underline'
+          >
+            Clear
+          </button>
+        </>
+      )}
     </span>

     <div className='flex items-center gap-[5px]'>
@@ -123,7 +123,11 @@ export function RenameDocumentModal({
   >
     Cancel
   </Button>
-  <Button variant='tertiary' type='submit' disabled={isSubmitting || !name?.trim()}>
+  <Button
+    variant='tertiary'
+    type='submit'
+    disabled={isSubmitting || !name?.trim() || name.trim() === initialName}
+  >
     {isSubmitting ? 'Renaming...' : 'Rename'}
   </Button>
 </div>

@@ -3,6 +3,7 @@
 import { useCallback, useState } from 'react'
 import { useParams, useRouter } from 'next/navigation'
 import { Badge, DocumentAttachment, Tooltip } from '@/components/emcn'
+import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
 import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
 import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
 import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'

@@ -21,55 +22,6 @@ interface BaseCardProps {
   onDelete?: (id: string) => Promise<void>
 }

-/**
- * Formats a date string to relative time (e.g., "2h ago", "3d ago")
- */
-function formatRelativeTime(dateString: string): string {
-  const date = new Date(dateString)
-  const now = new Date()
-  const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
-
-  if (diffInSeconds < 60) {
-    return 'just now'
-  }
-  if (diffInSeconds < 3600) {
-    const minutes = Math.floor(diffInSeconds / 60)
-    return `${minutes}m ago`
-  }
-  if (diffInSeconds < 86400) {
-    const hours = Math.floor(diffInSeconds / 3600)
-    return `${hours}h ago`
-  }
-  if (diffInSeconds < 604800) {
-    const days = Math.floor(diffInSeconds / 86400)
-    return `${days}d ago`
-  }
-  if (diffInSeconds < 2592000) {
-    const weeks = Math.floor(diffInSeconds / 604800)
-    return `${weeks}w ago`
-  }
-  if (diffInSeconds < 31536000) {
-    const months = Math.floor(diffInSeconds / 2592000)
-    return `${months}mo ago`
-  }
-  const years = Math.floor(diffInSeconds / 31536000)
-  return `${years}y ago`
-}
-
-/**
- * Formats a date string to absolute format for tooltip display
- */
-function formatAbsoluteDate(dateString: string): string {
-  const date = new Date(dateString)
-  return date.toLocaleDateString('en-US', {
-    year: 'numeric',
-    month: 'short',
-    day: 'numeric',
-    hour: '2-digit',
-    minute: '2-digit',
-  })
-}
-
 /**
  * Skeleton placeholder for a knowledge base card
  */
@@ -344,53 +344,51 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
   <Textarea
     id='description'
     placeholder='Describe this knowledge base (optional)'
-    rows={3}
+    rows={4}
     {...register('description')}
     className={cn(errors.description && 'border-[var(--text-error)]')}
   />
 </div>

 <div className='space-y-[12px] rounded-[6px] bg-[var(--surface-5)] px-[12px] py-[14px]'>
-  <div className='grid grid-cols-2 gap-[12px]'>
-    <div className='flex flex-col gap-[8px]'>
-      <Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
-      <Input
-        id='minChunkSize'
-        placeholder='100'
-        {...register('minChunkSize', { valueAsNumber: true })}
-        className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
-        autoComplete='off'
-        data-form-type='other'
-        name='min-chunk-size'
-      />
-    </div>
-
-    <div className='flex flex-col gap-[8px]'>
-      <Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
-      <Input
-        id='maxChunkSize'
-        placeholder='1024'
-        {...register('maxChunkSize', { valueAsNumber: true })}
-        className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
-        autoComplete='off'
-        data-form-type='other'
-        name='max-chunk-size'
-      />
-    </div>
+  <div className='grid grid-cols-2 gap-[12px]'>
+    <div className='flex flex-col gap-[8px]'>
+      <Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
+      <Input
+        id='minChunkSize'
+        placeholder='100'
+        {...register('minChunkSize', { valueAsNumber: true })}
+        className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
+        autoComplete='off'
+        data-form-type='other'
+        name='min-chunk-size'
+      />
+    </div>

     <div className='flex flex-col gap-[8px]'>
-      <Label htmlFor='overlapSize'>Overlap (tokens)</Label>
+      <Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
       <Input
-        id='overlapSize'
-        placeholder='200'
-        {...register('overlapSize', { valueAsNumber: true })}
-        className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
+        id='maxChunkSize'
+        placeholder='1024'
+        {...register('maxChunkSize', { valueAsNumber: true })}
+        className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
         autoComplete='off'
         data-form-type='other'
-        name='overlap-size'
+        name='max-chunk-size'
       />
     </div>
   </div>

+  <div className='flex flex-col gap-[8px]'>
+    <Label htmlFor='overlapSize'>Overlap (tokens)</Label>
+    <Input
+      id='overlapSize'
+      placeholder='200'
+      {...register('overlapSize', { valueAsNumber: true })}
+      className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
+      autoComplete='off'
+      data-form-type='other'
+      name='overlap-size'
+    />
+  </div>
+  <p className='text-[11px] text-[var(--text-muted)]'>
+    1 token ≈ 4 characters. Max chunk size and overlap are in tokens.
+  </p>
@@ -59,7 +59,7 @@ export function EditKnowledgeBaseModal({
   handleSubmit,
   reset,
   watch,
-  formState: { errors },
+  formState: { errors, isDirty },
 } = useForm<FormValues>({
   resolver: zodResolver(FormSchema),
   defaultValues: {

@@ -127,7 +127,7 @@ export function EditKnowledgeBaseModal({
 <Textarea
   id='description'
   placeholder='Describe this knowledge base (optional)'
-  rows={3}
+  rows={4}
   {...register('description')}
   className={cn(errors.description && 'border-[var(--text-error)]')}
 />

@@ -161,7 +161,7 @@ export function EditKnowledgeBaseModal({
 <Button
   variant='tertiary'
   type='submit'
-  disabled={isSubmitting || !nameValue?.trim()}
+  disabled={isSubmitting || !nameValue?.trim() || !isDirty}
 >
   {isSubmitting ? 'Saving...' : 'Save'}
 </Button>

@@ -18,6 +18,7 @@ import {
 } from '@/components/emcn'
 import { WorkflowIcon } from '@/components/icons'
 import { cn } from '@/lib/core/utils/cn'
+import { formatDuration } from '@/lib/core/utils/formatting'
 import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config'
 import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
 import { getBlock, getBlockByToolName } from '@/blocks'

@@ -142,14 +143,6 @@ function normalizeAndSortSpans(spans: TraceSpan[]): TraceSpan[] {

 const DEFAULT_BLOCK_COLOR = '#6b7280'

-/**
- * Formats duration in ms
- */
-function formatDuration(ms: number): string {
-  if (ms < 1000) return `${ms}ms`
-  return `${(ms / 1000).toFixed(2)}s`
-}
-
 /**
  * Gets icon and color for a span type using block config
  */
@@ -314,7 +307,7 @@ function ExpandableRowHeader({
       </span>
     </div>
     <span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
-      {formatDuration(duration)}
+      {formatDuration(duration, { precision: 2 })}
     </span>
   </div>
 )
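Several panels previously carried their own two-line `formatDuration`; those copies are deleted in favor of the shared export from `@/lib/core/utils/formatting`. Its actual signature is not shown in this diff, but a sketch compatible with the removed locals and the new `{ precision: 2 }` call sites would be:

```ts
// Hedged sketch: the real export may differ; this matches the behavior
// of the deleted local helpers plus the precision option seen in the diff.
function formatDuration(ms: number, opts: { precision?: number } = {}): string {
  const { precision = 0 } = opts
  if (ms < 1000) return `${ms}ms` // sub-second durations stay in milliseconds
  return `${(ms / 1000).toFixed(precision)}s`
}

formatDuration(850) // '850ms'
formatDuration(2500, { precision: 2 }) // '2.50s'
```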
@@ -151,6 +151,29 @@ export const ActionBar = memo(
         </Tooltip.Root>
       )}

+      {isSubflowBlock && (
+        <Tooltip.Root>
+          <Tooltip.Trigger asChild>
+            <Button
+              variant='ghost'
+              onClick={(e) => {
+                e.stopPropagation()
+                if (!disabled) {
+                  collaborativeBatchToggleBlockEnabled([blockId])
+                }
+              }}
+              className={ACTION_BUTTON_STYLES}
+              disabled={disabled}
+            >
+              {isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
+            </Button>
+          </Tooltip.Trigger>
+          <Tooltip.Content side='top'>
+            {getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
+          </Tooltip.Content>
+        </Tooltip.Root>
+      )}
+
       {!isStartBlock && !isResponseBlock && (
         <Tooltip.Root>
           <Tooltip.Trigger asChild>

@@ -222,29 +245,6 @@ export const ActionBar = memo(
         </Tooltip.Root>
       )}

-      {isSubflowBlock && (
-        <Tooltip.Root>
-          <Tooltip.Trigger asChild>
-            <Button
-              variant='ghost'
-              onClick={(e) => {
-                e.stopPropagation()
-                if (!disabled) {
-                  collaborativeBatchToggleBlockEnabled([blockId])
-                }
-              }}
-              className={ACTION_BUTTON_STYLES}
-              disabled={disabled}
-            >
-              {isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
-            </Button>
-          </Tooltip.Trigger>
-          <Tooltip.Content side='top'>
-            {getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
-          </Tooltip.Content>
-        </Tooltip.Root>
-      )}
-
       <Tooltip.Root>
         <Tooltip.Trigger asChild>
           <Button
@@ -78,6 +78,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
     mode,
     setMode,
     isAborting,
+    maskCredentialValue,
   } = useCopilotStore()

   const messageCheckpoints = isUser ? allMessageCheckpoints[message.id] || [] : []

@@ -210,7 +211,10 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
         const isLastTextBlock =
           index === message.contentBlocks!.length - 1 && block.type === 'text'
         const parsed = parseSpecialTags(block.content)
-        const cleanBlockContent = parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
+        // Mask credential IDs in the displayed content
+        const cleanBlockContent = maskCredentialValue(
+          parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
+        )

         if (!cleanBlockContent.trim()) return null

@@ -238,7 +242,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
         return (
           <div key={blockKey} className='w-full'>
             <ThinkingBlock
-              content={block.content}
+              content={maskCredentialValue(block.content)}
               isStreaming={isActivelyStreaming}
               hasFollowingContent={hasFollowingContent}
              hasSpecialTags={hasSpecialTags}

@@ -261,7 +265,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
       }
       return null
     })
-  }, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage])
+  }, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage, maskCredentialValue])

   if (isUser) {
     return (
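`maskCredentialValue` comes from the copilot store and is now applied to every rendered text surface (content blocks, thinking blocks, subagent text, and edit summaries below) so credential IDs never reach the DOM. Its implementation is not shown in this diff; a plausible reduction over a known set of credential IDs might look like the following, offered strictly as a sketch of the masking idea:

```ts
// Assumption: the store knows which credential IDs to hide.
// This is NOT the store's actual implementation.
function maskCredentialValue(text: string, credentialIds: string[] = []): string {
  return credentialIds.reduce(
    (masked, id) => (id ? masked.split(id).join('•••') : masked),
    text
  )
}
```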
@@ -782,6 +782,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
const [isExpanded, setIsExpanded] = useState(true)
|
||||
const [duration, setDuration] = useState(0)
|
||||
const startTimeRef = useRef<number>(Date.now())
|
||||
const maskCredentialValue = useCopilotStore((s) => s.maskCredentialValue)
|
||||
const wasStreamingRef = useRef(false)
|
||||
|
||||
// Only show streaming animations for current message
|
||||
@@ -816,14 +817,16 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
currentText += parsed.cleanContent
|
||||
} else if (block.type === 'subagent_tool_call' && block.toolCall) {
|
||||
if (currentText.trim()) {
|
||||
segments.push({ type: 'text', content: currentText })
|
||||
// Mask any credential IDs in the accumulated text before displaying
|
||||
segments.push({ type: 'text', content: maskCredentialValue(currentText) })
|
||||
currentText = ''
|
||||
}
|
||||
segments.push({ type: 'tool', block })
|
||||
}
|
||||
}
|
||||
if (currentText.trim()) {
|
||||
segments.push({ type: 'text', content: currentText })
|
||||
// Mask any credential IDs in the accumulated text before displaying
|
||||
segments.push({ type: 'text', content: maskCredentialValue(currentText) })
|
||||
}
|
||||
|
||||
const allParsed = parseSpecialTags(allRawText)
|
||||
@@ -952,6 +955,7 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
toolCall: CopilotToolCall
|
||||
}) {
|
||||
const blocks = useWorkflowStore((s) => s.blocks)
|
||||
const maskCredentialValue = useCopilotStore((s) => s.maskCredentialValue)
|
||||
|
||||
const cachedBlockInfoRef = useRef<Record<string, { name: string; type: string }>>({})
|
||||
|
||||
@@ -983,6 +987,7 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
title: string
|
||||
value: any
|
||||
isPassword?: boolean
|
||||
isCredential?: boolean
|
||||
}
|
||||
|
||||
interface BlockChange {
|
||||
@@ -1091,6 +1096,7 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
title: subBlockConfig.title ?? subBlockConfig.id,
|
||||
value,
|
||||
isPassword: subBlockConfig.password === true,
|
||||
isCredential: subBlockConfig.type === 'oauth-input',
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1172,8 +1178,15 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
{subBlocksToShow && subBlocksToShow.length > 0 && (
|
||||
<div className='border-[var(--border-1)] border-t px-2.5 py-1.5'>
|
||||
{subBlocksToShow.map((sb) => {
|
||||
// Mask password fields like the canvas does
|
||||
const displayValue = sb.isPassword ? '•••' : getDisplayValue(sb.value)
|
||||
// Mask password fields and credential IDs
|
||||
let displayValue: string
|
||||
if (sb.isPassword) {
|
||||
displayValue = '•••'
|
||||
} else {
|
||||
// Get display value first, then mask any credential IDs that might be in it
|
||||
const rawValue = getDisplayValue(sb.value)
|
||||
displayValue = maskCredentialValue(rawValue)
|
||||
}
|
||||
return (
|
||||
<div key={sb.id} className='flex items-start gap-1.5 py-0.5 text-[11px]'>
|
||||
<span
|
||||
@@ -1412,10 +1425,13 @@ function RunSkipButtons({
setIsProcessing(true)
setButtonsHidden(true)
try {
// Add to auto-allowed list first
// Add to auto-allowed list - this also executes all pending integration tools of this type
await addAutoAllowedTool(toolCall.name)
// Then execute
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
// For client tools with interrupts (not integration tools), we still need to call handleRun
// since executeIntegrationTool only works for server-side tools
if (!isIntegrationTool(toolCall.name)) {
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
}
} finally {
setIsProcessing(false)
actionInProgressRef.current = false
@@ -1438,10 +1454,10 @@ function RunSkipButtons({

if (buttonsHidden) return null

// Hide "Always Allow" for integration tools (only show for client tools with interrupts)
const showAlwaysAllow = !isIntegrationTool(toolCall.name)
// Show "Always Allow" for all tools that require confirmation
const showAlwaysAllow = true

// Standardized buttons for all interrupt tools: Allow, (Always Allow for client tools only), Skip
// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
return (
<div className='mt-[10px] flex gap-[6px]'>
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>

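Note: the always-allow flow above now splits by tool kind. A minimal sketch of that flow, using only names visible in this hunk (everything else is illustrative):

async function onAlwaysAllow(toolCall: { name: string }) {
  // Adding to the auto-allowed list also executes pending integration tools server-side
  await addAutoAllowedTool(toolCall.name)
  // Client tools with interrupts are not covered by that path, so run them explicitly
  if (!isIntegrationTool(toolCall.name)) {
    await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
  }
}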
@@ -105,10 +105,10 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
isSendingMessage,
])

/** Load auto-allowed tools once on mount */
/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
const hasLoadedAutoAllowedToolsRef = useRef(false)
useEffect(() => {
if (hasMountedRef.current && !hasLoadedAutoAllowedToolsRef.current) {
if (!hasLoadedAutoAllowedToolsRef.current) {
hasLoadedAutoAllowedToolsRef.current = true
loadAutoAllowedTools().catch((err) => {
logger.warn('[Copilot] Failed to load auto-allowed tools', err)

@@ -13,6 +13,7 @@ import {
import { ReactFlowProvider } from 'reactflow'
import { Badge, Button, ChevronDown, Code, Combobox, Input, Label } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { extractReferencePrefixes } from '@/lib/workflows/sanitization/references'
import {
buildCanonicalIndex,
@@ -704,14 +705,6 @@ interface PreviewEditorProps {
onClose?: () => void
}

/**
* Format duration for display
*/
function formatDuration(ms: number): string {
if (ms < 1000) return `${ms}ms`
return `${(ms / 1000).toFixed(2)}s`
}

/** Minimum height for the connections section (header only) */
const MIN_CONNECTIONS_HEIGHT = 30
/** Maximum height for the connections section */
@@ -1180,7 +1173,7 @@ function PreviewEditorContent({
)}
{executionData.durationMs !== undefined && (
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
{formatDuration(executionData.durationMs)}
{formatDuration(executionData.durationMs, { precision: 2 })}
</span>
)}
</div>

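Note: the deleted local helper is replaced by the shared formatDuration from '@/lib/core/utils/formatting'. A sketch of what that shared helper plausibly looks like, inferred from the removed local version and the new { precision: 2 } call site; the body here is an assumption, not the real implementation:

function formatDuration(ms: number, opts?: { precision?: number }): string {
  // Assumed to mirror the removed local helper, with configurable decimals
  if (ms < 1000) return `${ms}ms`
  return `${(ms / 1000).toFixed(opts?.precision ?? 2)}s`
}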
@@ -16,6 +16,7 @@ import {
} from '@/components/emcn'
import { Input, Skeleton } from '@/components/ui'
import { useSession } from '@/lib/auth/auth-client'
import { formatDate } from '@/lib/core/utils/formatting'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import {
type ApiKey,
@@ -133,13 +134,9 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
}
}, [shouldScrollToBottom])

const formatDate = (dateString?: string) => {
const formatLastUsed = (dateString?: string) => {
if (!dateString) return 'Never'
return new Date(dateString).toLocaleDateString('en-US', {
year: 'numeric',
month: 'short',
day: 'numeric',
})
return formatDate(new Date(dateString))
}

return (
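Note: condensed, the renamed helper above is equivalent to this one-liner (assuming the shared formatDate renders the same 'Jan 1, 2025'-style output as the inlined toLocaleDateString call it replaces):

const formatLastUsed = (dateString?: string) =>
  dateString ? formatDate(new Date(dateString)) : 'Never'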
@@ -216,7 +213,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
{key.name}
</span>
<span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()})
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span>
</div>
<p className='truncate text-[13px] text-[var(--text-muted)]'>
@@ -251,7 +248,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
{key.name}
</span>
<span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()})
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span>
</div>
<p className='truncate text-[13px] text-[var(--text-muted)]'>
@@ -291,7 +288,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
{key.name}
</span>
<span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()})
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span>
</div>
<p className='truncate text-[13px] text-[var(--text-muted)]'>

@@ -13,6 +13,7 @@ import {
ModalHeader,
} from '@/components/emcn'
import { Input, Skeleton } from '@/components/ui'
import { formatDate } from '@/lib/core/utils/formatting'
import {
type CopilotKey,
useCopilotKeys,
@@ -115,13 +116,9 @@ export function Copilot() {
}
}

const formatDate = (dateString?: string | null) => {
const formatLastUsed = (dateString?: string | null) => {
if (!dateString) return 'Never'
return new Date(dateString).toLocaleDateString('en-US', {
year: 'numeric',
month: 'short',
day: 'numeric',
})
return formatDate(new Date(dateString))
}

const hasKeys = keys.length > 0
@@ -180,7 +177,7 @@ export function Copilot() {
{key.name || 'Unnamed Key'}
</span>
<span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()})
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span>
</div>
<p className='truncate text-[13px] text-[var(--text-muted)]'>

@@ -11,7 +11,7 @@ import { useSidebarStore } from '@/stores/sidebar/store'
* Avatar display configuration for responsive layout.
*/
const AVATAR_CONFIG = {
MIN_COUNT: 3,
MIN_COUNT: 4,
MAX_COUNT: 12,
WIDTH_PER_AVATAR: 20,
} as const
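Note: one plausible way a hook could derive the visible-avatar cap from this config; the clamping below is an assumption for illustration, not code from this diff:

// e.g. a 160px container yields floor(160 / 20) = 8, clamped into [4, 12]
const maxVisible = Math.min(
  AVATAR_CONFIG.MAX_COUNT,
  Math.max(AVATAR_CONFIG.MIN_COUNT, Math.floor(availableWidth / AVATAR_CONFIG.WIDTH_PER_AVATAR))
)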
@@ -106,7 +106,9 @@ export function Avatars({ workflowId }: AvatarsProps) {
}, [presenceUsers, currentWorkflowId, workflowId, currentSocketId])

/**
* Calculate visible users and overflow count
* Calculate visible users and overflow count.
* Shows up to maxVisible avatars, with overflow indicator for any remaining.
* Users are reversed so new avatars appear on the left (keeping right side stable).
*/
const { visibleUsers, overflowCount } = useMemo(() => {
if (workflowUsers.length === 0) {
@@ -116,7 +118,8 @@ export function Avatars({ workflowId }: AvatarsProps) {
const visible = workflowUsers.slice(0, maxVisible)
const overflow = Math.max(0, workflowUsers.length - maxVisible)

return { visibleUsers: visible, overflowCount: overflow }
// Reverse so rightmost avatars stay stable as new ones are revealed on the left
return { visibleUsers: [...visible].reverse(), overflowCount: overflow }
}, [workflowUsers, maxVisible])

if (visibleUsers.length === 0) {
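Note: a worked example of the reversal, with illustrative values:

// workflowUsers = [A, B, C, D, E], maxVisible = 3
// visible       = [A, B, C]  ->  reversed: [C, B, A]
// overflowCount = 5 - 3 = 2
// Assuming new users are prepended to workflowUsers, they surface on the
// left edge while avatars already on the right keep their positions.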
@@ -139,9 +142,8 @@ export function Avatars({ workflowId }: AvatarsProps) {
</Tooltip.Content>
</Tooltip.Root>
)}

{visibleUsers.map((user, index) => (
<UserAvatar key={user.socketId} user={user} index={overflowCount > 0 ? index + 1 : index} />
<UserAvatar key={user.socketId} user={user} index={index} />
))}
</div>
)

@@ -347,7 +347,7 @@ export function WorkflowItem({
) : (
<div
className={clsx(
'min-w-0 flex-1 truncate font-medium',
'min-w-0 truncate font-medium',
active
? 'text-[var(--text-primary)]'
: 'text-[var(--text-tertiary)] group-hover:text-[var(--text-primary)]'

@@ -242,15 +242,9 @@ Return ONLY the email body - no explanations, no extra text.`,
id: 'messageId',
title: 'Message ID',
type: 'short-input',
placeholder: 'Enter message ID to read (optional)',
condition: {
field: 'operation',
value: 'read_gmail',
and: {
field: 'folder',
value: '',
},
},
placeholder: 'Read specific email by ID (overrides label/folder)',
condition: { field: 'operation', value: 'read_gmail' },
mode: 'advanced',
},
// Search Fields
{

@@ -129,12 +129,9 @@ ROUTING RULES:
3. If the context is even partially related to a route's description, select that route
4. ONLY output NO_MATCH if the context is completely unrelated to ALL route descriptions

OUTPUT FORMAT:
- Output EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
- No explanation, no punctuation, no additional text
- Just the route ID or NO_MATCH

Your response:`
Respond with a JSON object containing:
- route: EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
- reasoning: A brief explanation (1-2 sentences) of why you chose this route`
}

/**
@@ -272,6 +269,7 @@ interface RouterV2Response extends ToolResponse {
total: number
}
selectedRoute: string
reasoning: string
selectedPath: {
blockId: string
blockType: string
@@ -355,6 +353,7 @@ export const RouterV2Block: BlockConfig<RouterV2Response> = {
tokens: { type: 'json', description: 'Token usage' },
cost: { type: 'json', description: 'Cost information' },
selectedRoute: { type: 'string', description: 'Selected route ID' },
reasoning: { type: 'string', description: 'Explanation of why this route was chosen' },
selectedPath: { type: 'json', description: 'Selected routing path' },
},
}

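Note: since the router now requests a JSON object instead of a bare route ID, the handler needs a tolerant parser. A minimal sketch consistent with the fallback behavior exercised by the tests later in this diff; the function name is illustrative:

interface RouterReply {
  route: string
  reasoning: string
}

function parseRouterReply(content: string): RouterReply {
  try {
    const parsed = JSON.parse(content) as Partial<RouterReply>
    return { route: parsed.route ?? 'NO_MATCH', reasoning: parsed.reasoning ?? '' }
  } catch {
    // Fallback when the model returns a bare route ID instead of JSON
    return { route: content.trim(), reasoning: '' }
  }
}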
@@ -23,7 +23,13 @@ import { cn } from '@/lib/core/utils/cn'
* ```
*/
const checkboxVariants = cva(
'peer shrink-0 rounded-sm border border-[var(--border-1)] bg-[var(--surface-4)] ring-offset-background transition-colors hover:border-[var(--border-muted)] hover:bg-[var(--surface-7)] focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50 data-[state=checked]:border-[var(--text-muted)] data-[state=checked]:bg-[var(--text-muted)] data-[state=checked]:text-white dark:bg-[var(--surface-5)] dark:data-[state=checked]:border-[var(--surface-7)] dark:data-[state=checked]:bg-[var(--surface-7)] dark:data-[state=checked]:text-[var(--text-primary)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]',
[
'peer shrink-0 cursor-pointer rounded-[4px] border transition-colors',
'border-[var(--border-1)] bg-transparent',
'focus-visible:outline-none',
'data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50',
'data-[state=checked]:border-[var(--text-primary)] data-[state=checked]:bg-[var(--text-primary)]',
].join(' '),
{
variants: {
size: {
@@ -83,7 +89,7 @@ const Checkbox = React.forwardRef<React.ElementRef<typeof CheckboxPrimitive.Root
className={cn(checkboxVariants({ size }), className)}
{...props}
>
<CheckboxPrimitive.Indicator className={cn('flex items-center justify-center text-current')}>
<CheckboxPrimitive.Indicator className='flex items-center justify-center text-[var(--white)]'>
<Check className={cn(checkboxIconVariants({ size }))} />
</CheckboxPrimitive.Indicator>
</CheckboxPrimitive.Root>

@@ -24,6 +24,71 @@ function createBlock(id: string, metadataId: string): SerializedBlock {
}
}

describe('DAGBuilder disabled subflow validation', () => {
it('skips validation for disabled loops with no blocks inside', () => {
const workflow: SerializedWorkflow = {
version: '1',
blocks: [
createBlock('start', BlockType.STARTER),
{ ...createBlock('loop-block', BlockType.FUNCTION), enabled: false },
],
connections: [],
loops: {
'loop-1': {
id: 'loop-1',
nodes: [], // Empty loop - would normally throw
iterations: 3,
},
},
}

const builder = new DAGBuilder()
// Should not throw even though loop has no blocks inside
expect(() => builder.build(workflow)).not.toThrow()
})

it('skips validation for disabled parallels with no blocks inside', () => {
const workflow: SerializedWorkflow = {
version: '1',
blocks: [createBlock('start', BlockType.STARTER)],
connections: [],
loops: {},
parallels: {
'parallel-1': {
id: 'parallel-1',
nodes: [], // Empty parallel - would normally throw
},
},
}

const builder = new DAGBuilder()
// Should not throw even though parallel has no blocks inside
expect(() => builder.build(workflow)).not.toThrow()
})

it('skips validation for loops where all inner blocks are disabled', () => {
const workflow: SerializedWorkflow = {
version: '1',
blocks: [
createBlock('start', BlockType.STARTER),
{ ...createBlock('inner-block', BlockType.FUNCTION), enabled: false },
],
connections: [],
loops: {
'loop-1': {
id: 'loop-1',
nodes: ['inner-block'], // Has node but it's disabled
iterations: 3,
},
},
}

const builder = new DAGBuilder()
// Should not throw - loop is effectively disabled since all inner blocks are disabled
expect(() => builder.build(workflow)).not.toThrow()
})
})

describe('DAGBuilder human-in-the-loop transformation', () => {
it('creates trigger nodes and rewires edges for pause blocks', () => {
const workflow: SerializedWorkflow = {

@@ -136,17 +136,18 @@ export class DAGBuilder {
nodes: string[] | undefined,
type: 'Loop' | 'Parallel'
): void {
const sentinelStartId =
type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
const sentinelStartNode = dag.nodes.get(sentinelStartId)

if (!sentinelStartNode) return

if (!nodes || nodes.length === 0) {
throw new Error(
`${type} has no blocks inside. Add at least one block to the ${type.toLowerCase()}.`
)
}

const sentinelStartId =
type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
const sentinelStartNode = dag.nodes.get(sentinelStartId)
if (!sentinelStartNode) return

const hasConnections = Array.from(sentinelStartNode.outgoingEdges.values()).some((edge) =>
nodes.includes(extractBaseBlockId(edge.target))
)

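Note: the hunk above moves the sentinel lookup ahead of the empty-subflow check. Disabled subflows never get sentinel nodes built, so the early return now fires before the "no blocks inside" validation, which is what the new tests assert. A sketch of the resulting order:

const sentinelStartNode = dag.nodes.get(sentinelStartId)
if (!sentinelStartNode) return // disabled subflow: nothing to validate

if (!nodes || nodes.length === 0) {
  throw new Error(`${type} has no blocks inside.`)
}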
File diff suppressed because it is too large
@@ -20,21 +20,13 @@ export class EdgeManager {
const activatedTargets: string[] = []
const edgesToDeactivate: Array<{ target: string; handle?: string }> = []

// First pass: categorize edges as activating or deactivating
// Don't modify incomingEdges yet - we need the original state for deactivation checks
for (const [edgeId, edge] of node.outgoingEdges) {
for (const [, edge] of node.outgoingEdges) {
if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) {
continue
}

const shouldActivate = this.shouldActivateEdge(edge, output)
if (!shouldActivate) {
const isLoopEdge =
edge.sourceHandle === EDGE.LOOP_CONTINUE ||
edge.sourceHandle === EDGE.LOOP_CONTINUE_ALT ||
edge.sourceHandle === EDGE.LOOP_EXIT

if (!isLoopEdge) {
if (!this.shouldActivateEdge(edge, output)) {
if (!this.isLoopEdge(edge.sourceHandle)) {
edgesToDeactivate.push({ target: edge.target, handle: edge.sourceHandle })
}
continue
@@ -43,13 +35,19 @@
activatedTargets.push(edge.target)
}

// Second pass: process deactivations while incomingEdges is still intact
// This ensures hasActiveIncomingEdges can find all potential sources
const cascadeTargets = new Set<string>()
for (const { target, handle } of edgesToDeactivate) {
this.deactivateEdgeAndDescendants(node.id, target, handle)
this.deactivateEdgeAndDescendants(node.id, target, handle, cascadeTargets)
}

if (activatedTargets.length === 0) {
for (const { target } of edgesToDeactivate) {
if (this.isTerminalControlNode(target)) {
cascadeTargets.add(target)
}
}
}

// Third pass: update incomingEdges for activated targets
for (const targetId of activatedTargets) {
const targetNode = this.dag.nodes.get(targetId)
if (!targetNode) {
@@ -59,28 +57,25 @@
targetNode.incomingEdges.delete(node.id)
}

// Fourth pass: check readiness after all edge processing is complete
for (const targetId of activatedTargets) {
const targetNode = this.dag.nodes.get(targetId)
if (targetNode && this.isNodeReady(targetNode)) {
if (this.isTargetReady(targetId)) {
readyNodes.push(targetId)
}
}

for (const targetId of cascadeTargets) {
if (!readyNodes.includes(targetId) && !activatedTargets.includes(targetId)) {
if (this.isTargetReady(targetId)) {
readyNodes.push(targetId)
}
}
}

return readyNodes
}

isNodeReady(node: DAGNode): boolean {
if (node.incomingEdges.size === 0) {
return true
}

const activeIncomingCount = this.countActiveIncomingEdges(node)
if (activeIncomingCount > 0) {
return false
}

return true
return node.incomingEdges.size === 0 || this.countActiveIncomingEdges(node) === 0
}

restoreIncomingEdge(targetNodeId: string, sourceNodeId: string): void {
@@ -99,13 +94,10 @@

/**
* Clear deactivated edges for a set of nodes (used when restoring loop state for next iteration).
* This ensures error/success edges can be re-evaluated on each iteration.
*/
clearDeactivatedEdgesForNodes(nodeIds: Set<string>): void {
const edgesToRemove: string[] = []
for (const edgeKey of this.deactivatedEdges) {
// Edge key format is "sourceId-targetId-handle"
// Check if either source or target is in the nodeIds set
for (const nodeId of nodeIds) {
if (edgeKey.startsWith(`${nodeId}-`) || edgeKey.includes(`-${nodeId}-`)) {
edgesToRemove.push(edgeKey)
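Note: the prefix and substring checks above depend on the edge-key layout named in the comment. A minimal sketch of a key builder matching that format; the real createEdgeKey used by this class may differ:

function createEdgeKey(sourceId: string, targetId: string, handle?: string): string {
  // "sourceId-targetId-handle"; the handle segment distinguishes multiple
  // edges between the same pair of nodes (e.g. condition branches)
  return `${sourceId}-${targetId}-${handle ?? ''}`
}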
@@ -118,6 +110,44 @@
}
}

private isTargetReady(targetId: string): boolean {
const targetNode = this.dag.nodes.get(targetId)
return targetNode ? this.isNodeReady(targetNode) : false
}

private isLoopEdge(handle?: string): boolean {
return (
handle === EDGE.LOOP_CONTINUE ||
handle === EDGE.LOOP_CONTINUE_ALT ||
handle === EDGE.LOOP_EXIT
)
}

private isControlEdge(handle?: string): boolean {
return (
handle === EDGE.LOOP_CONTINUE ||
handle === EDGE.LOOP_CONTINUE_ALT ||
handle === EDGE.LOOP_EXIT ||
handle === EDGE.PARALLEL_EXIT
)
}

private isBackwardsEdge(sourceHandle?: string): boolean {
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
}

private isTerminalControlNode(nodeId: string): boolean {
const node = this.dag.nodes.get(nodeId)
if (!node || node.outgoingEdges.size === 0) return false

for (const [, edge] of node.outgoingEdges) {
if (!this.isControlEdge(edge.sourceHandle)) {
return false
}
}
return true
}

private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean {
const handle = edge.sourceHandle

@@ -159,14 +189,12 @@
}
}

private isBackwardsEdge(sourceHandle?: string): boolean {
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
}

private deactivateEdgeAndDescendants(
sourceId: string,
targetId: string,
sourceHandle?: string
sourceHandle?: string,
cascadeTargets?: Set<string>,
isCascade = false
): void {
const edgeKey = this.createEdgeKey(sourceId, targetId, sourceHandle)
if (this.deactivatedEdges.has(edgeKey)) {
@@ -174,38 +202,46 @@
}

this.deactivatedEdges.add(edgeKey)

const targetNode = this.dag.nodes.get(targetId)
if (!targetNode) return

// Check if target has other active incoming edges
// Pass the specific edge key being deactivated, not just source ID,
// to handle multiple edges from same source to same target (e.g., condition branches)
const hasOtherActiveIncoming = this.hasActiveIncomingEdges(targetNode, edgeKey)
if (!hasOtherActiveIncoming) {
for (const [_, outgoingEdge] of targetNode.outgoingEdges) {
this.deactivateEdgeAndDescendants(targetId, outgoingEdge.target, outgoingEdge.sourceHandle)
if (isCascade && this.isTerminalControlNode(targetId)) {
cascadeTargets?.add(targetId)
}

if (this.hasActiveIncomingEdges(targetNode, edgeKey)) {
return
}

for (const [, outgoingEdge] of targetNode.outgoingEdges) {
if (!this.isControlEdge(outgoingEdge.sourceHandle)) {
this.deactivateEdgeAndDescendants(
targetId,
outgoingEdge.target,
outgoingEdge.sourceHandle,
cascadeTargets,
true
)
}
}
}

/**
* Checks if a node has any active incoming edges besides the one being excluded.
* This properly handles the case where multiple edges from the same source go to
* the same target (e.g., multiple condition branches pointing to one block).
*/
private hasActiveIncomingEdges(node: DAGNode, excludeEdgeKey: string): boolean {
for (const incomingSourceId of node.incomingEdges) {
const incomingNode = this.dag.nodes.get(incomingSourceId)
if (!incomingNode) continue

for (const [_, incomingEdge] of incomingNode.outgoingEdges) {
for (const [, incomingEdge] of incomingNode.outgoingEdges) {
if (incomingEdge.target === node.id) {
const incomingEdgeKey = this.createEdgeKey(
incomingSourceId,
node.id,
incomingEdge.sourceHandle
)
// Skip the specific edge being excluded, but check other edges from same source
if (incomingEdgeKey === excludeEdgeKey) continue
if (!this.deactivatedEdges.has(incomingEdgeKey)) {
return true

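Note: a worked example of the case hasActiveIncomingEdges guards against, with illustrative node names:

// Condition block C has two branches into the same target T:
//   key1 = createEdgeKey('C', 'T', 'condition-true')   // being deactivated
//   key2 = createEdgeKey('C', 'T', 'condition-false')  // still active
// Excluding only the source ID 'C' would wrongly treat T as unreachable;
// excluding the specific edge key keeps T alive while key2 is active.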
@@ -554,6 +554,413 @@ describe('ExecutionEngine', () => {
})
})

describe('Error handling in execution', () => {
it('should fail execution when a single node throws an error', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })

const dag = createMockDAG([startNode, errorNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
throw new Error('Block execution failed')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Block execution failed')
})

it('should stop parallel branches when one branch throws an error', async () => {
const startNode = createMockNode('start', 'starter')
const parallelNodes = Array.from({ length: 5 }, (_, i) =>
createMockNode(`parallel${i}`, 'function')
)

parallelNodes.forEach((_, i) => {
startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
})

const dag = createMockDAG([startNode, ...parallelNodes])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
return []
})

const executedNodes: string[] = []
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
executedNodes.push(nodeId)
if (nodeId === 'parallel0') {
await new Promise((resolve) => setTimeout(resolve, 10))
throw new Error('Parallel branch failed')
}
await new Promise((resolve) => setTimeout(resolve, 100))
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Parallel branch failed')
})

it('should capture only the first error when multiple parallel branches fail', async () => {
const startNode = createMockNode('start', 'starter')
const parallelNodes = Array.from({ length: 3 }, (_, i) =>
createMockNode(`parallel${i}`, 'function')
)

parallelNodes.forEach((_, i) => {
startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
})

const dag = createMockDAG([startNode, ...parallelNodes])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'parallel0') {
await new Promise((resolve) => setTimeout(resolve, 10))
throw new Error('First error')
}
if (nodeId === 'parallel1') {
await new Promise((resolve) => setTimeout(resolve, 20))
throw new Error('Second error')
}
if (nodeId === 'parallel2') {
await new Promise((resolve) => setTimeout(resolve, 30))
throw new Error('Third error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('First error')
})

it('should wait for ongoing executions to complete before throwing error', async () => {
const startNode = createMockNode('start', 'starter')
const fastErrorNode = createMockNode('fast-error', 'function')
const slowNode = createMockNode('slow', 'function')

startNode.outgoingEdges.set('edge1', { target: 'fast-error' })
startNode.outgoingEdges.set('edge2', { target: 'slow' })

const dag = createMockDAG([startNode, fastErrorNode, slowNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['fast-error', 'slow']
return []
})

let slowNodeCompleted = false
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'fast-error') {
await new Promise((resolve) => setTimeout(resolve, 10))
throw new Error('Fast error')
}
if (nodeId === 'slow') {
await new Promise((resolve) => setTimeout(resolve, 50))
slowNodeCompleted = true
return { nodeId, output: {}, isFinalOutput: false }
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Fast error')

expect(slowNodeCompleted).toBe(true)
})

it('should not queue new nodes after an error occurs', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
const afterErrorNode = createMockNode('after-error', 'function')

startNode.outgoingEdges.set('edge1', { target: 'error-node' })
errorNode.outgoingEdges.set('edge2', { target: 'after-error' })

const dag = createMockDAG([startNode, errorNode, afterErrorNode])
const context = createMockContext()

const queuedNodes: string[] = []
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') {
queuedNodes.push('error-node')
return ['error-node']
}
if (node.id === 'error-node') {
queuedNodes.push('after-error')
return ['after-error']
}
return []
})

const executedNodes: string[] = []
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
executedNodes.push(nodeId)
if (nodeId === 'error-node') {
throw new Error('Node error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Node error')

expect(executedNodes).not.toContain('after-error')
})

it('should populate error result with metadata when execution fails', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })

const dag = createMockDAG([startNode, errorNode])
const context = createMockContext()
context.blockLogs.push({
blockId: 'start',
blockName: 'Start',
blockType: 'starter',
startedAt: new Date().toISOString(),
endedAt: new Date().toISOString(),
durationMs: 10,
success: true,
})

const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
const error = new Error('Execution failed') as any
error.executionResult = {
success: false,
output: { partial: 'data' },
logs: context.blockLogs,
metadata: context.metadata,
}
throw error
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

try {
await engine.run('start')
expect.fail('Should have thrown')
} catch (error: any) {
expect(error.executionResult).toBeDefined()
expect(error.executionResult.metadata.endTime).toBeDefined()
expect(error.executionResult.metadata.duration).toBeDefined()
}
})

it('should prefer cancellation status over error when both occur', async () => {
const abortController = new AbortController()

const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })

const dag = createMockDAG([startNode, errorNode])
const context = createMockContext({ abortSignal: abortController.signal })
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
abortController.abort()
throw new Error('Node error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
const result = await engine.run('start')

expect(result.status).toBe('cancelled')
expect(result.success).toBe(false)
})

it('should stop loop iteration when error occurs in loop body', async () => {
const loopStartNode = createMockNode('loop-start', 'loop_sentinel')
loopStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId: 'loop1' }

const loopBodyNode = createMockNode('loop-body', 'function')
loopBodyNode.metadata = { isLoopNode: true, loopId: 'loop1' }

const loopEndNode = createMockNode('loop-end', 'loop_sentinel')
loopEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId: 'loop1' }

const afterLoopNode = createMockNode('after-loop', 'function')

loopStartNode.outgoingEdges.set('edge1', { target: 'loop-body' })
loopBodyNode.outgoingEdges.set('edge2', { target: 'loop-end' })
loopEndNode.outgoingEdges.set('loop_continue', {
target: 'loop-start',
sourceHandle: 'loop_continue',
})
loopEndNode.outgoingEdges.set('loop_complete', {
target: 'after-loop',
sourceHandle: 'loop_complete',
})

const dag = createMockDAG([loopStartNode, loopBodyNode, loopEndNode, afterLoopNode])
const context = createMockContext()

let iterationCount = 0
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'loop-start') return ['loop-body']
if (node.id === 'loop-body') return ['loop-end']
if (node.id === 'loop-end') {
iterationCount++
if (iterationCount < 5) return ['loop-start']
return ['after-loop']
}
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'loop-body' && iterationCount >= 2) {
throw new Error('Loop body error on iteration 3')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('loop-start')).rejects.toThrow('Loop body error on iteration 3')

expect(iterationCount).toBeLessThanOrEqual(3)
})

it('should handle error that is not an Error instance', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })

const dag = createMockDAG([startNode, errorNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
throw 'String error message'
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('String error message')
})

it('should preserve partial output when error occurs after some blocks complete', async () => {
const startNode = createMockNode('start', 'starter')
const successNode = createMockNode('success', 'function')
const errorNode = createMockNode('error-node', 'function')

startNode.outgoingEdges.set('edge1', { target: 'success' })
successNode.outgoingEdges.set('edge2', { target: 'error-node' })

const dag = createMockDAG([startNode, successNode, errorNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['success']
if (node.id === 'success') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'success') {
return { nodeId, output: { successData: 'preserved' }, isFinalOutput: false }
}
if (nodeId === 'error-node') {
throw new Error('Late error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

try {
await engine.run('start')
expect.fail('Should have thrown')
} catch (error: any) {
// Verify the error was thrown
expect(error.message).toBe('Late error')
// The partial output should be available in executionResult if attached
if (error.executionResult) {
expect(error.executionResult.output).toBeDefined()
}
}
})
})

describe('Cancellation flag behavior', () => {
it('should set cancelledFlag when abort signal fires', async () => {
const abortController = new AbortController()

@@ -25,6 +25,8 @@ export class ExecutionEngine {
private pausedBlocks: Map<string, PauseMetadata> = new Map()
private allowResumeTriggers: boolean
private cancelledFlag = false
private errorFlag = false
private executionError: Error | null = null
private lastCancellationCheck = 0
private readonly useRedisCancellation: boolean
private readonly CANCELLATION_CHECK_INTERVAL_MS = 500
@@ -103,7 +105,7 @@
this.initializeQueue(triggerBlockId)

while (this.hasWork()) {
if (await this.checkCancellation()) {
if ((await this.checkCancellation()) || this.errorFlag) {
break
}
await this.processQueue()
@@ -113,6 +115,11 @@
await this.waitForAllExecutions()
}

// Rethrow the captured error so it's handled by the catch block
if (this.errorFlag && this.executionError) {
throw this.executionError
}

if (this.pausedBlocks.size > 0) {
return this.buildPausedResult(startTime)
}
@@ -196,11 +203,17 @@
}

private trackExecution(promise: Promise<void>): void {
this.executing.add(promise)
promise.catch(() => {})
promise.finally(() => {
this.executing.delete(promise)
})
const trackedPromise = promise
.catch((error) => {
if (!this.errorFlag) {
this.errorFlag = true
this.executionError = error instanceof Error ? error : new Error(String(error))
}
})
.finally(() => {
this.executing.delete(trackedPromise)
})
this.executing.add(trackedPromise)
}

private async waitForAnyExecution(): Promise<void> {
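Note: the old trackExecution swallowed rejections and tracked the raw promise; the new one tracks the chained promise and records only the first failure. The pattern in isolation, with the engine's private fields replaced by local stand-ins:

const executing = new Set<Promise<void>>()
let firstError: Error | null = null

function track(promise: Promise<void>): void {
  const tracked = promise
    .catch((error) => {
      // Keep only the first rejection; later failures are usually downstream of it
      if (!firstError) firstError = error instanceof Error ? error : new Error(String(error))
    })
    .finally(() => {
      // Delete the chained promise, i.e. the one that was added to the set
      executing.delete(tracked)
    })
  executing.add(tracked)
}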
@@ -315,7 +328,7 @@

private async processQueue(): Promise<void> {
while (this.readyQueue.length > 0) {
if (await this.checkCancellation()) {
if ((await this.checkCancellation()) || this.errorFlag) {
break
}
const nodeId = this.dequeue()
@@ -324,7 +337,7 @@
this.trackExecution(promise)
}

if (this.executing.size > 0 && !this.cancelledFlag) {
if (this.executing.size > 0 && !this.cancelledFlag && !this.errorFlag) {
await this.waitForAnyExecution()
}
}

@@ -305,7 +305,7 @@ export class AgentBlockHandler implements BlockHandler {
base.executeFunction = async (callParams: Record<string, any>) => {
const mergedParams = mergeToolParameters(userProvidedParams, callParams)

const { blockData, blockNameMapping } = collectBlockData(ctx)
const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)

const result = await executeTool(
'function_execute',
@@ -317,6 +317,7 @@
workflowVariables: ctx.workflowVariables || {},
blockData,
blockNameMapping,
blockOutputSchemas,
isCustomTool: true,
_context: {
workflowId: ctx.workflowId,

@@ -26,7 +26,7 @@ export async function evaluateConditionExpression(
const contextSetup = `const context = ${JSON.stringify(evalContext)};`
const code = `${contextSetup}\nreturn Boolean(${conditionExpression})`

const { blockData, blockNameMapping } = collectBlockData(ctx)
const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)

const result = await executeTool(
'function_execute',
@@ -37,6 +37,7 @@
workflowVariables: ctx.workflowVariables || {},
blockData,
blockNameMapping,
blockOutputSchemas,
_context: {
workflowId: ctx.workflowId,
workspaceId: ctx.workspaceId,

@@ -75,7 +75,12 @@ describe('FunctionBlockHandler', () => {
workflowVariables: {},
blockData: {},
blockNameMapping: {},
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
blockOutputSchemas: {},
_context: {
workflowId: mockContext.workflowId,
workspaceId: mockContext.workspaceId,
isDeployedContext: mockContext.isDeployedContext,
},
}
const expectedOutput: any = { result: 'Success' }

@@ -84,8 +89,8 @@
expect(mockExecuteTool).toHaveBeenCalledWith(
'function_execute',
expectedToolParams,
false, // skipPostProcess
mockContext // execution context
false,
mockContext
)
expect(result).toEqual(expectedOutput)
})
@@ -107,7 +112,12 @@
workflowVariables: {},
blockData: {},
blockNameMapping: {},
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
blockOutputSchemas: {},
_context: {
workflowId: mockContext.workflowId,
workspaceId: mockContext.workspaceId,
isDeployedContext: mockContext.isDeployedContext,
},
}
const expectedOutput: any = { result: 'Success' }

@@ -116,8 +126,8 @@
expect(mockExecuteTool).toHaveBeenCalledWith(
'function_execute',
expectedToolParams,
false, // skipPostProcess
mockContext // execution context
false,
mockContext
)
expect(result).toEqual(expectedOutput)
})
@@ -132,7 +142,12 @@
workflowVariables: {},
blockData: {},
blockNameMapping: {},
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
blockOutputSchemas: {},
_context: {
workflowId: mockContext.workflowId,
workspaceId: mockContext.workspaceId,
isDeployedContext: mockContext.isDeployedContext,
},
}

await handler.execute(mockContext, mockBlock, inputs)

@@ -23,7 +23,7 @@ export class FunctionBlockHandler implements BlockHandler {
? inputs.code.map((c: { content: string }) => c.content).join('\n')
: inputs.code

const { blockData, blockNameMapping } = collectBlockData(ctx)
const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)

const result = await executeTool(
'function_execute',
@@ -35,6 +35,7 @@
workflowVariables: ctx.workflowVariables || {},
blockData,
blockNameMapping,
blockOutputSchemas,
_context: {
workflowId: ctx.workflowId,
workspaceId: ctx.workspaceId,

@@ -1,7 +1,7 @@
import '@sim/testing/mocks/executor'

import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { generateRouterPrompt } from '@/blocks/blocks/router'
import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
import { BlockType } from '@/executor/constants'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import type { ExecutionContext } from '@/executor/types'
@@ -9,6 +9,7 @@ import { getProviderFromModel } from '@/providers/utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'

const mockGenerateRouterPrompt = generateRouterPrompt as Mock
const mockGenerateRouterV2Prompt = generateRouterV2Prompt as Mock
const mockGetProviderFromModel = getProviderFromModel as Mock
const mockFetch = global.fetch as unknown as Mock

@@ -44,7 +45,7 @@ describe('RouterBlockHandler', () => {
metadata: { id: BlockType.ROUTER, name: 'Test Router' },
position: { x: 50, y: 50 },
config: { tool: BlockType.ROUTER, params: {} },
inputs: { prompt: 'string', model: 'string' }, // Using ParamType strings
inputs: { prompt: 'string', model: 'string' },
outputs: {},
enabled: true,
}
@@ -72,14 +73,11 @@ describe('RouterBlockHandler', () => {
workflow: mockWorkflow as SerializedWorkflow,
}

// Reset mocks using vi
vi.clearAllMocks()

// Default mock implementations
mockGetProviderFromModel.mockReturnValue('openai')
mockGenerateRouterPrompt.mockReturnValue('Generated System Prompt')

// Set up fetch mock to return a successful response
mockFetch.mockImplementation(() => {
return Promise.resolve({
ok: true,
@@ -147,7 +145,6 @@ describe('RouterBlockHandler', () => {
})
)

// Verify the request body contains the expected data
const fetchCallArgs = mockFetch.mock.calls[0]
const requestBody = JSON.parse(fetchCallArgs[1].body)
expect(requestBody).toMatchObject({
@@ -180,7 +177,6 @@ describe('RouterBlockHandler', () => {
const inputs = { prompt: 'Test' }
mockContext.workflow!.blocks = [mockBlock, mockTargetBlock2]

// Expect execute to throw because getTargetBlocks (called internally) will throw
await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
'Target block target-block-1 not found'
)
@@ -190,7 +186,6 @@ describe('RouterBlockHandler', () => {
it('should throw error if LLM response is not a valid target block ID', async () => {
const inputs = { prompt: 'Test', apiKey: 'test-api-key' }

// Override fetch mock to return an invalid block ID
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
@@ -228,7 +223,6 @@ describe('RouterBlockHandler', () => {
it('should handle server error responses', async () => {
const inputs = { prompt: 'Test error handling.', apiKey: 'test-api-key' }

// Override fetch mock to return an error
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: false,
@@ -276,13 +270,12 @@ describe('RouterBlockHandler', () => {

mockGetProviderFromModel.mockReturnValue('vertex')

// Mock the database query for Vertex credential
const mockDb = await import('@sim/db')
const mockAccount = {
id: 'test-vertex-credential-id',
accessToken: 'mock-access-token',
refreshToken: 'mock-refresh-token',
expiresAt: new Date(Date.now() + 3600000), // 1 hour from now
expiresAt: new Date(Date.now() + 3600000),
}
vi.spyOn(mockDb.db.query.account, 'findFirst').mockResolvedValue(mockAccount as any)

@@ -300,3 +293,287 @@ describe('RouterBlockHandler', () => {
expect(requestBody.apiKey).toBe('mock-access-token')
})
})

describe('RouterBlockHandler V2', () => {
let handler: RouterBlockHandler
let mockRouterV2Block: SerializedBlock
let mockContext: ExecutionContext
let mockWorkflow: Partial<SerializedWorkflow>
let mockTargetBlock1: SerializedBlock
let mockTargetBlock2: SerializedBlock

beforeEach(() => {
mockTargetBlock1 = {
id: 'target-block-1',
metadata: { id: 'agent', name: 'Support Agent' },
position: { x: 100, y: 100 },
config: { tool: 'agent', params: {} },
inputs: {},
outputs: {},
enabled: true,
}
mockTargetBlock2 = {
id: 'target-block-2',
metadata: { id: 'agent', name: 'Sales Agent' },
position: { x: 100, y: 150 },
config: { tool: 'agent', params: {} },
inputs: {},
outputs: {},
enabled: true,
}
mockRouterV2Block = {
id: 'router-v2-block-1',
metadata: { id: BlockType.ROUTER_V2, name: 'Test Router V2' },
position: { x: 50, y: 50 },
config: { tool: BlockType.ROUTER_V2, params: {} },
inputs: {},
outputs: {},
enabled: true,
}
mockWorkflow = {
blocks: [mockRouterV2Block, mockTargetBlock1, mockTargetBlock2],
connections: [
{
source: mockRouterV2Block.id,
target: mockTargetBlock1.id,
sourceHandle: 'router-route-support',
},
{
source: mockRouterV2Block.id,
target: mockTargetBlock2.id,
sourceHandle: 'router-route-sales',
},
],
}

handler = new RouterBlockHandler({})

mockContext = {
workflowId: 'test-workflow-id',
blockStates: new Map(),
blockLogs: [],
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopExecutions: new Map(),
completedLoops: new Set(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
workflow: mockWorkflow as SerializedWorkflow,
}

vi.clearAllMocks()

mockGetProviderFromModel.mockReturnValue('openai')
mockGenerateRouterV2Prompt.mockReturnValue('Generated V2 System Prompt')
})

it('should handle router_v2 blocks', () => {
expect(handler.canHandle(mockRouterV2Block)).toBe(true)
})

it('should execute router V2 and return reasoning', async () => {
const inputs = {
context: 'I need help with a billing issue',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([
{ id: 'route-support', title: 'Support', value: 'Customer support inquiries' },
{ id: 'route-sales', title: 'Sales', value: 'Sales and pricing questions' },
]),
}

mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({
route: 'route-support',
reasoning: 'The user mentioned a billing issue which is a customer support matter.',
}),
model: 'gpt-4o',
tokens: { input: 150, output: 25, total: 175 },
}),
})
})

const result = await handler.execute(mockContext, mockRouterV2Block, inputs)

expect(result).toMatchObject({
context: 'I need help with a billing issue',
model: 'gpt-4o',
selectedRoute: 'route-support',
reasoning: 'The user mentioned a billing issue which is a customer support matter.',
selectedPath: {
blockId: 'target-block-1',
blockType: 'agent',
blockTitle: 'Support Agent',
},
})
})

it('should include responseFormat in provider request', async () => {
const inputs = {
context: 'Test context',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description 1' }]),
}

mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({ route: 'route-1', reasoning: 'Test reasoning' }),
model: 'gpt-4o',
tokens: { input: 100, output: 20, total: 120 },
}),
})
})

await handler.execute(mockContext, mockRouterV2Block, inputs)

const fetchCallArgs = mockFetch.mock.calls[0]
const requestBody = JSON.parse(fetchCallArgs[1].body)

expect(requestBody.responseFormat).toEqual({
name: 'router_response',
schema: {
type: 'object',
properties: {
route: {
type: 'string',
description: 'The selected route ID or NO_MATCH',
},
reasoning: {
type: 'string',
description: 'Brief explanation of why this route was chosen',
},
},
required: ['route', 'reasoning'],
additionalProperties: false,
},
strict: true,
})
})

it('should handle NO_MATCH response with reasoning', async () => {
const inputs = {
context: 'Random unrelated query',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Specific topic' }]),
}

mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({
route: 'NO_MATCH',
reasoning: 'The query does not relate to any available route.',
}),
model: 'gpt-4o',
tokens: { input: 100, output: 20, total: 120 },
}),
})
})

await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
'Router could not determine a matching route: The query does not relate to any available route.'
)
})

it('should throw error for invalid route ID in response', async () => {
const inputs = {
context: 'Test context',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
}

mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({ route: 'invalid-route', reasoning: 'Some reasoning' }),
model: 'gpt-4o',
tokens: { input: 100, output: 20, total: 120 },
}),
})
})

await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
/Router could not determine a valid route/
)
})

it('should handle routes passed as array instead of JSON string', async () => {
|
||||
const inputs = {
|
||||
context: 'Test context',
|
||||
model: 'gpt-4o',
|
||||
apiKey: 'test-api-key',
|
||||
routes: [{ id: 'route-1', title: 'Route 1', value: 'Description' }],
|
||||
}
|
||||
|
||||
mockFetch.mockImplementationOnce(() => {
|
||||
return Promise.resolve({
|
||||
ok: true,
|
||||
json: () =>
|
||||
Promise.resolve({
|
||||
content: JSON.stringify({ route: 'route-1', reasoning: 'Matched route 1' }),
|
||||
model: 'gpt-4o',
|
||||
tokens: { input: 100, output: 20, total: 120 },
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
const result = await handler.execute(mockContext, mockRouterV2Block, inputs)
|
||||
|
||||
expect(result.selectedRoute).toBe('route-1')
|
||||
expect(result.reasoning).toBe('Matched route 1')
|
||||
})
|
||||
|
||||
it('should throw error when no routes are defined', async () => {
|
||||
const inputs = {
|
||||
context: 'Test context',
|
||||
model: 'gpt-4o',
|
||||
apiKey: 'test-api-key',
|
||||
routes: '[]',
|
||||
}
|
||||
|
||||
await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
|
||||
'No routes defined for router'
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle fallback when JSON parsing fails', async () => {
|
||||
const inputs = {
|
||||
context: 'Test context',
|
||||
model: 'gpt-4o',
|
||||
apiKey: 'test-api-key',
|
||||
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
|
||||
}
|
||||
|
||||
mockFetch.mockImplementationOnce(() => {
|
||||
return Promise.resolve({
|
||||
ok: true,
|
||||
json: () =>
|
||||
Promise.resolve({
|
||||
content: 'route-1',
|
||||
model: 'gpt-4o',
|
||||
tokens: { input: 100, output: 5, total: 105 },
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
const result = await handler.execute(mockContext, mockRouterV2Block, inputs)
|
||||
|
||||
expect(result.selectedRoute).toBe('route-1')
|
||||
expect(result.reasoning).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -238,6 +238,25 @@ export class RouterBlockHandler implements BlockHandler {
      apiKey: finalApiKey,
      workflowId: ctx.workflowId,
      workspaceId: ctx.workspaceId,
      responseFormat: {
        name: 'router_response',
        schema: {
          type: 'object',
          properties: {
            route: {
              type: 'string',
              description: 'The selected route ID or NO_MATCH',
            },
            reasoning: {
              type: 'string',
              description: 'Brief explanation of why this route was chosen',
            },
          },
          required: ['route', 'reasoning'],
          additionalProperties: false,
        },
        strict: true,
      },
    }

    if (providerId === 'vertex') {
@@ -277,16 +296,31 @@ export class RouterBlockHandler implements BlockHandler {

    const result = await response.json()

    const chosenRouteId = result.content.trim()
    let chosenRouteId: string
    let reasoning = ''

    try {
      const parsedResponse = JSON.parse(result.content)
      chosenRouteId = parsedResponse.route?.trim() || ''
      reasoning = parsedResponse.reasoning || ''
    } catch (_parseError) {
      logger.error('Router response was not valid JSON despite responseFormat', {
        content: result.content,
      })
      chosenRouteId = result.content.trim()
    }

    if (chosenRouteId === 'NO_MATCH' || chosenRouteId.toUpperCase() === 'NO_MATCH') {
      logger.info('Router determined no route matches the context, routing to error path')
      throw new Error('Router could not determine a matching route for the given context')
      throw new Error(
        reasoning
          ? `Router could not determine a matching route: ${reasoning}`
          : 'Router could not determine a matching route for the given context'
      )
    }

    const chosenRoute = routes.find((r) => r.id === chosenRouteId)

    // Throw error if LLM returns invalid route ID - this routes through error path
    if (!chosenRoute) {
      const availableRoutes = routes.map((r) => ({ id: r.id, title: r.title }))
      logger.error(
@@ -298,7 +332,6 @@ export class RouterBlockHandler implements BlockHandler {
      )
    }

    // Find the target block connected to this route's handle
    const connection = ctx.workflow?.connections.find(
      (conn) => conn.source === block.id && conn.sourceHandle === `router-${chosenRoute.id}`
    )
@@ -334,6 +367,7 @@ export class RouterBlockHandler implements BlockHandler {
        total: cost.total,
      },
      selectedRoute: chosenRoute.id,
      reasoning,
      selectedPath: targetBlock
        ? {
            blockId: targetBlock.id,
@@ -353,7 +387,7 @@ export class RouterBlockHandler implements BlockHandler {
  }

  /**
   * Parse routes from input (can be JSON string or array).
   * Parse routes from input (can be JSON string or array)
   */
  private parseRoutes(input: any): RouteDefinition[] {
    try {

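The handler above requests JSON via responseFormat but still tolerates a plain-text reply from the model. A minimal sketch of the same parse-then-fallback pattern, lifted out of the handler (the function name and shape are illustrative, not from the codebase):

    // Parse a structured router reply; fall back to treating the raw text as the route id.
    function parseRouterReply(content: string): { routeId: string; reasoning: string } {
      try {
        const parsed = JSON.parse(content)
        return {
          routeId: typeof parsed.route === 'string' ? parsed.route.trim() : '',
          reasoning: typeof parsed.reasoning === 'string' ? parsed.reasoning : '',
        }
      } catch {
        // Plain-text fallback: treat the whole reply as the route id.
        return { routeId: content.trim(), reasoning: '' }
      }
    }
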
@@ -204,26 +204,21 @@ describe('WorkflowBlockHandler', () => {
      })
    })

    it('should map failed child output correctly', () => {
    it('should throw error for failed child output so BlockExecutor can check error port', () => {
      const childResult = {
        success: false,
        error: 'Child workflow failed',
      }

      const result = (handler as any).mapChildOutputToParent(
        childResult,
        'child-id',
        'Child Workflow',
        100
      )
      expect(() =>
        (handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
      ).toThrow('Error in child workflow "Child Workflow": Child workflow failed')

      expect(result).toEqual({
        success: false,
        childWorkflowName: 'Child Workflow',
        result: {},
        error: 'Child workflow failed',
        childTraceSpans: [],
      })
      try {
        ;(handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
      } catch (error: any) {
        expect(error.childTraceSpans).toEqual([])
      }
    })

    it('should handle nested response structures', () => {

@@ -144,6 +144,11 @@ export class WorkflowBlockHandler implements BlockHandler {
      const workflowMetadata = workflows[workflowId]
      const childWorkflowName = workflowMetadata?.name || workflowId

      const originalError = error.message || 'Unknown error'
      const wrappedError = new Error(
        `Error in child workflow "${childWorkflowName}": ${originalError}`
      )

      if (error.executionResult?.logs) {
        const executionResult = error.executionResult as ExecutionResult

@@ -159,28 +164,12 @@ export class WorkflowBlockHandler implements BlockHandler {
        )

        logger.info(`Captured ${childTraceSpans.length} child trace spans from failed execution`)

        return {
          success: false,
          childWorkflowName,
          result: {},
          error: error.message || 'Child workflow execution failed',
          childTraceSpans: childTraceSpans,
        } as Record<string, any>
        ;(wrappedError as any).childTraceSpans = childTraceSpans
      } else if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) {
        ;(wrappedError as any).childTraceSpans = error.childTraceSpans
      }

      if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) {
        return {
          success: false,
          childWorkflowName,
          result: {},
          error: error.message || 'Child workflow execution failed',
          childTraceSpans: error.childTraceSpans,
        } as Record<string, any>
      }

      const originalError = error.message || 'Unknown error'
      throw new Error(`Error in child workflow "${childWorkflowName}": ${originalError}`)
      throw wrappedError
    }
  }

@@ -452,17 +441,13 @@ export class WorkflowBlockHandler implements BlockHandler {

    if (!success) {
      logger.warn(`Child workflow ${childWorkflowName} failed`)
      // Return failure with child trace spans so they can be displayed
      return {
        success: false,
        childWorkflowName,
        result,
        error: childResult.error || 'Child workflow execution failed',
        childTraceSpans: childTraceSpans || [],
      } as Record<string, any>
      const error = new Error(
        `Error in child workflow "${childWorkflowName}": ${childResult.error || 'Child workflow execution failed'}`
      )
      ;(error as any).childTraceSpans = childTraceSpans || []
      throw error
    }

    // Success case
    return {
      success: true,
      childWorkflowName,

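Failed child runs are now surfaced by throwing rather than returning a failure object, with the trace spans carried on the error itself. A self-contained sketch of that convention (the helper is illustrative; only the childTraceSpans property name comes from the handler above):

    // Throw an Error that also transports trace spans for the caller to display.
    function throwWithTraceSpans(message: string, spans: unknown[]): never {
      const err = new Error(message)
      ;(err as Error & { childTraceSpans?: unknown[] }).childTraceSpans = spans
      throw err
    }

    try {
      throwWithTraceSpans('Error in child workflow "Example": boom', [])
    } catch (err) {
      // The caller recovers the spans without changing the throw/catch contract.
      const spans = (err as { childTraceSpans?: unknown[] }).childTraceSpans ?? []
      console.log(spans.length) // 0
    }
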
@@ -1,24 +1,43 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'

export interface BlockDataCollection {
  blockData: Record<string, any>
  blockData: Record<string, unknown>
  blockNameMapping: Record<string, string>
  blockOutputSchemas: Record<string, OutputSchema>
}

export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
  const blockData: Record<string, any> = {}
  const blockData: Record<string, unknown> = {}
  const blockNameMapping: Record<string, string> = {}
  const blockOutputSchemas: Record<string, OutputSchema> = {}

  for (const [id, state] of ctx.blockStates.entries()) {
    if (state.output !== undefined) {
      blockData[id] = state.output
      const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
      if (workflowBlock?.metadata?.name) {
        blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
      }

    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
    if (!workflowBlock) continue

    if (workflowBlock.metadata?.name) {
      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
    }

    const blockType = workflowBlock.metadata?.id
    if (blockType) {
      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
      const subBlocks = params
        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
        : undefined
      const schema = getBlockOutputs(blockType, subBlocks)
      if (schema && Object.keys(schema).length > 0) {
        blockOutputSchemas[id] = schema
      }
    }
  }

  return { blockData, blockNameMapping }
  return { blockData, blockNameMapping, blockOutputSchemas }
}

255
apps/sim/executor/utils/block-reference.test.ts
Normal file
@@ -0,0 +1,255 @@
/**
 * @vitest-environment node
 */
import { describe, expect, it } from 'vitest'
import {
  type BlockReferenceContext,
  InvalidFieldError,
  resolveBlockReference,
} from './block-reference'

describe('resolveBlockReference', () => {
  const createContext = (
    overrides: Partial<BlockReferenceContext> = {}
  ): BlockReferenceContext => ({
    blockNameMapping: { start: 'block-1', agent: 'block-2' },
    blockData: {},
    blockOutputSchemas: {},
    ...overrides,
  })

  describe('block name resolution', () => {
    it('should return undefined when block name does not exist', () => {
      const ctx = createContext()
      const result = resolveBlockReference('unknown', ['field'], ctx)
      expect(result).toBeUndefined()
    })

    it('should normalize block name before lookup', () => {
      const ctx = createContext({
        blockNameMapping: { myblock: 'block-1' },
        blockData: { 'block-1': { value: 'test' } },
      })

      const result = resolveBlockReference('MyBlock', ['value'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })

    it('should handle block names with spaces', () => {
      const ctx = createContext({
        blockNameMapping: { myblock: 'block-1' },
        blockData: { 'block-1': { value: 'test' } },
      })

      const result = resolveBlockReference('My Block', ['value'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })
  })

  describe('field resolution', () => {
    it('should return entire block output when no path specified', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: 'hello', other: 'data' } },
      })

      const result = resolveBlockReference('start', [], ctx)
      expect(result).toEqual({
        value: { input: 'hello', other: 'data' },
        blockId: 'block-1',
      })
    })

    it('should resolve simple field path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: 'hello' } },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: 'hello', blockId: 'block-1' })
    })

    it('should resolve nested field path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { response: { data: { name: 'test' } } } },
      })

      const result = resolveBlockReference('start', ['response', 'data', 'name'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })

    it('should resolve array index path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { items: ['a', 'b', 'c'] } },
      })

      const result = resolveBlockReference('start', ['items', '1'], ctx)
      expect(result).toEqual({ value: 'b', blockId: 'block-1' })
    })

    it('should return undefined value when field exists but has no value', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: undefined } },
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })

    it('should return null value when field has null', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: null } },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: null, blockId: 'block-1' })
    })
  })

  describe('schema validation', () => {
    it('should throw InvalidFieldError when field not in schema', () => {
      const ctx = createContext({
        blockData: { 'block-1': { existing: 'value' } },
        blockOutputSchemas: {
          'block-1': {
            input: { type: 'string' },
            conversationId: { type: 'string' },
          },
        },
      })

      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(
        /"invalid" doesn't exist on block "start"/
      )
    })

    it('should include available fields in error message', () => {
      const ctx = createContext({
        blockData: { 'block-1': {} },
        blockOutputSchemas: {
          'block-1': {
            input: { type: 'string' },
            conversationId: { type: 'string' },
            files: { type: 'files' },
          },
        },
      })

      try {
        resolveBlockReference('start', ['typo'], ctx)
        expect.fail('Should have thrown')
      } catch (error) {
        expect(error).toBeInstanceOf(InvalidFieldError)
        const fieldError = error as InvalidFieldError
        expect(fieldError.availableFields).toContain('input')
        expect(fieldError.availableFields).toContain('conversationId')
        expect(fieldError.availableFields).toContain('files')
      }
    })

    it('should allow valid field even when value is undefined', () => {
      const ctx = createContext({
        blockData: { 'block-1': {} },
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })

    it('should validate path when block has no output yet', () => {
      const ctx = createContext({
        blockData: {},
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
    })

    it('should return undefined for valid field when block has no output', () => {
      const ctx = createContext({
        blockData: {},
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })
  })

  describe('without schema (pass-through mode)', () => {
    it('should return undefined value without throwing when no schema', () => {
      const ctx = createContext({
        blockData: { 'block-1': { existing: 'value' } },
      })

      const result = resolveBlockReference('start', ['missing'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })
  })

  describe('file type handling', () => {
    it('should allow file property access', () => {
      const ctx = createContext({
        blockData: {
          'block-1': {
            files: [{ name: 'test.txt', url: 'http://example.com/file' }],
          },
        },
        blockOutputSchemas: {
          'block-1': { files: { type: 'files' } },
        },
      })

      const result = resolveBlockReference('start', ['files', '0', 'name'], ctx)
      expect(result).toEqual({ value: 'test.txt', blockId: 'block-1' })
    })

    it('should validate file property names', () => {
      const ctx = createContext({
        blockData: { 'block-1': { files: [] } },
        blockOutputSchemas: {
          'block-1': { files: { type: 'files' } },
        },
      })

      expect(() => resolveBlockReference('start', ['files', '0', 'invalid'], ctx)).toThrow(
        InvalidFieldError
      )
    })
  })
})

describe('InvalidFieldError', () => {
  it('should have correct properties', () => {
    const error = new InvalidFieldError('myBlock', 'invalid.path', ['field1', 'field2'])

    expect(error.blockName).toBe('myBlock')
    expect(error.fieldPath).toBe('invalid.path')
    expect(error.availableFields).toEqual(['field1', 'field2'])
    expect(error.name).toBe('InvalidFieldError')
  })

  it('should format message correctly', () => {
    const error = new InvalidFieldError('start', 'typo', ['input', 'files'])

    expect(error.message).toBe(
      '"typo" doesn\'t exist on block "start". Available fields: input, files'
    )
  })

  it('should handle empty available fields', () => {
    const error = new InvalidFieldError('start', 'field', [])

    expect(error.message).toBe('"field" doesn\'t exist on block "start". Available fields: none')
  })
})

210
apps/sim/executor/utils/block-reference.ts
Normal file
@@ -0,0 +1,210 @@
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import { normalizeName } from '@/executor/constants'
import { navigatePath } from '@/executor/variables/resolvers/reference'

export type OutputSchema = Record<string, { type?: string; description?: string } | unknown>

export interface BlockReferenceContext {
  blockNameMapping: Record<string, string>
  blockData: Record<string, unknown>
  blockOutputSchemas?: Record<string, OutputSchema>
}

export interface BlockReferenceResult {
  value: unknown
  blockId: string
}

export class InvalidFieldError extends Error {
  constructor(
    public readonly blockName: string,
    public readonly fieldPath: string,
    public readonly availableFields: string[]
  ) {
    super(
      `"${fieldPath}" doesn't exist on block "${blockName}". ` +
        `Available fields: ${availableFields.length > 0 ? availableFields.join(', ') : 'none'}`
    )
    this.name = 'InvalidFieldError'
  }
}

function isFileType(value: unknown): boolean {
  if (typeof value !== 'object' || value === null) return false
  const typed = value as { type?: string }
  return typed.type === 'file[]' || typed.type === 'files'
}

function isArrayType(value: unknown): value is { type: 'array'; items?: unknown } {
  if (typeof value !== 'object' || value === null) return false
  return (value as { type?: string }).type === 'array'
}

function getArrayItems(schema: unknown): unknown {
  if (typeof schema !== 'object' || schema === null) return undefined
  return (schema as { items?: unknown }).items
}

function getProperties(schema: unknown): Record<string, unknown> | undefined {
  if (typeof schema !== 'object' || schema === null) return undefined
  const props = (schema as { properties?: unknown }).properties
  return typeof props === 'object' && props !== null
    ? (props as Record<string, unknown>)
    : undefined
}

function lookupField(schema: unknown, fieldName: string): unknown | undefined {
  if (typeof schema !== 'object' || schema === null) return undefined
  const typed = schema as Record<string, unknown>

  if (fieldName in typed) {
    return typed[fieldName]
  }

  const props = getProperties(schema)
  if (props && fieldName in props) {
    return props[fieldName]
  }

  return undefined
}

function isPathInSchema(schema: OutputSchema | undefined, pathParts: string[]): boolean {
  if (!schema || pathParts.length === 0) {
    return true
  }

  let current: unknown = schema

  for (let i = 0; i < pathParts.length; i++) {
    const part = pathParts[i]

    if (current === null || current === undefined) {
      return false
    }

    if (/^\d+$/.test(part)) {
      if (isFileType(current)) {
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }
      if (isArrayType(current)) {
        current = getArrayItems(current)
      }
      continue
    }

    const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
    if (arrayMatch) {
      const [, prop] = arrayMatch
      const fieldDef = lookupField(current, prop)
      if (!fieldDef) return false

      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }

      current = isArrayType(fieldDef) ? getArrayItems(fieldDef) : fieldDef
      continue
    }

    if (
      isFileType(current) &&
      USER_FILE_ACCESSIBLE_PROPERTIES.includes(
        part as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
      )
    ) {
      return true
    }

    const fieldDef = lookupField(current, part)
    if (fieldDef !== undefined) {
      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        if (!nextPart) return true
        if (/^\d+$/.test(nextPart)) {
          const afterIndex = pathParts[i + 2]
          return (
            !afterIndex ||
            USER_FILE_ACCESSIBLE_PROPERTIES.includes(
              afterIndex as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
            )
          )
        }
        return USER_FILE_ACCESSIBLE_PROPERTIES.includes(
          nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
        )
      }
      current = fieldDef
      continue
    }

    if (isArrayType(current)) {
      const items = getArrayItems(current)
      const itemField = lookupField(items, part)
      if (itemField !== undefined) {
        current = itemField
        continue
      }
    }

    return false
  }

  return true
}

function getSchemaFieldNames(schema: OutputSchema | undefined): string[] {
  if (!schema) return []
  return Object.keys(schema)
}

export function resolveBlockReference(
  blockName: string,
  pathParts: string[],
  context: BlockReferenceContext
): BlockReferenceResult | undefined {
  const normalizedName = normalizeName(blockName)
  const blockId = context.blockNameMapping[normalizedName]

  if (!blockId) {
    return undefined
  }

  const blockOutput = context.blockData[blockId]
  const schema = context.blockOutputSchemas?.[blockId]

  if (blockOutput === undefined) {
    if (schema && pathParts.length > 0) {
      if (!isPathInSchema(schema, pathParts)) {
        throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
      }
    }
    return { value: undefined, blockId }
  }

  if (pathParts.length === 0) {
    return { value: blockOutput, blockId }
  }

  const value = navigatePath(blockOutput, pathParts)

  if (value === undefined && schema) {
    if (!isPathInSchema(schema, pathParts)) {
      throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
    }
  }

  return { value, blockId }
}

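In short, resolveBlockReference normalizes the block name, walks the field path into the recorded output, and throws InvalidFieldError only when a schema proves the field cannot exist. A usage sketch with illustrative context values (the schema shape here is an assumption, not taken from a real block):

    const ctx = {
      blockNameMapping: { agent1: 'block-9' },
      blockData: { 'block-9': { content: 'hi', tokens: { total: 12 } } },
      blockOutputSchemas: {
        'block-9': { content: { type: 'string' }, tokens: { type: 'json' } },
      },
    }

    resolveBlockReference('Agent 1', ['tokens', 'total'], ctx) // { value: 12, blockId: 'block-9' }
    resolveBlockReference('Agent 1', ['typo'], ctx) // throws InvalidFieldError listing content, tokens
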
@@ -1,11 +1,15 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import {
  isReference,
  normalizeName,
  parseReferencePath,
  SPECIAL_REFERENCE_PREFIXES,
} from '@/executor/constants'
import {
  InvalidFieldError,
  type OutputSchema,
  resolveBlockReference,
} from '@/executor/utils/block-reference'
import {
  navigatePath,
  type ResolutionContext,
@@ -14,123 +18,6 @@ import {
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import { getTool } from '@/tools/utils'

function isPathInOutputSchema(
  outputs: Record<string, any> | undefined,
  pathParts: string[]
): boolean {
  if (!outputs || pathParts.length === 0) {
    return true
  }

  const isFileArrayType = (value: any): boolean =>
    value?.type === 'file[]' || value?.type === 'files'

  let current: any = outputs
  for (let i = 0; i < pathParts.length; i++) {
    const part = pathParts[i]

    const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
    if (arrayMatch) {
      const [, prop] = arrayMatch
      let fieldDef: any

      if (prop in current) {
        fieldDef = current[prop]
      } else if (current.properties && prop in current.properties) {
        fieldDef = current.properties[prop]
      } else if (current.type === 'array' && current.items) {
        if (current.items.properties && prop in current.items.properties) {
          fieldDef = current.items.properties[prop]
        } else if (prop in current.items) {
          fieldDef = current.items[prop]
        }
      }

      if (!fieldDef) {
        return false
      }

      if (isFileArrayType(fieldDef)) {
        if (i + 1 < pathParts.length) {
          return USER_FILE_ACCESSIBLE_PROPERTIES.includes(pathParts[i + 1] as any)
        }
        return true
      }

      if (fieldDef.type === 'array' && fieldDef.items) {
        current = fieldDef.items
        continue
      }

      current = fieldDef
      continue
    }

    if (/^\d+$/.test(part)) {
      if (isFileArrayType(current)) {
        if (i + 1 < pathParts.length) {
          const nextPart = pathParts[i + 1]
          return USER_FILE_ACCESSIBLE_PROPERTIES.includes(nextPart as any)
        }
        return true
      }
      continue
    }

    if (current === null || current === undefined) {
      return false
    }

    if (part in current) {
      const nextCurrent = current[part]
      if (nextCurrent?.type === 'file[]' && i + 1 < pathParts.length) {
        const nextPart = pathParts[i + 1]
        if (/^\d+$/.test(nextPart) && i + 2 < pathParts.length) {
          const propertyPart = pathParts[i + 2]
          return USER_FILE_ACCESSIBLE_PROPERTIES.includes(propertyPart as any)
        }
      }
      current = nextCurrent
      continue
    }

    if (current.properties && part in current.properties) {
      current = current.properties[part]
      continue
    }

    if (current.type === 'array' && current.items) {
      if (current.items.properties && part in current.items.properties) {
        current = current.items.properties[part]
        continue
      }
      if (part in current.items) {
        current = current.items[part]
        continue
      }
    }

    if (isFileArrayType(current) && USER_FILE_ACCESSIBLE_PROPERTIES.includes(part as any)) {
      return true
    }

    if ('type' in current && typeof current.type === 'string') {
      if (!current.properties && !current.items) {
        return false
      }
    }

    return false
  }

  return true
}

function getSchemaFieldNames(outputs: Record<string, any> | undefined): string[] {
  if (!outputs) return []
  return Object.keys(outputs)
}

export class BlockResolver implements Resolver {
  private nameToBlockId: Map<string, string>
  private blockById: Map<string, SerializedBlock>
@@ -170,83 +57,94 @@ export class BlockResolver implements Resolver {
      return undefined
    }

    const block = this.blockById.get(blockId)
    const block = this.blockById.get(blockId)!
    const output = this.getBlockOutput(blockId, context)

    if (output === undefined) {
    const blockData: Record<string, unknown> = {}
    const blockOutputSchemas: Record<string, OutputSchema> = {}

    if (output !== undefined) {
      blockData[blockId] = output
    }

    const blockType = block.metadata?.id
    const params = block.config?.params as Record<string, unknown> | undefined
    const subBlocks = params
      ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
      : undefined
    const toolId = block.config?.tool
    const toolConfig = toolId ? getTool(toolId) : undefined
    const outputSchema =
      toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)

    if (outputSchema && Object.keys(outputSchema).length > 0) {
      blockOutputSchemas[blockId] = outputSchema
    }

    try {
      const result = resolveBlockReference(blockName, pathParts, {
        blockNameMapping: Object.fromEntries(this.nameToBlockId),
        blockData,
        blockOutputSchemas,
      })!

      if (result.value !== undefined) {
        return result.value
      }

      return this.handleBackwardsCompat(block, output, pathParts)
    } catch (error) {
      if (error instanceof InvalidFieldError) {
        const fallback = this.handleBackwardsCompat(block, output, pathParts)
        if (fallback !== undefined) {
          return fallback
        }
        throw new Error(error.message)
      }
      throw error
    }
  }

  private handleBackwardsCompat(
    block: SerializedBlock,
    output: unknown,
    pathParts: string[]
  ): unknown {
    if (output === undefined || pathParts.length === 0) {
      return undefined
    }
    if (pathParts.length === 0) {
      return output
    }

    // Try the original path first
    let result = navigatePath(output, pathParts)

    // If successful, return it immediately
    if (result !== undefined) {
      return result
    }

    // Response block backwards compatibility:
    // Old: <responseBlock.response.data> -> New: <responseBlock.data>
    // Only apply fallback if:
    // 1. Block type is 'response'
    // 2. Path starts with 'response.'
    // 3. Output doesn't have a 'response' key (confirming it's the new format)
    if (
      block?.metadata?.id === 'response' &&
      block.metadata?.id === 'response' &&
      pathParts[0] === 'response' &&
      output?.response === undefined
      (output as Record<string, unknown>)?.response === undefined
    ) {
      const adjustedPathParts = pathParts.slice(1)
      if (adjustedPathParts.length === 0) {
        return output
      }
      result = navigatePath(output, adjustedPathParts)
      if (result !== undefined) {
        return result
      const fallbackResult = navigatePath(output, adjustedPathParts)
      if (fallbackResult !== undefined) {
        return fallbackResult
      }
    }

    // Workflow block backwards compatibility:
    // Old: <workflowBlock.result.response.data> -> New: <workflowBlock.result.data>
    // Only apply fallback if:
    // 1. Block type is 'workflow' or 'workflow_input'
    // 2. Path starts with 'result.response.'
    // 3. output.result.response doesn't exist (confirming child used new format)
    const isWorkflowBlock =
      block?.metadata?.id === 'workflow' || block?.metadata?.id === 'workflow_input'
      block.metadata?.id === 'workflow' || block.metadata?.id === 'workflow_input'
    const outputRecord = output as Record<string, Record<string, unknown> | undefined>
    if (
      isWorkflowBlock &&
      pathParts[0] === 'result' &&
      pathParts[1] === 'response' &&
      output?.result?.response === undefined
      outputRecord?.result?.response === undefined
    ) {
      const adjustedPathParts = ['result', ...pathParts.slice(2)]
      result = navigatePath(output, adjustedPathParts)
      if (result !== undefined) {
        return result
      const fallbackResult = navigatePath(output, adjustedPathParts)
      if (fallbackResult !== undefined) {
        return fallbackResult
      }
    }

    const blockType = block?.metadata?.id
    const params = block?.config?.params as Record<string, unknown> | undefined
    const subBlocks = params
      ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
      : undefined
    const toolId = block?.config?.tool
    const toolConfig = toolId ? getTool(toolId) : undefined
    const outputSchema =
      toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block?.outputs)
    const schemaFields = getSchemaFieldNames(outputSchema)
    if (schemaFields.length > 0 && !isPathInOutputSchema(outputSchema, pathParts)) {
      throw new Error(
        `"${pathParts.join('.')}" doesn't exist on block "${blockName}". ` +
          `Available fields: ${schemaFields.join(', ')}`
      )
    }

    return undefined
  }

@@ -27,23 +27,28 @@ export function navigatePath(obj: any, path: string[]): any {
      return undefined
    }

    // Handle array indexing like "items[0]" or just numeric indices
    const arrayMatch = part.match(/^([^[]+)\[(\d+)\](.*)$/)
    const arrayMatch = part.match(/^([^[]+)(\[.+)$/)
    if (arrayMatch) {
      // Handle complex array access like "items[0]"
      const [, prop, index] = arrayMatch
      const [, prop, bracketsPart] = arrayMatch
      current = current[prop]
      if (current === undefined || current === null) {
        return undefined
      }
      const idx = Number.parseInt(index, 10)
      current = Array.isArray(current) ? current[idx] : undefined

      const indices = bracketsPart.match(/\[(\d+)\]/g)
      if (indices) {
        for (const indexMatch of indices) {
          if (current === null || current === undefined) {
            return undefined
          }
          const idx = Number.parseInt(indexMatch.slice(1, -1), 10)
          current = Array.isArray(current) ? current[idx] : undefined
        }
      }
    } else if (/^\d+$/.test(part)) {
      // Handle plain numeric index
      const index = Number.parseInt(part, 10)
      current = Array.isArray(current) ? current[index] : undefined
    } else {
      // Handle regular property access
      current = current[part]
    }
  }

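The widened bracket pattern means a single path segment can now carry several indices, applied left to right. For example, with illustrative data:

    // "grid[1][0]" resolves both indices in sequence under the new pattern.
    const data = { grid: [['a', 'b'], ['c', 'd']] }
    navigatePath(data, ['grid[1][0]']) // 'c'
    navigatePath(data, ['grid', '0', '1']) // 'b' — plain numeric segments still work
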
@@ -1,10 +1,9 @@
'use client'

import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import type { AllTagSlot } from '@/lib/knowledge/constants'

const logger = createLogger('useKnowledgeBaseTagDefinitions')
import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/knowledge'

export interface TagDefinition {
  id: string
@@ -17,54 +16,23 @@ export interface TagDefinition {

/**
 * Hook for fetching KB-scoped tag definitions (for filtering/selection)
 * @param knowledgeBaseId - The knowledge base ID
 * Uses React Query as single source of truth
 */
export function useKnowledgeBaseTagDefinitions(knowledgeBaseId: string | null) {
  const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([])
  const [isLoading, setIsLoading] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const queryClient = useQueryClient()
  const query = useTagDefinitionsQuery(knowledgeBaseId)

  const fetchTagDefinitions = useCallback(async () => {
    if (!knowledgeBaseId) {
      setTagDefinitions([])
      return
    }

    setIsLoading(true)
    setError(null)

    try {
      const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`)

      if (!response.ok) {
        throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
      }

      const data = await response.json()

      if (data.success && Array.isArray(data.data)) {
        setTagDefinitions(data.data)
      } else {
        throw new Error('Invalid response format')
      }
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
      logger.error('Error fetching tag definitions:', err)
      setError(errorMessage)
      setTagDefinitions([])
    } finally {
      setIsLoading(false)
    }
  }, [knowledgeBaseId])

  useEffect(() => {
    fetchTagDefinitions()
  }, [fetchTagDefinitions])
    if (!knowledgeBaseId) return
    await queryClient.invalidateQueries({
      queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
    })
  }, [queryClient, knowledgeBaseId])

  return {
    tagDefinitions,
    isLoading,
    error,
    tagDefinitions: (query.data ?? []) as TagDefinition[],
    isLoading: query.isLoading,
    error: query.error instanceof Error ? query.error.message : null,
    fetchTagDefinitions,
  }
}

@@ -1,4 +1,4 @@
import { useCallback } from 'react'
import { useCallback, useMemo } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types'
import {
@@ -67,12 +67,17 @@ export function useKnowledgeBaseDocuments(
    sortBy?: string
    sortOrder?: string
    enabled?: boolean
    refetchInterval?: number | false
    refetchInterval?:
      | number
      | false
      | ((data: KnowledgeDocumentsResponse | undefined) => number | false)
    enabledFilter?: 'all' | 'enabled' | 'disabled'
  }
) {
  const queryClient = useQueryClient()
  const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE
  const requestOffset = options?.offset ?? 0
  const enabledFilter = options?.enabledFilter ?? 'all'
  const paramsKey = serializeDocumentParams({
    knowledgeBaseId,
    limit: requestLimit,
@@ -80,8 +85,19 @@ export function useKnowledgeBaseDocuments(
    search: options?.search,
    sortBy: options?.sortBy,
    sortOrder: options?.sortOrder,
    enabledFilter,
  })

  const refetchIntervalFn = useMemo(() => {
    if (typeof options?.refetchInterval === 'function') {
      const userFn = options.refetchInterval
      return (query: { state: { data?: KnowledgeDocumentsResponse } }) => {
        return userFn(query.state.data)
      }
    }
    return options?.refetchInterval
  }, [options?.refetchInterval])

  const query = useKnowledgeDocumentsQuery(
    {
      knowledgeBaseId,
@@ -90,10 +106,11 @@ export function useKnowledgeBaseDocuments(
      search: options?.search,
      sortBy: options?.sortBy,
      sortOrder: options?.sortOrder,
      enabledFilter,
    },
    {
      enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
      refetchInterval: options?.refetchInterval,
      refetchInterval: refetchIntervalFn,
    }
  )

@@ -105,6 +122,14 @@ export function useKnowledgeBaseDocuments(
    hasMore: false,
  }

  const hasProcessingDocs = useMemo(
    () =>
      documents.some(
        (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
      ),
    [documents]
  )

  const refreshDocuments = useCallback(async () => {
    await queryClient.invalidateQueries({
      queryKey: knowledgeKeys.documents(knowledgeBaseId, paramsKey),
@@ -136,6 +161,7 @@ export function useKnowledgeBaseDocuments(
    isFetching: query.isFetching,
    isPlaceholderData: query.isPlaceholderData,
    error: query.error instanceof Error ? query.error.message : null,
    hasProcessingDocuments: hasProcessingDocs,
    refreshDocuments,
    updateDocument,
  }
@@ -233,8 +259,8 @@ export function useDocumentChunks(
  const hasPrevPage = currentPage > 1

  const goToPage = useCallback(
    async (newPage: number) => {
      if (newPage < 1 || newPage > totalPages) return
    (newPage: number): boolean => {
      return newPage >= 1 && newPage <= totalPages
    },
    [totalPages]
  )

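Because refetchInterval can now be a function of the latest response, callers can poll only while documents are still being processed and stop automatically once everything settles. A sketch of that usage (the 3000 ms cadence and the two-argument call shape are illustrative assumptions):

    const { hasProcessingDocuments } = useKnowledgeBaseDocuments(knowledgeBaseId, {
      refetchInterval: (data) =>
        data?.documents.some(
          (d) => d.processingStatus === 'pending' || d.processingStatus === 'processing'
        )
          ? 3000 // poll while ingestion is in flight
          : false, // stop once all documents have settled
    })
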
@@ -1,10 +1,15 @@
'use client'

import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import type { AllTagSlot } from '@/lib/knowledge/constants'

const logger = createLogger('useTagDefinitions')
import {
  type DocumentTagDefinitionInput,
  knowledgeKeys,
  useDeleteDocumentTagDefinitions,
  useDocumentTagDefinitionsQuery,
  useSaveDocumentTagDefinitions,
} from '@/hooks/queries/knowledge'

export interface TagDefinition {
  id: string
@@ -19,57 +24,30 @@ export interface TagDefinitionInput {
  tagSlot: AllTagSlot
  displayName: string
  fieldType: string
  // Optional: for editing existing definitions
  _originalDisplayName?: string
}

/**
 * Hook for managing KB-scoped tag definitions
 * @param knowledgeBaseId - The knowledge base ID
 * @param documentId - The document ID (required for API calls)
 * Hook for managing document-scoped tag definitions
 * Uses React Query as single source of truth
 */
export function useTagDefinitions(
  knowledgeBaseId: string | null,
  documentId: string | null = null
) {
  const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([])
  const [isLoading, setIsLoading] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const queryClient = useQueryClient()
  const query = useDocumentTagDefinitionsQuery(knowledgeBaseId, documentId)
  const { mutateAsync: saveTagDefinitionsMutation } = useSaveDocumentTagDefinitions()
  const { mutateAsync: deleteTagDefinitionsMutation } = useDeleteDocumentTagDefinitions()

  const tagDefinitions = (query.data ?? []) as TagDefinition[]

  const fetchTagDefinitions = useCallback(async () => {
    if (!knowledgeBaseId || !documentId) {
      setTagDefinitions([])
      return
    }

    setIsLoading(true)
    setError(null)

    try {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`
      )

      if (!response.ok) {
        throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
      }

      const data = await response.json()

      if (data.success && Array.isArray(data.data)) {
        setTagDefinitions(data.data)
      } else {
        throw new Error('Invalid response format')
      }
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
      logger.error('Error fetching tag definitions:', err)
      setError(errorMessage)
      setTagDefinitions([])
    } finally {
      setIsLoading(false)
    }
  }, [knowledgeBaseId, documentId])
    if (!knowledgeBaseId || !documentId) return
    await queryClient.invalidateQueries({
      queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
    })
  }, [queryClient, knowledgeBaseId, documentId])

  const saveTagDefinitions = useCallback(
    async (definitions: TagDefinitionInput[]) => {
@@ -77,43 +55,13 @@ export function useTagDefinitions(
        throw new Error('Knowledge base ID and document ID are required')
      }

      // Simple validation
      const validDefinitions = (definitions || []).filter(
        (def) => def?.tagSlot && def.displayName && def.displayName.trim()
      )

      try {
        const response = await fetch(
          `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
          {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({ definitions: validDefinitions }),
          }
        )

        if (!response.ok) {
          throw new Error(`Failed to save tag definitions: ${response.statusText}`)
        }

        const data = await response.json()

        if (!data.success) {
          throw new Error(data.error || 'Failed to save tag definitions')
        }

        // Refresh the definitions after saving
        await fetchTagDefinitions()

        return data.data
      } catch (err) {
        logger.error('Error saving tag definitions:', err)
        throw err
      }
      return saveTagDefinitionsMutation({
        knowledgeBaseId,
        documentId,
        definitions: definitions as DocumentTagDefinitionInput[],
      })
    },
    [knowledgeBaseId, documentId, fetchTagDefinitions]
    [knowledgeBaseId, documentId, saveTagDefinitionsMutation]
  )

  const deleteTagDefinitions = useCallback(async () => {
@@ -121,25 +69,11 @@ export function useTagDefinitions(
      throw new Error('Knowledge base ID and document ID are required')
    }

    try {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
        {
          method: 'DELETE',
        }
      )

      if (!response.ok) {
        throw new Error(`Failed to delete tag definitions: ${response.statusText}`)
      }

      // Refresh the definitions after deleting
      await fetchTagDefinitions()
    } catch (err) {
      logger.error('Error deleting tag definitions:', err)
      throw err
    }
  }, [knowledgeBaseId, documentId, fetchTagDefinitions])
    return deleteTagDefinitionsMutation({
      knowledgeBaseId,
      documentId,
    })
  }, [knowledgeBaseId, documentId, deleteTagDefinitionsMutation])

  const getTagLabel = useCallback(
    (tagSlot: string): string => {
@@ -156,15 +90,10 @@ export function useTagDefinitions(
    [tagDefinitions]
  )

  // Auto-fetch on mount and when dependencies change
  useEffect(() => {
    fetchTagDefinitions()
  }, [fetchTagDefinitions])

  return {
    tagDefinitions,
    isLoading,
    error,
    isLoading: query.isLoading,
    error: query.error instanceof Error ? query.error.message : null,
    fetchTagDefinitions,
    saveTagDefinitions,
    deleteTagDefinitions,

@@ -1,3 +1,4 @@
import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type {
  ChunkData,
@@ -7,15 +8,21 @@ import type {
  KnowledgeBaseData,
} from '@/lib/knowledge/types'

const logger = createLogger('KnowledgeQueries')

export const knowledgeKeys = {
  all: ['knowledge'] as const,
  list: (workspaceId?: string) => [...knowledgeKeys.all, 'list', workspaceId ?? 'all'] as const,
  detail: (knowledgeBaseId?: string) =>
    [...knowledgeKeys.all, 'detail', knowledgeBaseId ?? ''] as const,
  tagDefinitions: (knowledgeBaseId: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'tagDefinitions'] as const,
  documents: (knowledgeBaseId: string, paramsKey: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'documents', paramsKey] as const,
  document: (knowledgeBaseId: string, documentId: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'document', documentId] as const,
  documentTagDefinitions: (knowledgeBaseId: string, documentId: string) =>
    [...knowledgeKeys.document(knowledgeBaseId, documentId), 'tagDefinitions'] as const,
  chunks: (knowledgeBaseId: string, documentId: string, paramsKey: string) =>
    [...knowledgeKeys.document(knowledgeBaseId, documentId), 'chunks', paramsKey] as const,
}
@@ -79,6 +86,7 @@ export interface KnowledgeDocumentsParams {
  offset?: number
  sortBy?: string
  sortOrder?: string
  enabledFilter?: 'all' | 'enabled' | 'disabled'
}

export interface KnowledgeDocumentsResponse {
@@ -93,6 +101,7 @@ export async function fetchKnowledgeDocuments({
  offset = 0,
  sortBy,
  sortOrder,
  enabledFilter,
}: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> {
  const params = new URLSearchParams()
  if (search) params.set('search', search)
@@ -100,6 +109,7 @@ export async function fetchKnowledgeDocuments({
  if (sortOrder) params.set('sortOrder', sortOrder)
  params.set('limit', limit.toString())
  params.set('offset', offset.toString())
  if (enabledFilter) params.set('enabledFilter', enabledFilter)

  const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
  const response = await fetch(url)
@@ -212,6 +222,7 @@ export function useDocumentQuery(knowledgeBaseId?: string, documentId?: string)
    queryFn: () => fetchDocument(knowledgeBaseId as string, documentId as string),
    enabled: Boolean(knowledgeBaseId && documentId),
    staleTime: 60 * 1000,
    placeholderData: keepPreviousData,
  })
}

@@ -222,13 +233,17 @@ export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
    offset: params.offset ?? 0,
    sortBy: params.sortBy ?? '',
    sortOrder: params.sortOrder ?? '',
    enabledFilter: params.enabledFilter ?? 'all',
  })

export function useKnowledgeDocumentsQuery(
  params: KnowledgeDocumentsParams,
  options?: {
    enabled?: boolean
    refetchInterval?: number | false
    refetchInterval?:
      | number
      | false
      | ((query: { state: { data?: KnowledgeDocumentsResponse } }) => number | false)
  }
) {
  const paramsKey = serializeDocumentParams(params)
@@ -572,7 +587,9 @@ export function useDeleteDocument() {
export interface BulkDocumentOperationParams {
  knowledgeBaseId: string
  operation: 'enable' | 'disable' | 'delete'
  documentIds: string[]
  documentIds?: string[]
  selectAll?: boolean
  enabledFilter?: 'all' | 'enabled' | 'disabled'
}

export interface BulkDocumentOperationResult {
@@ -585,11 +602,21 @@ export async function bulkDocumentOperation({
  knowledgeBaseId,
  operation,
  documentIds,
  selectAll,
  enabledFilter,
}: BulkDocumentOperationParams): Promise<BulkDocumentOperationResult> {
  const body: Record<string, unknown> = { operation }
  if (selectAll) {
    body.selectAll = true
    if (enabledFilter) body.enabledFilter = enabledFilter
  } else {
    body.documentIds = documentIds
  }

  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ operation, documentIds }),
    body: JSON.stringify(body),
  })

  if (!response.ok) {

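bulkDocumentOperation now accepts either an explicit id list or a select-all flag, optionally scoped by enabledFilter; the PATCH body carries only the fields relevant to the chosen mode. Two illustrative calls (the knowledge base and document ids are placeholders):

    // Disable three specific documents.
    await bulkDocumentOperation({
      knowledgeBaseId: 'kb-1',
      operation: 'disable',
      documentIds: ['doc-1', 'doc-2', 'doc-3'],
    })

    // Delete every currently disabled document in the knowledge base.
    await bulkDocumentOperation({
      knowledgeBaseId: 'kb-1',
      operation: 'delete',
      selectAll: true,
      enabledFilter: 'disabled',
    })
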
@@ -858,6 +885,31 @@ export interface TagDefinitionData {
|
||||
updatedAt: string
|
||||
}
|
||||
|
||||
export async function fetchTagDefinitions(knowledgeBaseId: string): Promise<TagDefinitionData[]> {
|
||||
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`)
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch tag definitions: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
if (!result?.success) {
|
||||
throw new Error(result?.error || 'Failed to fetch tag definitions')
|
||||
}
|
||||
|
||||
return Array.isArray(result.data) ? result.data : []
|
||||
}
|
||||
|
||||
export function useTagDefinitionsQuery(knowledgeBaseId?: string | null) {
|
||||
return useQuery({
|
||||
queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId ?? ''),
|
||||
queryFn: () => fetchTagDefinitions(knowledgeBaseId as string),
|
||||
enabled: Boolean(knowledgeBaseId),
|
||||
staleTime: 60 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
export interface CreateTagDefinitionParams {
|
||||
knowledgeBaseId: string
|
||||
displayName: string
|
||||
@@ -914,7 +966,7 @@ export function useCreateTagDefinition() {
|
||||
mutationFn: createTagDefinition,
|
||||
onSuccess: (_, { knowledgeBaseId }) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: knowledgeKeys.detail(knowledgeBaseId),
|
||||
queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
|
||||
})
|
||||
},
|
||||
})
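
For reference, a minimal sketch of the query-key factory shape these hooks assume; the real `knowledgeKeys` implementation lives elsewhere in the repo and may differ:

// Hypothetical shape of the knowledgeKeys factory used above.
const knowledgeKeys = {
  detail: (kbId: string) => ['knowledge', kbId] as const,
  tagDefinitions: (kbId: string) => ['knowledge', kbId, 'tag-definitions'] as const,
  documentTagDefinitions: (kbId: string, documentId: string) =>
    ['knowledge', kbId, 'documents', documentId, 'tag-definitions'] as const,
}
// Invalidating tagDefinitions(kbId) refetches just the tag list rather than
// the broader knowledge-base detail query the old key targeted.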
@@ -952,8 +1004,152 @@ export function useDeleteTagDefinition() {
mutationFn: deleteTagDefinition,
onSuccess: (_, { knowledgeBaseId }) => {
queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId),
queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
})
},
})
}

export interface DocumentTagDefinitionData {
id: string
tagSlot: string
displayName: string
fieldType: string
createdAt: string
updatedAt: string
}

export async function fetchDocumentTagDefinitions(
knowledgeBaseId: string,
documentId: string
): Promise<DocumentTagDefinitionData[]> {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`
)

if (!response.ok) {
throw new Error(
`Failed to fetch document tag definitions: ${response.status} ${response.statusText}`
)
}

const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to fetch document tag definitions')
}

return Array.isArray(result.data) ? result.data : []
}

export function useDocumentTagDefinitionsQuery(
knowledgeBaseId?: string | null,
documentId?: string | null
) {
return useQuery({
queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId ?? '', documentId ?? ''),
queryFn: () => fetchDocumentTagDefinitions(knowledgeBaseId as string, documentId as string),
enabled: Boolean(knowledgeBaseId && documentId),
staleTime: 60 * 1000,
placeholderData: keepPreviousData,
})
}

export interface DocumentTagDefinitionInput {
tagSlot: string
displayName: string
fieldType: string
}

export interface SaveDocumentTagDefinitionsParams {
knowledgeBaseId: string
documentId: string
definitions: DocumentTagDefinitionInput[]
}

export async function saveDocumentTagDefinitions({
knowledgeBaseId,
documentId,
definitions,
}: SaveDocumentTagDefinitionsParams): Promise<DocumentTagDefinitionData[]> {
const validDefinitions = (definitions || []).filter(
(def) => def?.tagSlot && def.displayName && def.displayName.trim()
)

const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ definitions: validDefinitions }),
}
)

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to save document tag definitions')
}

const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to save document tag definitions')
}

return result.data
}

export function useSaveDocumentTagDefinitions() {
const queryClient = useQueryClient()

return useMutation({
mutationFn: saveDocumentTagDefinitions,
onSuccess: (_, { knowledgeBaseId, documentId }) => {
queryClient.invalidateQueries({
queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
})
},
onError: (error) => {
logger.error('Failed to save document tag definitions:', error)
},
})
}

export interface DeleteDocumentTagDefinitionsParams {
knowledgeBaseId: string
documentId: string
}

export async function deleteDocumentTagDefinitions({
knowledgeBaseId,
documentId,
}: DeleteDocumentTagDefinitionsParams): Promise<void> {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
{ method: 'DELETE' }
)

if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to delete document tag definitions')
}

const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to delete document tag definitions')
}
}

export function useDeleteDocumentTagDefinitions() {
const queryClient = useQueryClient()

return useMutation({
mutationFn: deleteDocumentTagDefinitions,
onSuccess: (_, { knowledgeBaseId, documentId }) => {
queryClient.invalidateQueries({
queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
})
},
onError: (error) => {
logger.error('Failed to delete document tag definitions:', error)
},
})
}
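
A hedged sketch of wiring the document-tag hooks above into a component; the component, slot name, and field type are illustrative only:

// Hypothetical React component using the hooks above.
function TagDefinitionsEditor({ kbId, docId }: { kbId: string; docId: string }) {
  const { data: definitions = [] } = useDocumentTagDefinitionsQuery(kbId, docId)
  const save = useSaveDocumentTagDefinitions()
  return (
    <button
      disabled={save.isPending}
      onClick={() =>
        save.mutate({
          knowledgeBaseId: kbId,
          documentId: docId,
          // Example definition; slot name and field type are placeholders
          definitions: [{ tagSlot: 'tag1', displayName: 'Region', fieldType: 'text' }],
        })
      }
    >
      Save {definitions.length} tag definition(s)
    </button>
  )
}
// onSuccess invalidates documentTagDefinitions(kbId, docId), so the list refetches.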

@@ -34,17 +34,3 @@ import './workflow/set-global-workflow-variables'

// User tools
import './user/set-environment-variables'

// Re-export UI config utilities for convenience
export {
getSubagentLabels,
getToolUIConfig,
hasInterrupt,
type InterruptConfig,
isSpecialTool,
isSubagentTool,
type ParamsTableConfig,
type SecondaryActionConfig,
type SubagentConfig,
type ToolUIConfig,
} from './ui-config'

@@ -1,10 +1,6 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
type KnowledgeBaseArgs,
KnowledgeBaseArgsSchema,
type KnowledgeBaseResult,
} from '@/lib/copilot/tools/shared/schemas'
import type { KnowledgeBaseArgs, KnowledgeBaseResult } from '@/lib/copilot/tools/shared/schemas'
import { generateSearchEmbedding } from '@/lib/knowledge/embeddings'
import {
createKnowledgeBase,
@@ -15,11 +11,6 @@ import { getQueryStrategy, handleVectorOnlySearch } from '@/app/api/knowledge/se

const logger = createLogger('KnowledgeBaseServerTool')

// Re-export for backwards compatibility
export const KnowledgeBaseInput = KnowledgeBaseArgsSchema
export type KnowledgeBaseInputType = KnowledgeBaseArgs
export type KnowledgeBaseResultType = KnowledgeBaseResult

/**
* Knowledge base tool for copilot to create, list, and get knowledge bases
*/
@@ -163,7 +154,6 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
}
}

// Verify knowledge base exists
const kb = await getKnowledgeBaseById(args.knowledgeBaseId)
if (!kb) {
return {
@@ -181,10 +171,8 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
)
const queryVector = JSON.stringify(queryEmbedding)

// Get search strategy
const strategy = getQueryStrategy(1, topK)

// Perform vector search
const results = await handleVectorOnlySearch({
knowledgeBaseIds: [args.knowledgeBaseId],
topK,

@@ -6,10 +6,7 @@ import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/g
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
import {
KnowledgeBaseInput,
knowledgeBaseServerTool,
} from '@/lib/copilot/tools/server/knowledge/knowledge-base'
import { knowledgeBaseServerTool } from '@/lib/copilot/tools/server/knowledge/knowledge-base'
import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-api-request'
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
@@ -28,6 +25,7 @@ import {
GetBlocksMetadataResult,
GetTriggerBlocksInput,
GetTriggerBlocksResult,
KnowledgeBaseArgsSchema,
} from '@/lib/copilot/tools/shared/schemas'

// Generic execute response schemas (success path only for this route; errors handled via HTTP status)
@@ -90,7 +88,7 @@ export async function routeExecution(
args = GetTriggerBlocksInput.parse(args)
}
if (toolName === 'knowledge_base') {
args = KnowledgeBaseInput.parse(args)
args = KnowledgeBaseArgsSchema.parse(args)
}

const result = await tool.execute(args, context)
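
An alternative sketch (not from this change) showing how the per-tool `parse` calls could be table-driven as the list of schemas grows; the schema names are the ones imported above:

// Illustrative lookup table replacing the if-chain; unknown tools skip
// validation, known tools parse exactly once.
const inputSchemas: Record<string, { parse: (v: unknown) => unknown }> = {
  get_trigger_blocks: GetTriggerBlocksInput,
  knowledge_base: KnowledgeBaseArgsSchema,
}
const schema = inputSchemas[toolName]
if (schema) {
  args = schema.parse(args)
}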

@@ -2468,16 +2468,17 @@ async function validateWorkflowSelectorIds(
const result = await validateSelectorIds(selector.selectorType, selector.value, context)

if (result.invalid.length > 0) {
// Include warning info (like available credentials) in the error message for better LLM feedback
const warningInfo = result.warning ? `. ${result.warning}` : ''
errors.push({
blockId: selector.blockId,
blockType: selector.blockType,
field: selector.fieldName,
value: selector.value,
error: `Invalid ${selector.selectorType} ID(s): ${result.invalid.join(', ')} - ID(s) do not exist`,
error: `Invalid ${selector.selectorType} ID(s): ${result.invalid.join(', ')} - ID(s) do not exist or user doesn't have access${warningInfo}`,
})
}

if (result.warning) {
} else if (result.warning) {
// Log warnings that don't have errors (shouldn't happen for credentials but may for other selectors)
logger.warn(result.warning, {
blockId: selector.blockId,
fieldName: selector.fieldName,

@@ -39,6 +39,31 @@ export async function validateSelectorIds(
.from(account)
.where(and(inArray(account.id, idsArray), eq(account.userId, context.userId)))
existingIds = results.map((r) => r.id)

// If any IDs are invalid, fetch user's available credentials to include in error message
const existingSet = new Set(existingIds)
const invalidIds = idsArray.filter((id) => !existingSet.has(id))
if (invalidIds.length > 0) {
// Fetch all of the user's credentials to provide helpful feedback
const allUserCredentials = await db
.select({ id: account.id, providerId: account.providerId })
.from(account)
.where(eq(account.userId, context.userId))

const availableCredentials = allUserCredentials
.map((c) => `${c.id} (${c.providerId})`)
.join(', ')
const noCredentialsMessage = 'User has no credentials configured.'

return {
valid: existingIds,
invalid: invalidIds,
warning:
allUserCredentials.length > 0
? `Available credentials for this user: ${availableCredentials}`
: noCredentialsMessage,
}
}
break
}
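
For clarity, the shape the credentials branch returns when IDs are invalid, with placeholder values:

// Illustrative result object (IDs and providers are placeholders).
const example: { valid: string[]; invalid: string[]; warning?: string } = {
  valid: [],
  invalid: ['cred-missing'],
  warning: 'Available credentials for this user: cred-1 (google), cred-2 (slack)',
}
// validateWorkflowSelectorIds appends `warning` to the error text so the LLM
// can pick a real credential on its next attempt.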

@@ -82,10 +82,26 @@ export function formatDateTime(date: Date, timezone?: string): string {
* @returns A formatted date string in the format "MMM D, YYYY"
*/
export function formatDate(date: Date): string {
return date.toLocaleString('en-US', {
return date.toLocaleDateString('en-US', {
year: 'numeric',
month: 'short',
day: 'numeric',
})
}

/**
* Formats a date string to absolute format for tooltip display
* @param dateString - ISO date string to format
* @returns A formatted date string (e.g., "Jan 22, 2026, 01:30 PM")
*/
export function formatAbsoluteDate(dateString: string): string {
const date = new Date(dateString)
return date.toLocaleDateString('en-US', {
year: 'numeric',
month: 'short',
day: 'numeric',
hour: '2-digit',
minute: '2-digit',
})
}
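
Assumed behavior of the two helpers above; exact strings depend on the runtime's locale data and local timezone:

formatDate(new Date('2026-01-22T13:30:00'))  // "Jan 22, 2026"
formatAbsoluteDate('2026-01-22T13:30:00')    // "Jan 22, 2026, 01:30 PM"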

@@ -139,20 +155,24 @@ export function formatCompactTimestamp(iso: string): string {
/**
* Format a duration in milliseconds to a human-readable format
* @param durationMs - The duration in milliseconds
* @param options - Optional formatting options
* @param options.precision - Number of decimal places for seconds (default: 0)
* @returns A formatted duration string
*/
export function formatDuration(durationMs: number): string {
export function formatDuration(durationMs: number, options?: { precision?: number }): string {
const precision = options?.precision ?? 0

if (durationMs < 1000) {
return `${durationMs}ms`
}

const seconds = Math.floor(durationMs / 1000)
const seconds = durationMs / 1000
if (seconds < 60) {
return `${seconds}s`
return precision > 0 ? `${seconds.toFixed(precision)}s` : `${Math.floor(seconds)}s`
}

const minutes = Math.floor(seconds / 60)
const remainingSeconds = seconds % 60
const remainingSeconds = Math.floor(seconds % 60)
if (minutes < 60) {
return `${minutes}m ${remainingSeconds}s`
}
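
Sample outputs implied by the revised implementation:

formatDuration(850)                    // "850ms"
formatDuration(1500)                   // "1s"   (precision defaults to 0, so floored)
formatDuration(1500, { precision: 1 }) // "1.5s"
formatDuration(90_000)                 // "1m 30s"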
@@ -161,3 +181,40 @@ export function formatDuration(durationMs: number): string {
const remainingMinutes = minutes % 60
return `${hours}h ${remainingMinutes}m`
}

/**
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
* @param dateString - ISO date string to format
* @returns A human-readable relative time string
*/
export function formatRelativeTime(dateString: string): string {
const date = new Date(dateString)
const now = new Date()
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)

if (diffInSeconds < 60) {
return 'just now'
}
if (diffInSeconds < 3600) {
const minutes = Math.floor(diffInSeconds / 60)
return `${minutes}m ago`
}
if (diffInSeconds < 86400) {
const hours = Math.floor(diffInSeconds / 3600)
return `${hours}h ago`
}
if (diffInSeconds < 604800) {
const days = Math.floor(diffInSeconds / 86400)
return `${days}d ago`
}
if (diffInSeconds < 2592000) {
const weeks = Math.floor(diffInSeconds / 604800)
return `${weeks}w ago`
}
if (diffInSeconds < 31536000) {
const months = Math.floor(diffInSeconds / 2592000)
return `${months}mo ago`
}
const years = Math.floor(diffInSeconds / 31536000)
return `${years}y ago`
}
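
A short walk-through of the thresholds above; timestamps are built relative to the current time:

const ago = (s: number) => new Date(Date.now() - s * 1000).toISOString()
formatRelativeTime(ago(45))         // "just now" (< 60s)
formatRelativeTime(ago(90))         // "1m ago"   (< 3600s)
formatRelativeTime(ago(9 * 86400))  // "1w ago"   (9 days crosses the 604800s week threshold)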

@@ -130,7 +130,11 @@ async function executeCode(request) {
await jail.set('environmentVariables', new ivm.ExternalCopy(envVars).copyInto())

for (const [key, value] of Object.entries(contextVariables)) {
await jail.set(key, new ivm.ExternalCopy(value).copyInto())
if (value === undefined) {
await jail.set(key, undefined)
} else {
await jail.set(key, new ivm.ExternalCopy(value).copyInto())
}
}

const fetchCallback = new ivm.Reference(async (url, optionsJson) => {

@@ -127,7 +127,6 @@ export async function processDocumentTags(
tagData: DocumentTagData[],
requestId: string
): Promise<ProcessedDocumentTags> {
// Helper to set a tag value with proper typing
const setTagValue = (
tags: ProcessedDocumentTags,
slot: string,
@@ -672,21 +671,16 @@ export async function createDocumentRecords(
tag7?: string
}>,
knowledgeBaseId: string,
requestId: string,
userId?: string
requestId: string
): Promise<DocumentData[]> {
if (userId) {
const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

if (kb.length === 0) {
throw new Error('Knowledge base not found')
}
if (kb.length === 0) {
throw new Error('Knowledge base not found')
}

return await db.transaction(async (tx) => {
@@ -770,16 +764,6 @@ export async function createDocumentRecords(
.update(knowledgeBase)
.set({ updatedAt: now })
.where(eq(knowledgeBase.id, knowledgeBaseId))

if (userId) {
const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)

const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)
}
}

return returnData
@@ -792,7 +776,7 @@ export async function createDocumentRecords(
export async function getDocuments(
knowledgeBaseId: string,
options: {
includeDisabled?: boolean
enabledFilter?: 'all' | 'enabled' | 'disabled'
search?: string
limit?: number
offset?: number
@@ -846,7 +830,7 @@ export async function getDocuments(
}
}> {
const {
includeDisabled = false,
enabledFilter = 'all',
search,
limit = 50,
offset = 0,
@@ -854,26 +838,21 @@ export async function getDocuments(
sortOrder = 'asc',
} = options

// Build where conditions
const whereConditions = [
eq(document.knowledgeBaseId, knowledgeBaseId),
isNull(document.deletedAt),
]

// Filter out disabled documents unless specifically requested
if (!includeDisabled) {
if (enabledFilter === 'enabled') {
whereConditions.push(eq(document.enabled, true))
} else if (enabledFilter === 'disabled') {
whereConditions.push(eq(document.enabled, false))
}

// Add search condition if provided
if (search) {
whereConditions.push(
// Search in filename
sql`LOWER(${document.filename}) LIKE LOWER(${`%${search}%`})`
)
whereConditions.push(sql`LOWER(${document.filename}) LIKE LOWER(${`%${search}%`})`)
}

// Get total count for pagination
const totalResult = await db
.select({ count: sql<number>`COUNT(*)` })
.from(document)
@@ -882,7 +861,6 @@
const total = totalResult[0]?.count || 0
const hasMore = offset + limit < total

// Create dynamic order by clause
const getOrderByColumn = () => {
switch (sortBy) {
case 'filename':
@@ -897,12 +875,13 @@
return document.uploadedAt
case 'processingStatus':
return document.processingStatus
case 'enabled':
return document.enabled
default:
return document.uploadedAt
}
}

// Use stable secondary sort to prevent shifting when primary values are identical
const primaryOrderBy = sortOrder === 'asc' ? asc(getOrderByColumn()) : desc(getOrderByColumn())
const secondaryOrderBy =
sortBy === 'filename' ? desc(document.uploadedAt) : asc(document.filename)
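
A plain-array analogue (illustrative, not from this change) of why the stable tie-breaker matters for pagination:

// Without a secondary key, rows with equal primary values may reorder between
// queries, so paginated results can repeat or drop rows across pages.
type Row = { filename: string; uploadedAt: number }
const rows: Row[] = [
  { filename: 'b.txt', uploadedAt: 2 },
  { filename: 'a.txt', uploadedAt: 2 },
  { filename: 'c.txt', uploadedAt: 1 },
]
rows.sort((x, y) => y.uploadedAt - x.uploadedAt || x.filename.localeCompare(y.filename))
// -> a.txt, b.txt, c.txt : ties on uploadedAt resolve deterministically by filename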
@@ -1021,8 +1000,7 @@ export async function createSingleDocument(
tag7?: string
},
knowledgeBaseId: string,
requestId: string,
userId?: string
requestId: string
): Promise<{
id: string
knowledgeBaseId: string
@@ -1043,24 +1021,19 @@
tag6: string | null
tag7: string | null
}> {
// Check storage limits before creating document
if (userId) {
// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

if (kb.length === 0) {
throw new Error('Knowledge base not found')
}
if (kb.length === 0) {
throw new Error('Knowledge base not found')
}

const documentId = randomUUID()
const now = new Date()

// Process structured tag data if provided
let processedTags: ProcessedDocumentTags = {
// Text tags (7 slots)
tag1: documentData.tag1 ?? null,
@@ -1089,11 +1062,9 @@
try {
const tagData = JSON.parse(documentData.documentTagsData)
if (Array.isArray(tagData)) {
// Process structured tag data and create tag definitions
processedTags = await processDocumentTags(knowledgeBaseId, tagData, requestId)
}
} catch (error) {
// Re-throw validation errors, only catch JSON parse errors
if (error instanceof SyntaxError) {
logger.warn(`[${requestId}] Failed to parse documentTagsData:`, error)
} else {
@@ -1126,15 +1097,6 @@

logger.info(`[${requestId}] Document created: ${documentId} in knowledge base ${knowledgeBaseId}`)

if (userId) {
// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)
}

return newDocument as {
id: string
knowledgeBaseId: string
@@ -1164,8 +1126,7 @@ export async function bulkDocumentOperation(
knowledgeBaseId: string,
operation: 'enable' | 'disable' | 'delete',
documentIds: string[],
requestId: string,
userId?: string
requestId: string
): Promise<{
success: boolean
successCount: number
@@ -1180,7 +1141,6 @@
`[${requestId}] Starting bulk ${operation} operation on ${documentIds.length} documents in knowledge base ${knowledgeBaseId}`
)

// Verify all documents belong to this knowledge base
const documentsToUpdate = await db
.select({
id: document.id,
@@ -1213,24 +1173,6 @@
}>

if (operation === 'delete') {
// Get file sizes before deletion for storage tracking
let totalSize = 0
if (userId) {
const documentsToDelete = await db
.select({ fileSize: document.fileSize })
.from(document)
.where(
and(
eq(document.knowledgeBaseId, knowledgeBaseId),
inArray(document.id, documentIds),
isNull(document.deletedAt)
)
)

totalSize = documentsToDelete.reduce((sum, doc) => sum + doc.fileSize, 0)
}

// Handle bulk soft delete
updateResult = await db
.update(document)
.set({
@@ -1245,7 +1187,6 @@
)
.returning({ id: document.id, deletedAt: document.deletedAt })
} else {
// Handle bulk enable/disable
const enabled = operation === 'enable'

updateResult = await db
@@ -1276,6 +1217,77 @@
}
}

/**
* Perform bulk operations on all documents matching a filter
*/
export async function bulkDocumentOperationByFilter(
knowledgeBaseId: string,
operation: 'enable' | 'disable' | 'delete',
enabledFilter: 'all' | 'enabled' | 'disabled' | undefined,
requestId: string
): Promise<{
success: boolean
successCount: number
updatedDocuments: Array<{
id: string
enabled?: boolean
deletedAt?: Date | null
}>
}> {
logger.info(
`[${requestId}] Starting bulk ${operation} operation on all documents (filter: ${enabledFilter || 'all'}) in knowledge base ${knowledgeBaseId}`
)

const whereConditions = [
eq(document.knowledgeBaseId, knowledgeBaseId),
isNull(document.deletedAt),
]

if (enabledFilter === 'enabled') {
whereConditions.push(eq(document.enabled, true))
} else if (enabledFilter === 'disabled') {
whereConditions.push(eq(document.enabled, false))
}

let updateResult: Array<{
id: string
enabled?: boolean
deletedAt?: Date | null
}>

if (operation === 'delete') {
updateResult = await db
.update(document)
.set({
deletedAt: new Date(),
})
.where(and(...whereConditions))
.returning({ id: document.id, deletedAt: document.deletedAt })
} else {
const enabled = operation === 'enable'

updateResult = await db
.update(document)
.set({
enabled,
})
.where(and(...whereConditions))
.returning({ id: document.id, enabled: document.enabled })
}

const successCount = updateResult.length

logger.info(
`[${requestId}] Bulk ${operation} by filter completed: ${successCount} documents updated in knowledge base ${knowledgeBaseId}`
)

return {
success: true,
successCount,
updatedDocuments: updateResult,
}
}
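
A hedged usage sketch for the filter-based path; the knowledge-base ID and request ID are placeholders:

const result = await bulkDocumentOperationByFilter('kb-123', 'disable', 'enabled', 'req-1')
// Disables every currently-enabled, non-deleted document in the knowledge base
// without the client ever enumerating document IDs.
console.log(result.successCount)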

/**
* Mark a document as failed due to timeout
*/
@@ -1325,7 +1337,6 @@ export async function retryDocumentProcessing(
},
requestId: string
): Promise<{ success: boolean; status: string; message: string }> {
// Fetch KB's chunkingConfig for retry processing
const kb = await db
.select({
chunkingConfig: knowledgeBase.chunkingConfig,
@@ -1336,7 +1347,6 @@

const kbConfig = kb[0].chunkingConfig as { maxSize: number; minSize: number; overlap: number }

// Clear existing embeddings and reset document state
await db.transaction(async (tx) => {
await tx.delete(embedding).where(eq(embedding.documentId, documentId))

@@ -1362,7 +1372,6 @@
chunkOverlap: kbConfig.overlap,
}

// Start processing in the background
processDocumentAsync(knowledgeBaseId, documentId, docData, processingOptions).catch(
(error: unknown) => {
logger.error(`[${requestId}] Background retry processing error:`, error)
@@ -1511,7 +1520,6 @@ export async function updateDocument(
if (updateData.processingError !== undefined)
dbUpdateData.processingError = updateData.processingError

// Helper to convert string values to proper types for the database
const convertTagValue = (
slot: string,
value: string | undefined

@@ -6,6 +6,7 @@ export type DocumentSortField =
| 'chunkCount'
| 'uploadedAt'
| 'processingStatus'
| 'enabled'
export type SortOrder = 'asc' | 'desc'

export interface DocumentSortOptions {

@@ -2,12 +2,9 @@
* Autolayout Constants
*
* Layout algorithm specific constants for spacing, padding, and overlap detection.
* Block dimensions are imported from the shared source: @/lib/workflows/blocks/block-dimensions
* Block dimensions are in @/lib/workflows/blocks/block-dimensions
*/

// Re-export block dimensions for autolayout consumers
export { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'

/**
* Horizontal spacing between layers (columns)
*/

@@ -11,21 +11,6 @@ import type { BlockMetrics, BoundingBox, Edge, GraphNode } from '@/lib/workflows
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
import type { BlockState } from '@/stores/workflows/workflow/types'

// Re-export layout constants for backwards compatibility
export {
CONTAINER_PADDING,
CONTAINER_PADDING_X,
CONTAINER_PADDING_Y,
ROOT_PADDING_X,
ROOT_PADDING_Y,
}

// Re-export block dimensions for backwards compatibility
export const DEFAULT_BLOCK_WIDTH = BLOCK_DIMENSIONS.FIXED_WIDTH
export const DEFAULT_BLOCK_HEIGHT = BLOCK_DIMENSIONS.MIN_HEIGHT
export const DEFAULT_CONTAINER_WIDTH = CONTAINER_DIMENSIONS.DEFAULT_WIDTH
export const DEFAULT_CONTAINER_HEIGHT = CONTAINER_DIMENSIONS.DEFAULT_HEIGHT

/**
* Resolves a potentially undefined numeric value to a fallback
*/

@@ -771,12 +771,50 @@ function deepClone<T>(obj: T): T {
}
}

/**
* Recursively masks credential IDs in any value (string, object, or array).
* Used during serialization to ensure sensitive IDs are never persisted.
*/
function maskCredentialIdsInValue(value: any, credentialIds: Set<string>): any {
if (!value || credentialIds.size === 0) return value

if (typeof value === 'string') {
let masked = value
// Sort by length descending to mask longer IDs first
const sortedIds = Array.from(credentialIds).sort((a, b) => b.length - a.length)
for (const id of sortedIds) {
if (id && masked.includes(id)) {
masked = masked.split(id).join('••••••••')
}
}
return masked
}

if (Array.isArray(value)) {
return value.map((item) => maskCredentialIdsInValue(item, credentialIds))
}

if (typeof value === 'object') {
const masked: any = {}
for (const key of Object.keys(value)) {
masked[key] = maskCredentialIdsInValue(value[key], credentialIds)
}
return masked
}

return value
}
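
An illustration of the recursion above; the credential ID is a placeholder:

const ids = new Set(['cred_abc123']) // placeholder credential ID
const masked = maskCredentialIdsInValue(
  { text: 'connect with cred_abc123', nested: ['ok', 'token cred_abc123'] },
  ids
)
// -> { text: 'connect with ••••••••', nested: ['ok', 'token ••••••••'] }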

/**
* Serializes messages for database storage.
* Deep clones all fields to ensure proper JSON serialization.
* Masks sensitive credential IDs before persisting.
* This ensures they render identically when loaded back.
*/
function serializeMessagesForDB(messages: CopilotMessage[]): any[] {
// Get credential IDs to mask
const credentialIds = useCopilotStore.getState().sensitiveCredentialIds

const result = messages
.map((msg) => {
// Deep clone the entire message to ensure all nested data is serializable
@@ -824,7 +862,8 @@ function serializeMessagesForDB(messages: CopilotMessage[]): any[] {
serialized.errorType = msg.errorType
}

return serialized
// Mask credential IDs in the serialized message before persisting
return maskCredentialIdsInValue(serialized, credentialIds)
})
.filter((msg) => {
// Filter out empty assistant messages
@@ -1320,7 +1359,16 @@ const sseHandlers: Record<string, SSEHandler> = {
typeof def.hasInterrupt === 'function'
? !!def.hasInterrupt(args || {})
: !!def.hasInterrupt
if (!hasInterrupt && typeof def.execute === 'function') {
// Check if tool is auto-allowed - if so, execute even if it has an interrupt
const { autoAllowedTools } = get()
const isAutoAllowed = name ? autoAllowedTools.includes(name) : false
if ((!hasInterrupt || isAutoAllowed) && typeof def.execute === 'function') {
if (isAutoAllowed && hasInterrupt) {
logger.info('[toolCallsById] Auto-executing tool with interrupt (auto-allowed)', {
id,
name,
})
}
const ctx = createExecutionContext({ toolCallId: id, toolName: name || 'unknown_tool' })
// Defer executing transition by a tick to let pending render
setTimeout(() => {
@@ -1426,11 +1474,23 @@ const sseHandlers: Record<string, SSEHandler> = {
logger.warn('tool_call registry auto-exec check failed', { id, name, error: e })
}

// Class-based auto-exec for non-interrupt tools
// Class-based auto-exec for non-interrupt tools or auto-allowed tools
try {
const inst = getClientTool(id) as any
const hasInterrupt = !!inst?.getInterruptDisplays?.()
if (!hasInterrupt && typeof inst?.execute === 'function') {
// Check if tool is auto-allowed - if so, execute even if it has an interrupt
const { autoAllowedTools: classAutoAllowed } = get()
const isClassAutoAllowed = name ? classAutoAllowed.includes(name) : false
if (
(!hasInterrupt || isClassAutoAllowed) &&
(typeof inst?.execute === 'function' || typeof inst?.handleAccept === 'function')
) {
if (isClassAutoAllowed && hasInterrupt) {
logger.info('[toolCallsById] Auto-executing class tool with interrupt (auto-allowed)', {
id,
name,
})
}
setTimeout(() => {
// Guard against duplicate execution - check if already executing or terminal
const currentState = get().toolCallsById[id]?.state
@@ -1449,7 +1509,12 @@ const sseHandlers: Record<string, SSEHandler> = {

Promise.resolve()
.then(async () => {
await inst.execute(args || {})
// Use handleAccept for tools with interrupts, execute for others
if (hasInterrupt && typeof inst?.handleAccept === 'function') {
await inst.handleAccept(args || {})
} else {
await inst.execute(args || {})
}
// Success/error will be synced via registerToolStateSync
})
.catch(() => {
@@ -1474,20 +1539,35 @@
}
} catch {}

// Integration tools: Stay in pending state until user confirms via buttons
// Integration tools: Check auto-allowed or stay in pending state until user confirms
// This handles tools like google_calendar_*, exa_*, gmail_read, etc. that aren't in the client registry
// Only relevant if mode is 'build' (agent)
const { mode, workflowId } = get()
const { mode, workflowId, autoAllowedTools, executeIntegrationTool } = get()
if (mode === 'build' && workflowId) {
// Check if tool was NOT found in client registry
const def = name ? getTool(name) : undefined
const inst = getClientTool(id) as any
if (!def && !inst && name) {
// Integration tools stay in pending state until user confirms
logger.info('[build mode] Integration tool awaiting user confirmation', {
id,
name,
})
// Check if this integration tool is auto-allowed - if so, execute it immediately
if (autoAllowedTools.includes(name)) {
logger.info('[build mode] Auto-executing integration tool (auto-allowed)', { id, name })
// Defer to allow pending state to render briefly
setTimeout(() => {
executeIntegrationTool(id).catch((err) => {
logger.error('[build mode] Auto-execute integration tool failed', {
id,
name,
error: err,
})
})
}, 0)
} else {
// Integration tools stay in pending state until user confirms
logger.info('[build mode] Integration tool awaiting user confirmation', {
id,
name,
})
}
}
}
},
@@ -1976,6 +2056,10 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
}

// Execute client tools in parallel (non-blocking) - same pattern as main tool_call handler
// Check if tool is auto-allowed
const { autoAllowedTools: subAgentAutoAllowed } = get()
const isSubAgentAutoAllowed = name ? subAgentAutoAllowed.includes(name) : false

try {
const def = getTool(name)
if (def) {
@@ -1983,8 +2067,15 @@
typeof def.hasInterrupt === 'function'
? !!def.hasInterrupt(args || {})
: !!def.hasInterrupt
if (!hasInterrupt) {
// Auto-execute tools without interrupts - non-blocking
// Auto-execute if no interrupt OR if auto-allowed
if (!hasInterrupt || isSubAgentAutoAllowed) {
if (isSubAgentAutoAllowed && hasInterrupt) {
logger.info('[SubAgent] Auto-executing tool with interrupt (auto-allowed)', {
id,
name,
})
}
// Auto-execute tools - non-blocking
const ctx = createExecutionContext({ toolCallId: id, toolName: name })
Promise.resolve()
.then(() => def.execute(ctx, args || {}))
@@ -2001,9 +2092,22 @@
const instance = getClientTool(id)
if (instance) {
const hasInterruptDisplays = !!instance.getInterruptDisplays?.()
if (!hasInterruptDisplays) {
// Auto-execute if no interrupt OR if auto-allowed
if (!hasInterruptDisplays || isSubAgentAutoAllowed) {
if (isSubAgentAutoAllowed && hasInterruptDisplays) {
logger.info('[SubAgent] Auto-executing class tool with interrupt (auto-allowed)', {
id,
name,
})
}
Promise.resolve()
.then(() => instance.execute(args || {}))
.then(() => {
// Use handleAccept for tools with interrupts, execute for others
if (hasInterruptDisplays && typeof instance.handleAccept === 'function') {
return instance.handleAccept(args || {})
}
return instance.execute(args || {})
})
.catch((execErr: any) => {
logger.error('[SubAgent] Class tool execution failed', {
id,
@@ -2232,6 +2336,7 @@ const initialState = {
autoAllowedTools: [] as string[],
messageQueue: [] as import('./types').QueuedMessage[],
suppressAbortContinueOption: false,
sensitiveCredentialIds: new Set<string>(),
}

export const useCopilotStore = create<CopilotStore>()(
@@ -2614,6 +2719,12 @@ export const useCopilotStore = create<CopilotStore>()(
}))
}

// Load sensitive credential IDs for masking before streaming starts
await get().loadSensitiveCredentialIds()

// Ensure auto-allowed tools are loaded before tool calls arrive
await get().loadAutoAllowedTools()

let newMessages: CopilotMessage[]
if (revertState) {
const currentMessages = get().messages
@@ -3676,6 +3787,16 @@

const { id, name, params } = toolCall

// Guard against double execution - skip if already executing or in terminal state
if (toolCall.state === ClientToolCallState.executing || isTerminalState(toolCall.state)) {
logger.info('[executeIntegrationTool] Skipping - already executing or terminal', {
id,
name,
state: toolCall.state,
})
return
}

// Set to executing state
const executingMap = { ...get().toolCallsById }
executingMap[id] = {
@@ -3824,6 +3945,46 @@
const data = await res.json()
set({ autoAllowedTools: data.autoAllowedTools || [] })
logger.info('[AutoAllowedTools] Added tool', { toolId })

// Auto-execute all pending tools of the same type
const { toolCallsById, executeIntegrationTool } = get()
const pendingToolCalls = Object.values(toolCallsById).filter(
(tc) => tc.name === toolId && tc.state === ClientToolCallState.pending
)
if (pendingToolCalls.length > 0) {
const isIntegrationTool = !CLASS_TOOL_METADATA[toolId]
logger.info('[AutoAllowedTools] Auto-executing pending tools', {
toolId,
count: pendingToolCalls.length,
isIntegrationTool,
})
for (const tc of pendingToolCalls) {
if (isIntegrationTool) {
// Integration tools use executeIntegrationTool
executeIntegrationTool(tc.id).catch((err) => {
logger.error('[AutoAllowedTools] Auto-execute pending integration tool failed', {
toolCallId: tc.id,
toolId,
error: err,
})
})
} else {
// Client tools with interrupts use handleAccept
const inst = getClientTool(tc.id) as any
if (inst && typeof inst.handleAccept === 'function') {
Promise.resolve()
.then(() => inst.handleAccept(tc.params || {}))
.catch((err: any) => {
logger.error('[AutoAllowedTools] Auto-execute pending client tool failed', {
toolCallId: tc.id,
toolId,
error: err,
})
})
}
}
}
}
}
} catch (err) {
logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err })
@@ -3853,6 +4014,57 @@
return autoAllowedTools.includes(toolId)
},

// Credential masking
loadSensitiveCredentialIds: async () => {
try {
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ toolName: 'get_credentials', payload: {} }),
})
if (!res.ok) {
logger.warn('[loadSensitiveCredentialIds] Failed to fetch credentials', {
status: res.status,
})
return
}
const json = await res.json()
// Credentials are at result.oauth.connected.credentials
const credentials = json?.result?.oauth?.connected?.credentials || []
logger.info('[loadSensitiveCredentialIds] Response', {
hasResult: !!json?.result,
credentialCount: credentials.length,
})
const ids = new Set<string>()
for (const cred of credentials) {
if (cred?.id) {
ids.add(cred.id)
}
}
set({ sensitiveCredentialIds: ids })
logger.info('[loadSensitiveCredentialIds] Loaded credential IDs', {
count: ids.size,
})
} catch (err) {
logger.warn('[loadSensitiveCredentialIds] Error loading credentials', err)
}
},

maskCredentialValue: (value: string) => {
const { sensitiveCredentialIds } = get()
if (!value || sensitiveCredentialIds.size === 0) return value

let masked = value
// Sort by length descending to mask longer IDs first
const sortedIds = Array.from(sensitiveCredentialIds).sort((a, b) => b.length - a.length)
for (const id of sortedIds) {
if (id && masked.includes(id)) {
masked = masked.split(id).join('••••••••')
}
}
return masked
},

// Message queue actions
addToQueue: (message, options) => {
const queuedMessage: import('./types').QueuedMessage = {

@@ -156,6 +156,9 @@ export interface CopilotState {

// Message queue for messages sent while another is in progress
messageQueue: QueuedMessage[]

// Credential IDs to mask in UI (for sensitive data protection)
sensitiveCredentialIds: Set<string>
}

export interface CopilotActions {
@@ -235,6 +238,10 @@ export interface CopilotActions {
removeAutoAllowedTool: (toolId: string) => Promise<void>
isToolAutoAllowed: (toolId: string) => boolean

// Credential masking
loadSensitiveCredentialIds: () => Promise<void>
maskCredentialValue: (value: string) => string

// Message queue actions
addToQueue: (
message: string,

@@ -56,6 +56,7 @@ describe('Function Execute Tool', () => {
workflowVariables: {},
blockData: {},
blockNameMapping: {},
blockOutputSchemas: {},
isCustomTool: false,
language: 'javascript',
timeout: 5000,
@@ -83,6 +84,7 @@ describe('Function Execute Tool', () => {
workflowVariables: {},
blockData: {},
blockNameMapping: {},
blockOutputSchemas: {},
isCustomTool: false,
language: 'javascript',
workflowId: undefined,
@@ -101,6 +103,7 @@ describe('Function Execute Tool', () => {
workflowVariables: {},
blockData: {},
blockNameMapping: {},
blockOutputSchemas: {},
isCustomTool: false,
language: 'javascript',
workflowId: undefined,

@@ -53,6 +53,13 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
description: 'Mapping of block names to block IDs',
default: {},
},
blockOutputSchemas: {
type: 'object',
required: false,
visibility: 'hidden',
description: 'Mapping of block IDs to their output schemas for validation',
default: {},
},
workflowVariables: {
type: 'object',
required: false,
@@ -81,6 +88,7 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
workflowVariables: params.workflowVariables || {},
blockData: params.blockData || {},
blockNameMapping: params.blockNameMapping || {},
blockOutputSchemas: params.blockOutputSchemas || {},
workflowId: params._context?.workflowId,
isCustomTool: params.isCustomTool || false,
}
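
A hedged sketch of the request body this tool config produces; the endpoint path, block IDs, and schema payload shape are assumptions for illustration:

const body = {
  code: 'return 1',
  blockData: { 'block-1': { value: 42 } },                // hypothetical block output
  blockNameMapping: { myblock: 'block-1' },               // hypothetical name -> ID
  blockOutputSchemas: { 'block-1': { value: 'number' } }, // hypothetical schema, keyed by block ID
}
await fetch('/api/function/execute', {                    // assumed endpoint path
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body),
})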

@@ -11,6 +11,7 @@ export interface CodeExecutionInput {
workflowVariables?: Record<string, unknown>
blockData?: Record<string, unknown>
blockNameMapping?: Record<string, string>
blockOutputSchemas?: Record<string, Record<string, unknown>>
_context?: {
workflowId?: string
}

@@ -110,12 +110,22 @@ spec:
{{- end }}
{{- include "sim.resources" .Values.app | nindent 10 }}
{{- include "sim.securityContext" .Values.app | nindent 10 }}
{{- with .Values.extraVolumeMounts }}
{{- if or .Values.extraVolumeMounts .Values.app.extraVolumeMounts }}
volumeMounts:
{{- with .Values.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with .Values.app.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- with .Values.extraVolumes }}
{{- if or .Values.extraVolumes .Values.app.extraVolumes }}
volumes:
{{- with .Values.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.app.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }}
{{- end }}
@@ -92,6 +92,7 @@ spec:
{{- toYaml .Values.ollama.readinessProbe | nindent 12 }}
{{- end }}
{{- include "sim.resources" .Values.ollama | nindent 10 }}
{{- if or .Values.ollama.persistence.enabled .Values.extraVolumeMounts .Values.ollama.extraVolumeMounts }}
volumeMounts:
{{- if .Values.ollama.persistence.enabled }}
- name: ollama-data
@@ -100,13 +101,22 @@ spec:
{{- with .Values.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- if .Values.ollama.persistence.enabled }}
{{- with .Values.ollama.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- if or .Values.ollama.persistence.enabled .Values.extraVolumes .Values.ollama.extraVolumes }}
volumes:
{{- if .Values.ollama.persistence.enabled }}
- name: ollama-data
persistentVolumeClaim:
claimName: {{ include "sim.fullname" . }}-ollama-data
{{- end }}
{{- with .Values.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.ollama.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }}
{{- end }}
@@ -84,12 +84,22 @@ spec:
{{- end }}
{{- include "sim.resources" .Values.realtime | nindent 10 }}
{{- include "sim.securityContext" .Values.realtime | nindent 10 }}
{{- with .Values.extraVolumeMounts }}
{{- if or .Values.extraVolumeMounts .Values.realtime.extraVolumeMounts }}
volumeMounts:
{{- with .Values.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with .Values.realtime.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- with .Values.extraVolumes }}
{{- if or .Values.extraVolumes .Values.realtime.extraVolumes }}
volumes:
{{- with .Values.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.realtime.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }}
{{- end }}
@@ -224,6 +224,10 @@ app:
timeoutSeconds: 5
failureThreshold: 3

# Additional volumes for app deployment (e.g., branding assets, custom configs)
extraVolumes: []
extraVolumeMounts: []

# Realtime socket server configuration
realtime:
# Enable/disable the realtime service
@@ -301,6 +305,10 @@ realtime:
timeoutSeconds: 5
failureThreshold: 3

# Additional volumes for realtime deployment
extraVolumes: []
extraVolumeMounts: []

# Database migrations job configuration
migrations:
# Enable/disable migrations job
@@ -539,6 +547,10 @@ ollama:
timeoutSeconds: 5
failureThreshold: 3

# Additional volumes for ollama deployment
extraVolumes: []
extraVolumeMounts: []

# Ingress configuration
ingress:
# Enable/disable ingress