Compare commits

3 Commits

feat/blog ... fix/copilo

| Author | SHA1 | Date |
|---|---|---|
|  | 528d8e7729 |  |
|  | 04a6f9d0a4 |  |
|  | 76dd4a0c95 |  |
```diff
@@ -1,27 +0,0 @@
-'use client'
-
-import { useState } from 'react'
-import { ArrowLeft, ChevronLeft } from 'lucide-react'
-import Link from 'next/link'
-
-export function BackLink() {
-  const [isHovered, setIsHovered] = useState(false)
-
-  return (
-    <Link
-      href='/studio'
-      className='group flex items-center gap-1 text-gray-600 text-sm hover:text-gray-900'
-      onMouseEnter={() => setIsHovered(true)}
-      onMouseLeave={() => setIsHovered(false)}
-    >
-      <span className='group-hover:-translate-x-0.5 inline-flex transition-transform duration-200'>
-        {isHovered ? (
-          <ArrowLeft className='h-4 w-4' aria-hidden='true' />
-        ) : (
-          <ChevronLeft className='h-4 w-4' aria-hidden='true' />
-        )}
-      </span>
-      Back to Sim Studio
-    </Link>
-  )
-}
```
```diff
@@ -5,10 +5,7 @@ import { Avatar, AvatarFallback, AvatarImage } from '@/components/emcn'
 import { FAQ } from '@/lib/blog/faq'
 import { getAllPostMeta, getPostBySlug, getRelatedPosts } from '@/lib/blog/registry'
 import { buildArticleJsonLd, buildBreadcrumbJsonLd, buildPostMetadata } from '@/lib/blog/seo'
-import { getBaseUrl } from '@/lib/core/utils/urls'
 import { soehne } from '@/app/_styles/fonts/soehne/soehne'
-import { BackLink } from '@/app/(landing)/studio/[slug]/back-link'
-import { ShareButton } from '@/app/(landing)/studio/[slug]/share-button'

 export async function generateStaticParams() {
   const posts = await getAllPostMeta()
@@ -51,7 +48,9 @@ export default async function Page({ params }: { params: Promise<{ slug: string
       />
       <header className='mx-auto max-w-[1450px] px-6 pt-8 sm:px-8 sm:pt-12 md:px-12 md:pt-16'>
         <div className='mb-6'>
-          <BackLink />
+          <Link href='/studio' className='text-gray-600 text-sm hover:text-gray-900'>
+            ← Back to Sim Studio
+          </Link>
         </div>
         <div className='flex flex-col gap-8 md:flex-row md:gap-12'>
           <div className='w-full flex-shrink-0 md:w-[450px]'>
@@ -76,31 +75,28 @@ export default async function Page({ params }: { params: Promise<{ slug: string
         >
           {post.title}
         </h1>
-        <div className='mt-4 flex items-center justify-between'>
-          <div className='flex items-center gap-3'>
-            {(post.authors || [post.author]).map((a, idx) => (
-              <div key={idx} className='flex items-center gap-2'>
-                {a?.avatarUrl ? (
-                  <Avatar className='size-6'>
-                    <AvatarImage src={a.avatarUrl} alt={a.name} />
-                    <AvatarFallback>{a.name.slice(0, 2)}</AvatarFallback>
-                  </Avatar>
-                ) : null}
-                <Link
-                  href={a?.url || '#'}
-                  target='_blank'
-                  rel='noopener noreferrer author'
-                  className='text-[14px] text-gray-600 leading-[1.5] hover:text-gray-900 sm:text-[16px]'
-                  itemProp='author'
-                  itemScope
-                  itemType='https://schema.org/Person'
-                >
-                  <span itemProp='name'>{a?.name}</span>
-                </Link>
-              </div>
-            ))}
-          </div>
-          <ShareButton url={`${getBaseUrl()}/studio/${slug}`} title={post.title} />
-        </div>
+        <div className='mt-4 flex items-center gap-3'>
+          {(post.authors || [post.author]).map((a, idx) => (
+            <div key={idx} className='flex items-center gap-2'>
+              {a?.avatarUrl ? (
+                <Avatar className='size-6'>
+                  <AvatarImage src={a.avatarUrl} alt={a.name} />
+                  <AvatarFallback>{a.name.slice(0, 2)}</AvatarFallback>
+                </Avatar>
+              ) : null}
+              <Link
+                href={a?.url || '#'}
+                target='_blank'
+                rel='noopener noreferrer author'
+                className='text-[14px] text-gray-600 leading-[1.5] hover:text-gray-900 sm:text-[16px]'
+                itemProp='author'
+                itemScope
+                itemType='https://schema.org/Person'
+              >
+                <span itemProp='name'>{a?.name}</span>
+              </Link>
+            </div>
+          ))}
+        </div>
       </div>
     </div>
```
```diff
@@ -1,65 +0,0 @@
-'use client'
-
-import { useState } from 'react'
-import { Share2 } from 'lucide-react'
-import { Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
-
-interface ShareButtonProps {
-  url: string
-  title: string
-}
-
-export function ShareButton({ url, title }: ShareButtonProps) {
-  const [open, setOpen] = useState(false)
-  const [copied, setCopied] = useState(false)
-
-  const handleCopyLink = async () => {
-    try {
-      await navigator.clipboard.writeText(url)
-      setCopied(true)
-      setTimeout(() => {
-        setCopied(false)
-        setOpen(false)
-      }, 1000)
-    } catch {
-      setOpen(false)
-    }
-  }
-
-  const handleShareTwitter = () => {
-    const tweetUrl = `https://twitter.com/intent/tweet?url=${encodeURIComponent(url)}&text=${encodeURIComponent(title)}`
-    window.open(tweetUrl, '_blank', 'noopener,noreferrer')
-    setOpen(false)
-  }
-
-  const handleShareLinkedIn = () => {
-    const linkedInUrl = `https://www.linkedin.com/sharing/share-offsite/?url=${encodeURIComponent(url)}`
-    window.open(linkedInUrl, '_blank', 'noopener,noreferrer')
-    setOpen(false)
-  }
-
-  return (
-    <Popover
-      open={open}
-      onOpenChange={setOpen}
-      variant='secondary'
-      size='sm'
-      colorScheme='inverted'
-    >
-      <PopoverTrigger asChild>
-        <button
-          className='flex items-center gap-1.5 text-gray-600 text-sm hover:text-gray-900'
-          aria-label='Share this post'
-        >
-          <Share2 className='h-4 w-4' />
-          <span>Share</span>
-        </button>
-      </PopoverTrigger>
-      <PopoverContent align='end' minWidth={140}>
-        <PopoverItem onClick={handleCopyLink}>{copied ? 'Copied!' : 'Copy link'}</PopoverItem>
-        <PopoverItem onClick={handleShareTwitter}>Share on X</PopoverItem>
-        <PopoverItem onClick={handleShareLinkedIn}>Share on LinkedIn</PopoverItem>
-      </PopoverContent>
-    </Popover>
-  )
-}
```
```diff
@@ -313,7 +313,7 @@ describe('Function Execute API Route', () => {
         'block-2': 'world',
       },
       blockNameMapping: {
-        validvar: 'block-1',
+        validVar: 'block-1',
         another_valid: 'block-2',
       },
     })
@@ -539,7 +539,7 @@ describe('Function Execute API Route', () => {
         'block-complex': complexData,
       },
       blockNameMapping: {
-        complexdata: 'block-complex',
+        complexData: 'block-complex',
      },
    })
```
```diff
@@ -6,11 +6,11 @@ import { executeInE2B } from '@/lib/execution/e2b'
 import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
 import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
 import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
-import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
 import {
   createEnvVarPattern,
   createWorkflowVariablePattern,
 } from '@/executor/utils/reference-validation'
+import { navigatePath } from '@/executor/variables/resolvers/reference'

 export const dynamic = 'force-dynamic'
 export const runtime = 'nodejs'
```
```diff
@@ -470,17 +470,14 @@ function resolveEnvironmentVariables(

 function resolveTagVariables(
   code: string,
-  blockData: Record<string, unknown>,
+  blockData: Record<string, any>,
   blockNameMapping: Record<string, string>,
-  blockOutputSchemas: Record<string, OutputSchema>,
-  contextVariables: Record<string, unknown>,
-  language = 'javascript'
+  contextVariables: Record<string, any>
 ): string {
   let resolvedCode = code
-  const undefinedLiteral = language === 'python' ? 'None' : 'undefined'

   const tagPattern = new RegExp(
-    `${REFERENCE.START}([a-zA-Z_](?:[a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])?)${REFERENCE.END}`,
+    `${REFERENCE.START}([a-zA-Z_][a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])${REFERENCE.END}`,
     'g'
   )
   const tagMatches = resolvedCode.match(tagPattern) || []
```
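Note on the two tag patterns above: the version with the optional `(?:…)?` group also accepts single-character tag names, while the other requires at least two characters. A minimal sketch, assuming `REFERENCE.START`/`REFERENCE.END` are `<`/`>` and `PATH_DELIMITER` is `.` (those constants live in `@/executor/constants` and are not shown in this diff):

```ts
// Assumed delimiters: '<' ... '>' with '.' as the path separator.
const twoCharMin = /<([a-zA-Z_][a-zA-Z0-9_.]*[a-zA-Z0-9_])>/g
const oneCharOk = /<([a-zA-Z_](?:[a-zA-Z0-9_.]*[a-zA-Z0-9_])?)>/g

const code = 'return <a> + <ab> + <block.field>'
code.match(twoCharMin) // ['<ab>', '<block.field>'] — '<a>' is not treated as a tag
code.match(oneCharOk)  // ['<a>', '<ab>', '<block.field>']
```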
@@ -489,37 +486,41 @@ function resolveTagVariables(
|
||||
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
|
||||
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
|
||||
const blockName = pathParts[0]
|
||||
const fieldPath = pathParts.slice(1)
|
||||
|
||||
const result = resolveBlockReference(blockName, fieldPath, {
|
||||
blockNameMapping,
|
||||
blockData,
|
||||
blockOutputSchemas,
|
||||
})
|
||||
|
||||
if (!result) {
|
||||
const blockId = blockNameMapping[blockName]
|
||||
if (!blockId) {
|
||||
continue
|
||||
}
|
||||
|
||||
let tagValue = result.value
|
||||
const blockOutput = blockData[blockId]
|
||||
if (blockOutput === undefined) {
|
||||
continue
|
||||
}
|
||||
|
||||
let tagValue: any
|
||||
if (pathParts.length === 1) {
|
||||
tagValue = blockOutput
|
||||
} else {
|
||||
tagValue = navigatePath(blockOutput, pathParts.slice(1))
|
||||
}
|
||||
|
||||
if (tagValue === undefined) {
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), undefinedLiteral)
|
||||
continue
|
||||
}
|
||||
|
||||
if (typeof tagValue === 'string') {
|
||||
const trimmed = tagValue.trimStart()
|
||||
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||
try {
|
||||
tagValue = JSON.parse(tagValue)
|
||||
} catch {
|
||||
// Keep as string if not valid JSON
|
||||
}
|
||||
if (
|
||||
typeof tagValue === 'string' &&
|
||||
tagValue.length > 100 &&
|
||||
(tagValue.startsWith('{') || tagValue.startsWith('['))
|
||||
) {
|
||||
try {
|
||||
tagValue = JSON.parse(tagValue)
|
||||
} catch {
|
||||
// Keep as-is
|
||||
}
|
||||
}
|
||||
|
||||
const safeVarName = `__tag_${tagName.replace(/_/g, '_1').replace(/\./g, '_0')}`
|
||||
const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
|
||||
contextVariables[safeVarName] = tagValue
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
|
||||
}
|
||||
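The two `safeVarName` schemes differ in collision behavior. A quick sketch (the `__tag_` prefix is from the diff; the inputs are made-up examples):

```ts
// Escape-style encoding: '_' -> '_1', '.' -> '_0'. Injective, so distinct
// tag names can never map to the same generated identifier.
const escape = (tag: string) => `__tag_${tag.replace(/_/g, '_1').replace(/\./g, '_0')}`
escape('agent.output') // '__tag_agent_0output'
escape('agent_output') // '__tag_agent_1output'

// Lossy encoding: every non-word character becomes '_', so 'agent.output'
// and 'agent_output' collide on the same variable name.
const lossy = (tag: string) => `__tag_${tag.replace(/[^a-zA-Z0-9_]/g, '_')}`
lossy('agent.output') // '__tag_agent_output'
lossy('agent_output') // '__tag_agent_output' — collision
```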
```diff
@@ -536,27 +537,18 @@ function resolveTagVariables(
  */
 function resolveCodeVariables(
   code: string,
-  params: Record<string, unknown>,
+  params: Record<string, any>,
   envVars: Record<string, string> = {},
-  blockData: Record<string, unknown> = {},
+  blockData: Record<string, any> = {},
   blockNameMapping: Record<string, string> = {},
-  blockOutputSchemas: Record<string, OutputSchema> = {},
-  workflowVariables: Record<string, unknown> = {},
-  language = 'javascript'
-): { resolvedCode: string; contextVariables: Record<string, unknown> } {
+  workflowVariables: Record<string, any> = {}
+): { resolvedCode: string; contextVariables: Record<string, any> } {
   let resolvedCode = code
-  const contextVariables: Record<string, unknown> = {}
+  const contextVariables: Record<string, any> = {}

   resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
   resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
-  resolvedCode = resolveTagVariables(
-    resolvedCode,
-    blockData,
-    blockNameMapping,
-    blockOutputSchemas,
-    contextVariables,
-    language
-  )
+  resolvedCode = resolveTagVariables(resolvedCode, blockData, blockNameMapping, contextVariables)

   return { resolvedCode, contextVariables }
 }
```
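A hypothetical end-to-end call against the shorter (right-hand) signature, to show the data flow; the `<block.field>` tag syntax and `navigatePath` walking object keys are assumptions based on the surrounding code:

```ts
// Hypothetical inputs for illustration only.
const { resolvedCode, contextVariables } = resolveCodeVariables(
  'return <agent.output>',       // user code containing one block tag
  {},                            // params
  {},                            // envVars
  { 'block-1': { output: 42 } }, // blockData, keyed by block id
  { agent: 'block-1' },          // blockNameMapping: block name -> block id
  {}                             // workflowVariables
)
// Expected: resolvedCode === 'return __tag_agent_output'
// and contextVariables === { __tag_agent_output: 42 }, later emitted as a
// `const __tag_agent_output = JSON.parse("42");` prologue line.
```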
```diff
@@ -593,7 +585,6 @@ export async function POST(req: NextRequest) {
       envVars = {},
       blockData = {},
       blockNameMapping = {},
-      blockOutputSchemas = {},
       workflowVariables = {},
       workflowId,
       isCustomTool = false,
```
```diff
@@ -610,21 +601,20 @@ export async function POST(req: NextRequest) {
       isCustomTool,
     })

-    const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
-
     // Resolve variables in the code with workflow environment variables
     const codeResolution = resolveCodeVariables(
       code,
       executionParams,
       envVars,
       blockData,
       blockNameMapping,
-      blockOutputSchemas,
-      workflowVariables,
-      lang
+      workflowVariables
     )
     resolvedCode = codeResolution.resolvedCode
     const contextVariables = codeResolution.contextVariables

+    const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
+
     let jsImports = ''
     let jsRemainingCode = resolvedCode
     let hasImports = false
```
```diff
@@ -680,11 +670,7 @@ export async function POST(req: NextRequest) {
       prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
       prologueLineCount++
       for (const [k, v] of Object.entries(contextVariables)) {
-        if (v === undefined) {
-          prologue += `const ${k} = undefined;\n`
-        } else {
-          prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
-        }
+        prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
         prologueLineCount++
       }
```
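The `undefined` special case matters because `JSON.stringify(undefined)` produces the value `undefined` rather than a JSON string, so the interpolated prologue line would read `const k = JSON.parse(undefined);` and throw at runtime:

```ts
// Defined values round-trip cleanly through the double stringify:
JSON.stringify(JSON.stringify({ a: 1 })) // '"{\\"a\\":1}"'

// undefined does not — the template interpolates the literal text `undefined`:
JSON.stringify(undefined)      // undefined (the value, not a string)
JSON.parse(undefined as never) // SyntaxError (exact message varies by engine)
```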
```diff
@@ -755,11 +741,7 @@ export async function POST(req: NextRequest) {
       prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
       prologueLineCount++
       for (const [k, v] of Object.entries(contextVariables)) {
-        if (v === undefined) {
-          prologue += `${k} = None\n`
-        } else {
-          prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
-        }
+        prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
         prologueLineCount++
       }
       const wrapped = [
```
```diff
@@ -157,7 +157,7 @@ describe('Knowledge Base Documents API Route', () => {
       expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
         'kb-123',
         {
-          enabledFilter: undefined,
+          includeDisabled: false,
           search: undefined,
           limit: 50,
           offset: 0,
@@ -166,7 +166,7 @@ describe('Knowledge Base Documents API Route', () => {
       )
     })

-    it('should return documents with default filter', async () => {
+    it('should filter disabled documents by default', async () => {
       const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
       const { getDocuments } = await import('@/lib/knowledge/documents/service')

@@ -194,7 +194,7 @@ describe('Knowledge Base Documents API Route', () => {
       expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
         'kb-123',
         {
-          enabledFilter: undefined,
+          includeDisabled: false,
           search: undefined,
           limit: 50,
           offset: 0,
@@ -203,7 +203,7 @@ describe('Knowledge Base Documents API Route', () => {
       )
     })

-    it('should filter documents by enabled status when requested', async () => {
+    it('should include disabled documents when requested', async () => {
       const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
       const { getDocuments } = await import('@/lib/knowledge/documents/service')

@@ -223,7 +223,7 @@ describe('Knowledge Base Documents API Route', () => {
        },
      })

-      const url = 'http://localhost:3000/api/knowledge/kb-123/documents?enabledFilter=disabled'
+      const url = 'http://localhost:3000/api/knowledge/kb-123/documents?includeDisabled=true'
      const req = new Request(url, { method: 'GET' }) as any

      const { GET } = await import('@/app/api/knowledge/[id]/documents/route')
@@ -233,7 +233,7 @@ describe('Knowledge Base Documents API Route', () => {
       expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
         'kb-123',
         {
-          enabledFilter: 'disabled',
+          includeDisabled: true,
           search: undefined,
           limit: 50,
           offset: 0,
@@ -361,7 +361,8 @@ describe('Knowledge Base Documents API Route', () => {
       expect(vi.mocked(createSingleDocument)).toHaveBeenCalledWith(
         validDocumentData,
         'kb-123',
-        expect.any(String)
+        expect.any(String),
+        'user-123'
       )
     })

@@ -469,7 +470,8 @@ describe('Knowledge Base Documents API Route', () => {
       expect(vi.mocked(createDocumentRecords)).toHaveBeenCalledWith(
         validBulkData.documents,
         'kb-123',
-        expect.any(String)
+        expect.any(String),
+        'user-123'
       )
       expect(vi.mocked(processDocumentsWithQueue)).toHaveBeenCalled()
     })
```
```diff
@@ -5,7 +5,6 @@ import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import {
   bulkDocumentOperation,
-  bulkDocumentOperationByFilter,
   createDocumentRecords,
   createSingleDocument,
   getDocuments,
@@ -58,20 +57,13 @@ const BulkCreateDocumentsSchema = z.object({
   bulk: z.literal(true),
 })

-const BulkUpdateDocumentsSchema = z
-  .object({
-    operation: z.enum(['enable', 'disable', 'delete']),
-    documentIds: z
-      .array(z.string())
-      .min(1, 'At least one document ID is required')
-      .max(100, 'Cannot operate on more than 100 documents at once')
-      .optional(),
-    selectAll: z.boolean().optional(),
-    enabledFilter: z.enum(['all', 'enabled', 'disabled']).optional(),
-  })
-  .refine((data) => data.selectAll || (data.documentIds && data.documentIds.length > 0), {
-    message: 'Either selectAll must be true or documentIds must be provided',
-  })
+const BulkUpdateDocumentsSchema = z.object({
+  operation: z.enum(['enable', 'disable', 'delete']),
+  documentIds: z
+    .array(z.string())
+    .min(1, 'At least one document ID is required')
+    .max(100, 'Cannot operate on more than 100 documents at once'),
+})

 export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   const requestId = randomUUID().slice(0, 8)
```
```diff
@@ -98,17 +90,14 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
   }

   const url = new URL(req.url)
-  const enabledFilter = url.searchParams.get('enabledFilter') as
-    | 'all'
-    | 'enabled'
-    | 'disabled'
-    | null
+  const includeDisabled = url.searchParams.get('includeDisabled') === 'true'
   const search = url.searchParams.get('search') || undefined
   const limit = Number.parseInt(url.searchParams.get('limit') || '50')
   const offset = Number.parseInt(url.searchParams.get('offset') || '0')
   const sortByParam = url.searchParams.get('sortBy')
   const sortOrderParam = url.searchParams.get('sortOrder')

   // Validate sort parameters
   const validSortFields: DocumentSortField[] = [
     'filename',
     'fileSize',
@@ -116,7 +105,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
     'chunkCount',
     'uploadedAt',
     'processingStatus',
-    'enabled',
   ]
   const validSortOrders: SortOrder[] = ['asc', 'desc']

@@ -132,7 +120,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
     const result = await getDocuments(
       knowledgeBaseId,
       {
-        enabledFilter: enabledFilter || undefined,
+        includeDisabled,
         search,
         limit,
         offset,
```
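The two query contracts, as hypothetical requests against this endpoint:

```ts
// Ternary filter: ?enabledFilter=all|enabled|disabled (omitted -> no filter)
await fetch('/api/knowledge/kb-123/documents?enabledFilter=disabled')

// Boolean flag: disabled documents are excluded unless explicitly requested
await fetch('/api/knowledge/kb-123/documents?includeDisabled=true')
```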
```diff
@@ -202,7 +190,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
         const createdDocuments = await createDocumentRecords(
           validatedData.documents,
           knowledgeBaseId,
-          requestId
+          requestId,
+          userId
         )

         logger.info(
@@ -261,10 +250,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
           throw validationError
         }
       } else {
         // Handle single document creation
         try {
           const validatedData = CreateDocumentSchema.parse(body)

-          const newDocument = await createSingleDocument(validatedData, knowledgeBaseId, requestId)
+          const newDocument = await createSingleDocument(
+            validatedData,
+            knowledgeBaseId,
+            requestId,
+            userId
+          )

           try {
             const { PlatformEvents } = await import('@/lib/core/telemetry')
@@ -299,6 +294,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
     } catch (error) {
       logger.error(`[${requestId}] Error creating document`, error)

+      // Check if it's a storage limit error
       const errorMessage = error instanceof Error ? error.message : 'Failed to create document'
       const isStorageLimitError =
         errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
@@ -335,22 +331,16 @@ export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id

   try {
     const validatedData = BulkUpdateDocumentsSchema.parse(body)
-    const { operation, documentIds, selectAll, enabledFilter } = validatedData
+    const { operation, documentIds } = validatedData

     try {
-      let result
-      if (selectAll) {
-        result = await bulkDocumentOperationByFilter(
-          knowledgeBaseId,
-          operation,
-          enabledFilter,
-          requestId
-        )
-      } else if (documentIds && documentIds.length > 0) {
-        result = await bulkDocumentOperation(knowledgeBaseId, operation, documentIds, requestId)
-      } else {
-        return NextResponse.json({ error: 'No documents specified' }, { status: 400 })
-      }
+      const result = await bulkDocumentOperation(
+        knowledgeBaseId,
+        operation,
+        documentIds,
+        requestId,
+        session.user.id
+      )

       return NextResponse.json({
         success: true,
```
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
|
||||
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
|
||||
const [tokenizerOn, setTokenizerOn] = useState(false)
|
||||
const [hoveredTokenIndex, setHoveredTokenIndex] = useState<number | null>(null)
|
||||
const textareaRef = useRef<HTMLTextAreaElement>(null)
|
||||
|
||||
const error = mutationError?.message ?? null
|
||||
@@ -255,8 +254,6 @@ export function EditChunkModal({
|
||||
style={{
|
||||
backgroundColor: getTokenBgColor(index),
|
||||
}}
|
||||
onMouseEnter={() => setHoveredTokenIndex(index)}
|
||||
onMouseLeave={() => setHoveredTokenIndex(null)}
|
||||
>
|
||||
{token}
|
||||
</span>
|
||||
@@ -284,11 +281,6 @@ export function EditChunkModal({
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>Tokenizer</span>
|
||||
<Switch checked={tokenizerOn} onCheckedChange={setTokenizerOn} />
|
||||
{tokenizerOn && hoveredTokenIndex !== null && (
|
||||
<span className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
Token #{hoveredTokenIndex + 1}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>
|
||||
{tokenCount.toLocaleString()}
|
||||
|
||||
```diff
@@ -36,7 +36,6 @@ import {
 import { Input } from '@/components/ui/input'
 import { SearchHighlight } from '@/components/ui/search-highlight'
 import { Skeleton } from '@/components/ui/skeleton'
-import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
 import type { ChunkData } from '@/lib/knowledge/types'
 import {
   ChunkContextMenu,
@@ -59,6 +58,55 @@ import {

 const logger = createLogger('Document')

+/**
+ * Formats a date string to relative time (e.g., "2h ago", "3d ago")
+ */
+function formatRelativeTime(dateString: string): string {
+  const date = new Date(dateString)
+  const now = new Date()
+  const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
+
+  if (diffInSeconds < 60) {
+    return 'just now'
+  }
+  if (diffInSeconds < 3600) {
+    const minutes = Math.floor(diffInSeconds / 60)
+    return `${minutes}m ago`
+  }
+  if (diffInSeconds < 86400) {
+    const hours = Math.floor(diffInSeconds / 3600)
+    return `${hours}h ago`
+  }
+  if (diffInSeconds < 604800) {
+    const days = Math.floor(diffInSeconds / 86400)
+    return `${days}d ago`
+  }
+  if (diffInSeconds < 2592000) {
+    const weeks = Math.floor(diffInSeconds / 604800)
+    return `${weeks}w ago`
+  }
+  if (diffInSeconds < 31536000) {
+    const months = Math.floor(diffInSeconds / 2592000)
+    return `${months}mo ago`
+  }
+  const years = Math.floor(diffInSeconds / 31536000)
+  return `${years}y ago`
+}
+
+/**
+ * Formats a date string to absolute format for tooltip display
+ */
+function formatAbsoluteDate(dateString: string): string {
+  const date = new Date(dateString)
+  return date.toLocaleDateString('en-US', {
+    year: 'numeric',
+    month: 'short',
+    day: 'numeric',
+    hour: '2-digit',
+    minute: '2-digit',
+  })
+}
+
 interface DocumentProps {
   knowledgeBaseId: string
   documentId: string
```
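What the local `formatRelativeTime` produces at each threshold (values checked against the branches above):

```ts
const ago = (seconds: number) =>
  formatRelativeTime(new Date(Date.now() - seconds * 1000).toISOString())

ago(30)         // 'just now'
ago(90)         // '1m ago'
ago(7_200)      // '2h ago'
ago(259_200)    // '3d ago'
ago(1_209_600)  // '2w ago'
ago(5_184_000)  // '2mo ago'
ago(63_072_000) // '2y ago'
```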
```diff
@@ -256,6 +304,7 @@ export function Document({

   const [searchQuery, setSearchQuery] = useState('')
   const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
+  const [isSearching, setIsSearching] = useState(false)

   const {
     chunks: initialChunks,
@@ -295,6 +344,7 @@ export function Document({
     const handler = setTimeout(() => {
       startTransition(() => {
         setDebouncedSearchQuery(searchQuery)
+        setIsSearching(searchQuery.trim().length > 0)
       })
     }, 200)

@@ -303,7 +353,6 @@ export function Document({
     }
   }, [searchQuery])

-  const isSearching = debouncedSearchQuery.trim().length > 0
   const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0
   const SEARCH_PAGE_SIZE = 50
   const maxSearchPages = Math.ceil(searchResults.length / SEARCH_PAGE_SIZE)
```
```diff
@@ -27,10 +27,6 @@ import {
   ModalContent,
   ModalFooter,
   ModalHeader,
-  Popover,
-  PopoverContent,
-  PopoverItem,
-  PopoverTrigger,
   Table,
   TableBody,
   TableCell,
@@ -44,11 +40,8 @@ import { Input } from '@/components/ui/input'
 import { SearchHighlight } from '@/components/ui/search-highlight'
 import { Skeleton } from '@/components/ui/skeleton'
 import { cn } from '@/lib/core/utils/cn'
-import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
-import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowledge/constants'
 import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
 import type { DocumentData } from '@/lib/knowledge/types'
-import { formatFileSize } from '@/lib/uploads/utils/file-utils'
 import {
   ActionBar,
   AddDocumentsModal,
@@ -196,8 +189,8 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
         </div>
       </div>

-      <div>
-        <Skeleton className='mt-[4px] h-[21px] w-[300px] rounded-[4px]' />
+      <div className='mt-[4px]'>
+        <Skeleton className='h-[21px] w-[300px] rounded-[4px]' />
       </div>

       <div className='mt-[16px] flex items-center gap-[8px]'>
@@ -215,12 +208,9 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
           className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
         />
       </div>
-      <div className='flex items-center gap-[8px]'>
-        <Skeleton className='h-[32px] w-[52px] rounded-[6px]' />
-        <Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
-          Add Documents
-        </Button>
-      </div>
+      <Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
+        Add Documents
+      </Button>
     </div>

     <div className='mt-[12px] flex flex-1 flex-col overflow-hidden'>
```
```diff
@@ -232,11 +222,73 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
   )
 }

+/**
+ * Formats a date string to relative time (e.g., "2h ago", "3d ago")
+ */
+function formatRelativeTime(dateString: string): string {
+  const date = new Date(dateString)
+  const now = new Date()
+  const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
+
+  if (diffInSeconds < 60) {
+    return 'just now'
+  }
+  if (diffInSeconds < 3600) {
+    const minutes = Math.floor(diffInSeconds / 60)
+    return `${minutes}m ago`
+  }
+  if (diffInSeconds < 86400) {
+    const hours = Math.floor(diffInSeconds / 3600)
+    return `${hours}h ago`
+  }
+  if (diffInSeconds < 604800) {
+    const days = Math.floor(diffInSeconds / 86400)
+    return `${days}d ago`
+  }
+  if (diffInSeconds < 2592000) {
+    const weeks = Math.floor(diffInSeconds / 604800)
+    return `${weeks}w ago`
+  }
+  if (diffInSeconds < 31536000) {
+    const months = Math.floor(diffInSeconds / 2592000)
+    return `${months}mo ago`
+  }
+  const years = Math.floor(diffInSeconds / 31536000)
+  return `${years}y ago`
+}
+
+/**
+ * Formats a date string to absolute format for tooltip display
+ */
+function formatAbsoluteDate(dateString: string): string {
+  const date = new Date(dateString)
+  return date.toLocaleDateString('en-US', {
+    year: 'numeric',
+    month: 'short',
+    day: 'numeric',
+    hour: '2-digit',
+    minute: '2-digit',
+  })
+}
+
 interface KnowledgeBaseProps {
   id: string
   knowledgeBaseName?: string
 }

+function getFileIcon(mimeType: string, filename: string) {
+  const IconComponent = getDocumentIcon(mimeType, filename)
+  return <IconComponent className='h-6 w-5 flex-shrink-0' />
+}
+
+function formatFileSize(bytes: number): string {
+  if (bytes === 0) return '0 Bytes'
+  const k = 1024
+  const sizes = ['Bytes', 'KB', 'MB', 'GB']
+  const i = Math.floor(Math.log(bytes) / Math.log(k))
+  return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`
+}
+
 const AnimatedLoader = ({ className }: { className?: string }) => (
   <Loader2 className={cn(className, 'animate-spin')} />
 )
```
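Sample outputs of the local `formatFileSize` (note it only covers up to GB):

```ts
formatFileSize(0)              // '0 Bytes'
formatFileSize(1536)           // '1.5 KB'
formatFileSize(10 * 1024 ** 2) // '10 MB'
// Sizes beyond GB index past the array: formatFileSize(2 * 1024 ** 4) -> '2 undefined'
```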
```diff
@@ -284,24 +336,53 @@ const getStatusBadge = (doc: DocumentData) => {
   }
 }

+const TAG_SLOTS = [
+  'tag1',
+  'tag2',
+  'tag3',
+  'tag4',
+  'tag5',
+  'tag6',
+  'tag7',
+  'number1',
+  'number2',
+  'number3',
+  'number4',
+  'number5',
+  'date1',
+  'date2',
+  'boolean1',
+  'boolean2',
+  'boolean3',
+] as const
+
+type TagSlot = (typeof TAG_SLOTS)[number]
+
 interface TagValue {
-  slot: AllTagSlot
+  slot: TagSlot
   displayName: string
   value: string
 }

+const TAG_FIELD_TYPES: Record<string, string> = {
+  tag: 'text',
+  number: 'number',
+  date: 'date',
+  boolean: 'boolean',
+}
+
 /**
  * Computes tag values for a document
  */
 function getDocumentTags(doc: DocumentData, definitions: TagDefinition[]): TagValue[] {
   const result: TagValue[] = []

-  for (const slot of ALL_TAG_SLOTS) {
+  for (const slot of TAG_SLOTS) {
     const raw = doc[slot]
     if (raw == null) continue

     const def = definitions.find((d) => d.tagSlot === slot)
-    const fieldType = def?.fieldType || getFieldTypeForSlot(slot) || 'text'
+    const fieldType = def?.fieldType || TAG_FIELD_TYPES[slot.replace(/\d+$/, '')] || 'text'

     let value: string
     if (fieldType === 'date') {
```
```diff
@@ -343,8 +424,6 @@ export function KnowledgeBase({

   const [searchQuery, setSearchQuery] = useState('')
   const [showTagsModal, setShowTagsModal] = useState(false)
-  const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
-  const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)

   /**
    * Memoize the search query setter to prevent unnecessary re-renders
@@ -355,7 +434,6 @@ export function KnowledgeBase({
   }, [])

   const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
-  const [isSelectAllMode, setIsSelectAllMode] = useState(false)
   const [showDeleteDialog, setShowDeleteDialog] = useState(false)
   const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
   const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)
@@ -382,6 +460,7 @@ export function KnowledgeBase({
     error: knowledgeBaseError,
     refresh: refreshKnowledgeBase,
   } = useKnowledgeBase(id)
+  const [hasProcessingDocuments, setHasProcessingDocuments] = useState(false)

   const {
     documents,
@@ -390,7 +469,6 @@ export function KnowledgeBase({
     isFetching: isFetchingDocuments,
     isPlaceholderData: isPlaceholderDocuments,
     error: documentsError,
-    hasProcessingDocuments,
     updateDocument,
     refreshDocuments,
   } = useKnowledgeBaseDocuments(id, {
@@ -399,14 +477,7 @@ export function KnowledgeBase({
     offset: (currentPage - 1) * DOCUMENTS_PER_PAGE,
     sortBy,
     sortOrder,
-    refetchInterval: (data) => {
-      if (isDeleting) return false
-      const hasPending = data?.documents?.some(
-        (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
-      )
-      return hasPending ? 3000 : false
-    },
-    enabledFilter,
+    refetchInterval: hasProcessingDocuments && !isDeleting ? 3000 : false,
   })

   const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)
```
```diff
@@ -472,52 +543,52 @@ export function KnowledgeBase({
         </TableHead>
       )

+  useEffect(() => {
+    const processing = documents.some(
+      (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
+    )
+    setHasProcessingDocuments(processing)
+
+    if (processing) {
+      checkForDeadProcesses()
+    }
+  }, [documents])
+
   /**
    * Checks for documents with stale processing states and marks them as failed
    */
-  const checkForDeadProcesses = useCallback(
-    (docsToCheck: DocumentData[]) => {
-      const now = new Date()
-      const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes
+  const checkForDeadProcesses = () => {
+    const now = new Date()
+    const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes

-      const staleDocuments = docsToCheck.filter((doc) => {
-        if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
-          return false
-        }
+    const staleDocuments = documents.filter((doc) => {
+      if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
+        return false
+      }

-        const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
-        return processingDuration > DEAD_PROCESS_THRESHOLD_MS
-      })
+      const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
+      return processingDuration > DEAD_PROCESS_THRESHOLD_MS
+    })

-      if (staleDocuments.length === 0) return
+    if (staleDocuments.length === 0) return

-      logger.warn(`Found ${staleDocuments.length} documents with dead processes`)
+    logger.warn(`Found ${staleDocuments.length} documents with dead processes`)

-      staleDocuments.forEach((doc) => {
-        updateDocumentMutation(
-          {
-            knowledgeBaseId: id,
-            documentId: doc.id,
-            updates: { markFailedDueToTimeout: true },
-          },
-          {
-            onSuccess: () => {
-              logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
-            },
-          }
-        )
-      })
-    },
-    [id, updateDocumentMutation]
-  )
-
-  useEffect(() => {
-    if (hasProcessingDocuments) {
-      checkForDeadProcesses(documents)
-    }
-  }, [hasProcessingDocuments, documents, checkForDeadProcesses])
+    staleDocuments.forEach((doc) => {
+      updateDocumentMutation(
+        {
+          knowledgeBaseId: id,
+          documentId: doc.id,
+          updates: { markFailedDueToTimeout: true },
+        },
+        {
+          onSuccess: () => {
+            logger.info(
+              `Successfully marked dead process as failed for document: ${doc.filename}`
+            )
+          },
+        }
+      )
+    })
+  }

   const handleToggleEnabled = (docId: string) => {
     const document = documents.find((doc) => doc.id === docId)
@@ -677,7 +748,6 @@ export function KnowledgeBase({
       setSelectedDocuments(new Set(documents.map((doc) => doc.id)))
     } else {
       setSelectedDocuments(new Set())
-      setIsSelectAllMode(false)
     }
   }
```
@@ -723,26 +793,6 @@ export function KnowledgeBase({
|
||||
* Handles bulk enabling of selected documents
|
||||
*/
|
||||
const handleBulkEnable = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'enable',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully enabled ${result.successCount} documents`)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
refreshDocuments()
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToEnable = documents.filter(
|
||||
(doc) => selectedDocuments.has(doc.id) && !doc.enabled
|
||||
)
|
||||
@@ -771,26 +821,6 @@ export function KnowledgeBase({
|
||||
* Handles bulk disabling of selected documents
|
||||
*/
|
||||
const handleBulkDisable = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'disable',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully disabled ${result.successCount} documents`)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
refreshDocuments()
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToDisable = documents.filter(
|
||||
(doc) => selectedDocuments.has(doc.id) && doc.enabled
|
||||
)
|
||||
```diff
@@ -815,35 +845,18 @@ export function KnowledgeBase({
   )
 }

   /**
    * Opens the bulk delete confirmation modal
    */
   const handleBulkDelete = () => {
     if (selectedDocuments.size === 0) return
     setShowBulkDeleteModal(true)
   }

   /**
    * Confirms and executes the bulk deletion of selected documents
    */
   const confirmBulkDelete = () => {
-    if (isSelectAllMode) {
-      bulkDocumentMutation(
-        {
-          knowledgeBaseId: id,
-          operation: 'delete',
-          selectAll: true,
-          enabledFilter,
-        },
-        {
-          onSuccess: (result) => {
-            logger.info(`Successfully deleted ${result.successCount} documents`)
-            refreshDocuments()
-            setSelectedDocuments(new Set())
-            setIsSelectAllMode(false)
-          },
-          onSettled: () => {
-            setShowBulkDeleteModal(false)
-          },
-        }
-      )
-      return
-    }
-
     const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))

     if (documentsToDelete.length === 0) return
@@ -868,17 +881,14 @@ export function KnowledgeBase({
   }

   const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))
-  const enabledCount = isSelectAllMode
-    ? enabledFilter === 'disabled'
-      ? 0
-      : pagination.total
-    : selectedDocumentsList.filter((doc) => doc.enabled).length
-  const disabledCount = isSelectAllMode
-    ? enabledFilter === 'enabled'
-      ? 0
-      : pagination.total
-    : selectedDocumentsList.filter((doc) => !doc.enabled).length
+  const enabledCount = selectedDocumentsList.filter((doc) => doc.enabled).length
+  const disabledCount = selectedDocumentsList.filter((doc) => !doc.enabled).length

   /**
    * Handle right-click on a document row
    * If right-clicking on an unselected document, select only that document
    * If right-clicking on a selected document with multiple selections, keep all selections
    */
   const handleDocumentContextMenu = useCallback(
     (e: React.MouseEvent, doc: DocumentData) => {
       const isCurrentlySelected = selectedDocuments.has(doc.id)
```
```diff
@@ -995,13 +1005,11 @@ export function KnowledgeBase({
             </div>
           </div>

-          <div>
-            {knowledgeBase?.description && (
-              <p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
-                {knowledgeBase.description}
-              </p>
-            )}
-          </div>
+          {knowledgeBase?.description && (
+            <p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
+              {knowledgeBase.description}
+            </p>
+          )}

           <div className='mt-[16px] flex items-center gap-[8px]'>
             <span className='text-[14px] text-[var(--text-muted)]'>
```
```diff
@@ -1044,76 +1052,21 @@ export function KnowledgeBase({
             ))}
           </div>

-          <div className='flex items-center gap-[8px]'>
-            <Popover open={isFilterPopoverOpen} onOpenChange={setIsFilterPopoverOpen}>
-              <PopoverTrigger asChild>
-                <Button variant='default' className='h-[32px] rounded-[6px]'>
-                  {enabledFilter === 'all'
-                    ? 'All'
-                    : enabledFilter === 'enabled'
-                      ? 'Enabled'
-                      : 'Disabled'}
-                  <ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
-                </Button>
-              </PopoverTrigger>
-              <PopoverContent align='end' side='bottom' sideOffset={4}>
-                <div className='flex flex-col gap-[2px]'>
-                  <PopoverItem
-                    active={enabledFilter === 'all'}
-                    onClick={() => {
-                      setEnabledFilter('all')
-                      setIsFilterPopoverOpen(false)
-                      setCurrentPage(1)
-                      setSelectedDocuments(new Set())
-                      setIsSelectAllMode(false)
-                    }}
-                  >
-                    All
-                  </PopoverItem>
-                  <PopoverItem
-                    active={enabledFilter === 'enabled'}
-                    onClick={() => {
-                      setEnabledFilter('enabled')
-                      setIsFilterPopoverOpen(false)
-                      setCurrentPage(1)
-                      setSelectedDocuments(new Set())
-                      setIsSelectAllMode(false)
-                    }}
-                  >
-                    Enabled
-                  </PopoverItem>
-                  <PopoverItem
-                    active={enabledFilter === 'disabled'}
-                    onClick={() => {
-                      setEnabledFilter('disabled')
-                      setIsFilterPopoverOpen(false)
-                      setCurrentPage(1)
-                      setSelectedDocuments(new Set())
-                      setIsSelectAllMode(false)
-                    }}
-                  >
-                    Disabled
-                  </PopoverItem>
-                </div>
-              </PopoverContent>
-            </Popover>
-
-            <Tooltip.Root>
-              <Tooltip.Trigger asChild>
-                <Button
-                  onClick={handleAddDocuments}
-                  disabled={userPermissions.canEdit !== true}
-                  variant='tertiary'
-                  className='h-[32px] rounded-[6px]'
-                >
-                  Add Documents
-                </Button>
-              </Tooltip.Trigger>
-              {userPermissions.canEdit !== true && (
-                <Tooltip.Content>Write permission required to add documents</Tooltip.Content>
-              )}
-            </Tooltip.Root>
-          </div>
+          <Tooltip.Root>
+            <Tooltip.Trigger asChild>
+              <Button
+                onClick={handleAddDocuments}
+                disabled={userPermissions.canEdit !== true}
+                variant='tertiary'
+                className='h-[32px] rounded-[6px]'
+              >
+                Add Documents
+              </Button>
+            </Tooltip.Trigger>
+            {userPermissions.canEdit !== true && (
+              <Tooltip.Content>Write permission required to add documents</Tooltip.Content>
+            )}
+          </Tooltip.Root>
         </div>

         {error && !isLoadingKnowledgeBase && (
```
```diff
@@ -1136,20 +1089,14 @@ export function KnowledgeBase({
           <div className='mt-[10px] flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
             <div className='text-center'>
               <p className='font-medium text-[var(--text-secondary)] text-sm'>
-                {searchQuery
-                  ? 'No documents found'
-                  : enabledFilter !== 'all'
-                    ? 'Nothing matches your filter'
-                    : 'No documents yet'}
+                {searchQuery ? 'No documents found' : 'No documents yet'}
               </p>
               <p className='mt-1 text-[var(--text-muted)] text-xs'>
                 {searchQuery
                   ? 'Try a different search term'
-                  : enabledFilter !== 'all'
-                    ? 'Try changing the filter'
-                    : userPermissions.canEdit === true
-                      ? 'Add documents to get started'
-                      : 'Documents will appear here once added'}
+                  : userPermissions.canEdit === true
+                    ? 'Add documents to get started'
+                    : 'Documents will appear here once added'}
               </p>
             </div>
           </div>
```
```diff
@@ -1173,7 +1120,7 @@ export function KnowledgeBase({
                   {renderSortableHeader('tokenCount', 'Tokens', 'hidden w-[8%] lg:table-cell')}
                   {renderSortableHeader('chunkCount', 'Chunks', 'w-[8%]')}
                   {renderSortableHeader('uploadedAt', 'Uploaded', 'w-[11%]')}
-                  {renderSortableHeader('enabled', 'Status', 'w-[10%]')}
+                  {renderSortableHeader('processingStatus', 'Status', 'w-[10%]')}
                   <TableHead className='w-[12%] px-[12px] py-[8px] text-[12px] text-[var(--text-secondary)]'>
                     Tags
                   </TableHead>
@@ -1217,10 +1164,7 @@ export function KnowledgeBase({
                         </TableCell>
                         <TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'>
                           <div className='flex min-w-0 items-center gap-[8px]'>
-                            {(() => {
-                              const IconComponent = getDocumentIcon(doc.mimeType, doc.filename)
-                              return <IconComponent className='h-6 w-5 flex-shrink-0' />
-                            })()}
+                            {getFileIcon(doc.mimeType, doc.filename)}
                             <Tooltip.Root>
                               <Tooltip.Trigger asChild>
                                 <span
```
```diff
@@ -1564,14 +1508,6 @@ export function KnowledgeBase({
         enabledCount={enabledCount}
         disabledCount={disabledCount}
         isLoading={isBulkOperating}
-        totalCount={pagination.total}
-        isAllPageSelected={isAllSelected}
-        isAllSelected={isSelectAllMode}
-        onSelectAll={() => setIsSelectAllMode(true)}
-        onClearSelectAll={() => {
-          setIsSelectAllMode(false)
-          setSelectedDocuments(new Set())
-        }}
       />

       <DocumentContextMenu
```
```diff
@@ -13,11 +13,6 @@ interface ActionBarProps {
   disabledCount?: number
   isLoading?: boolean
   className?: string
-  totalCount?: number
-  isAllPageSelected?: boolean
-  isAllSelected?: boolean
-  onSelectAll?: () => void
-  onClearSelectAll?: () => void
 }

 export function ActionBar({
@@ -29,21 +24,14 @@ export function ActionBar({
   disabledCount = 0,
   isLoading = false,
   className,
-  totalCount = 0,
-  isAllPageSelected = false,
-  isAllSelected = false,
-  onSelectAll,
-  onClearSelectAll,
 }: ActionBarProps) {
   const userPermissions = useUserPermissionsContext()

-  if (selectedCount === 0 && !isAllSelected) return null
+  if (selectedCount === 0) return null

   const canEdit = userPermissions.canEdit
   const showEnableButton = disabledCount > 0 && onEnable && canEdit
   const showDisableButton = enabledCount > 0 && onDisable && canEdit
-  const showSelectAllOption =
-    isAllPageSelected && !isAllSelected && totalCount > selectedCount && onSelectAll

   return (
     <motion.div
@@ -55,31 +43,7 @@ export function ActionBar({
     >
       <div className='flex items-center gap-[8px] rounded-[10px] border border-[var(--border)] bg-[var(--surface-2)] px-[8px] py-[6px]'>
         <span className='px-[4px] text-[13px] text-[var(--text-secondary)]'>
-          {isAllSelected ? totalCount : selectedCount} selected
-          {showSelectAllOption && (
-            <>
-              {' · '}
-              <button
-                type='button'
-                onClick={onSelectAll}
-                className='text-[var(--brand-primary)] hover:underline'
-              >
-                Select all
-              </button>
-            </>
-          )}
-          {isAllSelected && onClearSelectAll && (
-            <>
-              {' · '}
-              <button
-                type='button'
-                onClick={onClearSelectAll}
-                className='text-[var(--brand-primary)] hover:underline'
-              >
-                Clear
-              </button>
-            </>
-          )}
+          {selectedCount} selected
         </span>

         <div className='flex items-center gap-[5px]'>
```
```diff
@@ -123,11 +123,7 @@ export function RenameDocumentModal({
             >
               Cancel
             </Button>
-            <Button
-              variant='tertiary'
-              type='submit'
-              disabled={isSubmitting || !name?.trim() || name.trim() === initialName}
-            >
+            <Button variant='tertiary' type='submit' disabled={isSubmitting || !name?.trim()}>
              {isSubmitting ? 'Renaming...' : 'Rename'}
            </Button>
          </div>
```
```diff
@@ -3,7 +3,6 @@
 import { useCallback, useState } from 'react'
 import { useParams, useRouter } from 'next/navigation'
 import { Badge, DocumentAttachment, Tooltip } from '@/components/emcn'
-import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
 import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
 import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
 import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
@@ -22,6 +21,55 @@ interface BaseCardProps {
   onDelete?: (id: string) => Promise<void>
 }

+/**
+ * Formats a date string to relative time (e.g., "2h ago", "3d ago")
+ */
+function formatRelativeTime(dateString: string): string {
+  const date = new Date(dateString)
+  const now = new Date()
+  const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
+
+  if (diffInSeconds < 60) {
+    return 'just now'
+  }
+  if (diffInSeconds < 3600) {
+    const minutes = Math.floor(diffInSeconds / 60)
+    return `${minutes}m ago`
+  }
+  if (diffInSeconds < 86400) {
+    const hours = Math.floor(diffInSeconds / 3600)
+    return `${hours}h ago`
+  }
+  if (diffInSeconds < 604800) {
+    const days = Math.floor(diffInSeconds / 86400)
+    return `${days}d ago`
+  }
+  if (diffInSeconds < 2592000) {
+    const weeks = Math.floor(diffInSeconds / 604800)
+    return `${weeks}w ago`
+  }
+  if (diffInSeconds < 31536000) {
+    const months = Math.floor(diffInSeconds / 2592000)
+    return `${months}mo ago`
+  }
+  const years = Math.floor(diffInSeconds / 31536000)
+  return `${years}y ago`
+}
+
+/**
+ * Formats a date string to absolute format for tooltip display
+ */
+function formatAbsoluteDate(dateString: string): string {
+  const date = new Date(dateString)
+  return date.toLocaleDateString('en-US', {
+    year: 'numeric',
+    month: 'short',
+    day: 'numeric',
+    hour: '2-digit',
+    minute: '2-digit',
+  })
+}
+
 /**
  * Skeleton placeholder for a knowledge base card
  */
```
```diff
@@ -344,51 +344,53 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
             <Textarea
               id='description'
               placeholder='Describe this knowledge base (optional)'
-              rows={4}
+              rows={3}
               {...register('description')}
               className={cn(errors.description && 'border-[var(--text-error)]')}
             />
           </div>

-          <div className='grid grid-cols-2 gap-[12px]'>
-            <div className='flex flex-col gap-[8px]'>
-              <Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
-              <Input
-                id='minChunkSize'
-                placeholder='100'
-                {...register('minChunkSize', { valueAsNumber: true })}
-                className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
-                autoComplete='off'
-                data-form-type='other'
-                name='min-chunk-size'
-              />
-            </div>
-
-            <div className='flex flex-col gap-[8px]'>
-              <Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
-              <Input
-                id='maxChunkSize'
-                placeholder='1024'
-                {...register('maxChunkSize', { valueAsNumber: true })}
-                className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
-                autoComplete='off'
-                data-form-type='other'
-                name='max-chunk-size'
-              />
-            </div>
-          </div>
-
-          <div className='flex flex-col gap-[8px]'>
-            <Label htmlFor='overlapSize'>Overlap (tokens)</Label>
-            <Input
-              id='overlapSize'
-              placeholder='200'
-              {...register('overlapSize', { valueAsNumber: true })}
-              className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
-              autoComplete='off'
-              data-form-type='other'
-              name='overlap-size'
-            />
+          <div className='space-y-[12px] rounded-[6px] bg-[var(--surface-5)] px-[12px] py-[14px]'>
+            <div className='grid grid-cols-2 gap-[12px]'>
+              <div className='flex flex-col gap-[8px]'>
+                <Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
+                <Input
+                  id='minChunkSize'
+                  placeholder='100'
+                  {...register('minChunkSize', { valueAsNumber: true })}
+                  className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
+                  autoComplete='off'
+                  data-form-type='other'
+                  name='min-chunk-size'
+                />
+              </div>
+
+              <div className='flex flex-col gap-[8px]'>
+                <Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
+                <Input
+                  id='maxChunkSize'
+                  placeholder='1024'
+                  {...register('maxChunkSize', { valueAsNumber: true })}
+                  className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
+                  autoComplete='off'
+                  data-form-type='other'
+                  name='max-chunk-size'
+                />
+              </div>
+            </div>
+
+            <div className='flex flex-col gap-[8px]'>
+              <Label htmlFor='overlapSize'>Overlap (tokens)</Label>
+              <Input
+                id='overlapSize'
+                placeholder='200'
+                {...register('overlapSize', { valueAsNumber: true })}
+                className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
+                autoComplete='off'
+                data-form-type='other'
+                name='overlap-size'
+              />
+              <p className='text-[11px] text-[var(--text-muted)]'>
+                1 token ≈ 4 characters. Max chunk size and overlap are in tokens.
+              </p>
```
```diff
@@ -59,7 +59,7 @@ export function EditKnowledgeBaseModal({
     handleSubmit,
     reset,
     watch,
-    formState: { errors, isDirty },
+    formState: { errors },
   } = useForm<FormValues>({
     resolver: zodResolver(FormSchema),
     defaultValues: {
@@ -127,7 +127,7 @@ export function EditKnowledgeBaseModal({
           <Textarea
             id='description'
             placeholder='Describe this knowledge base (optional)'
-            rows={4}
+            rows={3}
             {...register('description')}
             className={cn(errors.description && 'border-[var(--text-error)]')}
           />
@@ -161,7 +161,7 @@ export function EditKnowledgeBaseModal({
           <Button
             variant='tertiary'
             type='submit'
-            disabled={isSubmitting || !nameValue?.trim() || !isDirty}
+            disabled={isSubmitting || !nameValue?.trim()}
           >
             {isSubmitting ? 'Saving...' : 'Save'}
           </Button>
```
```diff
@@ -18,7 +18,6 @@ import {
 } from '@/components/emcn'
 import { WorkflowIcon } from '@/components/icons'
 import { cn } from '@/lib/core/utils/cn'
-import { formatDuration } from '@/lib/core/utils/formatting'
 import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config'
 import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
 import { getBlock, getBlockByToolName } from '@/blocks'
@@ -143,6 +142,14 @@ function normalizeAndSortSpans(spans: TraceSpan[]): TraceSpan[] {

 const DEFAULT_BLOCK_COLOR = '#6b7280'

+/**
+ * Formats duration in ms
+ */
+function formatDuration(ms: number): string {
+  if (ms < 1000) return `${ms}ms`
+  return `${(ms / 1000).toFixed(2)}s`
+}
+
 /**
  * Gets icon and color for a span type using block config
  */
@@ -307,7 +314,7 @@ function ExpandableRowHeader({
         </span>
       </div>
       <span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
-        {formatDuration(duration, { precision: 2 })}
+        {formatDuration(duration)}
       </span>
     </div>
   )
```
@@ -151,29 +151,6 @@ export const ActionBar = memo(
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
{isSubflowBlock && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
if (!disabled) {
|
||||
collaborativeBatchToggleBlockEnabled([blockId])
|
||||
}
|
||||
}}
|
||||
className={ACTION_BUTTON_STYLES}
|
||||
disabled={disabled}
|
||||
>
|
||||
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
{!isStartBlock && !isResponseBlock && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
@@ -245,6 +222,29 @@ export const ActionBar = memo(
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
{isSubflowBlock && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
if (!disabled) {
|
||||
collaborativeBatchToggleBlockEnabled([blockId])
|
||||
}
|
||||
}}
|
||||
className={ACTION_BUTTON_STYLES}
|
||||
disabled={disabled}
|
||||
>
|
||||
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
|
||||
@@ -1312,16 +1312,15 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
if (currentLoop && isLoopBlock) {
|
||||
containingLoopBlockId = blockId
|
||||
const loopType = currentLoop.loopType || 'for'
|
||||
const contextualTags: string[] = ['index']
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push('currentItem')
|
||||
contextualTags.push('items')
|
||||
}
|
||||
|
||||
const loopBlock = blocks[blockId]
|
||||
if (loopBlock) {
|
||||
const loopBlockName = loopBlock.name || loopBlock.type
|
||||
const normalizedLoopName = normalizeName(loopBlockName)
|
||||
const contextualTags: string[] = [`${normalizedLoopName}.index`]
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push(`${normalizedLoopName}.currentItem`)
|
||||
contextualTags.push(`${normalizedLoopName}.items`)
|
||||
}
|
||||
|
||||
loopBlockGroup = {
|
||||
blockName: loopBlockName,
|
||||
@@ -1329,23 +1328,21 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockType: 'loop',
|
||||
tags: contextualTags,
|
||||
distance: 0,
|
||||
isContextual: true,
|
||||
}
|
||||
}
|
||||
} else if (containingLoop) {
|
||||
const [loopId, loop] = containingLoop
|
||||
containingLoopBlockId = loopId
|
||||
const loopType = loop.loopType || 'for'
|
||||
const contextualTags: string[] = ['index']
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push('currentItem')
|
||||
contextualTags.push('items')
|
||||
}
|
||||
|
||||
const containingLoopBlock = blocks[loopId]
|
||||
if (containingLoopBlock) {
|
||||
const loopBlockName = containingLoopBlock.name || containingLoopBlock.type
|
||||
const normalizedLoopName = normalizeName(loopBlockName)
|
||||
const contextualTags: string[] = [`${normalizedLoopName}.index`]
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push(`${normalizedLoopName}.currentItem`)
|
||||
contextualTags.push(`${normalizedLoopName}.items`)
|
||||
}
|
||||
|
||||
loopBlockGroup = {
|
||||
blockName: loopBlockName,
|
||||
@@ -1353,7 +1350,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockType: 'loop',
|
||||
tags: contextualTags,
|
||||
distance: 0,
|
||||
isContextual: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1367,16 +1363,15 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const [parallelId, parallel] = containingParallel
|
||||
containingParallelBlockId = parallelId
|
||||
const parallelType = parallel.parallelType || 'count'
|
||||
const contextualTags: string[] = ['index']
|
||||
if (parallelType === 'collection') {
|
||||
contextualTags.push('currentItem')
|
||||
contextualTags.push('items')
|
||||
}
|
||||
|
||||
const containingParallelBlock = blocks[parallelId]
|
||||
if (containingParallelBlock) {
|
||||
const parallelBlockName = containingParallelBlock.name || containingParallelBlock.type
|
||||
const normalizedParallelName = normalizeName(parallelBlockName)
|
||||
const contextualTags: string[] = [`${normalizedParallelName}.index`]
|
||||
if (parallelType === 'collection') {
|
||||
contextualTags.push(`${normalizedParallelName}.currentItem`)
|
||||
contextualTags.push(`${normalizedParallelName}.items`)
|
||||
}
|
||||
|
||||
parallelBlockGroup = {
|
||||
blockName: parallelBlockName,
|
||||
@@ -1384,7 +1379,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockType: 'parallel',
|
||||
tags: contextualTags,
|
||||
distance: 0,
|
||||
isContextual: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1651,29 +1645,38 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const nestedBlockTagGroups: NestedBlockTagGroup[] = useMemo(() => {
|
||||
return filteredBlockTagGroups.map((group: BlockTagGroup) => {
|
||||
const normalizedBlockName = normalizeName(group.blockName)
|
||||
|
||||
// Handle loop/parallel contextual tags (index, currentItem, items)
|
||||
const directTags: NestedTag[] = []
|
||||
const tagsForTree: string[] = []
|
||||
|
||||
group.tags.forEach((tag: string) => {
|
||||
const tagParts = tag.split('.')
|
||||
|
||||
if (tagParts.length === 1) {
|
||||
// Loop/parallel contextual tags without block prefix
|
||||
if (
|
||||
(group.blockType === 'loop' || group.blockType === 'parallel') &&
|
||||
tagParts.length === 1
|
||||
) {
|
||||
directTags.push({
|
||||
key: tag,
|
||||
display: tag,
|
||||
fullTag: tag,
|
||||
})
|
||||
} else if (tagParts.length === 2) {
|
||||
// Direct property like blockname.property
|
||||
directTags.push({
|
||||
key: tagParts[1],
|
||||
display: tagParts[1],
|
||||
fullTag: tag,
|
||||
})
|
||||
} else {
|
||||
// Nested property - add to tree builder
|
||||
tagsForTree.push(tag)
|
||||
}
|
||||
})
|
||||
|
||||
// Build recursive tree from nested tags
|
||||
const nestedTags = [...directTags, ...buildNestedTagTree(tagsForTree, normalizedBlockName)]
|
||||
|
||||
return {
|
||||
@@ -1797,19 +1800,13 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
processedTag = tag
|
||||
}
|
||||
} else if (
|
||||
blockGroup?.isContextual &&
|
||||
blockGroup &&
|
||||
(blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel')
|
||||
) {
|
||||
const tagParts = tag.split('.')
|
||||
if (tagParts.length === 1) {
|
||||
processedTag = blockGroup.blockType
|
||||
if (!tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) {
|
||||
processedTag = `${blockGroup.blockType}.${tag}`
|
||||
} else {
|
||||
const lastPart = tagParts[tagParts.length - 1]
|
||||
if (['index', 'currentItem', 'items'].includes(lastPart)) {
|
||||
processedTag = `${blockGroup.blockType}.${lastPart}`
|
||||
} else {
|
||||
processedTag = tag
|
||||
}
|
||||
processedTag = tag
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,8 +7,6 @@ export interface BlockTagGroup {
|
||||
blockType: string
|
||||
tags: string[]
|
||||
distance: number
|
||||
/** True if this is a contextual group (loop/parallel iteration context available inside the subflow) */
|
||||
isContextual?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -13,7 +13,6 @@ import {
|
||||
import { ReactFlowProvider } from 'reactflow'
|
||||
import { Badge, Button, ChevronDown, Code, Combobox, Input, Label } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||
import { extractReferencePrefixes } from '@/lib/workflows/sanitization/references'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
@@ -705,6 +704,14 @@ interface PreviewEditorProps {
|
||||
onClose?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Format duration for display
|
||||
*/
|
||||
function formatDuration(ms: number): string {
|
||||
if (ms < 1000) return `${ms}ms`
|
||||
return `${(ms / 1000).toFixed(2)}s`
|
||||
}
|
||||
|
||||
/** Minimum height for the connections section (header only) */
|
||||
const MIN_CONNECTIONS_HEIGHT = 30
|
||||
/** Maximum height for the connections section */
|
||||
@@ -1173,7 +1180,7 @@ function PreviewEditorContent({
|
||||
)}
|
||||
{executionData.durationMs !== undefined && (
|
||||
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||
{formatDuration(executionData.durationMs, { precision: 2 })}
|
||||
{formatDuration(executionData.durationMs)}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -16,7 +16,6 @@ import {
|
||||
} from '@/components/emcn'
|
||||
import { Input, Skeleton } from '@/components/ui'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { formatDate } from '@/lib/core/utils/formatting'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
type ApiKey,
|
||||
@@ -134,9 +133,13 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
}
|
||||
}, [shouldScrollToBottom])
|
||||
|
||||
const formatLastUsed = (dateString?: string) => {
|
||||
const formatDate = (dateString?: string) => {
|
||||
if (!dateString) return 'Never'
|
||||
return formatDate(new Date(dateString))
|
||||
return new Date(dateString).toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
})
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -213,7 +216,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
{key.name}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
@@ -248,7 +251,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
{key.name}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
@@ -288,7 +291,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
{key.name}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
|
||||
@@ -13,7 +13,6 @@ import {
|
||||
ModalHeader,
|
||||
} from '@/components/emcn'
|
||||
import { Input, Skeleton } from '@/components/ui'
|
||||
import { formatDate } from '@/lib/core/utils/formatting'
|
||||
import {
|
||||
type CopilotKey,
|
||||
useCopilotKeys,
|
||||
@@ -116,9 +115,13 @@ export function Copilot() {
|
||||
}
|
||||
}
|
||||
|
||||
const formatLastUsed = (dateString?: string | null) => {
|
||||
const formatDate = (dateString?: string | null) => {
|
||||
if (!dateString) return 'Never'
|
||||
return formatDate(new Date(dateString))
|
||||
return new Date(dateString).toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
})
|
||||
}
|
||||
|
||||
const hasKeys = keys.length > 0
|
||||
@@ -177,7 +180,7 @@ export function Copilot() {
|
||||
{key.name || 'Unnamed Key'}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
|
||||
@@ -242,9 +242,15 @@ Return ONLY the email body - no explanations, no extra text.`,
|
||||
id: 'messageId',
|
||||
title: 'Message ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Read specific email by ID (overrides label/folder)',
|
||||
condition: { field: 'operation', value: 'read_gmail' },
|
||||
mode: 'advanced',
|
||||
placeholder: 'Enter message ID to read (optional)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'read_gmail',
|
||||
and: {
|
||||
field: 'folder',
|
||||
value: '',
|
||||
},
|
||||
},
|
||||
},
|
||||
// Search Fields
|
||||
{
|
||||
|
||||
@@ -129,9 +129,12 @@ ROUTING RULES:
|
||||
3. If the context is even partially related to a route's description, select that route
|
||||
4. ONLY output NO_MATCH if the context is completely unrelated to ALL route descriptions
|
||||
|
||||
Respond with a JSON object containing:
|
||||
- route: EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
|
||||
- reasoning: A brief explanation (1-2 sentences) of why you chose this route`
|
||||
OUTPUT FORMAT:
|
||||
- Output EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
|
||||
- No explanation, no punctuation, no additional text
|
||||
- Just the route ID or NO_MATCH
|
||||
|
||||
Your response:`
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -269,7 +272,6 @@ interface RouterV2Response extends ToolResponse {
|
||||
total: number
|
||||
}
|
||||
selectedRoute: string
|
||||
reasoning: string
|
||||
selectedPath: {
|
||||
blockId: string
|
||||
blockType: string
|
||||
@@ -353,7 +355,6 @@ export const RouterV2Block: BlockConfig<RouterV2Response> = {
|
||||
tokens: { type: 'json', description: 'Token usage' },
|
||||
cost: { type: 'json', description: 'Cost information' },
|
||||
selectedRoute: { type: 'string', description: 'Selected route ID' },
|
||||
reasoning: { type: 'string', description: 'Explanation of why this route was chosen' },
|
||||
selectedPath: { type: 'json', description: 'Selected routing path' },
|
||||
},
|
||||
}
|
||||
|
||||
@@ -23,13 +23,7 @@ import { cn } from '@/lib/core/utils/cn'
|
||||
* ```
|
||||
*/
|
||||
const checkboxVariants = cva(
|
||||
[
|
||||
'peer shrink-0 cursor-pointer rounded-[4px] border transition-colors',
|
||||
'border-[var(--border-1)] bg-transparent',
|
||||
'focus-visible:outline-none',
|
||||
'data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50',
|
||||
'data-[state=checked]:border-[var(--text-primary)] data-[state=checked]:bg-[var(--text-primary)]',
|
||||
].join(' '),
|
||||
'peer shrink-0 rounded-sm border border-[var(--border-1)] bg-[var(--surface-4)] ring-offset-background transition-colors hover:border-[var(--border-muted)] hover:bg-[var(--surface-7)] focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50 data-[state=checked]:border-[var(--text-muted)] data-[state=checked]:bg-[var(--text-muted)] data-[state=checked]:text-white dark:bg-[var(--surface-5)] dark:data-[state=checked]:border-[var(--surface-7)] dark:data-[state=checked]:bg-[var(--surface-7)] dark:data-[state=checked]:text-[var(--text-primary)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]',
|
||||
{
|
||||
variants: {
|
||||
size: {
|
||||
@@ -89,7 +83,7 @@ const Checkbox = React.forwardRef<React.ElementRef<typeof CheckboxPrimitive.Root
|
||||
className={cn(checkboxVariants({ size }), className)}
|
||||
{...props}
|
||||
>
|
||||
<CheckboxPrimitive.Indicator className='flex items-center justify-center text-[var(--white)]'>
|
||||
<CheckboxPrimitive.Indicator className={cn('flex items-center justify-center text-current')}>
|
||||
<Check className={cn(checkboxIconVariants({ size }))} />
|
||||
</CheckboxPrimitive.Indicator>
|
||||
</CheckboxPrimitive.Root>
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
export { DiffControlsDemo } from './components/diff-controls-demo'
|
||||
@@ -1,111 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
|
||||
export function DiffControlsDemo() {
|
||||
const [rejectHover, setRejectHover] = useState(false)
|
||||
const [acceptHover, setAcceptHover] = useState(false)
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', justifyContent: 'center', margin: '24px 0' }}>
|
||||
<div
|
||||
style={{
|
||||
position: 'relative',
|
||||
display: 'flex',
|
||||
height: '30px',
|
||||
overflow: 'hidden',
|
||||
borderRadius: '4px',
|
||||
isolation: 'isolate',
|
||||
}}
|
||||
>
|
||||
{/* Reject button */}
|
||||
<button
|
||||
onClick={() => {}}
|
||||
onMouseEnter={() => setRejectHover(true)}
|
||||
onMouseLeave={() => setRejectHover(false)}
|
||||
title='Reject changes'
|
||||
style={{
|
||||
position: 'relative',
|
||||
display: 'flex',
|
||||
height: '100%',
|
||||
alignItems: 'center',
|
||||
border: '1px solid #e0e0e0',
|
||||
backgroundColor: rejectHover ? '#f0f0f0' : '#f5f5f5',
|
||||
paddingRight: '20px',
|
||||
paddingLeft: '12px',
|
||||
fontWeight: 500,
|
||||
fontSize: '13px',
|
||||
color: rejectHover ? '#2d2d2d' : '#404040',
|
||||
clipPath: 'polygon(0 0, calc(100% + 10px) 0, 100% 100%, 0 100%)',
|
||||
borderRadius: '4px 0 0 4px',
|
||||
cursor: 'default',
|
||||
transition: 'color 150ms, background-color 150ms, border-color 150ms',
|
||||
}}
|
||||
>
|
||||
Reject
|
||||
</button>
|
||||
{/* Slanted divider - split gray/green */}
|
||||
<div
|
||||
style={{
|
||||
pointerEvents: 'none',
|
||||
position: 'absolute',
|
||||
top: 0,
|
||||
bottom: 0,
|
||||
left: '66px',
|
||||
width: '2px',
|
||||
transform: 'skewX(-18.4deg)',
|
||||
background: 'linear-gradient(to right, #e0e0e0 50%, #238458 50%)',
|
||||
zIndex: 10,
|
||||
}}
|
||||
/>
|
||||
{/* Accept button */}
|
||||
<button
|
||||
onClick={() => {}}
|
||||
onMouseEnter={() => setAcceptHover(true)}
|
||||
onMouseLeave={() => setAcceptHover(false)}
|
||||
title='Accept changes (⇧⌘⏎)'
|
||||
style={{
|
||||
position: 'relative',
|
||||
display: 'flex',
|
||||
height: '100%',
|
||||
alignItems: 'center',
|
||||
border: '1px solid rgba(0, 0, 0, 0.15)',
|
||||
backgroundColor: '#32bd7e',
|
||||
paddingRight: '12px',
|
||||
paddingLeft: '20px',
|
||||
fontWeight: 500,
|
||||
fontSize: '13px',
|
||||
color: '#ffffff',
|
||||
clipPath: 'polygon(10px 0, 100% 0, 100% 100%, 0 100%)',
|
||||
borderRadius: '0 4px 4px 0',
|
||||
marginLeft: '-10px',
|
||||
cursor: 'default',
|
||||
filter: acceptHover ? 'brightness(1.1)' : undefined,
|
||||
transition: 'background-color 150ms, border-color 150ms',
|
||||
}}
|
||||
>
|
||||
Accept
|
||||
<kbd
|
||||
style={{
|
||||
marginLeft: '8px',
|
||||
borderRadius: '4px',
|
||||
border: '1px solid rgba(255, 255, 255, 0.2)',
|
||||
backgroundColor: 'rgba(255, 255, 255, 0.1)',
|
||||
paddingLeft: '6px',
|
||||
paddingRight: '6px',
|
||||
paddingTop: '2px',
|
||||
paddingBottom: '2px',
|
||||
fontWeight: 500,
|
||||
fontFamily:
|
||||
'ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"',
|
||||
fontSize: '10px',
|
||||
color: '#ffffff',
|
||||
}}
|
||||
>
|
||||
⇧⌘<span style={{ display: 'inline-block', transform: 'translateY(-1px)' }}>⏎</span>
|
||||
</kbd>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,201 +0,0 @@
---
slug: v0-5
title: 'Introducing Sim v0.5'
description: 'This new release brings a state-of-the-art Copilot, seamless MCP server and tool deployment, 100+ integrations with 300+ tools, comprehensive execution logs, and realtime collaboration—built for teams shipping AI agents in production.'
date: 2026-01-22
updated: 2026-01-22
authors:
- waleed
readingTime: 8
tags: [Release, Copilot, MCP, Observability, Collaboration, Integrations, Sim]
ogImage: /studio/v0-5/cover.png
ogAlt: 'Sim v0.5 release announcement'
about: ['AI Agents', 'Workflow Automation', 'Developer Tools']
timeRequired: PT8M
canonical: https://sim.ai/studio/v0-5
featured: true
draft: false
---

**Sim v0.5** is the next evolution of our agent workflow platform—built for teams shipping AI agents to production.

## Copilot



Copilot is a context-aware assistant embedded in the Sim editor. Unlike general-purpose AI assistants, Copilot has direct access to your workspace: workflows, block configurations, execution logs, connected credentials, and documentation. It can also search the web to pull in external context when needed.

Your workspace is indexed for hybrid retrieval. When you ask a question, Copilot queries this index to ground its responses in your actual workflow state. Ask "why did my workflow fail at 3am?" and it retrieves the relevant execution trace, identifies the error, and explains what happened.

Copilot supports slash commands that trigger specialized capabilities:

- `/deep-research` — performs multi-step web research on a topic, synthesizing results from multiple sources
- `/api-docs` — fetches and parses API documentation from a URL, extracting endpoints, parameters, and authentication requirements
- `/test` — runs your current workflow with sample inputs and reports results inline
- `/build` — generates a complete workflow from a natural language description, wiring up blocks and configuring integrations

Use `@` commands to pull specific context into your conversation. `@block` references a specific block's configuration and recent outputs. `@workflow` includes the full workflow structure. `@logs` pulls in recent execution traces. This lets you ask targeted questions like "why is `@Slack1` returning an error?" and Copilot has the exact context it needs to diagnose the issue.

For complex tasks, Copilot uses subagents—breaking requests into discrete operations and executing them sequentially. Ask it to "add error handling to this workflow" and it will analyze your blocks, determine where failures could occur, add appropriate condition blocks, and wire up notification paths. Each change surfaces as a diff for your review before applying.

<DiffControlsDemo />

## MCP Deployment



Deploy any workflow as an [MCP](https://modelcontextprotocol.io) server. Once deployed, the workflow becomes a callable tool for any MCP-compatible agent—[Claude Desktop](https://claude.ai/download), [Cursor](https://cursor.com), or your own applications.

Sim generates a tool definition from your workflow: the name and description you specify, plus a JSON schema derived from your Start block's input format. The MCP server uses Streamable HTTP transport, so agents connect via a single URL. Authentication is handled via API key headers or public access, depending on your configuration.

Consider a lead enrichment workflow: it queries Apollo for contact data, checks Salesforce for existing records, formats the output, and posts a summary to Slack. That's 8 blocks in Sim. Deploy it as MCP, and any agent can call `enrich_lead("jane@acme.com")` and receive structured data back. The agent treats it as a single tool call—it doesn't need to know about Apollo, Salesforce, or Slack.
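
A sketch of the consuming side, assuming the official MCP TypeScript SDK (the deployment URL, header name, and tool name below are placeholders, not values Sim guarantees):

```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js'
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'

// Hypothetical server URL and auth header — substitute your deployment's values.
const transport = new StreamableHTTPClientTransport(
  new URL('https://sim.ai/api/mcp/your-server-id'),
  { requestInit: { headers: { 'x-api-key': process.env.SIM_API_KEY ?? '' } } }
)

const client = new Client({ name: 'example-agent', version: '1.0.0' })
await client.connect(transport)

// The whole workflow surfaces as one tool; its argument schema comes from the Start block.
const result = await client.callTool({
  name: 'enrich_lead',
  arguments: { email: 'jane@acme.com' },
})
console.log(result.content)
```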

This pattern scales to research pipelines, data processing workflows, approval chains, and internal tooling. Anything you build in Sim becomes a tool any agent can invoke.

## Logs & Dashboard



Every workflow execution generates a full trace. Each block records its start time, end time, inputs, outputs, and any errors. For LLM blocks, we capture prompt tokens, completion tokens, and cost by model.

The dashboard aggregates this data into queryable views:

- **Trace spans**: Hierarchical view of block executions with timing waterfall
- **Cost attribution**: Token usage and spend broken down by model per execution
- **Error context**: Full stack traces with the block, input values, and failure reason
- **Filtering**: Query by time range, trigger type, workflow, or status
- **Execution snapshots**: Each run captures the workflow state at execution time—restore to see exactly what was running

This level of observability is necessary when workflows handle production traffic—sending customer emails, processing payments, or making API calls on behalf of users.

## Realtime Collaboration



Multiple users can edit the same workflow simultaneously. Changes propagate in real time—you see teammates' cursors, block additions, and configuration updates as they happen.

The editor now supports full undo/redo history (Cmd+Z / Cmd+Shift+Z), so you can step back through changes without losing work. Copy and paste works for individual blocks, groups of blocks, or entire subflows—select what you need, Cmd+C, and paste into the same workflow or a different one. This makes it easy to duplicate patterns, share components across workflows, or quickly prototype variations.

This is particularly useful during development sessions where engineers, product managers, and domain experts need to iterate together. Everyone works on the same workflow state, and changes sync immediately across all connected clients.

## Versioning



Every deployment creates a new version. The version history shows who deployed what and when, with a preview of the workflow state at that point in time. Roll back to any previous version with one click—the live deployment updates immediately.

This matters when something breaks in production. You can instantly revert to the last known good version while you debug, rather than scrambling to fix forward. It also provides a clear audit trail: you can see exactly what changed between versions and who made the change.

---

## 100+ Integrations



v0.5 adds **100+ integrations** with **300+ actions**. These cover the specific operations you need—not just generic CRUD, but actions like "send Slack message to channel," "create Jira ticket with custom fields," "query Postgres with parameterized SQL," or "enrich contact via Apollo."

- **CRMs & Sales**: Salesforce, HubSpot, Pipedrive, Apollo, Wealthbox
- **Communication**: Slack, Discord, Microsoft Teams, Telegram, WhatsApp, Twilio
- **Productivity**: Notion, Confluence, Google Workspace, Microsoft 365, Airtable, Asana, Trello
- **Developer Tools**: GitHub, GitLab, Jira, Linear, Sentry, Datadog, Grafana
- **Databases**: PostgreSQL, MySQL, MongoDB, [Supabase](https://supabase.com), DynamoDB, Elasticsearch, [Pinecone](https://pinecone.io), [Qdrant](https://qdrant.tech), Neo4j
- **Finance**: Stripe, Kalshi, Polymarket
- **Web & Search**: [Firecrawl](https://firecrawl.dev), [Exa](https://exa.ai), [Tavily](https://tavily.com), [Jina](https://jina.ai), [Serper](https://serper.dev)
- **Cloud**: AWS (S3, RDS, SQS, Textract, Bedrock), [Browser Use](https://browser-use.com), [Stagehand](https://github.com/browserbase/stagehand)

Each integration handles OAuth or API key authentication. Connect once, and the credentials are available across all workflows in your workspace.

---

## Triggers

Workflows can be triggered through multiple mechanisms:

**Webhooks**: Sim provisions a unique HTTPS endpoint for each workflow. Incoming POST requests are parsed and passed to the first block as input. Supports standard webhook patterns including signature verification for services that provide it.
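
As an illustration, triggering a run from your own code is a plain HTTP POST; the endpoint path and payload shape below are placeholders, since Sim provisions the real URL per workflow:

```typescript
// Hypothetical webhook endpoint; the JSON body becomes the first block's input.
const res = await fetch('https://sim.ai/api/webhooks/trigger/wh_abc123', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ email: 'jane@acme.com', source: 'signup-form' }),
})
console.log(res.status)
```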

**Schedules**: Cron-based scheduling with timezone support. Use the visual scheduler or write expressions directly. Execution locks prevent overlapping runs.

**Chat**: Deploy workflows as conversational interfaces. Messages stream to your workflow, responses stream back to the user. Supports multi-turn context.

**API**: REST endpoint with your workflow's input schema. Call it from any system that can make HTTP requests.

**Integration triggers**: Event-driven triggers for specific services—GitHub (PR opened, issue created, push), Stripe (payment succeeded, subscription updated), TypeForm (form submitted), RSS (new item), and more.

**Forms**: Coming soon—build custom input forms that trigger workflows directly.

---

## Knowledge Base



Upload documents—PDFs, text files, markdown, HTML—and make them queryable by your agents. This is [RAG](https://en.wikipedia.org/wiki/Retrieval-augmented_generation) (Retrieval Augmented Generation) built directly into Sim.

Documents are chunked, embedded, and indexed using hybrid search ([BM25](https://en.wikipedia.org/wiki/Okapi_BM25) + vector embeddings). Agent blocks can query the knowledge base as a tool, retrieving relevant passages based on semantic similarity and keyword matching. When documents are updated, they re-index automatically.
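
To make "hybrid" concrete, one common fusion strategy is a weighted sum of normalized keyword and vector scores. This is a generic illustration of the idea, not Sim's actual ranking code:

```typescript
interface Scored {
  id: string
  bm25: number // keyword relevance score
  vector: number // embedding cosine similarity
}

// Rank candidates by a weighted blend of max-normalized scores.
// alpha = 1 is pure keyword search; alpha = 0 is pure semantic search.
function hybridRank(candidates: Scored[], alpha = 0.5): Scored[] {
  const maxB = Math.max(...candidates.map((c) => c.bm25), 1e-9)
  const maxV = Math.max(...candidates.map((c) => c.vector), 1e-9)
  const score = (c: Scored) => alpha * (c.bm25 / maxB) + (1 - alpha) * (c.vector / maxV)
  return [...candidates].sort((a, b) => score(b) - score(a))
}
```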

Use cases:

- **Customer support agents** that reference your help docs and troubleshooting guides to resolve tickets
- **Sales assistants** that pull from product specs, pricing sheets, and competitive intel
- **Internal Q&A bots** that answer questions about company policies, HR docs, or engineering runbooks
- **Research workflows** that synthesize information from uploaded papers, reports, or data exports

---

## New Blocks

### Human in the Loop

Pause workflow execution pending human approval. The block sends a notification (email, Slack, or webhook) with approve/reject actions. Execution resumes only on approval—useful for high-stakes operations like customer-facing emails, financial transactions, or content publishing.

### Agent Block

The Agent block now supports three additional tool types:

- **Workflows as tools**: Agents can invoke other Sim workflows, enabling hierarchical architectures where a coordinator agent delegates to specialized sub-workflows
- **Knowledge base queries**: Agents search your indexed documents directly, retrieving relevant context for their responses
- **Custom functions**: Execute JavaScript or Python code in isolated sandboxes with configurable timeout and memory limits

### Subflows

Group blocks into collapsible subflows. Use them for loops (iterate over arrays), parallel execution (run branches concurrently), or logical organization. Subflows can be nested and keep complex workflows manageable.

### Router

Conditional branching based on data or LLM classification. Define rules or let the router use an LLM to determine intent and select the appropriate path.

The router now exposes its reasoning in execution logs—when debugging unexpected routing, you can see exactly why a particular branch was selected.

---

## Model Providers

Sim supports 14 providers. Eleven were already available: [OpenAI](https://openai.com), [Anthropic](https://anthropic.com), [Google](https://ai.google.dev), [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service), [xAI](https://x.ai), [Mistral](https://mistral.ai), [Deepseek](https://deepseek.com), [Groq](https://groq.com), [Cerebras](https://cerebras.ai), [Ollama](https://ollama.com), and [OpenRouter](https://openrouter.ai).

New in v0.5:

- **[AWS Bedrock](https://aws.amazon.com/bedrock)**: Claude, Nova, Llama, Mistral, and Cohere models via your AWS account
- **[Google Vertex AI](https://cloud.google.com/vertex-ai)**: Gemini models through Google Cloud
- **[vLLM](https://github.com/vllm-project/vllm)**: Self-hosted models on your own infrastructure

Model selection is per-block, so you can use faster/cheaper models for simple tasks and more capable models where needed.

---

## Developer Experience

**Custom Tools**: Define your own integrations with custom HTTP endpoints, authentication (API key, OAuth, Bearer token), and request/response schemas. Custom tools appear in the block palette alongside built-in integrations.
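
A definition looks roughly like the sketch below; the field names are illustrative, not Sim's exact schema:

```typescript
// Hypothetical custom-tool definition: an HTTP endpoint plus an input schema.
const weatherTool = {
  name: 'get_weather',
  description: 'Fetch current weather for a city',
  request: {
    method: 'GET',
    url: 'https://api.example.com/v1/weather?city={{city}}',
    auth: { type: 'api_key', header: 'X-Api-Key' },
  },
  inputSchema: {
    type: 'object',
    properties: { city: { type: 'string' } },
    required: ['city'],
  },
}
```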

**Environment Variables**: Encrypted key-value storage for secrets and configuration. Variables are decrypted at runtime and can be referenced in any block configuration.

**Import/Export**: Export workflows or entire workspaces as JSON. Imports preserve all blocks, connections, configurations, and variable references.

**File Manager**: Upload files to your workspace for use in workflows—templates, seed data, static assets. Files are accessible via internal references or presigned URLs.

---

## Get Started

Available now at [sim.ai](https://sim.ai). Check out the [docs](https://docs.sim.ai) to dive deeper.

*Questions? [help@sim.ai](mailto:help@sim.ai) · [Discord](https://sim.ai/discord)*
@@ -120,12 +120,6 @@ export const SPECIAL_REFERENCE_PREFIXES = [
REFERENCE.PREFIX.VARIABLE,
] as const

export const RESERVED_BLOCK_NAMES = [
REFERENCE.PREFIX.LOOP,
REFERENCE.PREFIX.PARALLEL,
REFERENCE.PREFIX.VARIABLE,
] as const

export const LOOP_REFERENCE = {
ITERATION: 'iteration',
INDEX: 'index',

@@ -24,71 +24,6 @@ function createBlock(id: string, metadataId: string): SerializedBlock {
}
}

describe('DAGBuilder disabled subflow validation', () => {
it('skips validation for disabled loops with no blocks inside', () => {
const workflow: SerializedWorkflow = {
version: '1',
blocks: [
createBlock('start', BlockType.STARTER),
{ ...createBlock('loop-block', BlockType.FUNCTION), enabled: false },
],
connections: [],
loops: {
'loop-1': {
id: 'loop-1',
nodes: [], // Empty loop - would normally throw
iterations: 3,
},
},
}

const builder = new DAGBuilder()
// Should not throw even though loop has no blocks inside
expect(() => builder.build(workflow)).not.toThrow()
})

it('skips validation for disabled parallels with no blocks inside', () => {
const workflow: SerializedWorkflow = {
version: '1',
blocks: [createBlock('start', BlockType.STARTER)],
connections: [],
loops: {},
parallels: {
'parallel-1': {
id: 'parallel-1',
nodes: [], // Empty parallel - would normally throw
},
},
}

const builder = new DAGBuilder()
// Should not throw even though parallel has no blocks inside
expect(() => builder.build(workflow)).not.toThrow()
})

it('skips validation for loops where all inner blocks are disabled', () => {
const workflow: SerializedWorkflow = {
version: '1',
blocks: [
createBlock('start', BlockType.STARTER),
{ ...createBlock('inner-block', BlockType.FUNCTION), enabled: false },
],
connections: [],
loops: {
'loop-1': {
id: 'loop-1',
nodes: ['inner-block'], // Has node but it's disabled
iterations: 3,
},
},
}

const builder = new DAGBuilder()
// Should not throw - loop is effectively disabled since all inner blocks are disabled
expect(() => builder.build(workflow)).not.toThrow()
})
})

describe('DAGBuilder human-in-the-loop transformation', () => {
it('creates trigger nodes and rewires edges for pause blocks', () => {
const workflow: SerializedWorkflow = {

@@ -136,18 +136,17 @@ export class DAGBuilder {
nodes: string[] | undefined,
type: 'Loop' | 'Parallel'
): void {
const sentinelStartId =
type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
const sentinelStartNode = dag.nodes.get(sentinelStartId)

if (!sentinelStartNode) return

if (!nodes || nodes.length === 0) {
throw new Error(
`${type} has no blocks inside. Add at least one block to the ${type.toLowerCase()}.`
)
}

const sentinelStartId =
type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
const sentinelStartNode = dag.nodes.get(sentinelStartId)
if (!sentinelStartNode) return

const hasConnections = Array.from(sentinelStartNode.outgoingEdges.values()).some((edge) =>
nodes.includes(extractBaseBlockId(edge.target))
)

@@ -20,13 +20,21 @@ export class EdgeManager {
const activatedTargets: string[] = []
const edgesToDeactivate: Array<{ target: string; handle?: string }> = []

for (const [, edge] of node.outgoingEdges) {
// First pass: categorize edges as activating or deactivating
// Don't modify incomingEdges yet - we need the original state for deactivation checks
for (const [edgeId, edge] of node.outgoingEdges) {
if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) {
continue
}

if (!this.shouldActivateEdge(edge, output)) {
if (!this.isLoopEdge(edge.sourceHandle)) {
const shouldActivate = this.shouldActivateEdge(edge, output)
if (!shouldActivate) {
const isLoopEdge =
edge.sourceHandle === EDGE.LOOP_CONTINUE ||
edge.sourceHandle === EDGE.LOOP_CONTINUE_ALT ||
edge.sourceHandle === EDGE.LOOP_EXIT

if (!isLoopEdge) {
edgesToDeactivate.push({ target: edge.target, handle: edge.sourceHandle })
}
continue
@@ -35,19 +43,13 @@ export class EdgeManager {
activatedTargets.push(edge.target)
}

const cascadeTargets = new Set<string>()
// Second pass: process deactivations while incomingEdges is still intact
// This ensures hasActiveIncomingEdges can find all potential sources
for (const { target, handle } of edgesToDeactivate) {
this.deactivateEdgeAndDescendants(node.id, target, handle, cascadeTargets)
}

if (activatedTargets.length === 0) {
for (const { target } of edgesToDeactivate) {
if (this.isTerminalControlNode(target)) {
cascadeTargets.add(target)
}
}
this.deactivateEdgeAndDescendants(node.id, target, handle)
}

// Third pass: update incomingEdges for activated targets
for (const targetId of activatedTargets) {
const targetNode = this.dag.nodes.get(targetId)
if (!targetNode) {
@@ -57,25 +59,28 @@ export class EdgeManager {
targetNode.incomingEdges.delete(node.id)
}

// Fourth pass: check readiness after all edge processing is complete
for (const targetId of activatedTargets) {
if (this.isTargetReady(targetId)) {
const targetNode = this.dag.nodes.get(targetId)
if (targetNode && this.isNodeReady(targetNode)) {
readyNodes.push(targetId)
}
}

for (const targetId of cascadeTargets) {
if (!readyNodes.includes(targetId) && !activatedTargets.includes(targetId)) {
if (this.isTargetReady(targetId)) {
readyNodes.push(targetId)
}
}
}

return readyNodes
}

isNodeReady(node: DAGNode): boolean {
return node.incomingEdges.size === 0 || this.countActiveIncomingEdges(node) === 0
if (node.incomingEdges.size === 0) {
return true
}

const activeIncomingCount = this.countActiveIncomingEdges(node)
if (activeIncomingCount > 0) {
return false
}

return true
}

restoreIncomingEdge(targetNodeId: string, sourceNodeId: string): void {
@@ -94,10 +99,13 @@ export class EdgeManager {

/**
* Clear deactivated edges for a set of nodes (used when restoring loop state for next iteration).
* This ensures error/success edges can be re-evaluated on each iteration.
*/
clearDeactivatedEdgesForNodes(nodeIds: Set<string>): void {
const edgesToRemove: string[] = []
for (const edgeKey of this.deactivatedEdges) {
// Edge key format is "sourceId-targetId-handle"
// Check if either source or target is in the nodeIds set
for (const nodeId of nodeIds) {
if (edgeKey.startsWith(`${nodeId}-`) || edgeKey.includes(`-${nodeId}-`)) {
edgesToRemove.push(edgeKey)
@@ -110,44 +118,6 @@ export class EdgeManager {
}
}

private isTargetReady(targetId: string): boolean {
const targetNode = this.dag.nodes.get(targetId)
return targetNode ? this.isNodeReady(targetNode) : false
}

private isLoopEdge(handle?: string): boolean {
return (
handle === EDGE.LOOP_CONTINUE ||
handle === EDGE.LOOP_CONTINUE_ALT ||
handle === EDGE.LOOP_EXIT
)
}

private isControlEdge(handle?: string): boolean {
return (
handle === EDGE.LOOP_CONTINUE ||
handle === EDGE.LOOP_CONTINUE_ALT ||
handle === EDGE.LOOP_EXIT ||
handle === EDGE.PARALLEL_EXIT
)
}

private isBackwardsEdge(sourceHandle?: string): boolean {
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
}

private isTerminalControlNode(nodeId: string): boolean {
const node = this.dag.nodes.get(nodeId)
if (!node || node.outgoingEdges.size === 0) return false

for (const [, edge] of node.outgoingEdges) {
if (!this.isControlEdge(edge.sourceHandle)) {
return false
}
}
return true
}

private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean {
const handle = edge.sourceHandle

@@ -189,12 +159,14 @@ export class EdgeManager {
}
}

private isBackwardsEdge(sourceHandle?: string): boolean {
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
}

private deactivateEdgeAndDescendants(
sourceId: string,
targetId: string,
sourceHandle?: string,
cascadeTargets?: Set<string>,
isCascade = false
sourceHandle?: string
): void {
const edgeKey = this.createEdgeKey(sourceId, targetId, sourceHandle)
if (this.deactivatedEdges.has(edgeKey)) {
@@ -202,46 +174,38 @@ export class EdgeManager {
}

this.deactivatedEdges.add(edgeKey)

const targetNode = this.dag.nodes.get(targetId)
if (!targetNode) return

if (isCascade && this.isTerminalControlNode(targetId)) {
cascadeTargets?.add(targetId)
}

if (this.hasActiveIncomingEdges(targetNode, edgeKey)) {
return
}

for (const [, outgoingEdge] of targetNode.outgoingEdges) {
if (!this.isControlEdge(outgoingEdge.sourceHandle)) {
this.deactivateEdgeAndDescendants(
targetId,
outgoingEdge.target,
outgoingEdge.sourceHandle,
cascadeTargets,
true
)
// Check if target has other active incoming edges
// Pass the specific edge key being deactivated, not just source ID,
// to handle multiple edges from same source to same target (e.g., condition branches)
const hasOtherActiveIncoming = this.hasActiveIncomingEdges(targetNode, edgeKey)
if (!hasOtherActiveIncoming) {
for (const [_, outgoingEdge] of targetNode.outgoingEdges) {
this.deactivateEdgeAndDescendants(targetId, outgoingEdge.target, outgoingEdge.sourceHandle)
}
}
}

/**
* Checks if a node has any active incoming edges besides the one being excluded.
* This properly handles the case where multiple edges from the same source go to
* the same target (e.g., multiple condition branches pointing to one block).
*/
private hasActiveIncomingEdges(node: DAGNode, excludeEdgeKey: string): boolean {
for (const incomingSourceId of node.incomingEdges) {
const incomingNode = this.dag.nodes.get(incomingSourceId)
if (!incomingNode) continue

for (const [, incomingEdge] of incomingNode.outgoingEdges) {
for (const [_, incomingEdge] of incomingNode.outgoingEdges) {
if (incomingEdge.target === node.id) {
const incomingEdgeKey = this.createEdgeKey(
incomingSourceId,
node.id,
incomingEdge.sourceHandle
)
// Skip the specific edge being excluded, but check other edges from same source
if (incomingEdgeKey === excludeEdgeKey) continue
if (!this.deactivatedEdges.has(incomingEdgeKey)) {
return true

@@ -554,413 +554,6 @@ describe('ExecutionEngine', () => {
})
})

describe('Error handling in execution', () => {
it('should fail execution when a single node throws an error', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })

const dag = createMockDAG([startNode, errorNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
throw new Error('Block execution failed')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Block execution failed')
})

it('should stop parallel branches when one branch throws an error', async () => {
const startNode = createMockNode('start', 'starter')
const parallelNodes = Array.from({ length: 5 }, (_, i) =>
createMockNode(`parallel${i}`, 'function')
)

parallelNodes.forEach((_, i) => {
startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
})

const dag = createMockDAG([startNode, ...parallelNodes])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
return []
})

const executedNodes: string[] = []
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
executedNodes.push(nodeId)
if (nodeId === 'parallel0') {
await new Promise((resolve) => setTimeout(resolve, 10))
throw new Error('Parallel branch failed')
}
await new Promise((resolve) => setTimeout(resolve, 100))
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Parallel branch failed')
})

it('should capture only the first error when multiple parallel branches fail', async () => {
const startNode = createMockNode('start', 'starter')
const parallelNodes = Array.from({ length: 3 }, (_, i) =>
createMockNode(`parallel${i}`, 'function')
)

parallelNodes.forEach((_, i) => {
startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
})

const dag = createMockDAG([startNode, ...parallelNodes])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'parallel0') {
await new Promise((resolve) => setTimeout(resolve, 10))
throw new Error('First error')
}
if (nodeId === 'parallel1') {
await new Promise((resolve) => setTimeout(resolve, 20))
throw new Error('Second error')
}
if (nodeId === 'parallel2') {
await new Promise((resolve) => setTimeout(resolve, 30))
throw new Error('Third error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('First error')
})

it('should wait for ongoing executions to complete before throwing error', async () => {
const startNode = createMockNode('start', 'starter')
const fastErrorNode = createMockNode('fast-error', 'function')
const slowNode = createMockNode('slow', 'function')

startNode.outgoingEdges.set('edge1', { target: 'fast-error' })
startNode.outgoingEdges.set('edge2', { target: 'slow' })

const dag = createMockDAG([startNode, fastErrorNode, slowNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['fast-error', 'slow']
return []
})

let slowNodeCompleted = false
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'fast-error') {
await new Promise((resolve) => setTimeout(resolve, 10))
throw new Error('Fast error')
}
if (nodeId === 'slow') {
await new Promise((resolve) => setTimeout(resolve, 50))
slowNodeCompleted = true
return { nodeId, output: {}, isFinalOutput: false }
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Fast error')

expect(slowNodeCompleted).toBe(true)
})

it('should not queue new nodes after an error occurs', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
const afterErrorNode = createMockNode('after-error', 'function')

startNode.outgoingEdges.set('edge1', { target: 'error-node' })
errorNode.outgoingEdges.set('edge2', { target: 'after-error' })

const dag = createMockDAG([startNode, errorNode, afterErrorNode])
const context = createMockContext()

const queuedNodes: string[] = []
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') {
queuedNodes.push('error-node')
return ['error-node']
}
if (node.id === 'error-node') {
queuedNodes.push('after-error')
return ['after-error']
}
return []
})

const executedNodes: string[] = []
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
executedNodes.push(nodeId)
if (nodeId === 'error-node') {
throw new Error('Node error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('start')).rejects.toThrow('Node error')

expect(executedNodes).not.toContain('after-error')
})

it('should populate error result with metadata when execution fails', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })

const dag = createMockDAG([startNode, errorNode])
const context = createMockContext()
context.blockLogs.push({
blockId: 'start',
blockName: 'Start',
blockType: 'starter',
startedAt: new Date().toISOString(),
endedAt: new Date().toISOString(),
durationMs: 10,
success: true,
})

const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
const error = new Error('Execution failed') as any
error.executionResult = {
success: false,
output: { partial: 'data' },
logs: context.blockLogs,
metadata: context.metadata,
}
throw error
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

try {
await engine.run('start')
expect.fail('Should have thrown')
} catch (error: any) {
expect(error.executionResult).toBeDefined()
expect(error.executionResult.metadata.endTime).toBeDefined()
expect(error.executionResult.metadata.duration).toBeDefined()
}
})

it('should prefer cancellation status over error when both occur', async () => {
const abortController = new AbortController()

const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })

const dag = createMockDAG([startNode, errorNode])
const context = createMockContext({ abortSignal: abortController.signal })
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
abortController.abort()
throw new Error('Node error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
const result = await engine.run('start')

expect(result.status).toBe('cancelled')
expect(result.success).toBe(false)
})

it('should stop loop iteration when error occurs in loop body', async () => {
const loopStartNode = createMockNode('loop-start', 'loop_sentinel')
loopStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId: 'loop1' }

const loopBodyNode = createMockNode('loop-body', 'function')
loopBodyNode.metadata = { isLoopNode: true, loopId: 'loop1' }

const loopEndNode = createMockNode('loop-end', 'loop_sentinel')
loopEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId: 'loop1' }

const afterLoopNode = createMockNode('after-loop', 'function')

loopStartNode.outgoingEdges.set('edge1', { target: 'loop-body' })
loopBodyNode.outgoingEdges.set('edge2', { target: 'loop-end' })
loopEndNode.outgoingEdges.set('loop_continue', {
target: 'loop-start',
sourceHandle: 'loop_continue',
})
loopEndNode.outgoingEdges.set('loop_complete', {
target: 'after-loop',
sourceHandle: 'loop_complete',
})

const dag = createMockDAG([loopStartNode, loopBodyNode, loopEndNode, afterLoopNode])
const context = createMockContext()

let iterationCount = 0
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'loop-start') return ['loop-body']
if (node.id === 'loop-body') return ['loop-end']
if (node.id === 'loop-end') {
iterationCount++
if (iterationCount < 5) return ['loop-start']
return ['after-loop']
}
return []
})

const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'loop-body' && iterationCount >= 2) {
throw new Error('Loop body error on iteration 3')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

await expect(engine.run('loop-start')).rejects.toThrow('Loop body error on iteration 3')

expect(iterationCount).toBeLessThanOrEqual(3)
})

it('should handle error that is not an Error instance', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
|
||||
|
||||
const dag = createMockDAG([startNode, errorNode])
|
||||
const context = createMockContext()
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') return ['error-node']
|
||||
return []
|
||||
})
|
||||
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
if (nodeId === 'error-node') {
|
||||
throw 'String error message'
|
||||
}
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}),
|
||||
handleNodeCompletion: vi.fn(),
|
||||
} as unknown as MockNodeOrchestrator
|
||||
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
await expect(engine.run('start')).rejects.toThrow('String error message')
|
||||
})
|
||||
|
||||
it('should preserve partial output when error occurs after some blocks complete', async () => {
|
||||
const startNode = createMockNode('start', 'starter')
|
||||
const successNode = createMockNode('success', 'function')
|
||||
const errorNode = createMockNode('error-node', 'function')
|
||||
|
||||
startNode.outgoingEdges.set('edge1', { target: 'success' })
|
||||
successNode.outgoingEdges.set('edge2', { target: 'error-node' })
|
||||
|
||||
const dag = createMockDAG([startNode, successNode, errorNode])
|
||||
const context = createMockContext()
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') return ['success']
|
||||
if (node.id === 'success') return ['error-node']
|
||||
return []
|
||||
})
|
||||
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
if (nodeId === 'success') {
|
||||
return { nodeId, output: { successData: 'preserved' }, isFinalOutput: false }
|
||||
}
|
||||
if (nodeId === 'error-node') {
|
||||
throw new Error('Late error')
|
||||
}
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}),
|
||||
handleNodeCompletion: vi.fn(),
|
||||
} as unknown as MockNodeOrchestrator
|
||||
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
try {
|
||||
await engine.run('start')
|
||||
expect.fail('Should have thrown')
|
||||
} catch (error: any) {
|
||||
// Verify the error was thrown
|
||||
expect(error.message).toBe('Late error')
|
||||
// The partial output should be available in executionResult if attached
|
||||
if (error.executionResult) {
|
||||
expect(error.executionResult.output).toBeDefined()
|
||||
}
|
||||
}
|
||||
})
|
||||
})
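The helpers these cases lean on (createMockNode, createMockDAG, createMockEdgeManager, createMockContext, MockNodeOrchestrator) are defined elsewhere in the suite. A minimal sketch of the shapes the tests appear to assume — the field names here are illustrative assumptions, not the suite's actual implementation:

// Hypothetical scaffolding sketch; the real helpers live elsewhere in the suite.
interface MockNode {
  id: string
  type: string
  metadata?: Record<string, unknown>
  outgoingEdges: Map<string, { target: string; sourceHandle?: string }>
}

function createMockNode(id: string, type: string): MockNode {
  // Nodes start with no metadata and no outgoing edges; the tests add both.
  return { id, type, outgoingEdges: new Map() }
}

function createMockDAG(nodes: MockNode[]) {
  // The engine only needs id-based lookup, so a Map keyed by node id suffices.
  return { nodes: new Map(nodes.map((n) => [n.id, n])) }
}

function createMockEdgeManager(next: (node: MockNode) => string[]) {
  // Given a completed node, report which node ids become ready next.
  return { getReadyNodes: next }
}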

describe('Cancellation flag behavior', () => {
  it('should set cancelledFlag when abort signal fires', async () => {
    const abortController = new AbortController()

@@ -25,8 +25,6 @@ export class ExecutionEngine {
  private pausedBlocks: Map<string, PauseMetadata> = new Map()
  private allowResumeTriggers: boolean
  private cancelledFlag = false
  private errorFlag = false
  private executionError: Error | null = null
  private lastCancellationCheck = 0
  private readonly useRedisCancellation: boolean
  private readonly CANCELLATION_CHECK_INTERVAL_MS = 500
@@ -105,7 +103,7 @@ export class ExecutionEngine {
    this.initializeQueue(triggerBlockId)

    while (this.hasWork()) {
      if ((await this.checkCancellation()) || this.errorFlag) {
      if (await this.checkCancellation()) {
        break
      }
      await this.processQueue()
@@ -115,11 +113,6 @@ export class ExecutionEngine {
      await this.waitForAllExecutions()
    }

    // Rethrow the captured error so it's handled by the catch block
    if (this.errorFlag && this.executionError) {
      throw this.executionError
    }

    if (this.pausedBlocks.size > 0) {
      return this.buildPausedResult(startTime)
    }
@@ -203,17 +196,11 @@ export class ExecutionEngine {
  }

  private trackExecution(promise: Promise<void>): void {
    const trackedPromise = promise
      .catch((error) => {
        if (!this.errorFlag) {
          this.errorFlag = true
          this.executionError = error instanceof Error ? error : new Error(String(error))
        }
      })
      .finally(() => {
        this.executing.delete(trackedPromise)
      })
    this.executing.add(trackedPromise)
    this.executing.add(promise)
    promise.catch(() => {})
    promise.finally(() => {
      this.executing.delete(promise)
    })
  }

  private async waitForAnyExecution(): Promise<void> {
@@ -328,7 +315,7 @@ export class ExecutionEngine {

  private async processQueue(): Promise<void> {
    while (this.readyQueue.length > 0) {
      if ((await this.checkCancellation()) || this.errorFlag) {
      if (await this.checkCancellation()) {
        break
      }
      const nodeId = this.dequeue()
@@ -337,7 +324,7 @@ export class ExecutionEngine {
      this.trackExecution(promise)
    }

    if (this.executing.size > 0 && !this.cancelledFlag && !this.errorFlag) {
    if (this.executing.size > 0 && !this.cancelledFlag) {
      await this.waitForAnyExecution()
    }
  }
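The trackExecution hunk above is the heart of the error-flag behavior on one side of this compare: each tracked promise is wrapped so that the first rejection latches errorFlag, captures the error, and lets run() rethrow it once in-flight work has drained. A minimal standalone sketch of that pattern, detached from the engine's real types:

// Sketch: capture the first error across concurrently tracked promises,
// then surface it once after all in-flight work settles.
class FirstErrorTracker {
  private executing = new Set<Promise<void>>()
  private errorFlag = false
  private executionError: Error | null = null

  track(promise: Promise<void>): void {
    const tracked = promise
      .catch((error) => {
        if (!this.errorFlag) {
          // Latch only the first failure; later failures are ignored.
          this.errorFlag = true
          this.executionError = error instanceof Error ? error : new Error(String(error))
        }
      })
      .finally(() => {
        this.executing.delete(tracked)
      })
    this.executing.add(tracked)
  }

  async drainAndRethrow(): Promise<void> {
    // Wait for everything in flight, then rethrow the captured error once.
    await Promise.all(this.executing)
    if (this.errorFlag && this.executionError) throw this.executionError
  }
}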
@@ -305,7 +305,7 @@ export class AgentBlockHandler implements BlockHandler {
    base.executeFunction = async (callParams: Record<string, any>) => {
      const mergedParams = mergeToolParameters(userProvidedParams, callParams)

      const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
      const { blockData, blockNameMapping } = collectBlockData(ctx)

      const result = await executeTool(
        'function_execute',
@@ -317,7 +317,6 @@ export class AgentBlockHandler implements BlockHandler {
          workflowVariables: ctx.workflowVariables || {},
          blockData,
          blockNameMapping,
          blockOutputSchemas,
          isCustomTool: true,
          _context: {
            workflowId: ctx.workflowId,

@@ -26,7 +26,7 @@ export async function evaluateConditionExpression(
  const contextSetup = `const context = ${JSON.stringify(evalContext)};`
  const code = `${contextSetup}\nreturn Boolean(${conditionExpression})`

  const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
  const { blockData, blockNameMapping } = collectBlockData(ctx)

  const result = await executeTool(
    'function_execute',
@@ -37,7 +37,6 @@ export async function evaluateConditionExpression(
      workflowVariables: ctx.workflowVariables || {},
      blockData,
      blockNameMapping,
      blockOutputSchemas,
      _context: {
        workflowId: ctx.workflowId,
        workspaceId: ctx.workspaceId,
@@ -75,12 +75,7 @@ describe('FunctionBlockHandler', () => {
        workflowVariables: {},
        blockData: {},
        blockNameMapping: {},
        blockOutputSchemas: {},
        _context: {
          workflowId: mockContext.workflowId,
          workspaceId: mockContext.workspaceId,
          isDeployedContext: mockContext.isDeployedContext,
        },
        _context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
      }
      const expectedOutput: any = { result: 'Success' }

@@ -89,8 +84,8 @@ describe('FunctionBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'function_execute',
        expectedToolParams,
        false,
        mockContext
        false, // skipPostProcess
        mockContext // execution context
      )
      expect(result).toEqual(expectedOutput)
    })
@@ -112,12 +107,7 @@ describe('FunctionBlockHandler', () => {
        workflowVariables: {},
        blockData: {},
        blockNameMapping: {},
        blockOutputSchemas: {},
        _context: {
          workflowId: mockContext.workflowId,
          workspaceId: mockContext.workspaceId,
          isDeployedContext: mockContext.isDeployedContext,
        },
        _context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
      }
      const expectedOutput: any = { result: 'Success' }

@@ -126,8 +116,8 @@ describe('FunctionBlockHandler', () => {
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'function_execute',
        expectedToolParams,
        false,
        mockContext
        false, // skipPostProcess
        mockContext // execution context
      )
      expect(result).toEqual(expectedOutput)
    })
@@ -142,12 +132,7 @@ describe('FunctionBlockHandler', () => {
        workflowVariables: {},
        blockData: {},
        blockNameMapping: {},
        blockOutputSchemas: {},
        _context: {
          workflowId: mockContext.workflowId,
          workspaceId: mockContext.workspaceId,
          isDeployedContext: mockContext.isDeployedContext,
        },
        _context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
      }

      await handler.execute(mockContext, mockBlock, inputs)

@@ -23,7 +23,7 @@ export class FunctionBlockHandler implements BlockHandler {
      ? inputs.code.map((c: { content: string }) => c.content).join('\n')
      : inputs.code

    const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
    const { blockData, blockNameMapping } = collectBlockData(ctx)

    const result = await executeTool(
      'function_execute',
@@ -35,7 +35,6 @@ export class FunctionBlockHandler implements BlockHandler {
        workflowVariables: ctx.workflowVariables || {},
        blockData,
        blockNameMapping,
        blockOutputSchemas,
        _context: {
          workflowId: ctx.workflowId,
          workspaceId: ctx.workspaceId,
@@ -1,7 +1,7 @@
import '@sim/testing/mocks/executor'

import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
import { generateRouterPrompt } from '@/blocks/blocks/router'
import { BlockType } from '@/executor/constants'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import type { ExecutionContext } from '@/executor/types'
@@ -9,7 +9,6 @@ import { getProviderFromModel } from '@/providers/utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'

const mockGenerateRouterPrompt = generateRouterPrompt as Mock
const mockGenerateRouterV2Prompt = generateRouterV2Prompt as Mock
const mockGetProviderFromModel = getProviderFromModel as Mock
const mockFetch = global.fetch as unknown as Mock

@@ -45,7 +44,7 @@ describe('RouterBlockHandler', () => {
      metadata: { id: BlockType.ROUTER, name: 'Test Router' },
      position: { x: 50, y: 50 },
      config: { tool: BlockType.ROUTER, params: {} },
      inputs: { prompt: 'string', model: 'string' },
      inputs: { prompt: 'string', model: 'string' }, // Using ParamType strings
      outputs: {},
      enabled: true,
    }
@@ -73,11 +72,14 @@ describe('RouterBlockHandler', () => {
      workflow: mockWorkflow as SerializedWorkflow,
    }

    // Reset mocks using vi
    vi.clearAllMocks()

    // Default mock implementations
    mockGetProviderFromModel.mockReturnValue('openai')
    mockGenerateRouterPrompt.mockReturnValue('Generated System Prompt')

    // Set up fetch mock to return a successful response
    mockFetch.mockImplementation(() => {
      return Promise.resolve({
        ok: true,
@@ -145,6 +147,7 @@ describe('RouterBlockHandler', () => {
        })
      )

      // Verify the request body contains the expected data
      const fetchCallArgs = mockFetch.mock.calls[0]
      const requestBody = JSON.parse(fetchCallArgs[1].body)
      expect(requestBody).toMatchObject({
@@ -177,6 +180,7 @@ describe('RouterBlockHandler', () => {
      const inputs = { prompt: 'Test' }
      mockContext.workflow!.blocks = [mockBlock, mockTargetBlock2]

      // Expect execute to throw because getTargetBlocks (called internally) will throw
      await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
        'Target block target-block-1 not found'
      )
@@ -186,6 +190,7 @@ describe('RouterBlockHandler', () => {
    it('should throw error if LLM response is not a valid target block ID', async () => {
      const inputs = { prompt: 'Test', apiKey: 'test-api-key' }

      // Override fetch mock to return an invalid block ID
      mockFetch.mockImplementationOnce(() => {
        return Promise.resolve({
          ok: true,
@@ -223,6 +228,7 @@ describe('RouterBlockHandler', () => {
    it('should handle server error responses', async () => {
      const inputs = { prompt: 'Test error handling.', apiKey: 'test-api-key' }

      // Override fetch mock to return an error
      mockFetch.mockImplementationOnce(() => {
        return Promise.resolve({
          ok: false,
@@ -270,12 +276,13 @@ describe('RouterBlockHandler', () => {

      mockGetProviderFromModel.mockReturnValue('vertex')

      // Mock the database query for Vertex credential
      const mockDb = await import('@sim/db')
      const mockAccount = {
        id: 'test-vertex-credential-id',
        accessToken: 'mock-access-token',
        refreshToken: 'mock-refresh-token',
        expiresAt: new Date(Date.now() + 3600000),
        expiresAt: new Date(Date.now() + 3600000), // 1 hour from now
      }
      vi.spyOn(mockDb.db.query.account, 'findFirst').mockResolvedValue(mockAccount as any)

@@ -293,287 +300,3 @@ describe('RouterBlockHandler', () => {
      expect(requestBody.apiKey).toBe('mock-access-token')
    })
  })

describe('RouterBlockHandler V2', () => {
  let handler: RouterBlockHandler
  let mockRouterV2Block: SerializedBlock
  let mockContext: ExecutionContext
  let mockWorkflow: Partial<SerializedWorkflow>
  let mockTargetBlock1: SerializedBlock
  let mockTargetBlock2: SerializedBlock

  beforeEach(() => {
    mockTargetBlock1 = {
      id: 'target-block-1',
      metadata: { id: 'agent', name: 'Support Agent' },
      position: { x: 100, y: 100 },
      config: { tool: 'agent', params: {} },
      inputs: {},
      outputs: {},
      enabled: true,
    }
    mockTargetBlock2 = {
      id: 'target-block-2',
      metadata: { id: 'agent', name: 'Sales Agent' },
      position: { x: 100, y: 150 },
      config: { tool: 'agent', params: {} },
      inputs: {},
      outputs: {},
      enabled: true,
    }
    mockRouterV2Block = {
      id: 'router-v2-block-1',
      metadata: { id: BlockType.ROUTER_V2, name: 'Test Router V2' },
      position: { x: 50, y: 50 },
      config: { tool: BlockType.ROUTER_V2, params: {} },
      inputs: {},
      outputs: {},
      enabled: true,
    }
    mockWorkflow = {
      blocks: [mockRouterV2Block, mockTargetBlock1, mockTargetBlock2],
      connections: [
        {
          source: mockRouterV2Block.id,
          target: mockTargetBlock1.id,
          sourceHandle: 'router-route-support',
        },
        {
          source: mockRouterV2Block.id,
          target: mockTargetBlock2.id,
          sourceHandle: 'router-route-sales',
        },
      ],
    }

    handler = new RouterBlockHandler({})

    mockContext = {
      workflowId: 'test-workflow-id',
      blockStates: new Map(),
      blockLogs: [],
      metadata: { duration: 0 },
      environmentVariables: {},
      decisions: { router: new Map(), condition: new Map() },
      loopExecutions: new Map(),
      completedLoops: new Set(),
      executedBlocks: new Set(),
      activeExecutionPath: new Set(),
      workflow: mockWorkflow as SerializedWorkflow,
    }

    vi.clearAllMocks()

    mockGetProviderFromModel.mockReturnValue('openai')
    mockGenerateRouterV2Prompt.mockReturnValue('Generated V2 System Prompt')
  })

  it('should handle router_v2 blocks', () => {
    expect(handler.canHandle(mockRouterV2Block)).toBe(true)
  })

  it('should execute router V2 and return reasoning', async () => {
    const inputs = {
      context: 'I need help with a billing issue',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([
        { id: 'route-support', title: 'Support', value: 'Customer support inquiries' },
        { id: 'route-sales', title: 'Sales', value: 'Sales and pricing questions' },
      ]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({
              route: 'route-support',
              reasoning: 'The user mentioned a billing issue which is a customer support matter.',
            }),
            model: 'gpt-4o',
            tokens: { input: 150, output: 25, total: 175 },
          }),
      })
    })

    const result = await handler.execute(mockContext, mockRouterV2Block, inputs)

    expect(result).toMatchObject({
      context: 'I need help with a billing issue',
      model: 'gpt-4o',
      selectedRoute: 'route-support',
      reasoning: 'The user mentioned a billing issue which is a customer support matter.',
      selectedPath: {
        blockId: 'target-block-1',
        blockType: 'agent',
        blockTitle: 'Support Agent',
      },
    })
  })

  it('should include responseFormat in provider request', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description 1' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({ route: 'route-1', reasoning: 'Test reasoning' }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    await handler.execute(mockContext, mockRouterV2Block, inputs)

    const fetchCallArgs = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCallArgs[1].body)

    expect(requestBody.responseFormat).toEqual({
      name: 'router_response',
      schema: {
        type: 'object',
        properties: {
          route: {
            type: 'string',
            description: 'The selected route ID or NO_MATCH',
          },
          reasoning: {
            type: 'string',
            description: 'Brief explanation of why this route was chosen',
          },
        },
        required: ['route', 'reasoning'],
        additionalProperties: false,
      },
      strict: true,
    })
  })

  it('should handle NO_MATCH response with reasoning', async () => {
    const inputs = {
      context: 'Random unrelated query',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Specific topic' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({
              route: 'NO_MATCH',
              reasoning: 'The query does not relate to any available route.',
            }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
      'Router could not determine a matching route: The query does not relate to any available route.'
    )
  })

  it('should throw error for invalid route ID in response', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({ route: 'invalid-route', reasoning: 'Some reasoning' }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
      /Router could not determine a valid route/
    )
  })

  it('should handle routes passed as array instead of JSON string', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: [{ id: 'route-1', title: 'Route 1', value: 'Description' }],
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({ route: 'route-1', reasoning: 'Matched route 1' }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    const result = await handler.execute(mockContext, mockRouterV2Block, inputs)

    expect(result.selectedRoute).toBe('route-1')
    expect(result.reasoning).toBe('Matched route 1')
  })

  it('should throw error when no routes are defined', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: '[]',
    }

    await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
      'No routes defined for router'
    )
  })

  it('should handle fallback when JSON parsing fails', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: 'route-1',
            model: 'gpt-4o',
            tokens: { input: 100, output: 5, total: 105 },
          }),
      })
    })

    const result = await handler.execute(mockContext, mockRouterV2Block, inputs)

    expect(result.selectedRoute).toBe('route-1')
    expect(result.reasoning).toBe('')
  })
})
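Taken together, these V2 tests pin down a contract: the handler requests a strict JSON responseFormat, parses the reply, falls back to treating raw content as a route id, rejects NO_MATCH with the model's reasoning, and rejects unknown route ids. A condensed sketch of that parse-and-validate step, assuming the provider reply carries a string content field:

interface RouteDefinition {
  id: string
  title: string
  value: string
}

// Sketch of the response contract the tests above exercise.
function resolveRoutedReply(content: string, routes: RouteDefinition[]) {
  let route = ''
  let reasoning = ''
  try {
    const parsed = JSON.parse(content)
    route = String(parsed.route ?? '').trim()
    reasoning = String(parsed.reasoning ?? '')
  } catch {
    // Model ignored the schema; degrade to treating the raw text as a route id.
    route = content.trim()
  }
  if (route.toUpperCase() === 'NO_MATCH') {
    throw new Error(
      reasoning
        ? `Router could not determine a matching route: ${reasoning}`
        : 'Router could not determine a matching route for the given context'
    )
  }
  const chosen = routes.find((r) => r.id === route)
  if (!chosen) {
    throw new Error(`Router could not determine a valid route: ${route}`)
  }
  return { selectedRoute: chosen.id, reasoning }
}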
@@ -238,25 +238,6 @@ export class RouterBlockHandler implements BlockHandler {
      apiKey: finalApiKey,
      workflowId: ctx.workflowId,
      workspaceId: ctx.workspaceId,
      responseFormat: {
        name: 'router_response',
        schema: {
          type: 'object',
          properties: {
            route: {
              type: 'string',
              description: 'The selected route ID or NO_MATCH',
            },
            reasoning: {
              type: 'string',
              description: 'Brief explanation of why this route was chosen',
            },
          },
          required: ['route', 'reasoning'],
          additionalProperties: false,
        },
        strict: true,
      },
    }

    if (providerId === 'vertex') {
@@ -296,31 +277,16 @@ export class RouterBlockHandler implements BlockHandler {

    const result = await response.json()

    let chosenRouteId: string
    let reasoning = ''

    try {
      const parsedResponse = JSON.parse(result.content)
      chosenRouteId = parsedResponse.route?.trim() || ''
      reasoning = parsedResponse.reasoning || ''
    } catch (_parseError) {
      logger.error('Router response was not valid JSON despite responseFormat', {
        content: result.content,
      })
      chosenRouteId = result.content.trim()
    }
    const chosenRouteId = result.content.trim()

    if (chosenRouteId === 'NO_MATCH' || chosenRouteId.toUpperCase() === 'NO_MATCH') {
      logger.info('Router determined no route matches the context, routing to error path')
      throw new Error(
        reasoning
          ? `Router could not determine a matching route: ${reasoning}`
          : 'Router could not determine a matching route for the given context'
      )
      throw new Error('Router could not determine a matching route for the given context')
    }

    const chosenRoute = routes.find((r) => r.id === chosenRouteId)

    // Throw error if LLM returns invalid route ID - this routes through error path
    if (!chosenRoute) {
      const availableRoutes = routes.map((r) => ({ id: r.id, title: r.title }))
      logger.error(
@@ -332,6 +298,7 @@ export class RouterBlockHandler implements BlockHandler {
      )
    }

    // Find the target block connected to this route's handle
    const connection = ctx.workflow?.connections.find(
      (conn) => conn.source === block.id && conn.sourceHandle === `router-${chosenRoute.id}`
    )
@@ -367,7 +334,6 @@ export class RouterBlockHandler implements BlockHandler {
        total: cost.total,
      },
      selectedRoute: chosenRoute.id,
      reasoning,
      selectedPath: targetBlock
        ? {
            blockId: targetBlock.id,
@@ -387,7 +353,7 @@ export class RouterBlockHandler implements BlockHandler {
  }

  /**
   * Parse routes from input (can be JSON string or array)
   * Parse routes from input (can be JSON string or array).
   */
  private parseRoutes(input: any): RouteDefinition[] {
    try {
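parseRoutes is truncated in this view. Per the tests above, routes can arrive either as a JSON string or as an already-parsed array, so a plausible normalization — an assumption, not the method's actual body — might look like this, reusing the RouteDefinition shape sketched earlier:

// Hypothetical sketch of the normalization parseRoutes needs to perform.
function parseRoutesSketch(input: unknown): RouteDefinition[] {
  if (Array.isArray(input)) return input as RouteDefinition[]
  if (typeof input === 'string') {
    const parsed = JSON.parse(input)
    return Array.isArray(parsed) ? (parsed as RouteDefinition[]) : []
  }
  return []
}

The 'No routes defined for router' rejection seen in the tests would then be a length check on the normalized array at the call site.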
@@ -204,21 +204,26 @@ describe('WorkflowBlockHandler', () => {
      })
    })

    it('should throw error for failed child output so BlockExecutor can check error port', () => {
    it('should map failed child output correctly', () => {
      const childResult = {
        success: false,
        error: 'Child workflow failed',
      }

      expect(() =>
        (handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
      ).toThrow('Error in child workflow "Child Workflow": Child workflow failed')
      const result = (handler as any).mapChildOutputToParent(
        childResult,
        'child-id',
        'Child Workflow',
        100
      )

      try {
        ;(handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
      } catch (error: any) {
        expect(error.childTraceSpans).toEqual([])
      }
      expect(result).toEqual({
        success: false,
        childWorkflowName: 'Child Workflow',
        result: {},
        error: 'Child workflow failed',
        childTraceSpans: [],
      })
    })

    it('should handle nested response structures', () => {

@@ -144,11 +144,6 @@ export class WorkflowBlockHandler implements BlockHandler {
      const workflowMetadata = workflows[workflowId]
      const childWorkflowName = workflowMetadata?.name || workflowId

      const originalError = error.message || 'Unknown error'
      const wrappedError = new Error(
        `Error in child workflow "${childWorkflowName}": ${originalError}`
      )

      if (error.executionResult?.logs) {
        const executionResult = error.executionResult as ExecutionResult

@@ -164,12 +159,28 @@ export class WorkflowBlockHandler implements BlockHandler {
        )

        logger.info(`Captured ${childTraceSpans.length} child trace spans from failed execution`)
        ;(wrappedError as any).childTraceSpans = childTraceSpans
      } else if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) {
        ;(wrappedError as any).childTraceSpans = error.childTraceSpans

        return {
          success: false,
          childWorkflowName,
          result: {},
          error: error.message || 'Child workflow execution failed',
          childTraceSpans: childTraceSpans,
        } as Record<string, any>
      }

      throw wrappedError
      if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) {
        return {
          success: false,
          childWorkflowName,
          result: {},
          error: error.message || 'Child workflow execution failed',
          childTraceSpans: error.childTraceSpans,
        } as Record<string, any>
      }

      const originalError = error.message || 'Unknown error'
      throw new Error(`Error in child workflow "${childWorkflowName}": ${originalError}`)
    }
  }

@@ -441,13 +452,17 @@ export class WorkflowBlockHandler implements BlockHandler {

    if (!success) {
      logger.warn(`Child workflow ${childWorkflowName} failed`)
      const error = new Error(
        `Error in child workflow "${childWorkflowName}": ${childResult.error || 'Child workflow execution failed'}`
      )
      ;(error as any).childTraceSpans = childTraceSpans || []
      throw error
      // Return failure with child trace spans so they can be displayed
      return {
        success: false,
        childWorkflowName,
        result,
        error: childResult.error || 'Child workflow execution failed',
        childTraceSpans: childTraceSpans || [],
      } as Record<string, any>
    }

    // Success case
    return {
      success: true,
      childWorkflowName,

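The hunks above trade throw-on-child-failure for a returned failure object, so childTraceSpans reach the caller as plain data rather than riding on ad-hoc Error properties. A condensed sketch of the returned shape, with types simplified from the diff:

interface ChildOutcome {
  success: boolean
  childWorkflowName: string
  result: Record<string, unknown>
  error?: string
  childTraceSpans: unknown[]
}

// Sketch: a failure object keeps trace spans flowing to the caller, where a
// thrown Error would need them attached as untyped extra properties.
function mapFailure(
  childWorkflowName: string,
  error: string,
  childTraceSpans: unknown[] | undefined
): ChildOutcome {
  return {
    success: false,
    childWorkflowName,
    result: {},
    error,
    childTraceSpans: childTraceSpans ?? [],
  }
}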
@@ -1,43 +1,24 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'

export interface BlockDataCollection {
  blockData: Record<string, unknown>
  blockData: Record<string, any>
  blockNameMapping: Record<string, string>
  blockOutputSchemas: Record<string, OutputSchema>
}

export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
  const blockData: Record<string, unknown> = {}
  const blockData: Record<string, any> = {}
  const blockNameMapping: Record<string, string> = {}
  const blockOutputSchemas: Record<string, OutputSchema> = {}

  for (const [id, state] of ctx.blockStates.entries()) {
    if (state.output !== undefined) {
      blockData[id] = state.output
    }

    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
    if (!workflowBlock) continue

    if (workflowBlock.metadata?.name) {
      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
    }

    const blockType = workflowBlock.metadata?.id
    if (blockType) {
      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
      const subBlocks = params
        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
        : undefined
      const schema = getBlockOutputs(blockType, subBlocks)
      if (schema && Object.keys(schema).length > 0) {
        blockOutputSchemas[id] = schema
    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
    if (workflowBlock?.metadata?.name) {
      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
    }
      }
    }
  }

  return { blockData, blockNameMapping, blockOutputSchemas }
  return { blockData, blockNameMapping }
}

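collectBlockData turns executed block states into the lookup tables that function_execute receives. A short usage sketch against the trimmed BlockDataCollection on one side of this diff (the context value is assumed to come from a running execution):

// Sketch: resolving a display-name reference through the collected tables.
declare const ctx: ExecutionContext // assumed: a live execution context

const { blockData, blockNameMapping } = collectBlockData(ctx)

// blockNameMapping maps normalized display names to block ids, so a
// reference like <My Block.value> resolves via blockData[id].
const id = blockNameMapping['myblock']
const output = id !== undefined ? blockData[id] : undefined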
@@ -1,255 +0,0 @@
/**
 * @vitest-environment node
 */
import { describe, expect, it } from 'vitest'
import {
  type BlockReferenceContext,
  InvalidFieldError,
  resolveBlockReference,
} from './block-reference'

describe('resolveBlockReference', () => {
  const createContext = (
    overrides: Partial<BlockReferenceContext> = {}
  ): BlockReferenceContext => ({
    blockNameMapping: { start: 'block-1', agent: 'block-2' },
    blockData: {},
    blockOutputSchemas: {},
    ...overrides,
  })

  describe('block name resolution', () => {
    it('should return undefined when block name does not exist', () => {
      const ctx = createContext()
      const result = resolveBlockReference('unknown', ['field'], ctx)
      expect(result).toBeUndefined()
    })

    it('should normalize block name before lookup', () => {
      const ctx = createContext({
        blockNameMapping: { myblock: 'block-1' },
        blockData: { 'block-1': { value: 'test' } },
      })

      const result = resolveBlockReference('MyBlock', ['value'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })

    it('should handle block names with spaces', () => {
      const ctx = createContext({
        blockNameMapping: { myblock: 'block-1' },
        blockData: { 'block-1': { value: 'test' } },
      })

      const result = resolveBlockReference('My Block', ['value'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })
  })

  describe('field resolution', () => {
    it('should return entire block output when no path specified', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: 'hello', other: 'data' } },
      })

      const result = resolveBlockReference('start', [], ctx)
      expect(result).toEqual({
        value: { input: 'hello', other: 'data' },
        blockId: 'block-1',
      })
    })

    it('should resolve simple field path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: 'hello' } },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: 'hello', blockId: 'block-1' })
    })

    it('should resolve nested field path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { response: { data: { name: 'test' } } } },
      })

      const result = resolveBlockReference('start', ['response', 'data', 'name'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })

    it('should resolve array index path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { items: ['a', 'b', 'c'] } },
      })

      const result = resolveBlockReference('start', ['items', '1'], ctx)
      expect(result).toEqual({ value: 'b', blockId: 'block-1' })
    })

    it('should return undefined value when field exists but has no value', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: undefined } },
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })

    it('should return null value when field has null', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: null } },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: null, blockId: 'block-1' })
    })
  })

  describe('schema validation', () => {
    it('should throw InvalidFieldError when field not in schema', () => {
      const ctx = createContext({
        blockData: { 'block-1': { existing: 'value' } },
        blockOutputSchemas: {
          'block-1': {
            input: { type: 'string' },
            conversationId: { type: 'string' },
          },
        },
      })

      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(
        /"invalid" doesn't exist on block "start"/
      )
    })

    it('should include available fields in error message', () => {
      const ctx = createContext({
        blockData: { 'block-1': {} },
        blockOutputSchemas: {
          'block-1': {
            input: { type: 'string' },
            conversationId: { type: 'string' },
            files: { type: 'files' },
          },
        },
      })

      try {
        resolveBlockReference('start', ['typo'], ctx)
        expect.fail('Should have thrown')
      } catch (error) {
        expect(error).toBeInstanceOf(InvalidFieldError)
        const fieldError = error as InvalidFieldError
        expect(fieldError.availableFields).toContain('input')
        expect(fieldError.availableFields).toContain('conversationId')
        expect(fieldError.availableFields).toContain('files')
      }
    })

    it('should allow valid field even when value is undefined', () => {
      const ctx = createContext({
        blockData: { 'block-1': {} },
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })

    it('should validate path when block has no output yet', () => {
      const ctx = createContext({
        blockData: {},
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
    })

    it('should return undefined for valid field when block has no output', () => {
      const ctx = createContext({
        blockData: {},
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })
  })

  describe('without schema (pass-through mode)', () => {
    it('should return undefined value without throwing when no schema', () => {
      const ctx = createContext({
        blockData: { 'block-1': { existing: 'value' } },
      })

      const result = resolveBlockReference('start', ['missing'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })
  })

  describe('file type handling', () => {
    it('should allow file property access', () => {
      const ctx = createContext({
        blockData: {
          'block-1': {
            files: [{ name: 'test.txt', url: 'http://example.com/file' }],
          },
        },
        blockOutputSchemas: {
          'block-1': { files: { type: 'files' } },
        },
      })

      const result = resolveBlockReference('start', ['files', '0', 'name'], ctx)
      expect(result).toEqual({ value: 'test.txt', blockId: 'block-1' })
    })

    it('should validate file property names', () => {
      const ctx = createContext({
        blockData: { 'block-1': { files: [] } },
        blockOutputSchemas: {
          'block-1': { files: { type: 'files' } },
        },
      })

      expect(() => resolveBlockReference('start', ['files', '0', 'invalid'], ctx)).toThrow(
        InvalidFieldError
      )
    })
  })
})

describe('InvalidFieldError', () => {
  it('should have correct properties', () => {
    const error = new InvalidFieldError('myBlock', 'invalid.path', ['field1', 'field2'])

    expect(error.blockName).toBe('myBlock')
    expect(error.fieldPath).toBe('invalid.path')
    expect(error.availableFields).toEqual(['field1', 'field2'])
    expect(error.name).toBe('InvalidFieldError')
  })

  it('should format message correctly', () => {
    const error = new InvalidFieldError('start', 'typo', ['input', 'files'])

    expect(error.message).toBe(
      '"typo" doesn\'t exist on block "start". Available fields: input, files'
    )
  })

  it('should handle empty available fields', () => {
    const error = new InvalidFieldError('start', 'field', [])

    expect(error.message).toBe('"field" doesn\'t exist on block "start". Available fields: none')
  })
})
@@ -1,210 +0,0 @@
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import { normalizeName } from '@/executor/constants'
import { navigatePath } from '@/executor/variables/resolvers/reference'

export type OutputSchema = Record<string, { type?: string; description?: string } | unknown>

export interface BlockReferenceContext {
  blockNameMapping: Record<string, string>
  blockData: Record<string, unknown>
  blockOutputSchemas?: Record<string, OutputSchema>
}

export interface BlockReferenceResult {
  value: unknown
  blockId: string
}

export class InvalidFieldError extends Error {
  constructor(
    public readonly blockName: string,
    public readonly fieldPath: string,
    public readonly availableFields: string[]
  ) {
    super(
      `"${fieldPath}" doesn't exist on block "${blockName}". ` +
        `Available fields: ${availableFields.length > 0 ? availableFields.join(', ') : 'none'}`
    )
    this.name = 'InvalidFieldError'
  }
}

function isFileType(value: unknown): boolean {
  if (typeof value !== 'object' || value === null) return false
  const typed = value as { type?: string }
  return typed.type === 'file[]' || typed.type === 'files'
}

function isArrayType(value: unknown): value is { type: 'array'; items?: unknown } {
  if (typeof value !== 'object' || value === null) return false
  return (value as { type?: string }).type === 'array'
}

function getArrayItems(schema: unknown): unknown {
  if (typeof schema !== 'object' || schema === null) return undefined
  return (schema as { items?: unknown }).items
}

function getProperties(schema: unknown): Record<string, unknown> | undefined {
  if (typeof schema !== 'object' || schema === null) return undefined
  const props = (schema as { properties?: unknown }).properties
  return typeof props === 'object' && props !== null
    ? (props as Record<string, unknown>)
    : undefined
}

function lookupField(schema: unknown, fieldName: string): unknown | undefined {
  if (typeof schema !== 'object' || schema === null) return undefined
  const typed = schema as Record<string, unknown>

  if (fieldName in typed) {
    return typed[fieldName]
  }

  const props = getProperties(schema)
  if (props && fieldName in props) {
    return props[fieldName]
  }

  return undefined
}

function isPathInSchema(schema: OutputSchema | undefined, pathParts: string[]): boolean {
  if (!schema || pathParts.length === 0) {
    return true
  }

  let current: unknown = schema

  for (let i = 0; i < pathParts.length; i++) {
    const part = pathParts[i]

    if (current === null || current === undefined) {
      return false
    }

    if (/^\d+$/.test(part)) {
      if (isFileType(current)) {
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }
      if (isArrayType(current)) {
        current = getArrayItems(current)
      }
      continue
    }

    const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
    if (arrayMatch) {
      const [, prop] = arrayMatch
      const fieldDef = lookupField(current, prop)
      if (!fieldDef) return false

      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }

      current = isArrayType(fieldDef) ? getArrayItems(fieldDef) : fieldDef
      continue
    }

    if (
      isFileType(current) &&
      USER_FILE_ACCESSIBLE_PROPERTIES.includes(
        part as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
      )
    ) {
      return true
    }

    const fieldDef = lookupField(current, part)
    if (fieldDef !== undefined) {
      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        if (!nextPart) return true
        if (/^\d+$/.test(nextPart)) {
          const afterIndex = pathParts[i + 2]
          return (
            !afterIndex ||
            USER_FILE_ACCESSIBLE_PROPERTIES.includes(
              afterIndex as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
            )
          )
        }
        return USER_FILE_ACCESSIBLE_PROPERTIES.includes(
          nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
        )
      }
      current = fieldDef
      continue
    }

    if (isArrayType(current)) {
      const items = getArrayItems(current)
      const itemField = lookupField(items, part)
      if (itemField !== undefined) {
        current = itemField
        continue
      }
    }

    return false
  }

  return true
}

function getSchemaFieldNames(schema: OutputSchema | undefined): string[] {
  if (!schema) return []
  return Object.keys(schema)
}

export function resolveBlockReference(
  blockName: string,
  pathParts: string[],
  context: BlockReferenceContext
): BlockReferenceResult | undefined {
  const normalizedName = normalizeName(blockName)
  const blockId = context.blockNameMapping[normalizedName]

  if (!blockId) {
    return undefined
  }

  const blockOutput = context.blockData[blockId]
  const schema = context.blockOutputSchemas?.[blockId]

  if (blockOutput === undefined) {
    if (schema && pathParts.length > 0) {
      if (!isPathInSchema(schema, pathParts)) {
        throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
      }
    }
    return { value: undefined, blockId }
  }

  if (pathParts.length === 0) {
    return { value: blockOutput, blockId }
  }

  const value = navigatePath(blockOutput, pathParts)

  if (value === undefined && schema) {
    if (!isPathInSchema(schema, pathParts)) {
      throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
    }
  }

  return { value, blockId }
}
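The deleted module above centralizes reference resolution behind a small API, and the deleted tests earlier exercise it directly. A short usage sketch of that API:

import {
  type BlockReferenceContext,
  InvalidFieldError,
  resolveBlockReference,
} from './block-reference'

const refCtx: BlockReferenceContext = {
  blockNameMapping: { start: 'block-1' },
  blockData: { 'block-1': { input: 'hello' } },
  blockOutputSchemas: { 'block-1': { input: { type: 'string' } } },
}

try {
  // Resolves <start.input> to its value plus the owning block id.
  const hit = resolveBlockReference('start', ['input'], refCtx)
  console.log(hit) // { value: 'hello', blockId: 'block-1' }
} catch (error) {
  if (error instanceof InvalidFieldError) {
    // Invalid paths surface the block's known fields for debugging.
    console.error(error.availableFields)
  }
}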
@@ -1,15 +1,11 @@
|
||||
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
|
||||
import {
|
||||
isReference,
|
||||
normalizeName,
|
||||
parseReferencePath,
|
||||
SPECIAL_REFERENCE_PREFIXES,
|
||||
} from '@/executor/constants'
|
||||
import {
|
||||
InvalidFieldError,
|
||||
type OutputSchema,
|
||||
resolveBlockReference,
|
||||
} from '@/executor/utils/block-reference'
|
||||
import {
|
||||
navigatePath,
|
||||
type ResolutionContext,
|
||||
@@ -18,6 +14,123 @@ import {
|
||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
import { getTool } from '@/tools/utils'
|
||||
|
||||
function isPathInOutputSchema(
|
||||
outputs: Record<string, any> | undefined,
|
||||
pathParts: string[]
|
||||
): boolean {
|
||||
if (!outputs || pathParts.length === 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
const isFileArrayType = (value: any): boolean =>
|
||||
value?.type === 'file[]' || value?.type === 'files'
|
||||
|
||||
let current: any = outputs
|
||||
for (let i = 0; i < pathParts.length; i++) {
|
||||
const part = pathParts[i]
|
||||
|
||||
const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
|
||||
if (arrayMatch) {
|
||||
const [, prop] = arrayMatch
|
||||
let fieldDef: any
|
||||
|
||||
if (prop in current) {
|
||||
fieldDef = current[prop]
|
||||
} else if (current.properties && prop in current.properties) {
|
||||
fieldDef = current.properties[prop]
|
||||
} else if (current.type === 'array' && current.items) {
|
||||
if (current.items.properties && prop in current.items.properties) {
|
||||
fieldDef = current.items.properties[prop]
|
||||
} else if (prop in current.items) {
|
||||
fieldDef = current.items[prop]
|
||||
}
|
||||
}
|
||||
|
||||
if (!fieldDef) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (isFileArrayType(fieldDef)) {
|
||||
if (i + 1 < pathParts.length) {
|
||||
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(pathParts[i + 1] as any)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
if (fieldDef.type === 'array' && fieldDef.items) {
|
||||
current = fieldDef.items
|
||||
continue
|
||||
}
|
||||
|
||||
current = fieldDef
|
||||
continue
|
||||
}
|
||||
|
||||
if (/^\d+$/.test(part)) {
|
||||
if (isFileArrayType(current)) {
|
||||
if (i + 1 < pathParts.length) {
|
||||
const nextPart = pathParts[i + 1]
|
||||
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(nextPart as any)
|
||||
}
|
||||
return true
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (current === null || current === undefined) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (part in current) {
|
||||
const nextCurrent = current[part]
|
||||
if (nextCurrent?.type === 'file[]' && i + 1 < pathParts.length) {
|
||||
const nextPart = pathParts[i + 1]
|
||||
if (/^\d+$/.test(nextPart) && i + 2 < pathParts.length) {
|
||||
const propertyPart = pathParts[i + 2]
|
||||
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(propertyPart as any)
|
||||
}
|
||||
}
|
||||
current = nextCurrent
|
||||
continue
|
||||
}
|
||||
|
||||
if (current.properties && part in current.properties) {
|
||||
current = current.properties[part]
|
||||
continue
|
||||
}
|
||||
|
||||
if (current.type === 'array' && current.items) {
|
||||
if (current.items.properties && part in current.items.properties) {
|
||||
current = current.items.properties[part]
|
||||
continue
|
||||
}
|
||||
if (part in current.items) {
|
||||
current = current.items[part]
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if (isFileArrayType(current) && USER_FILE_ACCESSIBLE_PROPERTIES.includes(part as any)) {
|
||||
return true
|
||||
}
|
||||
|
||||
if ('type' in current && typeof current.type === 'string') {
|
||||
if (!current.properties && !current.items) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
function getSchemaFieldNames(outputs: Record<string, any> | undefined): string[] {
|
||||
if (!outputs) return []
|
||||
return Object.keys(outputs)
|
||||
}
|
||||
|
||||
export class BlockResolver implements Resolver {
|
||||
private nameToBlockId: Map<string, string>
|
||||
private blockById: Map<string, SerializedBlock>
|
||||
@@ -57,94 +170,83 @@ export class BlockResolver implements Resolver {
      return undefined
    }

    const block = this.blockById.get(blockId)!
    const block = this.blockById.get(blockId)
    const output = this.getBlockOutput(blockId, context)

    const blockData: Record<string, unknown> = {}
    const blockOutputSchemas: Record<string, OutputSchema> = {}

    if (output !== undefined) {
      blockData[blockId] = output
    }

    const blockType = block.metadata?.id
    const params = block.config?.params as Record<string, unknown> | undefined
    const subBlocks = params
      ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
      : undefined
    const toolId = block.config?.tool
    const toolConfig = toolId ? getTool(toolId) : undefined
    const outputSchema =
      toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)

    if (outputSchema && Object.keys(outputSchema).length > 0) {
      blockOutputSchemas[blockId] = outputSchema
    }

    try {
      const result = resolveBlockReference(blockName, pathParts, {
        blockNameMapping: Object.fromEntries(this.nameToBlockId),
        blockData,
        blockOutputSchemas,
      })!

      if (result.value !== undefined) {
        return result.value
      }

      return this.handleBackwardsCompat(block, output, pathParts)
    } catch (error) {
      if (error instanceof InvalidFieldError) {
        const fallback = this.handleBackwardsCompat(block, output, pathParts)
        if (fallback !== undefined) {
          return fallback
        }
        throw new Error(error.message)
      }
      throw error
    }
  }

  private handleBackwardsCompat(
    block: SerializedBlock,
    output: unknown,
    pathParts: string[]
  ): unknown {
    if (output === undefined || pathParts.length === 0) {
    if (output === undefined) {
      return undefined
    }
    if (pathParts.length === 0) {
      return output
    }

    // Try the original path first
    let result = navigatePath(output, pathParts)

    // If successful, return it immediately
    if (result !== undefined) {
      return result
    }

    // Response block backwards compatibility:
    // Old: <responseBlock.response.data> -> New: <responseBlock.data>
    // Only apply fallback if:
    // 1. Block type is 'response'
    // 2. Path starts with 'response.'
    // 3. Output doesn't have a 'response' key (confirming it's the new format)
    if (
      block.metadata?.id === 'response' &&
      block?.metadata?.id === 'response' &&
      pathParts[0] === 'response' &&
      (output as Record<string, unknown>)?.response === undefined
      output?.response === undefined
    ) {
      const adjustedPathParts = pathParts.slice(1)
      if (adjustedPathParts.length === 0) {
        return output
      }
      const fallbackResult = navigatePath(output, adjustedPathParts)
      if (fallbackResult !== undefined) {
        return fallbackResult
      result = navigatePath(output, adjustedPathParts)
      if (result !== undefined) {
        return result
      }
    }

    // Workflow block backwards compatibility:
    // Old: <workflowBlock.result.response.data> -> New: <workflowBlock.result.data>
    // Only apply fallback if:
    // 1. Block type is 'workflow' or 'workflow_input'
    // 2. Path starts with 'result.response.'
    // 3. output.result.response doesn't exist (confirming child used new format)
    const isWorkflowBlock =
      block.metadata?.id === 'workflow' || block.metadata?.id === 'workflow_input'
    const outputRecord = output as Record<string, Record<string, unknown> | undefined>
      block?.metadata?.id === 'workflow' || block?.metadata?.id === 'workflow_input'
    if (
      isWorkflowBlock &&
      pathParts[0] === 'result' &&
      pathParts[1] === 'response' &&
      outputRecord?.result?.response === undefined
      output?.result?.response === undefined
    ) {
      const adjustedPathParts = ['result', ...pathParts.slice(2)]
      const fallbackResult = navigatePath(output, adjustedPathParts)
      if (fallbackResult !== undefined) {
        return fallbackResult
      result = navigatePath(output, adjustedPathParts)
      if (result !== undefined) {
        return result
      }
    }

    const blockType = block?.metadata?.id
    const params = block?.config?.params as Record<string, unknown> | undefined
    const subBlocks = params
      ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
      : undefined
    const toolId = block?.config?.tool
    const toolConfig = toolId ? getTool(toolId) : undefined
    const outputSchema =
      toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block?.outputs)
    const schemaFields = getSchemaFieldNames(outputSchema)
    if (schemaFields.length > 0 && !isPathInOutputSchema(outputSchema, pathParts)) {
      throw new Error(
        `"${pathParts.join('.')}" doesn't exist on block "${blockName}". ` +
          `Available fields: ${schemaFields.join(', ')}`
      )
    }

    return undefined
  }

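For context, a minimal sketch (illustrative values, not part of the changeset) of the response-block mapping that handleBackwardsCompat encodes: old references carried a `response.` prefix that the new output shape no longer has, so the resolver retries with that segment stripped.

// Hypothetical new-format output of a response block (no `response` wrapper)
const output = { data: { userId: 42 }, status: 200 }

// An old-style <responseBlock.response.data> arrives as pathParts = ['response', 'data']
navigatePath(output, ['response', 'data']) // undefined - direct navigation fails
navigatePath(output, ['data'])             // { userId: 42 } - fallback with the prefix stripped
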
@@ -1,7 +1,6 @@
import { loggerMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import type { LoopScope } from '@/executor/execution/state'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { LoopResolver } from './loop'
import type { ResolutionContext } from './reference'

@@ -63,12 +62,7 @@ function createTestContext(

describe('LoopResolver', () => {
  describe('canResolve', () => {
    it.concurrent('should return true for bare loop reference', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      expect(resolver.canResolve('<loop>')).toBe(true)
    })

    it.concurrent('should return true for known loop properties', () => {
    it.concurrent('should return true for loop references', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      expect(resolver.canResolve('<loop.index>')).toBe(true)
      expect(resolver.canResolve('<loop.iteration>')).toBe(true)
@@ -84,13 +78,6 @@ describe('LoopResolver', () => {
      expect(resolver.canResolve('<loop.items.0>')).toBe(true)
    })

    it.concurrent('should return true for unknown loop properties (validates in resolve)', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      expect(resolver.canResolve('<loop.results>')).toBe(true)
      expect(resolver.canResolve('<loop.output>')).toBe(true)
      expect(resolver.canResolve('<loop.unknownProperty>')).toBe(true)
    })

    it.concurrent('should return false for non-loop references', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      expect(resolver.canResolve('<block.output>')).toBe(false)
@@ -194,34 +181,20 @@ describe('LoopResolver', () => {
  })

  describe('edge cases', () => {
    it.concurrent('should return context object for bare loop reference', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      const loopScope = createLoopScope({ iteration: 2, item: 'test', items: ['a', 'b', 'c'] })
      const ctx = createTestContext('block-1', loopScope)

      expect(resolver.resolve('<loop>', ctx)).toEqual({
        index: 2,
        currentItem: 'test',
        items: ['a', 'b', 'c'],
      })
    })

    it.concurrent('should return minimal context object for for-loop (no items)', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      const loopScope = createLoopScope({ iteration: 5 })
      const ctx = createTestContext('block-1', loopScope)

      expect(resolver.resolve('<loop>', ctx)).toEqual({
        index: 5,
      })
    })

    it.concurrent('should throw InvalidFieldError for unknown loop property', () => {
    it.concurrent('should return undefined for invalid loop reference (missing property)', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      const loopScope = createLoopScope({ iteration: 0 })
      const ctx = createTestContext('block-1', loopScope)

      expect(() => resolver.resolve('<loop.unknownProperty>', ctx)).toThrow(InvalidFieldError)
      expect(resolver.resolve('<loop>', ctx)).toBeUndefined()
    })

    it.concurrent('should return undefined for unknown loop property', () => {
      const resolver = new LoopResolver(createTestWorkflow())
      const loopScope = createLoopScope({ iteration: 0 })
      const ctx = createTestContext('block-1', loopScope)

      expect(resolver.resolve('<loop.unknownProperty>', ctx)).toBeUndefined()
    })

    it.concurrent('should handle iteration index 0 correctly', () => {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/constants'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import {
  navigatePath,
@@ -14,8 +13,6 @@ const logger = createLogger('LoopResolver')
export class LoopResolver implements Resolver {
  constructor(private workflow: SerializedWorkflow) {}

  private static KNOWN_PROPERTIES = ['iteration', 'index', 'item', 'currentItem', 'items']

  canResolve(reference: string): boolean {
    if (!isReference(reference)) {
      return false
@@ -30,15 +27,16 @@ export class LoopResolver implements Resolver {

  resolve(reference: string, context: ResolutionContext): any {
    const parts = parseReferencePath(reference)
    if (parts.length === 0) {
      logger.warn('Invalid loop reference', { reference })
    if (parts.length < 2) {
      logger.warn('Invalid loop reference - missing property', { reference })
      return undefined
    }

    const loopId = this.findLoopForBlock(context.currentNodeId)
    const [_, property, ...pathParts] = parts
    let loopScope = context.loopScope

    if (!loopScope) {
      const loopId = this.findLoopForBlock(context.currentNodeId)
      if (!loopId) {
        return undefined
      }
@@ -50,27 +48,6 @@ export class LoopResolver implements Resolver {
      return undefined
    }

    const isForEach = loopId ? this.isForEachLoop(loopId) : loopScope.items !== undefined

    if (parts.length === 1) {
      const result: Record<string, any> = {
        index: loopScope.iteration,
      }
      if (loopScope.item !== undefined) {
        result.currentItem = loopScope.item
      }
      if (loopScope.items !== undefined) {
        result.items = loopScope.items
      }
      return result
    }

    const [_, property, ...pathParts] = parts
    if (!LoopResolver.KNOWN_PROPERTIES.includes(property)) {
      const availableFields = isForEach ? ['index', 'currentItem', 'items'] : ['index']
      throw new InvalidFieldError('loop', property, availableFields)
    }

    let value: any
    switch (property) {
      case 'iteration':
@@ -84,8 +61,12 @@ export class LoopResolver implements Resolver {
      case 'items':
        value = loopScope.items
        break
      default:
        logger.warn('Unknown loop property', { property })
        return undefined
    }

    // If there are additional path parts, navigate deeper
    if (pathParts.length > 0) {
      return navigatePath(value, pathParts)
    }
@@ -104,9 +85,4 @@ export class LoopResolver implements Resolver {

    return undefined
  }

  private isForEachLoop(loopId: string): boolean {
    const loopConfig = this.workflow.loops?.[loopId]
    return loopConfig?.loopType === 'forEach'
  }
}

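A usage sketch of how a loop reference resolves against a loop scope; the scope values here are assumptions, and note that the two sides of this diff disagree on unknown properties (one throws InvalidFieldError, the other warns and returns undefined):

// Assuming loopScope = { iteration: 1, item: 'b', items: ['a', 'b', 'c'] }
resolver.resolve('<loop.index>', ctx)       // 1
resolver.resolve('<loop.currentItem>', ctx) // 'b'
resolver.resolve('<loop.items.0>', ctx)     // 'a' - extra path parts go through navigatePath
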
@@ -1,6 +1,5 @@
import { loggerMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { ParallelResolver } from './parallel'
import type { ResolutionContext } from './reference'

@@ -82,12 +81,7 @@ function createTestContext(

describe('ParallelResolver', () => {
  describe('canResolve', () => {
    it.concurrent('should return true for bare parallel reference', () => {
      const resolver = new ParallelResolver(createTestWorkflow())
      expect(resolver.canResolve('<parallel>')).toBe(true)
    })

    it.concurrent('should return true for known parallel properties', () => {
    it.concurrent('should return true for parallel references', () => {
      const resolver = new ParallelResolver(createTestWorkflow())
      expect(resolver.canResolve('<parallel.index>')).toBe(true)
      expect(resolver.canResolve('<parallel.currentItem>')).toBe(true)
@@ -100,16 +94,6 @@ describe('ParallelResolver', () => {
      expect(resolver.canResolve('<parallel.items.0>')).toBe(true)
    })

    it.concurrent(
      'should return true for unknown parallel properties (validates in resolve)',
      () => {
        const resolver = new ParallelResolver(createTestWorkflow())
        expect(resolver.canResolve('<parallel.results>')).toBe(true)
        expect(resolver.canResolve('<parallel.output>')).toBe(true)
        expect(resolver.canResolve('<parallel.unknownProperty>')).toBe(true)
      }
    )

    it.concurrent('should return false for non-parallel references', () => {
      const resolver = new ParallelResolver(createTestWorkflow())
      expect(resolver.canResolve('<block.output>')).toBe(false)
@@ -270,40 +254,24 @@ describe('ParallelResolver', () => {
  })

  describe('edge cases', () => {
    it.concurrent('should return context object for bare parallel reference', () => {
      const workflow = createTestWorkflow({
        'parallel-1': { nodes: ['block-1'], distribution: ['a', 'b', 'c'] },
      })
      const resolver = new ParallelResolver(workflow)
      const ctx = createTestContext('block-1₍1₎')
    it.concurrent(
      'should return undefined for invalid parallel reference (missing property)',
      () => {
        const resolver = new ParallelResolver(createTestWorkflow())
        const ctx = createTestContext('block-1₍0₎')

      expect(resolver.resolve('<parallel>', ctx)).toEqual({
        index: 1,
        currentItem: 'b',
        items: ['a', 'b', 'c'],
      })
    })
        expect(resolver.resolve('<parallel>', ctx)).toBeUndefined()
      }
    )

    it.concurrent('should return minimal context object when no distribution', () => {
      const workflow = createTestWorkflow({
        'parallel-1': { nodes: ['block-1'] },
      })
      const resolver = new ParallelResolver(workflow)
      const ctx = createTestContext('block-1₍0₎')

      const result = resolver.resolve('<parallel>', ctx)
      expect(result).toHaveProperty('index', 0)
      expect(result).toHaveProperty('items')
    })

    it.concurrent('should throw InvalidFieldError for unknown parallel property', () => {
    it.concurrent('should return undefined for unknown parallel property', () => {
      const workflow = createTestWorkflow({
        'parallel-1': { nodes: ['block-1'], distribution: ['a'] },
      })
      const resolver = new ParallelResolver(workflow)
      const ctx = createTestContext('block-1₍0₎')

      expect(() => resolver.resolve('<parallel.unknownProperty>', ctx)).toThrow(InvalidFieldError)
      expect(resolver.resolve('<parallel.unknownProperty>', ctx)).toBeUndefined()
    })

    it.concurrent('should return undefined when block is not in any parallel', () => {

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/constants'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { extractBaseBlockId, extractBranchIndex } from '@/executor/utils/subflow-utils'
import {
  navigatePath,
@@ -14,8 +13,6 @@ const logger = createLogger('ParallelResolver')
export class ParallelResolver implements Resolver {
  constructor(private workflow: SerializedWorkflow) {}

  private static KNOWN_PROPERTIES = ['index', 'currentItem', 'items']

  canResolve(reference: string): boolean {
    if (!isReference(reference)) {
      return false
@@ -30,11 +27,12 @@ export class ParallelResolver implements Resolver {

  resolve(reference: string, context: ResolutionContext): any {
    const parts = parseReferencePath(reference)
    if (parts.length === 0) {
      logger.warn('Invalid parallel reference', { reference })
    if (parts.length < 2) {
      logger.warn('Invalid parallel reference - missing property', { reference })
      return undefined
    }

    const [_, property, ...pathParts] = parts
    const parallelId = this.findParallelForBlock(context.currentNodeId)
    if (!parallelId) {
      return undefined
@@ -51,33 +49,11 @@ export class ParallelResolver implements Resolver {
      return undefined
    }

    // First try to get items from the parallel scope (resolved at runtime)
    // This is the same pattern as LoopResolver reading from loopScope.items
    const parallelScope = context.executionContext.parallelExecutions?.get(parallelId)
    const distributionItems = parallelScope?.items ?? this.getDistributionItems(parallelConfig)

    if (parts.length === 1) {
      const result: Record<string, any> = {
        index: branchIndex,
      }
      if (distributionItems !== undefined) {
        result.items = distributionItems
        if (Array.isArray(distributionItems)) {
          result.currentItem = distributionItems[branchIndex]
        } else if (typeof distributionItems === 'object' && distributionItems !== null) {
          const keys = Object.keys(distributionItems)
          const key = keys[branchIndex]
          result.currentItem = key !== undefined ? distributionItems[key] : undefined
        }
      }
      return result
    }

    const [_, property, ...pathParts] = parts
    if (!ParallelResolver.KNOWN_PROPERTIES.includes(property)) {
      const isCollection = parallelConfig.parallelType === 'collection'
      const availableFields = isCollection ? ['index', 'currentItem', 'items'] : ['index']
      throw new InvalidFieldError('parallel', property, availableFields)
    }

    let value: any
    switch (property) {
      case 'index':
@@ -97,8 +73,12 @@ export class ParallelResolver implements Resolver {
      case 'items':
        value = distributionItems
        break
      default:
        logger.warn('Unknown parallel property', { property })
        return undefined
    }

    // If there are additional path parts, navigate deeper
    if (pathParts.length > 0) {
      return navigatePath(value, pathParts)
    }

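An illustrative sketch (assumed distribution, not part of the changeset) of how the bare-<parallel> handler above maps a branch index onto an object distribution by key order:

// distributionItems = { us: 'en-US', de: 'de-DE' }, branchIndex = 1
const keys = Object.keys(distributionItems) // ['us', 'de']
const key = keys[branchIndex]               // 'de'
// result.currentItem = distributionItems[key] -> 'de-DE'
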
@@ -27,28 +27,23 @@ export function navigatePath(obj: any, path: string[]): any {
      return undefined
    }

    const arrayMatch = part.match(/^([^[]+)(\[.+)$/)
    // Handle array indexing like "items[0]" or just numeric indices
    const arrayMatch = part.match(/^([^[]+)\[(\d+)\](.*)$/)
    if (arrayMatch) {
      const [, prop, bracketsPart] = arrayMatch
      // Handle complex array access like "items[0]"
      const [, prop, index] = arrayMatch
      current = current[prop]
      if (current === undefined || current === null) {
        return undefined
      }

      const indices = bracketsPart.match(/\[(\d+)\]/g)
      if (indices) {
        for (const indexMatch of indices) {
          if (current === null || current === undefined) {
            return undefined
          }
          const idx = Number.parseInt(indexMatch.slice(1, -1), 10)
          current = Array.isArray(current) ? current[idx] : undefined
        }
      }
      const idx = Number.parseInt(index, 10)
      current = Array.isArray(current) ? current[idx] : undefined
    } else if (/^\d+$/.test(part)) {
      // Handle plain numeric index
      const index = Number.parseInt(part, 10)
      current = Array.isArray(current) ? current[index] : undefined
    } else {
      // Handle regular property access
      current = current[part]
    }
  }

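A behavior sketch (assumed inputs) for the two regexes above: both accept a single bracketed index per segment and plain numeric segments, while chained indices such as items[0][1] are only walked by the multi-bracket variant.

const obj = { items: [['a', 'b'], ['c']] }
navigatePath(obj, ['items[0]'])        // ['a', 'b'] under either regex
navigatePath(obj, ['items', '0', '1']) // 'b' - plain numeric segments always work
navigatePath(obj, ['items[0][1]'])     // 'b' only where the chained-bracket regex is in use
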
@@ -1,9 +1,10 @@
'use client'

import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import type { AllTagSlot } from '@/lib/knowledge/constants'
import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/knowledge'

const logger = createLogger('useKnowledgeBaseTagDefinitions')

export interface TagDefinition {
  id: string
@@ -16,23 +17,54 @@ export interface TagDefinition {

/**
 * Hook for fetching KB-scoped tag definitions (for filtering/selection)
 * Uses React Query as single source of truth
 * @param knowledgeBaseId - The knowledge base ID
 */
export function useKnowledgeBaseTagDefinitions(knowledgeBaseId: string | null) {
  const queryClient = useQueryClient()
  const query = useTagDefinitionsQuery(knowledgeBaseId)
  const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([])
  const [isLoading, setIsLoading] = useState(false)
  const [error, setError] = useState<string | null>(null)

  const fetchTagDefinitions = useCallback(async () => {
    if (!knowledgeBaseId) return
    await queryClient.invalidateQueries({
      queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
    })
  }, [queryClient, knowledgeBaseId])
    if (!knowledgeBaseId) {
      setTagDefinitions([])
      return
    }

    setIsLoading(true)
    setError(null)

    try {
      const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`)

      if (!response.ok) {
        throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
      }

      const data = await response.json()

      if (data.success && Array.isArray(data.data)) {
        setTagDefinitions(data.data)
      } else {
        throw new Error('Invalid response format')
      }
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
      logger.error('Error fetching tag definitions:', err)
      setError(errorMessage)
      setTagDefinitions([])
    } finally {
      setIsLoading(false)
    }
  }, [knowledgeBaseId])

  useEffect(() => {
    fetchTagDefinitions()
  }, [fetchTagDefinitions])

  return {
    tagDefinitions: (query.data ?? []) as TagDefinition[],
    isLoading: query.isLoading,
    error: query.error instanceof Error ? query.error.message : null,
    tagDefinitions,
    isLoading,
    error,
    fetchTagDefinitions,
  }
}

@@ -1,4 +1,4 @@
import { useCallback, useMemo } from 'react'
import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types'
import {
@@ -67,17 +67,12 @@ export function useKnowledgeBaseDocuments(
    sortBy?: string
    sortOrder?: string
    enabled?: boolean
    refetchInterval?:
      | number
      | false
      | ((data: KnowledgeDocumentsResponse | undefined) => number | false)
    enabledFilter?: 'all' | 'enabled' | 'disabled'
    refetchInterval?: number | false
  }
) {
  const queryClient = useQueryClient()
  const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE
  const requestOffset = options?.offset ?? 0
  const enabledFilter = options?.enabledFilter ?? 'all'
  const paramsKey = serializeDocumentParams({
    knowledgeBaseId,
    limit: requestLimit,
@@ -85,19 +80,8 @@ export function useKnowledgeBaseDocuments(
    search: options?.search,
    sortBy: options?.sortBy,
    sortOrder: options?.sortOrder,
    enabledFilter,
  })

  const refetchIntervalFn = useMemo(() => {
    if (typeof options?.refetchInterval === 'function') {
      const userFn = options.refetchInterval
      return (query: { state: { data?: KnowledgeDocumentsResponse } }) => {
        return userFn(query.state.data)
      }
    }
    return options?.refetchInterval
  }, [options?.refetchInterval])

  const query = useKnowledgeDocumentsQuery(
    {
      knowledgeBaseId,
@@ -106,11 +90,10 @@ export function useKnowledgeBaseDocuments(
      search: options?.search,
      sortBy: options?.sortBy,
      sortOrder: options?.sortOrder,
      enabledFilter,
    },
    {
      enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
      refetchInterval: refetchIntervalFn,
      refetchInterval: options?.refetchInterval,
    }
  )

@@ -122,14 +105,6 @@ export function useKnowledgeBaseDocuments(
    hasMore: false,
  }

  const hasProcessingDocs = useMemo(
    () =>
      documents.some(
        (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
      ),
    [documents]
  )

  const refreshDocuments = useCallback(async () => {
    await queryClient.invalidateQueries({
      queryKey: knowledgeKeys.documents(knowledgeBaseId, paramsKey),
@@ -161,7 +136,6 @@ export function useKnowledgeBaseDocuments(
    isFetching: query.isFetching,
    isPlaceholderData: query.isPlaceholderData,
    error: query.error instanceof Error ? query.error.message : null,
    hasProcessingDocuments: hasProcessingDocs,
    refreshDocuments,
    updateDocument,
  }
@@ -259,8 +233,8 @@ export function useDocumentChunks(
  const hasPrevPage = currentPage > 1

  const goToPage = useCallback(
    (newPage: number): boolean => {
      return newPage >= 1 && newPage <= totalPages
    async (newPage: number) => {
      if (newPage < 1 || newPage > totalPages) return
    },
    [totalPages]
  )

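A consumer-side sketch (hypothetical polling logic; the response field names are assumptions) of the function form of refetchInterval that one side of this diff supports, polling only while documents are still processing:

const { documents } = useKnowledgeBaseDocuments(knowledgeBaseId, {
  refetchInterval: (data) =>
    data?.documents.some((doc) => doc.processingStatus === 'processing') ? 3000 : false,
})
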
@@ -1,15 +1,10 @@
'use client'

import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import type { AllTagSlot } from '@/lib/knowledge/constants'
import {
  type DocumentTagDefinitionInput,
  knowledgeKeys,
  useDeleteDocumentTagDefinitions,
  useDocumentTagDefinitionsQuery,
  useSaveDocumentTagDefinitions,
} from '@/hooks/queries/knowledge'

const logger = createLogger('useTagDefinitions')

export interface TagDefinition {
  id: string
@@ -24,30 +19,57 @@ export interface TagDefinitionInput {
  tagSlot: AllTagSlot
  displayName: string
  fieldType: string
  // Optional: for editing existing definitions
  _originalDisplayName?: string
}

/**
 * Hook for managing document-scoped tag definitions
 * Uses React Query as single source of truth
 * Hook for managing KB-scoped tag definitions
 * @param knowledgeBaseId - The knowledge base ID
 * @param documentId - The document ID (required for API calls)
 */
export function useTagDefinitions(
  knowledgeBaseId: string | null,
  documentId: string | null = null
) {
  const queryClient = useQueryClient()
  const query = useDocumentTagDefinitionsQuery(knowledgeBaseId, documentId)
  const { mutateAsync: saveTagDefinitionsMutation } = useSaveDocumentTagDefinitions()
  const { mutateAsync: deleteTagDefinitionsMutation } = useDeleteDocumentTagDefinitions()

  const tagDefinitions = (query.data ?? []) as TagDefinition[]
  const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([])
  const [isLoading, setIsLoading] = useState(false)
  const [error, setError] = useState<string | null>(null)

  const fetchTagDefinitions = useCallback(async () => {
    if (!knowledgeBaseId || !documentId) return
    await queryClient.invalidateQueries({
      queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
    })
  }, [queryClient, knowledgeBaseId, documentId])
    if (!knowledgeBaseId || !documentId) {
      setTagDefinitions([])
      return
    }

    setIsLoading(true)
    setError(null)

    try {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`
      )

      if (!response.ok) {
        throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
      }

      const data = await response.json()

      if (data.success && Array.isArray(data.data)) {
        setTagDefinitions(data.data)
      } else {
        throw new Error('Invalid response format')
      }
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
      logger.error('Error fetching tag definitions:', err)
      setError(errorMessage)
      setTagDefinitions([])
    } finally {
      setIsLoading(false)
    }
  }, [knowledgeBaseId, documentId])

  const saveTagDefinitions = useCallback(
    async (definitions: TagDefinitionInput[]) => {
@@ -55,13 +77,43 @@ export function useTagDefinitions(
        throw new Error('Knowledge base ID and document ID are required')
      }

      return saveTagDefinitionsMutation({
        knowledgeBaseId,
        documentId,
        definitions: definitions as DocumentTagDefinitionInput[],
      })
      // Simple validation
      const validDefinitions = (definitions || []).filter(
        (def) => def?.tagSlot && def.displayName && def.displayName.trim()
      )

      try {
        const response = await fetch(
          `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
          {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({ definitions: validDefinitions }),
          }
        )

        if (!response.ok) {
          throw new Error(`Failed to save tag definitions: ${response.statusText}`)
        }

        const data = await response.json()

        if (!data.success) {
          throw new Error(data.error || 'Failed to save tag definitions')
        }

        // Refresh the definitions after saving
        await fetchTagDefinitions()

        return data.data
      } catch (err) {
        logger.error('Error saving tag definitions:', err)
        throw err
      }
    },
    [knowledgeBaseId, documentId, saveTagDefinitionsMutation]
    [knowledgeBaseId, documentId, fetchTagDefinitions]
  )

  const deleteTagDefinitions = useCallback(async () => {
@@ -69,11 +121,25 @@ export function useTagDefinitions(
      throw new Error('Knowledge base ID and document ID are required')
    }

    return deleteTagDefinitionsMutation({
      knowledgeBaseId,
      documentId,
    })
  }, [knowledgeBaseId, documentId, deleteTagDefinitionsMutation])
    try {
      const response = await fetch(
        `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
        {
          method: 'DELETE',
        }
      )

      if (!response.ok) {
        throw new Error(`Failed to delete tag definitions: ${response.statusText}`)
      }

      // Refresh the definitions after deleting
      await fetchTagDefinitions()
    } catch (err) {
      logger.error('Error deleting tag definitions:', err)
      throw err
    }
  }, [knowledgeBaseId, documentId, fetchTagDefinitions])

  const getTagLabel = useCallback(
    (tagSlot: string): string => {
@@ -90,10 +156,15 @@ export function useTagDefinitions(
    [tagDefinitions]
  )

  // Auto-fetch on mount and when dependencies change
  useEffect(() => {
    fetchTagDefinitions()
  }, [fetchTagDefinitions])

  return {
    tagDefinitions,
    isLoading: query.isLoading,
    error: query.error instanceof Error ? query.error.message : null,
    isLoading,
    error,
    fetchTagDefinitions,
    saveTagDefinitions,
    deleteTagDefinitions,

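A component-side sketch (hypothetical values) of the hook's surface, which both sides of this diff keep identical even though the internals differ; the tagSlot and fieldType values are assumptions about the schema:

const { tagDefinitions, saveTagDefinitions, getTagLabel } = useTagDefinitions(kbId, docId)
// 'tag1' and 'text' are illustrative placeholders, not confirmed slot/type names
await saveTagDefinitions([{ tagSlot: 'tag1' as AllTagSlot, displayName: 'Region', fieldType: 'text' }])
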
@@ -1,4 +1,3 @@
import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type {
  ChunkData,
@@ -8,21 +7,15 @@ import type {
  KnowledgeBaseData,
} from '@/lib/knowledge/types'

const logger = createLogger('KnowledgeQueries')

export const knowledgeKeys = {
  all: ['knowledge'] as const,
  list: (workspaceId?: string) => [...knowledgeKeys.all, 'list', workspaceId ?? 'all'] as const,
  detail: (knowledgeBaseId?: string) =>
    [...knowledgeKeys.all, 'detail', knowledgeBaseId ?? ''] as const,
  tagDefinitions: (knowledgeBaseId: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'tagDefinitions'] as const,
  documents: (knowledgeBaseId: string, paramsKey: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'documents', paramsKey] as const,
  document: (knowledgeBaseId: string, documentId: string) =>
    [...knowledgeKeys.detail(knowledgeBaseId), 'document', documentId] as const,
  documentTagDefinitions: (knowledgeBaseId: string, documentId: string) =>
    [...knowledgeKeys.document(knowledgeBaseId, documentId), 'tagDefinitions'] as const,
  chunks: (knowledgeBaseId: string, documentId: string, paramsKey: string) =>
    [...knowledgeKeys.document(knowledgeBaseId, documentId), 'chunks', paramsKey] as const,
}
@@ -86,7 +79,6 @@ export interface KnowledgeDocumentsParams {
  offset?: number
  sortBy?: string
  sortOrder?: string
  enabledFilter?: 'all' | 'enabled' | 'disabled'
}

export interface KnowledgeDocumentsResponse {
@@ -101,7 +93,6 @@ export async function fetchKnowledgeDocuments({
  offset = 0,
  sortBy,
  sortOrder,
  enabledFilter,
}: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> {
  const params = new URLSearchParams()
  if (search) params.set('search', search)
@@ -109,7 +100,6 @@ export async function fetchKnowledgeDocuments({
  if (sortOrder) params.set('sortOrder', sortOrder)
  params.set('limit', limit.toString())
  params.set('offset', offset.toString())
  if (enabledFilter) params.set('enabledFilter', enabledFilter)

  const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
  const response = await fetch(url)
@@ -222,7 +212,6 @@ export function useDocumentQuery(knowledgeBaseId?: string, documentId?: string)
    queryFn: () => fetchDocument(knowledgeBaseId as string, documentId as string),
    enabled: Boolean(knowledgeBaseId && documentId),
    staleTime: 60 * 1000,
    placeholderData: keepPreviousData,
  })
}

@@ -233,17 +222,13 @@ export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
    offset: params.offset ?? 0,
    sortBy: params.sortBy ?? '',
    sortOrder: params.sortOrder ?? '',
    enabledFilter: params.enabledFilter ?? 'all',
  })

export function useKnowledgeDocumentsQuery(
  params: KnowledgeDocumentsParams,
  options?: {
    enabled?: boolean
    refetchInterval?:
      | number
      | false
      | ((query: { state: { data?: KnowledgeDocumentsResponse } }) => number | false)
    refetchInterval?: number | false
  }
) {
  const paramsKey = serializeDocumentParams(params)
@@ -587,9 +572,7 @@ export function useDeleteDocument() {
export interface BulkDocumentOperationParams {
  knowledgeBaseId: string
  operation: 'enable' | 'disable' | 'delete'
  documentIds?: string[]
  selectAll?: boolean
  enabledFilter?: 'all' | 'enabled' | 'disabled'
  documentIds: string[]
}

export interface BulkDocumentOperationResult {
@@ -602,21 +585,11 @@ export async function bulkDocumentOperation({
  knowledgeBaseId,
  operation,
  documentIds,
  selectAll,
  enabledFilter,
}: BulkDocumentOperationParams): Promise<BulkDocumentOperationResult> {
  const body: Record<string, unknown> = { operation }
  if (selectAll) {
    body.selectAll = true
    if (enabledFilter) body.enabledFilter = enabledFilter
  } else {
    body.documentIds = documentIds
  }

  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
    body: JSON.stringify({ operation, documentIds }),
  })

  if (!response.ok) {
@@ -885,31 +858,6 @@ export interface TagDefinitionData {
  updatedAt: string
}

export async function fetchTagDefinitions(knowledgeBaseId: string): Promise<TagDefinitionData[]> {
  const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`)

  if (!response.ok) {
    throw new Error(`Failed to fetch tag definitions: ${response.status} ${response.statusText}`)
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to fetch tag definitions')
  }

  return Array.isArray(result.data) ? result.data : []
}

export function useTagDefinitionsQuery(knowledgeBaseId?: string | null) {
  return useQuery({
    queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId ?? ''),
    queryFn: () => fetchTagDefinitions(knowledgeBaseId as string),
    enabled: Boolean(knowledgeBaseId),
    staleTime: 60 * 1000,
    placeholderData: keepPreviousData,
  })
}

export interface CreateTagDefinitionParams {
  knowledgeBaseId: string
  displayName: string
@@ -966,7 +914,7 @@ export function useCreateTagDefinition() {
    mutationFn: createTagDefinition,
    onSuccess: (_, { knowledgeBaseId }) => {
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
@@ -1004,152 +952,8 @@ export function useDeleteTagDefinition() {
    mutationFn: deleteTagDefinition,
    onSuccess: (_, { knowledgeBaseId }) => {
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
        queryKey: knowledgeKeys.detail(knowledgeBaseId),
      })
    },
  })
}

export interface DocumentTagDefinitionData {
  id: string
  tagSlot: string
  displayName: string
  fieldType: string
  createdAt: string
  updatedAt: string
}

export async function fetchDocumentTagDefinitions(
  knowledgeBaseId: string,
  documentId: string
): Promise<DocumentTagDefinitionData[]> {
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`
  )

  if (!response.ok) {
    throw new Error(
      `Failed to fetch document tag definitions: ${response.status} ${response.statusText}`
    )
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to fetch document tag definitions')
  }

  return Array.isArray(result.data) ? result.data : []
}

export function useDocumentTagDefinitionsQuery(
  knowledgeBaseId?: string | null,
  documentId?: string | null
) {
  return useQuery({
    queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId ?? '', documentId ?? ''),
    queryFn: () => fetchDocumentTagDefinitions(knowledgeBaseId as string, documentId as string),
    enabled: Boolean(knowledgeBaseId && documentId),
    staleTime: 60 * 1000,
    placeholderData: keepPreviousData,
  })
}

export interface DocumentTagDefinitionInput {
  tagSlot: string
  displayName: string
  fieldType: string
}

export interface SaveDocumentTagDefinitionsParams {
  knowledgeBaseId: string
  documentId: string
  definitions: DocumentTagDefinitionInput[]
}

export async function saveDocumentTagDefinitions({
  knowledgeBaseId,
  documentId,
  definitions,
}: SaveDocumentTagDefinitionsParams): Promise<DocumentTagDefinitionData[]> {
  const validDefinitions = (definitions || []).filter(
    (def) => def?.tagSlot && def.displayName && def.displayName.trim()
  )

  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
    {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ definitions: validDefinitions }),
    }
  )

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to save document tag definitions')
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to save document tag definitions')
  }

  return result.data
}

export function useSaveDocumentTagDefinitions() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: saveDocumentTagDefinitions,
    onSuccess: (_, { knowledgeBaseId, documentId }) => {
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
      })
    },
    onError: (error) => {
      logger.error('Failed to save document tag definitions:', error)
    },
  })
}

export interface DeleteDocumentTagDefinitionsParams {
  knowledgeBaseId: string
  documentId: string
}

export async function deleteDocumentTagDefinitions({
  knowledgeBaseId,
  documentId,
}: DeleteDocumentTagDefinitionsParams): Promise<void> {
  const response = await fetch(
    `/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
    { method: 'DELETE' }
  )

  if (!response.ok) {
    const result = await response.json()
    throw new Error(result.error || 'Failed to delete document tag definitions')
  }

  const result = await response.json()
  if (!result?.success) {
    throw new Error(result?.error || 'Failed to delete document tag definitions')
  }
}

export function useDeleteDocumentTagDefinitions() {
  const queryClient = useQueryClient()

  return useMutation({
    mutationFn: deleteDocumentTagDefinitions,
    onSuccess: (_, { knowledgeBaseId, documentId }) => {
      queryClient.invalidateQueries({
        queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
      })
    },
    onError: (error) => {
      logger.error('Failed to delete document tag definitions:', error)
    },
  })
}

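For context, a sketch of why the mutations above can invalidate knowledgeKeys.detail(knowledgeBaseId) instead of a narrower key: React Query matches by key prefix, and every key in this file nests under detail. Values are illustrative:

knowledgeKeys.detail('kb1')          // ['knowledge', 'detail', 'kb1']
knowledgeKeys.tagDefinitions('kb1')  // ['knowledge', 'detail', 'kb1', 'tagDefinitions']

// Invalidating the detail prefix therefore also refetches tag definitions,
// documents, and chunks for that knowledge base:
queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail('kb1') })
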
@@ -5,7 +5,7 @@ import { useShallow } from 'zustand/react/shallow'
import { useSession } from '@/lib/auth/auth-client'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import { getBlock } from '@/blocks'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { normalizeName } from '@/executor/constants'
import { useUndoRedo } from '@/hooks/use-undo-redo'
import {
  BLOCK_OPERATIONS,
@@ -740,16 +740,6 @@ export function useCollaborativeWorkflow() {
      return { success: false, error: 'Block name cannot be empty' }
    }

    if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(normalizedNewName)) {
      logger.error(`Cannot rename block to reserved name: "${trimmedName}"`)
      useNotificationStore.getState().addNotification({
        level: 'error',
        message: `"${trimmedName}" is a reserved name and cannot be used`,
        workflowId: activeWorkflowId || undefined,
      })
      return { success: false, error: `"${trimmedName}" is a reserved name` }
    }

    const currentBlocks = useWorkflowStore.getState().blocks
    const conflictingBlock = Object.entries(currentBlocks).find(
      ([blockId, block]) => blockId !== id && normalizeName(block.name) === normalizedNewName

@@ -1,5 +1,7 @@
'use client'

import { useState } from 'react'
import { Check, Copy } from 'lucide-react'
import { Code } from '@/components/emcn'

interface CodeBlockProps {
@@ -8,8 +10,30 @@ interface CodeBlockProps {
}

export function CodeBlock({ code, language }: CodeBlockProps) {
  const [copied, setCopied] = useState(false)

  const handleCopy = () => {
    navigator.clipboard.writeText(code)
    setCopied(true)
    setTimeout(() => setCopied(false), 2000)
  }

  return (
    <div className='dark w-full overflow-hidden rounded-md border border-[#2a2a2a] bg-[#1F1F1F] text-sm'>
      <div className='flex items-center justify-between border-[#2a2a2a] border-b px-4 py-1.5'>
        <span className='text-[#A3A3A3] text-xs'>{language}</span>
        <button
          onClick={handleCopy}
          className='text-[#A3A3A3] transition-colors hover:text-gray-300'
          title='Copy code'
        >
          {copied ? (
            <Check className='h-3 w-3' strokeWidth={2} />
          ) : (
            <Copy className='h-3 w-3' strokeWidth={2} />
          )}
        </button>
      </div>
      <Code.Viewer
        code={code}
        showGutter

@@ -61,7 +61,7 @@ export const mdxComponents: MDXRemoteProps['components'] = {
      )}
    />
  ),
  li: (props: any) => <li {...props} className={clsx('mb-1', props.className)} />,
  li: (props: any) => <li {...props} className={clsx('mb-2', props.className)} />,
  strong: (props: any) => <strong {...props} className={clsx('font-semibold', props.className)} />,
  em: (props: any) => <em {...props} className={clsx('italic', props.className)} />,
  a: (props: any) => {

@@ -10,8 +10,6 @@ import type { BlogMeta, BlogPost, TagWithCount } from '@/lib/blog/schema'
import { AuthorSchema, BlogFrontmatterSchema } from '@/lib/blog/schema'
import { AUTHORS_DIR, BLOG_DIR, byDateDesc, ensureContentDirs, toIsoDate } from '@/lib/blog/utils'

const postComponentsRegistry: Record<string, Record<string, React.ComponentType>> = {}

let cachedMeta: BlogMeta[] | null = null
let cachedAuthors: Record<string, any> | null = null

@@ -101,21 +99,6 @@ export async function getAllTags(): Promise<TagWithCount[]> {
    .sort((a, b) => b.count - a.count || a.tag.localeCompare(b.tag))
}

async function loadPostComponents(slug: string): Promise<Record<string, React.ComponentType>> {
  if (postComponentsRegistry[slug]) {
    return postComponentsRegistry[slug]
  }

  try {
    const postComponents = await import(`@/content/blog/${slug}/components`)
    postComponentsRegistry[slug] = postComponents
    return postComponents
  } catch {
    postComponentsRegistry[slug] = {}
    return {}
  }
}

export async function getPostBySlug(slug: string): Promise<BlogPost> {
  const meta = await scanFrontmatters()
  const found = meta.find((m) => m.slug === slug)
@@ -124,13 +107,9 @@ export async function getPostBySlug(slug: string): Promise<BlogPost> {
  const raw = await fs.readFile(mdxPath, 'utf-8')
  const { content, data } = matter(raw)
  const fm = BlogFrontmatterSchema.parse(data)

  const postComponents = await loadPostComponents(slug)
  const mergedComponents = { ...mdxComponents, ...postComponents }

  const compiled = await compileMDX({
    source: content,
    components: mergedComponents as any,
    components: mdxComponents as any,
    options: {
      parseFrontmatter: false,
      mdxOptions: {

@@ -34,3 +34,17 @@ import './workflow/set-global-workflow-variables'

// User tools
import './user/set-environment-variables'

// Re-export UI config utilities for convenience
export {
  getSubagentLabels,
  getToolUIConfig,
  hasInterrupt,
  type InterruptConfig,
  isSpecialTool,
  isSubagentTool,
  type ParamsTableConfig,
  type SecondaryActionConfig,
  type SubagentConfig,
  type ToolUIConfig,
} from './ui-config'

@@ -1,6 +1,10 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import type { KnowledgeBaseArgs, KnowledgeBaseResult } from '@/lib/copilot/tools/shared/schemas'
import {
  type KnowledgeBaseArgs,
  KnowledgeBaseArgsSchema,
  type KnowledgeBaseResult,
} from '@/lib/copilot/tools/shared/schemas'
import { generateSearchEmbedding } from '@/lib/knowledge/embeddings'
import {
  createKnowledgeBase,
@@ -11,6 +15,11 @@ import { getQueryStrategy, handleVectorOnlySearch } from '@/app/api/knowledge/se

const logger = createLogger('KnowledgeBaseServerTool')

// Re-export for backwards compatibility
export const KnowledgeBaseInput = KnowledgeBaseArgsSchema
export type KnowledgeBaseInputType = KnowledgeBaseArgs
export type KnowledgeBaseResultType = KnowledgeBaseResult

/**
 * Knowledge base tool for copilot to create, list, and get knowledge bases
 */
@@ -154,6 +163,7 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
        }
      }

      // Verify knowledge base exists
      const kb = await getKnowledgeBaseById(args.knowledgeBaseId)
      if (!kb) {
        return {
@@ -171,8 +181,10 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
      )
      const queryVector = JSON.stringify(queryEmbedding)

      // Get search strategy
      const strategy = getQueryStrategy(1, topK)

      // Perform vector search
      const results = await handleVectorOnlySearch({
        knowledgeBaseIds: [args.knowledgeBaseId],
        topK,

@@ -6,7 +6,10 @@ import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/g
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
import { knowledgeBaseServerTool } from '@/lib/copilot/tools/server/knowledge/knowledge-base'
import {
  KnowledgeBaseInput,
  knowledgeBaseServerTool,
} from '@/lib/copilot/tools/server/knowledge/knowledge-base'
import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-api-request'
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
@@ -25,7 +28,6 @@ import {
  GetBlocksMetadataResult,
  GetTriggerBlocksInput,
  GetTriggerBlocksResult,
  KnowledgeBaseArgsSchema,
} from '@/lib/copilot/tools/shared/schemas'

// Generic execute response schemas (success path only for this route; errors handled via HTTP status)
@@ -88,7 +90,7 @@ export async function routeExecution(
    args = GetTriggerBlocksInput.parse(args)
  }
  if (toolName === 'knowledge_base') {
    args = KnowledgeBaseArgsSchema.parse(args)
    args = KnowledgeBaseInput.parse(args)
  }

  const result = await tool.execute(args, context)

@@ -14,7 +14,7 @@ import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
import { EDGE, normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { EDGE, normalizeName } from '@/executor/constants'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
@@ -63,7 +63,6 @@ type SkippedItemType =
  | 'invalid_subflow_parent'
  | 'nested_subflow_not_allowed'
  | 'duplicate_block_name'
  | 'reserved_block_name'
  | 'duplicate_trigger'
  | 'duplicate_single_instance_block'

@@ -1684,8 +1683,7 @@ function applyOperationsToWorkflowState(
          }
        }
        if (params?.name !== undefined) {
          const normalizedName = normalizeName(params.name)
          if (!normalizedName) {
          if (!normalizeName(params.name)) {
            logSkippedItem(skippedItems, {
              type: 'missing_required_params',
              operationType: 'edit',
@@ -1693,14 +1691,6 @@ function applyOperationsToWorkflowState(
              reason: `Cannot rename to empty name`,
              details: { requestedName: params.name },
            })
          } else if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(normalizedName)) {
            logSkippedItem(skippedItems, {
              type: 'reserved_block_name',
              operationType: 'edit',
              blockId: block_id,
              reason: `Cannot rename to "${params.name}" - this is a reserved name`,
              details: { requestedName: params.name },
            })
          } else {
            const conflictingBlock = findBlockWithDuplicateNormalizedName(
              modifiedState.blocks,
@@ -1921,8 +1911,7 @@ function applyOperationsToWorkflowState(
      }

      case 'add': {
        const addNormalizedName = params?.name ? normalizeName(params.name) : ''
        if (!params?.type || !params?.name || !addNormalizedName) {
        if (!params?.type || !params?.name || !normalizeName(params.name)) {
          logSkippedItem(skippedItems, {
            type: 'missing_required_params',
            operationType: 'add',
@@ -1933,17 +1922,6 @@ function applyOperationsToWorkflowState(
          break
        }

        if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(addNormalizedName)) {
          logSkippedItem(skippedItems, {
            type: 'reserved_block_name',
            operationType: 'add',
            blockId: block_id,
            reason: `Block name "${params.name}" is a reserved name and cannot be used`,
            details: { requestedName: params.name },
          })
          break
        }

        const conflictingBlock = findBlockWithDuplicateNormalizedName(
          modifiedState.blocks,
          params.name,

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { idempotencyKey } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, count, inArray, like, lt, max, min, sql } from 'drizzle-orm'
import { and, eq, lt } from 'drizzle-orm'

const logger = createLogger('IdempotencyCleanup')

@@ -19,8 +19,7 @@ export interface CleanupOptions {
  batchSize?: number

  /**
   * Specific namespace prefix to clean up (e.g., 'webhook', 'polling')
   * Keys are prefixed with namespace, so this filters by key prefix
   * Specific namespace to clean up, or undefined to clean all namespaces
   */
  namespace?: string
}
@@ -54,17 +53,13 @@ export async function cleanupExpiredIdempotencyKeys(

  while (hasMore) {
    try {
      // Build where condition - filter by cutoff date and optionally by namespace prefix
      const whereCondition = namespace
        ? and(
            lt(idempotencyKey.createdAt, cutoffDate),
            like(idempotencyKey.key, `${namespace}:%`)
          )
        ? and(lt(idempotencyKey.createdAt, cutoffDate), eq(idempotencyKey.namespace, namespace))
        : lt(idempotencyKey.createdAt, cutoffDate)

      // Find keys to delete with limit
      // First, find IDs to delete with limit
      const toDelete = await db
        .select({ key: idempotencyKey.key })
        .select({ key: idempotencyKey.key, namespace: idempotencyKey.namespace })
        .from(idempotencyKey)
        .where(whereCondition)
        .limit(batchSize)
@@ -73,13 +68,14 @@ export async function cleanupExpiredIdempotencyKeys(
        break
      }

      // Delete the found records by key
      // Delete the found records
      const deleteResult = await db
        .delete(idempotencyKey)
        .where(
          inArray(
            idempotencyKey.key,
            toDelete.map((item) => item.key)
          and(
            ...toDelete.map((item) =>
              and(eq(idempotencyKey.key, item.key), eq(idempotencyKey.namespace, item.namespace))
            )
          )
        )
        .returning({ key: idempotencyKey.key })
@@ -130,7 +126,6 @@ export async function cleanupExpiredIdempotencyKeys(

/**
 * Get statistics about idempotency key usage
 * Uses SQL aggregations to avoid loading all keys into memory
 */
export async function getIdempotencyKeyStats(): Promise<{
  totalKeys: number
@@ -139,35 +134,34 @@ export async function getIdempotencyKeyStats(): Promise<{
  newestKey: Date | null
}> {
  try {
    // Get total count and date range in a single query
    const [statsResult] = await db
    const allKeys = await db
      .select({
        totalKeys: count(),
        oldestKey: min(idempotencyKey.createdAt),
        newestKey: max(idempotencyKey.createdAt),
        namespace: idempotencyKey.namespace,
        createdAt: idempotencyKey.createdAt,
      })
      .from(idempotencyKey)

    // Get counts by namespace prefix using SQL substring
    // Extracts everything before the first ':' as the namespace
    const namespaceStats = await db
      .select({
        namespace: sql<string>`split_part(${idempotencyKey.key}, ':', 1)`.as('namespace'),
        count: count(),
      })
      .from(idempotencyKey)
      .groupBy(sql`split_part(${idempotencyKey.key}, ':', 1)`)

    const totalKeys = allKeys.length
    const keysByNamespace: Record<string, number> = {}
    for (const row of namespaceStats) {
      keysByNamespace[row.namespace || 'unknown'] = row.count
    let oldestKey: Date | null = null
    let newestKey: Date | null = null

    for (const key of allKeys) {
      keysByNamespace[key.namespace] = (keysByNamespace[key.namespace] || 0) + 1

      if (!oldestKey || key.createdAt < oldestKey) {
        oldestKey = key.createdAt
      }
      if (!newestKey || key.createdAt > newestKey) {
        newestKey = key.createdAt
      }
    }

    return {
      totalKeys: statsResult?.totalKeys ?? 0,
      totalKeys,
      keysByNamespace,
      oldestKey: statsResult?.oldestKey ?? null,
      newestKey: statsResult?.newestKey ?? null,
      oldestKey,
      newestKey,
    }
  } catch (error) {
    logger.error('Failed to get idempotency key stats:', error)

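The hunks above move namespacing out of the key text (previously encoded as `webhook:<id>` and filtered with LIKE) into a dedicated column filtered by equality. A minimal sketch of one cleanup batch under the column model, assuming drizzle-orm's `or` helper; per-row (key, namespace) conditions have to be joined with `or` rather than `and`, since a single row can never satisfy two different key pairs at once:

```ts
import { and, eq, lt, or } from 'drizzle-orm'
import { db } from '@sim/db'
import { idempotencyKey } from '@sim/db/schema'

// Sketch: delete one batch of expired keys, optionally scoped to a namespace.
async function deleteExpiredBatch(cutoffDate: Date, namespace?: string, batchSize = 1000) {
  const where = namespace
    ? and(lt(idempotencyKey.createdAt, cutoffDate), eq(idempotencyKey.namespace, namespace))
    : lt(idempotencyKey.createdAt, cutoffDate)

  const toDelete = await db
    .select({ key: idempotencyKey.key, namespace: idempotencyKey.namespace })
    .from(idempotencyKey)
    .where(where)
    .limit(batchSize)

  if (toDelete.length === 0) return 0

  // Match any of the selected composite keys: or() across rows, and() within a row.
  const deleted = await db
    .delete(idempotencyKey)
    .where(
      or(
        ...toDelete.map((row) =>
          and(eq(idempotencyKey.key, row.key), eq(idempotencyKey.namespace, row.namespace))
        )
      )
    )
    .returning({ key: idempotencyKey.key })

  return deleted.length
}
```
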
@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
import { db } from '@sim/db'
import { idempotencyKey } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { getRedisClient } from '@/lib/core/config/redis'
import { getStorageMethod, type StorageMethod } from '@/lib/core/storage'
import { extractProviderIdentifierFromBody } from '@/lib/webhooks/provider-utils'
@@ -124,7 +124,12 @@ export class IdempotencyService {
      const existing = await db
        .select({ result: idempotencyKey.result, createdAt: idempotencyKey.createdAt })
        .from(idempotencyKey)
        .where(eq(idempotencyKey.key, normalizedKey))
        .where(
          and(
            eq(idempotencyKey.key, normalizedKey),
            eq(idempotencyKey.namespace, this.config.namespace)
          )
        )
        .limit(1)

      if (existing.length > 0) {
@@ -219,12 +224,11 @@ export class IdempotencyService {
        .insert(idempotencyKey)
        .values({
          key: normalizedKey,
          namespace: this.config.namespace,
          result: inProgressResult,
          createdAt: new Date(),
        })
        .onConflictDoNothing({
          target: [idempotencyKey.key],
        })
        .onConflictDoNothing()
        .returning({ key: idempotencyKey.key })

      if (insertResult.length > 0) {
@@ -239,7 +243,12 @@ export class IdempotencyService {
        const existing = await db
          .select({ result: idempotencyKey.result })
          .from(idempotencyKey)
          .where(eq(idempotencyKey.key, normalizedKey))
          .where(
            and(
              eq(idempotencyKey.key, normalizedKey),
              eq(idempotencyKey.namespace, this.config.namespace)
            )
          )
          .limit(1)

        const existingResult =
@@ -271,7 +280,12 @@ export class IdempotencyService {
        const existing = await db
          .select({ result: idempotencyKey.result })
          .from(idempotencyKey)
          .where(eq(idempotencyKey.key, normalizedKey))
          .where(
            and(
              eq(idempotencyKey.key, normalizedKey),
              eq(idempotencyKey.namespace, this.config.namespace)
            )
          )
          .limit(1)
        currentResult = existing.length > 0 ? (existing[0].result as ProcessingResult) : null
      }
@@ -325,11 +339,12 @@ export class IdempotencyService {
      .insert(idempotencyKey)
      .values({
        key: normalizedKey,
        namespace: this.config.namespace,
        result: result,
        createdAt: new Date(),
      })
      .onConflictDoUpdate({
        target: [idempotencyKey.key],
        target: [idempotencyKey.key, idempotencyKey.namespace],
        set: {
          result: result,
          createdAt: new Date(),

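With the namespace now part of the row identity, every read and upsert pairs the key with the service's configured namespace, and the conflict target must name the same column pair as the unique index. A minimal usage sketch of that pattern (the function names below are illustrative; the schema is the one shown at the end of this diff):

```ts
import { and, eq } from 'drizzle-orm'
import { db } from '@sim/db'
import { idempotencyKey } from '@sim/db/schema'

// Sketch: upsert a result for (key, namespace).
async function recordResult(key: string, namespace: string, result: unknown) {
  await db
    .insert(idempotencyKey)
    .values({ key, namespace, result, createdAt: new Date() })
    // The conflict target must match the unique index on (key, namespace);
    // targeting key alone would no longer correspond to any unique constraint.
    .onConflictDoUpdate({
      target: [idempotencyKey.key, idempotencyKey.namespace],
      set: { result, createdAt: new Date() },
    })
}

// Sketch: look up a previously recorded result for (key, namespace).
async function lookupResult(key: string, namespace: string) {
  const rows = await db
    .select({ result: idempotencyKey.result })
    .from(idempotencyKey)
    .where(and(eq(idempotencyKey.key, key), eq(idempotencyKey.namespace, namespace)))
    .limit(1)
  return rows[0]?.result ?? null
}
```
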
@@ -82,26 +82,10 @@ export function formatDateTime(date: Date, timezone?: string): string {
 * @returns A formatted date string in the format "MMM D, YYYY"
 */
export function formatDate(date: Date): string {
  return date.toLocaleDateString('en-US', {
    year: 'numeric',
  return date.toLocaleString('en-US', {
    month: 'short',
    day: 'numeric',
  })
}

/**
 * Formats a date string to absolute format for tooltip display
 * @param dateString - ISO date string to format
 * @returns A formatted date string (e.g., "Jan 22, 2026, 01:30 PM")
 */
export function formatAbsoluteDate(dateString: string): string {
  const date = new Date(dateString)
  return date.toLocaleDateString('en-US', {
    year: 'numeric',
    month: 'short',
    day: 'numeric',
    hour: '2-digit',
    minute: '2-digit',
  })
}

@@ -155,24 +139,20 @@ export function formatCompactTimestamp(iso: string): string {
/**
 * Format a duration in milliseconds to a human-readable format
 * @param durationMs - The duration in milliseconds
 * @param options - Optional formatting options
 * @param options.precision - Number of decimal places for seconds (default: 0)
 * @returns A formatted duration string
 */
export function formatDuration(durationMs: number, options?: { precision?: number }): string {
  const precision = options?.precision ?? 0

export function formatDuration(durationMs: number): string {
  if (durationMs < 1000) {
    return `${durationMs}ms`
  }

  const seconds = durationMs / 1000
  const seconds = Math.floor(durationMs / 1000)
  if (seconds < 60) {
    return precision > 0 ? `${seconds.toFixed(precision)}s` : `${Math.floor(seconds)}s`
    return `${seconds}s`
  }

  const minutes = Math.floor(seconds / 60)
  const remainingSeconds = Math.floor(seconds % 60)
  const remainingSeconds = seconds % 60
  if (minutes < 60) {
    return `${minutes}m ${remainingSeconds}s`
  }
@@ -181,40 +161,3 @@ export function formatDuration(durationMs: number, options?: { precision?: numbe
  const remainingMinutes = minutes % 60
  return `${hours}h ${remainingMinutes}m`
}

/**
 * Formats a date string to relative time (e.g., "2h ago", "3d ago")
 * @param dateString - ISO date string to format
 * @returns A human-readable relative time string
 */
export function formatRelativeTime(dateString: string): string {
  const date = new Date(dateString)
  const now = new Date()
  const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)

  if (diffInSeconds < 60) {
    return 'just now'
  }
  if (diffInSeconds < 3600) {
    const minutes = Math.floor(diffInSeconds / 60)
    return `${minutes}m ago`
  }
  if (diffInSeconds < 86400) {
    const hours = Math.floor(diffInSeconds / 3600)
    return `${hours}h ago`
  }
  if (diffInSeconds < 604800) {
    const days = Math.floor(diffInSeconds / 86400)
    return `${days}d ago`
  }
  if (diffInSeconds < 2592000) {
    const weeks = Math.floor(diffInSeconds / 604800)
    return `${weeks}w ago`
  }
  if (diffInSeconds < 31536000) {
    const months = Math.floor(diffInSeconds / 2592000)
    return `${months}mo ago`
  }
  const years = Math.floor(diffInSeconds / 31536000)
  return `${years}y ago`
}

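The simplified formatDuration drops the precision option and floors to whole seconds before formatting. A few worked inputs and outputs for the simplified version, read directly off the code above:

```ts
formatDuration(750)       // "750ms"
formatDuration(4_200)     // "4s"   (4.2s floored; the precision option is gone)
formatDuration(65_000)    // "1m 5s"
formatDuration(7_260_000) // "2h 1m"
```
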
@@ -130,11 +130,7 @@ async function executeCode(request) {
    await jail.set('environmentVariables', new ivm.ExternalCopy(envVars).copyInto())

    for (const [key, value] of Object.entries(contextVariables)) {
      if (value === undefined) {
        await jail.set(key, undefined)
      } else {
        await jail.set(key, new ivm.ExternalCopy(value).copyInto())
      }
      await jail.set(key, new ivm.ExternalCopy(value).copyInto())
    }

    const fetchCallback = new ivm.Reference(async (url, optionsJson) => {

@@ -127,6 +127,7 @@ export async function processDocumentTags(
  tagData: DocumentTagData[],
  requestId: string
): Promise<ProcessedDocumentTags> {
  // Helper to set a tag value with proper typing
  const setTagValue = (
    tags: ProcessedDocumentTags,
    slot: string,
@@ -671,16 +672,21 @@ export async function createDocumentRecords(
    tag7?: string
  }>,
  knowledgeBaseId: string,
  requestId: string
  requestId: string,
  userId?: string
): Promise<DocumentData[]> {
  const kb = await db
    .select({ userId: knowledgeBase.userId })
    .from(knowledgeBase)
    .where(eq(knowledgeBase.id, knowledgeBaseId))
    .limit(1)
  if (userId) {
    const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)

  if (kb.length === 0) {
    throw new Error('Knowledge base not found')
    const kb = await db
      .select({ userId: knowledgeBase.userId })
      .from(knowledgeBase)
      .where(eq(knowledgeBase.id, knowledgeBaseId))
      .limit(1)

    if (kb.length === 0) {
      throw new Error('Knowledge base not found')
    }
  }

  return await db.transaction(async (tx) => {
@@ -764,6 +770,16 @@ export async function createDocumentRecords(
      .update(knowledgeBase)
      .set({ updatedAt: now })
      .where(eq(knowledgeBase.id, knowledgeBaseId))

    if (userId) {
      const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)

      const kb = await db
        .select({ userId: knowledgeBase.userId })
        .from(knowledgeBase)
        .where(eq(knowledgeBase.id, knowledgeBaseId))
        .limit(1)
    }
  }

  return returnData
@@ -776,7 +792,7 @@ export async function createDocumentRecords(
export async function getDocuments(
  knowledgeBaseId: string,
  options: {
    enabledFilter?: 'all' | 'enabled' | 'disabled'
    includeDisabled?: boolean
    search?: string
    limit?: number
    offset?: number
@@ -830,7 +846,7 @@ export async function getDocuments(
  }
}> {
  const {
    enabledFilter = 'all',
    includeDisabled = false,
    search,
    limit = 50,
    offset = 0,
@@ -838,21 +854,26 @@ export async function getDocuments(
    sortOrder = 'asc',
  } = options

  // Build where conditions
  const whereConditions = [
    eq(document.knowledgeBaseId, knowledgeBaseId),
    isNull(document.deletedAt),
  ]

  if (enabledFilter === 'enabled') {
  // Filter out disabled documents unless specifically requested
  if (!includeDisabled) {
    whereConditions.push(eq(document.enabled, true))
  } else if (enabledFilter === 'disabled') {
    whereConditions.push(eq(document.enabled, false))
  }

  // Add search condition if provided
  if (search) {
    whereConditions.push(sql`LOWER(${document.filename}) LIKE LOWER(${`%${search}%`})`)
    whereConditions.push(
      // Search in filename
      sql`LOWER(${document.filename}) LIKE LOWER(${`%${search}%`})`
    )
  }

  // Get total count for pagination
  const totalResult = await db
    .select({ count: sql<number>`COUNT(*)` })
    .from(document)
@@ -861,6 +882,7 @@ export async function getDocuments(
  const total = totalResult[0]?.count || 0
  const hasMore = offset + limit < total

  // Create dynamic order by clause
  const getOrderByColumn = () => {
    switch (sortBy) {
      case 'filename':
@@ -875,13 +897,12 @@ export async function getDocuments(
        return document.uploadedAt
      case 'processingStatus':
        return document.processingStatus
      case 'enabled':
        return document.enabled
      default:
        return document.uploadedAt
    }
  }

  // Use stable secondary sort to prevent shifting when primary values are identical
  const primaryOrderBy = sortOrder === 'asc' ? asc(getOrderByColumn()) : desc(getOrderByColumn())
  const secondaryOrderBy =
    sortBy === 'filename' ? desc(document.uploadedAt) : asc(document.filename)
@@ -1000,7 +1021,8 @@ export async function createSingleDocument(
    tag7?: string
  },
  knowledgeBaseId: string,
  requestId: string
  requestId: string,
  userId?: string
): Promise<{
  id: string
  knowledgeBaseId: string
@@ -1021,19 +1043,24 @@ export async function createSingleDocument(
  tag6: string | null
  tag7: string | null
}> {
  const kb = await db
    .select({ userId: knowledgeBase.userId })
    .from(knowledgeBase)
    .where(eq(knowledgeBase.id, knowledgeBaseId))
    .limit(1)
  // Check storage limits before creating document
  if (userId) {
    // Get knowledge base owner
    const kb = await db
      .select({ userId: knowledgeBase.userId })
      .from(knowledgeBase)
      .where(eq(knowledgeBase.id, knowledgeBaseId))
      .limit(1)

  if (kb.length === 0) {
    throw new Error('Knowledge base not found')
    if (kb.length === 0) {
      throw new Error('Knowledge base not found')
    }
  }

  const documentId = randomUUID()
  const now = new Date()

  // Process structured tag data if provided
  let processedTags: ProcessedDocumentTags = {
    // Text tags (7 slots)
    tag1: documentData.tag1 ?? null,
@@ -1062,9 +1089,11 @@ export async function createSingleDocument(
    try {
      const tagData = JSON.parse(documentData.documentTagsData)
      if (Array.isArray(tagData)) {
        // Process structured tag data and create tag definitions
        processedTags = await processDocumentTags(knowledgeBaseId, tagData, requestId)
      }
    } catch (error) {
      // Re-throw validation errors, only catch JSON parse errors
      if (error instanceof SyntaxError) {
        logger.warn(`[${requestId}] Failed to parse documentTagsData:`, error)
      } else {
@@ -1097,6 +1126,15 @@ export async function createSingleDocument(

  logger.info(`[${requestId}] Document created: ${documentId} in knowledge base ${knowledgeBaseId}`)

  if (userId) {
    // Get knowledge base owner
    const kb = await db
      .select({ userId: knowledgeBase.userId })
      .from(knowledgeBase)
      .where(eq(knowledgeBase.id, knowledgeBaseId))
      .limit(1)
  }

  return newDocument as {
    id: string
    knowledgeBaseId: string
@@ -1126,7 +1164,8 @@ export async function bulkDocumentOperation(
  knowledgeBaseId: string,
  operation: 'enable' | 'disable' | 'delete',
  documentIds: string[],
  requestId: string
  requestId: string,
  userId?: string
): Promise<{
  success: boolean
  successCount: number
@@ -1141,6 +1180,7 @@ export async function bulkDocumentOperation(
    `[${requestId}] Starting bulk ${operation} operation on ${documentIds.length} documents in knowledge base ${knowledgeBaseId}`
  )

  // Verify all documents belong to this knowledge base
  const documentsToUpdate = await db
    .select({
      id: document.id,
@@ -1173,6 +1213,24 @@ export async function bulkDocumentOperation(
  }>

  if (operation === 'delete') {
    // Get file sizes before deletion for storage tracking
    let totalSize = 0
    if (userId) {
      const documentsToDelete = await db
        .select({ fileSize: document.fileSize })
        .from(document)
        .where(
          and(
            eq(document.knowledgeBaseId, knowledgeBaseId),
            inArray(document.id, documentIds),
            isNull(document.deletedAt)
          )
        )

      totalSize = documentsToDelete.reduce((sum, doc) => sum + doc.fileSize, 0)
    }

    // Handle bulk soft delete
    updateResult = await db
      .update(document)
      .set({
@@ -1187,6 +1245,7 @@ export async function bulkDocumentOperation(
      )
      .returning({ id: document.id, deletedAt: document.deletedAt })
  } else {
    // Handle bulk enable/disable
    const enabled = operation === 'enable'

    updateResult = await db
@@ -1217,77 +1276,6 @@ export async function bulkDocumentOperation(
  }
}

/**
 * Perform bulk operations on all documents matching a filter
 */
export async function bulkDocumentOperationByFilter(
  knowledgeBaseId: string,
  operation: 'enable' | 'disable' | 'delete',
  enabledFilter: 'all' | 'enabled' | 'disabled' | undefined,
  requestId: string
): Promise<{
  success: boolean
  successCount: number
  updatedDocuments: Array<{
    id: string
    enabled?: boolean
    deletedAt?: Date | null
  }>
}> {
  logger.info(
    `[${requestId}] Starting bulk ${operation} operation on all documents (filter: ${enabledFilter || 'all'}) in knowledge base ${knowledgeBaseId}`
  )

  const whereConditions = [
    eq(document.knowledgeBaseId, knowledgeBaseId),
    isNull(document.deletedAt),
  ]

  if (enabledFilter === 'enabled') {
    whereConditions.push(eq(document.enabled, true))
  } else if (enabledFilter === 'disabled') {
    whereConditions.push(eq(document.enabled, false))
  }

  let updateResult: Array<{
    id: string
    enabled?: boolean
    deletedAt?: Date | null
  }>

  if (operation === 'delete') {
    updateResult = await db
      .update(document)
      .set({
        deletedAt: new Date(),
      })
      .where(and(...whereConditions))
      .returning({ id: document.id, deletedAt: document.deletedAt })
  } else {
    const enabled = operation === 'enable'

    updateResult = await db
      .update(document)
      .set({
        enabled,
      })
      .where(and(...whereConditions))
      .returning({ id: document.id, enabled: document.enabled })
  }

  const successCount = updateResult.length

  logger.info(
    `[${requestId}] Bulk ${operation} by filter completed: ${successCount} documents updated in knowledge base ${knowledgeBaseId}`
  )

  return {
    success: true,
    successCount,
    updatedDocuments: updateResult,
  }
}

/**
 * Mark a document as failed due to timeout
 */
@@ -1337,6 +1325,7 @@ export async function retryDocumentProcessing(
  },
  requestId: string
): Promise<{ success: boolean; status: string; message: string }> {
  // Fetch KB's chunkingConfig for retry processing
  const kb = await db
    .select({
      chunkingConfig: knowledgeBase.chunkingConfig,
@@ -1347,6 +1336,7 @@ export async function retryDocumentProcessing(

  const kbConfig = kb[0].chunkingConfig as { maxSize: number; minSize: number; overlap: number }

  // Clear existing embeddings and reset document state
  await db.transaction(async (tx) => {
    await tx.delete(embedding).where(eq(embedding.documentId, documentId))

@@ -1372,6 +1362,7 @@ export async function retryDocumentProcessing(
    chunkOverlap: kbConfig.overlap,
  }

  // Start processing in the background
  processDocumentAsync(knowledgeBaseId, documentId, docData, processingOptions).catch(
    (error: unknown) => {
      logger.error(`[${requestId}] Background retry processing error:`, error)
@@ -1520,6 +1511,7 @@ export async function updateDocument(
  if (updateData.processingError !== undefined)
    dbUpdateData.processingError = updateData.processingError

  // Helper to convert string values to proper types for the database
  const convertTagValue = (
    slot: string,
    value: string | undefined

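The getDocuments hunks swap the three-state enabledFilter for a plain includeDisabled boolean and keep the stable two-level sort. A minimal call-site sketch against the boolean variant (the knowledge base id is hypothetical, and the exact return shape is not shown in the hunk):

```ts
// Hypothetical knowledge base id; getDocuments as changed in the hunks above.
const page = await getDocuments('kb-123', {
  includeDisabled: false, // disabled documents are filtered out unless requested
  search: 'invoice',      // matched case-insensitively against filename
  limit: 50,
  offset: 0,
  sortBy: 'uploadedAt',   // secondary sort on filename keeps pages from shifting
  sortOrder: 'desc',
})
// The function computes total and hasMore = offset + limit < total, so a caller
// can advance offset by limit until hasMore is false.
```
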
@@ -6,7 +6,6 @@ export type DocumentSortField =
  | 'chunkCount'
  | 'uploadedAt'
  | 'processingStatus'
  | 'enabled'
export type SortOrder = 'asc' | 'desc'

export interface DocumentSortOptions {

@@ -2,9 +2,12 @@
 * Autolayout Constants
 *
 * Layout algorithm specific constants for spacing, padding, and overlap detection.
 * Block dimensions are in @/lib/workflows/blocks/block-dimensions
 * Block dimensions are imported from the shared source: @/lib/workflows/blocks/block-dimensions
 */

// Re-export block dimensions for autolayout consumers
export { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'

/**
 * Horizontal spacing between layers (columns)
 */

@@ -11,6 +11,21 @@ import type { BlockMetrics, BoundingBox, Edge, GraphNode } from '@/lib/workflows
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
import type { BlockState } from '@/stores/workflows/workflow/types'

// Re-export layout constants for backwards compatibility
export {
  CONTAINER_PADDING,
  CONTAINER_PADDING_X,
  CONTAINER_PADDING_Y,
  ROOT_PADDING_X,
  ROOT_PADDING_Y,
}

// Re-export block dimensions for backwards compatibility
export const DEFAULT_BLOCK_WIDTH = BLOCK_DIMENSIONS.FIXED_WIDTH
export const DEFAULT_BLOCK_HEIGHT = BLOCK_DIMENSIONS.MIN_HEIGHT
export const DEFAULT_CONTAINER_WIDTH = CONTAINER_DIMENSIONS.DEFAULT_WIDTH
export const DEFAULT_CONTAINER_HEIGHT = CONTAINER_DIMENSIONS.DEFAULT_HEIGHT

/**
 * Resolves a potentially undefined numeric value to a fallback
 */

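Both layout hunks apply the same move: the canonical dimensions live in @/lib/workflows/blocks/block-dimensions, and the older modules re-export them under their historical names so existing imports keep compiling. A minimal sketch of the pattern (the numeric values below are placeholders, not the real dimensions):

```ts
// block-dimensions.ts: the single source of truth (placeholder values)
export const BLOCK_DIMENSIONS = { FIXED_WIDTH: 320, MIN_HEIGHT: 96 } as const

// layout module: old entry point kept alive for backwards compatibility
import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
export const DEFAULT_BLOCK_WIDTH = BLOCK_DIMENSIONS.FIXED_WIDTH  // legacy alias
export const DEFAULT_BLOCK_HEIGHT = BLOCK_DIMENSIONS.MIN_HEIGHT  // legacy alias
```
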
@@ -133,7 +133,7 @@
    "papaparse": "5.5.3",
    "pdf-lib": "1.17.1",
    "postgres": "^3.4.5",
    "posthog-js": "1.334.1",
    "posthog-js": "1.268.9",
    "posthog-node": "5.9.2",
    "prismjs": "^1.30.0",
    "react": "19.2.1",

[9 binary images removed; sizes before deletion: 10 KiB, 122 KiB, 33 KiB, 1.4 MiB, 405 KiB, 228 KiB, 352 KiB, 257 KiB, 143 KiB]
@@ -7,7 +7,7 @@ import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { normalizeName } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { filterNewEdges, getUniqueBlockName, mergeSubblockState } from '@/stores/workflows/utils'
@@ -726,11 +726,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
          return { success: false, changedSubblocks: [] }
        }

        if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(normalizedNewName)) {
          logger.error(`Cannot rename block to reserved name: "${name}"`)
          return { success: false, changedSubblocks: [] }
        }

        const newState = {
          blocks: {
            ...get().blocks,

@@ -56,7 +56,6 @@ describe('Function Execute Tool', () => {
        workflowVariables: {},
        blockData: {},
        blockNameMapping: {},
        blockOutputSchemas: {},
        isCustomTool: false,
        language: 'javascript',
        timeout: 5000,
@@ -84,7 +83,6 @@ describe('Function Execute Tool', () => {
        workflowVariables: {},
        blockData: {},
        blockNameMapping: {},
        blockOutputSchemas: {},
        isCustomTool: false,
        language: 'javascript',
        workflowId: undefined,
@@ -103,7 +101,6 @@ describe('Function Execute Tool', () => {
        workflowVariables: {},
        blockData: {},
        blockNameMapping: {},
        blockOutputSchemas: {},
        isCustomTool: false,
        language: 'javascript',
        workflowId: undefined,

@@ -53,13 +53,6 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
      description: 'Mapping of block names to block IDs',
      default: {},
    },
    blockOutputSchemas: {
      type: 'object',
      required: false,
      visibility: 'hidden',
      description: 'Mapping of block IDs to their output schemas for validation',
      default: {},
    },
    workflowVariables: {
      type: 'object',
      required: false,
@@ -88,7 +81,6 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
      workflowVariables: params.workflowVariables || {},
      blockData: params.blockData || {},
      blockNameMapping: params.blockNameMapping || {},
      blockOutputSchemas: params.blockOutputSchemas || {},
      workflowId: params._context?.workflowId,
      isCustomTool: params.isCustomTool || false,
    }

@@ -11,7 +11,6 @@ export interface CodeExecutionInput {
  workflowVariables?: Record<string, unknown>
  blockData?: Record<string, unknown>
  blockNameMapping?: Record<string, string>
  blockOutputSchemas?: Record<string, Record<string, unknown>>
  _context?: {
    workflowId?: string
  }

bun.lock (40 changed lines)
@@ -164,7 +164,7 @@
    "papaparse": "5.5.3",
    "pdf-lib": "1.17.1",
    "postgres": "^3.4.5",
    "posthog-js": "1.334.1",
    "posthog-js": "1.268.9",
    "posthog-node": "5.9.2",
    "prismjs": "^1.30.0",
    "react": "19.2.1",
@@ -963,9 +963,7 @@

    "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="],

    "@posthog/core": ["@posthog/core@1.13.0", "", { "dependencies": { "cross-spawn": "^7.0.6" } }, "sha512-knjncrk7qRmssFRbGzBl1Tunt21GRpe0Wv+uVelyL0Rh7PdQUsgguulzXFTps8hA6wPwTU4kq85qnbAJ3eH6Wg=="],

    "@posthog/types": ["@posthog/types@1.334.1", "", {}, "sha512-ypFnwTO7qbV7icylLbujbamPdQXbJq0a61GUUBnJAeTbBw/qYPIss5IRYICcbCj0uunQrwD7/CGxVb5TOYKWgA=="],
    "@posthog/core": ["@posthog/core@1.2.2", "", {}, "sha512-f16Ozx6LIigRG+HsJdt+7kgSxZTHeX5f1JlCGKI1lXcvlZgfsCR338FuMI2QRYXGl+jg/vYFzGOTQBxl90lnBg=="],

    "@prisma/config": ["@prisma/config@6.19.2", "", { "dependencies": { "c12": "3.1.0", "deepmerge-ts": "7.1.5", "effect": "3.18.4", "empathic": "2.0.0" } }, "sha512-kadBGDl+aUswv/zZMk9Mx0C8UZs1kjao8H9/JpI4Wh4SHZaM7zkTwiKn/iFLfRg+XtOAo/Z/c6pAYhijKl0nzQ=="],

@@ -1543,8 +1541,6 @@

    "@types/tough-cookie": ["@types/tough-cookie@4.0.5", "", {}, "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA=="],

    "@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="],

    "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="],

    "@types/uuid": ["@types/uuid@10.0.0", "", {}, "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ=="],
@@ -1997,8 +1993,6 @@

    "domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="],

    "dompurify": ["dompurify@3.3.1", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q=="],

    "domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="],

    "dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="],
@@ -2959,7 +2953,7 @@

    "postgres": ["postgres@3.4.8", "", {}, "sha512-d+JFcLM17njZaOLkv6SCev7uoLaBtfK86vMUXhW1Z4glPWh4jozno9APvW/XKFJ3CCxVoC7OL38BqRydtu5nGg=="],

    "posthog-js": ["posthog-js@1.334.1", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/api-logs": "^0.208.0", "@opentelemetry/exporter-logs-otlp-http": "^0.208.0", "@opentelemetry/resources": "^2.2.0", "@opentelemetry/sdk-logs": "^0.208.0", "@posthog/core": "1.13.0", "@posthog/types": "1.334.1", "core-js": "^3.38.1", "dompurify": "^3.3.1", "fflate": "^0.4.8", "preact": "^10.28.0", "query-selector-shadow-dom": "^1.0.1", "web-vitals": "^5.1.0" } }, "sha512-5cDzLICr2afnwX/cR9fwoLC0vN0Nb5gP5HiCigzHkgHdO+E3WsYefla3EFMQz7U4r01CBPZ+nZ9/srkzeACxtQ=="],
    "posthog-js": ["posthog-js@1.268.9", "", { "dependencies": { "@posthog/core": "1.2.2", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", "web-vitals": "^4.2.4" }, "peerDependencies": { "@rrweb/types": "2.0.0-alpha.17", "rrweb-snapshot": "2.0.0-alpha.17" }, "optionalPeers": ["@rrweb/types", "rrweb-snapshot"] }, "sha512-ejK5/i0TUQ8I1SzaIn7xWNf5TzOjWquawpgjKit8DyucD3Z1yf7LTMtgCYZN8oRx9VjiPcP34fSk8YsWQmmkTQ=="],

    "posthog-node": ["posthog-node@5.9.2", "", { "dependencies": { "@posthog/core": "1.2.2" } }, "sha512-oU7FbFcH5cn40nhP04cBeT67zE76EiGWjKKzDvm6IOm5P83sqM0Ij0wMJQSHp+QI6ZN7MLzb+4xfMPUEZ4q6CA=="],

@@ -3007,8 +3001,6 @@

    "qs": ["qs@6.14.1", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ=="],

    "query-selector-shadow-dom": ["query-selector-shadow-dom@1.0.1", "", {}, "sha512-lT5yCqEBgfoMYpf3F2xQRK7zEr1rhIIZuceDK6+xRkJQ4NMbHTwXqk4NkwDwQMNqXgG9r9fyHnzwNVs6zV5KRw=="],

    "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],

    "quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
@@ -3517,7 +3509,7 @@

    "web-streams-polyfill": ["web-streams-polyfill@4.0.0-beta.3", "", {}, "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug=="],

    "web-vitals": ["web-vitals@5.1.0", "", {}, "sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg=="],
    "web-vitals": ["web-vitals@4.2.4", "", {}, "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw=="],

    "webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="],

@@ -4105,16 +4097,8 @@

    "postcss-nested/postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="],

    "posthog-js/@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.208.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg=="],

    "posthog-js/@opentelemetry/exporter-logs-otlp-http": ["@opentelemetry/exporter-logs-otlp-http@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.208.0", "@opentelemetry/otlp-transformer": "0.208.0", "@opentelemetry/sdk-logs": "0.208.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-jOv40Bs9jy9bZVLo/i8FwUiuCvbjWDI+ZW13wimJm4LjnlwJxGgB+N/VWOZUTpM+ah/awXeQqKdNlpLf2EjvYg=="],

    "posthog-js/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-QlAyL1jRpOeaqx7/leG1vJMp84g0xKP6gJmfELBpnI4O/9xPX+Hu5m1POk9Kl+veNkyth5t19hRlN6tNY1sjbA=="],

    "posthog-js/fflate": ["fflate@0.4.8", "", {}, "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="],

    "posthog-node/@posthog/core": ["@posthog/core@1.2.2", "", {}, "sha512-f16Ozx6LIigRG+HsJdt+7kgSxZTHeX5f1JlCGKI1lXcvlZgfsCR338FuMI2QRYXGl+jg/vYFzGOTQBxl90lnBg=="],

    "protobufjs/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],

    "proxy-addr/ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="],
@@ -4593,16 +4577,6 @@

    "ora/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="],

    "posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="],

    "posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.208.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-transformer": "0.208.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-gMd39gIfVb2OgxldxUtOwGJYSH8P1kVFFlJLuut32L6KgUC4gl1dMhn+YC2mGn0bDOiQYSk/uHOdSjuKp58vvA=="],

    "posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.208.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-DCFPY8C6lAQHUNkzcNT9R+qYExvsk6C5Bto2pbNxgicpcSWbe2WHShLxkOxIdNcBiYPdVHv/e7vH7K6TI+C+fQ=="],

    "posthog-js/@opentelemetry/sdk-logs/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="],

    "posthog-js/@opentelemetry/sdk-logs/@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="],

    "protobufjs/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],

    "react-email/chokidar/readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
@@ -4811,12 +4785,6 @@

    "ora/cli-cursor/restore-cursor/onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="],

    "posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="],

    "posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="],

    "posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],

    "rimraf/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],

    "sim/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],

@@ -110,22 +110,12 @@ spec:
          {{- end }}
          {{- include "sim.resources" .Values.app | nindent 10 }}
          {{- include "sim.securityContext" .Values.app | nindent 10 }}
          {{- if or .Values.extraVolumeMounts .Values.app.extraVolumeMounts }}
          {{- with .Values.extraVolumeMounts }}
          volumeMounts:
            {{- with .Values.extraVolumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
            {{- with .Values.app.extraVolumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
          {{- end }}
      {{- if or .Values.extraVolumes .Values.app.extraVolumes }}
      {{- with .Values.extraVolumes }}
      volumes:
        {{- with .Values.extraVolumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
        {{- with .Values.app.extraVolumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      {{- end }}
      {{- end }}
@@ -92,7 +92,6 @@ spec:
            {{- toYaml .Values.ollama.readinessProbe | nindent 12 }}
          {{- end }}
          {{- include "sim.resources" .Values.ollama | nindent 10 }}
          {{- if or .Values.ollama.persistence.enabled .Values.extraVolumeMounts .Values.ollama.extraVolumeMounts }}
          volumeMounts:
            {{- if .Values.ollama.persistence.enabled }}
            - name: ollama-data
@@ -101,22 +100,13 @@ spec:
            {{- with .Values.extraVolumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
            {{- with .Values.ollama.extraVolumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
          {{- end }}
      {{- if or .Values.ollama.persistence.enabled .Values.extraVolumes .Values.ollama.extraVolumes }}
      {{- if .Values.ollama.persistence.enabled }}
      volumes:
        {{- if .Values.ollama.persistence.enabled }}
        - name: ollama-data
          persistentVolumeClaim:
            claimName: {{ include "sim.fullname" . }}-ollama-data
        {{- end }}
        {{- with .Values.extraVolumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
        {{- with .Values.ollama.extraVolumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      {{- end }}
      {{- end }}
@@ -84,22 +84,12 @@ spec:
          {{- end }}
          {{- include "sim.resources" .Values.realtime | nindent 10 }}
          {{- include "sim.securityContext" .Values.realtime | nindent 10 }}
          {{- if or .Values.extraVolumeMounts .Values.realtime.extraVolumeMounts }}
          {{- with .Values.extraVolumeMounts }}
          volumeMounts:
            {{- with .Values.extraVolumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
            {{- with .Values.realtime.extraVolumeMounts }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
          {{- end }}
      {{- if or .Values.extraVolumes .Values.realtime.extraVolumes }}
      {{- with .Values.extraVolumes }}
      volumes:
        {{- with .Values.extraVolumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
        {{- with .Values.realtime.extraVolumes }}
        {{- toYaml . | nindent 8 }}
        {{- end }}
      {{- end }}
      {{- end }}
@@ -224,10 +224,6 @@ app:
      timeoutSeconds: 5
      failureThreshold: 3

  # Additional volumes for app deployment (e.g., branding assets, custom configs)
  extraVolumes: []
  extraVolumeMounts: []

# Realtime socket server configuration
realtime:
  # Enable/disable the realtime service
@@ -305,10 +301,6 @@ realtime:
      timeoutSeconds: 5
      failureThreshold: 3

  # Additional volumes for realtime deployment
  extraVolumes: []
  extraVolumeMounts: []

# Database migrations job configuration
migrations:
  # Enable/disable migrations job
@@ -547,10 +539,6 @@ ollama:
      timeoutSeconds: 5
      failureThreshold: 3

  # Additional volumes for ollama deployment
  extraVolumes: []
  extraVolumeMounts: []

# Ingress configuration
ingress:
  # Enable/disable ingress

@@ -1,4 +0,0 @@
|
||||
DROP INDEX "idempotency_key_namespace_unique";--> statement-breakpoint
|
||||
DROP INDEX "idempotency_key_namespace_idx";--> statement-breakpoint
|
||||
ALTER TABLE "idempotency_key" ADD PRIMARY KEY ("key");--> statement-breakpoint
|
||||
ALTER TABLE "idempotency_key" DROP COLUMN "namespace";
|
||||
@@ -1023,13 +1023,6 @@
      "when": 1768867605608,
      "tag": "0146_cultured_ikaris",
      "breakpoints": true
    },
    {
      "idx": 147,
      "version": "7",
      "when": 1769134350805,
      "tag": "0147_rare_firebrand",
      "breakpoints": true
    }
  ]
}

@@ -1656,13 +1656,20 @@ export const workflowDeploymentVersion = pgTable(
export const idempotencyKey = pgTable(
  'idempotency_key',
  {
    key: text('key').primaryKey(),
    key: text('key').notNull(),
    namespace: text('namespace').notNull().default('default'),
    result: json('result').notNull(),
    createdAt: timestamp('created_at').notNull().defaultNow(),
  },
  (table) => ({
    // Primary key is combination of key and namespace
    keyNamespacePk: uniqueIndex('idempotency_key_namespace_unique').on(table.key, table.namespace),

    // Index for cleanup operations by creation time
    createdAtIdx: index('idempotency_key_created_at_idx').on(table.createdAt),

    // Index for namespace-based queries
    namespaceIdx: index('idempotency_key_namespace_idx').on(table.namespace),
  })
)

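The comment in this hunk describes the pair as the primary key, while the code declares a unique index over it; either form gives ON CONFLICT a valid (key, namespace) target. For comparison, a hedged sketch of the same table using drizzle's composite primaryKey helper (assuming the primaryKey({ columns }) signature from drizzle-orm/pg-core):

```ts
import { index, json, pgTable, primaryKey, text, timestamp } from 'drizzle-orm/pg-core'

// Alternative sketch: declare the pair as a true composite primary key
// instead of a unique index. Semantically equivalent as an upsert target.
export const idempotencyKeyAlt = pgTable(
  'idempotency_key',
  {
    key: text('key').notNull(),
    namespace: text('namespace').notNull().default('default'),
    result: json('result').notNull(),
    createdAt: timestamp('created_at').notNull().defaultNow(),
  },
  (table) => ({
    pk: primaryKey({ columns: [table.key, table.namespace] }),
    // Index for cleanup operations by creation time, as in the schema above.
    createdAtIdx: index('idempotency_key_created_at_idx').on(table.createdAt),
  })
)
```
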