Compare commits

..

17 Commits

Author SHA1 Message Date
Siddharth Ganesan
896d6b5529 Remove console logs 2026-01-23 13:02:37 -08:00
Siddharth Ganesan
fefcb61f8b Fix actions mapping 2026-01-23 12:28:23 -08:00
Siddharth Ganesan
24173bb008 Improvements 2026-01-23 12:10:29 -08:00
Waleed
64efeaa2e6 feat(admin): add credits endpoint to issue credits to users (#2954)
* feat(admin): add credits endpoint to issue credits to users

* fix(admin): use existing credit functions and handle enterprise seats

* fix(admin): reject NaN and Infinity in amount validation

* styling

* fix(admin): validate userId and email are strings
2026-01-23 11:33:13 -08:00
Waleed
9b72b52b33 feat(blog): v0.5 release post (#2953)
* feat(blog): v0.5 post

* improvement(blog): simplify title and remove code block header

- Simplified blog title from "Introducing Sim Studio v0.5" to "Introducing Sim v0.5"
- Removed language label header and copy button from code blocks for cleaner appearance

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* ack PR comments

* small styling improvements

* created system to create post-specific components

* updated component

* cache invalidation

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-23 09:07:53 -08:00
Emir Karabeg
1467862488 improvement(logs): trace span, details (#2952)
* improvement(action-bar): ordering

* improvement(logs): details, trace span
2026-01-22 19:50:20 -08:00
Waleed
7f2262857c improvement(kb): add document filtering, select all, and React Query migration (#2951)
* improvement(kb): add document filtering, select all, and React Query migration

* test(kb): update tests for enabledFilter and removed userId params

* fix(kb): remove non-null assertion, add explicit guard
2026-01-22 19:25:16 -08:00
Vikhyath Mondreti
1b309b50e6 fix(idempotency): add conflict target to atomicallyClaimDb query + remove redundant db namespace tracking (#2950)
* fix(idempotency): add conflict target to atomicallyClaimDb query

* delete needs to account for namespace

* simplify namespace filtering logic

* fix cleanup

* consistent target
2026-01-22 18:38:08 -08:00
Waleed
f765b83a26 chore(deps): bump posthog-js to 1.334.1 (#2948) 2026-01-22 18:06:05 -08:00
Vikhyath Mondreti
aa99db6fdd fix(subflows): tag dropdown + resolution logic (#2949)
* fix(subflows): tag dropdown + resolution logic

* fixes;

* revert parallel change
2026-01-22 17:57:55 -08:00
Waleed
748793e07d fix(executor): handle condition dead-end branches in loops (#2944) 2026-01-22 13:30:11 -08:00
Siddharth Ganesan
91da7e183a fix(copilot): always allow, credential masking (#2947)
* Fix always allow, credential validation

* Credential masking

* Autoload
2026-01-22 13:07:16 -08:00
Waleed
ab09a5ad23 feat(router): expose reasoning output in router v2 block (#2945) 2026-01-22 12:43:57 -08:00
Vikhyath Mondreti
fcd0240db6 fix(resolver): consolidate reference resolution (#2941)
* fix(resolver): consolidate code to resolve references

* fix edge cases

* use already formatted error

* fix multi index

* fix backwards compat reachability

* handle backwards compatibility accurately

* use shared constant correctly
2026-01-22 12:38:50 -08:00
Waleed
4e4149792a fix(gmail): expose messageId field in read email block (#2943) 2026-01-22 11:46:34 -08:00
Waleed
9a8b591257 improvement(helm): add per-deployment extraVolumes support (#2942) 2026-01-22 11:35:23 -08:00
Waleed
f3ae3f8442 fix(executor): stop parallel execution when block errors (#2940) 2026-01-22 11:34:40 -08:00
102 changed files with 15678 additions and 1844 deletions

View File

@@ -0,0 +1,27 @@
'use client'
import { useState } from 'react'
import { ArrowLeft, ChevronLeft } from 'lucide-react'
import Link from 'next/link'
/**
 * Navigation link back to the Sim Studio index page.
 *
 * Tracks hover locally so the chevron icon can swap to a full arrow while
 * the pointer is over the link; the wrapping span also nudges left on hover
 * via the group-hover translate utility class.
 */
export function BackLink() {
  const [hovering, setHovering] = useState(false)
  // Choose which icon component to render; both draw at the same 4x4 size.
  const Icon = hovering ? ArrowLeft : ChevronLeft
  return (
    <Link
      href='/studio'
      className='group flex items-center gap-1 text-gray-600 text-sm hover:text-gray-900'
      onMouseEnter={() => setHovering(true)}
      onMouseLeave={() => setHovering(false)}
    >
      <span className='group-hover:-translate-x-0.5 inline-flex transition-transform duration-200'>
        <Icon className='h-4 w-4' aria-hidden='true' />
      </span>
      Back to Sim Studio
    </Link>
  )
}

View File

@@ -5,7 +5,10 @@ import { Avatar, AvatarFallback, AvatarImage } from '@/components/emcn'
import { FAQ } from '@/lib/blog/faq' import { FAQ } from '@/lib/blog/faq'
import { getAllPostMeta, getPostBySlug, getRelatedPosts } from '@/lib/blog/registry' import { getAllPostMeta, getPostBySlug, getRelatedPosts } from '@/lib/blog/registry'
import { buildArticleJsonLd, buildBreadcrumbJsonLd, buildPostMetadata } from '@/lib/blog/seo' import { buildArticleJsonLd, buildBreadcrumbJsonLd, buildPostMetadata } from '@/lib/blog/seo'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { soehne } from '@/app/_styles/fonts/soehne/soehne' import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { BackLink } from '@/app/(landing)/studio/[slug]/back-link'
import { ShareButton } from '@/app/(landing)/studio/[slug]/share-button'
export async function generateStaticParams() { export async function generateStaticParams() {
const posts = await getAllPostMeta() const posts = await getAllPostMeta()
@@ -48,9 +51,7 @@ export default async function Page({ params }: { params: Promise<{ slug: string
/> />
<header className='mx-auto max-w-[1450px] px-6 pt-8 sm:px-8 sm:pt-12 md:px-12 md:pt-16'> <header className='mx-auto max-w-[1450px] px-6 pt-8 sm:px-8 sm:pt-12 md:px-12 md:pt-16'>
<div className='mb-6'> <div className='mb-6'>
<Link href='/studio' className='text-gray-600 text-sm hover:text-gray-900'> <BackLink />
Back to Sim Studio
</Link>
</div> </div>
<div className='flex flex-col gap-8 md:flex-row md:gap-12'> <div className='flex flex-col gap-8 md:flex-row md:gap-12'>
<div className='w-full flex-shrink-0 md:w-[450px]'> <div className='w-full flex-shrink-0 md:w-[450px]'>
@@ -75,7 +76,8 @@ export default async function Page({ params }: { params: Promise<{ slug: string
> >
{post.title} {post.title}
</h1> </h1>
<div className='mt-4 flex items-center gap-3'> <div className='mt-4 flex items-center justify-between'>
<div className='flex items-center gap-3'>
{(post.authors || [post.author]).map((a, idx) => ( {(post.authors || [post.author]).map((a, idx) => (
<div key={idx} className='flex items-center gap-2'> <div key={idx} className='flex items-center gap-2'>
{a?.avatarUrl ? ( {a?.avatarUrl ? (
@@ -98,6 +100,8 @@ export default async function Page({ params }: { params: Promise<{ slug: string
</div> </div>
))} ))}
</div> </div>
<ShareButton url={`${getBaseUrl()}/studio/${slug}`} title={post.title} />
</div>
</div> </div>
</div> </div>
<hr className='mt-8 border-gray-200 border-t sm:mt-12' /> <hr className='mt-8 border-gray-200 border-t sm:mt-12' />

View File

@@ -0,0 +1,65 @@
'use client'
import { useState } from 'react'
import { Share2 } from 'lucide-react'
import { Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
/** Props for the ShareButton popover control. */
interface ShareButtonProps {
  /** Absolute URL of the post being shared (copied and embedded in share intents). */
  url: string
  /** Post title, used as the pre-filled text for the X/Twitter share intent. */
  title: string
}
/**
 * Popover-based share control for a blog post.
 *
 * Offers three actions: copy the post URL to the clipboard (with transient
 * "Copied!" feedback), share to X/Twitter, and share to LinkedIn. The popover
 * dismisses itself after each action completes.
 */
export function ShareButton({ url, title }: ShareButtonProps) {
  const [open, setOpen] = useState(false)
  const [copied, setCopied] = useState(false)

  // Open an external share intent in a new tab, then dismiss the popover.
  const openShareTarget = (shareUrl: string) => {
    window.open(shareUrl, '_blank', 'noopener,noreferrer')
    setOpen(false)
  }

  const handleCopyLink = async () => {
    try {
      await navigator.clipboard.writeText(url)
      setCopied(true)
      // Show "Copied!" briefly, then reset the label and close the popover.
      setTimeout(() => {
        setCopied(false)
        setOpen(false)
      }, 1000)
    } catch {
      // Clipboard unavailable or permission denied — just close the popover.
      setOpen(false)
    }
  }

  const handleShareTwitter = () => {
    openShareTarget(
      `https://twitter.com/intent/tweet?url=${encodeURIComponent(url)}&text=${encodeURIComponent(title)}`
    )
  }

  const handleShareLinkedIn = () => {
    openShareTarget(
      `https://www.linkedin.com/sharing/share-offsite/?url=${encodeURIComponent(url)}`
    )
  }

  return (
    <Popover
      open={open}
      onOpenChange={setOpen}
      variant='secondary'
      size='sm'
      colorScheme='inverted'
    >
      <PopoverTrigger asChild>
        <button
          className='flex items-center gap-1.5 text-gray-600 text-sm hover:text-gray-900'
          aria-label='Share this post'
        >
          <Share2 className='h-4 w-4' />
          <span>Share</span>
        </button>
      </PopoverTrigger>
      <PopoverContent align='end' minWidth={140}>
        <PopoverItem onClick={handleCopyLink}>{copied ? 'Copied!' : 'Copy link'}</PopoverItem>
        <PopoverItem onClick={handleShareTwitter}>Share on X</PopoverItem>
        <PopoverItem onClick={handleShareLinkedIn}>Share on LinkedIn</PopoverItem>
      </PopoverContent>
    </Popover>
  )
}

View File

@@ -313,7 +313,7 @@ describe('Function Execute API Route', () => {
'block-2': 'world', 'block-2': 'world',
}, },
blockNameMapping: { blockNameMapping: {
validVar: 'block-1', validvar: 'block-1',
another_valid: 'block-2', another_valid: 'block-2',
}, },
}) })
@@ -539,7 +539,7 @@ describe('Function Execute API Route', () => {
'block-complex': complexData, 'block-complex': complexData,
}, },
blockNameMapping: { blockNameMapping: {
complexData: 'block-complex', complexdata: 'block-complex',
}, },
}) })

View File

@@ -6,11 +6,11 @@ import { executeInE2B } from '@/lib/execution/e2b'
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm' import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages' import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants' import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
import { import {
createEnvVarPattern, createEnvVarPattern,
createWorkflowVariablePattern, createWorkflowVariablePattern,
} from '@/executor/utils/reference-validation' } from '@/executor/utils/reference-validation'
import { navigatePath } from '@/executor/variables/resolvers/reference'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
export const runtime = 'nodejs' export const runtime = 'nodejs'
@@ -470,14 +470,17 @@ function resolveEnvironmentVariables(
function resolveTagVariables( function resolveTagVariables(
code: string, code: string,
blockData: Record<string, any>, blockData: Record<string, unknown>,
blockNameMapping: Record<string, string>, blockNameMapping: Record<string, string>,
contextVariables: Record<string, any> blockOutputSchemas: Record<string, OutputSchema>,
contextVariables: Record<string, unknown>,
language = 'javascript'
): string { ): string {
let resolvedCode = code let resolvedCode = code
const undefinedLiteral = language === 'python' ? 'None' : 'undefined'
const tagPattern = new RegExp( const tagPattern = new RegExp(
`${REFERENCE.START}([a-zA-Z_][a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])${REFERENCE.END}`, `${REFERENCE.START}([a-zA-Z_](?:[a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])?)${REFERENCE.END}`,
'g' 'g'
) )
const tagMatches = resolvedCode.match(tagPattern) || [] const tagMatches = resolvedCode.match(tagPattern) || []
@@ -486,41 +489,37 @@ function resolveTagVariables(
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim() const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER) const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
const blockName = pathParts[0] const blockName = pathParts[0]
const fieldPath = pathParts.slice(1)
const blockId = blockNameMapping[blockName] const result = resolveBlockReference(blockName, fieldPath, {
if (!blockId) { blockNameMapping,
blockData,
blockOutputSchemas,
})
if (!result) {
continue continue
} }
const blockOutput = blockData[blockId] let tagValue = result.value
if (blockOutput === undefined) {
continue
}
let tagValue: any
if (pathParts.length === 1) {
tagValue = blockOutput
} else {
tagValue = navigatePath(blockOutput, pathParts.slice(1))
}
if (tagValue === undefined) { if (tagValue === undefined) {
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), undefinedLiteral)
continue continue
} }
if ( if (typeof tagValue === 'string') {
typeof tagValue === 'string' && const trimmed = tagValue.trimStart()
tagValue.length > 100 && if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
(tagValue.startsWith('{') || tagValue.startsWith('['))
) {
try { try {
tagValue = JSON.parse(tagValue) tagValue = JSON.parse(tagValue)
} catch { } catch {
// Keep as-is // Keep as string if not valid JSON
}
} }
} }
const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}` const safeVarName = `__tag_${tagName.replace(/_/g, '_1').replace(/\./g, '_0')}`
contextVariables[safeVarName] = tagValue contextVariables[safeVarName] = tagValue
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName) resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
} }
@@ -537,18 +536,27 @@ function resolveTagVariables(
*/ */
function resolveCodeVariables( function resolveCodeVariables(
code: string, code: string,
params: Record<string, any>, params: Record<string, unknown>,
envVars: Record<string, string> = {}, envVars: Record<string, string> = {},
blockData: Record<string, any> = {}, blockData: Record<string, unknown> = {},
blockNameMapping: Record<string, string> = {}, blockNameMapping: Record<string, string> = {},
workflowVariables: Record<string, any> = {} blockOutputSchemas: Record<string, OutputSchema> = {},
): { resolvedCode: string; contextVariables: Record<string, any> } { workflowVariables: Record<string, unknown> = {},
language = 'javascript'
): { resolvedCode: string; contextVariables: Record<string, unknown> } {
let resolvedCode = code let resolvedCode = code
const contextVariables: Record<string, any> = {} const contextVariables: Record<string, unknown> = {}
resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables) resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables) resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
resolvedCode = resolveTagVariables(resolvedCode, blockData, blockNameMapping, contextVariables) resolvedCode = resolveTagVariables(
resolvedCode,
blockData,
blockNameMapping,
blockOutputSchemas,
contextVariables,
language
)
return { resolvedCode, contextVariables } return { resolvedCode, contextVariables }
} }
@@ -585,6 +593,7 @@ export async function POST(req: NextRequest) {
envVars = {}, envVars = {},
blockData = {}, blockData = {},
blockNameMapping = {}, blockNameMapping = {},
blockOutputSchemas = {},
workflowVariables = {}, workflowVariables = {},
workflowId, workflowId,
isCustomTool = false, isCustomTool = false,
@@ -601,20 +610,21 @@ export async function POST(req: NextRequest) {
isCustomTool, isCustomTool,
}) })
// Resolve variables in the code with workflow environment variables const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
const codeResolution = resolveCodeVariables( const codeResolution = resolveCodeVariables(
code, code,
executionParams, executionParams,
envVars, envVars,
blockData, blockData,
blockNameMapping, blockNameMapping,
workflowVariables blockOutputSchemas,
workflowVariables,
lang
) )
resolvedCode = codeResolution.resolvedCode resolvedCode = codeResolution.resolvedCode
const contextVariables = codeResolution.contextVariables const contextVariables = codeResolution.contextVariables
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
let jsImports = '' let jsImports = ''
let jsRemainingCode = resolvedCode let jsRemainingCode = resolvedCode
let hasImports = false let hasImports = false
@@ -670,7 +680,11 @@ export async function POST(req: NextRequest) {
prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n` prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
prologueLineCount++ prologueLineCount++
for (const [k, v] of Object.entries(contextVariables)) { for (const [k, v] of Object.entries(contextVariables)) {
if (v === undefined) {
prologue += `const ${k} = undefined;\n`
} else {
prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n` prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
}
prologueLineCount++ prologueLineCount++
} }
@@ -741,7 +755,11 @@ export async function POST(req: NextRequest) {
prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n` prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
prologueLineCount++ prologueLineCount++
for (const [k, v] of Object.entries(contextVariables)) { for (const [k, v] of Object.entries(contextVariables)) {
if (v === undefined) {
prologue += `${k} = None\n`
} else {
prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n` prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
}
prologueLineCount++ prologueLineCount++
} }
const wrapped = [ const wrapped = [

View File

@@ -157,7 +157,7 @@ describe('Knowledge Base Documents API Route', () => {
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith( expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
'kb-123', 'kb-123',
{ {
includeDisabled: false, enabledFilter: undefined,
search: undefined, search: undefined,
limit: 50, limit: 50,
offset: 0, offset: 0,
@@ -166,7 +166,7 @@ describe('Knowledge Base Documents API Route', () => {
) )
}) })
it('should filter disabled documents by default', async () => { it('should return documents with default filter', async () => {
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils') const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
const { getDocuments } = await import('@/lib/knowledge/documents/service') const { getDocuments } = await import('@/lib/knowledge/documents/service')
@@ -194,7 +194,7 @@ describe('Knowledge Base Documents API Route', () => {
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith( expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
'kb-123', 'kb-123',
{ {
includeDisabled: false, enabledFilter: undefined,
search: undefined, search: undefined,
limit: 50, limit: 50,
offset: 0, offset: 0,
@@ -203,7 +203,7 @@ describe('Knowledge Base Documents API Route', () => {
) )
}) })
it('should include disabled documents when requested', async () => { it('should filter documents by enabled status when requested', async () => {
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils') const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
const { getDocuments } = await import('@/lib/knowledge/documents/service') const { getDocuments } = await import('@/lib/knowledge/documents/service')
@@ -223,7 +223,7 @@ describe('Knowledge Base Documents API Route', () => {
}, },
}) })
const url = 'http://localhost:3000/api/knowledge/kb-123/documents?includeDisabled=true' const url = 'http://localhost:3000/api/knowledge/kb-123/documents?enabledFilter=disabled'
const req = new Request(url, { method: 'GET' }) as any const req = new Request(url, { method: 'GET' }) as any
const { GET } = await import('@/app/api/knowledge/[id]/documents/route') const { GET } = await import('@/app/api/knowledge/[id]/documents/route')
@@ -233,7 +233,7 @@ describe('Knowledge Base Documents API Route', () => {
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith( expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
'kb-123', 'kb-123',
{ {
includeDisabled: true, enabledFilter: 'disabled',
search: undefined, search: undefined,
limit: 50, limit: 50,
offset: 0, offset: 0,
@@ -361,8 +361,7 @@ describe('Knowledge Base Documents API Route', () => {
expect(vi.mocked(createSingleDocument)).toHaveBeenCalledWith( expect(vi.mocked(createSingleDocument)).toHaveBeenCalledWith(
validDocumentData, validDocumentData,
'kb-123', 'kb-123',
expect.any(String), expect.any(String)
'user-123'
) )
}) })
@@ -470,8 +469,7 @@ describe('Knowledge Base Documents API Route', () => {
expect(vi.mocked(createDocumentRecords)).toHaveBeenCalledWith( expect(vi.mocked(createDocumentRecords)).toHaveBeenCalledWith(
validBulkData.documents, validBulkData.documents,
'kb-123', 'kb-123',
expect.any(String), expect.any(String)
'user-123'
) )
expect(vi.mocked(processDocumentsWithQueue)).toHaveBeenCalled() expect(vi.mocked(processDocumentsWithQueue)).toHaveBeenCalled()
}) })

View File

@@ -5,6 +5,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth' import { getSession } from '@/lib/auth'
import { import {
bulkDocumentOperation, bulkDocumentOperation,
bulkDocumentOperationByFilter,
createDocumentRecords, createDocumentRecords,
createSingleDocument, createSingleDocument,
getDocuments, getDocuments,
@@ -57,12 +58,19 @@ const BulkCreateDocumentsSchema = z.object({
bulk: z.literal(true), bulk: z.literal(true),
}) })
const BulkUpdateDocumentsSchema = z.object({ const BulkUpdateDocumentsSchema = z
.object({
operation: z.enum(['enable', 'disable', 'delete']), operation: z.enum(['enable', 'disable', 'delete']),
documentIds: z documentIds: z
.array(z.string()) .array(z.string())
.min(1, 'At least one document ID is required') .min(1, 'At least one document ID is required')
.max(100, 'Cannot operate on more than 100 documents at once'), .max(100, 'Cannot operate on more than 100 documents at once')
.optional(),
selectAll: z.boolean().optional(),
enabledFilter: z.enum(['all', 'enabled', 'disabled']).optional(),
})
.refine((data) => data.selectAll || (data.documentIds && data.documentIds.length > 0), {
message: 'Either selectAll must be true or documentIds must be provided',
}) })
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
@@ -90,14 +98,17 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
} }
const url = new URL(req.url) const url = new URL(req.url)
const includeDisabled = url.searchParams.get('includeDisabled') === 'true' const enabledFilter = url.searchParams.get('enabledFilter') as
| 'all'
| 'enabled'
| 'disabled'
| null
const search = url.searchParams.get('search') || undefined const search = url.searchParams.get('search') || undefined
const limit = Number.parseInt(url.searchParams.get('limit') || '50') const limit = Number.parseInt(url.searchParams.get('limit') || '50')
const offset = Number.parseInt(url.searchParams.get('offset') || '0') const offset = Number.parseInt(url.searchParams.get('offset') || '0')
const sortByParam = url.searchParams.get('sortBy') const sortByParam = url.searchParams.get('sortBy')
const sortOrderParam = url.searchParams.get('sortOrder') const sortOrderParam = url.searchParams.get('sortOrder')
// Validate sort parameters
const validSortFields: DocumentSortField[] = [ const validSortFields: DocumentSortField[] = [
'filename', 'filename',
'fileSize', 'fileSize',
@@ -105,6 +116,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
'chunkCount', 'chunkCount',
'uploadedAt', 'uploadedAt',
'processingStatus', 'processingStatus',
'enabled',
] ]
const validSortOrders: SortOrder[] = ['asc', 'desc'] const validSortOrders: SortOrder[] = ['asc', 'desc']
@@ -120,7 +132,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
const result = await getDocuments( const result = await getDocuments(
knowledgeBaseId, knowledgeBaseId,
{ {
includeDisabled, enabledFilter: enabledFilter || undefined,
search, search,
limit, limit,
offset, offset,
@@ -190,8 +202,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const createdDocuments = await createDocumentRecords( const createdDocuments = await createDocumentRecords(
validatedData.documents, validatedData.documents,
knowledgeBaseId, knowledgeBaseId,
requestId, requestId
userId
) )
logger.info( logger.info(
@@ -250,16 +261,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
throw validationError throw validationError
} }
} else { } else {
// Handle single document creation
try { try {
const validatedData = CreateDocumentSchema.parse(body) const validatedData = CreateDocumentSchema.parse(body)
const newDocument = await createSingleDocument( const newDocument = await createSingleDocument(validatedData, knowledgeBaseId, requestId)
validatedData,
knowledgeBaseId,
requestId,
userId
)
try { try {
const { PlatformEvents } = await import('@/lib/core/telemetry') const { PlatformEvents } = await import('@/lib/core/telemetry')
@@ -294,7 +299,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
} catch (error) { } catch (error) {
logger.error(`[${requestId}] Error creating document`, error) logger.error(`[${requestId}] Error creating document`, error)
// Check if it's a storage limit error
const errorMessage = error instanceof Error ? error.message : 'Failed to create document' const errorMessage = error instanceof Error ? error.message : 'Failed to create document'
const isStorageLimitError = const isStorageLimitError =
errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit') errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
@@ -331,16 +335,22 @@ export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id
try { try {
const validatedData = BulkUpdateDocumentsSchema.parse(body) const validatedData = BulkUpdateDocumentsSchema.parse(body)
const { operation, documentIds } = validatedData const { operation, documentIds, selectAll, enabledFilter } = validatedData
try { try {
const result = await bulkDocumentOperation( let result
if (selectAll) {
result = await bulkDocumentOperationByFilter(
knowledgeBaseId, knowledgeBaseId,
operation, operation,
documentIds, enabledFilter,
requestId, requestId
session.user.id
) )
} else if (documentIds && documentIds.length > 0) {
result = await bulkDocumentOperation(knowledgeBaseId, operation, documentIds, requestId)
} else {
return NextResponse.json({ error: 'No documents specified' }, { status: 400 })
}
return NextResponse.json({ return NextResponse.json({
success: true, success: true,

View File

@@ -0,0 +1,211 @@
/**
* POST /api/v1/admin/credits
*
* Issue credits to a user by user ID or email.
*
* Body:
* - userId?: string - The user ID to issue credits to
* - email?: string - The user email to issue credits to (alternative to userId)
* - amount: number - The amount of credits to issue (in dollars)
* - reason?: string - Reason for issuing credits (for audit logging)
*
* Response: AdminSingleResponse<{
* success: true,
* entityType: 'user' | 'organization',
* entityId: string,
* amount: number,
* newCreditBalance: number,
* newUsageLimit: number,
* }>
*
* For Pro users: credits are added to user_stats.credit_balance
* For Team users: credits are added to organization.credit_balance
* Usage limits are updated accordingly to allow spending the credits.
*/
import { db } from '@sim/db'
import { organization, subscription, user, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { addCredits } from '@/lib/billing/credits/balance'
import { setUsageLimitForCredits } from '@/lib/billing/credits/purchase'
import { getEffectiveSeats } from '@/lib/billing/subscriptions/utils'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {
badRequestResponse,
internalErrorResponse,
notFoundResponse,
singleResponse,
} from '@/app/api/v1/admin/responses'
const logger = createLogger('AdminCreditsAPI')
export const POST = withAdminAuth(async (request) => {
  try {
    const body = await request.json()
    const { userId, email, amount, reason } = body

    // ---- Input validation -------------------------------------------------
    // Exactly one of userId/email must identify the target user.
    if (!userId && !email) {
      return badRequestResponse('Either userId or email is required')
    }
    if (userId && typeof userId !== 'string') {
      return badRequestResponse('userId must be a string')
    }
    if (email && typeof email !== 'string') {
      return badRequestResponse('email must be a string')
    }
    // Number.isFinite also rejects NaN and +/-Infinity, not just non-numbers.
    if (typeof amount !== 'number' || !Number.isFinite(amount) || amount <= 0) {
      return badRequestResponse('amount must be a positive number')
    }

    // ---- Resolve the target user (by id first, else by email) -------------
    let resolvedUserId: string
    let userEmail: string | null = null
    if (userId) {
      const [userData] = await db
        .select({ id: user.id, email: user.email })
        .from(user)
        .where(eq(user.id, userId))
        .limit(1)
      if (!userData) {
        return notFoundResponse('User')
      }
      resolvedUserId = userData.id
      userEmail = userData.email
    } else {
      // NOTE(review): lookup normalizes to lowercase — assumes emails are
      // stored lowercased; confirm against the signup path.
      const normalizedEmail = email.toLowerCase().trim()
      const [userData] = await db
        .select({ id: user.id, email: user.email })
        .from(user)
        .where(eq(user.email, normalizedEmail))
        .limit(1)
      if (!userData) {
        return notFoundResponse('User with email')
      }
      resolvedUserId = userData.id
      userEmail = userData.email
    }

    // Credits only make sense for paid plans; free users are rejected.
    const userSubscription = await getHighestPrioritySubscription(resolvedUserId)
    if (!userSubscription || !['pro', 'team', 'enterprise'].includes(userSubscription.plan)) {
      return badRequestResponse(
        'User must have an active Pro, Team, or Enterprise subscription to receive credits'
      )
    }

    // ---- Determine the billing entity -------------------------------------
    // Team/Enterprise credits go to the organization; Pro credits go to the
    // individual user's stats row.
    let entityType: 'user' | 'organization'
    let entityId: string
    const plan = userSubscription.plan
    let seats: number | null = null
    if (plan === 'team' || plan === 'enterprise') {
      entityType = 'organization'
      // For org plans the subscription's referenceId is the organization id.
      entityId = userSubscription.referenceId
      const [orgExists] = await db
        .select({ id: organization.id })
        .from(organization)
        .where(eq(organization.id, entityId))
        .limit(1)
      if (!orgExists) {
        return notFoundResponse('Organization')
      }
      // Seat count feeds into the usage-limit calculation below.
      const [subData] = await db
        .select()
        .from(subscription)
        .where(and(eq(subscription.referenceId, entityId), eq(subscription.status, 'active')))
        .limit(1)
      seats = getEffectiveSeats(subData)
    } else {
      entityType = 'user'
      entityId = resolvedUserId
      // Ensure a user_stats row exists before crediting it.
      const [existingStats] = await db
        .select({ id: userStats.id })
        .from(userStats)
        .where(eq(userStats.userId, entityId))
        .limit(1)
      if (!existingStats) {
        await db.insert(userStats).values({
          id: nanoid(),
          userId: entityId,
        })
      }
    }

    // ---- Apply the credit and read back the new balance --------------------
    await addCredits(entityType, entityId, amount)

    let newCreditBalance: number
    if (entityType === 'organization') {
      const [orgData] = await db
        .select({ creditBalance: organization.creditBalance })
        .from(organization)
        .where(eq(organization.id, entityId))
        .limit(1)
      // Balance columns are stored as strings (decimal) — parse for the response.
      newCreditBalance = Number.parseFloat(orgData?.creditBalance || '0')
    } else {
      const [stats] = await db
        .select({ creditBalance: userStats.creditBalance })
        .from(userStats)
        .where(eq(userStats.userId, entityId))
        .limit(1)
      newCreditBalance = Number.parseFloat(stats?.creditBalance || '0')
    }

    // Raise the usage limit so the newly issued credits are actually spendable.
    await setUsageLimitForCredits(entityType, entityId, plan, seats, newCreditBalance)

    let newUsageLimit: number
    if (entityType === 'organization') {
      const [orgData] = await db
        .select({ orgUsageLimit: organization.orgUsageLimit })
        .from(organization)
        .where(eq(organization.id, entityId))
        .limit(1)
      newUsageLimit = Number.parseFloat(orgData?.orgUsageLimit || '0')
    } else {
      const [stats] = await db
        .select({ currentUsageLimit: userStats.currentUsageLimit })
        .from(userStats)
        .where(eq(userStats.userId, entityId))
        .limit(1)
      newUsageLimit = Number.parseFloat(stats?.currentUsageLimit || '0')
    }

    // Audit log: who received what and why.
    logger.info('Admin API: Issued credits', {
      resolvedUserId,
      userEmail,
      entityType,
      entityId,
      amount,
      newCreditBalance,
      newUsageLimit,
      reason: reason || 'No reason provided',
    })

    return singleResponse({
      success: true,
      userId: resolvedUserId,
      userEmail,
      entityType,
      entityId,
      amount,
      newCreditBalance,
      newUsageLimit,
    })
  } catch (error) {
    // Details are logged server-side only; the client gets a generic 500.
    logger.error('Admin API: Failed to issue credits', { error })
    return internalErrorResponse('Failed to issue credits')
  }
})

View File

@@ -63,6 +63,9 @@
* GET /api/v1/admin/subscriptions/:id - Get subscription details * GET /api/v1/admin/subscriptions/:id - Get subscription details
* DELETE /api/v1/admin/subscriptions/:id - Cancel subscription (?atPeriodEnd=true for scheduled) * DELETE /api/v1/admin/subscriptions/:id - Cancel subscription (?atPeriodEnd=true for scheduled)
* *
* Credits:
* POST /api/v1/admin/credits - Issue credits to user (by userId or email)
*
* Access Control (Permission Groups): * Access Control (Permission Groups):
* GET /api/v1/admin/access-control - List permission groups (?organizationId=X) * GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
* DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X) * DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)

View File

@@ -61,6 +61,7 @@ export function EditChunkModal({
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false) const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null) const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
const [tokenizerOn, setTokenizerOn] = useState(false) const [tokenizerOn, setTokenizerOn] = useState(false)
const [hoveredTokenIndex, setHoveredTokenIndex] = useState<number | null>(null)
const textareaRef = useRef<HTMLTextAreaElement>(null) const textareaRef = useRef<HTMLTextAreaElement>(null)
const error = mutationError?.message ?? null const error = mutationError?.message ?? null
@@ -254,6 +255,8 @@ export function EditChunkModal({
style={{ style={{
backgroundColor: getTokenBgColor(index), backgroundColor: getTokenBgColor(index),
}} }}
onMouseEnter={() => setHoveredTokenIndex(index)}
onMouseLeave={() => setHoveredTokenIndex(null)}
> >
{token} {token}
</span> </span>
@@ -281,6 +284,11 @@ export function EditChunkModal({
<div className='flex items-center gap-[8px]'> <div className='flex items-center gap-[8px]'>
<span className='text-[12px] text-[var(--text-secondary)]'>Tokenizer</span> <span className='text-[12px] text-[var(--text-secondary)]'>Tokenizer</span>
<Switch checked={tokenizerOn} onCheckedChange={setTokenizerOn} /> <Switch checked={tokenizerOn} onCheckedChange={setTokenizerOn} />
{tokenizerOn && hoveredTokenIndex !== null && (
<span className='text-[12px] text-[var(--text-tertiary)]'>
Token #{hoveredTokenIndex + 1}
</span>
)}
</div> </div>
<span className='text-[12px] text-[var(--text-secondary)]'> <span className='text-[12px] text-[var(--text-secondary)]'>
{tokenCount.toLocaleString()} {tokenCount.toLocaleString()}

View File

@@ -36,6 +36,7 @@ import {
import { Input } from '@/components/ui/input' import { Input } from '@/components/ui/input'
import { SearchHighlight } from '@/components/ui/search-highlight' import { SearchHighlight } from '@/components/ui/search-highlight'
import { Skeleton } from '@/components/ui/skeleton' import { Skeleton } from '@/components/ui/skeleton'
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
import type { ChunkData } from '@/lib/knowledge/types' import type { ChunkData } from '@/lib/knowledge/types'
import { import {
ChunkContextMenu, ChunkContextMenu,
@@ -58,55 +59,6 @@ import {
const logger = createLogger('Document') const logger = createLogger('Document')
/**
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
*/
/**
 * Formats a date string to relative time (e.g., "2h ago", "3d ago").
 *
 * @param dateString - Any string accepted by the `Date` constructor (e.g. ISO 8601).
 * @returns Human-readable relative time; "just now" for anything under a minute
 *          (including future timestamps from clock skew), "unknown" for
 *          unparseable input.
 */
function formatRelativeTime(dateString: string): string {
  const date = new Date(dateString)
  // Guard: an unparseable date would otherwise propagate NaN through every
  // comparison below and render as "NaNy ago".
  if (Number.isNaN(date.getTime())) {
    return 'unknown'
  }
  const diffInSeconds = Math.floor((Date.now() - date.getTime()) / 1000)

  // Negative diffs (future timestamps) clamp to "just now".
  if (diffInSeconds < 60) {
    return 'just now'
  }
  if (diffInSeconds < 3600) {
    return `${Math.floor(diffInSeconds / 60)}m ago`
  }
  if (diffInSeconds < 86400) {
    return `${Math.floor(diffInSeconds / 3600)}h ago`
  }
  if (diffInSeconds < 604800) {
    return `${Math.floor(diffInSeconds / 86400)}d ago`
  }
  if (diffInSeconds < 2592000) {
    return `${Math.floor(diffInSeconds / 604800)}w ago`
  }
  if (diffInSeconds < 31536000) {
    return `${Math.floor(diffInSeconds / 2592000)}mo ago`
  }
  return `${Math.floor(diffInSeconds / 31536000)}y ago`
}
/**
* Formats a date string to absolute format for tooltip display
*/
/**
 * Formats a date string to an absolute en-US timestamp for tooltip display
 * (e.g., "Jan 23, 2026, 01:02 PM").
 */
function formatAbsoluteDate(dateString: string): string {
  const formatOptions: Intl.DateTimeFormatOptions = {
    year: 'numeric',
    month: 'short',
    day: 'numeric',
    hour: '2-digit',
    minute: '2-digit',
  }
  return new Date(dateString).toLocaleDateString('en-US', formatOptions)
}
interface DocumentProps { interface DocumentProps {
knowledgeBaseId: string knowledgeBaseId: string
documentId: string documentId: string
@@ -304,7 +256,6 @@ export function Document({
const [searchQuery, setSearchQuery] = useState('') const [searchQuery, setSearchQuery] = useState('')
const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('') const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
const [isSearching, setIsSearching] = useState(false)
const { const {
chunks: initialChunks, chunks: initialChunks,
@@ -344,7 +295,6 @@ export function Document({
const handler = setTimeout(() => { const handler = setTimeout(() => {
startTransition(() => { startTransition(() => {
setDebouncedSearchQuery(searchQuery) setDebouncedSearchQuery(searchQuery)
setIsSearching(searchQuery.trim().length > 0)
}) })
}, 200) }, 200)
@@ -353,6 +303,7 @@ export function Document({
} }
}, [searchQuery]) }, [searchQuery])
const isSearching = debouncedSearchQuery.trim().length > 0
const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0 const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0
const SEARCH_PAGE_SIZE = 50 const SEARCH_PAGE_SIZE = 50
const maxSearchPages = Math.ceil(searchResults.length / SEARCH_PAGE_SIZE) const maxSearchPages = Math.ceil(searchResults.length / SEARCH_PAGE_SIZE)

View File

@@ -27,6 +27,10 @@ import {
ModalContent, ModalContent,
ModalFooter, ModalFooter,
ModalHeader, ModalHeader,
Popover,
PopoverContent,
PopoverItem,
PopoverTrigger,
Table, Table,
TableBody, TableBody,
TableCell, TableCell,
@@ -40,8 +44,11 @@ import { Input } from '@/components/ui/input'
import { SearchHighlight } from '@/components/ui/search-highlight' import { SearchHighlight } from '@/components/ui/search-highlight'
import { Skeleton } from '@/components/ui/skeleton' import { Skeleton } from '@/components/ui/skeleton'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowledge/constants'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types' import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import type { DocumentData } from '@/lib/knowledge/types' import type { DocumentData } from '@/lib/knowledge/types'
import { formatFileSize } from '@/lib/uploads/utils/file-utils'
import { import {
ActionBar, ActionBar,
AddDocumentsModal, AddDocumentsModal,
@@ -189,8 +196,8 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
</div> </div>
</div> </div>
<div className='mt-[4px]'> <div>
<Skeleton className='h-[21px] w-[300px] rounded-[4px]' /> <Skeleton className='mt-[4px] h-[21px] w-[300px] rounded-[4px]' />
</div> </div>
<div className='mt-[16px] flex items-center gap-[8px]'> <div className='mt-[16px] flex items-center gap-[8px]'>
@@ -208,10 +215,13 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0' className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
/> />
</div> </div>
<div className='flex items-center gap-[8px]'>
<Skeleton className='h-[32px] w-[52px] rounded-[6px]' />
<Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'> <Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
Add Documents Add Documents
</Button> </Button>
</div> </div>
</div>
<div className='mt-[12px] flex flex-1 flex-col overflow-hidden'> <div className='mt-[12px] flex flex-1 flex-col overflow-hidden'>
<DocumentTableSkeleton rowCount={8} /> <DocumentTableSkeleton rowCount={8} />
@@ -222,73 +232,11 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
) )
} }
/**
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
*/
/**
 * Formats a date string to relative time (e.g., "2h ago", "3d ago").
 * Anything under a minute reads as "just now".
 */
function formatRelativeTime(dateString: string): string {
  const elapsedSeconds = Math.floor((Date.now() - new Date(dateString).getTime()) / 1000)

  if (elapsedSeconds < 60) {
    return 'just now'
  }

  // Each entry: [seconds per unit, exclusive upper bound in seconds, suffix].
  const units: Array<[number, number, string]> = [
    [60, 3600, 'm'],
    [3600, 86400, 'h'],
    [86400, 604800, 'd'],
    [604800, 2592000, 'w'],
    [2592000, 31536000, 'mo'],
  ]
  for (const [perUnit, upperBound, suffix] of units) {
    if (elapsedSeconds < upperBound) {
      return `${Math.floor(elapsedSeconds / perUnit)}${suffix} ago`
    }
  }
  return `${Math.floor(elapsedSeconds / 31536000)}y ago`
}
/**
* Formats a date string to absolute format for tooltip display
*/
/**
 * Formats a date string to an absolute en-US timestamp for tooltip display
 * (e.g., "Jan 23, 2026, 01:02 PM").
 */
function formatAbsoluteDate(dateString: string): string {
  const parsed = new Date(dateString)
  return parsed.toLocaleDateString('en-US', {
    hour: '2-digit',
    minute: '2-digit',
    year: 'numeric',
    month: 'short',
    day: 'numeric',
  })
}
interface KnowledgeBaseProps { interface KnowledgeBaseProps {
id: string id: string
knowledgeBaseName?: string knowledgeBaseName?: string
} }
/**
 * Renders the document-type icon for a file row.
 * Resolves the icon component via `getDocumentIcon` (which takes the MIME type
 * and the filename) and returns it sized for table cells.
 */
function getFileIcon(mimeType: string, filename: string) {
  const IconComponent = getDocumentIcon(mimeType, filename)
  return <IconComponent className='h-6 w-5 flex-shrink-0' />
}
/**
 * Formats a byte count as a human-readable size (e.g., 1536 -> "1.5 KB").
 *
 * @param bytes - Byte count; expected to be a non-negative finite number.
 * @returns Size rounded to at most two decimal places with a unit suffix.
 *          Values at or above 1 GB are reported in GB (largest supported
 *          unit); zero, negative, or non-finite input yields "0 Bytes".
 */
function formatFileSize(bytes: number): string {
  const sizes = ['Bytes', 'KB', 'MB', 'GB']
  // Guard: the bare Math.log path produced NaN output for negative or
  // non-finite input, and indexed past `sizes` (printing "undefined") for
  // values of 1 TB or more.
  if (!Number.isFinite(bytes) || bytes <= 0) return '0 Bytes'
  const k = 1024
  const i = Math.min(sizes.length - 1, Math.max(0, Math.floor(Math.log(bytes) / Math.log(k))))
  return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`
}
const AnimatedLoader = ({ className }: { className?: string }) => ( const AnimatedLoader = ({ className }: { className?: string }) => (
<Loader2 className={cn(className, 'animate-spin')} /> <Loader2 className={cn(className, 'animate-spin')} />
) )
@@ -336,53 +284,24 @@ const getStatusBadge = (doc: DocumentData) => {
} }
} }
const TAG_SLOTS = [
'tag1',
'tag2',
'tag3',
'tag4',
'tag5',
'tag6',
'tag7',
'number1',
'number2',
'number3',
'number4',
'number5',
'date1',
'date2',
'boolean1',
'boolean2',
'boolean3',
] as const
type TagSlot = (typeof TAG_SLOTS)[number]
interface TagValue { interface TagValue {
slot: TagSlot slot: AllTagSlot
displayName: string displayName: string
value: string value: string
} }
const TAG_FIELD_TYPES: Record<string, string> = {
tag: 'text',
number: 'number',
date: 'date',
boolean: 'boolean',
}
/** /**
* Computes tag values for a document * Computes tag values for a document
*/ */
function getDocumentTags(doc: DocumentData, definitions: TagDefinition[]): TagValue[] { function getDocumentTags(doc: DocumentData, definitions: TagDefinition[]): TagValue[] {
const result: TagValue[] = [] const result: TagValue[] = []
for (const slot of TAG_SLOTS) { for (const slot of ALL_TAG_SLOTS) {
const raw = doc[slot] const raw = doc[slot]
if (raw == null) continue if (raw == null) continue
const def = definitions.find((d) => d.tagSlot === slot) const def = definitions.find((d) => d.tagSlot === slot)
const fieldType = def?.fieldType || TAG_FIELD_TYPES[slot.replace(/\d+$/, '')] || 'text' const fieldType = def?.fieldType || getFieldTypeForSlot(slot) || 'text'
let value: string let value: string
if (fieldType === 'date') { if (fieldType === 'date') {
@@ -424,6 +343,8 @@ export function KnowledgeBase({
const [searchQuery, setSearchQuery] = useState('') const [searchQuery, setSearchQuery] = useState('')
const [showTagsModal, setShowTagsModal] = useState(false) const [showTagsModal, setShowTagsModal] = useState(false)
const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)
/** /**
* Memoize the search query setter to prevent unnecessary re-renders * Memoize the search query setter to prevent unnecessary re-renders
@@ -434,6 +355,7 @@ export function KnowledgeBase({
}, []) }, [])
const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set()) const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
const [isSelectAllMode, setIsSelectAllMode] = useState(false)
const [showDeleteDialog, setShowDeleteDialog] = useState(false) const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false) const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false) const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)
@@ -460,7 +382,6 @@ export function KnowledgeBase({
error: knowledgeBaseError, error: knowledgeBaseError,
refresh: refreshKnowledgeBase, refresh: refreshKnowledgeBase,
} = useKnowledgeBase(id) } = useKnowledgeBase(id)
const [hasProcessingDocuments, setHasProcessingDocuments] = useState(false)
const { const {
documents, documents,
@@ -469,6 +390,7 @@ export function KnowledgeBase({
isFetching: isFetchingDocuments, isFetching: isFetchingDocuments,
isPlaceholderData: isPlaceholderDocuments, isPlaceholderData: isPlaceholderDocuments,
error: documentsError, error: documentsError,
hasProcessingDocuments,
updateDocument, updateDocument,
refreshDocuments, refreshDocuments,
} = useKnowledgeBaseDocuments(id, { } = useKnowledgeBaseDocuments(id, {
@@ -477,7 +399,14 @@ export function KnowledgeBase({
offset: (currentPage - 1) * DOCUMENTS_PER_PAGE, offset: (currentPage - 1) * DOCUMENTS_PER_PAGE,
sortBy, sortBy,
sortOrder, sortOrder,
refetchInterval: hasProcessingDocuments && !isDeleting ? 3000 : false, refetchInterval: (data) => {
if (isDeleting) return false
const hasPending = data?.documents?.some(
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
)
return hasPending ? 3000 : false
},
enabledFilter,
}) })
const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id) const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)
@@ -543,25 +472,15 @@ export function KnowledgeBase({
</TableHead> </TableHead>
) )
useEffect(() => {
const processing = documents.some(
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
)
setHasProcessingDocuments(processing)
if (processing) {
checkForDeadProcesses()
}
}, [documents])
/** /**
* Checks for documents with stale processing states and marks them as failed * Checks for documents with stale processing states and marks them as failed
*/ */
const checkForDeadProcesses = () => { const checkForDeadProcesses = useCallback(
(docsToCheck: DocumentData[]) => {
const now = new Date() const now = new Date()
const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes
const staleDocuments = documents.filter((doc) => { const staleDocuments = docsToCheck.filter((doc) => {
if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) { if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
return false return false
} }
@@ -583,12 +502,22 @@ export function KnowledgeBase({
}, },
{ {
onSuccess: () => { onSuccess: () => {
logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`) logger.info(
`Successfully marked dead process as failed for document: ${doc.filename}`
)
}, },
} }
) )
}) })
},
[id, updateDocumentMutation]
)
useEffect(() => {
if (hasProcessingDocuments) {
checkForDeadProcesses(documents)
} }
}, [hasProcessingDocuments, documents, checkForDeadProcesses])
const handleToggleEnabled = (docId: string) => { const handleToggleEnabled = (docId: string) => {
const document = documents.find((doc) => doc.id === docId) const document = documents.find((doc) => doc.id === docId)
@@ -748,6 +677,7 @@ export function KnowledgeBase({
setSelectedDocuments(new Set(documents.map((doc) => doc.id))) setSelectedDocuments(new Set(documents.map((doc) => doc.id)))
} else { } else {
setSelectedDocuments(new Set()) setSelectedDocuments(new Set())
setIsSelectAllMode(false)
} }
} }
@@ -793,6 +723,26 @@ export function KnowledgeBase({
* Handles bulk enabling of selected documents * Handles bulk enabling of selected documents
*/ */
const handleBulkEnable = () => { const handleBulkEnable = () => {
if (isSelectAllMode) {
bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'enable',
selectAll: true,
enabledFilter,
},
{
onSuccess: (result) => {
logger.info(`Successfully enabled ${result.successCount} documents`)
setSelectedDocuments(new Set())
setIsSelectAllMode(false)
refreshDocuments()
},
}
)
return
}
const documentsToEnable = documents.filter( const documentsToEnable = documents.filter(
(doc) => selectedDocuments.has(doc.id) && !doc.enabled (doc) => selectedDocuments.has(doc.id) && !doc.enabled
) )
@@ -821,6 +771,26 @@ export function KnowledgeBase({
* Handles bulk disabling of selected documents * Handles bulk disabling of selected documents
*/ */
const handleBulkDisable = () => { const handleBulkDisable = () => {
if (isSelectAllMode) {
bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'disable',
selectAll: true,
enabledFilter,
},
{
onSuccess: (result) => {
logger.info(`Successfully disabled ${result.successCount} documents`)
setSelectedDocuments(new Set())
setIsSelectAllMode(false)
refreshDocuments()
},
}
)
return
}
const documentsToDisable = documents.filter( const documentsToDisable = documents.filter(
(doc) => selectedDocuments.has(doc.id) && doc.enabled (doc) => selectedDocuments.has(doc.id) && doc.enabled
) )
@@ -845,18 +815,35 @@ export function KnowledgeBase({
) )
} }
/**
* Opens the bulk delete confirmation modal
*/
const handleBulkDelete = () => { const handleBulkDelete = () => {
if (selectedDocuments.size === 0) return if (selectedDocuments.size === 0) return
setShowBulkDeleteModal(true) setShowBulkDeleteModal(true)
} }
/**
* Confirms and executes the bulk deletion of selected documents
*/
const confirmBulkDelete = () => { const confirmBulkDelete = () => {
if (isSelectAllMode) {
bulkDocumentMutation(
{
knowledgeBaseId: id,
operation: 'delete',
selectAll: true,
enabledFilter,
},
{
onSuccess: (result) => {
logger.info(`Successfully deleted ${result.successCount} documents`)
refreshDocuments()
setSelectedDocuments(new Set())
setIsSelectAllMode(false)
},
onSettled: () => {
setShowBulkDeleteModal(false)
},
}
)
return
}
const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id)) const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))
if (documentsToDelete.length === 0) return if (documentsToDelete.length === 0) return
@@ -881,14 +868,17 @@ export function KnowledgeBase({
} }
const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id)) const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))
const enabledCount = selectedDocumentsList.filter((doc) => doc.enabled).length const enabledCount = isSelectAllMode
const disabledCount = selectedDocumentsList.filter((doc) => !doc.enabled).length ? enabledFilter === 'disabled'
? 0
: pagination.total
: selectedDocumentsList.filter((doc) => doc.enabled).length
const disabledCount = isSelectAllMode
? enabledFilter === 'enabled'
? 0
: pagination.total
: selectedDocumentsList.filter((doc) => !doc.enabled).length
/**
* Handle right-click on a document row
* If right-clicking on an unselected document, select only that document
* If right-clicking on a selected document with multiple selections, keep all selections
*/
const handleDocumentContextMenu = useCallback( const handleDocumentContextMenu = useCallback(
(e: React.MouseEvent, doc: DocumentData) => { (e: React.MouseEvent, doc: DocumentData) => {
const isCurrentlySelected = selectedDocuments.has(doc.id) const isCurrentlySelected = selectedDocuments.has(doc.id)
@@ -1005,11 +995,13 @@ export function KnowledgeBase({
</div> </div>
</div> </div>
<div>
{knowledgeBase?.description && ( {knowledgeBase?.description && (
<p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'> <p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
{knowledgeBase.description} {knowledgeBase.description}
</p> </p>
)} )}
</div>
<div className='mt-[16px] flex items-center gap-[8px]'> <div className='mt-[16px] flex items-center gap-[8px]'>
<span className='text-[14px] text-[var(--text-muted)]'> <span className='text-[14px] text-[var(--text-muted)]'>
@@ -1052,6 +1044,60 @@ export function KnowledgeBase({
))} ))}
</div> </div>
<div className='flex items-center gap-[8px]'>
<Popover open={isFilterPopoverOpen} onOpenChange={setIsFilterPopoverOpen}>
<PopoverTrigger asChild>
<Button variant='default' className='h-[32px] rounded-[6px]'>
{enabledFilter === 'all'
? 'All'
: enabledFilter === 'enabled'
? 'Enabled'
: 'Disabled'}
<ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
</Button>
</PopoverTrigger>
<PopoverContent align='end' side='bottom' sideOffset={4}>
<div className='flex flex-col gap-[2px]'>
<PopoverItem
active={enabledFilter === 'all'}
onClick={() => {
setEnabledFilter('all')
setIsFilterPopoverOpen(false)
setCurrentPage(1)
setSelectedDocuments(new Set())
setIsSelectAllMode(false)
}}
>
All
</PopoverItem>
<PopoverItem
active={enabledFilter === 'enabled'}
onClick={() => {
setEnabledFilter('enabled')
setIsFilterPopoverOpen(false)
setCurrentPage(1)
setSelectedDocuments(new Set())
setIsSelectAllMode(false)
}}
>
Enabled
</PopoverItem>
<PopoverItem
active={enabledFilter === 'disabled'}
onClick={() => {
setEnabledFilter('disabled')
setIsFilterPopoverOpen(false)
setCurrentPage(1)
setSelectedDocuments(new Set())
setIsSelectAllMode(false)
}}
>
Disabled
</PopoverItem>
</div>
</PopoverContent>
</Popover>
<Tooltip.Root> <Tooltip.Root>
<Tooltip.Trigger asChild> <Tooltip.Trigger asChild>
<Button <Button
@@ -1068,6 +1114,7 @@ export function KnowledgeBase({
)} )}
</Tooltip.Root> </Tooltip.Root>
</div> </div>
</div>
{error && !isLoadingKnowledgeBase && ( {error && !isLoadingKnowledgeBase && (
<div className='mt-[24px]'> <div className='mt-[24px]'>
@@ -1089,11 +1136,17 @@ export function KnowledgeBase({
<div className='mt-[10px] flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'> <div className='mt-[10px] flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
<div className='text-center'> <div className='text-center'>
<p className='font-medium text-[var(--text-secondary)] text-sm'> <p className='font-medium text-[var(--text-secondary)] text-sm'>
{searchQuery ? 'No documents found' : 'No documents yet'} {searchQuery
? 'No documents found'
: enabledFilter !== 'all'
? 'Nothing matches your filter'
: 'No documents yet'}
</p> </p>
<p className='mt-1 text-[var(--text-muted)] text-xs'> <p className='mt-1 text-[var(--text-muted)] text-xs'>
{searchQuery {searchQuery
? 'Try a different search term' ? 'Try a different search term'
: enabledFilter !== 'all'
? 'Try changing the filter'
: userPermissions.canEdit === true : userPermissions.canEdit === true
? 'Add documents to get started' ? 'Add documents to get started'
: 'Documents will appear here once added'} : 'Documents will appear here once added'}
@@ -1120,7 +1173,7 @@ export function KnowledgeBase({
{renderSortableHeader('tokenCount', 'Tokens', 'hidden w-[8%] lg:table-cell')} {renderSortableHeader('tokenCount', 'Tokens', 'hidden w-[8%] lg:table-cell')}
{renderSortableHeader('chunkCount', 'Chunks', 'w-[8%]')} {renderSortableHeader('chunkCount', 'Chunks', 'w-[8%]')}
{renderSortableHeader('uploadedAt', 'Uploaded', 'w-[11%]')} {renderSortableHeader('uploadedAt', 'Uploaded', 'w-[11%]')}
{renderSortableHeader('processingStatus', 'Status', 'w-[10%]')} {renderSortableHeader('enabled', 'Status', 'w-[10%]')}
<TableHead className='w-[12%] px-[12px] py-[8px] text-[12px] text-[var(--text-secondary)]'> <TableHead className='w-[12%] px-[12px] py-[8px] text-[12px] text-[var(--text-secondary)]'>
Tags Tags
</TableHead> </TableHead>
@@ -1164,7 +1217,10 @@ export function KnowledgeBase({
</TableCell> </TableCell>
<TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'> <TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'>
<div className='flex min-w-0 items-center gap-[8px]'> <div className='flex min-w-0 items-center gap-[8px]'>
{getFileIcon(doc.mimeType, doc.filename)} {(() => {
const IconComponent = getDocumentIcon(doc.mimeType, doc.filename)
return <IconComponent className='h-6 w-5 flex-shrink-0' />
})()}
<Tooltip.Root> <Tooltip.Root>
<Tooltip.Trigger asChild> <Tooltip.Trigger asChild>
<span <span
@@ -1508,6 +1564,14 @@ export function KnowledgeBase({
enabledCount={enabledCount} enabledCount={enabledCount}
disabledCount={disabledCount} disabledCount={disabledCount}
isLoading={isBulkOperating} isLoading={isBulkOperating}
totalCount={pagination.total}
isAllPageSelected={isAllSelected}
isAllSelected={isSelectAllMode}
onSelectAll={() => setIsSelectAllMode(true)}
onClearSelectAll={() => {
setIsSelectAllMode(false)
setSelectedDocuments(new Set())
}}
/> />
<DocumentContextMenu <DocumentContextMenu

View File

@@ -13,6 +13,11 @@ interface ActionBarProps {
disabledCount?: number disabledCount?: number
isLoading?: boolean isLoading?: boolean
className?: string className?: string
totalCount?: number
isAllPageSelected?: boolean
isAllSelected?: boolean
onSelectAll?: () => void
onClearSelectAll?: () => void
} }
export function ActionBar({ export function ActionBar({
@@ -24,14 +29,21 @@ export function ActionBar({
disabledCount = 0, disabledCount = 0,
isLoading = false, isLoading = false,
className, className,
totalCount = 0,
isAllPageSelected = false,
isAllSelected = false,
onSelectAll,
onClearSelectAll,
}: ActionBarProps) { }: ActionBarProps) {
const userPermissions = useUserPermissionsContext() const userPermissions = useUserPermissionsContext()
if (selectedCount === 0) return null if (selectedCount === 0 && !isAllSelected) return null
const canEdit = userPermissions.canEdit const canEdit = userPermissions.canEdit
const showEnableButton = disabledCount > 0 && onEnable && canEdit const showEnableButton = disabledCount > 0 && onEnable && canEdit
const showDisableButton = enabledCount > 0 && onDisable && canEdit const showDisableButton = enabledCount > 0 && onDisable && canEdit
const showSelectAllOption =
isAllPageSelected && !isAllSelected && totalCount > selectedCount && onSelectAll
return ( return (
<motion.div <motion.div
@@ -43,7 +55,31 @@ export function ActionBar({
> >
<div className='flex items-center gap-[8px] rounded-[10px] border border-[var(--border)] bg-[var(--surface-2)] px-[8px] py-[6px]'> <div className='flex items-center gap-[8px] rounded-[10px] border border-[var(--border)] bg-[var(--surface-2)] px-[8px] py-[6px]'>
<span className='px-[4px] text-[13px] text-[var(--text-secondary)]'> <span className='px-[4px] text-[13px] text-[var(--text-secondary)]'>
{selectedCount} selected {isAllSelected ? totalCount : selectedCount} selected
{showSelectAllOption && (
<>
{' · '}
<button
type='button'
onClick={onSelectAll}
className='text-[var(--brand-primary)] hover:underline'
>
Select all
</button>
</>
)}
{isAllSelected && onClearSelectAll && (
<>
{' · '}
<button
type='button'
onClick={onClearSelectAll}
className='text-[var(--brand-primary)] hover:underline'
>
Clear
</button>
</>
)}
</span> </span>
<div className='flex items-center gap-[5px]'> <div className='flex items-center gap-[5px]'>

View File

@@ -123,7 +123,11 @@ export function RenameDocumentModal({
> >
Cancel Cancel
</Button> </Button>
<Button variant='tertiary' type='submit' disabled={isSubmitting || !name?.trim()}> <Button
variant='tertiary'
type='submit'
disabled={isSubmitting || !name?.trim() || name.trim() === initialName}
>
{isSubmitting ? 'Renaming...' : 'Rename'} {isSubmitting ? 'Renaming...' : 'Rename'}
</Button> </Button>
</div> </div>

View File

@@ -3,6 +3,7 @@
import { useCallback, useState } from 'react' import { useCallback, useState } from 'react'
import { useParams, useRouter } from 'next/navigation' import { useParams, useRouter } from 'next/navigation'
import { Badge, DocumentAttachment, Tooltip } from '@/components/emcn' import { Badge, DocumentAttachment, Tooltip } from '@/components/emcn'
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components' import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks' import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
@@ -21,55 +22,6 @@ interface BaseCardProps {
onDelete?: (id: string) => Promise<void> onDelete?: (id: string) => Promise<void>
} }
/**
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
*/
/**
 * Formats a date string to relative time (e.g., "2h ago", "3d ago").
 * Anything under a minute reads as "just now".
 */
function formatRelativeTime(dateString: string): string {
  const ageSeconds = Math.floor((new Date().getTime() - new Date(dateString).getTime()) / 1000)

  if (ageSeconds < 60) return 'just now'
  if (ageSeconds < 3600) return `${Math.floor(ageSeconds / 60)}m ago`
  if (ageSeconds < 86400) return `${Math.floor(ageSeconds / 3600)}h ago`
  if (ageSeconds < 604800) return `${Math.floor(ageSeconds / 86400)}d ago`
  if (ageSeconds < 2592000) return `${Math.floor(ageSeconds / 604800)}w ago`
  if (ageSeconds < 31536000) return `${Math.floor(ageSeconds / 2592000)}mo ago`
  return `${Math.floor(ageSeconds / 31536000)}y ago`
}
/**
* Formats a date string to absolute format for tooltip display
*/
/**
 * Formats a date string to an absolute en-US timestamp for tooltip display
 * (e.g., "Jan 23, 2026, 01:02 PM").
 */
function formatAbsoluteDate(dateString: string): string {
  const absoluteFormat: Intl.DateTimeFormatOptions = {
    month: 'short',
    day: 'numeric',
    year: 'numeric',
    hour: '2-digit',
    minute: '2-digit',
  }
  const date = new Date(dateString)
  return date.toLocaleDateString('en-US', absoluteFormat)
}
/** /**
* Skeleton placeholder for a knowledge base card * Skeleton placeholder for a knowledge base card
*/ */

View File

@@ -344,13 +344,12 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
<Textarea <Textarea
id='description' id='description'
placeholder='Describe this knowledge base (optional)' placeholder='Describe this knowledge base (optional)'
rows={3} rows={4}
{...register('description')} {...register('description')}
className={cn(errors.description && 'border-[var(--text-error)]')} className={cn(errors.description && 'border-[var(--text-error)]')}
/> />
</div> </div>
<div className='space-y-[12px] rounded-[6px] bg-[var(--surface-5)] px-[12px] py-[14px]'>
<div className='grid grid-cols-2 gap-[12px]'> <div className='grid grid-cols-2 gap-[12px]'>
<div className='flex flex-col gap-[8px]'> <div className='flex flex-col gap-[8px]'>
<Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label> <Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
@@ -390,7 +389,6 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
data-form-type='other' data-form-type='other'
name='overlap-size' name='overlap-size'
/> />
</div>
<p className='text-[11px] text-[var(--text-muted)]'> <p className='text-[11px] text-[var(--text-muted)]'>
1 token 4 characters. Max chunk size and overlap are in tokens. 1 token 4 characters. Max chunk size and overlap are in tokens.
</p> </p>

View File

@@ -59,7 +59,7 @@ export function EditKnowledgeBaseModal({
handleSubmit, handleSubmit,
reset, reset,
watch, watch,
formState: { errors }, formState: { errors, isDirty },
} = useForm<FormValues>({ } = useForm<FormValues>({
resolver: zodResolver(FormSchema), resolver: zodResolver(FormSchema),
defaultValues: { defaultValues: {
@@ -127,7 +127,7 @@ export function EditKnowledgeBaseModal({
<Textarea <Textarea
id='description' id='description'
placeholder='Describe this knowledge base (optional)' placeholder='Describe this knowledge base (optional)'
rows={3} rows={4}
{...register('description')} {...register('description')}
className={cn(errors.description && 'border-[var(--text-error)]')} className={cn(errors.description && 'border-[var(--text-error)]')}
/> />
@@ -161,7 +161,7 @@ export function EditKnowledgeBaseModal({
<Button <Button
variant='tertiary' variant='tertiary'
type='submit' type='submit'
disabled={isSubmitting || !nameValue?.trim()} disabled={isSubmitting || !nameValue?.trim() || !isDirty}
> >
{isSubmitting ? 'Saving...' : 'Save'} {isSubmitting ? 'Saving...' : 'Save'}
</Button> </Button>

View File

@@ -2,8 +2,7 @@
import type React from 'react' import type React from 'react'
import { memo, useCallback, useMemo, useRef, useState } from 'react' import { memo, useCallback, useMemo, useRef, useState } from 'react'
import clsx from 'clsx' import { ArrowDown, ArrowUp, Check, Clipboard, Search, X } from 'lucide-react'
import { ArrowDown, ArrowUp, X } from 'lucide-react'
import { createPortal } from 'react-dom' import { createPortal } from 'react-dom'
import { import {
Button, Button,
@@ -15,9 +14,11 @@ import {
PopoverContent, PopoverContent,
PopoverDivider, PopoverDivider,
PopoverItem, PopoverItem,
Tooltip,
} from '@/components/emcn' } from '@/components/emcn'
import { WorkflowIcon } from '@/components/icons' import { WorkflowIcon } from '@/components/icons'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config' import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config'
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config' import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
import { getBlock, getBlockByToolName } from '@/blocks' import { getBlock, getBlockByToolName } from '@/blocks'
@@ -26,7 +27,6 @@ import type { TraceSpan } from '@/stores/logs/filters/types'
interface TraceSpansProps { interface TraceSpansProps {
traceSpans?: TraceSpan[] traceSpans?: TraceSpan[]
totalDuration?: number
} }
/** /**
@@ -100,6 +100,20 @@ function parseTime(value?: string | number | null): number {
return Number.isFinite(ms) ? ms : 0 return Number.isFinite(ms) ? ms : 0
} }
/**
* Checks if a span or any of its descendants has an error
*/
function hasErrorInTree(span: TraceSpan): boolean {
if (span.status === 'error') return true
if (span.children && span.children.length > 0) {
return span.children.some((child) => hasErrorInTree(child))
}
if (span.toolCalls && span.toolCalls.length > 0) {
return span.toolCalls.some((tc) => tc.error)
}
return false
}
/** /**
* Normalizes and sorts trace spans recursively. * Normalizes and sorts trace spans recursively.
* Merges children from both span.children and span.output.childTraceSpans, * Merges children from both span.children and span.output.childTraceSpans,
@@ -142,14 +156,6 @@ function normalizeAndSortSpans(spans: TraceSpan[]): TraceSpan[] {
const DEFAULT_BLOCK_COLOR = '#6b7280' const DEFAULT_BLOCK_COLOR = '#6b7280'
/**
* Formats duration in ms
*/
function formatDuration(ms: number): string {
if (ms < 1000) return `${ms}ms`
return `${(ms / 1000).toFixed(2)}s`
}
/** /**
* Gets icon and color for a span type using block config * Gets icon and color for a span type using block config
*/ */
@@ -230,7 +236,7 @@ function ProgressBar({
}, [span, childSpans, workflowStartTime, totalDuration]) }, [span, childSpans, workflowStartTime, totalDuration])
return ( return (
<div className='relative mb-[8px] h-[5px] w-full overflow-hidden rounded-[18px] bg-[var(--divider)]'> <div className='relative h-[5px] w-full overflow-hidden rounded-[18px] bg-[var(--divider)]'>
{segments.map((segment, index) => ( {segments.map((segment, index) => (
<div <div
key={index} key={index}
@@ -246,143 +252,6 @@ function ProgressBar({
) )
} }
interface ExpandableRowHeaderProps {
name: string
duration: number
isError: boolean
isExpanded: boolean
hasChildren: boolean
showIcon: boolean
icon: React.ComponentType<{ className?: string }> | null
bgColor: string
onToggle: () => void
}
/**
* Reusable expandable row header with chevron, icon, name, and duration
*/
function ExpandableRowHeader({
name,
duration,
isError,
isExpanded,
hasChildren,
showIcon,
icon: Icon,
bgColor,
onToggle,
}: ExpandableRowHeaderProps) {
return (
<div
className={clsx('group flex items-center justify-between', hasChildren && 'cursor-pointer')}
onClick={hasChildren ? onToggle : undefined}
onKeyDown={
hasChildren
? (e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault()
onToggle()
}
}
: undefined
}
role={hasChildren ? 'button' : undefined}
tabIndex={hasChildren ? 0 : undefined}
aria-expanded={hasChildren ? isExpanded : undefined}
aria-label={hasChildren ? (isExpanded ? 'Collapse' : 'Expand') : undefined}
>
<div className='flex items-center gap-[8px]'>
{hasChildren && (
<ChevronDown
className='h-[10px] w-[10px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]'
style={{ transform: isExpanded ? 'rotate(0deg)' : 'rotate(-90deg)' }}
/>
)}
{showIcon && (
<div
className='relative flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
style={{ background: bgColor }}
>
{Icon && <Icon className={clsx('text-white', '!h-[9px] !w-[9px]')} />}
</div>
)}
<span
className='font-medium text-[12px]'
style={{ color: isError ? 'var(--text-error)' : 'var(--text-secondary)' }}
>
{name}
</span>
</div>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
{formatDuration(duration)}
</span>
</div>
)
}
interface SpanContentProps {
span: TraceSpan
spanId: string
isError: boolean
workflowStartTime: number
totalDuration: number
expandedSections: Set<string>
onToggle: (section: string) => void
}
/**
* Reusable component for rendering span content (progress bar + input/output sections)
*/
function SpanContent({
span,
spanId,
isError,
workflowStartTime,
totalDuration,
expandedSections,
onToggle,
}: SpanContentProps) {
const hasInput = Boolean(span.input)
const hasOutput = Boolean(span.output)
return (
<>
<ProgressBar
span={span}
childSpans={span.children}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
/>
{hasInput && (
<InputOutputSection
label='Input'
data={span.input}
isError={false}
spanId={spanId}
sectionType='input'
expandedSections={expandedSections}
onToggle={onToggle}
/>
)}
{hasInput && hasOutput && <div className='border-[var(--border)] border-t border-dashed' />}
{hasOutput && (
<InputOutputSection
label={isError ? 'Error' : 'Output'}
data={span.output}
isError={isError}
spanId={spanId}
sectionType='output'
expandedSections={expandedSections}
onToggle={onToggle}
/>
)}
</>
)
}
/** /**
* Renders input/output section with collapsible content, context menu, and search * Renders input/output section with collapsible content, context menu, and search
*/ */
@@ -406,16 +275,14 @@ function InputOutputSection({
const sectionKey = `${spanId}-${sectionType}` const sectionKey = `${spanId}-${sectionType}`
const isExpanded = expandedSections.has(sectionKey) const isExpanded = expandedSections.has(sectionKey)
const contentRef = useRef<HTMLDivElement>(null) const contentRef = useRef<HTMLDivElement>(null)
const menuRef = useRef<HTMLDivElement>(null)
// Context menu state // Context menu state
const [isContextMenuOpen, setIsContextMenuOpen] = useState(false) const [isContextMenuOpen, setIsContextMenuOpen] = useState(false)
const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 }) const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 })
const [copied, setCopied] = useState(false)
// Code viewer features // Code viewer features
const { const {
wrapText,
toggleWrapText,
isSearchActive, isSearchActive,
searchQuery, searchQuery,
setSearchQuery, setSearchQuery,
@@ -447,6 +314,8 @@ function InputOutputSection({
const handleCopy = useCallback(() => { const handleCopy = useCallback(() => {
navigator.clipboard.writeText(jsonString) navigator.clipboard.writeText(jsonString)
setCopied(true)
setTimeout(() => setCopied(false), 1500)
closeContextMenu() closeContextMenu()
}, [jsonString, closeContextMenu]) }, [jsonString, closeContextMenu])
@@ -455,13 +324,8 @@ function InputOutputSection({
closeContextMenu() closeContextMenu()
}, [activateSearch, closeContextMenu]) }, [activateSearch, closeContextMenu])
const handleToggleWrap = useCallback(() => {
toggleWrapText()
closeContextMenu()
}, [toggleWrapText, closeContextMenu])
return ( return (
<div className='relative flex min-w-0 flex-col gap-[8px] overflow-hidden'> <div className='relative flex min-w-0 flex-col gap-[6px] overflow-hidden'>
<div <div
className='group flex cursor-pointer items-center justify-between' className='group flex cursor-pointer items-center justify-between'
onClick={() => onToggle(sectionKey)} onClick={() => onToggle(sectionKey)}
@@ -477,7 +341,7 @@ function InputOutputSection({
aria-label={`${isExpanded ? 'Collapse' : 'Expand'} ${label.toLowerCase()}`} aria-label={`${isExpanded ? 'Collapse' : 'Expand'} ${label.toLowerCase()}`}
> >
<span <span
className={clsx( className={cn(
'font-medium text-[12px] transition-colors', 'font-medium text-[12px] transition-colors',
isError isError
? 'text-[var(--text-error)]' ? 'text-[var(--text-error)]'
@@ -487,9 +351,7 @@ function InputOutputSection({
{label} {label}
</span> </span>
<ChevronDown <ChevronDown
className={clsx( className='h-[8px] w-[8px] text-[var(--text-tertiary)] transition-colors transition-transform group-hover:text-[var(--text-primary)]'
'h-[10px] w-[10px] text-[var(--text-tertiary)] transition-colors transition-transform group-hover:text-[var(--text-primary)]'
)}
style={{ style={{
transform: isExpanded ? 'rotate(180deg)' : 'rotate(0deg)', transform: isExpanded ? 'rotate(180deg)' : 'rotate(0deg)',
}} }}
@@ -497,16 +359,57 @@ function InputOutputSection({
</div> </div>
{isExpanded && ( {isExpanded && (
<> <>
<div ref={contentRef} onContextMenu={handleContextMenu}> <div ref={contentRef} onContextMenu={handleContextMenu} className='relative'>
<Code.Viewer <Code.Viewer
code={jsonString} code={jsonString}
language='json' language='json'
className='!bg-[var(--surface-3)] max-h-[300px] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]' className='!bg-[var(--surface-4)] dark:!bg-[var(--surface-3)] max-h-[300px] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]'
wrapText={wrapText} wrapText
searchQuery={isSearchActive ? searchQuery : undefined} searchQuery={isSearchActive ? searchQuery : undefined}
currentMatchIndex={currentMatchIndex} currentMatchIndex={currentMatchIndex}
onMatchCountChange={handleMatchCountChange} onMatchCountChange={handleMatchCountChange}
/> />
{/* Glass action buttons overlay */}
{!isSearchActive && (
<div className='absolute top-[7px] right-[6px] z-10 flex gap-[4px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='default'
onClick={(e) => {
e.stopPropagation()
handleCopy()
}}
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
{copied ? (
<Check className='h-[10px] w-[10px] text-[var(--text-success)]' />
) : (
<Clipboard className='h-[10px] w-[10px]' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{copied ? 'Copied' : 'Copy'}</Tooltip.Content>
</Tooltip.Root>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='default'
onClick={(e) => {
e.stopPropagation()
activateSearch()
}}
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
<Search className='h-[10px] w-[10px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>Search</Tooltip.Content>
</Tooltip.Root>
</div>
)}
</div> </div>
{/* Search Overlay */} {/* Search Overlay */}
@@ -579,13 +482,10 @@ function InputOutputSection({
height: '1px', height: '1px',
}} }}
/> />
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}> <PopoverContent align='start' side='bottom' sideOffset={4}>
<PopoverItem onClick={handleCopy}>Copy</PopoverItem> <PopoverItem onClick={handleCopy}>Copy</PopoverItem>
<PopoverDivider /> <PopoverDivider />
<PopoverItem onClick={handleSearch}>Search</PopoverItem> <PopoverItem onClick={handleSearch}>Search</PopoverItem>
<PopoverItem showCheck={wrapText} onClick={handleToggleWrap}>
Wrap Text
</PopoverItem>
</PopoverContent> </PopoverContent>
</Popover>, </Popover>,
document.body document.body
@@ -596,136 +496,51 @@ function InputOutputSection({
) )
} }
interface NestedBlockItemProps { interface TraceSpanNodeProps {
span: TraceSpan span: TraceSpan
parentId: string workflowStartTime: number
index: number totalDuration: number
depth: number
expandedNodes: Set<string>
expandedSections: Set<string> expandedSections: Set<string>
onToggle: (section: string) => void onToggleNode: (nodeId: string) => void
workflowStartTime: number onToggleSection: (section: string) => void
totalDuration: number
expandedChildren: Set<string>
onToggleChildren: (spanId: string) => void
} }
/** /**
* Recursive component for rendering nested blocks at any depth * Recursive tree node component for rendering trace spans
*/ */
function NestedBlockItem({ const TraceSpanNode = memo(function TraceSpanNode({
span, span,
parentId, workflowStartTime,
index, totalDuration,
depth,
expandedNodes,
expandedSections, expandedSections,
onToggle, onToggleNode,
workflowStartTime, onToggleSection,
totalDuration, }: TraceSpanNodeProps): React.ReactNode {
expandedChildren,
onToggleChildren,
}: NestedBlockItemProps): React.ReactNode {
const spanId = span.id || `${parentId}-nested-${index}`
const isError = span.status === 'error'
const { icon: SpanIcon, bgColor } = getBlockIconAndColor(span.type, span.name)
const hasChildren = Boolean(span.children && span.children.length > 0)
const isChildrenExpanded = expandedChildren.has(spanId)
return (
<div className='flex min-w-0 flex-col gap-[8px] overflow-hidden'>
<ExpandableRowHeader
name={span.name}
duration={span.duration || 0}
isError={isError}
isExpanded={isChildrenExpanded}
hasChildren={hasChildren}
showIcon={!isIterationType(span.type)}
icon={SpanIcon}
bgColor={bgColor}
onToggle={() => onToggleChildren(spanId)}
/>
<SpanContent
span={span}
spanId={spanId}
isError={isError}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedSections={expandedSections}
onToggle={onToggle}
/>
{/* Nested children */}
{hasChildren && isChildrenExpanded && (
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'>
{span.children!.map((child, childIndex) => (
<NestedBlockItem
key={child.id || `${spanId}-child-${childIndex}`}
span={child}
parentId={spanId}
index={childIndex}
expandedSections={expandedSections}
onToggle={onToggle}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedChildren={expandedChildren}
onToggleChildren={onToggleChildren}
/>
))}
</div>
)}
</div>
)
}
interface TraceSpanItemProps {
span: TraceSpan
totalDuration: number
workflowStartTime: number
isFirstSpan?: boolean
}
/**
* Individual trace span card component.
* Memoized to prevent re-renders when sibling spans change.
*/
const TraceSpanItem = memo(function TraceSpanItem({
span,
totalDuration,
workflowStartTime,
isFirstSpan = false,
}: TraceSpanItemProps): React.ReactNode {
const [expandedSections, setExpandedSections] = useState<Set<string>>(new Set())
const [expandedChildren, setExpandedChildren] = useState<Set<string>>(new Set())
const [isCardExpanded, setIsCardExpanded] = useState(false)
const toggleSet = useSetToggle()
const spanId = span.id || `span-${span.name}-${span.startTime}` const spanId = span.id || `span-${span.name}-${span.startTime}`
const spanStartTime = new Date(span.startTime).getTime() const spanStartTime = new Date(span.startTime).getTime()
const spanEndTime = new Date(span.endTime).getTime() const spanEndTime = new Date(span.endTime).getTime()
const duration = span.duration || spanEndTime - spanStartTime const duration = span.duration || spanEndTime - spanStartTime
const hasChildren = Boolean(span.children && span.children.length > 0) const isDirectError = span.status === 'error'
const hasToolCalls = Boolean(span.toolCalls && span.toolCalls.length > 0) const hasNestedError = hasErrorInTree(span)
const isError = span.status === 'error' const showErrorStyle = isDirectError || hasNestedError
const inlineChildTypes = new Set([ const { icon: BlockIcon, bgColor } = getBlockIconAndColor(span.type, span.name)
'tool',
'model',
'loop-iteration',
'parallel-iteration',
'workflow',
])
// For workflow-in-workflow blocks, all children should be rendered inline/nested // Root workflow execution is always expanded and has no toggle
const isWorkflowBlock = span.type?.toLowerCase().includes('workflow') const isRootWorkflow = depth === 0
const inlineChildren = isWorkflowBlock
? span.children || []
: span.children?.filter((child) => inlineChildTypes.has(child.type?.toLowerCase() || '')) || []
const otherChildren = isWorkflowBlock
? []
: span.children?.filter((child) => !inlineChildTypes.has(child.type?.toLowerCase() || '')) || []
const toolCallSpans = useMemo(() => { // Build all children including tool calls
if (!hasToolCalls) return [] const allChildren = useMemo(() => {
return span.toolCalls!.map((toolCall, index) => { const children: TraceSpan[] = []
// Add tool calls as child spans
if (span.toolCalls && span.toolCalls.length > 0) {
span.toolCalls.forEach((toolCall, index) => {
const toolStartTime = toolCall.startTime const toolStartTime = toolCall.startTime
? new Date(toolCall.startTime).getTime() ? new Date(toolCall.startTime).getTime()
: spanStartTime : spanStartTime
@@ -733,7 +548,7 @@ const TraceSpanItem = memo(function TraceSpanItem({
? new Date(toolCall.endTime).getTime() ? new Date(toolCall.endTime).getTime()
: toolStartTime + (toolCall.duration || 0) : toolStartTime + (toolCall.duration || 0)
return { children.push({
id: `${spanId}-tool-${index}`, id: `${spanId}-tool-${index}`,
name: toolCall.name, name: toolCall.name,
type: 'tool', type: 'tool',
@@ -745,206 +560,165 @@ const TraceSpanItem = memo(function TraceSpanItem({
output: toolCall.error output: toolCall.error
? { error: toolCall.error, ...(toolCall.output || {}) } ? { error: toolCall.error, ...(toolCall.output || {}) }
: toolCall.output, : toolCall.output,
} as TraceSpan } as TraceSpan)
}) })
}, [hasToolCalls, span.toolCalls, spanId, spanStartTime]) }
const handleSectionToggle = useCallback( // Add regular children
(section: string) => toggleSet(setExpandedSections, section), if (span.children && span.children.length > 0) {
[toggleSet] children.push(...span.children)
) }
const handleChildrenToggle = useCallback( // Sort by start time
(childSpanId: string) => toggleSet(setExpandedChildren, childSpanId), return children.sort((a, b) => parseTime(a.startTime) - parseTime(b.startTime))
[toggleSet] }, [span, spanId, spanStartTime])
)
const { icon: BlockIcon, bgColor } = getBlockIconAndColor(span.type, span.name) const hasChildren = allChildren.length > 0
const isExpanded = isRootWorkflow || expandedNodes.has(spanId)
const isToggleable = !isRootWorkflow
// Check if this card has expandable inline content const hasInput = Boolean(span.input)
const hasInlineContent = const hasOutput = Boolean(span.output)
(isWorkflowBlock && inlineChildren.length > 0) ||
(!isWorkflowBlock && (toolCallSpans.length > 0 || inlineChildren.length > 0))
const isExpandable = !isFirstSpan && hasInlineContent // For progress bar - show child segments for workflow/iteration types
const lowerType = span.type?.toLowerCase() || ''
const showChildrenInProgressBar =
isIterationType(lowerType) || lowerType === 'workflow' || lowerType === 'workflow_input'
return ( return (
<> <div className='flex min-w-0 flex-col'>
<div className='flex min-w-0 flex-col gap-[8px] overflow-hidden rounded-[6px] bg-[var(--surface-1)] px-[10px] py-[8px]'> {/* Node Header Row */}
<ExpandableRowHeader <div
name={span.name} className={cn(
duration={duration} 'group flex items-center justify-between gap-[8px] py-[6px]',
isError={isError} isToggleable && 'cursor-pointer'
isExpanded={isCardExpanded} )}
hasChildren={isExpandable} onClick={isToggleable ? () => onToggleNode(spanId) : undefined}
showIcon={!isFirstSpan} onKeyDown={
icon={BlockIcon} isToggleable
bgColor={bgColor} ? (e) => {
onToggle={() => setIsCardExpanded((prev) => !prev)} if (e.key === 'Enter' || e.key === ' ') {
/> e.preventDefault()
onToggleNode(spanId)
<SpanContent }
span={span} }
spanId={spanId} : undefined
isError={isError} }
workflowStartTime={workflowStartTime} role={isToggleable ? 'button' : undefined}
totalDuration={totalDuration} tabIndex={isToggleable ? 0 : undefined}
expandedSections={expandedSections} aria-expanded={isToggleable ? isExpanded : undefined}
onToggle={handleSectionToggle} aria-label={isToggleable ? (isExpanded ? 'Collapse' : 'Expand') : undefined}
/> >
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
{/* For workflow blocks, keep children nested within the card (not as separate cards) */} {!isIterationType(span.type) && (
{!isFirstSpan && isWorkflowBlock && inlineChildren.length > 0 && isCardExpanded && ( <div
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'> className='relative flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
{inlineChildren.map((childSpan, index) => ( style={{ background: bgColor }}
<NestedBlockItem >
key={childSpan.id || `${spanId}-nested-${index}`} {BlockIcon && <BlockIcon className='h-[9px] w-[9px] text-white' />}
span={childSpan}
parentId={spanId}
index={index}
expandedSections={expandedSections}
onToggle={handleSectionToggle}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedChildren={expandedChildren}
onToggleChildren={handleChildrenToggle}
/>
))}
</div> </div>
)} )}
<span
{/* For non-workflow blocks, render inline children/tool calls */} className='min-w-0 max-w-[180px] truncate font-medium text-[12px]'
{!isFirstSpan && !isWorkflowBlock && isCardExpanded && ( style={{ color: showErrorStyle ? 'var(--text-error)' : 'var(--text-secondary)' }}
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'>
{[...toolCallSpans, ...inlineChildren].map((childSpan, index) => {
const childId = childSpan.id || `${spanId}-inline-${index}`
const childIsError = childSpan.status === 'error'
const childLowerType = childSpan.type?.toLowerCase() || ''
const hasNestedChildren = Boolean(childSpan.children && childSpan.children.length > 0)
const isNestedExpanded = expandedChildren.has(childId)
const showChildrenInProgressBar =
isIterationType(childLowerType) || childLowerType === 'workflow'
const { icon: ChildIcon, bgColor: childBgColor } = getBlockIconAndColor(
childSpan.type,
childSpan.name
)
return (
<div
key={`inline-${childId}`}
className='flex min-w-0 flex-col gap-[8px] overflow-hidden'
> >
<ExpandableRowHeader {span.name}
name={childSpan.name} </span>
duration={childSpan.duration || 0} {isToggleable && (
isError={childIsError} <ChevronDown
isExpanded={isNestedExpanded} className='h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-colors transition-transform duration-100 group-hover:text-[var(--text-primary)]'
hasChildren={hasNestedChildren} style={{
showIcon={!isIterationType(childSpan.type)} transform: `translateY(-0.25px) ${isExpanded ? 'rotate(0deg)' : 'rotate(-90deg)'}`,
icon={ChildIcon} }}
bgColor={childBgColor}
onToggle={() => handleChildrenToggle(childId)}
/> />
)}
</div>
<span className='flex-shrink-0 font-medium text-[12px] text-[var(--text-tertiary)]'>
{formatDuration(duration, { precision: 2 })}
</span>
</div>
{/* Expanded Content */}
{isExpanded && (
<div className='flex min-w-0 flex-col gap-[10px]'>
{/* Progress Bar */}
<ProgressBar <ProgressBar
span={childSpan} span={span}
childSpans={showChildrenInProgressBar ? childSpan.children : undefined} childSpans={showChildrenInProgressBar ? span.children : undefined}
workflowStartTime={workflowStartTime} workflowStartTime={workflowStartTime}
totalDuration={totalDuration} totalDuration={totalDuration}
/> />
{childSpan.input && ( {/* Input/Output Sections */}
{(hasInput || hasOutput) && (
<div className='flex min-w-0 flex-col gap-[6px] overflow-hidden py-[2px]'>
{hasInput && (
<InputOutputSection <InputOutputSection
label='Input' label='Input'
data={childSpan.input} data={span.input}
isError={false} isError={false}
spanId={childId} spanId={spanId}
sectionType='input' sectionType='input'
expandedSections={expandedSections} expandedSections={expandedSections}
onToggle={handleSectionToggle} onToggle={onToggleSection}
/> />
)} )}
{childSpan.input && childSpan.output && ( {hasInput && hasOutput && (
<div className='border-[var(--border)] border-t border-dashed' /> <div className='border-[var(--border)] border-t border-dashed' />
)} )}
{childSpan.output && ( {hasOutput && (
<InputOutputSection <InputOutputSection
label={childIsError ? 'Error' : 'Output'} label={isDirectError ? 'Error' : 'Output'}
data={childSpan.output} data={span.output}
isError={childIsError} isError={isDirectError}
spanId={childId} spanId={spanId}
sectionType='output' sectionType='output'
expandedSections={expandedSections} expandedSections={expandedSections}
onToggle={handleSectionToggle} onToggle={onToggleSection}
/> />
)} )}
</div>
)}
{/* Nested children */} {/* Nested Children */}
{showChildrenInProgressBar && hasNestedChildren && isNestedExpanded && ( {hasChildren && (
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'> <div className='flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[10px]'>
{childSpan.children!.map((nestedChild, nestedIndex) => ( {allChildren.map((child, index) => (
<NestedBlockItem <div key={child.id || `${spanId}-child-${index}`} className='pl-[6px]'>
key={nestedChild.id || `${childId}-nested-${nestedIndex}`} <TraceSpanNode
span={nestedChild} span={child}
parentId={childId} workflowStartTime={workflowStartTime}
index={nestedIndex} totalDuration={totalDuration}
depth={depth + 1}
expandedNodes={expandedNodes}
expandedSections={expandedSections} expandedSections={expandedSections}
onToggle={handleSectionToggle} onToggleNode={onToggleNode}
workflowStartTime={workflowStartTime} onToggleSection={onToggleSection}
totalDuration={totalDuration}
expandedChildren={expandedChildren}
onToggleChildren={handleChildrenToggle}
/> />
</div>
))} ))}
</div> </div>
)} )}
</div> </div>
)
})}
</div>
)} )}
</div> </div>
{/* For the first span (workflow execution), render all children as separate top-level cards */}
{isFirstSpan &&
hasChildren &&
span.children!.map((childSpan, index) => (
<TraceSpanItem
key={childSpan.id || `${spanId}-child-${index}`}
span={childSpan}
totalDuration={totalDuration}
workflowStartTime={workflowStartTime}
isFirstSpan={false}
/>
))}
{!isFirstSpan &&
otherChildren.map((childSpan, index) => (
<TraceSpanItem
key={childSpan.id || `${spanId}-other-${index}`}
span={childSpan}
totalDuration={totalDuration}
workflowStartTime={workflowStartTime}
isFirstSpan={false}
/>
))}
</>
) )
}) })
/** /**
* Displays workflow execution trace spans with nested structure. * Displays workflow execution trace spans with nested tree structure.
* Memoized to prevent re-renders when parent LogDetails updates. * Memoized to prevent re-renders when parent LogDetails updates.
*/ */
export const TraceSpans = memo(function TraceSpans({ export const TraceSpans = memo(function TraceSpans({ traceSpans }: TraceSpansProps) {
traceSpans, const [expandedNodes, setExpandedNodes] = useState<Set<string>>(() => new Set())
totalDuration = 0, const [expandedSections, setExpandedSections] = useState<Set<string>>(new Set())
}: TraceSpansProps) { const toggleSet = useSetToggle()
const { workflowStartTime, actualTotalDuration, normalizedSpans } = useMemo(() => { const { workflowStartTime, actualTotalDuration, normalizedSpans } = useMemo(() => {
if (!traceSpans || traceSpans.length === 0) { if (!traceSpans || traceSpans.length === 0) {
return { workflowStartTime: 0, actualTotalDuration: totalDuration, normalizedSpans: [] } return { workflowStartTime: 0, actualTotalDuration: 0, normalizedSpans: [] }
} }
let earliest = Number.POSITIVE_INFINITY let earliest = Number.POSITIVE_INFINITY
@@ -962,26 +736,37 @@ export const TraceSpans = memo(function TraceSpans({
actualTotalDuration: latest - earliest, actualTotalDuration: latest - earliest,
normalizedSpans: normalizeAndSortSpans(traceSpans), normalizedSpans: normalizeAndSortSpans(traceSpans),
} }
}, [traceSpans, totalDuration]) }, [traceSpans])
const handleToggleNode = useCallback(
(nodeId: string) => toggleSet(setExpandedNodes, nodeId),
[toggleSet]
)
const handleToggleSection = useCallback(
(section: string) => toggleSet(setExpandedSections, section),
[toggleSet]
)
if (!traceSpans || traceSpans.length === 0) { if (!traceSpans || traceSpans.length === 0) {
return <div className='text-[12px] text-[var(--text-secondary)]'>No trace data available</div> return <div className='text-[12px] text-[var(--text-secondary)]'>No trace data available</div>
} }
return ( return (
<div className='flex w-full min-w-0 flex-col gap-[6px] overflow-hidden rounded-[6px] bg-[var(--surface-2)] px-[10px] py-[8px]'> <div className='flex w-full min-w-0 flex-col overflow-hidden'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>Trace Span</span>
<div className='flex min-w-0 flex-col gap-[8px] overflow-hidden'>
{normalizedSpans.map((span, index) => ( {normalizedSpans.map((span, index) => (
<TraceSpanItem <TraceSpanNode
key={span.id || index} key={span.id || index}
span={span} span={span}
totalDuration={actualTotalDuration}
workflowStartTime={workflowStartTime} workflowStartTime={workflowStartTime}
isFirstSpan={index === 0} totalDuration={actualTotalDuration}
depth={0}
expandedNodes={expandedNodes}
expandedSections={expandedSections}
onToggleNode={handleToggleNode}
onToggleSection={handleToggleSection}
/> />
))} ))}
</div> </div>
</div>
) )
}) })

View File

@@ -1,10 +1,23 @@
'use client' 'use client'
import { memo, useEffect, useMemo, useRef, useState } from 'react' import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { ChevronUp, X } from 'lucide-react' import { ArrowDown, ArrowUp, Check, ChevronUp, Clipboard, Search, X } from 'lucide-react'
import { Button, Eye } from '@/components/emcn' import { createPortal } from 'react-dom'
import {
Button,
Code,
Eye,
Input,
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
Tooltip,
} from '@/components/emcn'
import { ScrollArea } from '@/components/ui/scroll-area' import { ScrollArea } from '@/components/ui/scroll-area'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants' import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import { cn } from '@/lib/core/utils/cn'
import { import {
ExecutionSnapshot, ExecutionSnapshot,
FileCards, FileCards,
@@ -17,11 +30,194 @@ import {
StatusBadge, StatusBadge,
TriggerBadge, TriggerBadge,
} from '@/app/workspace/[workspaceId]/logs/utils' } from '@/app/workspace/[workspaceId]/logs/utils'
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
import { usePermissionConfig } from '@/hooks/use-permission-config' import { usePermissionConfig } from '@/hooks/use-permission-config'
import { formatCost } from '@/providers/utils' import { formatCost } from '@/providers/utils'
import type { WorkflowLog } from '@/stores/logs/filters/types' import type { WorkflowLog } from '@/stores/logs/filters/types'
import { useLogDetailsUIStore } from '@/stores/logs/store' import { useLogDetailsUIStore } from '@/stores/logs/store'
/**
* Workflow Output section with code viewer, copy, search, and context menu functionality
*/
function WorkflowOutputSection({ output }: { output: Record<string, unknown> }) {
const contentRef = useRef<HTMLDivElement>(null)
const [copied, setCopied] = useState(false)
// Context menu state
const [isContextMenuOpen, setIsContextMenuOpen] = useState(false)
const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 })
const {
isSearchActive,
searchQuery,
setSearchQuery,
matchCount,
currentMatchIndex,
activateSearch,
closeSearch,
goToNextMatch,
goToPreviousMatch,
handleMatchCountChange,
searchInputRef,
} = useCodeViewerFeatures({ contentRef })
const jsonString = useMemo(() => JSON.stringify(output, null, 2), [output])
const handleContextMenu = useCallback((e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
setContextMenuPosition({ x: e.clientX, y: e.clientY })
setIsContextMenuOpen(true)
}, [])
const closeContextMenu = useCallback(() => {
setIsContextMenuOpen(false)
}, [])
const handleCopy = useCallback(() => {
navigator.clipboard.writeText(jsonString)
setCopied(true)
setTimeout(() => setCopied(false), 1500)
closeContextMenu()
}, [jsonString, closeContextMenu])
const handleSearch = useCallback(() => {
activateSearch()
closeContextMenu()
}, [activateSearch, closeContextMenu])
return (
<div className='relative flex min-w-0 flex-col overflow-hidden'>
<div ref={contentRef} onContextMenu={handleContextMenu} className='relative'>
<Code.Viewer
code={jsonString}
language='json'
className='!bg-[var(--surface-4)] dark:!bg-[var(--surface-3)] max-h-[300px] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]'
wrapText
searchQuery={isSearchActive ? searchQuery : undefined}
currentMatchIndex={currentMatchIndex}
onMatchCountChange={handleMatchCountChange}
/>
{/* Glass action buttons overlay */}
{!isSearchActive && (
<div className='absolute top-[7px] right-[6px] z-10 flex gap-[4px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='default'
onClick={(e) => {
e.stopPropagation()
handleCopy()
}}
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
{copied ? (
<Check className='h-[10px] w-[10px] text-[var(--text-success)]' />
) : (
<Clipboard className='h-[10px] w-[10px]' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{copied ? 'Copied' : 'Copy'}</Tooltip.Content>
</Tooltip.Root>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='default'
onClick={(e) => {
e.stopPropagation()
activateSearch()
}}
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
<Search className='h-[10px] w-[10px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>Search</Tooltip.Content>
</Tooltip.Root>
</div>
)}
</div>
{/* Search Overlay */}
{isSearchActive && (
<div
className='absolute top-0 right-0 z-30 flex h-[34px] items-center gap-[6px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-1)] px-[6px] shadow-sm'
onClick={(e) => e.stopPropagation()}
>
<Input
ref={searchInputRef}
type='text'
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
placeholder='Search...'
className='mr-[2px] h-[23px] w-[94px] text-[12px]'
/>
<span
className={cn(
'min-w-[45px] text-center text-[11px]',
matchCount > 0 ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]'
)}
>
{matchCount > 0 ? `${currentMatchIndex + 1}/${matchCount}` : '0/0'}
</span>
<Button
variant='ghost'
className='!p-1'
onClick={goToPreviousMatch}
disabled={matchCount === 0}
aria-label='Previous match'
>
<ArrowUp className='h-[12px] w-[12px]' />
</Button>
<Button
variant='ghost'
className='!p-1'
onClick={goToNextMatch}
disabled={matchCount === 0}
aria-label='Next match'
>
<ArrowDown className='h-[12px] w-[12px]' />
</Button>
<Button variant='ghost' className='!p-1' onClick={closeSearch} aria-label='Close search'>
<X className='h-[12px] w-[12px]' />
</Button>
</div>
)}
{/* Context Menu - rendered in portal to avoid transform/overflow clipping */}
{typeof document !== 'undefined' &&
createPortal(
<Popover
open={isContextMenuOpen}
onOpenChange={closeContextMenu}
variant='secondary'
size='sm'
colorScheme='inverted'
>
<PopoverAnchor
style={{
position: 'fixed',
left: `${contextMenuPosition.x}px`,
top: `${contextMenuPosition.y}px`,
width: '1px',
height: '1px',
}}
/>
<PopoverContent align='start' side='bottom' sideOffset={4}>
<PopoverItem onClick={handleCopy}>Copy</PopoverItem>
<PopoverDivider />
<PopoverItem onClick={handleSearch}>Search</PopoverItem>
</PopoverContent>
</Popover>,
document.body
)}
</div>
)
}
interface LogDetailsProps { interface LogDetailsProps {
/** The log to display details for */ /** The log to display details for */
log: WorkflowLog | null log: WorkflowLog | null
@@ -78,6 +274,18 @@ export const LogDetails = memo(function LogDetails({
return isWorkflowExecutionLog && log?.cost return isWorkflowExecutionLog && log?.cost
}, [log, isWorkflowExecutionLog]) }, [log, isWorkflowExecutionLog])
// Extract and clean the workflow final output (remove childTraceSpans for cleaner display)
const workflowOutput = useMemo(() => {
const executionData = log?.executionData as
| { finalOutput?: Record<string, unknown> }
| undefined
if (!executionData?.finalOutput) return null
const { childTraceSpans, ...cleanOutput } = executionData.finalOutput as {
childTraceSpans?: unknown
} & Record<string, unknown>
return cleanOutput
}, [log?.executionData])
useEffect(() => { useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => { const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Escape' && isOpen) { if (e.key === 'Escape' && isOpen) {
@@ -87,12 +295,12 @@ export const LogDetails = memo(function LogDetails({
if (isOpen) { if (isOpen) {
if (e.key === 'ArrowUp' && hasPrev && onNavigatePrev) { if (e.key === 'ArrowUp' && hasPrev && onNavigatePrev) {
e.preventDefault() e.preventDefault()
handleNavigate(onNavigatePrev) onNavigatePrev()
} }
if (e.key === 'ArrowDown' && hasNext && onNavigateNext) { if (e.key === 'ArrowDown' && hasNext && onNavigateNext) {
e.preventDefault() e.preventDefault()
handleNavigate(onNavigateNext) onNavigateNext()
} }
} }
} }
@@ -101,10 +309,6 @@ export const LogDetails = memo(function LogDetails({
return () => window.removeEventListener('keydown', handleKeyDown) return () => window.removeEventListener('keydown', handleKeyDown)
}, [isOpen, onClose, hasPrev, hasNext, onNavigatePrev, onNavigateNext]) }, [isOpen, onClose, hasPrev, hasNext, onNavigatePrev, onNavigateNext])
const handleNavigate = (navigateFunction: () => void) => {
navigateFunction()
}
const formattedTimestamp = useMemo( const formattedTimestamp = useMemo(
() => (log ? formatDate(log.createdAt) : null), () => (log ? formatDate(log.createdAt) : null),
[log?.createdAt] [log?.createdAt]
@@ -142,7 +346,7 @@ export const LogDetails = memo(function LogDetails({
<Button <Button
variant='ghost' variant='ghost'
className='!p-[4px]' className='!p-[4px]'
onClick={() => hasPrev && handleNavigate(onNavigatePrev!)} onClick={() => hasPrev && onNavigatePrev?.()}
disabled={!hasPrev} disabled={!hasPrev}
aria-label='Previous log' aria-label='Previous log'
> >
@@ -151,7 +355,7 @@ export const LogDetails = memo(function LogDetails({
<Button <Button
variant='ghost' variant='ghost'
className='!p-[4px]' className='!p-[4px]'
onClick={() => hasNext && handleNavigate(onNavigateNext!)} onClick={() => hasNext && onNavigateNext?.()}
disabled={!hasNext} disabled={!hasNext}
aria-label='Next log' aria-label='Next log'
> >
@@ -204,7 +408,7 @@ export const LogDetails = memo(function LogDetails({
{/* Execution ID */} {/* Execution ID */}
{log.executionId && ( {log.executionId && (
<div className='flex flex-col gap-[6px] rounded-[6px] bg-[var(--surface-2)] px-[10px] py-[8px]'> <div className='flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px]'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'> <span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
Execution ID Execution ID
</span> </span>
@@ -215,7 +419,7 @@ export const LogDetails = memo(function LogDetails({
)} )}
{/* Details Section */} {/* Details Section */}
<div className='flex min-w-0 flex-col overflow-hidden'> <div className='-my-[4px] flex min-w-0 flex-col overflow-hidden'>
{/* Level */} {/* Level */}
<div className='flex h-[48px] items-center justify-between border-[var(--border)] border-b p-[8px]'> <div className='flex h-[48px] items-center justify-between border-[var(--border)] border-b p-[8px]'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'> <span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
@@ -267,19 +471,35 @@ export const LogDetails = memo(function LogDetails({
{/* Workflow State */} {/* Workflow State */}
{isWorkflowExecutionLog && log.executionId && !permissionConfig.hideTraceSpans && ( {isWorkflowExecutionLog && log.executionId && !permissionConfig.hideTraceSpans && (
<div className='flex flex-col gap-[6px] rounded-[6px] bg-[var(--surface-2)] px-[10px] py-[8px]'> <div className='-mt-[8px] flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px]'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'> <span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
Workflow State Workflow State
</span> </span>
<button <Button
variant='active'
onClick={() => setIsExecutionSnapshotOpen(true)} onClick={() => setIsExecutionSnapshotOpen(true)}
className='flex items-center justify-between rounded-[6px] bg-[var(--surface-1)] px-[10px] py-[8px] transition-colors hover:bg-[var(--surface-4)]' className='flex w-full items-center justify-between px-[10px] py-[6px]'
> >
<span className='font-medium text-[12px] text-[var(--text-secondary)]'> <span className='font-medium text-[12px]'>View Snapshot</span>
View Snapshot <Eye className='h-[14px] w-[14px]' />
</Button>
</div>
)}
{/* Workflow Output */}
{isWorkflowExecutionLog && workflowOutput && !permissionConfig.hideTraceSpans && (
<div className='mt-[4px] flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px] dark:bg-transparent'>
<span
className={cn(
'font-medium text-[12px]',
workflowOutput.error
? 'text-[var(--text-error)]'
: 'text-[var(--text-tertiary)]'
)}
>
Workflow Output
</span> </span>
<Eye className='h-[14px] w-[14px] text-[var(--text-subtle)]' /> <WorkflowOutputSection output={workflowOutput} />
</button>
</div> </div>
)} )}
@@ -287,10 +507,12 @@ export const LogDetails = memo(function LogDetails({
{isWorkflowExecutionLog && {isWorkflowExecutionLog &&
log.executionData?.traceSpans && log.executionData?.traceSpans &&
!permissionConfig.hideTraceSpans && ( !permissionConfig.hideTraceSpans && (
<TraceSpans <div className='mt-[4px] flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px] dark:bg-transparent'>
traceSpans={log.executionData.traceSpans} <span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
totalDuration={log.executionData.totalDuration} Trace Span
/> </span>
<TraceSpans traceSpans={log.executionData.traceSpans} />
</div>
)} )}
{/* Files */} {/* Files */}

View File

@@ -128,7 +128,30 @@ export const ActionBar = memo(
'dark:border-transparent dark:bg-[var(--surface-4)]' 'dark:border-transparent dark:bg-[var(--surface-4)]'
)} )}
> >
{!isNoteBlock && !isSubflowBlock && ( {!isNoteBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}
{isSubflowBlock && (
<Tooltip.Root> <Tooltip.Root>
<Tooltip.Trigger asChild> <Tooltip.Trigger asChild>
<Button <Button
@@ -222,29 +245,6 @@ export const ActionBar = memo(
</Tooltip.Root> </Tooltip.Root>
)} )}
{isSubflowBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}
<Tooltip.Root> <Tooltip.Root>
<Tooltip.Trigger asChild> <Tooltip.Trigger asChild>
<Button <Button

View File

@@ -1425,10 +1425,7 @@ function RunSkipButtons({
setIsProcessing(true) setIsProcessing(true)
setButtonsHidden(true) setButtonsHidden(true)
try { try {
// Add to auto-allowed list - this also executes all pending integration tools of this type
await addAutoAllowedTool(toolCall.name) await addAutoAllowedTool(toolCall.name)
// For client tools with interrupts (not integration tools), we still need to call handleRun
// since executeIntegrationTool only works for server-side tools
if (!isIntegrationTool(toolCall.name)) { if (!isIntegrationTool(toolCall.name)) {
await handleRun(toolCall, setToolCallState, onStateChange, editedParams) await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
} }
@@ -1526,7 +1523,11 @@ export function ToolCall({
toolCall.name === 'user_memory' || toolCall.name === 'user_memory' ||
toolCall.name === 'edit_respond' || toolCall.name === 'edit_respond' ||
toolCall.name === 'debug_respond' || toolCall.name === 'debug_respond' ||
toolCall.name === 'plan_respond' toolCall.name === 'plan_respond' ||
toolCall.name === 'research_respond' ||
toolCall.name === 'info_respond' ||
toolCall.name === 'deploy_respond' ||
toolCall.name === 'superagent_respond'
) )
return null return null

View File

@@ -209,9 +209,20 @@ export interface SlashCommand {
export const TOP_LEVEL_COMMANDS: readonly SlashCommand[] = [ export const TOP_LEVEL_COMMANDS: readonly SlashCommand[] = [
{ id: 'fast', label: 'Fast' }, { id: 'fast', label: 'Fast' },
{ id: 'research', label: 'Research' }, { id: 'research', label: 'Research' },
{ id: 'superagent', label: 'Actions' }, { id: 'actions', label: 'Actions' },
] as const ] as const
/**
* Maps UI command IDs to API command IDs.
* Some commands have different IDs for display vs API (e.g., "actions" -> "superagent")
*/
export function getApiCommandId(uiCommandId: string): string {
const commandMapping: Record<string, string> = {
actions: 'superagent',
}
return commandMapping[uiCommandId] || uiCommandId
}
export const WEB_COMMANDS: readonly SlashCommand[] = [ export const WEB_COMMANDS: readonly SlashCommand[] = [
{ id: 'search', label: 'Search' }, { id: 'search', label: 'Search' },
{ id: 'read', label: 'Read' }, { id: 'read', label: 'Read' },

View File

@@ -1312,15 +1312,16 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (currentLoop && isLoopBlock) { if (currentLoop && isLoopBlock) {
containingLoopBlockId = blockId containingLoopBlockId = blockId
const loopType = currentLoop.loopType || 'for' const loopType = currentLoop.loopType || 'for'
const contextualTags: string[] = ['index']
if (loopType === 'forEach') {
contextualTags.push('currentItem')
contextualTags.push('items')
}
const loopBlock = blocks[blockId] const loopBlock = blocks[blockId]
if (loopBlock) { if (loopBlock) {
const loopBlockName = loopBlock.name || loopBlock.type const loopBlockName = loopBlock.name || loopBlock.type
const normalizedLoopName = normalizeName(loopBlockName)
const contextualTags: string[] = [`${normalizedLoopName}.index`]
if (loopType === 'forEach') {
contextualTags.push(`${normalizedLoopName}.currentItem`)
contextualTags.push(`${normalizedLoopName}.items`)
}
loopBlockGroup = { loopBlockGroup = {
blockName: loopBlockName, blockName: loopBlockName,
@@ -1328,21 +1329,23 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
blockType: 'loop', blockType: 'loop',
tags: contextualTags, tags: contextualTags,
distance: 0, distance: 0,
isContextual: true,
} }
} }
} else if (containingLoop) { } else if (containingLoop) {
const [loopId, loop] = containingLoop const [loopId, loop] = containingLoop
containingLoopBlockId = loopId containingLoopBlockId = loopId
const loopType = loop.loopType || 'for' const loopType = loop.loopType || 'for'
const contextualTags: string[] = ['index']
if (loopType === 'forEach') {
contextualTags.push('currentItem')
contextualTags.push('items')
}
const containingLoopBlock = blocks[loopId] const containingLoopBlock = blocks[loopId]
if (containingLoopBlock) { if (containingLoopBlock) {
const loopBlockName = containingLoopBlock.name || containingLoopBlock.type const loopBlockName = containingLoopBlock.name || containingLoopBlock.type
const normalizedLoopName = normalizeName(loopBlockName)
const contextualTags: string[] = [`${normalizedLoopName}.index`]
if (loopType === 'forEach') {
contextualTags.push(`${normalizedLoopName}.currentItem`)
contextualTags.push(`${normalizedLoopName}.items`)
}
loopBlockGroup = { loopBlockGroup = {
blockName: loopBlockName, blockName: loopBlockName,
@@ -1350,6 +1353,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
blockType: 'loop', blockType: 'loop',
tags: contextualTags, tags: contextualTags,
distance: 0, distance: 0,
isContextual: true,
} }
} }
} }
@@ -1363,15 +1367,16 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const [parallelId, parallel] = containingParallel const [parallelId, parallel] = containingParallel
containingParallelBlockId = parallelId containingParallelBlockId = parallelId
const parallelType = parallel.parallelType || 'count' const parallelType = parallel.parallelType || 'count'
const contextualTags: string[] = ['index']
if (parallelType === 'collection') {
contextualTags.push('currentItem')
contextualTags.push('items')
}
const containingParallelBlock = blocks[parallelId] const containingParallelBlock = blocks[parallelId]
if (containingParallelBlock) { if (containingParallelBlock) {
const parallelBlockName = containingParallelBlock.name || containingParallelBlock.type const parallelBlockName = containingParallelBlock.name || containingParallelBlock.type
const normalizedParallelName = normalizeName(parallelBlockName)
const contextualTags: string[] = [`${normalizedParallelName}.index`]
if (parallelType === 'collection') {
contextualTags.push(`${normalizedParallelName}.currentItem`)
contextualTags.push(`${normalizedParallelName}.items`)
}
parallelBlockGroup = { parallelBlockGroup = {
blockName: parallelBlockName, blockName: parallelBlockName,
@@ -1379,6 +1384,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
blockType: 'parallel', blockType: 'parallel',
tags: contextualTags, tags: contextualTags,
distance: 0, distance: 0,
isContextual: true,
} }
} }
} }
@@ -1645,38 +1651,29 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const nestedBlockTagGroups: NestedBlockTagGroup[] = useMemo(() => { const nestedBlockTagGroups: NestedBlockTagGroup[] = useMemo(() => {
return filteredBlockTagGroups.map((group: BlockTagGroup) => { return filteredBlockTagGroups.map((group: BlockTagGroup) => {
const normalizedBlockName = normalizeName(group.blockName) const normalizedBlockName = normalizeName(group.blockName)
// Handle loop/parallel contextual tags (index, currentItem, items)
const directTags: NestedTag[] = [] const directTags: NestedTag[] = []
const tagsForTree: string[] = [] const tagsForTree: string[] = []
group.tags.forEach((tag: string) => { group.tags.forEach((tag: string) => {
const tagParts = tag.split('.') const tagParts = tag.split('.')
// Loop/parallel contextual tags without block prefix if (tagParts.length === 1) {
if (
(group.blockType === 'loop' || group.blockType === 'parallel') &&
tagParts.length === 1
) {
directTags.push({ directTags.push({
key: tag, key: tag,
display: tag, display: tag,
fullTag: tag, fullTag: tag,
}) })
} else if (tagParts.length === 2) { } else if (tagParts.length === 2) {
// Direct property like blockname.property
directTags.push({ directTags.push({
key: tagParts[1], key: tagParts[1],
display: tagParts[1], display: tagParts[1],
fullTag: tag, fullTag: tag,
}) })
} else { } else {
// Nested property - add to tree builder
tagsForTree.push(tag) tagsForTree.push(tag)
} }
}) })
// Build recursive tree from nested tags
const nestedTags = [...directTags, ...buildNestedTagTree(tagsForTree, normalizedBlockName)] const nestedTags = [...directTags, ...buildNestedTagTree(tagsForTree, normalizedBlockName)]
return { return {
@@ -1800,15 +1797,21 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
processedTag = tag processedTag = tag
} }
} else if ( } else if (
blockGroup && blockGroup?.isContextual &&
(blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel') (blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel')
) { ) {
if (!tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) { const tagParts = tag.split('.')
processedTag = `${blockGroup.blockType}.${tag}` if (tagParts.length === 1) {
processedTag = blockGroup.blockType
} else {
const lastPart = tagParts[tagParts.length - 1]
if (['index', 'currentItem', 'items'].includes(lastPart)) {
processedTag = `${blockGroup.blockType}.${lastPart}`
} else { } else {
processedTag = tag processedTag = tag
} }
} }
}
let newValue: string let newValue: string

View File

@@ -7,6 +7,8 @@ export interface BlockTagGroup {
blockType: string blockType: string
tags: string[] tags: string[]
distance: number distance: number
/** True if this is a contextual group (loop/parallel iteration context available inside the subflow) */
isContextual?: boolean
} }
/** /**

View File

@@ -13,6 +13,7 @@ import {
import { ReactFlowProvider } from 'reactflow' import { ReactFlowProvider } from 'reactflow'
import { Badge, Button, ChevronDown, Code, Combobox, Input, Label } from '@/components/emcn' import { Badge, Button, ChevronDown, Code, Combobox, Input, Label } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { extractReferencePrefixes } from '@/lib/workflows/sanitization/references' import { extractReferencePrefixes } from '@/lib/workflows/sanitization/references'
import { import {
buildCanonicalIndex, buildCanonicalIndex,
@@ -704,14 +705,6 @@ interface PreviewEditorProps {
onClose?: () => void onClose?: () => void
} }
/**
* Format duration for display
*/
function formatDuration(ms: number): string {
if (ms < 1000) return `${ms}ms`
return `${(ms / 1000).toFixed(2)}s`
}
/** Minimum height for the connections section (header only) */ /** Minimum height for the connections section (header only) */
const MIN_CONNECTIONS_HEIGHT = 30 const MIN_CONNECTIONS_HEIGHT = 30
/** Maximum height for the connections section */ /** Maximum height for the connections section */
@@ -1180,7 +1173,7 @@ function PreviewEditorContent({
)} )}
{executionData.durationMs !== undefined && ( {executionData.durationMs !== undefined && (
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'> <span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
{formatDuration(executionData.durationMs)} {formatDuration(executionData.durationMs, { precision: 2 })}
</span> </span>
)} )}
</div> </div>

View File

@@ -688,7 +688,7 @@ export function AccessControl() {
)} )}
</div> </div>
<div className='flex items-center justify-between rounded-[8px] border border-[var(--border)] px-[12px] py-[10px]'> <div className='flex items-center justify-between'>
<div className='flex flex-col gap-[2px]'> <div className='flex flex-col gap-[2px]'>
<span className='font-medium text-[13px] text-[var(--text-primary)]'> <span className='font-medium text-[13px] text-[var(--text-primary)]'>
Auto-add new members Auto-add new members
@@ -705,7 +705,7 @@ export function AccessControl() {
</div> </div>
<div className='min-h-0 flex-1 overflow-y-auto'> <div className='min-h-0 flex-1 overflow-y-auto'>
<div className='flex flex-col gap-[16px]'> <div className='flex flex-col gap-[8px]'>
<div className='flex items-center justify-between'> <div className='flex items-center justify-between'>
<span className='font-medium text-[13px] text-[var(--text-secondary)]'> <span className='font-medium text-[13px] text-[var(--text-secondary)]'>
Members Members

View File

@@ -16,6 +16,7 @@ import {
} from '@/components/emcn' } from '@/components/emcn'
import { Input, Skeleton } from '@/components/ui' import { Input, Skeleton } from '@/components/ui'
import { useSession } from '@/lib/auth/auth-client' import { useSession } from '@/lib/auth/auth-client'
import { formatDate } from '@/lib/core/utils/formatting'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider' import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { import {
type ApiKey, type ApiKey,
@@ -133,13 +134,9 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
} }
}, [shouldScrollToBottom]) }, [shouldScrollToBottom])
const formatDate = (dateString?: string) => { const formatLastUsed = (dateString?: string) => {
if (!dateString) return 'Never' if (!dateString) return 'Never'
return new Date(dateString).toLocaleDateString('en-US', { return formatDate(new Date(dateString))
year: 'numeric',
month: 'short',
day: 'numeric',
})
} }
return ( return (
@@ -216,7 +213,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
{key.name} {key.name}
</span> </span>
<span className='text-[13px] text-[var(--text-secondary)]'> <span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()}) (last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span> </span>
</div> </div>
<p className='truncate text-[13px] text-[var(--text-muted)]'> <p className='truncate text-[13px] text-[var(--text-muted)]'>
@@ -251,7 +248,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
{key.name} {key.name}
</span> </span>
<span className='text-[13px] text-[var(--text-secondary)]'> <span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()}) (last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span> </span>
</div> </div>
<p className='truncate text-[13px] text-[var(--text-muted)]'> <p className='truncate text-[13px] text-[var(--text-muted)]'>
@@ -291,7 +288,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
{key.name} {key.name}
</span> </span>
<span className='text-[13px] text-[var(--text-secondary)]'> <span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()}) (last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span> </span>
</div> </div>
<p className='truncate text-[13px] text-[var(--text-muted)]'> <p className='truncate text-[13px] text-[var(--text-muted)]'>

View File

@@ -13,6 +13,7 @@ import {
ModalHeader, ModalHeader,
} from '@/components/emcn' } from '@/components/emcn'
import { Input, Skeleton } from '@/components/ui' import { Input, Skeleton } from '@/components/ui'
import { formatDate } from '@/lib/core/utils/formatting'
import { import {
type CopilotKey, type CopilotKey,
useCopilotKeys, useCopilotKeys,
@@ -115,13 +116,9 @@ export function Copilot() {
} }
} }
const formatDate = (dateString?: string | null) => { const formatLastUsed = (dateString?: string | null) => {
if (!dateString) return 'Never' if (!dateString) return 'Never'
return new Date(dateString).toLocaleDateString('en-US', { return formatDate(new Date(dateString))
year: 'numeric',
month: 'short',
day: 'numeric',
})
} }
const hasKeys = keys.length > 0 const hasKeys = keys.length > 0
@@ -180,7 +177,7 @@ export function Copilot() {
{key.name || 'Unnamed Key'} {key.name || 'Unnamed Key'}
</span> </span>
<span className='text-[13px] text-[var(--text-secondary)]'> <span className='text-[13px] text-[var(--text-secondary)]'>
(last used: {formatDate(key.lastUsed).toLowerCase()}) (last used: {formatLastUsed(key.lastUsed).toLowerCase()})
</span> </span>
</div> </div>
<p className='truncate text-[13px] text-[var(--text-muted)]'> <p className='truncate text-[13px] text-[var(--text-muted)]'>

View File

@@ -242,15 +242,9 @@ Return ONLY the email body - no explanations, no extra text.`,
id: 'messageId', id: 'messageId',
title: 'Message ID', title: 'Message ID',
type: 'short-input', type: 'short-input',
placeholder: 'Enter message ID to read (optional)', placeholder: 'Read specific email by ID (overrides label/folder)',
condition: { condition: { field: 'operation', value: 'read_gmail' },
field: 'operation', mode: 'advanced',
value: 'read_gmail',
and: {
field: 'folder',
value: '',
},
},
}, },
// Search Fields // Search Fields
{ {

View File

@@ -129,12 +129,9 @@ ROUTING RULES:
3. If the context is even partially related to a route's description, select that route 3. If the context is even partially related to a route's description, select that route
4. ONLY output NO_MATCH if the context is completely unrelated to ALL route descriptions 4. ONLY output NO_MATCH if the context is completely unrelated to ALL route descriptions
OUTPUT FORMAT: Respond with a JSON object containing:
- Output EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH" - route: EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
- No explanation, no punctuation, no additional text - reasoning: A brief explanation (1-2 sentences) of why you chose this route`
- Just the route ID or NO_MATCH
Your response:`
} }
/** /**
@@ -272,6 +269,7 @@ interface RouterV2Response extends ToolResponse {
total: number total: number
} }
selectedRoute: string selectedRoute: string
reasoning: string
selectedPath: { selectedPath: {
blockId: string blockId: string
blockType: string blockType: string
@@ -355,6 +353,7 @@ export const RouterV2Block: BlockConfig<RouterV2Response> = {
tokens: { type: 'json', description: 'Token usage' }, tokens: { type: 'json', description: 'Token usage' },
cost: { type: 'json', description: 'Cost information' }, cost: { type: 'json', description: 'Cost information' },
selectedRoute: { type: 'string', description: 'Selected route ID' }, selectedRoute: { type: 'string', description: 'Selected route ID' },
reasoning: { type: 'string', description: 'Explanation of why this route was chosen' },
selectedPath: { type: 'json', description: 'Selected routing path' }, selectedPath: { type: 'json', description: 'Selected routing path' },
}, },
} }

View File

@@ -23,7 +23,13 @@ import { cn } from '@/lib/core/utils/cn'
* ``` * ```
*/ */
const checkboxVariants = cva( const checkboxVariants = cva(
'peer shrink-0 rounded-sm border border-[var(--border-1)] bg-[var(--surface-4)] ring-offset-background transition-colors hover:border-[var(--border-muted)] hover:bg-[var(--surface-7)] focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50 data-[state=checked]:border-[var(--text-muted)] data-[state=checked]:bg-[var(--text-muted)] data-[state=checked]:text-white dark:bg-[var(--surface-5)] dark:data-[state=checked]:border-[var(--surface-7)] dark:data-[state=checked]:bg-[var(--surface-7)] dark:data-[state=checked]:text-[var(--text-primary)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]', [
'peer shrink-0 cursor-pointer rounded-[4px] border transition-colors',
'border-[var(--border-1)] bg-transparent',
'focus-visible:outline-none',
'data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50',
'data-[state=checked]:border-[var(--text-primary)] data-[state=checked]:bg-[var(--text-primary)]',
].join(' '),
{ {
variants: { variants: {
size: { size: {
@@ -83,7 +89,7 @@ const Checkbox = React.forwardRef<React.ElementRef<typeof CheckboxPrimitive.Root
className={cn(checkboxVariants({ size }), className)} className={cn(checkboxVariants({ size }), className)}
{...props} {...props}
> >
<CheckboxPrimitive.Indicator className={cn('flex items-center justify-center text-current')}> <CheckboxPrimitive.Indicator className='flex items-center justify-center text-[var(--white)]'>
<Check className={cn(checkboxIconVariants({ size }))} /> <Check className={cn(checkboxIconVariants({ size }))} />
</CheckboxPrimitive.Indicator> </CheckboxPrimitive.Indicator>
</CheckboxPrimitive.Root> </CheckboxPrimitive.Root>

View File

@@ -0,0 +1 @@
// Barrel file: re-exports the demo component so posts can import it from the directory root.
export { DiffControlsDemo } from './components/diff-controls-demo'

View File

@@ -0,0 +1,111 @@
'use client'
import { useState } from 'react'
/**
 * Static demo of the editor's diff controls: a Reject / Accept button pair joined
 * by a slanted divider, recreated with inline styles so it renders inside a blog
 * post without depending on the app's stylesheet.
 *
 * Clicks are intentional no-ops — only hover styling responds.
 */
export function DiffControlsDemo() {
  // Hover flags drive the color / brightness feedback on each button.
  const [rejectHover, setRejectHover] = useState(false)
  const [acceptHover, setAcceptHover] = useState(false)
  return (
    <div style={{ display: 'flex', justifyContent: 'center', margin: '24px 0' }}>
      <div
        style={{
          position: 'relative',
          display: 'flex',
          height: '30px',
          overflow: 'hidden',
          borderRadius: '4px',
          isolation: 'isolate',
        }}
      >
        {/* Reject button — left half, clipped so its right edge slants to match the divider */}
        <button
          onClick={() => {}}
          onMouseEnter={() => setRejectHover(true)}
          onMouseLeave={() => setRejectHover(false)}
          title='Reject changes'
          style={{
            position: 'relative',
            display: 'flex',
            height: '100%',
            alignItems: 'center',
            border: '1px solid #e0e0e0',
            backgroundColor: rejectHover ? '#f0f0f0' : '#f5f5f5',
            paddingRight: '20px',
            paddingLeft: '12px',
            fontWeight: 500,
            fontSize: '13px',
            color: rejectHover ? '#2d2d2d' : '#404040',
            clipPath: 'polygon(0 0, calc(100% + 10px) 0, 100% 100%, 0 100%)',
            borderRadius: '4px 0 0 4px',
            cursor: 'default',
            transition: 'color 150ms, background-color 150ms, border-color 150ms',
          }}
        >
          Reject
        </button>
        {/* Slanted divider - split gray/green */}
        <div
          style={{
            pointerEvents: 'none',
            position: 'absolute',
            top: 0,
            bottom: 0,
            left: '66px',
            width: '2px',
            transform: 'skewX(-18.4deg)',
            background: 'linear-gradient(to right, #e0e0e0 50%, #238458 50%)',
            zIndex: 10,
          }}
        />
        {/* Accept button — overlaps the reject button by 10px so the clip paths meet */}
        <button
          onClick={() => {}}
          onMouseEnter={() => setAcceptHover(true)}
          onMouseLeave={() => setAcceptHover(false)}
          title='Accept changes (⇧⌘⏎)'
          style={{
            position: 'relative',
            display: 'flex',
            height: '100%',
            alignItems: 'center',
            border: '1px solid rgba(0, 0, 0, 0.15)',
            backgroundColor: '#32bd7e',
            paddingRight: '12px',
            paddingLeft: '20px',
            fontWeight: 500,
            fontSize: '13px',
            color: '#ffffff',
            clipPath: 'polygon(10px 0, 100% 0, 100% 100%, 0 100%)',
            borderRadius: '0 4px 4px 0',
            marginLeft: '-10px',
            cursor: 'default',
            filter: acceptHover ? 'brightness(1.1)' : undefined,
            transition: 'background-color 150ms, border-color 150ms',
          }}
        >
          Accept
          <kbd
            style={{
              marginLeft: '8px',
              borderRadius: '4px',
              border: '1px solid rgba(255, 255, 255, 0.2)',
              backgroundColor: 'rgba(255, 255, 255, 0.1)',
              paddingLeft: '6px',
              paddingRight: '6px',
              paddingTop: '2px',
              paddingBottom: '2px',
              fontWeight: 500,
              fontFamily:
                'ui-sans-serif, system-ui, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji"',
              fontSize: '10px',
              color: '#ffffff',
            }}
          >
            {/* Fix: the shortcut glyphs were missing, leaving an empty <kbd> pill.
                The shortcut matches the button's title attribute. */}
            <span style={{ display: 'inline-block', transform: 'translateY(-1px)' }}>⇧⌘⏎</span>
          </kbd>
        </button>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,201 @@
---
slug: v0-5
title: 'Introducing Sim v0.5'
description: 'This new release brings a state of the art Copilot, seamless MCP server and tool deployment, 100+ integrations with 300+ tools, comprehensive execution logs, and realtime collaboration—built for teams shipping AI agents in production.'
date: 2026-01-22
updated: 2026-01-22
authors:
- waleed
readingTime: 8
tags: [Release, Copilot, MCP, Observability, Collaboration, Integrations, Sim]
ogImage: /studio/v0-5/cover.png
ogAlt: 'Sim v0.5 release announcement'
about: ['AI Agents', 'Workflow Automation', 'Developer Tools']
timeRequired: PT8M
canonical: https://sim.ai/studio/v0-5
featured: true
draft: false
---
**Sim v0.5** is the next evolution of our agent workflow platform—built for teams shipping AI agents to production.
## Copilot
![Sim Copilot](/studio/v0-5/copilot.jpg)
Copilot is a context-aware assistant embedded in the Sim editor. Unlike general-purpose AI assistants, Copilot has direct access to your workspace: workflows, block configurations, execution logs, connected credentials, and documentation. It can also search the web to pull in external context when needed.
Your workspace is indexed for hybrid retrieval. When you ask a question, Copilot queries this index to ground its responses in your actual workflow state. Ask "why did my workflow fail at 3am?" and it retrieves the relevant execution trace, identifies the error, and explains what happened.
Copilot supports slash commands that trigger specialized capabilities:
- `/deep-research` — performs multi-step web research on a topic, synthesizing results from multiple sources
- `/api-docs` — fetches and parses API documentation from a URL, extracting endpoints, parameters, and authentication requirements
- `/test` — runs your current workflow with sample inputs and reports results inline
- `/build` — generates a complete workflow from a natural language description, wiring up blocks and configuring integrations
Use `@` commands to pull specific context into your conversation. `@block` references a specific block's configuration and recent outputs. `@workflow` includes the full workflow structure. `@logs` pulls in recent execution traces. This lets you ask targeted questions like "why is `@Slack1` returning an error?" and Copilot has the exact context it needs to diagnose the issue.
For complex tasks, Copilot uses subagents—breaking requests into discrete operations and executing them sequentially. Ask it to "add error handling to this workflow" and it will analyze your blocks, determine where failures could occur, add appropriate condition blocks, and wire up notification paths. Each change surfaces as a diff for your review before applying.
<DiffControlsDemo />
## MCP Deployment
![MCP Deployment](/studio/v0-5/mcp.png)
Deploy any workflow as an [MCP](https://modelcontextprotocol.io) server. Once deployed, the workflow becomes a callable tool for any MCP-compatible agent—[Claude Desktop](https://claude.ai/download), [Cursor](https://cursor.com), or your own applications.
Sim generates a tool definition from your workflow: the name and description you specify, plus a JSON schema derived from your Start block's input format. The MCP server uses Streamable HTTP transport, so agents connect via a single URL. Authentication is handled via API key headers or public access, depending on your configuration.
Consider a lead enrichment workflow: it queries Apollo for contact data, checks Salesforce for existing records, formats the output, and posts a summary to Slack. That's 8 blocks in Sim. Deploy it as MCP, and any agent can call `enrich_lead("jane@acme.com")` and receive structured data back. The agent treats it as a single tool call—it doesn't need to know about Apollo, Salesforce, or Slack.
This pattern scales to research pipelines, data processing workflows, approval chains, and internal tooling. Anything you build in Sim becomes a tool any agent can invoke.
## Logs & Dashboard
![Logs & Dashboard](/studio/v0-5/dashboard.jpg)
Every workflow execution generates a full trace. Each block records its start time, end time, inputs, outputs, and any errors. For LLM blocks, we capture prompt tokens, completion tokens, and cost by model.
The dashboard aggregates this data into queryable views:
- **Trace spans**: Hierarchical view of block executions with timing waterfall
- **Cost attribution**: Token usage and spend broken down by model per execution
- **Error context**: Full stack traces with the block, input values, and failure reason
- **Filtering**: Query by time range, trigger type, workflow, or status
- **Execution snapshots**: Each run captures the workflow state at execution time—restore to see exactly what was running
This level of observability is necessary when workflows handle production traffic—sending customer emails, processing payments, or making API calls on behalf of users.
## Realtime Collaboration
![Realtime Collaboration](/studio/v0-5/collaboration.png)
Multiple users can edit the same workflow simultaneously. Changes propagate in real time—you see teammates' cursors, block additions, and configuration updates as they happen.
The editor now supports full undo/redo history (Cmd+Z / Cmd+Shift+Z), so you can step back through changes without losing work. Copy and paste works for individual blocks, groups of blocks, or entire subflows—select what you need, Cmd+C, and paste into the same workflow or a different one. This makes it easy to duplicate patterns, share components across workflows, or quickly prototype variations.
This is particularly useful during development sessions where engineers, product managers, and domain experts need to iterate together. Everyone works on the same workflow state, and changes sync immediately across all connected clients.
## Versioning
![Versioning](/studio/v0-5/versioning.png)
Every deployment creates a new version. The version history shows who deployed what and when, with a preview of the workflow state at that point in time. Roll back to any previous version with one click—the live deployment updates immediately.
This matters when something breaks in production. You can instantly revert to the last known good version while you debug, rather than scrambling to fix forward. It also provides a clear audit trail: you can see exactly what changed between versions and who made the change.
---
## 100+ Integrations
![Integrations](/studio/v0-5/integrations.png)
v0.5 adds **100+ integrations** with **300+ actions**. These cover the specific operations you need—not just generic CRUD, but actions like "send Slack message to channel," "create Jira ticket with custom fields," "query Postgres with parameterized SQL," or "enrich contact via Apollo."
- **CRMs & Sales**: Salesforce, HubSpot, Pipedrive, Apollo, Wealthbox
- **Communication**: Slack, Discord, Microsoft Teams, Telegram, WhatsApp, Twilio
- **Productivity**: Notion, Confluence, Google Workspace, Microsoft 365, Airtable, Asana, Trello
- **Developer Tools**: GitHub, GitLab, Jira, Linear, Sentry, Datadog, Grafana
- **Databases**: PostgreSQL, MySQL, MongoDB, [Supabase](https://supabase.com), DynamoDB, Elasticsearch, [Pinecone](https://pinecone.io), [Qdrant](https://qdrant.tech), Neo4j
- **Finance**: Stripe, Kalshi, Polymarket
- **Web & Search**: [Firecrawl](https://firecrawl.dev), [Exa](https://exa.ai), [Tavily](https://tavily.com), [Jina](https://jina.ai), [Serper](https://serper.dev)
- **Cloud**: AWS (S3, RDS, SQS, Textract, Bedrock), [Browser Use](https://browser-use.com), [Stagehand](https://github.com/browserbase/stagehand)
Each integration handles OAuth or API key authentication. Connect once, and the credentials are available across all workflows in your workspace.
---
## Triggers
Workflows can be triggered through multiple mechanisms:
**Webhooks**: Sim provisions a unique HTTPS endpoint for each workflow. Incoming POST requests are parsed and passed to the first block as input. Supports standard webhook patterns including signature verification for services that provide it.
**Schedules**: Cron-based scheduling with timezone support. Use the visual scheduler or write expressions directly. Execution locks prevent overlapping runs.
**Chat**: Deploy workflows as conversational interfaces. Messages stream to your workflow, responses stream back to the user. Supports multi-turn context.
**API**: REST endpoint with your workflow's input schema. Call it from any system that can make HTTP requests.
**Integration triggers**: Event-driven triggers for specific services—GitHub (PR opened, issue created, push), Stripe (payment succeeded, subscription updated), Typeform (form submitted), RSS (new item), and more.
**Forms**: Coming soon—build custom input forms that trigger workflows directly.
---
## Knowledge Base
![Knowledge Base](/studio/v0-5/kb.png)
Upload documents—PDFs, text files, markdown, HTML—and make them queryable by your agents. This is [RAG](https://en.wikipedia.org/wiki/Retrieval-augmented_generation) (Retrieval Augmented Generation) built directly into Sim.
Documents are chunked, embedded, and indexed using hybrid search ([BM25](https://en.wikipedia.org/wiki/Okapi_BM25) + vector embeddings). Agent blocks can query the knowledge base as a tool, retrieving relevant passages based on semantic similarity and keyword matching. When documents are updated, they re-index automatically.
Use cases:
- **Customer support agents** that reference your help docs and troubleshooting guides to resolve tickets
- **Sales assistants** that pull from product specs, pricing sheets, and competitive intel
- **Internal Q&A bots** that answer questions about company policies, HR docs, or engineering runbooks
- **Research workflows** that synthesize information from uploaded papers, reports, or data exports
---
## New Blocks
### Human in the Loop
Pause workflow execution pending human approval. The block sends a notification (email, Slack, or webhook) with approve/reject actions. Execution resumes only on approval—useful for high-stakes operations like customer-facing emails, financial transactions, or content publishing.
### Agent Block
The Agent block now supports three additional tool types:
- **Workflows as tools**: Agents can invoke other Sim workflows, enabling hierarchical architectures where a coordinator agent delegates to specialized sub-workflows
- **Knowledge base queries**: Agents search your indexed documents directly, retrieving relevant context for their responses
- **Custom functions**: Execute JavaScript or Python code in isolated sandboxes with configurable timeout and memory limits
### Subflows
Group blocks into collapsible subflows. Use them for loops (iterate over arrays), parallel execution (run branches concurrently), or logical organization. Subflows can be nested and keep complex workflows manageable.
### Router
Conditional branching based on data or LLM classification. Define rules or let the router use an LLM to determine intent and select the appropriate path.
The router now exposes its reasoning in execution logs—when debugging unexpected routing, you can see exactly why a particular branch was selected.
---
## Model Providers
Sim supports 14 model providers. Alongside the three new additions below, the lineup includes [OpenAI](https://openai.com), [Anthropic](https://anthropic.com), [Google](https://ai.google.dev), [Azure OpenAI](https://azure.microsoft.com/en-us/products/ai-services/openai-service), [xAI](https://x.ai), [Mistral](https://mistral.ai), [DeepSeek](https://deepseek.com), [Groq](https://groq.com), [Cerebras](https://cerebras.ai), [Ollama](https://ollama.com), and [OpenRouter](https://openrouter.ai).
New in v0.5:
- **[AWS Bedrock](https://aws.amazon.com/bedrock)**: Claude, Nova, Llama, Mistral, and Cohere models via your AWS account
- **[Google Vertex AI](https://cloud.google.com/vertex-ai)**: Gemini models through Google Cloud
- **[vLLM](https://github.com/vllm-project/vllm)**: Self-hosted models on your own infrastructure
Model selection is per-block, so you can use faster/cheaper models for simple tasks and more capable models where needed.
---
## Developer Experience
**Custom Tools**: Define your own integrations with custom HTTP endpoints, authentication (API key, OAuth, Bearer token), and request/response schemas. Custom tools appear in the block palette alongside built-in integrations.
**Environment Variables**: Encrypted key-value storage for secrets and configuration. Variables are decrypted at runtime and can be referenced in any block configuration.
**Import/Export**: Export workflows or entire workspaces as JSON. Imports preserve all blocks, connections, configurations, and variable references.
**File Manager**: Upload files to your workspace for use in workflows—templates, seed data, static assets. Files are accessible via internal references or presigned URLs.
---
## Get Started
Available now at [sim.ai](https://sim.ai). Check out the [docs](https://docs.sim.ai) to dive deeper.
*Questions? [help@sim.ai](mailto:help@sim.ai) · [Discord](https://sim.ai/discord)*

View File

@@ -120,6 +120,12 @@ export const SPECIAL_REFERENCE_PREFIXES = [
REFERENCE.PREFIX.VARIABLE, REFERENCE.PREFIX.VARIABLE,
] as const ] as const
// Block names that collide with the special reference prefixes (loop / parallel /
// variable) and are therefore reserved.
// NOTE(review): rationale inferred from the constant's name and members — confirm
// against the reference-resolution code that consumes this list.
export const RESERVED_BLOCK_NAMES = [
  REFERENCE.PREFIX.LOOP,
  REFERENCE.PREFIX.PARALLEL,
  REFERENCE.PREFIX.VARIABLE,
] as const
export const LOOP_REFERENCE = { export const LOOP_REFERENCE = {
ITERATION: 'iteration', ITERATION: 'iteration',
INDEX: 'index', INDEX: 'index',

View File

@@ -24,6 +24,71 @@ function createBlock(id: string, metadataId: string): SerializedBlock {
} }
} }
// Verifies that DAGBuilder does not raise the "no blocks inside" validation error
// for subflows that are effectively disabled. These tests rely on the builder
// checking for the subflow's sentinel start node before validating its contents —
// a disabled subflow never gets a sentinel node, so validation is skipped.
describe('DAGBuilder disabled subflow validation', () => {
  it('skips validation for disabled loops with no blocks inside', () => {
    const workflow: SerializedWorkflow = {
      version: '1',
      blocks: [
        createBlock('start', BlockType.STARTER),
        // The loop's container block is disabled, so the loop should be ignored.
        { ...createBlock('loop-block', BlockType.FUNCTION), enabled: false },
      ],
      connections: [],
      loops: {
        'loop-1': {
          id: 'loop-1',
          nodes: [], // Empty loop - would normally throw
          iterations: 3,
        },
      },
    }
    const builder = new DAGBuilder()
    // Should not throw even though loop has no blocks inside
    expect(() => builder.build(workflow)).not.toThrow()
  })
  it('skips validation for disabled parallels with no blocks inside', () => {
    const workflow: SerializedWorkflow = {
      version: '1',
      blocks: [createBlock('start', BlockType.STARTER)],
      connections: [],
      loops: {},
      parallels: {
        'parallel-1': {
          id: 'parallel-1',
          nodes: [], // Empty parallel - would normally throw
        },
      },
    }
    const builder = new DAGBuilder()
    // Should not throw even though parallel has no blocks inside
    expect(() => builder.build(workflow)).not.toThrow()
  })
  it('skips validation for loops where all inner blocks are disabled', () => {
    const workflow: SerializedWorkflow = {
      version: '1',
      blocks: [
        createBlock('start', BlockType.STARTER),
        // The loop references a node, but that node is disabled.
        { ...createBlock('inner-block', BlockType.FUNCTION), enabled: false },
      ],
      connections: [],
      loops: {
        'loop-1': {
          id: 'loop-1',
          nodes: ['inner-block'], // Has node but it's disabled
          iterations: 3,
        },
      },
    }
    const builder = new DAGBuilder()
    // Should not throw - loop is effectively disabled since all inner blocks are disabled
    expect(() => builder.build(workflow)).not.toThrow()
  })
})
describe('DAGBuilder human-in-the-loop transformation', () => { describe('DAGBuilder human-in-the-loop transformation', () => {
it('creates trigger nodes and rewires edges for pause blocks', () => { it('creates trigger nodes and rewires edges for pause blocks', () => {
const workflow: SerializedWorkflow = { const workflow: SerializedWorkflow = {

View File

@@ -136,17 +136,18 @@ export class DAGBuilder {
nodes: string[] | undefined, nodes: string[] | undefined,
type: 'Loop' | 'Parallel' type: 'Loop' | 'Parallel'
): void { ): void {
const sentinelStartId =
type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
const sentinelStartNode = dag.nodes.get(sentinelStartId)
if (!sentinelStartNode) return
if (!nodes || nodes.length === 0) { if (!nodes || nodes.length === 0) {
throw new Error( throw new Error(
`${type} has no blocks inside. Add at least one block to the ${type.toLowerCase()}.` `${type} has no blocks inside. Add at least one block to the ${type.toLowerCase()}.`
) )
} }
const sentinelStartId =
type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
const sentinelStartNode = dag.nodes.get(sentinelStartId)
if (!sentinelStartNode) return
const hasConnections = Array.from(sentinelStartNode.outgoingEdges.values()).some((edge) => const hasConnections = Array.from(sentinelStartNode.outgoingEdges.values()).some((edge) =>
nodes.includes(extractBaseBlockId(edge.target)) nodes.includes(extractBaseBlockId(edge.target))
) )

File diff suppressed because it is too large Load Diff

View File

@@ -20,21 +20,13 @@ export class EdgeManager {
const activatedTargets: string[] = [] const activatedTargets: string[] = []
const edgesToDeactivate: Array<{ target: string; handle?: string }> = [] const edgesToDeactivate: Array<{ target: string; handle?: string }> = []
// First pass: categorize edges as activating or deactivating for (const [, edge] of node.outgoingEdges) {
// Don't modify incomingEdges yet - we need the original state for deactivation checks
for (const [edgeId, edge] of node.outgoingEdges) {
if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) { if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) {
continue continue
} }
const shouldActivate = this.shouldActivateEdge(edge, output) if (!this.shouldActivateEdge(edge, output)) {
if (!shouldActivate) { if (!this.isLoopEdge(edge.sourceHandle)) {
const isLoopEdge =
edge.sourceHandle === EDGE.LOOP_CONTINUE ||
edge.sourceHandle === EDGE.LOOP_CONTINUE_ALT ||
edge.sourceHandle === EDGE.LOOP_EXIT
if (!isLoopEdge) {
edgesToDeactivate.push({ target: edge.target, handle: edge.sourceHandle }) edgesToDeactivate.push({ target: edge.target, handle: edge.sourceHandle })
} }
continue continue
@@ -43,13 +35,19 @@ export class EdgeManager {
activatedTargets.push(edge.target) activatedTargets.push(edge.target)
} }
// Second pass: process deactivations while incomingEdges is still intact const cascadeTargets = new Set<string>()
// This ensures hasActiveIncomingEdges can find all potential sources
for (const { target, handle } of edgesToDeactivate) { for (const { target, handle } of edgesToDeactivate) {
this.deactivateEdgeAndDescendants(node.id, target, handle) this.deactivateEdgeAndDescendants(node.id, target, handle, cascadeTargets)
}
if (activatedTargets.length === 0) {
for (const { target } of edgesToDeactivate) {
if (this.isTerminalControlNode(target)) {
cascadeTargets.add(target)
}
}
} }
// Third pass: update incomingEdges for activated targets
for (const targetId of activatedTargets) { for (const targetId of activatedTargets) {
const targetNode = this.dag.nodes.get(targetId) const targetNode = this.dag.nodes.get(targetId)
if (!targetNode) { if (!targetNode) {
@@ -59,28 +57,25 @@ export class EdgeManager {
targetNode.incomingEdges.delete(node.id) targetNode.incomingEdges.delete(node.id)
} }
// Fourth pass: check readiness after all edge processing is complete
for (const targetId of activatedTargets) { for (const targetId of activatedTargets) {
const targetNode = this.dag.nodes.get(targetId) if (this.isTargetReady(targetId)) {
if (targetNode && this.isNodeReady(targetNode)) {
readyNodes.push(targetId) readyNodes.push(targetId)
} }
} }
for (const targetId of cascadeTargets) {
if (!readyNodes.includes(targetId) && !activatedTargets.includes(targetId)) {
if (this.isTargetReady(targetId)) {
readyNodes.push(targetId)
}
}
}
return readyNodes return readyNodes
} }
isNodeReady(node: DAGNode): boolean { isNodeReady(node: DAGNode): boolean {
if (node.incomingEdges.size === 0) { return node.incomingEdges.size === 0 || this.countActiveIncomingEdges(node) === 0
return true
}
const activeIncomingCount = this.countActiveIncomingEdges(node)
if (activeIncomingCount > 0) {
return false
}
return true
} }
restoreIncomingEdge(targetNodeId: string, sourceNodeId: string): void { restoreIncomingEdge(targetNodeId: string, sourceNodeId: string): void {
@@ -99,13 +94,10 @@ export class EdgeManager {
/** /**
* Clear deactivated edges for a set of nodes (used when restoring loop state for next iteration). * Clear deactivated edges for a set of nodes (used when restoring loop state for next iteration).
* This ensures error/success edges can be re-evaluated on each iteration.
*/ */
clearDeactivatedEdgesForNodes(nodeIds: Set<string>): void { clearDeactivatedEdgesForNodes(nodeIds: Set<string>): void {
const edgesToRemove: string[] = [] const edgesToRemove: string[] = []
for (const edgeKey of this.deactivatedEdges) { for (const edgeKey of this.deactivatedEdges) {
// Edge key format is "sourceId-targetId-handle"
// Check if either source or target is in the nodeIds set
for (const nodeId of nodeIds) { for (const nodeId of nodeIds) {
if (edgeKey.startsWith(`${nodeId}-`) || edgeKey.includes(`-${nodeId}-`)) { if (edgeKey.startsWith(`${nodeId}-`) || edgeKey.includes(`-${nodeId}-`)) {
edgesToRemove.push(edgeKey) edgesToRemove.push(edgeKey)
@@ -118,6 +110,44 @@ export class EdgeManager {
} }
} }
// A target is ready only if it exists in the DAG and isNodeReady reports it ready.
private isTargetReady(targetId: string): boolean {
  const node = this.dag.nodes.get(targetId)
  if (!node) {
    return false
  }
  return this.isNodeReady(node)
}
// True when the handle is one of the loop control handles (continue, its
// alternate form, or exit).
private isLoopEdge(handle?: string): boolean {
  const loopHandles = [EDGE.LOOP_CONTINUE, EDGE.LOOP_CONTINUE_ALT, EDGE.LOOP_EXIT]
  return loopHandles.some((loopHandle) => loopHandle === handle)
}
// True for any control-flow handle: the loop handles plus the parallel exit.
private isControlEdge(handle?: string): boolean {
  const controlHandles = [
    EDGE.LOOP_CONTINUE,
    EDGE.LOOP_CONTINUE_ALT,
    EDGE.LOOP_EXIT,
    EDGE.PARALLEL_EXIT,
  ]
  return controlHandles.some((controlHandle) => controlHandle === handle)
}
// True for the two loop-continue handle variants (edges that point back in the loop).
private isBackwardsEdge(sourceHandle?: string): boolean {
  return [EDGE.LOOP_CONTINUE, EDGE.LOOP_CONTINUE_ALT].some((handle) => handle === sourceHandle)
}
// A node is a "terminal control node" when it has at least one outgoing edge and
// every outgoing edge is a control edge (loop or parallel handle).
private isTerminalControlNode(nodeId: string): boolean {
  const node = this.dag.nodes.get(nodeId)
  if (!node || node.outgoingEdges.size === 0) {
    return false
  }
  return Array.from(node.outgoingEdges.values()).every((edge) =>
    this.isControlEdge(edge.sourceHandle)
  )
}
private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean { private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean {
const handle = edge.sourceHandle const handle = edge.sourceHandle
@@ -159,14 +189,12 @@ export class EdgeManager {
} }
} }
private isBackwardsEdge(sourceHandle?: string): boolean {
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
}
private deactivateEdgeAndDescendants( private deactivateEdgeAndDescendants(
sourceId: string, sourceId: string,
targetId: string, targetId: string,
sourceHandle?: string sourceHandle?: string,
cascadeTargets?: Set<string>,
isCascade = false
): void { ): void {
const edgeKey = this.createEdgeKey(sourceId, targetId, sourceHandle) const edgeKey = this.createEdgeKey(sourceId, targetId, sourceHandle)
if (this.deactivatedEdges.has(edgeKey)) { if (this.deactivatedEdges.has(edgeKey)) {
@@ -174,38 +202,46 @@ export class EdgeManager {
} }
this.deactivatedEdges.add(edgeKey) this.deactivatedEdges.add(edgeKey)
const targetNode = this.dag.nodes.get(targetId) const targetNode = this.dag.nodes.get(targetId)
if (!targetNode) return if (!targetNode) return
// Check if target has other active incoming edges if (isCascade && this.isTerminalControlNode(targetId)) {
// Pass the specific edge key being deactivated, not just source ID, cascadeTargets?.add(targetId)
// to handle multiple edges from same source to same target (e.g., condition branches) }
const hasOtherActiveIncoming = this.hasActiveIncomingEdges(targetNode, edgeKey)
if (!hasOtherActiveIncoming) { if (this.hasActiveIncomingEdges(targetNode, edgeKey)) {
for (const [_, outgoingEdge] of targetNode.outgoingEdges) { return
this.deactivateEdgeAndDescendants(targetId, outgoingEdge.target, outgoingEdge.sourceHandle) }
for (const [, outgoingEdge] of targetNode.outgoingEdges) {
if (!this.isControlEdge(outgoingEdge.sourceHandle)) {
this.deactivateEdgeAndDescendants(
targetId,
outgoingEdge.target,
outgoingEdge.sourceHandle,
cascadeTargets,
true
)
} }
} }
} }
/** /**
* Checks if a node has any active incoming edges besides the one being excluded. * Checks if a node has any active incoming edges besides the one being excluded.
* This properly handles the case where multiple edges from the same source go to
* the same target (e.g., multiple condition branches pointing to one block).
*/ */
private hasActiveIncomingEdges(node: DAGNode, excludeEdgeKey: string): boolean { private hasActiveIncomingEdges(node: DAGNode, excludeEdgeKey: string): boolean {
for (const incomingSourceId of node.incomingEdges) { for (const incomingSourceId of node.incomingEdges) {
const incomingNode = this.dag.nodes.get(incomingSourceId) const incomingNode = this.dag.nodes.get(incomingSourceId)
if (!incomingNode) continue if (!incomingNode) continue
for (const [_, incomingEdge] of incomingNode.outgoingEdges) { for (const [, incomingEdge] of incomingNode.outgoingEdges) {
if (incomingEdge.target === node.id) { if (incomingEdge.target === node.id) {
const incomingEdgeKey = this.createEdgeKey( const incomingEdgeKey = this.createEdgeKey(
incomingSourceId, incomingSourceId,
node.id, node.id,
incomingEdge.sourceHandle incomingEdge.sourceHandle
) )
// Skip the specific edge being excluded, but check other edges from same source
if (incomingEdgeKey === excludeEdgeKey) continue if (incomingEdgeKey === excludeEdgeKey) continue
if (!this.deactivatedEdges.has(incomingEdgeKey)) { if (!this.deactivatedEdges.has(incomingEdgeKey)) {
return true return true

View File

@@ -554,6 +554,413 @@ describe('ExecutionEngine', () => {
}) })
}) })
// Exercises the engine's failure path: a thrown error from any node should reject
// engine.run, stop sibling branches, and surface only the first error when several
// branches fail.
describe('Error handling in execution', () => {
  it('should fail execution when a single node throws an error', async () => {
    const startNode = createMockNode('start', 'starter')
    const errorNode = createMockNode('error-node', 'function')
    startNode.outgoingEdges.set('edge1', { target: 'error-node' })
    const dag = createMockDAG([startNode, errorNode])
    const context = createMockContext()
    const edgeManager = createMockEdgeManager((node) => {
      if (node.id === 'start') return ['error-node']
      return []
    })
    const nodeOrchestrator = {
      executionCount: 0,
      // Only 'error-node' throws; everything else completes normally.
      executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
        if (nodeId === 'error-node') {
          throw new Error('Block execution failed')
        }
        return { nodeId, output: {}, isFinalOutput: false }
      }),
      handleNodeCompletion: vi.fn(),
    } as unknown as MockNodeOrchestrator
    const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
    // The node's error must propagate out of run() unchanged.
    await expect(engine.run('start')).rejects.toThrow('Block execution failed')
  })
  it('should stop parallel branches when one branch throws an error', async () => {
    const startNode = createMockNode('start', 'starter')
    const parallelNodes = Array.from({ length: 5 }, (_, i) =>
      createMockNode(`parallel${i}`, 'function')
    )
    parallelNodes.forEach((_, i) => {
      startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
    })
    const dag = createMockDAG([startNode, ...parallelNodes])
    const context = createMockContext()
    const edgeManager = createMockEdgeManager((node) => {
      if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
      return []
    })
    const executedNodes: string[] = []
    const nodeOrchestrator = {
      executionCount: 0,
      // parallel0 fails quickly (10ms); the other branches are slow (100ms), so the
      // engine should reject with parallel0's error rather than waiting them out.
      executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
        executedNodes.push(nodeId)
        if (nodeId === 'parallel0') {
          await new Promise((resolve) => setTimeout(resolve, 10))
          throw new Error('Parallel branch failed')
        }
        await new Promise((resolve) => setTimeout(resolve, 100))
        return { nodeId, output: {}, isFinalOutput: false }
      }),
      handleNodeCompletion: vi.fn(),
    } as unknown as MockNodeOrchestrator
    const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
    await expect(engine.run('start')).rejects.toThrow('Parallel branch failed')
  })
  it('should capture only the first error when multiple parallel branches fail', async () => {
    const startNode = createMockNode('start', 'starter')
    const parallelNodes = Array.from({ length: 3 }, (_, i) =>
      createMockNode(`parallel${i}`, 'function')
    )
    parallelNodes.forEach((_, i) => {
      startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
    })
    const dag = createMockDAG([startNode, ...parallelNodes])
    const context = createMockContext()
    const edgeManager = createMockEdgeManager((node) => {
      if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
      return []
    })
    const nodeOrchestrator = {
      executionCount: 0,
      // Branches fail in a staggered order (10ms, 20ms, 30ms) so the "first" error
      // is deterministic.
      executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
        if (nodeId === 'parallel0') {
          await new Promise((resolve) => setTimeout(resolve, 10))
          throw new Error('First error')
        }
        if (nodeId === 'parallel1') {
          await new Promise((resolve) => setTimeout(resolve, 20))
          throw new Error('Second error')
        }
        if (nodeId === 'parallel2') {
          await new Promise((resolve) => setTimeout(resolve, 30))
          throw new Error('Third error')
        }
        return { nodeId, output: {}, isFinalOutput: false }
      }),
      handleNodeCompletion: vi.fn(),
    } as unknown as MockNodeOrchestrator
    const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
    // Only the earliest failure should be reported, not the later ones.
    await expect(engine.run('start')).rejects.toThrow('First error')
  })
it('should wait for ongoing executions to complete before throwing error', async () => {
const startNode = createMockNode('start', 'starter')
const fastErrorNode = createMockNode('fast-error', 'function')
const slowNode = createMockNode('slow', 'function')
startNode.outgoingEdges.set('edge1', { target: 'fast-error' })
startNode.outgoingEdges.set('edge2', { target: 'slow' })
const dag = createMockDAG([startNode, fastErrorNode, slowNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['fast-error', 'slow']
return []
})
let slowNodeCompleted = false
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'fast-error') {
await new Promise((resolve) => setTimeout(resolve, 10))
throw new Error('Fast error')
}
if (nodeId === 'slow') {
await new Promise((resolve) => setTimeout(resolve, 50))
slowNodeCompleted = true
return { nodeId, output: {}, isFinalOutput: false }
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
await expect(engine.run('start')).rejects.toThrow('Fast error')
expect(slowNodeCompleted).toBe(true)
})
it('should not queue new nodes after an error occurs', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
const afterErrorNode = createMockNode('after-error', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
errorNode.outgoingEdges.set('edge2', { target: 'after-error' })
const dag = createMockDAG([startNode, errorNode, afterErrorNode])
const context = createMockContext()
const queuedNodes: string[] = []
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') {
queuedNodes.push('error-node')
return ['error-node']
}
if (node.id === 'error-node') {
queuedNodes.push('after-error')
return ['after-error']
}
return []
})
const executedNodes: string[] = []
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
executedNodes.push(nodeId)
if (nodeId === 'error-node') {
throw new Error('Node error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
await expect(engine.run('start')).rejects.toThrow('Node error')
expect(executedNodes).not.toContain('after-error')
})
it('should populate error result with metadata when execution fails', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
const dag = createMockDAG([startNode, errorNode])
const context = createMockContext()
context.blockLogs.push({
blockId: 'start',
blockName: 'Start',
blockType: 'starter',
startedAt: new Date().toISOString(),
endedAt: new Date().toISOString(),
durationMs: 10,
success: true,
})
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
const error = new Error('Execution failed') as any
error.executionResult = {
success: false,
output: { partial: 'data' },
logs: context.blockLogs,
metadata: context.metadata,
}
throw error
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
try {
await engine.run('start')
expect.fail('Should have thrown')
} catch (error: any) {
expect(error.executionResult).toBeDefined()
expect(error.executionResult.metadata.endTime).toBeDefined()
expect(error.executionResult.metadata.duration).toBeDefined()
}
})
it('should prefer cancellation status over error when both occur', async () => {
const abortController = new AbortController()
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
const dag = createMockDAG([startNode, errorNode])
const context = createMockContext({ abortSignal: abortController.signal })
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
abortController.abort()
throw new Error('Node error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
const result = await engine.run('start')
expect(result.status).toBe('cancelled')
expect(result.success).toBe(false)
})
it('should stop loop iteration when error occurs in loop body', async () => {
const loopStartNode = createMockNode('loop-start', 'loop_sentinel')
loopStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId: 'loop1' }
const loopBodyNode = createMockNode('loop-body', 'function')
loopBodyNode.metadata = { isLoopNode: true, loopId: 'loop1' }
const loopEndNode = createMockNode('loop-end', 'loop_sentinel')
loopEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId: 'loop1' }
const afterLoopNode = createMockNode('after-loop', 'function')
loopStartNode.outgoingEdges.set('edge1', { target: 'loop-body' })
loopBodyNode.outgoingEdges.set('edge2', { target: 'loop-end' })
loopEndNode.outgoingEdges.set('loop_continue', {
target: 'loop-start',
sourceHandle: 'loop_continue',
})
loopEndNode.outgoingEdges.set('loop_complete', {
target: 'after-loop',
sourceHandle: 'loop_complete',
})
const dag = createMockDAG([loopStartNode, loopBodyNode, loopEndNode, afterLoopNode])
const context = createMockContext()
let iterationCount = 0
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'loop-start') return ['loop-body']
if (node.id === 'loop-body') return ['loop-end']
if (node.id === 'loop-end') {
iterationCount++
if (iterationCount < 5) return ['loop-start']
return ['after-loop']
}
return []
})
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'loop-body' && iterationCount >= 2) {
throw new Error('Loop body error on iteration 3')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
await expect(engine.run('loop-start')).rejects.toThrow('Loop body error on iteration 3')
expect(iterationCount).toBeLessThanOrEqual(3)
})
it('should handle error that is not an Error instance', async () => {
const startNode = createMockNode('start', 'starter')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
const dag = createMockDAG([startNode, errorNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['error-node']
return []
})
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'error-node') {
throw 'String error message'
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
await expect(engine.run('start')).rejects.toThrow('String error message')
})
it('should preserve partial output when error occurs after some blocks complete', async () => {
const startNode = createMockNode('start', 'starter')
const successNode = createMockNode('success', 'function')
const errorNode = createMockNode('error-node', 'function')
startNode.outgoingEdges.set('edge1', { target: 'success' })
successNode.outgoingEdges.set('edge2', { target: 'error-node' })
const dag = createMockDAG([startNode, successNode, errorNode])
const context = createMockContext()
const edgeManager = createMockEdgeManager((node) => {
if (node.id === 'start') return ['success']
if (node.id === 'success') return ['error-node']
return []
})
const nodeOrchestrator = {
executionCount: 0,
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
if (nodeId === 'success') {
return { nodeId, output: { successData: 'preserved' }, isFinalOutput: false }
}
if (nodeId === 'error-node') {
throw new Error('Late error')
}
return { nodeId, output: {}, isFinalOutput: false }
}),
handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
try {
await engine.run('start')
expect.fail('Should have thrown')
} catch (error: any) {
// Verify the error was thrown
expect(error.message).toBe('Late error')
// The partial output should be available in executionResult if attached
if (error.executionResult) {
expect(error.executionResult.output).toBeDefined()
}
}
})
})
describe('Cancellation flag behavior', () => { describe('Cancellation flag behavior', () => {
it('should set cancelledFlag when abort signal fires', async () => { it('should set cancelledFlag when abort signal fires', async () => {
const abortController = new AbortController() const abortController = new AbortController()

View File

@@ -25,6 +25,8 @@ export class ExecutionEngine {
private pausedBlocks: Map<string, PauseMetadata> = new Map() private pausedBlocks: Map<string, PauseMetadata> = new Map()
private allowResumeTriggers: boolean private allowResumeTriggers: boolean
private cancelledFlag = false private cancelledFlag = false
private errorFlag = false
private executionError: Error | null = null
private lastCancellationCheck = 0 private lastCancellationCheck = 0
private readonly useRedisCancellation: boolean private readonly useRedisCancellation: boolean
private readonly CANCELLATION_CHECK_INTERVAL_MS = 500 private readonly CANCELLATION_CHECK_INTERVAL_MS = 500
@@ -103,7 +105,7 @@ export class ExecutionEngine {
this.initializeQueue(triggerBlockId) this.initializeQueue(triggerBlockId)
while (this.hasWork()) { while (this.hasWork()) {
if (await this.checkCancellation()) { if ((await this.checkCancellation()) || this.errorFlag) {
break break
} }
await this.processQueue() await this.processQueue()
@@ -113,6 +115,11 @@ export class ExecutionEngine {
await this.waitForAllExecutions() await this.waitForAllExecutions()
} }
// Rethrow the captured error so it's handled by the catch block
if (this.errorFlag && this.executionError) {
throw this.executionError
}
if (this.pausedBlocks.size > 0) { if (this.pausedBlocks.size > 0) {
return this.buildPausedResult(startTime) return this.buildPausedResult(startTime)
} }
@@ -196,11 +203,17 @@ export class ExecutionEngine {
} }
private trackExecution(promise: Promise<void>): void { private trackExecution(promise: Promise<void>): void {
this.executing.add(promise) const trackedPromise = promise
promise.catch(() => {}) .catch((error) => {
promise.finally(() => { if (!this.errorFlag) {
this.executing.delete(promise) this.errorFlag = true
this.executionError = error instanceof Error ? error : new Error(String(error))
}
}) })
.finally(() => {
this.executing.delete(trackedPromise)
})
this.executing.add(trackedPromise)
} }
private async waitForAnyExecution(): Promise<void> { private async waitForAnyExecution(): Promise<void> {
@@ -315,7 +328,7 @@ export class ExecutionEngine {
private async processQueue(): Promise<void> { private async processQueue(): Promise<void> {
while (this.readyQueue.length > 0) { while (this.readyQueue.length > 0) {
if (await this.checkCancellation()) { if ((await this.checkCancellation()) || this.errorFlag) {
break break
} }
const nodeId = this.dequeue() const nodeId = this.dequeue()
@@ -324,7 +337,7 @@ export class ExecutionEngine {
this.trackExecution(promise) this.trackExecution(promise)
} }
if (this.executing.size > 0 && !this.cancelledFlag) { if (this.executing.size > 0 && !this.cancelledFlag && !this.errorFlag) {
await this.waitForAnyExecution() await this.waitForAnyExecution()
} }
} }

View File

@@ -305,7 +305,7 @@ export class AgentBlockHandler implements BlockHandler {
base.executeFunction = async (callParams: Record<string, any>) => { base.executeFunction = async (callParams: Record<string, any>) => {
const mergedParams = mergeToolParameters(userProvidedParams, callParams) const mergedParams = mergeToolParameters(userProvidedParams, callParams)
const { blockData, blockNameMapping } = collectBlockData(ctx) const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
const result = await executeTool( const result = await executeTool(
'function_execute', 'function_execute',
@@ -317,6 +317,7 @@ export class AgentBlockHandler implements BlockHandler {
workflowVariables: ctx.workflowVariables || {}, workflowVariables: ctx.workflowVariables || {},
blockData, blockData,
blockNameMapping, blockNameMapping,
blockOutputSchemas,
isCustomTool: true, isCustomTool: true,
_context: { _context: {
workflowId: ctx.workflowId, workflowId: ctx.workflowId,

View File

@@ -26,7 +26,7 @@ export async function evaluateConditionExpression(
const contextSetup = `const context = ${JSON.stringify(evalContext)};` const contextSetup = `const context = ${JSON.stringify(evalContext)};`
const code = `${contextSetup}\nreturn Boolean(${conditionExpression})` const code = `${contextSetup}\nreturn Boolean(${conditionExpression})`
const { blockData, blockNameMapping } = collectBlockData(ctx) const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
const result = await executeTool( const result = await executeTool(
'function_execute', 'function_execute',
@@ -37,6 +37,7 @@ export async function evaluateConditionExpression(
workflowVariables: ctx.workflowVariables || {}, workflowVariables: ctx.workflowVariables || {},
blockData, blockData,
blockNameMapping, blockNameMapping,
blockOutputSchemas,
_context: { _context: {
workflowId: ctx.workflowId, workflowId: ctx.workflowId,
workspaceId: ctx.workspaceId, workspaceId: ctx.workspaceId,

View File

@@ -75,7 +75,12 @@ describe('FunctionBlockHandler', () => {
workflowVariables: {}, workflowVariables: {},
blockData: {}, blockData: {},
blockNameMapping: {}, blockNameMapping: {},
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId }, blockOutputSchemas: {},
_context: {
workflowId: mockContext.workflowId,
workspaceId: mockContext.workspaceId,
isDeployedContext: mockContext.isDeployedContext,
},
} }
const expectedOutput: any = { result: 'Success' } const expectedOutput: any = { result: 'Success' }
@@ -84,8 +89,8 @@ describe('FunctionBlockHandler', () => {
expect(mockExecuteTool).toHaveBeenCalledWith( expect(mockExecuteTool).toHaveBeenCalledWith(
'function_execute', 'function_execute',
expectedToolParams, expectedToolParams,
false, // skipPostProcess false,
mockContext // execution context mockContext
) )
expect(result).toEqual(expectedOutput) expect(result).toEqual(expectedOutput)
}) })
@@ -107,7 +112,12 @@ describe('FunctionBlockHandler', () => {
workflowVariables: {}, workflowVariables: {},
blockData: {}, blockData: {},
blockNameMapping: {}, blockNameMapping: {},
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId }, blockOutputSchemas: {},
_context: {
workflowId: mockContext.workflowId,
workspaceId: mockContext.workspaceId,
isDeployedContext: mockContext.isDeployedContext,
},
} }
const expectedOutput: any = { result: 'Success' } const expectedOutput: any = { result: 'Success' }
@@ -116,8 +126,8 @@ describe('FunctionBlockHandler', () => {
expect(mockExecuteTool).toHaveBeenCalledWith( expect(mockExecuteTool).toHaveBeenCalledWith(
'function_execute', 'function_execute',
expectedToolParams, expectedToolParams,
false, // skipPostProcess false,
mockContext // execution context mockContext
) )
expect(result).toEqual(expectedOutput) expect(result).toEqual(expectedOutput)
}) })
@@ -132,7 +142,12 @@ describe('FunctionBlockHandler', () => {
workflowVariables: {}, workflowVariables: {},
blockData: {}, blockData: {},
blockNameMapping: {}, blockNameMapping: {},
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId }, blockOutputSchemas: {},
_context: {
workflowId: mockContext.workflowId,
workspaceId: mockContext.workspaceId,
isDeployedContext: mockContext.isDeployedContext,
},
} }
await handler.execute(mockContext, mockBlock, inputs) await handler.execute(mockContext, mockBlock, inputs)

View File

@@ -23,7 +23,7 @@ export class FunctionBlockHandler implements BlockHandler {
? inputs.code.map((c: { content: string }) => c.content).join('\n') ? inputs.code.map((c: { content: string }) => c.content).join('\n')
: inputs.code : inputs.code
const { blockData, blockNameMapping } = collectBlockData(ctx) const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
const result = await executeTool( const result = await executeTool(
'function_execute', 'function_execute',
@@ -35,6 +35,7 @@ export class FunctionBlockHandler implements BlockHandler {
workflowVariables: ctx.workflowVariables || {}, workflowVariables: ctx.workflowVariables || {},
blockData, blockData,
blockNameMapping, blockNameMapping,
blockOutputSchemas,
_context: { _context: {
workflowId: ctx.workflowId, workflowId: ctx.workflowId,
workspaceId: ctx.workspaceId, workspaceId: ctx.workspaceId,

View File

@@ -1,7 +1,7 @@
import '@sim/testing/mocks/executor' import '@sim/testing/mocks/executor'
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest' import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { generateRouterPrompt } from '@/blocks/blocks/router' import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
import { BlockType } from '@/executor/constants' import { BlockType } from '@/executor/constants'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler' import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import type { ExecutionContext } from '@/executor/types' import type { ExecutionContext } from '@/executor/types'
@@ -9,6 +9,7 @@ import { getProviderFromModel } from '@/providers/utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types' import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
const mockGenerateRouterPrompt = generateRouterPrompt as Mock const mockGenerateRouterPrompt = generateRouterPrompt as Mock
const mockGenerateRouterV2Prompt = generateRouterV2Prompt as Mock
const mockGetProviderFromModel = getProviderFromModel as Mock const mockGetProviderFromModel = getProviderFromModel as Mock
const mockFetch = global.fetch as unknown as Mock const mockFetch = global.fetch as unknown as Mock
@@ -44,7 +45,7 @@ describe('RouterBlockHandler', () => {
metadata: { id: BlockType.ROUTER, name: 'Test Router' }, metadata: { id: BlockType.ROUTER, name: 'Test Router' },
position: { x: 50, y: 50 }, position: { x: 50, y: 50 },
config: { tool: BlockType.ROUTER, params: {} }, config: { tool: BlockType.ROUTER, params: {} },
inputs: { prompt: 'string', model: 'string' }, // Using ParamType strings inputs: { prompt: 'string', model: 'string' },
outputs: {}, outputs: {},
enabled: true, enabled: true,
} }
@@ -72,14 +73,11 @@ describe('RouterBlockHandler', () => {
workflow: mockWorkflow as SerializedWorkflow, workflow: mockWorkflow as SerializedWorkflow,
} }
// Reset mocks using vi
vi.clearAllMocks() vi.clearAllMocks()
// Default mock implementations
mockGetProviderFromModel.mockReturnValue('openai') mockGetProviderFromModel.mockReturnValue('openai')
mockGenerateRouterPrompt.mockReturnValue('Generated System Prompt') mockGenerateRouterPrompt.mockReturnValue('Generated System Prompt')
// Set up fetch mock to return a successful response
mockFetch.mockImplementation(() => { mockFetch.mockImplementation(() => {
return Promise.resolve({ return Promise.resolve({
ok: true, ok: true,
@@ -147,7 +145,6 @@ describe('RouterBlockHandler', () => {
}) })
) )
// Verify the request body contains the expected data
const fetchCallArgs = mockFetch.mock.calls[0] const fetchCallArgs = mockFetch.mock.calls[0]
const requestBody = JSON.parse(fetchCallArgs[1].body) const requestBody = JSON.parse(fetchCallArgs[1].body)
expect(requestBody).toMatchObject({ expect(requestBody).toMatchObject({
@@ -180,7 +177,6 @@ describe('RouterBlockHandler', () => {
const inputs = { prompt: 'Test' } const inputs = { prompt: 'Test' }
mockContext.workflow!.blocks = [mockBlock, mockTargetBlock2] mockContext.workflow!.blocks = [mockBlock, mockTargetBlock2]
// Expect execute to throw because getTargetBlocks (called internally) will throw
await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow( await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
'Target block target-block-1 not found' 'Target block target-block-1 not found'
) )
@@ -190,7 +186,6 @@ describe('RouterBlockHandler', () => {
it('should throw error if LLM response is not a valid target block ID', async () => { it('should throw error if LLM response is not a valid target block ID', async () => {
const inputs = { prompt: 'Test', apiKey: 'test-api-key' } const inputs = { prompt: 'Test', apiKey: 'test-api-key' }
// Override fetch mock to return an invalid block ID
mockFetch.mockImplementationOnce(() => { mockFetch.mockImplementationOnce(() => {
return Promise.resolve({ return Promise.resolve({
ok: true, ok: true,
@@ -228,7 +223,6 @@ describe('RouterBlockHandler', () => {
it('should handle server error responses', async () => { it('should handle server error responses', async () => {
const inputs = { prompt: 'Test error handling.', apiKey: 'test-api-key' } const inputs = { prompt: 'Test error handling.', apiKey: 'test-api-key' }
// Override fetch mock to return an error
mockFetch.mockImplementationOnce(() => { mockFetch.mockImplementationOnce(() => {
return Promise.resolve({ return Promise.resolve({
ok: false, ok: false,
@@ -276,13 +270,12 @@ describe('RouterBlockHandler', () => {
mockGetProviderFromModel.mockReturnValue('vertex') mockGetProviderFromModel.mockReturnValue('vertex')
// Mock the database query for Vertex credential
const mockDb = await import('@sim/db') const mockDb = await import('@sim/db')
const mockAccount = { const mockAccount = {
id: 'test-vertex-credential-id', id: 'test-vertex-credential-id',
accessToken: 'mock-access-token', accessToken: 'mock-access-token',
refreshToken: 'mock-refresh-token', refreshToken: 'mock-refresh-token',
expiresAt: new Date(Date.now() + 3600000), // 1 hour from now expiresAt: new Date(Date.now() + 3600000),
} }
vi.spyOn(mockDb.db.query.account, 'findFirst').mockResolvedValue(mockAccount as any) vi.spyOn(mockDb.db.query.account, 'findFirst').mockResolvedValue(mockAccount as any)
@@ -300,3 +293,287 @@ describe('RouterBlockHandler', () => {
expect(requestBody.apiKey).toBe('mock-access-token') expect(requestBody.apiKey).toBe('mock-access-token')
}) })
}) })
// Tests for the router_v2 path of RouterBlockHandler: structured JSON
// responses (route + reasoning) via responseFormat, NO_MATCH handling,
// invalid-route rejection, array-vs-JSON-string route inputs, empty route
// lists, and the plain-text fallback when the LLM ignores responseFormat.
describe('RouterBlockHandler V2', () => {
let handler: RouterBlockHandler
let mockRouterV2Block: SerializedBlock
let mockContext: ExecutionContext
let mockWorkflow: Partial<SerializedWorkflow>
let mockTargetBlock1: SerializedBlock
let mockTargetBlock2: SerializedBlock
beforeEach(() => {
// Two candidate agent targets the router can select between.
mockTargetBlock1 = {
id: 'target-block-1',
metadata: { id: 'agent', name: 'Support Agent' },
position: { x: 100, y: 100 },
config: { tool: 'agent', params: {} },
inputs: {},
outputs: {},
enabled: true,
}
mockTargetBlock2 = {
id: 'target-block-2',
metadata: { id: 'agent', name: 'Sales Agent' },
position: { x: 100, y: 150 },
config: { tool: 'agent', params: {} },
inputs: {},
outputs: {},
enabled: true,
}
mockRouterV2Block = {
id: 'router-v2-block-1',
metadata: { id: BlockType.ROUTER_V2, name: 'Test Router V2' },
position: { x: 50, y: 50 },
config: { tool: BlockType.ROUTER_V2, params: {} },
inputs: {},
outputs: {},
enabled: true,
}
// Connections map route IDs to targets via 'router-route-*' handles.
mockWorkflow = {
blocks: [mockRouterV2Block, mockTargetBlock1, mockTargetBlock2],
connections: [
{
source: mockRouterV2Block.id,
target: mockTargetBlock1.id,
sourceHandle: 'router-route-support',
},
{
source: mockRouterV2Block.id,
target: mockTargetBlock2.id,
sourceHandle: 'router-route-sales',
},
],
}
handler = new RouterBlockHandler({})
mockContext = {
workflowId: 'test-workflow-id',
blockStates: new Map(),
blockLogs: [],
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopExecutions: new Map(),
completedLoops: new Set(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
workflow: mockWorkflow as SerializedWorkflow,
}
vi.clearAllMocks()
mockGetProviderFromModel.mockReturnValue('openai')
mockGenerateRouterV2Prompt.mockReturnValue('Generated V2 System Prompt')
})
// canHandle must accept blocks tagged with the ROUTER_V2 tool type.
it('should handle router_v2 blocks', () => {
expect(handler.canHandle(mockRouterV2Block)).toBe(true)
})
// Happy path: structured JSON content yields selectedRoute, reasoning, and
// a resolved selectedPath pointing at the connected target block.
it('should execute router V2 and return reasoning', async () => {
const inputs = {
context: 'I need help with a billing issue',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([
{ id: 'route-support', title: 'Support', value: 'Customer support inquiries' },
{ id: 'route-sales', title: 'Sales', value: 'Sales and pricing questions' },
]),
}
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({
route: 'route-support',
reasoning: 'The user mentioned a billing issue which is a customer support matter.',
}),
model: 'gpt-4o',
tokens: { input: 150, output: 25, total: 175 },
}),
})
})
const result = await handler.execute(mockContext, mockRouterV2Block, inputs)
expect(result).toMatchObject({
context: 'I need help with a billing issue',
model: 'gpt-4o',
selectedRoute: 'route-support',
reasoning: 'The user mentioned a billing issue which is a customer support matter.',
selectedPath: {
blockId: 'target-block-1',
blockType: 'agent',
blockTitle: 'Support Agent',
},
})
})
// The handler must send a strict JSON-schema responseFormat so the provider
// returns { route, reasoning } rather than free text.
it('should include responseFormat in provider request', async () => {
const inputs = {
context: 'Test context',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description 1' }]),
}
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({ route: 'route-1', reasoning: 'Test reasoning' }),
model: 'gpt-4o',
tokens: { input: 100, output: 20, total: 120 },
}),
})
})
await handler.execute(mockContext, mockRouterV2Block, inputs)
// Inspect the raw request body the handler POSTed to the provider.
const fetchCallArgs = mockFetch.mock.calls[0]
const requestBody = JSON.parse(fetchCallArgs[1].body)
expect(requestBody.responseFormat).toEqual({
name: 'router_response',
schema: {
type: 'object',
properties: {
route: {
type: 'string',
description: 'The selected route ID or NO_MATCH',
},
reasoning: {
type: 'string',
description: 'Brief explanation of why this route was chosen',
},
},
required: ['route', 'reasoning'],
additionalProperties: false,
},
strict: true,
})
})
// A NO_MATCH decision surfaces as a thrown error that includes the LLM's
// reasoning text.
it('should handle NO_MATCH response with reasoning', async () => {
const inputs = {
context: 'Random unrelated query',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Specific topic' }]),
}
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({
route: 'NO_MATCH',
reasoning: 'The query does not relate to any available route.',
}),
model: 'gpt-4o',
tokens: { input: 100, output: 20, total: 120 },
}),
})
})
await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
'Router could not determine a matching route: The query does not relate to any available route.'
)
})
// A route ID that is not in the configured route list must be rejected.
it('should throw error for invalid route ID in response', async () => {
const inputs = {
context: 'Test context',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
}
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({ route: 'invalid-route', reasoning: 'Some reasoning' }),
model: 'gpt-4o',
tokens: { input: 100, output: 20, total: 120 },
}),
})
})
await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
/Router could not determine a valid route/
)
})
// 'routes' may arrive pre-parsed as an array (not a JSON string); the
// handler should accept both shapes.
it('should handle routes passed as array instead of JSON string', async () => {
const inputs = {
context: 'Test context',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: [{ id: 'route-1', title: 'Route 1', value: 'Description' }],
}
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: JSON.stringify({ route: 'route-1', reasoning: 'Matched route 1' }),
model: 'gpt-4o',
tokens: { input: 100, output: 20, total: 120 },
}),
})
})
const result = await handler.execute(mockContext, mockRouterV2Block, inputs)
expect(result.selectedRoute).toBe('route-1')
expect(result.reasoning).toBe('Matched route 1')
})
// An empty route list fails before any provider call is made.
it('should throw error when no routes are defined', async () => {
const inputs = {
context: 'Test context',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: '[]',
}
await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
'No routes defined for router'
)
})
// If the provider ignores responseFormat and returns a bare route ID, the
// handler falls back to treating the raw content as the route, with empty
// reasoning.
it('should handle fallback when JSON parsing fails', async () => {
const inputs = {
context: 'Test context',
model: 'gpt-4o',
apiKey: 'test-api-key',
routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
}
mockFetch.mockImplementationOnce(() => {
return Promise.resolve({
ok: true,
json: () =>
Promise.resolve({
content: 'route-1',
model: 'gpt-4o',
tokens: { input: 100, output: 5, total: 105 },
}),
})
})
const result = await handler.execute(mockContext, mockRouterV2Block, inputs)
expect(result.selectedRoute).toBe('route-1')
expect(result.reasoning).toBe('')
})
})

View File

@@ -238,6 +238,25 @@ export class RouterBlockHandler implements BlockHandler {
apiKey: finalApiKey, apiKey: finalApiKey,
workflowId: ctx.workflowId, workflowId: ctx.workflowId,
workspaceId: ctx.workspaceId, workspaceId: ctx.workspaceId,
responseFormat: {
name: 'router_response',
schema: {
type: 'object',
properties: {
route: {
type: 'string',
description: 'The selected route ID or NO_MATCH',
},
reasoning: {
type: 'string',
description: 'Brief explanation of why this route was chosen',
},
},
required: ['route', 'reasoning'],
additionalProperties: false,
},
strict: true,
},
} }
if (providerId === 'vertex') { if (providerId === 'vertex') {
@@ -277,16 +296,31 @@ export class RouterBlockHandler implements BlockHandler {
const result = await response.json() const result = await response.json()
const chosenRouteId = result.content.trim() let chosenRouteId: string
let reasoning = ''
try {
const parsedResponse = JSON.parse(result.content)
chosenRouteId = parsedResponse.route?.trim() || ''
reasoning = parsedResponse.reasoning || ''
} catch (_parseError) {
logger.error('Router response was not valid JSON despite responseFormat', {
content: result.content,
})
chosenRouteId = result.content.trim()
}
if (chosenRouteId === 'NO_MATCH' || chosenRouteId.toUpperCase() === 'NO_MATCH') { if (chosenRouteId === 'NO_MATCH' || chosenRouteId.toUpperCase() === 'NO_MATCH') {
logger.info('Router determined no route matches the context, routing to error path') logger.info('Router determined no route matches the context, routing to error path')
throw new Error('Router could not determine a matching route for the given context') throw new Error(
reasoning
? `Router could not determine a matching route: ${reasoning}`
: 'Router could not determine a matching route for the given context'
)
} }
const chosenRoute = routes.find((r) => r.id === chosenRouteId) const chosenRoute = routes.find((r) => r.id === chosenRouteId)
// Throw error if LLM returns invalid route ID - this routes through error path
if (!chosenRoute) { if (!chosenRoute) {
const availableRoutes = routes.map((r) => ({ id: r.id, title: r.title })) const availableRoutes = routes.map((r) => ({ id: r.id, title: r.title }))
logger.error( logger.error(
@@ -298,7 +332,6 @@ export class RouterBlockHandler implements BlockHandler {
) )
} }
// Find the target block connected to this route's handle
const connection = ctx.workflow?.connections.find( const connection = ctx.workflow?.connections.find(
(conn) => conn.source === block.id && conn.sourceHandle === `router-${chosenRoute.id}` (conn) => conn.source === block.id && conn.sourceHandle === `router-${chosenRoute.id}`
) )
@@ -334,6 +367,7 @@ export class RouterBlockHandler implements BlockHandler {
total: cost.total, total: cost.total,
}, },
selectedRoute: chosenRoute.id, selectedRoute: chosenRoute.id,
reasoning,
selectedPath: targetBlock selectedPath: targetBlock
? { ? {
blockId: targetBlock.id, blockId: targetBlock.id,
@@ -353,7 +387,7 @@ export class RouterBlockHandler implements BlockHandler {
} }
/** /**
* Parse routes from input (can be JSON string or array). * Parse routes from input (can be JSON string or array)
*/ */
private parseRoutes(input: any): RouteDefinition[] { private parseRoutes(input: any): RouteDefinition[] {
try { try {

View File

@@ -204,26 +204,21 @@ describe('WorkflowBlockHandler', () => {
}) })
}) })
it('should map failed child output correctly', () => { it('should throw error for failed child output so BlockExecutor can check error port', () => {
const childResult = { const childResult = {
success: false, success: false,
error: 'Child workflow failed', error: 'Child workflow failed',
} }
const result = (handler as any).mapChildOutputToParent( expect(() =>
childResult, (handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
'child-id', ).toThrow('Error in child workflow "Child Workflow": Child workflow failed')
'Child Workflow',
100
)
expect(result).toEqual({ try {
success: false, ;(handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
childWorkflowName: 'Child Workflow', } catch (error: any) {
result: {}, expect(error.childTraceSpans).toEqual([])
error: 'Child workflow failed', }
childTraceSpans: [],
})
}) })
it('should handle nested response structures', () => { it('should handle nested response structures', () => {

View File

@@ -144,6 +144,11 @@ export class WorkflowBlockHandler implements BlockHandler {
const workflowMetadata = workflows[workflowId] const workflowMetadata = workflows[workflowId]
const childWorkflowName = workflowMetadata?.name || workflowId const childWorkflowName = workflowMetadata?.name || workflowId
const originalError = error.message || 'Unknown error'
const wrappedError = new Error(
`Error in child workflow "${childWorkflowName}": ${originalError}`
)
if (error.executionResult?.logs) { if (error.executionResult?.logs) {
const executionResult = error.executionResult as ExecutionResult const executionResult = error.executionResult as ExecutionResult
@@ -159,28 +164,12 @@ export class WorkflowBlockHandler implements BlockHandler {
) )
logger.info(`Captured ${childTraceSpans.length} child trace spans from failed execution`) logger.info(`Captured ${childTraceSpans.length} child trace spans from failed execution`)
;(wrappedError as any).childTraceSpans = childTraceSpans
return { } else if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) {
success: false, ;(wrappedError as any).childTraceSpans = error.childTraceSpans
childWorkflowName,
result: {},
error: error.message || 'Child workflow execution failed',
childTraceSpans: childTraceSpans,
} as Record<string, any>
} }
if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) { throw wrappedError
return {
success: false,
childWorkflowName,
result: {},
error: error.message || 'Child workflow execution failed',
childTraceSpans: error.childTraceSpans,
} as Record<string, any>
}
const originalError = error.message || 'Unknown error'
throw new Error(`Error in child workflow "${childWorkflowName}": ${originalError}`)
} }
} }
@@ -452,17 +441,13 @@ export class WorkflowBlockHandler implements BlockHandler {
if (!success) { if (!success) {
logger.warn(`Child workflow ${childWorkflowName} failed`) logger.warn(`Child workflow ${childWorkflowName} failed`)
// Return failure with child trace spans so they can be displayed const error = new Error(
return { `Error in child workflow "${childWorkflowName}": ${childResult.error || 'Child workflow execution failed'}`
success: false, )
childWorkflowName, ;(error as any).childTraceSpans = childTraceSpans || []
result, throw error
error: childResult.error || 'Child workflow execution failed',
childTraceSpans: childTraceSpans || [],
} as Record<string, any>
} }
// Success case
return { return {
success: true, success: true,
childWorkflowName, childWorkflowName,

View File

@@ -1,24 +1,43 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants' import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types' import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'
export interface BlockDataCollection { export interface BlockDataCollection {
blockData: Record<string, any> blockData: Record<string, unknown>
blockNameMapping: Record<string, string> blockNameMapping: Record<string, string>
blockOutputSchemas: Record<string, OutputSchema>
} }
export function collectBlockData(ctx: ExecutionContext): BlockDataCollection { export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
const blockData: Record<string, any> = {} const blockData: Record<string, unknown> = {}
const blockNameMapping: Record<string, string> = {} const blockNameMapping: Record<string, string> = {}
const blockOutputSchemas: Record<string, OutputSchema> = {}
for (const [id, state] of ctx.blockStates.entries()) { for (const [id, state] of ctx.blockStates.entries()) {
if (state.output !== undefined) { if (state.output !== undefined) {
blockData[id] = state.output blockData[id] = state.output
}
const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id) const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
if (workflowBlock?.metadata?.name) { if (!workflowBlock) continue
if (workflowBlock.metadata?.name) {
blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
} }
const blockType = workflowBlock.metadata?.id
if (blockType) {
const params = workflowBlock.config?.params as Record<string, unknown> | undefined
const subBlocks = params
? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
: undefined
const schema = getBlockOutputs(blockType, subBlocks)
if (schema && Object.keys(schema).length > 0) {
blockOutputSchemas[id] = schema
}
} }
} }
return { blockData, blockNameMapping } return { blockData, blockNameMapping, blockOutputSchemas }
} }

View File

@@ -0,0 +1,255 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
import {
type BlockReferenceContext,
InvalidFieldError,
resolveBlockReference,
} from './block-reference'
describe('resolveBlockReference', () => {
// Builds a baseline resolution context for tests; individual fields can be
// overridden per test case via the spread of `overrides`.
const createContext = (overrides: Partial<BlockReferenceContext> = {}): BlockReferenceContext => {
  const defaults: BlockReferenceContext = {
    blockNameMapping: { start: 'block-1', agent: 'block-2' },
    blockData: {},
    blockOutputSchemas: {},
  }
  return { ...defaults, ...overrides }
}
describe('block name resolution', () => {
  it('should return undefined when block name does not exist', () => {
    // Unknown block names resolve to undefined rather than throwing.
    const ctx = createContext()
    const result = resolveBlockReference('unknown', ['field'], ctx)
    expect(result).toBeUndefined()
  })

  it('should normalize block name before lookup', () => {
    // Mapping key is lowercase 'myblock'; lookup of 'MyBlock' must still hit it.
    const ctx = createContext({
      blockNameMapping: { myblock: 'block-1' },
      blockData: { 'block-1': { value: 'test' } },
    })
    const result = resolveBlockReference('MyBlock', ['value'], ctx)
    expect(result).toEqual({ value: 'test', blockId: 'block-1' })
  })

  it('should handle block names with spaces', () => {
    // 'My Block' normalizes to the stored key 'myblock'.
    const ctx = createContext({
      blockNameMapping: { myblock: 'block-1' },
      blockData: { 'block-1': { value: 'test' } },
    })
    const result = resolveBlockReference('My Block', ['value'], ctx)
    expect(result).toEqual({ value: 'test', blockId: 'block-1' })
  })
})
describe('field resolution', () => {
  it('should return entire block output when no path specified', () => {
    // Empty path returns the whole output object for the block.
    const ctx = createContext({
      blockData: { 'block-1': { input: 'hello', other: 'data' } },
    })
    const result = resolveBlockReference('start', [], ctx)
    expect(result).toEqual({
      value: { input: 'hello', other: 'data' },
      blockId: 'block-1',
    })
  })

  it('should resolve simple field path', () => {
    const ctx = createContext({
      blockData: { 'block-1': { input: 'hello' } },
    })
    const result = resolveBlockReference('start', ['input'], ctx)
    expect(result).toEqual({ value: 'hello', blockId: 'block-1' })
  })

  it('should resolve nested field path', () => {
    // Path parts descend one object level per segment.
    const ctx = createContext({
      blockData: { 'block-1': { response: { data: { name: 'test' } } } },
    })
    const result = resolveBlockReference('start', ['response', 'data', 'name'], ctx)
    expect(result).toEqual({ value: 'test', blockId: 'block-1' })
  })

  it('should resolve array index path', () => {
    // Numeric path segments index into arrays.
    const ctx = createContext({
      blockData: { 'block-1': { items: ['a', 'b', 'c'] } },
    })
    const result = resolveBlockReference('start', ['items', '1'], ctx)
    expect(result).toEqual({ value: 'b', blockId: 'block-1' })
  })

  it('should return undefined value when field exists but has no value', () => {
    // Field is declared in the schema, so no error — value is just undefined.
    const ctx = createContext({
      blockData: { 'block-1': { input: undefined } },
      blockOutputSchemas: {
        'block-1': { input: { type: 'string' } },
      },
    })
    const result = resolveBlockReference('start', ['input'], ctx)
    expect(result).toEqual({ value: undefined, blockId: 'block-1' })
  })

  it('should return null value when field has null', () => {
    // null is a real value and is returned as-is (distinct from undefined).
    const ctx = createContext({
      blockData: { 'block-1': { input: null } },
    })
    const result = resolveBlockReference('start', ['input'], ctx)
    expect(result).toEqual({ value: null, blockId: 'block-1' })
  })
})
describe('schema validation', () => {
  it('should throw InvalidFieldError when field not in schema', () => {
    // With a schema present, referencing an undeclared field is an error.
    const ctx = createContext({
      blockData: { 'block-1': { existing: 'value' } },
      blockOutputSchemas: {
        'block-1': {
          input: { type: 'string' },
          conversationId: { type: 'string' },
        },
      },
    })
    expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
    expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(
      /"invalid" doesn't exist on block "start"/
    )
  })

  it('should include available fields in error message', () => {
    // The thrown error carries the schema's top-level field names for diagnostics.
    const ctx = createContext({
      blockData: { 'block-1': {} },
      blockOutputSchemas: {
        'block-1': {
          input: { type: 'string' },
          conversationId: { type: 'string' },
          files: { type: 'files' },
        },
      },
    })
    try {
      resolveBlockReference('start', ['typo'], ctx)
      expect.fail('Should have thrown')
    } catch (error) {
      expect(error).toBeInstanceOf(InvalidFieldError)
      const fieldError = error as InvalidFieldError
      expect(fieldError.availableFields).toContain('input')
      expect(fieldError.availableFields).toContain('conversationId')
      expect(fieldError.availableFields).toContain('files')
    }
  })

  it('should allow valid field even when value is undefined', () => {
    // Declared-but-absent fields resolve to undefined without throwing.
    const ctx = createContext({
      blockData: { 'block-1': {} },
      blockOutputSchemas: {
        'block-1': { input: { type: 'string' } },
      },
    })
    const result = resolveBlockReference('start', ['input'], ctx)
    expect(result).toEqual({ value: undefined, blockId: 'block-1' })
  })

  it('should validate path when block has no output yet', () => {
    // Validation runs against the schema even before the block has executed.
    const ctx = createContext({
      blockData: {},
      blockOutputSchemas: {
        'block-1': { input: { type: 'string' } },
      },
    })
    expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
  })

  it('should return undefined for valid field when block has no output', () => {
    const ctx = createContext({
      blockData: {},
      blockOutputSchemas: {
        'block-1': { input: { type: 'string' } },
      },
    })
    const result = resolveBlockReference('start', ['input'], ctx)
    expect(result).toEqual({ value: undefined, blockId: 'block-1' })
  })
})
describe('without schema (pass-through mode)', () => {
  it('should return undefined value without throwing when no schema', () => {
    // No schema registered for the block: unknown fields resolve to undefined
    // silently instead of raising InvalidFieldError.
    const ctx = createContext({
      blockData: { 'block-1': { existing: 'value' } },
    })
    const result = resolveBlockReference('start', ['missing'], ctx)
    expect(result).toEqual({ value: undefined, blockId: 'block-1' })
  })
})
describe('file type handling', () => {
  it('should allow file property access', () => {
    // Accessing a whitelisted file property ('name') through an index is valid.
    const ctx = createContext({
      blockData: {
        'block-1': {
          files: [{ name: 'test.txt', url: 'http://example.com/file' }],
        },
      },
      blockOutputSchemas: {
        'block-1': { files: { type: 'files' } },
      },
    })
    const result = resolveBlockReference('start', ['files', '0', 'name'], ctx)
    expect(result).toEqual({ value: 'test.txt', blockId: 'block-1' })
  })

  it('should validate file property names', () => {
    // Properties outside the file-property whitelist are rejected.
    const ctx = createContext({
      blockData: { 'block-1': { files: [] } },
      blockOutputSchemas: {
        'block-1': { files: { type: 'files' } },
      },
    })
    expect(() => resolveBlockReference('start', ['files', '0', 'invalid'], ctx)).toThrow(
      InvalidFieldError
    )
  })
})
})
describe('InvalidFieldError', () => {
  it('should have correct properties', () => {
    // Constructor arguments are exposed as public readonly fields.
    const error = new InvalidFieldError('myBlock', 'invalid.path', ['field1', 'field2'])
    expect(error.blockName).toBe('myBlock')
    expect(error.fieldPath).toBe('invalid.path')
    expect(error.availableFields).toEqual(['field1', 'field2'])
    expect(error.name).toBe('InvalidFieldError')
  })

  it('should format message correctly', () => {
    const error = new InvalidFieldError('start', 'typo', ['input', 'files'])
    expect(error.message).toBe(
      '"typo" doesn\'t exist on block "start". Available fields: input, files'
    )
  })

  it('should handle empty available fields', () => {
    // With no known fields, the message says "none" rather than an empty list.
    const error = new InvalidFieldError('start', 'field', [])
    expect(error.message).toBe('"field" doesn\'t exist on block "start". Available fields: none')
  })
})

View File

@@ -0,0 +1,210 @@
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import { normalizeName } from '@/executor/constants'
import { navigatePath } from '@/executor/variables/resolvers/reference'
/** Map of output field name -> field descriptor (e.g. `{ type, description }`) for one block. */
export type OutputSchema = Record<string, { type?: string; description?: string } | unknown>

/** Inputs required to resolve a `<blockName.path>` style reference. */
export interface BlockReferenceContext {
  /** Normalized block name -> block id. */
  blockNameMapping: Record<string, string>
  /** Block id -> runtime output value (absent until the block has produced output). */
  blockData: Record<string, unknown>
  /** Block id -> declared output schema; when present, enables field validation. */
  blockOutputSchemas?: Record<string, OutputSchema>
}

/** Result of a successful block-reference resolution. */
export interface BlockReferenceResult {
  /** Resolved value; may be undefined when the field is valid but unset. */
  value: unknown
  /** Id of the block the reference resolved to. */
  blockId: string
}
/**
 * Raised when a referenced field path is not present in a block's declared
 * output schema. Carries the offending path and the valid top-level fields.
 */
export class InvalidFieldError extends Error {
  public readonly blockName: string
  public readonly fieldPath: string
  public readonly availableFields: string[]

  constructor(blockName: string, fieldPath: string, availableFields: string[]) {
    const fieldList = availableFields.length > 0 ? availableFields.join(', ') : 'none'
    super(`"${fieldPath}" doesn't exist on block "${blockName}". Available fields: ${fieldList}`)
    this.name = 'InvalidFieldError'
    this.blockName = blockName
    this.fieldPath = fieldPath
    this.availableFields = availableFields
  }
}
/** True when a schema node declares a file-array output (`file[]` or `files`). */
function isFileType(value: unknown): boolean {
  if (value === null || typeof value !== 'object') {
    return false
  }
  const declaredType = (value as { type?: string }).type
  return declaredType === 'file[]' || declaredType === 'files'
}
/** Narrowing guard: true when a schema node declares `type: 'array'`. */
function isArrayType(value: unknown): value is { type: 'array'; items?: unknown } {
  return (
    typeof value === 'object' && value !== null && (value as { type?: string }).type === 'array'
  )
}
/** Returns a schema node's `items` sub-schema, or undefined for non-objects. */
function getArrayItems(schema: unknown): unknown {
  if (schema === null || typeof schema !== 'object') {
    return undefined
  }
  return (schema as { items?: unknown }).items
}
/** Returns a schema node's `properties` map when it is a non-null object, else undefined. */
function getProperties(schema: unknown): Record<string, unknown> | undefined {
  if (schema === null || typeof schema !== 'object') {
    return undefined
  }
  const { properties } = schema as { properties?: unknown }
  if (typeof properties !== 'object' || properties === null) {
    return undefined
  }
  return properties as Record<string, unknown>
}
/**
 * Looks up a field on a schema node: first as a direct key, then inside a
 * nested `properties` map. Returns undefined when absent either way.
 */
function lookupField(schema: unknown, fieldName: string): unknown | undefined {
  if (schema === null || typeof schema !== 'object') {
    return undefined
  }
  const direct = schema as Record<string, unknown>
  if (fieldName in direct) {
    return direct[fieldName]
  }
  const nested = getProperties(schema)
  return nested && fieldName in nested ? nested[fieldName] : undefined
}
/**
 * Walks `pathParts` against a block's declared output schema and reports
 * whether the reference path is plausible. A missing schema or an empty path
 * always passes (no information to validate against).
 */
function isPathInSchema(schema: OutputSchema | undefined, pathParts: string[]): boolean {
  if (!schema || pathParts.length === 0) {
    return true
  }
  let current: unknown = schema
  for (let i = 0; i < pathParts.length; i++) {
    const part = pathParts[i]
    if (current === null || current === undefined) {
      return false
    }
    // Bare numeric segment: an array index.
    if (/^\d+$/.test(part)) {
      if (isFileType(current)) {
        // Indexing into a file array: only whitelisted file properties may follow.
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }
      if (isArrayType(current)) {
        current = getArrayItems(current)
      }
      // Index segments on non-array schemas are tolerated without descending.
      continue
    }
    // Combined "prop[0]" segment: field access plus index in one part.
    const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
    if (arrayMatch) {
      const [, prop] = arrayMatch
      const fieldDef = lookupField(current, prop)
      if (!fieldDef) return false
      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }
      current = isArrayType(fieldDef) ? getArrayItems(fieldDef) : fieldDef
      continue
    }
    // Direct file-property access on a file-typed node (no index segment).
    if (
      isFileType(current) &&
      USER_FILE_ACCESSIBLE_PROPERTIES.includes(
        part as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
      )
    ) {
      return true
    }
    const fieldDef = lookupField(current, part)
    if (fieldDef !== undefined) {
      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        if (!nextPart) return true
        if (/^\d+$/.test(nextPart)) {
          // files.<index>.<prop>: the property after the index must be whitelisted.
          const afterIndex = pathParts[i + 2]
          return (
            !afterIndex ||
            USER_FILE_ACCESSIBLE_PROPERTIES.includes(
              afterIndex as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
            )
          )
        }
        return USER_FILE_ACCESSIBLE_PROPERTIES.includes(
          nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
        )
      }
      current = fieldDef
      continue
    }
    // Fall back to the array item schema when the field lives on `items`.
    if (isArrayType(current)) {
      const items = getArrayItems(current)
      const itemField = lookupField(items, part)
      if (itemField !== undefined) {
        current = itemField
        continue
      }
    }
    return false
  }
  return true
}
/** Top-level field names of a schema, or an empty list when no schema exists. */
function getSchemaFieldNames(schema: OutputSchema | undefined): string[] {
  return schema ? Object.keys(schema) : []
}
export function resolveBlockReference(
blockName: string,
pathParts: string[],
context: BlockReferenceContext
): BlockReferenceResult | undefined {
const normalizedName = normalizeName(blockName)
const blockId = context.blockNameMapping[normalizedName]
if (!blockId) {
return undefined
}
const blockOutput = context.blockData[blockId]
const schema = context.blockOutputSchemas?.[blockId]
if (blockOutput === undefined) {
if (schema && pathParts.length > 0) {
if (!isPathInSchema(schema, pathParts)) {
throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
}
}
return { value: undefined, blockId }
}
if (pathParts.length === 0) {
return { value: blockOutput, blockId }
}
const value = navigatePath(blockOutput, pathParts)
if (value === undefined && schema) {
if (!isPathInSchema(schema, pathParts)) {
throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
}
}
return { value, blockId }
}

View File

@@ -1,11 +1,15 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs' import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import { import {
isReference, isReference,
normalizeName, normalizeName,
parseReferencePath, parseReferencePath,
SPECIAL_REFERENCE_PREFIXES, SPECIAL_REFERENCE_PREFIXES,
} from '@/executor/constants' } from '@/executor/constants'
import {
InvalidFieldError,
type OutputSchema,
resolveBlockReference,
} from '@/executor/utils/block-reference'
import { import {
navigatePath, navigatePath,
type ResolutionContext, type ResolutionContext,
@@ -14,123 +18,6 @@ import {
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types' import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import { getTool } from '@/tools/utils' import { getTool } from '@/tools/utils'
function isPathInOutputSchema(
outputs: Record<string, any> | undefined,
pathParts: string[]
): boolean {
if (!outputs || pathParts.length === 0) {
return true
}
const isFileArrayType = (value: any): boolean =>
value?.type === 'file[]' || value?.type === 'files'
let current: any = outputs
for (let i = 0; i < pathParts.length; i++) {
const part = pathParts[i]
const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
if (arrayMatch) {
const [, prop] = arrayMatch
let fieldDef: any
if (prop in current) {
fieldDef = current[prop]
} else if (current.properties && prop in current.properties) {
fieldDef = current.properties[prop]
} else if (current.type === 'array' && current.items) {
if (current.items.properties && prop in current.items.properties) {
fieldDef = current.items.properties[prop]
} else if (prop in current.items) {
fieldDef = current.items[prop]
}
}
if (!fieldDef) {
return false
}
if (isFileArrayType(fieldDef)) {
if (i + 1 < pathParts.length) {
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(pathParts[i + 1] as any)
}
return true
}
if (fieldDef.type === 'array' && fieldDef.items) {
current = fieldDef.items
continue
}
current = fieldDef
continue
}
if (/^\d+$/.test(part)) {
if (isFileArrayType(current)) {
if (i + 1 < pathParts.length) {
const nextPart = pathParts[i + 1]
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(nextPart as any)
}
return true
}
continue
}
if (current === null || current === undefined) {
return false
}
if (part in current) {
const nextCurrent = current[part]
if (nextCurrent?.type === 'file[]' && i + 1 < pathParts.length) {
const nextPart = pathParts[i + 1]
if (/^\d+$/.test(nextPart) && i + 2 < pathParts.length) {
const propertyPart = pathParts[i + 2]
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(propertyPart as any)
}
}
current = nextCurrent
continue
}
if (current.properties && part in current.properties) {
current = current.properties[part]
continue
}
if (current.type === 'array' && current.items) {
if (current.items.properties && part in current.items.properties) {
current = current.items.properties[part]
continue
}
if (part in current.items) {
current = current.items[part]
continue
}
}
if (isFileArrayType(current) && USER_FILE_ACCESSIBLE_PROPERTIES.includes(part as any)) {
return true
}
if ('type' in current && typeof current.type === 'string') {
if (!current.properties && !current.items) {
return false
}
}
return false
}
return true
}
function getSchemaFieldNames(outputs: Record<string, any> | undefined): string[] {
if (!outputs) return []
return Object.keys(outputs)
}
export class BlockResolver implements Resolver { export class BlockResolver implements Resolver {
private nameToBlockId: Map<string, string> private nameToBlockId: Map<string, string>
private blockById: Map<string, SerializedBlock> private blockById: Map<string, SerializedBlock>
@@ -170,83 +57,94 @@ export class BlockResolver implements Resolver {
return undefined return undefined
} }
const block = this.blockById.get(blockId) const block = this.blockById.get(blockId)!
const output = this.getBlockOutput(blockId, context) const output = this.getBlockOutput(blockId, context)
if (output === undefined) { const blockData: Record<string, unknown> = {}
const blockOutputSchemas: Record<string, OutputSchema> = {}
if (output !== undefined) {
blockData[blockId] = output
}
const blockType = block.metadata?.id
const params = block.config?.params as Record<string, unknown> | undefined
const subBlocks = params
? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
: undefined
const toolId = block.config?.tool
const toolConfig = toolId ? getTool(toolId) : undefined
const outputSchema =
toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)
if (outputSchema && Object.keys(outputSchema).length > 0) {
blockOutputSchemas[blockId] = outputSchema
}
try {
const result = resolveBlockReference(blockName, pathParts, {
blockNameMapping: Object.fromEntries(this.nameToBlockId),
blockData,
blockOutputSchemas,
})!
if (result.value !== undefined) {
return result.value
}
return this.handleBackwardsCompat(block, output, pathParts)
} catch (error) {
if (error instanceof InvalidFieldError) {
const fallback = this.handleBackwardsCompat(block, output, pathParts)
if (fallback !== undefined) {
return fallback
}
throw new Error(error.message)
}
throw error
}
}
private handleBackwardsCompat(
block: SerializedBlock,
output: unknown,
pathParts: string[]
): unknown {
if (output === undefined || pathParts.length === 0) {
return undefined return undefined
} }
if (pathParts.length === 0) {
return output
}
// Try the original path first
let result = navigatePath(output, pathParts)
// If successful, return it immediately
if (result !== undefined) {
return result
}
// Response block backwards compatibility:
// Old: <responseBlock.response.data> -> New: <responseBlock.data>
// Only apply fallback if:
// 1. Block type is 'response'
// 2. Path starts with 'response.'
// 3. Output doesn't have a 'response' key (confirming it's the new format)
if ( if (
block?.metadata?.id === 'response' && block.metadata?.id === 'response' &&
pathParts[0] === 'response' && pathParts[0] === 'response' &&
output?.response === undefined (output as Record<string, unknown>)?.response === undefined
) { ) {
const adjustedPathParts = pathParts.slice(1) const adjustedPathParts = pathParts.slice(1)
if (adjustedPathParts.length === 0) { if (adjustedPathParts.length === 0) {
return output return output
} }
result = navigatePath(output, adjustedPathParts) const fallbackResult = navigatePath(output, adjustedPathParts)
if (result !== undefined) { if (fallbackResult !== undefined) {
return result return fallbackResult
} }
} }
// Workflow block backwards compatibility:
// Old: <workflowBlock.result.response.data> -> New: <workflowBlock.result.data>
// Only apply fallback if:
// 1. Block type is 'workflow' or 'workflow_input'
// 2. Path starts with 'result.response.'
// 3. output.result.response doesn't exist (confirming child used new format)
const isWorkflowBlock = const isWorkflowBlock =
block?.metadata?.id === 'workflow' || block?.metadata?.id === 'workflow_input' block.metadata?.id === 'workflow' || block.metadata?.id === 'workflow_input'
const outputRecord = output as Record<string, Record<string, unknown> | undefined>
if ( if (
isWorkflowBlock && isWorkflowBlock &&
pathParts[0] === 'result' && pathParts[0] === 'result' &&
pathParts[1] === 'response' && pathParts[1] === 'response' &&
output?.result?.response === undefined outputRecord?.result?.response === undefined
) { ) {
const adjustedPathParts = ['result', ...pathParts.slice(2)] const adjustedPathParts = ['result', ...pathParts.slice(2)]
result = navigatePath(output, adjustedPathParts) const fallbackResult = navigatePath(output, adjustedPathParts)
if (result !== undefined) { if (fallbackResult !== undefined) {
return result return fallbackResult
} }
} }
const blockType = block?.metadata?.id
const params = block?.config?.params as Record<string, unknown> | undefined
const subBlocks = params
? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
: undefined
const toolId = block?.config?.tool
const toolConfig = toolId ? getTool(toolId) : undefined
const outputSchema =
toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block?.outputs)
const schemaFields = getSchemaFieldNames(outputSchema)
if (schemaFields.length > 0 && !isPathInOutputSchema(outputSchema, pathParts)) {
throw new Error(
`"${pathParts.join('.')}" doesn't exist on block "${blockName}". ` +
`Available fields: ${schemaFields.join(', ')}`
)
}
return undefined return undefined
} }

View File

@@ -1,6 +1,7 @@
import { loggerMock } from '@sim/testing' import { loggerMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest' import { describe, expect, it, vi } from 'vitest'
import type { LoopScope } from '@/executor/execution/state' import type { LoopScope } from '@/executor/execution/state'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { LoopResolver } from './loop' import { LoopResolver } from './loop'
import type { ResolutionContext } from './reference' import type { ResolutionContext } from './reference'
@@ -62,7 +63,12 @@ function createTestContext(
describe('LoopResolver', () => { describe('LoopResolver', () => {
describe('canResolve', () => { describe('canResolve', () => {
it.concurrent('should return true for loop references', () => { it.concurrent('should return true for bare loop reference', () => {
const resolver = new LoopResolver(createTestWorkflow())
expect(resolver.canResolve('<loop>')).toBe(true)
})
it.concurrent('should return true for known loop properties', () => {
const resolver = new LoopResolver(createTestWorkflow()) const resolver = new LoopResolver(createTestWorkflow())
expect(resolver.canResolve('<loop.index>')).toBe(true) expect(resolver.canResolve('<loop.index>')).toBe(true)
expect(resolver.canResolve('<loop.iteration>')).toBe(true) expect(resolver.canResolve('<loop.iteration>')).toBe(true)
@@ -78,6 +84,13 @@ describe('LoopResolver', () => {
expect(resolver.canResolve('<loop.items.0>')).toBe(true) expect(resolver.canResolve('<loop.items.0>')).toBe(true)
}) })
it.concurrent('should return true for unknown loop properties (validates in resolve)', () => {
const resolver = new LoopResolver(createTestWorkflow())
expect(resolver.canResolve('<loop.results>')).toBe(true)
expect(resolver.canResolve('<loop.output>')).toBe(true)
expect(resolver.canResolve('<loop.unknownProperty>')).toBe(true)
})
it.concurrent('should return false for non-loop references', () => { it.concurrent('should return false for non-loop references', () => {
const resolver = new LoopResolver(createTestWorkflow()) const resolver = new LoopResolver(createTestWorkflow())
expect(resolver.canResolve('<block.output>')).toBe(false) expect(resolver.canResolve('<block.output>')).toBe(false)
@@ -181,20 +194,34 @@ describe('LoopResolver', () => {
}) })
describe('edge cases', () => { describe('edge cases', () => {
it.concurrent('should return undefined for invalid loop reference (missing property)', () => { it.concurrent('should return context object for bare loop reference', () => {
const resolver = new LoopResolver(createTestWorkflow()) const resolver = new LoopResolver(createTestWorkflow())
const loopScope = createLoopScope({ iteration: 0 }) const loopScope = createLoopScope({ iteration: 2, item: 'test', items: ['a', 'b', 'c'] })
const ctx = createTestContext('block-1', loopScope) const ctx = createTestContext('block-1', loopScope)
expect(resolver.resolve('<loop>', ctx)).toBeUndefined() expect(resolver.resolve('<loop>', ctx)).toEqual({
index: 2,
currentItem: 'test',
items: ['a', 'b', 'c'],
})
}) })
it.concurrent('should return undefined for unknown loop property', () => { it.concurrent('should return minimal context object for for-loop (no items)', () => {
const resolver = new LoopResolver(createTestWorkflow())
const loopScope = createLoopScope({ iteration: 5 })
const ctx = createTestContext('block-1', loopScope)
expect(resolver.resolve('<loop>', ctx)).toEqual({
index: 5,
})
})
it.concurrent('should throw InvalidFieldError for unknown loop property', () => {
const resolver = new LoopResolver(createTestWorkflow()) const resolver = new LoopResolver(createTestWorkflow())
const loopScope = createLoopScope({ iteration: 0 }) const loopScope = createLoopScope({ iteration: 0 })
const ctx = createTestContext('block-1', loopScope) const ctx = createTestContext('block-1', loopScope)
expect(resolver.resolve('<loop.unknownProperty>', ctx)).toBeUndefined() expect(() => resolver.resolve('<loop.unknownProperty>', ctx)).toThrow(InvalidFieldError)
}) })
it.concurrent('should handle iteration index 0 correctly', () => { it.concurrent('should handle iteration index 0 correctly', () => {

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/constants' import { isReference, parseReferencePath, REFERENCE } from '@/executor/constants'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils' import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import { import {
navigatePath, navigatePath,
@@ -13,6 +14,8 @@ const logger = createLogger('LoopResolver')
export class LoopResolver implements Resolver { export class LoopResolver implements Resolver {
constructor(private workflow: SerializedWorkflow) {} constructor(private workflow: SerializedWorkflow) {}
private static KNOWN_PROPERTIES = ['iteration', 'index', 'item', 'currentItem', 'items']
canResolve(reference: string): boolean { canResolve(reference: string): boolean {
if (!isReference(reference)) { if (!isReference(reference)) {
return false return false
@@ -27,16 +30,15 @@ export class LoopResolver implements Resolver {
resolve(reference: string, context: ResolutionContext): any { resolve(reference: string, context: ResolutionContext): any {
const parts = parseReferencePath(reference) const parts = parseReferencePath(reference)
if (parts.length < 2) { if (parts.length === 0) {
logger.warn('Invalid loop reference - missing property', { reference }) logger.warn('Invalid loop reference', { reference })
return undefined return undefined
} }
const [_, property, ...pathParts] = parts const loopId = this.findLoopForBlock(context.currentNodeId)
let loopScope = context.loopScope let loopScope = context.loopScope
if (!loopScope) { if (!loopScope) {
const loopId = this.findLoopForBlock(context.currentNodeId)
if (!loopId) { if (!loopId) {
return undefined return undefined
} }
@@ -48,6 +50,27 @@ export class LoopResolver implements Resolver {
return undefined return undefined
} }
const isForEach = loopId ? this.isForEachLoop(loopId) : loopScope.items !== undefined
if (parts.length === 1) {
const result: Record<string, any> = {
index: loopScope.iteration,
}
if (loopScope.item !== undefined) {
result.currentItem = loopScope.item
}
if (loopScope.items !== undefined) {
result.items = loopScope.items
}
return result
}
const [_, property, ...pathParts] = parts
if (!LoopResolver.KNOWN_PROPERTIES.includes(property)) {
const availableFields = isForEach ? ['index', 'currentItem', 'items'] : ['index']
throw new InvalidFieldError('loop', property, availableFields)
}
let value: any let value: any
switch (property) { switch (property) {
case 'iteration': case 'iteration':
@@ -61,12 +84,8 @@ export class LoopResolver implements Resolver {
case 'items': case 'items':
value = loopScope.items value = loopScope.items
break break
default:
logger.warn('Unknown loop property', { property })
return undefined
} }
// If there are additional path parts, navigate deeper
if (pathParts.length > 0) { if (pathParts.length > 0) {
return navigatePath(value, pathParts) return navigatePath(value, pathParts)
} }
@@ -85,4 +104,9 @@ export class LoopResolver implements Resolver {
return undefined return undefined
} }
private isForEachLoop(loopId: string): boolean {
const loopConfig = this.workflow.loops?.[loopId]
return loopConfig?.loopType === 'forEach'
}
} }

View File

@@ -1,5 +1,6 @@
import { loggerMock } from '@sim/testing' import { loggerMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest' import { describe, expect, it, vi } from 'vitest'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { ParallelResolver } from './parallel' import { ParallelResolver } from './parallel'
import type { ResolutionContext } from './reference' import type { ResolutionContext } from './reference'
@@ -81,7 +82,12 @@ function createTestContext(
describe('ParallelResolver', () => { describe('ParallelResolver', () => {
describe('canResolve', () => { describe('canResolve', () => {
it.concurrent('should return true for parallel references', () => { it.concurrent('should return true for bare parallel reference', () => {
const resolver = new ParallelResolver(createTestWorkflow())
expect(resolver.canResolve('<parallel>')).toBe(true)
})
it.concurrent('should return true for known parallel properties', () => {
const resolver = new ParallelResolver(createTestWorkflow()) const resolver = new ParallelResolver(createTestWorkflow())
expect(resolver.canResolve('<parallel.index>')).toBe(true) expect(resolver.canResolve('<parallel.index>')).toBe(true)
expect(resolver.canResolve('<parallel.currentItem>')).toBe(true) expect(resolver.canResolve('<parallel.currentItem>')).toBe(true)
@@ -94,6 +100,16 @@ describe('ParallelResolver', () => {
expect(resolver.canResolve('<parallel.items.0>')).toBe(true) expect(resolver.canResolve('<parallel.items.0>')).toBe(true)
}) })
it.concurrent(
'should return true for unknown parallel properties (validates in resolve)',
() => {
const resolver = new ParallelResolver(createTestWorkflow())
expect(resolver.canResolve('<parallel.results>')).toBe(true)
expect(resolver.canResolve('<parallel.output>')).toBe(true)
expect(resolver.canResolve('<parallel.unknownProperty>')).toBe(true)
}
)
it.concurrent('should return false for non-parallel references', () => { it.concurrent('should return false for non-parallel references', () => {
const resolver = new ParallelResolver(createTestWorkflow()) const resolver = new ParallelResolver(createTestWorkflow())
expect(resolver.canResolve('<block.output>')).toBe(false) expect(resolver.canResolve('<block.output>')).toBe(false)
@@ -254,24 +270,40 @@ describe('ParallelResolver', () => {
}) })
describe('edge cases', () => { describe('edge cases', () => {
it.concurrent( it.concurrent('should return context object for bare parallel reference', () => {
'should return undefined for invalid parallel reference (missing property)', const workflow = createTestWorkflow({
() => { 'parallel-1': { nodes: ['block-1'], distribution: ['a', 'b', 'c'] },
const resolver = new ParallelResolver(createTestWorkflow()) })
const resolver = new ParallelResolver(workflow)
const ctx = createTestContext('block-1₍1₎')
expect(resolver.resolve('<parallel>', ctx)).toEqual({
index: 1,
currentItem: 'b',
items: ['a', 'b', 'c'],
})
})
it.concurrent('should return minimal context object when no distribution', () => {
const workflow = createTestWorkflow({
'parallel-1': { nodes: ['block-1'] },
})
const resolver = new ParallelResolver(workflow)
const ctx = createTestContext('block-1₍0₎') const ctx = createTestContext('block-1₍0₎')
expect(resolver.resolve('<parallel>', ctx)).toBeUndefined() const result = resolver.resolve('<parallel>', ctx)
} expect(result).toHaveProperty('index', 0)
) expect(result).toHaveProperty('items')
})
it.concurrent('should return undefined for unknown parallel property', () => { it.concurrent('should throw InvalidFieldError for unknown parallel property', () => {
const workflow = createTestWorkflow({ const workflow = createTestWorkflow({
'parallel-1': { nodes: ['block-1'], distribution: ['a'] }, 'parallel-1': { nodes: ['block-1'], distribution: ['a'] },
}) })
const resolver = new ParallelResolver(workflow) const resolver = new ParallelResolver(workflow)
const ctx = createTestContext('block-1₍0₎') const ctx = createTestContext('block-1₍0₎')
expect(resolver.resolve('<parallel.unknownProperty>', ctx)).toBeUndefined() expect(() => resolver.resolve('<parallel.unknownProperty>', ctx)).toThrow(InvalidFieldError)
}) })
it.concurrent('should return undefined when block is not in any parallel', () => { it.concurrent('should return undefined when block is not in any parallel', () => {

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/constants' import { isReference, parseReferencePath, REFERENCE } from '@/executor/constants'
import { InvalidFieldError } from '@/executor/utils/block-reference'
import { extractBaseBlockId, extractBranchIndex } from '@/executor/utils/subflow-utils' import { extractBaseBlockId, extractBranchIndex } from '@/executor/utils/subflow-utils'
import { import {
navigatePath, navigatePath,
@@ -13,6 +14,8 @@ const logger = createLogger('ParallelResolver')
export class ParallelResolver implements Resolver { export class ParallelResolver implements Resolver {
constructor(private workflow: SerializedWorkflow) {} constructor(private workflow: SerializedWorkflow) {}
private static KNOWN_PROPERTIES = ['index', 'currentItem', 'items']
canResolve(reference: string): boolean { canResolve(reference: string): boolean {
if (!isReference(reference)) { if (!isReference(reference)) {
return false return false
@@ -27,12 +30,11 @@ export class ParallelResolver implements Resolver {
resolve(reference: string, context: ResolutionContext): any { resolve(reference: string, context: ResolutionContext): any {
const parts = parseReferencePath(reference) const parts = parseReferencePath(reference)
if (parts.length < 2) { if (parts.length === 0) {
logger.warn('Invalid parallel reference - missing property', { reference }) logger.warn('Invalid parallel reference', { reference })
return undefined return undefined
} }
const [_, property, ...pathParts] = parts
const parallelId = this.findParallelForBlock(context.currentNodeId) const parallelId = this.findParallelForBlock(context.currentNodeId)
if (!parallelId) { if (!parallelId) {
return undefined return undefined
@@ -49,11 +51,33 @@ export class ParallelResolver implements Resolver {
return undefined return undefined
} }
// First try to get items from the parallel scope (resolved at runtime)
// This is the same pattern as LoopResolver reading from loopScope.items
const parallelScope = context.executionContext.parallelExecutions?.get(parallelId) const parallelScope = context.executionContext.parallelExecutions?.get(parallelId)
const distributionItems = parallelScope?.items ?? this.getDistributionItems(parallelConfig) const distributionItems = parallelScope?.items ?? this.getDistributionItems(parallelConfig)
if (parts.length === 1) {
const result: Record<string, any> = {
index: branchIndex,
}
if (distributionItems !== undefined) {
result.items = distributionItems
if (Array.isArray(distributionItems)) {
result.currentItem = distributionItems[branchIndex]
} else if (typeof distributionItems === 'object' && distributionItems !== null) {
const keys = Object.keys(distributionItems)
const key = keys[branchIndex]
result.currentItem = key !== undefined ? distributionItems[key] : undefined
}
}
return result
}
const [_, property, ...pathParts] = parts
if (!ParallelResolver.KNOWN_PROPERTIES.includes(property)) {
const isCollection = parallelConfig.parallelType === 'collection'
const availableFields = isCollection ? ['index', 'currentItem', 'items'] : ['index']
throw new InvalidFieldError('parallel', property, availableFields)
}
let value: any let value: any
switch (property) { switch (property) {
case 'index': case 'index':
@@ -73,12 +97,8 @@ export class ParallelResolver implements Resolver {
case 'items': case 'items':
value = distributionItems value = distributionItems
break break
default:
logger.warn('Unknown parallel property', { property })
return undefined
} }
// If there are additional path parts, navigate deeper
if (pathParts.length > 0) { if (pathParts.length > 0) {
return navigatePath(value, pathParts) return navigatePath(value, pathParts)
} }

View File

@@ -27,23 +27,28 @@ export function navigatePath(obj: any, path: string[]): any {
return undefined return undefined
} }
// Handle array indexing like "items[0]" or just numeric indices const arrayMatch = part.match(/^([^[]+)(\[.+)$/)
const arrayMatch = part.match(/^([^[]+)\[(\d+)\](.*)$/)
if (arrayMatch) { if (arrayMatch) {
// Handle complex array access like "items[0]" const [, prop, bracketsPart] = arrayMatch
const [, prop, index] = arrayMatch
current = current[prop] current = current[prop]
if (current === undefined || current === null) { if (current === undefined || current === null) {
return undefined return undefined
} }
const idx = Number.parseInt(index, 10)
const indices = bracketsPart.match(/\[(\d+)\]/g)
if (indices) {
for (const indexMatch of indices) {
if (current === null || current === undefined) {
return undefined
}
const idx = Number.parseInt(indexMatch.slice(1, -1), 10)
current = Array.isArray(current) ? current[idx] : undefined current = Array.isArray(current) ? current[idx] : undefined
}
}
} else if (/^\d+$/.test(part)) { } else if (/^\d+$/.test(part)) {
// Handle plain numeric index
const index = Number.parseInt(part, 10) const index = Number.parseInt(part, 10)
current = Array.isArray(current) ? current[index] : undefined current = Array.isArray(current) ? current[index] : undefined
} else { } else {
// Handle regular property access
current = current[part] current = current[part]
} }
} }

View File

@@ -1,10 +1,9 @@
'use client' 'use client'
import { useCallback, useEffect, useState } from 'react' import { useCallback } from 'react'
import { createLogger } from '@sim/logger' import { useQueryClient } from '@tanstack/react-query'
import type { AllTagSlot } from '@/lib/knowledge/constants' import type { AllTagSlot } from '@/lib/knowledge/constants'
import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/knowledge'
const logger = createLogger('useKnowledgeBaseTagDefinitions')
export interface TagDefinition { export interface TagDefinition {
id: string id: string
@@ -17,54 +16,23 @@ export interface TagDefinition {
/** /**
* Hook for fetching KB-scoped tag definitions (for filtering/selection) * Hook for fetching KB-scoped tag definitions (for filtering/selection)
* @param knowledgeBaseId - The knowledge base ID * Uses React Query as single source of truth
*/ */
export function useKnowledgeBaseTagDefinitions(knowledgeBaseId: string | null) { export function useKnowledgeBaseTagDefinitions(knowledgeBaseId: string | null) {
const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([]) const queryClient = useQueryClient()
const [isLoading, setIsLoading] = useState(false) const query = useTagDefinitionsQuery(knowledgeBaseId)
const [error, setError] = useState<string | null>(null)
const fetchTagDefinitions = useCallback(async () => { const fetchTagDefinitions = useCallback(async () => {
if (!knowledgeBaseId) { if (!knowledgeBaseId) return
setTagDefinitions([]) await queryClient.invalidateQueries({
return queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
} })
}, [queryClient, knowledgeBaseId])
setIsLoading(true)
setError(null)
try {
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`)
if (!response.ok) {
throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
}
const data = await response.json()
if (data.success && Array.isArray(data.data)) {
setTagDefinitions(data.data)
} else {
throw new Error('Invalid response format')
}
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
logger.error('Error fetching tag definitions:', err)
setError(errorMessage)
setTagDefinitions([])
} finally {
setIsLoading(false)
}
}, [knowledgeBaseId])
useEffect(() => {
fetchTagDefinitions()
}, [fetchTagDefinitions])
return { return {
tagDefinitions, tagDefinitions: (query.data ?? []) as TagDefinition[],
isLoading, isLoading: query.isLoading,
error, error: query.error instanceof Error ? query.error.message : null,
fetchTagDefinitions, fetchTagDefinitions,
} }
} }

View File

@@ -1,4 +1,4 @@
import { useCallback } from 'react' import { useCallback, useMemo } from 'react'
import { useQueryClient } from '@tanstack/react-query' import { useQueryClient } from '@tanstack/react-query'
import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types' import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types'
import { import {
@@ -67,12 +67,17 @@ export function useKnowledgeBaseDocuments(
sortBy?: string sortBy?: string
sortOrder?: string sortOrder?: string
enabled?: boolean enabled?: boolean
refetchInterval?: number | false refetchInterval?:
| number
| false
| ((data: KnowledgeDocumentsResponse | undefined) => number | false)
enabledFilter?: 'all' | 'enabled' | 'disabled'
} }
) { ) {
const queryClient = useQueryClient() const queryClient = useQueryClient()
const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE
const requestOffset = options?.offset ?? 0 const requestOffset = options?.offset ?? 0
const enabledFilter = options?.enabledFilter ?? 'all'
const paramsKey = serializeDocumentParams({ const paramsKey = serializeDocumentParams({
knowledgeBaseId, knowledgeBaseId,
limit: requestLimit, limit: requestLimit,
@@ -80,8 +85,19 @@ export function useKnowledgeBaseDocuments(
search: options?.search, search: options?.search,
sortBy: options?.sortBy, sortBy: options?.sortBy,
sortOrder: options?.sortOrder, sortOrder: options?.sortOrder,
enabledFilter,
}) })
const refetchIntervalFn = useMemo(() => {
if (typeof options?.refetchInterval === 'function') {
const userFn = options.refetchInterval
return (query: { state: { data?: KnowledgeDocumentsResponse } }) => {
return userFn(query.state.data)
}
}
return options?.refetchInterval
}, [options?.refetchInterval])
const query = useKnowledgeDocumentsQuery( const query = useKnowledgeDocumentsQuery(
{ {
knowledgeBaseId, knowledgeBaseId,
@@ -90,10 +106,11 @@ export function useKnowledgeBaseDocuments(
search: options?.search, search: options?.search,
sortBy: options?.sortBy, sortBy: options?.sortBy,
sortOrder: options?.sortOrder, sortOrder: options?.sortOrder,
enabledFilter,
}, },
{ {
enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId), enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
refetchInterval: options?.refetchInterval, refetchInterval: refetchIntervalFn,
} }
) )
@@ -105,6 +122,14 @@ export function useKnowledgeBaseDocuments(
hasMore: false, hasMore: false,
} }
const hasProcessingDocs = useMemo(
() =>
documents.some(
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
),
[documents]
)
const refreshDocuments = useCallback(async () => { const refreshDocuments = useCallback(async () => {
await queryClient.invalidateQueries({ await queryClient.invalidateQueries({
queryKey: knowledgeKeys.documents(knowledgeBaseId, paramsKey), queryKey: knowledgeKeys.documents(knowledgeBaseId, paramsKey),
@@ -136,6 +161,7 @@ export function useKnowledgeBaseDocuments(
isFetching: query.isFetching, isFetching: query.isFetching,
isPlaceholderData: query.isPlaceholderData, isPlaceholderData: query.isPlaceholderData,
error: query.error instanceof Error ? query.error.message : null, error: query.error instanceof Error ? query.error.message : null,
hasProcessingDocuments: hasProcessingDocs,
refreshDocuments, refreshDocuments,
updateDocument, updateDocument,
} }
@@ -233,8 +259,8 @@ export function useDocumentChunks(
const hasPrevPage = currentPage > 1 const hasPrevPage = currentPage > 1
const goToPage = useCallback( const goToPage = useCallback(
async (newPage: number) => { (newPage: number): boolean => {
if (newPage < 1 || newPage > totalPages) return return newPage >= 1 && newPage <= totalPages
}, },
[totalPages] [totalPages]
) )

View File

@@ -1,10 +1,15 @@
'use client' 'use client'
import { useCallback, useEffect, useState } from 'react' import { useCallback } from 'react'
import { createLogger } from '@sim/logger' import { useQueryClient } from '@tanstack/react-query'
import type { AllTagSlot } from '@/lib/knowledge/constants' import type { AllTagSlot } from '@/lib/knowledge/constants'
import {
const logger = createLogger('useTagDefinitions') type DocumentTagDefinitionInput,
knowledgeKeys,
useDeleteDocumentTagDefinitions,
useDocumentTagDefinitionsQuery,
useSaveDocumentTagDefinitions,
} from '@/hooks/queries/knowledge'
export interface TagDefinition { export interface TagDefinition {
id: string id: string
@@ -19,57 +24,30 @@ export interface TagDefinitionInput {
tagSlot: AllTagSlot tagSlot: AllTagSlot
displayName: string displayName: string
fieldType: string fieldType: string
// Optional: for editing existing definitions
_originalDisplayName?: string _originalDisplayName?: string
} }
/** /**
* Hook for managing KB-scoped tag definitions * Hook for managing document-scoped tag definitions
* @param knowledgeBaseId - The knowledge base ID * Uses React Query as single source of truth
* @param documentId - The document ID (required for API calls)
*/ */
export function useTagDefinitions( export function useTagDefinitions(
knowledgeBaseId: string | null, knowledgeBaseId: string | null,
documentId: string | null = null documentId: string | null = null
) { ) {
const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([]) const queryClient = useQueryClient()
const [isLoading, setIsLoading] = useState(false) const query = useDocumentTagDefinitionsQuery(knowledgeBaseId, documentId)
const [error, setError] = useState<string | null>(null) const { mutateAsync: saveTagDefinitionsMutation } = useSaveDocumentTagDefinitions()
const { mutateAsync: deleteTagDefinitionsMutation } = useDeleteDocumentTagDefinitions()
const tagDefinitions = (query.data ?? []) as TagDefinition[]
const fetchTagDefinitions = useCallback(async () => { const fetchTagDefinitions = useCallback(async () => {
if (!knowledgeBaseId || !documentId) { if (!knowledgeBaseId || !documentId) return
setTagDefinitions([]) await queryClient.invalidateQueries({
return queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
} })
}, [queryClient, knowledgeBaseId, documentId])
setIsLoading(true)
setError(null)
try {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`
)
if (!response.ok) {
throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
}
const data = await response.json()
if (data.success && Array.isArray(data.data)) {
setTagDefinitions(data.data)
} else {
throw new Error('Invalid response format')
}
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
logger.error('Error fetching tag definitions:', err)
setError(errorMessage)
setTagDefinitions([])
} finally {
setIsLoading(false)
}
}, [knowledgeBaseId, documentId])
const saveTagDefinitions = useCallback( const saveTagDefinitions = useCallback(
async (definitions: TagDefinitionInput[]) => { async (definitions: TagDefinitionInput[]) => {
@@ -77,43 +55,13 @@ export function useTagDefinitions(
throw new Error('Knowledge base ID and document ID are required') throw new Error('Knowledge base ID and document ID are required')
} }
// Simple validation return saveTagDefinitionsMutation({
const validDefinitions = (definitions || []).filter( knowledgeBaseId,
(def) => def?.tagSlot && def.displayName && def.displayName.trim() documentId,
) definitions: definitions as DocumentTagDefinitionInput[],
})
try {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
}, },
body: JSON.stringify({ definitions: validDefinitions }), [knowledgeBaseId, documentId, saveTagDefinitionsMutation]
}
)
if (!response.ok) {
throw new Error(`Failed to save tag definitions: ${response.statusText}`)
}
const data = await response.json()
if (!data.success) {
throw new Error(data.error || 'Failed to save tag definitions')
}
// Refresh the definitions after saving
await fetchTagDefinitions()
return data.data
} catch (err) {
logger.error('Error saving tag definitions:', err)
throw err
}
},
[knowledgeBaseId, documentId, fetchTagDefinitions]
) )
const deleteTagDefinitions = useCallback(async () => { const deleteTagDefinitions = useCallback(async () => {
@@ -121,25 +69,11 @@ export function useTagDefinitions(
throw new Error('Knowledge base ID and document ID are required') throw new Error('Knowledge base ID and document ID are required')
} }
try { return deleteTagDefinitionsMutation({
const response = await fetch( knowledgeBaseId,
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`, documentId,
{ })
method: 'DELETE', }, [knowledgeBaseId, documentId, deleteTagDefinitionsMutation])
}
)
if (!response.ok) {
throw new Error(`Failed to delete tag definitions: ${response.statusText}`)
}
// Refresh the definitions after deleting
await fetchTagDefinitions()
} catch (err) {
logger.error('Error deleting tag definitions:', err)
throw err
}
}, [knowledgeBaseId, documentId, fetchTagDefinitions])
const getTagLabel = useCallback( const getTagLabel = useCallback(
(tagSlot: string): string => { (tagSlot: string): string => {
@@ -156,15 +90,10 @@ export function useTagDefinitions(
[tagDefinitions] [tagDefinitions]
) )
// Auto-fetch on mount and when dependencies change
useEffect(() => {
fetchTagDefinitions()
}, [fetchTagDefinitions])
return { return {
tagDefinitions, tagDefinitions,
isLoading, isLoading: query.isLoading,
error, error: query.error instanceof Error ? query.error.message : null,
fetchTagDefinitions, fetchTagDefinitions,
saveTagDefinitions, saveTagDefinitions,
deleteTagDefinitions, deleteTagDefinitions,

View File

@@ -1,3 +1,4 @@
import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { import type {
ChunkData, ChunkData,
@@ -7,15 +8,21 @@ import type {
KnowledgeBaseData, KnowledgeBaseData,
} from '@/lib/knowledge/types' } from '@/lib/knowledge/types'
const logger = createLogger('KnowledgeQueries')
export const knowledgeKeys = { export const knowledgeKeys = {
all: ['knowledge'] as const, all: ['knowledge'] as const,
list: (workspaceId?: string) => [...knowledgeKeys.all, 'list', workspaceId ?? 'all'] as const, list: (workspaceId?: string) => [...knowledgeKeys.all, 'list', workspaceId ?? 'all'] as const,
detail: (knowledgeBaseId?: string) => detail: (knowledgeBaseId?: string) =>
[...knowledgeKeys.all, 'detail', knowledgeBaseId ?? ''] as const, [...knowledgeKeys.all, 'detail', knowledgeBaseId ?? ''] as const,
tagDefinitions: (knowledgeBaseId: string) =>
[...knowledgeKeys.detail(knowledgeBaseId), 'tagDefinitions'] as const,
documents: (knowledgeBaseId: string, paramsKey: string) => documents: (knowledgeBaseId: string, paramsKey: string) =>
[...knowledgeKeys.detail(knowledgeBaseId), 'documents', paramsKey] as const, [...knowledgeKeys.detail(knowledgeBaseId), 'documents', paramsKey] as const,
document: (knowledgeBaseId: string, documentId: string) => document: (knowledgeBaseId: string, documentId: string) =>
[...knowledgeKeys.detail(knowledgeBaseId), 'document', documentId] as const, [...knowledgeKeys.detail(knowledgeBaseId), 'document', documentId] as const,
documentTagDefinitions: (knowledgeBaseId: string, documentId: string) =>
[...knowledgeKeys.document(knowledgeBaseId, documentId), 'tagDefinitions'] as const,
chunks: (knowledgeBaseId: string, documentId: string, paramsKey: string) => chunks: (knowledgeBaseId: string, documentId: string, paramsKey: string) =>
[...knowledgeKeys.document(knowledgeBaseId, documentId), 'chunks', paramsKey] as const, [...knowledgeKeys.document(knowledgeBaseId, documentId), 'chunks', paramsKey] as const,
} }
@@ -79,6 +86,7 @@ export interface KnowledgeDocumentsParams {
offset?: number offset?: number
sortBy?: string sortBy?: string
sortOrder?: string sortOrder?: string
enabledFilter?: 'all' | 'enabled' | 'disabled'
} }
export interface KnowledgeDocumentsResponse { export interface KnowledgeDocumentsResponse {
@@ -93,6 +101,7 @@ export async function fetchKnowledgeDocuments({
offset = 0, offset = 0,
sortBy, sortBy,
sortOrder, sortOrder,
enabledFilter,
}: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> { }: KnowledgeDocumentsParams): Promise<KnowledgeDocumentsResponse> {
const params = new URLSearchParams() const params = new URLSearchParams()
if (search) params.set('search', search) if (search) params.set('search', search)
@@ -100,6 +109,7 @@ export async function fetchKnowledgeDocuments({
if (sortOrder) params.set('sortOrder', sortOrder) if (sortOrder) params.set('sortOrder', sortOrder)
params.set('limit', limit.toString()) params.set('limit', limit.toString())
params.set('offset', offset.toString()) params.set('offset', offset.toString())
if (enabledFilter) params.set('enabledFilter', enabledFilter)
const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}` const url = `/api/knowledge/${knowledgeBaseId}/documents${params.toString() ? `?${params.toString()}` : ''}`
const response = await fetch(url) const response = await fetch(url)
@@ -212,6 +222,7 @@ export function useDocumentQuery(knowledgeBaseId?: string, documentId?: string)
queryFn: () => fetchDocument(knowledgeBaseId as string, documentId as string), queryFn: () => fetchDocument(knowledgeBaseId as string, documentId as string),
enabled: Boolean(knowledgeBaseId && documentId), enabled: Boolean(knowledgeBaseId && documentId),
staleTime: 60 * 1000, staleTime: 60 * 1000,
placeholderData: keepPreviousData,
}) })
} }
@@ -222,13 +233,17 @@ export const serializeDocumentParams = (params: KnowledgeDocumentsParams) =>
offset: params.offset ?? 0, offset: params.offset ?? 0,
sortBy: params.sortBy ?? '', sortBy: params.sortBy ?? '',
sortOrder: params.sortOrder ?? '', sortOrder: params.sortOrder ?? '',
enabledFilter: params.enabledFilter ?? 'all',
}) })
export function useKnowledgeDocumentsQuery( export function useKnowledgeDocumentsQuery(
params: KnowledgeDocumentsParams, params: KnowledgeDocumentsParams,
options?: { options?: {
enabled?: boolean enabled?: boolean
refetchInterval?: number | false refetchInterval?:
| number
| false
| ((query: { state: { data?: KnowledgeDocumentsResponse } }) => number | false)
} }
) { ) {
const paramsKey = serializeDocumentParams(params) const paramsKey = serializeDocumentParams(params)
@@ -572,7 +587,9 @@ export function useDeleteDocument() {
export interface BulkDocumentOperationParams { export interface BulkDocumentOperationParams {
knowledgeBaseId: string knowledgeBaseId: string
operation: 'enable' | 'disable' | 'delete' operation: 'enable' | 'disable' | 'delete'
documentIds: string[] documentIds?: string[]
selectAll?: boolean
enabledFilter?: 'all' | 'enabled' | 'disabled'
} }
export interface BulkDocumentOperationResult { export interface BulkDocumentOperationResult {
@@ -585,11 +602,21 @@ export async function bulkDocumentOperation({
knowledgeBaseId, knowledgeBaseId,
operation, operation,
documentIds, documentIds,
selectAll,
enabledFilter,
}: BulkDocumentOperationParams): Promise<BulkDocumentOperationResult> { }: BulkDocumentOperationParams): Promise<BulkDocumentOperationResult> {
const body: Record<string, unknown> = { operation }
if (selectAll) {
body.selectAll = true
if (enabledFilter) body.enabledFilter = enabledFilter
} else {
body.documentIds = documentIds
}
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, { const response = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
method: 'PATCH', method: 'PATCH',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ operation, documentIds }), body: JSON.stringify(body),
}) })
if (!response.ok) { if (!response.ok) {
@@ -858,6 +885,31 @@ export interface TagDefinitionData {
updatedAt: string updatedAt: string
} }
export async function fetchTagDefinitions(knowledgeBaseId: string): Promise<TagDefinitionData[]> {
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`)
if (!response.ok) {
throw new Error(`Failed to fetch tag definitions: ${response.status} ${response.statusText}`)
}
const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to fetch tag definitions')
}
return Array.isArray(result.data) ? result.data : []
}
export function useTagDefinitionsQuery(knowledgeBaseId?: string | null) {
return useQuery({
queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId ?? ''),
queryFn: () => fetchTagDefinitions(knowledgeBaseId as string),
enabled: Boolean(knowledgeBaseId),
staleTime: 60 * 1000,
placeholderData: keepPreviousData,
})
}
export interface CreateTagDefinitionParams { export interface CreateTagDefinitionParams {
knowledgeBaseId: string knowledgeBaseId: string
displayName: string displayName: string
@@ -914,7 +966,7 @@ export function useCreateTagDefinition() {
mutationFn: createTagDefinition, mutationFn: createTagDefinition,
onSuccess: (_, { knowledgeBaseId }) => { onSuccess: (_, { knowledgeBaseId }) => {
queryClient.invalidateQueries({ queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId), queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
}) })
}, },
}) })
@@ -952,8 +1004,152 @@ export function useDeleteTagDefinition() {
mutationFn: deleteTagDefinition, mutationFn: deleteTagDefinition,
onSuccess: (_, { knowledgeBaseId }) => { onSuccess: (_, { knowledgeBaseId }) => {
queryClient.invalidateQueries({ queryClient.invalidateQueries({
queryKey: knowledgeKeys.detail(knowledgeBaseId), queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
}) })
}, },
}) })
} }
export interface DocumentTagDefinitionData {
id: string
tagSlot: string
displayName: string
fieldType: string
createdAt: string
updatedAt: string
}
export async function fetchDocumentTagDefinitions(
knowledgeBaseId: string,
documentId: string
): Promise<DocumentTagDefinitionData[]> {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`
)
if (!response.ok) {
throw new Error(
`Failed to fetch document tag definitions: ${response.status} ${response.statusText}`
)
}
const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to fetch document tag definitions')
}
return Array.isArray(result.data) ? result.data : []
}
export function useDocumentTagDefinitionsQuery(
knowledgeBaseId?: string | null,
documentId?: string | null
) {
return useQuery({
queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId ?? '', documentId ?? ''),
queryFn: () => fetchDocumentTagDefinitions(knowledgeBaseId as string, documentId as string),
enabled: Boolean(knowledgeBaseId && documentId),
staleTime: 60 * 1000,
placeholderData: keepPreviousData,
})
}
export interface DocumentTagDefinitionInput {
tagSlot: string
displayName: string
fieldType: string
}
export interface SaveDocumentTagDefinitionsParams {
knowledgeBaseId: string
documentId: string
definitions: DocumentTagDefinitionInput[]
}
export async function saveDocumentTagDefinitions({
knowledgeBaseId,
documentId,
definitions,
}: SaveDocumentTagDefinitionsParams): Promise<DocumentTagDefinitionData[]> {
const validDefinitions = (definitions || []).filter(
(def) => def?.tagSlot && def.displayName && def.displayName.trim()
)
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ definitions: validDefinitions }),
}
)
if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to save document tag definitions')
}
const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to save document tag definitions')
}
return result.data
}
export function useSaveDocumentTagDefinitions() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: saveDocumentTagDefinitions,
onSuccess: (_, { knowledgeBaseId, documentId }) => {
queryClient.invalidateQueries({
queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
})
},
onError: (error) => {
logger.error('Failed to save document tag definitions:', error)
},
})
}
export interface DeleteDocumentTagDefinitionsParams {
knowledgeBaseId: string
documentId: string
}
export async function deleteDocumentTagDefinitions({
knowledgeBaseId,
documentId,
}: DeleteDocumentTagDefinitionsParams): Promise<void> {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
{ method: 'DELETE' }
)
if (!response.ok) {
const result = await response.json()
throw new Error(result.error || 'Failed to delete document tag definitions')
}
const result = await response.json()
if (!result?.success) {
throw new Error(result?.error || 'Failed to delete document tag definitions')
}
}
export function useDeleteDocumentTagDefinitions() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: deleteDocumentTagDefinitions,
onSuccess: (_, { knowledgeBaseId, documentId }) => {
queryClient.invalidateQueries({
queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
})
},
onError: (error) => {
logger.error('Failed to delete document tag definitions:', error)
},
})
}

View File

@@ -5,7 +5,7 @@ import { useShallow } from 'zustand/react/shallow'
import { useSession } from '@/lib/auth/auth-client' import { useSession } from '@/lib/auth/auth-client'
import { useSocket } from '@/app/workspace/providers/socket-provider' import { useSocket } from '@/app/workspace/providers/socket-provider'
import { getBlock } from '@/blocks' import { getBlock } from '@/blocks'
import { normalizeName } from '@/executor/constants' import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { useUndoRedo } from '@/hooks/use-undo-redo' import { useUndoRedo } from '@/hooks/use-undo-redo'
import { import {
BLOCK_OPERATIONS, BLOCK_OPERATIONS,
@@ -740,6 +740,16 @@ export function useCollaborativeWorkflow() {
return { success: false, error: 'Block name cannot be empty' } return { success: false, error: 'Block name cannot be empty' }
} }
if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(normalizedNewName)) {
logger.error(`Cannot rename block to reserved name: "${trimmedName}"`)
useNotificationStore.getState().addNotification({
level: 'error',
message: `"${trimmedName}" is a reserved name and cannot be used`,
workflowId: activeWorkflowId || undefined,
})
return { success: false, error: `"${trimmedName}" is a reserved name` }
}
const currentBlocks = useWorkflowStore.getState().blocks const currentBlocks = useWorkflowStore.getState().blocks
const conflictingBlock = Object.entries(currentBlocks).find( const conflictingBlock = Object.entries(currentBlocks).find(
([blockId, block]) => blockId !== id && normalizeName(block.name) === normalizedNewName ([blockId, block]) => blockId !== id && normalizeName(block.name) === normalizedNewName

View File

@@ -1,7 +1,5 @@
'use client' 'use client'
import { useState } from 'react'
import { Check, Copy } from 'lucide-react'
import { Code } from '@/components/emcn' import { Code } from '@/components/emcn'
interface CodeBlockProps { interface CodeBlockProps {
@@ -10,30 +8,8 @@ interface CodeBlockProps {
} }
export function CodeBlock({ code, language }: CodeBlockProps) { export function CodeBlock({ code, language }: CodeBlockProps) {
const [copied, setCopied] = useState(false)
const handleCopy = () => {
navigator.clipboard.writeText(code)
setCopied(true)
setTimeout(() => setCopied(false), 2000)
}
return ( return (
<div className='dark w-full overflow-hidden rounded-md border border-[#2a2a2a] bg-[#1F1F1F] text-sm'> <div className='dark w-full overflow-hidden rounded-md border border-[#2a2a2a] bg-[#1F1F1F] text-sm'>
<div className='flex items-center justify-between border-[#2a2a2a] border-b px-4 py-1.5'>
<span className='text-[#A3A3A3] text-xs'>{language}</span>
<button
onClick={handleCopy}
className='text-[#A3A3A3] transition-colors hover:text-gray-300'
title='Copy code'
>
{copied ? (
<Check className='h-3 w-3' strokeWidth={2} />
) : (
<Copy className='h-3 w-3' strokeWidth={2} />
)}
</button>
</div>
<Code.Viewer <Code.Viewer
code={code} code={code}
showGutter showGutter

View File

@@ -61,7 +61,7 @@ export const mdxComponents: MDXRemoteProps['components'] = {
)} )}
/> />
), ),
li: (props: any) => <li {...props} className={clsx('mb-2', props.className)} />, li: (props: any) => <li {...props} className={clsx('mb-1', props.className)} />,
strong: (props: any) => <strong {...props} className={clsx('font-semibold', props.className)} />, strong: (props: any) => <strong {...props} className={clsx('font-semibold', props.className)} />,
em: (props: any) => <em {...props} className={clsx('italic', props.className)} />, em: (props: any) => <em {...props} className={clsx('italic', props.className)} />,
a: (props: any) => { a: (props: any) => {

View File

@@ -10,6 +10,8 @@ import type { BlogMeta, BlogPost, TagWithCount } from '@/lib/blog/schema'
import { AuthorSchema, BlogFrontmatterSchema } from '@/lib/blog/schema' import { AuthorSchema, BlogFrontmatterSchema } from '@/lib/blog/schema'
import { AUTHORS_DIR, BLOG_DIR, byDateDesc, ensureContentDirs, toIsoDate } from '@/lib/blog/utils' import { AUTHORS_DIR, BLOG_DIR, byDateDesc, ensureContentDirs, toIsoDate } from '@/lib/blog/utils'
const postComponentsRegistry: Record<string, Record<string, React.ComponentType>> = {}
let cachedMeta: BlogMeta[] | null = null let cachedMeta: BlogMeta[] | null = null
let cachedAuthors: Record<string, any> | null = null let cachedAuthors: Record<string, any> | null = null
@@ -99,6 +101,21 @@ export async function getAllTags(): Promise<TagWithCount[]> {
.sort((a, b) => b.count - a.count || a.tag.localeCompare(b.tag)) .sort((a, b) => b.count - a.count || a.tag.localeCompare(b.tag))
} }
async function loadPostComponents(slug: string): Promise<Record<string, React.ComponentType>> {
if (postComponentsRegistry[slug]) {
return postComponentsRegistry[slug]
}
try {
const postComponents = await import(`@/content/blog/${slug}/components`)
postComponentsRegistry[slug] = postComponents
return postComponents
} catch {
postComponentsRegistry[slug] = {}
return {}
}
}
export async function getPostBySlug(slug: string): Promise<BlogPost> { export async function getPostBySlug(slug: string): Promise<BlogPost> {
const meta = await scanFrontmatters() const meta = await scanFrontmatters()
const found = meta.find((m) => m.slug === slug) const found = meta.find((m) => m.slug === slug)
@@ -107,9 +124,13 @@ export async function getPostBySlug(slug: string): Promise<BlogPost> {
const raw = await fs.readFile(mdxPath, 'utf-8') const raw = await fs.readFile(mdxPath, 'utf-8')
const { content, data } = matter(raw) const { content, data } = matter(raw)
const fm = BlogFrontmatterSchema.parse(data) const fm = BlogFrontmatterSchema.parse(data)
const postComponents = await loadPostComponents(slug)
const mergedComponents = { ...mdxComponents, ...postComponents }
const compiled = await compileMDX({ const compiled = await compileMDX({
source: content, source: content,
components: mdxComponents as any, components: mergedComponents as any,
options: { options: {
parseFrontmatter: false, parseFrontmatter: false,
mdxOptions: { mdxOptions: {
@@ -141,6 +162,7 @@ export async function getPostBySlug(slug: string): Promise<BlogPost> {
export function invalidateBlogCaches() { export function invalidateBlogCaches() {
cachedMeta = null cachedMeta = null
cachedAuthors = null cachedAuthors = null
Object.keys(postComponentsRegistry).forEach((key) => delete postComponentsRegistry[key])
} }
export async function getRelatedPosts(slug: string, limit = 3): Promise<BlogMeta[]> { export async function getRelatedPosts(slug: string, limit = 3): Promise<BlogMeta[]> {

View File

@@ -34,17 +34,3 @@ import './workflow/set-global-workflow-variables'
// User tools // User tools
import './user/set-environment-variables' import './user/set-environment-variables'
// Re-export UI config utilities for convenience
export {
getSubagentLabels,
getToolUIConfig,
hasInterrupt,
type InterruptConfig,
isSpecialTool,
isSubagentTool,
type ParamsTableConfig,
type SecondaryActionConfig,
type SubagentConfig,
type ToolUIConfig,
} from './ui-config'

View File

@@ -1,10 +1,6 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool' import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { import type { KnowledgeBaseArgs, KnowledgeBaseResult } from '@/lib/copilot/tools/shared/schemas'
type KnowledgeBaseArgs,
KnowledgeBaseArgsSchema,
type KnowledgeBaseResult,
} from '@/lib/copilot/tools/shared/schemas'
import { generateSearchEmbedding } from '@/lib/knowledge/embeddings' import { generateSearchEmbedding } from '@/lib/knowledge/embeddings'
import { import {
createKnowledgeBase, createKnowledgeBase,
@@ -15,11 +11,6 @@ import { getQueryStrategy, handleVectorOnlySearch } from '@/app/api/knowledge/se
const logger = createLogger('KnowledgeBaseServerTool') const logger = createLogger('KnowledgeBaseServerTool')
// Re-export for backwards compatibility
export const KnowledgeBaseInput = KnowledgeBaseArgsSchema
export type KnowledgeBaseInputType = KnowledgeBaseArgs
export type KnowledgeBaseResultType = KnowledgeBaseResult
/** /**
* Knowledge base tool for copilot to create, list, and get knowledge bases * Knowledge base tool for copilot to create, list, and get knowledge bases
*/ */
@@ -163,7 +154,6 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
} }
} }
// Verify knowledge base exists
const kb = await getKnowledgeBaseById(args.knowledgeBaseId) const kb = await getKnowledgeBaseById(args.knowledgeBaseId)
if (!kb) { if (!kb) {
return { return {
@@ -181,10 +171,8 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
) )
const queryVector = JSON.stringify(queryEmbedding) const queryVector = JSON.stringify(queryEmbedding)
// Get search strategy
const strategy = getQueryStrategy(1, topK) const strategy = getQueryStrategy(1, topK)
// Perform vector search
const results = await handleVectorOnlySearch({ const results = await handleVectorOnlySearch({
knowledgeBaseIds: [args.knowledgeBaseId], knowledgeBaseIds: [args.knowledgeBaseId],
topK, topK,

View File

@@ -6,10 +6,7 @@ import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/g
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool' import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks' import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation' import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
import { import { knowledgeBaseServerTool } from '@/lib/copilot/tools/server/knowledge/knowledge-base'
KnowledgeBaseInput,
knowledgeBaseServerTool,
} from '@/lib/copilot/tools/server/knowledge/knowledge-base'
import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-api-request' import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-api-request'
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online' import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials' import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
@@ -28,6 +25,7 @@ import {
GetBlocksMetadataResult, GetBlocksMetadataResult,
GetTriggerBlocksInput, GetTriggerBlocksInput,
GetTriggerBlocksResult, GetTriggerBlocksResult,
KnowledgeBaseArgsSchema,
} from '@/lib/copilot/tools/shared/schemas' } from '@/lib/copilot/tools/shared/schemas'
// Generic execute response schemas (success path only for this route; errors handled via HTTP status) // Generic execute response schemas (success path only for this route; errors handled via HTTP status)
@@ -90,7 +88,7 @@ export async function routeExecution(
args = GetTriggerBlocksInput.parse(args) args = GetTriggerBlocksInput.parse(args)
} }
if (toolName === 'knowledge_base') { if (toolName === 'knowledge_base') {
args = KnowledgeBaseInput.parse(args) args = KnowledgeBaseArgsSchema.parse(args)
} }
const result = await tool.execute(args, context) const result = await tool.execute(args, context)

View File

@@ -14,7 +14,7 @@ import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers' import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getAllBlocks, getBlock } from '@/blocks/registry' import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types' import type { SubBlockConfig } from '@/blocks/types'
import { EDGE, normalizeName } from '@/executor/constants' import { EDGE, normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { getUserPermissionConfig } from '@/executor/utils/permission-check' import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils' import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants' import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
@@ -63,6 +63,7 @@ type SkippedItemType =
| 'invalid_subflow_parent' | 'invalid_subflow_parent'
| 'nested_subflow_not_allowed' | 'nested_subflow_not_allowed'
| 'duplicate_block_name' | 'duplicate_block_name'
| 'reserved_block_name'
| 'duplicate_trigger' | 'duplicate_trigger'
| 'duplicate_single_instance_block' | 'duplicate_single_instance_block'
@@ -1683,7 +1684,8 @@ function applyOperationsToWorkflowState(
} }
} }
if (params?.name !== undefined) { if (params?.name !== undefined) {
if (!normalizeName(params.name)) { const normalizedName = normalizeName(params.name)
if (!normalizedName) {
logSkippedItem(skippedItems, { logSkippedItem(skippedItems, {
type: 'missing_required_params', type: 'missing_required_params',
operationType: 'edit', operationType: 'edit',
@@ -1691,6 +1693,14 @@ function applyOperationsToWorkflowState(
reason: `Cannot rename to empty name`, reason: `Cannot rename to empty name`,
details: { requestedName: params.name }, details: { requestedName: params.name },
}) })
} else if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(normalizedName)) {
logSkippedItem(skippedItems, {
type: 'reserved_block_name',
operationType: 'edit',
blockId: block_id,
reason: `Cannot rename to "${params.name}" - this is a reserved name`,
details: { requestedName: params.name },
})
} else { } else {
const conflictingBlock = findBlockWithDuplicateNormalizedName( const conflictingBlock = findBlockWithDuplicateNormalizedName(
modifiedState.blocks, modifiedState.blocks,
@@ -1911,7 +1921,8 @@ function applyOperationsToWorkflowState(
} }
case 'add': { case 'add': {
if (!params?.type || !params?.name || !normalizeName(params.name)) { const addNormalizedName = params?.name ? normalizeName(params.name) : ''
if (!params?.type || !params?.name || !addNormalizedName) {
logSkippedItem(skippedItems, { logSkippedItem(skippedItems, {
type: 'missing_required_params', type: 'missing_required_params',
operationType: 'add', operationType: 'add',
@@ -1922,6 +1933,17 @@ function applyOperationsToWorkflowState(
break break
} }
if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(addNormalizedName)) {
logSkippedItem(skippedItems, {
type: 'reserved_block_name',
operationType: 'add',
blockId: block_id,
reason: `Block name "${params.name}" is a reserved name and cannot be used`,
details: { requestedName: params.name },
})
break
}
const conflictingBlock = findBlockWithDuplicateNormalizedName( const conflictingBlock = findBlockWithDuplicateNormalizedName(
modifiedState.blocks, modifiedState.blocks,
params.name, params.name,

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db' import { db } from '@sim/db'
import { idempotencyKey } from '@sim/db/schema' import { idempotencyKey } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq, lt } from 'drizzle-orm' import { and, count, inArray, like, lt, max, min, sql } from 'drizzle-orm'
const logger = createLogger('IdempotencyCleanup') const logger = createLogger('IdempotencyCleanup')
@@ -19,7 +19,8 @@ export interface CleanupOptions {
batchSize?: number batchSize?: number
/** /**
* Specific namespace to clean up, or undefined to clean all namespaces * Specific namespace prefix to clean up (e.g., 'webhook', 'polling')
* Keys are prefixed with namespace, so this filters by key prefix
*/ */
namespace?: string namespace?: string
} }
@@ -53,13 +54,17 @@ export async function cleanupExpiredIdempotencyKeys(
while (hasMore) { while (hasMore) {
try { try {
// Build where condition - filter by cutoff date and optionally by namespace prefix
const whereCondition = namespace const whereCondition = namespace
? and(lt(idempotencyKey.createdAt, cutoffDate), eq(idempotencyKey.namespace, namespace)) ? and(
lt(idempotencyKey.createdAt, cutoffDate),
like(idempotencyKey.key, `${namespace}:%`)
)
: lt(idempotencyKey.createdAt, cutoffDate) : lt(idempotencyKey.createdAt, cutoffDate)
// First, find IDs to delete with limit // Find keys to delete with limit
const toDelete = await db const toDelete = await db
.select({ key: idempotencyKey.key, namespace: idempotencyKey.namespace }) .select({ key: idempotencyKey.key })
.from(idempotencyKey) .from(idempotencyKey)
.where(whereCondition) .where(whereCondition)
.limit(batchSize) .limit(batchSize)
@@ -68,14 +73,13 @@ export async function cleanupExpiredIdempotencyKeys(
break break
} }
// Delete the found records // Delete the found records by key
const deleteResult = await db const deleteResult = await db
.delete(idempotencyKey) .delete(idempotencyKey)
.where( .where(
and( inArray(
...toDelete.map((item) => idempotencyKey.key,
and(eq(idempotencyKey.key, item.key), eq(idempotencyKey.namespace, item.namespace)) toDelete.map((item) => item.key)
)
) )
) )
.returning({ key: idempotencyKey.key }) .returning({ key: idempotencyKey.key })
@@ -126,6 +130,7 @@ export async function cleanupExpiredIdempotencyKeys(
/** /**
* Get statistics about idempotency key usage * Get statistics about idempotency key usage
* Uses SQL aggregations to avoid loading all keys into memory
*/ */
export async function getIdempotencyKeyStats(): Promise<{ export async function getIdempotencyKeyStats(): Promise<{
totalKeys: number totalKeys: number
@@ -134,34 +139,35 @@ export async function getIdempotencyKeyStats(): Promise<{
newestKey: Date | null newestKey: Date | null
}> { }> {
try { try {
const allKeys = await db // Get total count and date range in a single query
const [statsResult] = await db
.select({ .select({
namespace: idempotencyKey.namespace, totalKeys: count(),
createdAt: idempotencyKey.createdAt, oldestKey: min(idempotencyKey.createdAt),
newestKey: max(idempotencyKey.createdAt),
}) })
.from(idempotencyKey) .from(idempotencyKey)
const totalKeys = allKeys.length // Get counts by namespace prefix using SQL substring
// Extracts everything before the first ':' as the namespace
const namespaceStats = await db
.select({
namespace: sql<string>`split_part(${idempotencyKey.key}, ':', 1)`.as('namespace'),
count: count(),
})
.from(idempotencyKey)
.groupBy(sql`split_part(${idempotencyKey.key}, ':', 1)`)
const keysByNamespace: Record<string, number> = {} const keysByNamespace: Record<string, number> = {}
let oldestKey: Date | null = null for (const row of namespaceStats) {
let newestKey: Date | null = null keysByNamespace[row.namespace || 'unknown'] = row.count
for (const key of allKeys) {
keysByNamespace[key.namespace] = (keysByNamespace[key.namespace] || 0) + 1
if (!oldestKey || key.createdAt < oldestKey) {
oldestKey = key.createdAt
}
if (!newestKey || key.createdAt > newestKey) {
newestKey = key.createdAt
}
} }
return { return {
totalKeys, totalKeys: statsResult?.totalKeys ?? 0,
keysByNamespace, keysByNamespace,
oldestKey, oldestKey: statsResult?.oldestKey ?? null,
newestKey, newestKey: statsResult?.newestKey ?? null,
} }
} catch (error) { } catch (error) {
logger.error('Failed to get idempotency key stats:', error) logger.error('Failed to get idempotency key stats:', error)

View File

@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
import { db } from '@sim/db' import { db } from '@sim/db'
import { idempotencyKey } from '@sim/db/schema' import { idempotencyKey } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import { getRedisClient } from '@/lib/core/config/redis' import { getRedisClient } from '@/lib/core/config/redis'
import { getStorageMethod, type StorageMethod } from '@/lib/core/storage' import { getStorageMethod, type StorageMethod } from '@/lib/core/storage'
import { extractProviderIdentifierFromBody } from '@/lib/webhooks/provider-utils' import { extractProviderIdentifierFromBody } from '@/lib/webhooks/provider-utils'
@@ -124,12 +124,7 @@ export class IdempotencyService {
const existing = await db const existing = await db
.select({ result: idempotencyKey.result, createdAt: idempotencyKey.createdAt }) .select({ result: idempotencyKey.result, createdAt: idempotencyKey.createdAt })
.from(idempotencyKey) .from(idempotencyKey)
.where( .where(eq(idempotencyKey.key, normalizedKey))
and(
eq(idempotencyKey.key, normalizedKey),
eq(idempotencyKey.namespace, this.config.namespace)
)
)
.limit(1) .limit(1)
if (existing.length > 0) { if (existing.length > 0) {
@@ -224,11 +219,12 @@ export class IdempotencyService {
.insert(idempotencyKey) .insert(idempotencyKey)
.values({ .values({
key: normalizedKey, key: normalizedKey,
namespace: this.config.namespace,
result: inProgressResult, result: inProgressResult,
createdAt: new Date(), createdAt: new Date(),
}) })
.onConflictDoNothing() .onConflictDoNothing({
target: [idempotencyKey.key],
})
.returning({ key: idempotencyKey.key }) .returning({ key: idempotencyKey.key })
if (insertResult.length > 0) { if (insertResult.length > 0) {
@@ -243,12 +239,7 @@ export class IdempotencyService {
const existing = await db const existing = await db
.select({ result: idempotencyKey.result }) .select({ result: idempotencyKey.result })
.from(idempotencyKey) .from(idempotencyKey)
.where( .where(eq(idempotencyKey.key, normalizedKey))
and(
eq(idempotencyKey.key, normalizedKey),
eq(idempotencyKey.namespace, this.config.namespace)
)
)
.limit(1) .limit(1)
const existingResult = const existingResult =
@@ -280,12 +271,7 @@ export class IdempotencyService {
const existing = await db const existing = await db
.select({ result: idempotencyKey.result }) .select({ result: idempotencyKey.result })
.from(idempotencyKey) .from(idempotencyKey)
.where( .where(eq(idempotencyKey.key, normalizedKey))
and(
eq(idempotencyKey.key, normalizedKey),
eq(idempotencyKey.namespace, this.config.namespace)
)
)
.limit(1) .limit(1)
currentResult = existing.length > 0 ? (existing[0].result as ProcessingResult) : null currentResult = existing.length > 0 ? (existing[0].result as ProcessingResult) : null
} }
@@ -339,12 +325,11 @@ export class IdempotencyService {
.insert(idempotencyKey) .insert(idempotencyKey)
.values({ .values({
key: normalizedKey, key: normalizedKey,
namespace: this.config.namespace,
result: result, result: result,
createdAt: new Date(), createdAt: new Date(),
}) })
.onConflictDoUpdate({ .onConflictDoUpdate({
target: [idempotencyKey.key, idempotencyKey.namespace], target: [idempotencyKey.key],
set: { set: {
result: result, result: result,
createdAt: new Date(), createdAt: new Date(),

View File

@@ -82,10 +82,26 @@ export function formatDateTime(date: Date, timezone?: string): string {
* @returns A formatted date string in the format "MMM D, YYYY" * @returns A formatted date string in the format "MMM D, YYYY"
*/ */
export function formatDate(date: Date): string { export function formatDate(date: Date): string {
return date.toLocaleString('en-US', { return date.toLocaleDateString('en-US', {
year: 'numeric',
month: 'short', month: 'short',
day: 'numeric', day: 'numeric',
})
}
/**
* Formats a date string to absolute format for tooltip display
* @param dateString - ISO date string to format
* @returns A formatted date string (e.g., "Jan 22, 2026, 01:30 PM")
*/
export function formatAbsoluteDate(dateString: string): string {
const date = new Date(dateString)
return date.toLocaleDateString('en-US', {
year: 'numeric', year: 'numeric',
month: 'short',
day: 'numeric',
hour: '2-digit',
minute: '2-digit',
}) })
} }
@@ -139,20 +155,24 @@ export function formatCompactTimestamp(iso: string): string {
/** /**
* Format a duration in milliseconds to a human-readable format * Format a duration in milliseconds to a human-readable format
* @param durationMs - The duration in milliseconds * @param durationMs - The duration in milliseconds
* @param options - Optional formatting options
* @param options.precision - Number of decimal places for seconds (default: 0)
* @returns A formatted duration string * @returns A formatted duration string
*/ */
export function formatDuration(durationMs: number): string { export function formatDuration(durationMs: number, options?: { precision?: number }): string {
const precision = options?.precision ?? 0
if (durationMs < 1000) { if (durationMs < 1000) {
return `${durationMs}ms` return `${durationMs}ms`
} }
const seconds = Math.floor(durationMs / 1000) const seconds = durationMs / 1000
if (seconds < 60) { if (seconds < 60) {
return `${seconds}s` return precision > 0 ? `${seconds.toFixed(precision)}s` : `${Math.floor(seconds)}s`
} }
const minutes = Math.floor(seconds / 60) const minutes = Math.floor(seconds / 60)
const remainingSeconds = seconds % 60 const remainingSeconds = Math.floor(seconds % 60)
if (minutes < 60) { if (minutes < 60) {
return `${minutes}m ${remainingSeconds}s` return `${minutes}m ${remainingSeconds}s`
} }
@@ -161,3 +181,40 @@ export function formatDuration(durationMs: number): string {
const remainingMinutes = minutes % 60 const remainingMinutes = minutes % 60
return `${hours}h ${remainingMinutes}m` return `${hours}h ${remainingMinutes}m`
} }
/**
 * Formats a date string to relative time (e.g., "2h ago", "3d ago")
 * @param dateString - ISO date string to format
 * @returns A human-readable relative time string
 */
export function formatRelativeTime(dateString: string): string {
  const elapsedSeconds = Math.floor((Date.now() - new Date(dateString).getTime()) / 1000)

  // Anything under a minute reads as "just now".
  if (elapsedSeconds < 60) {
    return 'just now'
  }

  // Ordered buckets: [upper bound in seconds (exclusive), seconds per unit, unit suffix].
  const buckets: Array<[number, number, string]> = [
    [3600, 60, 'm'], // under an hour -> minutes
    [86400, 3600, 'h'], // under a day -> hours
    [604800, 86400, 'd'], // under a week -> days
    [2592000, 604800, 'w'], // under ~a month (30d) -> weeks
    [31536000, 2592000, 'mo'], // under a year (365d) -> months
  ]

  for (const [upperBound, secondsPerUnit, suffix] of buckets) {
    if (elapsedSeconds < upperBound) {
      return `${Math.floor(elapsedSeconds / secondsPerUnit)}${suffix} ago`
    }
  }

  return `${Math.floor(elapsedSeconds / 31536000)}y ago`
}

View File

@@ -130,8 +130,12 @@ async function executeCode(request) {
await jail.set('environmentVariables', new ivm.ExternalCopy(envVars).copyInto()) await jail.set('environmentVariables', new ivm.ExternalCopy(envVars).copyInto())
for (const [key, value] of Object.entries(contextVariables)) { for (const [key, value] of Object.entries(contextVariables)) {
if (value === undefined) {
await jail.set(key, undefined)
} else {
await jail.set(key, new ivm.ExternalCopy(value).copyInto()) await jail.set(key, new ivm.ExternalCopy(value).copyInto())
} }
}
const fetchCallback = new ivm.Reference(async (url, optionsJson) => { const fetchCallback = new ivm.Reference(async (url, optionsJson) => {
return new Promise((resolve) => { return new Promise((resolve) => {

View File

@@ -127,7 +127,6 @@ export async function processDocumentTags(
tagData: DocumentTagData[], tagData: DocumentTagData[],
requestId: string requestId: string
): Promise<ProcessedDocumentTags> { ): Promise<ProcessedDocumentTags> {
// Helper to set a tag value with proper typing
const setTagValue = ( const setTagValue = (
tags: ProcessedDocumentTags, tags: ProcessedDocumentTags,
slot: string, slot: string,
@@ -672,12 +671,8 @@ export async function createDocumentRecords(
tag7?: string tag7?: string
}>, }>,
knowledgeBaseId: string, knowledgeBaseId: string,
requestId: string, requestId: string
userId?: string
): Promise<DocumentData[]> { ): Promise<DocumentData[]> {
if (userId) {
const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)
const kb = await db const kb = await db
.select({ userId: knowledgeBase.userId }) .select({ userId: knowledgeBase.userId })
.from(knowledgeBase) .from(knowledgeBase)
@@ -687,7 +682,6 @@ export async function createDocumentRecords(
if (kb.length === 0) { if (kb.length === 0) {
throw new Error('Knowledge base not found') throw new Error('Knowledge base not found')
} }
}
return await db.transaction(async (tx) => { return await db.transaction(async (tx) => {
const now = new Date() const now = new Date()
@@ -770,16 +764,6 @@ export async function createDocumentRecords(
.update(knowledgeBase) .update(knowledgeBase)
.set({ updatedAt: now }) .set({ updatedAt: now })
.where(eq(knowledgeBase.id, knowledgeBaseId)) .where(eq(knowledgeBase.id, knowledgeBaseId))
if (userId) {
const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)
}
} }
return returnData return returnData
@@ -792,7 +776,7 @@ export async function createDocumentRecords(
export async function getDocuments( export async function getDocuments(
knowledgeBaseId: string, knowledgeBaseId: string,
options: { options: {
includeDisabled?: boolean enabledFilter?: 'all' | 'enabled' | 'disabled'
search?: string search?: string
limit?: number limit?: number
offset?: number offset?: number
@@ -846,7 +830,7 @@ export async function getDocuments(
} }
}> { }> {
const { const {
includeDisabled = false, enabledFilter = 'all',
search, search,
limit = 50, limit = 50,
offset = 0, offset = 0,
@@ -854,26 +838,21 @@ export async function getDocuments(
sortOrder = 'asc', sortOrder = 'asc',
} = options } = options
// Build where conditions
const whereConditions = [ const whereConditions = [
eq(document.knowledgeBaseId, knowledgeBaseId), eq(document.knowledgeBaseId, knowledgeBaseId),
isNull(document.deletedAt), isNull(document.deletedAt),
] ]
// Filter out disabled documents unless specifically requested if (enabledFilter === 'enabled') {
if (!includeDisabled) {
whereConditions.push(eq(document.enabled, true)) whereConditions.push(eq(document.enabled, true))
} else if (enabledFilter === 'disabled') {
whereConditions.push(eq(document.enabled, false))
} }
// Add search condition if provided
if (search) { if (search) {
whereConditions.push( whereConditions.push(sql`LOWER(${document.filename}) LIKE LOWER(${`%${search}%`})`)
// Search in filename
sql`LOWER(${document.filename}) LIKE LOWER(${`%${search}%`})`
)
} }
// Get total count for pagination
const totalResult = await db const totalResult = await db
.select({ count: sql<number>`COUNT(*)` }) .select({ count: sql<number>`COUNT(*)` })
.from(document) .from(document)
@@ -882,7 +861,6 @@ export async function getDocuments(
const total = totalResult[0]?.count || 0 const total = totalResult[0]?.count || 0
const hasMore = offset + limit < total const hasMore = offset + limit < total
// Create dynamic order by clause
const getOrderByColumn = () => { const getOrderByColumn = () => {
switch (sortBy) { switch (sortBy) {
case 'filename': case 'filename':
@@ -897,12 +875,13 @@ export async function getDocuments(
return document.uploadedAt return document.uploadedAt
case 'processingStatus': case 'processingStatus':
return document.processingStatus return document.processingStatus
case 'enabled':
return document.enabled
default: default:
return document.uploadedAt return document.uploadedAt
} }
} }
// Use stable secondary sort to prevent shifting when primary values are identical
const primaryOrderBy = sortOrder === 'asc' ? asc(getOrderByColumn()) : desc(getOrderByColumn()) const primaryOrderBy = sortOrder === 'asc' ? asc(getOrderByColumn()) : desc(getOrderByColumn())
const secondaryOrderBy = const secondaryOrderBy =
sortBy === 'filename' ? desc(document.uploadedAt) : asc(document.filename) sortBy === 'filename' ? desc(document.uploadedAt) : asc(document.filename)
@@ -1021,8 +1000,7 @@ export async function createSingleDocument(
tag7?: string tag7?: string
}, },
knowledgeBaseId: string, knowledgeBaseId: string,
requestId: string, requestId: string
userId?: string
): Promise<{ ): Promise<{
id: string id: string
knowledgeBaseId: string knowledgeBaseId: string
@@ -1043,9 +1021,6 @@ export async function createSingleDocument(
tag6: string | null tag6: string | null
tag7: string | null tag7: string | null
}> { }> {
// Check storage limits before creating document
if (userId) {
// Get knowledge base owner
const kb = await db const kb = await db
.select({ userId: knowledgeBase.userId }) .select({ userId: knowledgeBase.userId })
.from(knowledgeBase) .from(knowledgeBase)
@@ -1055,12 +1030,10 @@ export async function createSingleDocument(
if (kb.length === 0) { if (kb.length === 0) {
throw new Error('Knowledge base not found') throw new Error('Knowledge base not found')
} }
}
const documentId = randomUUID() const documentId = randomUUID()
const now = new Date() const now = new Date()
// Process structured tag data if provided
let processedTags: ProcessedDocumentTags = { let processedTags: ProcessedDocumentTags = {
// Text tags (7 slots) // Text tags (7 slots)
tag1: documentData.tag1 ?? null, tag1: documentData.tag1 ?? null,
@@ -1089,11 +1062,9 @@ export async function createSingleDocument(
try { try {
const tagData = JSON.parse(documentData.documentTagsData) const tagData = JSON.parse(documentData.documentTagsData)
if (Array.isArray(tagData)) { if (Array.isArray(tagData)) {
// Process structured tag data and create tag definitions
processedTags = await processDocumentTags(knowledgeBaseId, tagData, requestId) processedTags = await processDocumentTags(knowledgeBaseId, tagData, requestId)
} }
} catch (error) { } catch (error) {
// Re-throw validation errors, only catch JSON parse errors
if (error instanceof SyntaxError) { if (error instanceof SyntaxError) {
logger.warn(`[${requestId}] Failed to parse documentTagsData:`, error) logger.warn(`[${requestId}] Failed to parse documentTagsData:`, error)
} else { } else {
@@ -1126,15 +1097,6 @@ export async function createSingleDocument(
logger.info(`[${requestId}] Document created: ${documentId} in knowledge base ${knowledgeBaseId}`) logger.info(`[${requestId}] Document created: ${documentId} in knowledge base ${knowledgeBaseId}`)
if (userId) {
// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)
}
return newDocument as { return newDocument as {
id: string id: string
knowledgeBaseId: string knowledgeBaseId: string
@@ -1164,8 +1126,7 @@ export async function bulkDocumentOperation(
knowledgeBaseId: string, knowledgeBaseId: string,
operation: 'enable' | 'disable' | 'delete', operation: 'enable' | 'disable' | 'delete',
documentIds: string[], documentIds: string[],
requestId: string, requestId: string
userId?: string
): Promise<{ ): Promise<{
success: boolean success: boolean
successCount: number successCount: number
@@ -1180,7 +1141,6 @@ export async function bulkDocumentOperation(
`[${requestId}] Starting bulk ${operation} operation on ${documentIds.length} documents in knowledge base ${knowledgeBaseId}` `[${requestId}] Starting bulk ${operation} operation on ${documentIds.length} documents in knowledge base ${knowledgeBaseId}`
) )
// Verify all documents belong to this knowledge base
const documentsToUpdate = await db const documentsToUpdate = await db
.select({ .select({
id: document.id, id: document.id,
@@ -1213,24 +1173,6 @@ export async function bulkDocumentOperation(
}> }>
if (operation === 'delete') { if (operation === 'delete') {
// Get file sizes before deletion for storage tracking
let totalSize = 0
if (userId) {
const documentsToDelete = await db
.select({ fileSize: document.fileSize })
.from(document)
.where(
and(
eq(document.knowledgeBaseId, knowledgeBaseId),
inArray(document.id, documentIds),
isNull(document.deletedAt)
)
)
totalSize = documentsToDelete.reduce((sum, doc) => sum + doc.fileSize, 0)
}
// Handle bulk soft delete
updateResult = await db updateResult = await db
.update(document) .update(document)
.set({ .set({
@@ -1245,7 +1187,6 @@ export async function bulkDocumentOperation(
) )
.returning({ id: document.id, deletedAt: document.deletedAt }) .returning({ id: document.id, deletedAt: document.deletedAt })
} else { } else {
// Handle bulk enable/disable
const enabled = operation === 'enable' const enabled = operation === 'enable'
updateResult = await db updateResult = await db
@@ -1276,6 +1217,77 @@ export async function bulkDocumentOperation(
} }
} }
/**
 * Perform bulk operations on all documents matching a filter
 */
export async function bulkDocumentOperationByFilter(
  knowledgeBaseId: string,
  operation: 'enable' | 'disable' | 'delete',
  enabledFilter: 'all' | 'enabled' | 'disabled' | undefined,
  requestId: string
): Promise<{
  success: boolean
  successCount: number
  updatedDocuments: Array<{
    id: string
    enabled?: boolean
    deletedAt?: Date | null
  }>
}> {
  logger.info(
    `[${requestId}] Starting bulk ${operation} operation on all documents (filter: ${enabledFilter || 'all'}) in knowledge base ${knowledgeBaseId}`
  )

  // Base scope: rows in this knowledge base that are not already soft-deleted.
  const conditions = [eq(document.knowledgeBaseId, knowledgeBaseId), isNull(document.deletedAt)]

  // Narrow by enabled state only when a specific filter was requested;
  // 'all' (or an absent filter) applies no extra condition.
  if (enabledFilter === 'enabled' || enabledFilter === 'disabled') {
    conditions.push(eq(document.enabled, enabledFilter === 'enabled'))
  }

  let updatedDocuments: Array<{
    id: string
    enabled?: boolean
    deletedAt?: Date | null
  }>

  if (operation === 'delete') {
    // Soft delete: stamp deletedAt rather than removing the rows.
    updatedDocuments = await db
      .update(document)
      .set({ deletedAt: new Date() })
      .where(and(...conditions))
      .returning({ id: document.id, deletedAt: document.deletedAt })
  } else {
    // Enable/disable: flip the flag on every matching row.
    updatedDocuments = await db
      .update(document)
      .set({ enabled: operation === 'enable' })
      .where(and(...conditions))
      .returning({ id: document.id, enabled: document.enabled })
  }

  const successCount = updatedDocuments.length

  logger.info(
    `[${requestId}] Bulk ${operation} by filter completed: ${successCount} documents updated in knowledge base ${knowledgeBaseId}`
  )

  return {
    success: true,
    successCount,
    updatedDocuments,
  }
}
/** /**
* Mark a document as failed due to timeout * Mark a document as failed due to timeout
*/ */
@@ -1325,7 +1337,6 @@ export async function retryDocumentProcessing(
}, },
requestId: string requestId: string
): Promise<{ success: boolean; status: string; message: string }> { ): Promise<{ success: boolean; status: string; message: string }> {
// Fetch KB's chunkingConfig for retry processing
const kb = await db const kb = await db
.select({ .select({
chunkingConfig: knowledgeBase.chunkingConfig, chunkingConfig: knowledgeBase.chunkingConfig,
@@ -1336,7 +1347,6 @@ export async function retryDocumentProcessing(
const kbConfig = kb[0].chunkingConfig as { maxSize: number; minSize: number; overlap: number } const kbConfig = kb[0].chunkingConfig as { maxSize: number; minSize: number; overlap: number }
// Clear existing embeddings and reset document state
await db.transaction(async (tx) => { await db.transaction(async (tx) => {
await tx.delete(embedding).where(eq(embedding.documentId, documentId)) await tx.delete(embedding).where(eq(embedding.documentId, documentId))
@@ -1362,7 +1372,6 @@ export async function retryDocumentProcessing(
chunkOverlap: kbConfig.overlap, chunkOverlap: kbConfig.overlap,
} }
// Start processing in the background
processDocumentAsync(knowledgeBaseId, documentId, docData, processingOptions).catch( processDocumentAsync(knowledgeBaseId, documentId, docData, processingOptions).catch(
(error: unknown) => { (error: unknown) => {
logger.error(`[${requestId}] Background retry processing error:`, error) logger.error(`[${requestId}] Background retry processing error:`, error)
@@ -1511,7 +1520,6 @@ export async function updateDocument(
if (updateData.processingError !== undefined) if (updateData.processingError !== undefined)
dbUpdateData.processingError = updateData.processingError dbUpdateData.processingError = updateData.processingError
// Helper to convert string values to proper types for the database
const convertTagValue = ( const convertTagValue = (
slot: string, slot: string,
value: string | undefined value: string | undefined

View File

@@ -6,6 +6,7 @@ export type DocumentSortField =
| 'chunkCount' | 'chunkCount'
| 'uploadedAt' | 'uploadedAt'
| 'processingStatus' | 'processingStatus'
| 'enabled'
export type SortOrder = 'asc' | 'desc' export type SortOrder = 'asc' | 'desc'
export interface DocumentSortOptions { export interface DocumentSortOptions {

View File

@@ -2,12 +2,9 @@
* Autolayout Constants * Autolayout Constants
* *
* Layout algorithm specific constants for spacing, padding, and overlap detection. * Layout algorithm specific constants for spacing, padding, and overlap detection.
* Block dimensions are imported from the shared source: @/lib/workflows/blocks/block-dimensions * Block dimensions are in @/lib/workflows/blocks/block-dimensions
*/ */
// Re-export block dimensions for autolayout consumers
export { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
/** /**
* Horizontal spacing between layers (columns) * Horizontal spacing between layers (columns)
*/ */

View File

@@ -11,21 +11,6 @@ import type { BlockMetrics, BoundingBox, Edge, GraphNode } from '@/lib/workflows
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions' import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
import type { BlockState } from '@/stores/workflows/workflow/types' import type { BlockState } from '@/stores/workflows/workflow/types'
// Re-export layout constants for backwards compatibility
export {
CONTAINER_PADDING,
CONTAINER_PADDING_X,
CONTAINER_PADDING_Y,
ROOT_PADDING_X,
ROOT_PADDING_Y,
}
// Re-export block dimensions for backwards compatibility
export const DEFAULT_BLOCK_WIDTH = BLOCK_DIMENSIONS.FIXED_WIDTH
export const DEFAULT_BLOCK_HEIGHT = BLOCK_DIMENSIONS.MIN_HEIGHT
export const DEFAULT_CONTAINER_WIDTH = CONTAINER_DIMENSIONS.DEFAULT_WIDTH
export const DEFAULT_CONTAINER_HEIGHT = CONTAINER_DIMENSIONS.DEFAULT_HEIGHT
/** /**
* Resolves a potentially undefined numeric value to a fallback * Resolves a potentially undefined numeric value to a fallback
*/ */

View File

@@ -133,7 +133,7 @@
"papaparse": "5.5.3", "papaparse": "5.5.3",
"pdf-lib": "1.17.1", "pdf-lib": "1.17.1",
"postgres": "^3.4.5", "postgres": "^3.4.5",
"posthog-js": "1.268.9", "posthog-js": "1.334.1",
"posthog-node": "5.9.2", "posthog-node": "5.9.2",
"prismjs": "^1.30.0", "prismjs": "^1.30.0",
"react": "19.2.1", "react": "19.2.1",

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 122 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 33 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 405 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 228 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 352 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 257 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 143 KiB

View File

@@ -2116,6 +2116,24 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
}) })
}) })
} }
} else {
// Check if this is an integration tool (server-side) that should be auto-executed
const isIntegrationTool = !CLASS_TOOL_METADATA[name]
if (isIntegrationTool && isSubAgentAutoAllowed) {
logger.info('[SubAgent] Auto-executing integration tool (auto-allowed)', {
id,
name,
})
// Execute integration tool via the store method
const { executeIntegrationTool } = get()
executeIntegrationTool(id).catch((err) => {
logger.error('[SubAgent] Integration tool auto-execution failed', {
id,
name,
error: err?.message || err,
})
})
}
} }
} }
} catch (e: any) { } catch (e: any) {
@@ -2797,9 +2815,14 @@ export const useCopilotStore = create<CopilotStore>()(
mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent' mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'
// Extract slash commands from contexts (lowercase) and filter them out from contexts // Extract slash commands from contexts (lowercase) and filter them out from contexts
// Map UI command IDs to API command IDs (e.g., "actions" -> "superagent")
const uiToApiCommandMap: Record<string, string> = { actions: 'superagent' }
const commands = contexts const commands = contexts
?.filter((c) => c.kind === 'slash_command' && 'command' in c) ?.filter((c) => c.kind === 'slash_command' && 'command' in c)
.map((c) => (c as any).command.toLowerCase()) as string[] | undefined .map((c) => {
const uiCommand = (c as any).command.toLowerCase()
return uiToApiCommandMap[uiCommand] || uiCommand
}) as string[] | undefined
const filteredContexts = contexts?.filter((c) => c.kind !== 'slash_command') const filteredContexts = contexts?.filter((c) => c.kind !== 'slash_command')
const result = await sendStreamingMessage({ const result = await sendStreamingMessage({
@@ -3923,11 +3946,16 @@ export const useCopilotStore = create<CopilotStore>()(
loadAutoAllowedTools: async () => { loadAutoAllowedTools: async () => {
try { try {
logger.info('[AutoAllowedTools] Loading from API...')
const res = await fetch('/api/copilot/auto-allowed-tools') const res = await fetch('/api/copilot/auto-allowed-tools')
logger.info('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
if (res.ok) { if (res.ok) {
const data = await res.json() const data = await res.json()
set({ autoAllowedTools: data.autoAllowedTools || [] }) const tools = data.autoAllowedTools || []
logger.info('[AutoAllowedTools] Loaded', { tools: data.autoAllowedTools }) set({ autoAllowedTools: tools })
logger.info('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
} else {
logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status })
} }
} catch (err) { } catch (err) {
logger.error('[AutoAllowedTools] Failed to load', { error: err }) logger.error('[AutoAllowedTools] Failed to load', { error: err })
@@ -3936,15 +3964,18 @@ export const useCopilotStore = create<CopilotStore>()(
addAutoAllowedTool: async (toolId: string) => { addAutoAllowedTool: async (toolId: string) => {
try { try {
logger.info('[AutoAllowedTools] Adding tool...', { toolId })
const res = await fetch('/api/copilot/auto-allowed-tools', { const res = await fetch('/api/copilot/auto-allowed-tools', {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ toolId }), body: JSON.stringify({ toolId }),
}) })
logger.info('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
if (res.ok) { if (res.ok) {
const data = await res.json() const data = await res.json()
logger.info('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
set({ autoAllowedTools: data.autoAllowedTools || [] }) set({ autoAllowedTools: data.autoAllowedTools || [] })
logger.info('[AutoAllowedTools] Added tool', { toolId }) logger.info('[AutoAllowedTools] Added tool to store', { toolId })
// Auto-execute all pending tools of the same type // Auto-execute all pending tools of the same type
const { toolCallsById, executeIntegrationTool } = get() const { toolCallsById, executeIntegrationTool } = get()

View File

@@ -7,7 +7,7 @@ import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers' import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getBlock } from '@/blocks' import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types' import type { SubBlockConfig } from '@/blocks/types'
import { normalizeName } from '@/executor/constants' import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { filterNewEdges, getUniqueBlockName, mergeSubblockState } from '@/stores/workflows/utils' import { filterNewEdges, getUniqueBlockName, mergeSubblockState } from '@/stores/workflows/utils'
@@ -726,6 +726,11 @@ export const useWorkflowStore = create<WorkflowStore>()(
return { success: false, changedSubblocks: [] } return { success: false, changedSubblocks: [] }
} }
if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(normalizedNewName)) {
logger.error(`Cannot rename block to reserved name: "${name}"`)
return { success: false, changedSubblocks: [] }
}
const newState = { const newState = {
blocks: { blocks: {
...get().blocks, ...get().blocks,

View File

@@ -56,6 +56,7 @@ describe('Function Execute Tool', () => {
workflowVariables: {}, workflowVariables: {},
blockData: {}, blockData: {},
blockNameMapping: {}, blockNameMapping: {},
blockOutputSchemas: {},
isCustomTool: false, isCustomTool: false,
language: 'javascript', language: 'javascript',
timeout: 5000, timeout: 5000,
@@ -83,6 +84,7 @@ describe('Function Execute Tool', () => {
workflowVariables: {}, workflowVariables: {},
blockData: {}, blockData: {},
blockNameMapping: {}, blockNameMapping: {},
blockOutputSchemas: {},
isCustomTool: false, isCustomTool: false,
language: 'javascript', language: 'javascript',
workflowId: undefined, workflowId: undefined,
@@ -101,6 +103,7 @@ describe('Function Execute Tool', () => {
workflowVariables: {}, workflowVariables: {},
blockData: {}, blockData: {},
blockNameMapping: {}, blockNameMapping: {},
blockOutputSchemas: {},
isCustomTool: false, isCustomTool: false,
language: 'javascript', language: 'javascript',
workflowId: undefined, workflowId: undefined,

View File

@@ -53,6 +53,13 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
description: 'Mapping of block names to block IDs', description: 'Mapping of block names to block IDs',
default: {}, default: {},
}, },
blockOutputSchemas: {
type: 'object',
required: false,
visibility: 'hidden',
description: 'Mapping of block IDs to their output schemas for validation',
default: {},
},
workflowVariables: { workflowVariables: {
type: 'object', type: 'object',
required: false, required: false,
@@ -81,6 +88,7 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
workflowVariables: params.workflowVariables || {}, workflowVariables: params.workflowVariables || {},
blockData: params.blockData || {}, blockData: params.blockData || {},
blockNameMapping: params.blockNameMapping || {}, blockNameMapping: params.blockNameMapping || {},
blockOutputSchemas: params.blockOutputSchemas || {},
workflowId: params._context?.workflowId, workflowId: params._context?.workflowId,
isCustomTool: params.isCustomTool || false, isCustomTool: params.isCustomTool || false,
} }

View File

@@ -11,6 +11,7 @@ export interface CodeExecutionInput {
workflowVariables?: Record<string, unknown> workflowVariables?: Record<string, unknown>
blockData?: Record<string, unknown> blockData?: Record<string, unknown>
blockNameMapping?: Record<string, string> blockNameMapping?: Record<string, string>
blockOutputSchemas?: Record<string, Record<string, unknown>>
_context?: { _context?: {
workflowId?: string workflowId?: string
} }

View File

@@ -164,7 +164,7 @@
"papaparse": "5.5.3", "papaparse": "5.5.3",
"pdf-lib": "1.17.1", "pdf-lib": "1.17.1",
"postgres": "^3.4.5", "postgres": "^3.4.5",
"posthog-js": "1.268.9", "posthog-js": "1.334.1",
"posthog-node": "5.9.2", "posthog-node": "5.9.2",
"prismjs": "^1.30.0", "prismjs": "^1.30.0",
"react": "19.2.1", "react": "19.2.1",
@@ -963,7 +963,9 @@
"@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="],
"@posthog/core": ["@posthog/core@1.2.2", "", {}, "sha512-f16Ozx6LIigRG+HsJdt+7kgSxZTHeX5f1JlCGKI1lXcvlZgfsCR338FuMI2QRYXGl+jg/vYFzGOTQBxl90lnBg=="], "@posthog/core": ["@posthog/core@1.13.0", "", { "dependencies": { "cross-spawn": "^7.0.6" } }, "sha512-knjncrk7qRmssFRbGzBl1Tunt21GRpe0Wv+uVelyL0Rh7PdQUsgguulzXFTps8hA6wPwTU4kq85qnbAJ3eH6Wg=="],
"@posthog/types": ["@posthog/types@1.334.1", "", {}, "sha512-ypFnwTO7qbV7icylLbujbamPdQXbJq0a61GUUBnJAeTbBw/qYPIss5IRYICcbCj0uunQrwD7/CGxVb5TOYKWgA=="],
"@prisma/config": ["@prisma/config@6.19.2", "", { "dependencies": { "c12": "3.1.0", "deepmerge-ts": "7.1.5", "effect": "3.18.4", "empathic": "2.0.0" } }, "sha512-kadBGDl+aUswv/zZMk9Mx0C8UZs1kjao8H9/JpI4Wh4SHZaM7zkTwiKn/iFLfRg+XtOAo/Z/c6pAYhijKl0nzQ=="], "@prisma/config": ["@prisma/config@6.19.2", "", { "dependencies": { "c12": "3.1.0", "deepmerge-ts": "7.1.5", "effect": "3.18.4", "empathic": "2.0.0" } }, "sha512-kadBGDl+aUswv/zZMk9Mx0C8UZs1kjao8H9/JpI4Wh4SHZaM7zkTwiKn/iFLfRg+XtOAo/Z/c6pAYhijKl0nzQ=="],
@@ -1541,6 +1543,8 @@
"@types/tough-cookie": ["@types/tough-cookie@4.0.5", "", {}, "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA=="], "@types/tough-cookie": ["@types/tough-cookie@4.0.5", "", {}, "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA=="],
"@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="],
"@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="], "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="],
"@types/uuid": ["@types/uuid@10.0.0", "", {}, "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ=="], "@types/uuid": ["@types/uuid@10.0.0", "", {}, "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ=="],
@@ -1993,6 +1997,8 @@
"domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="], "domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="],
"dompurify": ["dompurify@3.3.1", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q=="],
"domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="], "domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="],
"dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], "dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="],
@@ -2953,7 +2959,7 @@
"postgres": ["postgres@3.4.8", "", {}, "sha512-d+JFcLM17njZaOLkv6SCev7uoLaBtfK86vMUXhW1Z4glPWh4jozno9APvW/XKFJ3CCxVoC7OL38BqRydtu5nGg=="], "postgres": ["postgres@3.4.8", "", {}, "sha512-d+JFcLM17njZaOLkv6SCev7uoLaBtfK86vMUXhW1Z4glPWh4jozno9APvW/XKFJ3CCxVoC7OL38BqRydtu5nGg=="],
"posthog-js": ["posthog-js@1.268.9", "", { "dependencies": { "@posthog/core": "1.2.2", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", "web-vitals": "^4.2.4" }, "peerDependencies": { "@rrweb/types": "2.0.0-alpha.17", "rrweb-snapshot": "2.0.0-alpha.17" }, "optionalPeers": ["@rrweb/types", "rrweb-snapshot"] }, "sha512-ejK5/i0TUQ8I1SzaIn7xWNf5TzOjWquawpgjKit8DyucD3Z1yf7LTMtgCYZN8oRx9VjiPcP34fSk8YsWQmmkTQ=="], "posthog-js": ["posthog-js@1.334.1", "", { "dependencies": { "@opentelemetry/api": "^1.9.0", "@opentelemetry/api-logs": "^0.208.0", "@opentelemetry/exporter-logs-otlp-http": "^0.208.0", "@opentelemetry/resources": "^2.2.0", "@opentelemetry/sdk-logs": "^0.208.0", "@posthog/core": "1.13.0", "@posthog/types": "1.334.1", "core-js": "^3.38.1", "dompurify": "^3.3.1", "fflate": "^0.4.8", "preact": "^10.28.0", "query-selector-shadow-dom": "^1.0.1", "web-vitals": "^5.1.0" } }, "sha512-5cDzLICr2afnwX/cR9fwoLC0vN0Nb5gP5HiCigzHkgHdO+E3WsYefla3EFMQz7U4r01CBPZ+nZ9/srkzeACxtQ=="],
"posthog-node": ["posthog-node@5.9.2", "", { "dependencies": { "@posthog/core": "1.2.2" } }, "sha512-oU7FbFcH5cn40nhP04cBeT67zE76EiGWjKKzDvm6IOm5P83sqM0Ij0wMJQSHp+QI6ZN7MLzb+4xfMPUEZ4q6CA=="], "posthog-node": ["posthog-node@5.9.2", "", { "dependencies": { "@posthog/core": "1.2.2" } }, "sha512-oU7FbFcH5cn40nhP04cBeT67zE76EiGWjKKzDvm6IOm5P83sqM0Ij0wMJQSHp+QI6ZN7MLzb+4xfMPUEZ4q6CA=="],
@@ -3001,6 +3007,8 @@
"qs": ["qs@6.14.1", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ=="], "qs": ["qs@6.14.1", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ=="],
"query-selector-shadow-dom": ["query-selector-shadow-dom@1.0.1", "", {}, "sha512-lT5yCqEBgfoMYpf3F2xQRK7zEr1rhIIZuceDK6+xRkJQ4NMbHTwXqk4NkwDwQMNqXgG9r9fyHnzwNVs6zV5KRw=="],
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
"quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="], "quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
@@ -3509,7 +3517,7 @@
"web-streams-polyfill": ["web-streams-polyfill@4.0.0-beta.3", "", {}, "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug=="], "web-streams-polyfill": ["web-streams-polyfill@4.0.0-beta.3", "", {}, "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug=="],
"web-vitals": ["web-vitals@4.2.4", "", {}, "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw=="], "web-vitals": ["web-vitals@5.1.0", "", {}, "sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg=="],
"webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="], "webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="],
@@ -4097,8 +4105,16 @@
"postcss-nested/postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="], "postcss-nested/postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="],
"posthog-js/@opentelemetry/api-logs": ["@opentelemetry/api-logs@0.208.0", "", { "dependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg=="],
"posthog-js/@opentelemetry/exporter-logs-otlp-http": ["@opentelemetry/exporter-logs-otlp-http@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-exporter-base": "0.208.0", "@opentelemetry/otlp-transformer": "0.208.0", "@opentelemetry/sdk-logs": "0.208.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-jOv40Bs9jy9bZVLo/i8FwUiuCvbjWDI+ZW13wimJm4LjnlwJxGgB+N/VWOZUTpM+ah/awXeQqKdNlpLf2EjvYg=="],
"posthog-js/@opentelemetry/sdk-logs": ["@opentelemetry/sdk-logs@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.4.0 <1.10.0" } }, "sha512-QlAyL1jRpOeaqx7/leG1vJMp84g0xKP6gJmfELBpnI4O/9xPX+Hu5m1POk9Kl+veNkyth5t19hRlN6tNY1sjbA=="],
"posthog-js/fflate": ["fflate@0.4.8", "", {}, "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="], "posthog-js/fflate": ["fflate@0.4.8", "", {}, "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="],
"posthog-node/@posthog/core": ["@posthog/core@1.2.2", "", {}, "sha512-f16Ozx6LIigRG+HsJdt+7kgSxZTHeX5f1JlCGKI1lXcvlZgfsCR338FuMI2QRYXGl+jg/vYFzGOTQBxl90lnBg=="],
"protobufjs/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], "protobufjs/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"proxy-addr/ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], "proxy-addr/ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="],
@@ -4577,6 +4593,16 @@
"ora/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="], "ora/strip-ansi/ansi-regex": ["ansi-regex@6.2.2", "", {}, "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="],
"posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="],
"posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-exporter-base": ["@opentelemetry/otlp-exporter-base@0.208.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/otlp-transformer": "0.208.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-gMd39gIfVb2OgxldxUtOwGJYSH8P1kVFFlJLuut32L6KgUC4gl1dMhn+YC2mGn0bDOiQYSk/uHOdSjuKp58vvA=="],
"posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer": ["@opentelemetry/otlp-transformer@0.208.0", "", { "dependencies": { "@opentelemetry/api-logs": "0.208.0", "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/sdk-logs": "0.208.0", "@opentelemetry/sdk-metrics": "2.2.0", "@opentelemetry/sdk-trace-base": "2.2.0", "protobufjs": "^7.3.0" }, "peerDependencies": { "@opentelemetry/api": "^1.3.0" } }, "sha512-DCFPY8C6lAQHUNkzcNT9R+qYExvsk6C5Bto2pbNxgicpcSWbe2WHShLxkOxIdNcBiYPdVHv/e7vH7K6TI+C+fQ=="],
"posthog-js/@opentelemetry/sdk-logs/@opentelemetry/core": ["@opentelemetry/core@2.2.0", "", { "dependencies": { "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.0.0 <1.10.0" } }, "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw=="],
"posthog-js/@opentelemetry/sdk-logs/@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="],
"protobufjs/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], "protobufjs/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"react-email/chokidar/readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="], "react-email/chokidar/readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
@@ -4785,6 +4811,12 @@
"ora/cli-cursor/restore-cursor/onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="], "ora/cli-cursor/restore-cursor/onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="],
"posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/resources": ["@opentelemetry/resources@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A=="],
"posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-metrics": ["@opentelemetry/sdk-metrics@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.9.0 <1.10.0" } }, "sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw=="],
"posthog-js/@opentelemetry/exporter-logs-otlp-http/@opentelemetry/otlp-transformer/@opentelemetry/sdk-trace-base": ["@opentelemetry/sdk-trace-base@2.2.0", "", { "dependencies": { "@opentelemetry/core": "2.2.0", "@opentelemetry/resources": "2.2.0", "@opentelemetry/semantic-conventions": "^1.29.0" }, "peerDependencies": { "@opentelemetry/api": ">=1.3.0 <1.10.0" } }, "sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw=="],
"rimraf/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], "rimraf/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"sim/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], "sim/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],

View File

@@ -110,12 +110,22 @@ spec:
{{- end }} {{- end }}
{{- include "sim.resources" .Values.app | nindent 10 }} {{- include "sim.resources" .Values.app | nindent 10 }}
{{- include "sim.securityContext" .Values.app | nindent 10 }} {{- include "sim.securityContext" .Values.app | nindent 10 }}
{{- with .Values.extraVolumeMounts }} {{- if or .Values.extraVolumeMounts .Values.app.extraVolumeMounts }}
volumeMounts: volumeMounts:
{{- with .Values.extraVolumeMounts }}
{{- toYaml . | nindent 12 }} {{- toYaml . | nindent 12 }}
{{- end }} {{- end }}
{{- with .Values.extraVolumes }} {{- with .Values.app.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- if or .Values.extraVolumes .Values.app.extraVolumes }}
volumes: volumes:
{{- with .Values.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.app.extraVolumes }}
{{- toYaml . | nindent 8 }} {{- toYaml . | nindent 8 }}
{{- end }} {{- end }}
{{- end }} {{- end }}
{{- end }}

View File

@@ -92,6 +92,7 @@ spec:
{{- toYaml .Values.ollama.readinessProbe | nindent 12 }} {{- toYaml .Values.ollama.readinessProbe | nindent 12 }}
{{- end }} {{- end }}
{{- include "sim.resources" .Values.ollama | nindent 10 }} {{- include "sim.resources" .Values.ollama | nindent 10 }}
{{- if or .Values.ollama.persistence.enabled .Values.extraVolumeMounts .Values.ollama.extraVolumeMounts }}
volumeMounts: volumeMounts:
{{- if .Values.ollama.persistence.enabled }} {{- if .Values.ollama.persistence.enabled }}
- name: ollama-data - name: ollama-data
@@ -100,13 +101,22 @@ spec:
{{- with .Values.extraVolumeMounts }} {{- with .Values.extraVolumeMounts }}
{{- toYaml . | nindent 12 }} {{- toYaml . | nindent 12 }}
{{- end }} {{- end }}
{{- if .Values.ollama.persistence.enabled }} {{- with .Values.ollama.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- if or .Values.ollama.persistence.enabled .Values.extraVolumes .Values.ollama.extraVolumes }}
volumes: volumes:
{{- if .Values.ollama.persistence.enabled }}
- name: ollama-data - name: ollama-data
persistentVolumeClaim: persistentVolumeClaim:
claimName: {{ include "sim.fullname" . }}-ollama-data claimName: {{ include "sim.fullname" . }}-ollama-data
{{- end }}
{{- with .Values.extraVolumes }} {{- with .Values.extraVolumes }}
{{- toYaml . | nindent 8 }} {{- toYaml . | nindent 8 }}
{{- end }} {{- end }}
{{- with .Values.ollama.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }} {{- end }}
{{- end }} {{- end }}

View File

@@ -84,12 +84,22 @@ spec:
{{- end }} {{- end }}
{{- include "sim.resources" .Values.realtime | nindent 10 }} {{- include "sim.resources" .Values.realtime | nindent 10 }}
{{- include "sim.securityContext" .Values.realtime | nindent 10 }} {{- include "sim.securityContext" .Values.realtime | nindent 10 }}
{{- with .Values.extraVolumeMounts }} {{- if or .Values.extraVolumeMounts .Values.realtime.extraVolumeMounts }}
volumeMounts: volumeMounts:
{{- with .Values.extraVolumeMounts }}
{{- toYaml . | nindent 12 }} {{- toYaml . | nindent 12 }}
{{- end }} {{- end }}
{{- with .Values.extraVolumes }} {{- with .Values.realtime.extraVolumeMounts }}
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- if or .Values.extraVolumes .Values.realtime.extraVolumes }}
volumes: volumes:
{{- with .Values.extraVolumes }}
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.realtime.extraVolumes }}
{{- toYaml . | nindent 8 }} {{- toYaml . | nindent 8 }}
{{- end }} {{- end }}
{{- end }} {{- end }}
{{- end }}

View File

@@ -224,6 +224,10 @@ app:
timeoutSeconds: 5 timeoutSeconds: 5
failureThreshold: 3 failureThreshold: 3
# Additional volumes for app deployment (e.g., branding assets, custom configs)
extraVolumes: []
extraVolumeMounts: []
# Realtime socket server configuration # Realtime socket server configuration
realtime: realtime:
# Enable/disable the realtime service # Enable/disable the realtime service
@@ -301,6 +305,10 @@ realtime:
timeoutSeconds: 5 timeoutSeconds: 5
failureThreshold: 3 failureThreshold: 3
# Additional volumes for realtime deployment
extraVolumes: []
extraVolumeMounts: []
# Database migrations job configuration # Database migrations job configuration
migrations: migrations:
# Enable/disable migrations job # Enable/disable migrations job
@@ -539,6 +547,10 @@ ollama:
timeoutSeconds: 5 timeoutSeconds: 5
failureThreshold: 3 failureThreshold: 3
# Additional volumes for ollama deployment
extraVolumes: []
extraVolumeMounts: []
# Ingress configuration # Ingress configuration
ingress: ingress:
# Enable/disable ingress # Enable/disable ingress

View File

@@ -0,0 +1,4 @@
DROP INDEX "idempotency_key_namespace_unique";--> statement-breakpoint
DROP INDEX "idempotency_key_namespace_idx";--> statement-breakpoint
ALTER TABLE "idempotency_key" ADD PRIMARY KEY ("key");--> statement-breakpoint
ALTER TABLE "idempotency_key" DROP COLUMN "namespace";

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More