Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-11 16:08:04 -05:00)

Compare commits (12 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 016cd6750c | |
| | 3b982533d1 | |
| | 1604ce4d7c | |
| | 86168f1a87 | |
| | 5d7fc5382c | |
| | 7a5aeadbb7 | |
| | f4e627a9f7 | |
| | b0c1547198 | |
| | d19632aec3 | |
| | 35ac68f579 | |
| | 9c14f5f8fc | |
| | d50db1d3fb | |
@@ -49,7 +49,7 @@ In Sim Studio, the Knowledge Base block enables your agents to perform intellige

## Usage Instructions

Perform semantic vector search across one or more knowledge bases or upload new chunks to documents. Uses advanced AI embeddings to understand meaning and context for search operations.
Perform semantic vector search across knowledge bases, upload individual chunks to existing documents, or create new documents from text content. Uses advanced AI embeddings to understand meaning and context for search operations.
@@ -100,6 +100,25 @@ Upload a new chunk to a document in a knowledge base

| `createdAt` | string |
| `updatedAt` | string |

### `knowledge_create_document`

Create a new document in a knowledge base

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
| `name` | string | Yes | Name of the document |
| `content` | string | Yes | Content of the document |

#### Output

| Parameter | Type |
| --------- | ---- |
| `data` | string |
| `name` | string |
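A minimal sketch of what a call using the documented parameters might look like. The `executeTool` helper and the IDs are hypothetical; only the parameter and output names come from the tables above.

```typescript
// Hypothetical invocation; executeTool is an assumed generic helper, not part of this doc.
const result = await executeTool('knowledge_create_document', {
  knowledgeBaseId: 'kb_123', // hypothetical knowledge base ID
  name: 'Onboarding guide',
  content: 'Full text of the new document goes here.',
})

// Documented output fields:
// result.data: string
// result.name: string
```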

## Block Configuration
@@ -764,6 +764,20 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
      bucket: 'test-s3-bucket',
      region: 'us-east-1',
    },
    S3_KB_CONFIG: {
      bucket: 'test-s3-kb-bucket',
      region: 'us-east-1',
    },
    BLOB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-container',
    },
    BLOB_KB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-kb-container',
    },
  }))

  vi.doMock('@aws-sdk/client-s3', () => ({

@@ -806,6 +820,11 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
      accountKey: 'testkey',
      containerName: 'test-container',
    },
    BLOB_KB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-kb-container',
    },
  }))

  vi.doMock('@azure/storage-blob', () => ({
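A hedged sketch of how a test might consume these mocks. The import path and the exact options accepted by `createStorageProviderMocks` are assumptions; the KB-specific values match the mock above.

```typescript
import { expect, it } from 'vitest'
// Import path is an assumption; the helper appears in the shared test utilities shown above.
import { createStorageProviderMocks } from '@/app/api/__test-utils__/utils'

it('exposes the KB-specific storage config', async () => {
  // Register the mocks before importing anything that reads the storage setup.
  createStorageProviderMocks() // options omitted; the defaults above already define the KB configs
  const { S3_KB_CONFIG, BLOB_KB_CONFIG } = await import('@/lib/uploads/setup')
  expect(S3_KB_CONFIG.bucket).toBe('test-s3-kb-bucket')
  expect(BLOB_KB_CONFIG.containerName).toBe('test-kb-container')
})
```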
@@ -292,12 +292,12 @@ export async function executeWorkflowForChat(

  logger.debug(`[${requestId}] Using ${outputBlockIds.length} output blocks for extraction`)

  // Find the workflow
  // Find the workflow (deployedState is NOT deprecated - needed for chat execution)
  const workflowResult = await db
    .select({
      state: workflow.state,
      deployedState: workflow.deployedState,
      isDeployed: workflow.isDeployed,
      deployedState: workflow.deployedState,
      variables: workflow.variables,
    })
    .from(workflow)
    .where(eq(workflow.id, workflowId))

@@ -308,9 +308,14 @@ export async function executeWorkflowForChat(
    throw new Error('Workflow not available')
  }

  // Use deployed state for execution
  const state = workflowResult[0].deployedState || workflowResult[0].state
  const { blocks, edges, loops, parallels } = state as WorkflowState
  // For chat execution, use ONLY the deployed state (no fallback)
  if (!workflowResult[0].deployedState) {
    throw new Error(`Workflow must be deployed to be available for chat`)
  }

  // Use deployed state for chat execution (this is the stable, deployed version)
  const deployedState = workflowResult[0].deployedState as WorkflowState
  const { blocks, edges, loops, parallels } = deployedState

  // Prepare for execution, similar to use-workflow-execution.ts
  const mergedStates = mergeSubblockState(blocks)

@@ -344,16 +349,13 @@ export async function executeWorkflowForChat(
    logger.warn(`[${requestId}] Could not fetch environment variables:`, error)
  }

  // Get workflow variables
  let workflowVariables = {}
  try {
    // The workflow state may contain variables
    const workflowState = state as any
    if (workflowState.variables) {
    if (workflowResult[0].variables) {
      workflowVariables =
        typeof workflowState.variables === 'string'
          ? JSON.parse(workflowState.variables)
          : workflowState.variables
        typeof workflowResult[0].variables === 'string'
          ? JSON.parse(workflowResult[0].variables)
          : workflowResult[0].variables
    }
  } catch (error) {
    logger.warn(`[${requestId}] Could not parse workflow variables:`, error)
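The variables now come straight off the workflow row rather than the state blob, and may arrive either as a JSON string or as an already-parsed object. A hedged sketch of that normalization in isolation; the function name is illustrative, not from the codebase.

```typescript
// Illustrative helper; mirrors the typeof check and try/catch fallback in the diff above.
function parseWorkflowVariables(raw: unknown): Record<string, unknown> {
  if (!raw) return {}
  try {
    return typeof raw === 'string'
      ? (JSON.parse(raw) as Record<string, unknown>)
      : (raw as Record<string, unknown>)
  } catch {
    // Malformed JSON is tolerated, matching the logger.warn fallback in the route.
    return {}
  }
}
```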
@@ -39,8 +39,9 @@ describe('/api/files/presigned', () => {
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
|
||||
expect(data.error).toBe('Direct uploads are only available when cloud storage is enabled')
|
||||
expect(data.code).toBe('STORAGE_CONFIG_ERROR')
|
||||
expect(data.directUploadSupported).toBe(false)
|
||||
})
|
||||
|
||||
@@ -64,7 +65,8 @@ describe('/api/files/presigned', () => {
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toBe('Missing fileName or contentType')
|
||||
expect(data.error).toBe('fileName is required and cannot be empty')
|
||||
expect(data.code).toBe('VALIDATION_ERROR')
|
||||
})
|
||||
|
||||
it('should return error when contentType is missing', async () => {
|
||||
@@ -87,7 +89,59 @@ describe('/api/files/presigned', () => {
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toBe('Missing fileName or contentType')
|
||||
expect(data.error).toBe('contentType is required and cannot be empty')
|
||||
expect(data.code).toBe('VALIDATION_ERROR')
|
||||
})
|
||||
|
||||
it('should return error when fileSize is invalid', async () => {
|
||||
setupFileApiMocks({
|
||||
cloudEnabled: true,
|
||||
storageProvider: 's3',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/files/presigned', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
fileName: 'test.txt',
|
||||
contentType: 'text/plain',
|
||||
fileSize: 0,
|
||||
}),
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toBe('fileSize must be a positive number')
|
||||
expect(data.code).toBe('VALIDATION_ERROR')
|
||||
})
|
||||
|
||||
it('should return error when file size exceeds limit', async () => {
|
||||
setupFileApiMocks({
|
||||
cloudEnabled: true,
|
||||
storageProvider: 's3',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
|
||||
const largeFileSize = 150 * 1024 * 1024 // 150MB (exceeds 100MB limit)
|
||||
const request = new NextRequest('http://localhost:3000/api/files/presigned', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
fileName: 'large-file.txt',
|
||||
contentType: 'text/plain',
|
||||
fileSize: largeFileSize,
|
||||
}),
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toContain('exceeds maximum allowed size')
|
||||
expect(data.code).toBe('VALIDATION_ERROR')
|
||||
})
|
||||
|
||||
it('should generate S3 presigned URL successfully', async () => {
|
||||
@@ -122,6 +176,34 @@ describe('/api/files/presigned', () => {
|
||||
expect(data.directUploadSupported).toBe(true)
|
||||
})
|
||||
|
||||
it('should generate knowledge-base S3 presigned URL with kb prefix', async () => {
|
||||
setupFileApiMocks({
|
||||
cloudEnabled: true,
|
||||
storageProvider: 's3',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost:3000/api/files/presigned?type=knowledge-base',
|
||||
{
|
||||
method: 'POST',
|
||||
body: JSON.stringify({
|
||||
fileName: 'knowledge-doc.pdf',
|
||||
contentType: 'application/pdf',
|
||||
fileSize: 2048,
|
||||
}),
|
||||
}
|
||||
)
|
||||
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
|
||||
expect(data.directUploadSupported).toBe(true)
|
||||
})
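A hedged client-side sketch of the flow these tests exercise: request a presigned URL (optionally with `?type=knowledge-base`), then PUT the bytes to it. Field names follow the response shape shown in the route later in this diff; the wrapper itself is illustrative, not the app's actual upload client.

```typescript
async function uploadViaPresignedUrl(file: File, knowledgeBase = false): Promise<string> {
  const query = knowledgeBase ? '?type=knowledge-base' : ''
  const res = await fetch(`/api/files/presigned${query}`, {
    method: 'POST',
    body: JSON.stringify({ fileName: file.name, contentType: file.type, fileSize: file.size }),
  })
  const { presignedUrl, fileInfo, uploadHeaders, directUploadSupported } = await res.json()
  if (!res.ok || !directUploadSupported) throw new Error('Direct upload unavailable')

  // Azure responses include the x-ms-* headers; S3 responses may omit uploadHeaders.
  await fetch(presignedUrl, {
    method: 'PUT',
    headers: { 'Content-Type': file.type, ...(uploadHeaders ?? {}) },
    body: file,
  })
  return fileInfo.path // e.g. /api/files/serve/s3/<key>; knowledge-base keys carry a kb/ prefix
}
```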
|
||||
|
||||
it('should generate Azure Blob presigned URL successfully', async () => {
|
||||
setupFileApiMocks({
|
||||
cloudEnabled: true,
|
||||
@@ -182,8 +264,9 @@ describe('/api/files/presigned', () => {
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toBe('Unknown storage provider')
|
||||
expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
|
||||
expect(data.error).toBe('Unknown storage provider: unknown') // Updated error message
|
||||
expect(data.code).toBe('STORAGE_CONFIG_ERROR')
|
||||
expect(data.directUploadSupported).toBe(false)
|
||||
})
|
||||
|
||||
@@ -225,8 +308,10 @@ describe('/api/files/presigned', () => {
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.error).toBe('Error')
|
||||
expect(data.message).toBe('S3 service unavailable')
|
||||
expect(data.error).toBe(
|
||||
'Failed to generate S3 presigned URL - check AWS credentials and permissions'
|
||||
) // Updated error message
|
||||
expect(data.code).toBe('STORAGE_CONFIG_ERROR')
|
||||
})
|
||||
|
||||
it('should handle Azure Blob errors gracefully', async () => {
|
||||
@@ -269,8 +354,8 @@ describe('/api/files/presigned', () => {
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.error).toBe('Error')
|
||||
expect(data.message).toBe('Azure service unavailable')
|
||||
expect(data.error).toBe('Failed to generate Azure Blob presigned URL') // Updated error message
|
||||
expect(data.code).toBe('STORAGE_CONFIG_ERROR')
|
||||
})
|
||||
|
||||
it('should handle malformed JSON gracefully', async () => {
|
||||
@@ -289,9 +374,9 @@ describe('/api/files/presigned', () => {
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.error).toBe('SyntaxError')
|
||||
expect(data.message).toContain('Unexpected token')
|
||||
expect(response.status).toBe(400) // Changed from 500 to 400 (ValidationError)
|
||||
expect(data.error).toBe('Invalid JSON in request body') // Updated error message
|
||||
expect(data.code).toBe('VALIDATION_ERROR')
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
|
||||
import { getBlobServiceClient } from '@/lib/uploads/blob/blob-client'
|
||||
import { getS3Client, sanitizeFilenameForMetadata } from '@/lib/uploads/s3/s3-client'
|
||||
import { BLOB_CONFIG, S3_CONFIG } from '@/lib/uploads/setup'
|
||||
import { BLOB_CONFIG, BLOB_KB_CONFIG, S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
|
||||
import { createErrorResponse, createOptionsResponse } from '../utils'
|
||||
|
||||
const logger = createLogger('PresignedUploadAPI')
|
||||
@@ -17,124 +17,148 @@ interface PresignedUrlRequest {
|
||||
fileSize: number
|
||||
}
|
||||
|
||||
type UploadType = 'general' | 'knowledge-base'
|
||||
|
||||
class PresignedUrlError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public code: string,
|
||||
public statusCode = 400
|
||||
) {
|
||||
super(message)
|
||||
this.name = 'PresignedUrlError'
|
||||
}
|
||||
}
|
||||
|
||||
class StorageConfigError extends PresignedUrlError {
|
||||
constructor(message: string) {
|
||||
super(message, 'STORAGE_CONFIG_ERROR', 500)
|
||||
}
|
||||
}
|
||||
|
||||
class ValidationError extends PresignedUrlError {
|
||||
constructor(message: string) {
|
||||
super(message, 'VALIDATION_ERROR', 400)
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
// Parse the request body
|
||||
const data: PresignedUrlRequest = await request.json()
|
||||
const { fileName, contentType, fileSize } = data
|
||||
|
||||
if (!fileName || !contentType) {
|
||||
return NextResponse.json({ error: 'Missing fileName or contentType' }, { status: 400 })
|
||||
let data: PresignedUrlRequest
|
||||
try {
|
||||
data = await request.json()
|
||||
} catch {
|
||||
throw new ValidationError('Invalid JSON in request body')
|
||||
}
|
||||
|
||||
// Only proceed if cloud storage is enabled
|
||||
const { fileName, contentType, fileSize } = data
|
||||
|
||||
if (!fileName?.trim()) {
|
||||
throw new ValidationError('fileName is required and cannot be empty')
|
||||
}
|
||||
if (!contentType?.trim()) {
|
||||
throw new ValidationError('contentType is required and cannot be empty')
|
||||
}
|
||||
if (!fileSize || fileSize <= 0) {
|
||||
throw new ValidationError('fileSize must be a positive number')
|
||||
}
|
||||
|
||||
const MAX_FILE_SIZE = 100 * 1024 * 1024
|
||||
if (fileSize > MAX_FILE_SIZE) {
|
||||
throw new ValidationError(
|
||||
`File size (${fileSize} bytes) exceeds maximum allowed size (${MAX_FILE_SIZE} bytes)`
|
||||
)
|
||||
}
|
||||
|
||||
const uploadTypeParam = request.nextUrl.searchParams.get('type')
|
||||
const uploadType: UploadType =
|
||||
uploadTypeParam === 'knowledge-base' ? 'knowledge-base' : 'general'
|
||||
|
||||
if (!isUsingCloudStorage()) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Direct uploads are only available when cloud storage is enabled',
|
||||
directUploadSupported: false,
|
||||
},
|
||||
{ status: 400 }
|
||||
throw new StorageConfigError(
|
||||
'Direct uploads are only available when cloud storage is enabled'
|
||||
)
|
||||
}
|
||||
|
||||
const storageProvider = getStorageProvider()
|
||||
logger.info(`Generating ${uploadType} presigned URL for ${fileName} using ${storageProvider}`)
|
||||
|
||||
switch (storageProvider) {
|
||||
case 's3':
|
||||
return await handleS3PresignedUrl(fileName, contentType, fileSize)
|
||||
return await handleS3PresignedUrl(fileName, contentType, fileSize, uploadType)
|
||||
case 'blob':
|
||||
return await handleBlobPresignedUrl(fileName, contentType, fileSize)
|
||||
return await handleBlobPresignedUrl(fileName, contentType, fileSize, uploadType)
|
||||
default:
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Unknown storage provider',
|
||||
directUploadSupported: false,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
throw new StorageConfigError(`Unknown storage provider: ${storageProvider}`)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error generating presigned URL:', error)
|
||||
|
||||
if (error instanceof PresignedUrlError) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error.message,
|
||||
code: error.code,
|
||||
directUploadSupported: false,
|
||||
},
|
||||
{ status: error.statusCode }
|
||||
)
|
||||
}
|
||||
|
||||
return createErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to generate presigned URL')
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
async function handleS3PresignedUrl(fileName: string, contentType: string, fileSize: number) {
|
||||
// Create a unique key for the file
|
||||
const safeFileName = fileName.replace(/\s+/g, '-')
|
||||
const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`
|
||||
|
||||
// Sanitize the original filename for S3 metadata to prevent header errors
|
||||
const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)
|
||||
|
||||
// Create the S3 command
|
||||
const command = new PutObjectCommand({
|
||||
Bucket: S3_CONFIG.bucket,
|
||||
Key: uniqueKey,
|
||||
ContentType: contentType,
|
||||
Metadata: {
|
||||
originalName: sanitizedOriginalName,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
|
||||
// Generate the presigned URL
|
||||
const presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
|
||||
|
||||
// Create a path for API to serve the file
|
||||
const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
|
||||
|
||||
logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
|
||||
|
||||
return NextResponse.json({
|
||||
presignedUrl,
|
||||
fileInfo: {
|
||||
path: servePath,
|
||||
key: uniqueKey,
|
||||
name: fileName,
|
||||
size: fileSize,
|
||||
type: contentType,
|
||||
},
|
||||
directUploadSupported: true,
|
||||
})
|
||||
}
|
||||
|
||||
async function handleBlobPresignedUrl(fileName: string, contentType: string, fileSize: number) {
|
||||
// Create a unique key for the file
|
||||
const safeFileName = fileName.replace(/\s+/g, '-')
|
||||
const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`
|
||||
|
||||
async function handleS3PresignedUrl(
|
||||
fileName: string,
|
||||
contentType: string,
|
||||
fileSize: number,
|
||||
uploadType: UploadType
|
||||
) {
|
||||
try {
|
||||
const blobServiceClient = getBlobServiceClient()
|
||||
const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
|
||||
const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
|
||||
const config = uploadType === 'knowledge-base' ? S3_KB_CONFIG : S3_CONFIG
|
||||
|
||||
// Generate SAS token for upload (write permission)
|
||||
const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
|
||||
await import('@azure/storage-blob')
|
||||
|
||||
const sasOptions = {
|
||||
containerName: BLOB_CONFIG.containerName,
|
||||
blobName: uniqueKey,
|
||||
permissions: BlobSASPermissions.parse('w'), // Write permission for upload
|
||||
startsOn: new Date(),
|
||||
expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
|
||||
if (!config.bucket || !config.region) {
|
||||
throw new StorageConfigError(`S3 configuration missing for ${uploadType} uploads`)
|
||||
}
|
||||
|
||||
const sasToken = generateBlobSASQueryParameters(
|
||||
sasOptions,
|
||||
new StorageSharedKeyCredential(BLOB_CONFIG.accountName, BLOB_CONFIG.accountKey || '')
|
||||
).toString()
|
||||
const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
|
||||
const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`
|
||||
|
||||
const presignedUrl = `${blockBlobClient.url}?${sasToken}`
|
||||
const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)
|
||||
|
||||
// Create a path for API to serve the file
|
||||
const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
|
||||
const metadata: Record<string, string> = {
|
||||
originalName: sanitizedOriginalName,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}
|
||||
|
||||
logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
|
||||
if (uploadType === 'knowledge-base') {
|
||||
metadata.purpose = 'knowledge-base'
|
||||
}
|
||||
|
||||
const command = new PutObjectCommand({
|
||||
Bucket: config.bucket,
|
||||
Key: uniqueKey,
|
||||
ContentType: contentType,
|
||||
Metadata: metadata,
|
||||
})
|
||||
|
||||
let presignedUrl: string
|
||||
try {
|
||||
presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
|
||||
} catch (s3Error) {
|
||||
logger.error('Failed to generate S3 presigned URL:', s3Error)
|
||||
throw new StorageConfigError(
|
||||
'Failed to generate S3 presigned URL - check AWS credentials and permissions'
|
||||
)
|
||||
}
|
||||
|
||||
const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
|
||||
|
||||
logger.info(`Generated ${uploadType} S3 presigned URL for ${fileName} (${uniqueKey})`)
|
||||
|
||||
return NextResponse.json({
|
||||
presignedUrl,
|
||||
@@ -146,22 +170,103 @@ async function handleBlobPresignedUrl(fileName: string, contentType: string, fil
|
||||
type: contentType,
|
||||
},
|
||||
directUploadSupported: true,
|
||||
uploadHeaders: {
|
||||
'x-ms-blob-type': 'BlockBlob',
|
||||
'x-ms-blob-content-type': contentType,
|
||||
'x-ms-meta-originalname': encodeURIComponent(fileName),
|
||||
'x-ms-meta-uploadedat': new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error generating Blob presigned URL:', error)
|
||||
return createErrorResponse(
|
||||
error instanceof Error ? error : new Error('Failed to generate Blob presigned URL')
|
||||
)
|
||||
if (error instanceof PresignedUrlError) {
|
||||
throw error
|
||||
}
|
||||
logger.error('Error in S3 presigned URL generation:', error)
|
||||
throw new StorageConfigError('Failed to generate S3 presigned URL')
|
||||
}
|
||||
}
|
||||
|
||||
async function handleBlobPresignedUrl(
|
||||
fileName: string,
|
||||
contentType: string,
|
||||
fileSize: number,
|
||||
uploadType: UploadType
|
||||
) {
|
||||
try {
|
||||
const config = uploadType === 'knowledge-base' ? BLOB_KB_CONFIG : BLOB_CONFIG
|
||||
|
||||
if (
|
||||
!config.accountName ||
|
||||
!config.containerName ||
|
||||
(!config.accountKey && !config.connectionString)
|
||||
) {
|
||||
throw new StorageConfigError(`Azure Blob configuration missing for ${uploadType} uploads`)
|
||||
}
|
||||
|
||||
const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
|
||||
const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
|
||||
const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`
|
||||
|
||||
const blobServiceClient = getBlobServiceClient()
|
||||
const containerClient = blobServiceClient.getContainerClient(config.containerName)
|
||||
const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
|
||||
|
||||
const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
|
||||
await import('@azure/storage-blob')
|
||||
|
||||
const sasOptions = {
|
||||
containerName: config.containerName,
|
||||
blobName: uniqueKey,
|
||||
permissions: BlobSASPermissions.parse('w'), // Write permission for upload
|
||||
startsOn: new Date(),
|
||||
expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
|
||||
}
|
||||
|
||||
let sasToken: string
|
||||
try {
|
||||
sasToken = generateBlobSASQueryParameters(
|
||||
sasOptions,
|
||||
new StorageSharedKeyCredential(config.accountName, config.accountKey || '')
|
||||
).toString()
|
||||
} catch (blobError) {
|
||||
logger.error('Failed to generate Azure Blob SAS token:', blobError)
|
||||
throw new StorageConfigError(
|
||||
'Failed to generate Azure Blob SAS token - check Azure credentials and permissions'
|
||||
)
|
||||
}
|
||||
|
||||
const presignedUrl = `${blockBlobClient.url}?${sasToken}`
|
||||
|
||||
const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
|
||||
|
||||
logger.info(`Generated ${uploadType} Azure Blob presigned URL for ${fileName} (${uniqueKey})`)
|
||||
|
||||
const uploadHeaders: Record<string, string> = {
|
||||
'x-ms-blob-type': 'BlockBlob',
|
||||
'x-ms-blob-content-type': contentType,
|
||||
'x-ms-meta-originalname': encodeURIComponent(fileName),
|
||||
'x-ms-meta-uploadedat': new Date().toISOString(),
|
||||
}
|
||||
|
||||
if (uploadType === 'knowledge-base') {
|
||||
uploadHeaders['x-ms-meta-purpose'] = 'knowledge-base'
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
presignedUrl,
|
||||
fileInfo: {
|
||||
path: servePath,
|
||||
key: uniqueKey,
|
||||
name: fileName,
|
||||
size: fileSize,
|
||||
type: contentType,
|
||||
},
|
||||
directUploadSupported: true,
|
||||
uploadHeaders,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof PresignedUrlError) {
|
||||
throw error
|
||||
}
|
||||
logger.error('Error in Azure Blob presigned URL generation:', error)
|
||||
throw new StorageConfigError('Failed to generate Azure Blob presigned URL')
|
||||
}
|
||||
}
|
||||
|
||||
// Handle preflight requests
|
||||
export async function OPTIONS() {
|
||||
return createOptionsResponse()
|
||||
}
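Since the route now returns a structured error shape ({ error, code, directUploadSupported }), a caller can branch on the code instead of parsing messages. A hedged sketch; the handler name is illustrative.

```typescript
async function requestPresignedUrl(body: { fileName: string; contentType: string; fileSize: number }) {
  const res = await fetch('/api/files/presigned', { method: 'POST', body: JSON.stringify(body) })
  const data = await res.json()
  if (res.ok) return data

  if (data.code === 'VALIDATION_ERROR') {
    // 400: missing fileName/contentType, non-positive fileSize, size over the 100MB cap, or bad JSON
    throw new Error(`Fix the request: ${data.error}`)
  }
  if (data.code === 'STORAGE_CONFIG_ERROR') {
    // 500: cloud storage disabled, unknown provider, or provider credentials/config broken
    throw new Error(`Storage misconfiguration: ${data.error}`)
  }
  throw new Error(data.error ?? 'Failed to generate presigned URL')
}
```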
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { readFile } from 'fs/promises'
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
|
||||
import { downloadFile, getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
|
||||
import { BLOB_KB_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
|
||||
import '@/lib/uploads/setup.server'
|
||||
|
||||
import {
|
||||
@@ -16,6 +17,19 @@ export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('FilesServeAPI')
|
||||
|
||||
async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const chunks: Buffer[] = []
|
||||
readableStream.on('data', (data) => {
|
||||
chunks.push(data instanceof Buffer ? data : Buffer.from(data))
|
||||
})
|
||||
readableStream.on('end', () => {
|
||||
resolve(Buffer.concat(chunks))
|
||||
})
|
||||
readableStream.on('error', reject)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Main API route handler for serving files
|
||||
*/
|
||||
@@ -85,12 +99,65 @@ async function handleLocalFile(filename: string): Promise<NextResponse> {
|
||||
}
|
||||
}
|
||||
|
||||
async function downloadKBFile(cloudKey: string): Promise<Buffer> {
|
||||
const storageProvider = getStorageProvider()
|
||||
|
||||
if (storageProvider === 'blob') {
|
||||
logger.info(`Downloading KB file from Azure Blob Storage: ${cloudKey}`)
|
||||
// Use KB-specific blob configuration
|
||||
const { getBlobServiceClient } = await import('@/lib/uploads/blob/blob-client')
|
||||
const blobServiceClient = getBlobServiceClient()
|
||||
const containerClient = blobServiceClient.getContainerClient(BLOB_KB_CONFIG.containerName)
|
||||
const blockBlobClient = containerClient.getBlockBlobClient(cloudKey)
|
||||
|
||||
const downloadBlockBlobResponse = await blockBlobClient.download()
|
||||
if (!downloadBlockBlobResponse.readableStreamBody) {
|
||||
throw new Error('Failed to get readable stream from blob download')
|
||||
}
|
||||
|
||||
// Convert stream to buffer
|
||||
return await streamToBuffer(downloadBlockBlobResponse.readableStreamBody)
|
||||
}
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
logger.info(`Downloading KB file from S3: ${cloudKey}`)
|
||||
// Use KB-specific S3 configuration
|
||||
const { getS3Client } = await import('@/lib/uploads/s3/s3-client')
|
||||
const { GetObjectCommand } = await import('@aws-sdk/client-s3')
|
||||
|
||||
const s3Client = getS3Client()
|
||||
const command = new GetObjectCommand({
|
||||
Bucket: S3_KB_CONFIG.bucket,
|
||||
Key: cloudKey,
|
||||
})
|
||||
|
||||
const response = await s3Client.send(command)
|
||||
if (!response.Body) {
|
||||
throw new Error('No body in S3 response')
|
||||
}
|
||||
|
||||
// Convert stream to buffer using the same method as the regular S3 client
|
||||
const stream = response.Body as any
|
||||
return new Promise<Buffer>((resolve, reject) => {
|
||||
const chunks: Buffer[] = []
|
||||
stream.on('data', (chunk: Buffer) => chunks.push(chunk))
|
||||
stream.on('end', () => resolve(Buffer.concat(chunks)))
|
||||
stream.on('error', reject)
|
||||
})
|
||||
}
|
||||
|
||||
throw new Error(`Unsupported storage provider for KB files: ${storageProvider}`)
|
||||
}
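Side note: the inline promise in the S3 branch duplicates the streamToBuffer helper defined earlier in this file. A hedged sketch of reusing it, under the assumption that the SDK response body behaves as a Node readable stream; the commit itself keeps the inline version.

```typescript
// Sketch only; assumes the S3 response.Body is a Node readable stream at runtime.
async function s3BodyToBuffer(body: unknown): Promise<Buffer> {
  return streamToBuffer(body as NodeJS.ReadableStream)
}
```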
|
||||
|
||||
/**
|
||||
* Proxy cloud file through our server
|
||||
*/
|
||||
async function handleCloudProxy(cloudKey: string): Promise<NextResponse> {
|
||||
try {
|
||||
const fileBuffer = await downloadFile(cloudKey)
|
||||
// Check if this is a KB file (starts with 'kb/')
|
||||
const isKBFile = cloudKey.startsWith('kb/')
|
||||
|
||||
const fileBuffer = isKBFile ? await downloadKBFile(cloudKey) : await downloadFile(cloudKey)
|
||||
|
||||
// Extract the original filename from the key (last part after last /)
|
||||
const originalFilename = cloudKey.split('/').pop() || 'download'
|
||||
|
||||
@@ -391,6 +391,225 @@ describe('Function Execute API Route', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('Enhanced Error Handling', () => {
|
||||
it('should provide detailed syntax error with line content', async () => {
|
||||
// Mock VM Script to throw a syntax error
|
||||
const mockScript = vi.fn().mockImplementation(() => {
|
||||
const error = new Error('Invalid or unexpected token')
|
||||
error.name = 'SyntaxError'
|
||||
error.stack = `user-function.js:5
|
||||
description: "This has a missing closing quote
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
SyntaxError: Invalid or unexpected token
|
||||
at new Script (node:vm:117:7)
|
||||
at POST (/path/to/route.ts:123:24)`
|
||||
throw error
|
||||
})
|
||||
|
||||
vi.doMock('vm', () => ({
|
||||
createContext: mockCreateContext,
|
||||
Script: mockScript,
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'const obj = {\n name: "test",\n description: "This has a missing closing quote\n};\nreturn obj;',
|
||||
timeout: 5000,
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toContain('Syntax Error')
|
||||
expect(data.error).toContain('Line 3')
|
||||
expect(data.error).toContain('description: "This has a missing closing quote')
|
||||
expect(data.error).toContain('Invalid or unexpected token')
|
||||
expect(data.error).toContain('(Check for missing quotes, brackets, or semicolons)')
|
||||
|
||||
// Check debug information
|
||||
expect(data.debug).toBeDefined()
|
||||
expect(data.debug.line).toBe(3)
|
||||
expect(data.debug.errorType).toBe('SyntaxError')
|
||||
expect(data.debug.lineContent).toBe('description: "This has a missing closing quote')
|
||||
})
|
||||
|
||||
it('should provide detailed runtime error with line and column', async () => {
|
||||
// Create the error object first
|
||||
const runtimeError = new Error("Cannot read properties of null (reading 'someMethod')")
|
||||
runtimeError.name = 'TypeError'
|
||||
runtimeError.stack = `TypeError: Cannot read properties of null (reading 'someMethod')
|
||||
at user-function.js:4:16
|
||||
at user-function.js:9:3
|
||||
at Script.runInContext (node:vm:147:14)`
|
||||
|
||||
// Mock successful script creation but runtime error
|
||||
const mockScript = vi.fn().mockImplementation(() => ({
|
||||
runInContext: vi.fn().mockRejectedValue(runtimeError),
|
||||
}))
|
||||
|
||||
vi.doMock('vm', () => ({
|
||||
createContext: mockCreateContext,
|
||||
Script: mockScript,
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'const obj = null;\nreturn obj.someMethod();',
|
||||
timeout: 5000,
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toContain('Type Error')
|
||||
expect(data.error).toContain('Line 2')
|
||||
expect(data.error).toContain('return obj.someMethod();')
|
||||
expect(data.error).toContain('Cannot read properties of null')
|
||||
|
||||
// Check debug information
|
||||
expect(data.debug).toBeDefined()
|
||||
expect(data.debug.line).toBe(2)
|
||||
expect(data.debug.column).toBe(16)
|
||||
expect(data.debug.errorType).toBe('TypeError')
|
||||
expect(data.debug.lineContent).toBe('return obj.someMethod();')
|
||||
})
|
||||
|
||||
it('should handle ReferenceError with enhanced details', async () => {
|
||||
// Create the error object first
|
||||
const referenceError = new Error('undefinedVariable is not defined')
|
||||
referenceError.name = 'ReferenceError'
|
||||
referenceError.stack = `ReferenceError: undefinedVariable is not defined
|
||||
at user-function.js:4:8
|
||||
at Script.runInContext (node:vm:147:14)`
|
||||
|
||||
const mockScript = vi.fn().mockImplementation(() => ({
|
||||
runInContext: vi.fn().mockRejectedValue(referenceError),
|
||||
}))
|
||||
|
||||
vi.doMock('vm', () => ({
|
||||
createContext: mockCreateContext,
|
||||
Script: mockScript,
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'const x = 42;\nreturn undefinedVariable + x;',
|
||||
timeout: 5000,
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toContain('Reference Error')
|
||||
expect(data.error).toContain('Line 2')
|
||||
expect(data.error).toContain('return undefinedVariable + x;')
|
||||
expect(data.error).toContain('undefinedVariable is not defined')
|
||||
})
|
||||
|
||||
it('should handle errors without line content gracefully', async () => {
|
||||
const mockScript = vi.fn().mockImplementation(() => {
|
||||
const error = new Error('Generic error without stack trace')
|
||||
error.name = 'Error'
|
||||
// No stack trace
|
||||
throw error
|
||||
})
|
||||
|
||||
vi.doMock('vm', () => ({
|
||||
createContext: mockCreateContext,
|
||||
Script: mockScript,
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return "test";',
|
||||
timeout: 5000,
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toBe('Generic error without stack trace')
|
||||
|
||||
// Should still have debug info, but without line details
|
||||
expect(data.debug).toBeDefined()
|
||||
expect(data.debug.errorType).toBe('Error')
|
||||
expect(data.debug.line).toBeUndefined()
|
||||
expect(data.debug.lineContent).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should extract line numbers from different stack trace formats', async () => {
|
||||
const mockScript = vi.fn().mockImplementation(() => {
|
||||
const error = new Error('Test error')
|
||||
error.name = 'Error'
|
||||
error.stack = `Error: Test error
|
||||
at user-function.js:7:25
|
||||
at async function
|
||||
at Script.runInContext (node:vm:147:14)`
|
||||
throw error
|
||||
})
|
||||
|
||||
vi.doMock('vm', () => ({
|
||||
createContext: mockCreateContext,
|
||||
Script: mockScript,
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'const a = 1;\nconst b = 2;\nconst c = 3;\nconst d = 4;\nreturn a + b + c + d;',
|
||||
timeout: 5000,
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.success).toBe(false)
|
||||
|
||||
// Line 7 in VM should map to line 5 in user code (7 - 3 + 1 = 5)
|
||||
expect(data.debug.line).toBe(5)
|
||||
expect(data.debug.column).toBe(25)
|
||||
expect(data.debug.lineContent).toBe('return a + b + c + d;')
|
||||
})
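The expected mapping follows from the adjustment made in the route: with the default wrapper, user code starts at VM line 3, so a frame at user-function.js:7 maps to user line 7 - 3 + 1 = 5. A tiny sketch of that arithmetic:

```typescript
// Mirrors the adjustedLine calculation in extractEnhancedError (see the route below).
function toUserLine(vmLine: number, userCodeStartLine = 3): number {
  return vmLine - userCodeStartLine + 1
}
// toUserLine(7) === 5, matching the expectation above.
```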
|
||||
|
||||
it('should provide helpful suggestions for common syntax errors', async () => {
|
||||
const mockScript = vi.fn().mockImplementation(() => {
|
||||
const error = new Error('Unexpected end of input')
|
||||
error.name = 'SyntaxError'
|
||||
error.stack = 'user-function.js:4\nSyntaxError: Unexpected end of input'
|
||||
throw error
|
||||
})
|
||||
|
||||
vi.doMock('vm', () => ({
|
||||
createContext: mockCreateContext,
|
||||
Script: mockScript,
|
||||
}))
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'const obj = {\n name: "test"\n// Missing closing brace',
|
||||
timeout: 5000,
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toContain('Syntax Error')
|
||||
expect(data.error).toContain('Unexpected end of input')
|
||||
expect(data.error).toContain('(Check for missing closing brackets or braces)')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Utility Functions', () => {
|
||||
it('should properly escape regex special characters', async () => {
|
||||
// This tests the escapeRegExp function indirectly
|
||||
|
||||
@@ -8,6 +8,210 @@ export const maxDuration = 60
|
||||
|
||||
const logger = createLogger('FunctionExecuteAPI')
|
||||
|
||||
/**
|
||||
* Enhanced error information interface
|
||||
*/
|
||||
interface EnhancedError {
|
||||
message: string
|
||||
line?: number
|
||||
column?: number
|
||||
stack?: string
|
||||
name: string
|
||||
originalError: any
|
||||
lineContent?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract enhanced error information from VM execution errors
|
||||
*/
|
||||
function extractEnhancedError(
|
||||
error: any,
|
||||
userCodeStartLine: number,
|
||||
userCode?: string
|
||||
): EnhancedError {
|
||||
const enhanced: EnhancedError = {
|
||||
message: error.message || 'Unknown error',
|
||||
name: error.name || 'Error',
|
||||
originalError: error,
|
||||
}
|
||||
|
||||
if (error.stack) {
|
||||
enhanced.stack = error.stack
|
||||
|
||||
// Parse stack trace to extract line and column information
|
||||
// Handle both compilation errors and runtime errors
|
||||
const stackLines: string[] = error.stack.split('\n')
|
||||
|
||||
for (const line of stackLines) {
|
||||
// Pattern 1: Compilation errors - "user-function.js:6"
|
||||
let match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
|
||||
// Pattern 2: Runtime errors - "at user-function.js:5:12"
|
||||
if (!match) {
|
||||
match = line.match(/at\s+user-function\.js:(\d+):(\d+)/)
|
||||
}
|
||||
|
||||
// Pattern 3: Generic patterns for any line containing our filename
|
||||
if (!match) {
|
||||
match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
}
|
||||
|
||||
if (match) {
|
||||
const stackLine = Number.parseInt(match[1], 10)
|
||||
const stackColumn = match[2] ? Number.parseInt(match[2], 10) : undefined
|
||||
|
||||
// Adjust line number to account for wrapper code
|
||||
// The user code starts at a specific line in our wrapper
|
||||
const adjustedLine = stackLine - userCodeStartLine + 1
|
||||
|
||||
// Check if this is a syntax error in wrapper code caused by incomplete user code
|
||||
const isWrapperSyntaxError =
|
||||
stackLine > userCodeStartLine &&
|
||||
error.name === 'SyntaxError' &&
|
||||
(error.message.includes('Unexpected token') ||
|
||||
error.message.includes('Unexpected end of input'))
|
||||
|
||||
if (isWrapperSyntaxError && userCode) {
|
||||
// Map wrapper syntax errors to the last line of user code
|
||||
const codeLines = userCode.split('\n')
|
||||
const lastUserLine = codeLines.length
|
||||
enhanced.line = lastUserLine
|
||||
enhanced.column = codeLines[lastUserLine - 1]?.length || 0
|
||||
enhanced.lineContent = codeLines[lastUserLine - 1]?.trim()
|
||||
break
|
||||
}
|
||||
|
||||
if (adjustedLine > 0) {
|
||||
enhanced.line = adjustedLine
|
||||
enhanced.column = stackColumn
|
||||
|
||||
// Extract the actual line content from user code
|
||||
if (userCode) {
|
||||
const codeLines = userCode.split('\n')
|
||||
if (adjustedLine <= codeLines.length) {
|
||||
enhanced.lineContent = codeLines[adjustedLine - 1]?.trim()
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if (stackLine <= userCodeStartLine) {
|
||||
// Error is in wrapper code itself
|
||||
enhanced.line = stackLine
|
||||
enhanced.column = stackColumn
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up stack trace to show user-relevant information
|
||||
const cleanedStackLines: string[] = stackLines
|
||||
.filter(
|
||||
(line: string) =>
|
||||
line.includes('user-function.js') ||
|
||||
(!line.includes('vm.js') && !line.includes('internal/'))
|
||||
)
|
||||
.map((line: string) => line.replace(/\s+at\s+/, ' at '))
|
||||
|
||||
if (cleanedStackLines.length > 0) {
|
||||
enhanced.stack = cleanedStackLines.join('\n')
|
||||
}
|
||||
}
|
||||
|
||||
// Keep original message without adding error type prefix
|
||||
// The error type will be added later in createUserFriendlyErrorMessage
|
||||
|
||||
return enhanced
|
||||
}
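A hedged usage sketch of the helper above. The error object is fabricated for illustration, the helper is module-internal so calling it like this is purely illustrative, and the expected output mirrors the TypeError test earlier in this diff.

```typescript
const err = Object.assign(new Error("Cannot read properties of null (reading 'someMethod')"), {
  name: 'TypeError',
  stack: [
    "TypeError: Cannot read properties of null (reading 'someMethod')",
    '    at user-function.js:4:16',
  ].join('\n'),
})
const userCode = 'const obj = null;\nreturn obj.someMethod();'

const info = extractEnhancedError(err, 3, userCode)
// info.line === 2 (4 - 3 + 1), info.column === 16,
// info.lineContent === 'return obj.someMethod();'
```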
|
||||
|
||||
/**
|
||||
* Create a detailed error message for users
|
||||
*/
|
||||
function createUserFriendlyErrorMessage(
|
||||
enhanced: EnhancedError,
|
||||
requestId: string,
|
||||
userCode?: string
|
||||
): string {
|
||||
let errorMessage = enhanced.message
|
||||
|
||||
// Add line and column information if available
|
||||
if (enhanced.line !== undefined) {
|
||||
let lineInfo = `Line ${enhanced.line}${enhanced.column !== undefined ? `:${enhanced.column}` : ''}`
|
||||
|
||||
// Add the actual line content if available
|
||||
if (enhanced.lineContent) {
|
||||
lineInfo += `: \`${enhanced.lineContent}\``
|
||||
}
|
||||
|
||||
errorMessage = `${lineInfo} - ${errorMessage}`
|
||||
} else {
|
||||
// If no line number, try to extract it from stack trace for display
|
||||
if (enhanced.stack) {
|
||||
const stackMatch = enhanced.stack.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
if (stackMatch) {
|
||||
const line = Number.parseInt(stackMatch[1], 10)
|
||||
const column = stackMatch[2] ? Number.parseInt(stackMatch[2], 10) : undefined
|
||||
let lineInfo = `Line ${line}${column ? `:${column}` : ''}`
|
||||
|
||||
// Try to get line content if we have userCode
|
||||
if (userCode) {
|
||||
const codeLines = userCode.split('\n')
|
||||
// Note: stackMatch gives us VM line number, need to adjust
|
||||
// This is a fallback case, so we might not have perfect line mapping
|
||||
if (line <= codeLines.length) {
|
||||
const lineContent = codeLines[line - 1]?.trim()
|
||||
if (lineContent) {
|
||||
lineInfo += `: \`${lineContent}\``
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
errorMessage = `${lineInfo} - ${errorMessage}`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add error type prefix with consistent naming
|
||||
if (enhanced.name !== 'Error') {
|
||||
const errorTypePrefix =
|
||||
enhanced.name === 'SyntaxError'
|
||||
? 'Syntax Error'
|
||||
: enhanced.name === 'TypeError'
|
||||
? 'Type Error'
|
||||
: enhanced.name === 'ReferenceError'
|
||||
? 'Reference Error'
|
||||
: enhanced.name
|
||||
|
||||
// Only add prefix if not already present
|
||||
if (!errorMessage.toLowerCase().includes(errorTypePrefix.toLowerCase())) {
|
||||
errorMessage = `${errorTypePrefix}: ${errorMessage}`
|
||||
}
|
||||
}
|
||||
|
||||
// For syntax errors, provide additional context
|
||||
if (enhanced.name === 'SyntaxError') {
|
||||
if (errorMessage.includes('Invalid or unexpected token')) {
|
||||
errorMessage += ' (Check for missing quotes, brackets, or semicolons)'
|
||||
} else if (errorMessage.includes('Unexpected end of input')) {
|
||||
errorMessage += ' (Check for missing closing brackets or braces)'
|
||||
} else if (errorMessage.includes('Unexpected token')) {
|
||||
// Check if this might be due to incomplete code
|
||||
if (
|
||||
enhanced.lineContent &&
|
||||
((enhanced.lineContent.includes('(') && !enhanced.lineContent.includes(')')) ||
|
||||
(enhanced.lineContent.includes('[') && !enhanced.lineContent.includes(']')) ||
|
||||
(enhanced.lineContent.includes('{') && !enhanced.lineContent.includes('}')))
|
||||
) {
|
||||
errorMessage += ' (Check for missing closing parentheses, brackets, or braces)'
|
||||
} else {
|
||||
errorMessage += ' (Check your syntax)'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return errorMessage
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves environment variables and tags in code
|
||||
* @param code - Code with variables
|
||||
@@ -19,7 +223,9 @@ const logger = createLogger('FunctionExecuteAPI')
|
||||
function resolveCodeVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
envVars: Record<string, string> = {}
|
||||
envVars: Record<string, string> = {},
|
||||
blockData: Record<string, any> = {},
|
||||
blockNameMapping: Record<string, string> = {}
|
||||
): { resolvedCode: string; contextVariables: Record<string, any> } {
|
||||
let resolvedCode = code
|
||||
const contextVariables: Record<string, any> = {}
|
||||
@@ -39,11 +245,52 @@ function resolveCodeVariables(
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
|
||||
}
|
||||
|
||||
// Resolve tags with <tag_name> syntax
|
||||
const tagMatches = resolvedCode.match(/<([a-zA-Z_][a-zA-Z0-9_]*)>/g) || []
|
||||
// Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
|
||||
const tagMatches = resolvedCode.match(/<([a-zA-Z_][a-zA-Z0-9_.]*[a-zA-Z0-9_])>/g) || []
|
||||
|
||||
for (const match of tagMatches) {
|
||||
const tagName = match.slice(1, -1).trim()
|
||||
const tagValue = params[tagName] || ''
|
||||
|
||||
// Handle nested paths like "getrecord.response.data" or "function1.response.result"
|
||||
// First try params, then blockData directly, then try with block name mapping
|
||||
let tagValue = getNestedValue(params, tagName) || getNestedValue(blockData, tagName) || ''
|
||||
|
||||
// If not found and the path starts with a block name, try mapping the block name to ID
|
||||
if (!tagValue && tagName.includes('.')) {
|
||||
const pathParts = tagName.split('.')
|
||||
const normalizedBlockName = pathParts[0] // This should already be normalized like "function1"
|
||||
|
||||
// Find the block ID by looking for a block name that normalizes to this value
|
||||
let blockId = null
|
||||
|
||||
for (const [blockName, id] of Object.entries(blockNameMapping)) {
|
||||
// Apply the same normalization logic as the UI: remove spaces and lowercase
|
||||
const normalizedName = blockName.replace(/\s+/g, '').toLowerCase()
|
||||
if (normalizedName === normalizedBlockName) {
|
||||
blockId = id
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (blockId) {
|
||||
const remainingPath = pathParts.slice(1).join('.')
|
||||
const fullPath = `${blockId}.${remainingPath}`
|
||||
tagValue = getNestedValue(blockData, fullPath) || ''
|
||||
}
|
||||
}
|
||||
|
||||
// If the value is a stringified JSON, parse it back to object
|
||||
if (
|
||||
typeof tagValue === 'string' &&
|
||||
tagValue.length > 100 &&
|
||||
(tagValue.startsWith('{') || tagValue.startsWith('['))
|
||||
) {
|
||||
try {
|
||||
tagValue = JSON.parse(tagValue)
|
||||
} catch (e) {
|
||||
// Keep as string if parsing fails
|
||||
}
|
||||
}
|
||||
|
||||
// Instead of injecting large JSON directly, create a variable reference
|
||||
const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
|
||||
@@ -56,6 +303,17 @@ function resolveCodeVariables(
|
||||
return { resolvedCode, contextVariables }
|
||||
}
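A hedged walk-through of the new tag resolution with fabricated data: a tag like `<function1.response.result>` misses in params and in blockData keyed by the normalized name, so the block-name mapping is normalized (spaces stripped, lowercased) to find the block ID, and the rest of the path is looked up under it.

```typescript
// All names, IDs and values below are made up for illustration.
const blockNameMapping = { 'Function 1': 'blk_abc123' }
const blockData = { blk_abc123: { response: { result: 42 } } }

// Resolving <function1.response.result>:
//   1. getNestedValue(params, 'function1.response.result')      -> undefined
//   2. getNestedValue(blockData, 'function1.response.result')   -> undefined
//   3. 'Function 1'.replace(/\s+/g, '').toLowerCase() === 'function1' -> blockId 'blk_abc123'
//   4. getNestedValue(blockData, 'blk_abc123.response.result')  -> 42
```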
|
||||
|
||||
/**
|
||||
* Get nested value from object using dot notation path
|
||||
*/
|
||||
function getNestedValue(obj: any, path: string): any {
|
||||
if (!obj || !path) return undefined
|
||||
|
||||
return path.split('.').reduce((current, key) => {
|
||||
return current && typeof current === 'object' ? current[key] : undefined
|
||||
}, obj)
|
||||
}
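For reference, the helper walks dot-paths defensively and never throws on missing segments; a few examples of its behaviour as written:

```typescript
getNestedValue({ response: { data: { rows: 3 } } }, 'response.data.rows') // 3
getNestedValue({ response: null }, 'response.data.rows') // undefined, no throw
getNestedValue(undefined, 'anything') // undefined
```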
|
||||
|
||||
/**
|
||||
* Escape special regex characters in a string
|
||||
*/
|
||||
@@ -67,6 +325,8 @@ export async function POST(req: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
const startTime = Date.now()
|
||||
let stdout = ''
|
||||
let userCodeStartLine = 3 // Default value for error reporting
|
||||
let resolvedCode = '' // Store resolved code for error reporting
|
||||
|
||||
try {
|
||||
const body = await req.json()
|
||||
@@ -76,6 +336,8 @@ export async function POST(req: NextRequest) {
|
||||
params = {},
|
||||
timeout = 5000,
|
||||
envVars = {},
|
||||
blockData = {},
|
||||
blockNameMapping = {},
|
||||
workflowId,
|
||||
isCustomTool = false,
|
||||
} = body
|
||||
@@ -93,7 +355,15 @@ export async function POST(req: NextRequest) {
|
||||
})
|
||||
|
||||
// Resolve variables in the code with workflow environment variables
|
||||
const { resolvedCode, contextVariables } = resolveCodeVariables(code, executionParams, envVars)
|
||||
const codeResolution = resolveCodeVariables(
|
||||
code,
|
||||
executionParams,
|
||||
envVars,
|
||||
blockData,
|
||||
blockNameMapping
|
||||
)
|
||||
resolvedCode = codeResolution.resolvedCode
|
||||
const contextVariables = codeResolution.contextVariables
|
||||
|
||||
const executionMethod = 'vm' // Default execution method
|
||||
|
||||
@@ -239,16 +509,12 @@ export async function POST(req: NextRequest) {
|
||||
// timeout,
|
||||
// displayErrors: true,
|
||||
// })
|
||||
// logger.info(`[${requestId}] VM execution result`, {
|
||||
// result,
|
||||
// stdout,
|
||||
// })
|
||||
// }
|
||||
// } else {
|
||||
logger.info(`[${requestId}] Using VM for code execution`, {
|
||||
resolvedCode,
|
||||
executionParams,
|
||||
envVars,
|
||||
hasEnvVars: Object.keys(envVars).length > 0,
|
||||
})
|
||||
|
||||
// Create a secure context with console logging
|
||||
@@ -274,28 +540,40 @@ export async function POST(req: NextRequest) {
|
||||
},
|
||||
})
|
||||
|
||||
const script = new Script(`
|
||||
(async () => {
|
||||
try {
|
||||
${
|
||||
isCustomTool
|
||||
? `// For custom tools, make parameters directly accessible
|
||||
${Object.keys(executionParams)
|
||||
.map((key) => `const ${key} = params.${key};`)
|
||||
.join('\n ')}`
|
||||
: ''
|
||||
}
|
||||
${resolvedCode}
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
throw error;
|
||||
}
|
||||
})()
|
||||
`)
|
||||
// Calculate line offset for user code to provide accurate error reporting
|
||||
const wrapperLines = ['(async () => {', ' try {']
|
||||
|
||||
// Add custom tool parameter declarations if needed
|
||||
if (isCustomTool) {
|
||||
wrapperLines.push(' // For custom tools, make parameters directly accessible')
|
||||
Object.keys(executionParams).forEach((key) => {
|
||||
wrapperLines.push(` const ${key} = params.${key};`)
|
||||
})
|
||||
}
|
||||
|
||||
userCodeStartLine = wrapperLines.length + 1 // +1 because user code starts on next line
|
||||
|
||||
// Build the complete script with proper formatting for line numbers
|
||||
const fullScript = [
|
||||
...wrapperLines,
|
||||
` ${resolvedCode.split('\n').join('\n ')}`, // Indent user code
|
||||
' } catch (error) {',
|
||||
' console.error(error);',
|
||||
' throw error;',
|
||||
' }',
|
||||
'})()',
|
||||
].join('\n')
|
||||
|
||||
const script = new Script(fullScript, {
|
||||
filename: 'user-function.js', // This filename will appear in stack traces
|
||||
lineOffset: 0, // Start line numbering from 0
|
||||
columnOffset: 0, // Start column numbering from 0
|
||||
})
|
||||
|
||||
const result = await script.runInContext(context, {
|
||||
timeout,
|
||||
displayErrors: true,
|
||||
breakOnSigint: true, // Allow breaking on SIGINT for better debugging
|
||||
})
|
||||
// }
|
||||
|
||||
@@ -322,14 +600,40 @@ export async function POST(req: NextRequest) {
|
||||
executionTime,
|
||||
})
|
||||
|
||||
const enhancedError = extractEnhancedError(error, userCodeStartLine, resolvedCode)
|
||||
const userFriendlyErrorMessage = createUserFriendlyErrorMessage(
|
||||
enhancedError,
|
||||
requestId,
|
||||
resolvedCode
|
||||
)
|
||||
|
||||
// Log enhanced error details for debugging
|
||||
logger.error(`[${requestId}] Enhanced error details`, {
|
||||
originalMessage: error.message,
|
||||
enhancedMessage: userFriendlyErrorMessage,
|
||||
line: enhancedError.line,
|
||||
column: enhancedError.column,
|
||||
lineContent: enhancedError.lineContent,
|
||||
errorType: enhancedError.name,
|
||||
userCodeStartLine,
|
||||
})
|
||||
|
||||
const errorResponse = {
|
||||
success: false,
|
||||
error: error.message || 'Code execution failed',
|
||||
error: userFriendlyErrorMessage,
|
||||
output: {
|
||||
result: null,
|
||||
stdout,
|
||||
executionTime,
|
||||
},
|
||||
// Include debug information in development or for debugging
|
||||
debug: {
|
||||
line: enhancedError.line,
|
||||
column: enhancedError.column,
|
||||
errorType: enhancedError.name,
|
||||
lineContent: enhancedError.lineContent,
|
||||
stack: enhancedError.stack,
|
||||
},
|
||||
}
|
||||
|
||||
return NextResponse.json(errorResponse, { status: 500 })
|
||||
|
||||
@@ -17,6 +17,17 @@ describe('Scheduled Workflow Execution API Route', () => {

    mockExecutionDependencies()

    // Mock the normalized tables helper
    vi.doMock('@/lib/workflows/db-helpers', () => ({
      loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
        blocks: sampleWorkflowState.blocks,
        edges: sampleWorkflowState.edges || [],
        loops: sampleWorkflowState.loops || {},
        parallels: sampleWorkflowState.parallels || {},
        isFromNormalizedTables: true,
      }),
    }))
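The shape this mock resolves to (and that the routes destructure) can be captured as a hedged interface sketch; the real helper in @/lib/workflows/db-helpers may carry additional fields.

```typescript
interface NormalizedWorkflowData {
  blocks: Record<string, unknown>
  edges: unknown[]
  loops: Record<string, unknown>
  parallels: Record<string, unknown>
  isFromNormalizedTables: boolean
}

// loadWorkflowFromNormalizedTables(workflowId) resolves to this shape, or to a nullish
// value when the workflow has no rows in the normalized tables; the routes in this diff
// treat the nullish case as an error (schedules) or a 500 response (webhooks).
```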

    vi.doMock('croner', () => ({
      Cron: vi.fn().mockImplementation(() => ({
        nextRun: vi.fn().mockReturnValue(new Date(Date.now() + 60000)), // Next run in 1 minute
@@ -14,13 +14,13 @@ import {
|
||||
} from '@/lib/schedules/utils'
|
||||
import { checkServerSideUsageLimits } from '@/lib/usage-monitor'
|
||||
import { decryptSecret } from '@/lib/utils'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
|
||||
import { db } from '@/db'
|
||||
import { environment, userStats, workflow, workflowSchedule } from '@/db/schema'
|
||||
import { Executor } from '@/executor'
|
||||
import { Serializer } from '@/serializer'
|
||||
import { mergeSubblockState } from '@/stores/workflows/server-utils'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
// Add dynamic export to prevent caching
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -149,8 +149,27 @@ export async function GET(req: NextRequest) {
|
||||
continue
|
||||
}
|
||||
|
||||
const state = workflowRecord.state as WorkflowState
|
||||
const { blocks, edges, loops, parallels } = state
|
||||
// Load workflow data from normalized tables (no fallback to deprecated state column)
|
||||
logger.debug(
|
||||
`[${requestId}] Loading workflow ${schedule.workflowId} from normalized tables`
|
||||
)
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(schedule.workflowId)
|
||||
|
||||
if (!normalizedData) {
|
||||
logger.error(
|
||||
`[${requestId}] No normalized data found for scheduled workflow ${schedule.workflowId}`
|
||||
)
|
||||
throw new Error(`Workflow data not found in normalized tables for ${schedule.workflowId}`)
|
||||
}
|
||||
|
||||
// Use normalized data only
|
||||
const blocks = normalizedData.blocks
|
||||
const edges = normalizedData.edges
|
||||
const loops = normalizedData.loops
|
||||
const parallels = normalizedData.parallels
|
||||
logger.info(
|
||||
`[${requestId}] Loaded scheduled workflow ${schedule.workflowId} from normalized tables`
|
||||
)
|
||||
|
||||
const mergedStates = mergeSubblockState(blocks)
|
||||
|
||||
@@ -405,9 +424,13 @@ export async function GET(req: NextRequest) {
|
||||
.limit(1)
|
||||
|
||||
if (workflowRecord) {
|
||||
const state = workflowRecord.state as WorkflowState
|
||||
const { blocks } = state
|
||||
nextRunAt = calculateNextRunTime(schedule, blocks)
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(schedule.workflowId)
|
||||
|
||||
if (!normalizedData) {
|
||||
nextRunAt = new Date(now.getTime() + 24 * 60 * 60 * 1000)
|
||||
} else {
|
||||
nextRunAt = calculateNextRunTime(schedule, normalizedData.blocks)
|
||||
}
|
||||
} else {
|
||||
nextRunAt = new Date(now.getTime() + 24 * 60 * 60 * 1000)
|
||||
}
|
||||
|
||||
@@ -5,11 +5,7 @@ import { NextRequest } from 'next/server'
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import {
|
||||
createMockRequest,
|
||||
mockExecutionDependencies,
|
||||
sampleWorkflowState,
|
||||
} from '@/app/api/__test-utils__/utils'
|
||||
import { createMockRequest, mockExecutionDependencies } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
// Define mock functions at the top level to be used in mocks
|
||||
const hasProcessedMessageMock = vi.fn().mockResolvedValue(false)
|
||||
@@ -148,10 +144,18 @@ describe('Webhook Trigger API Route', () => {
|
||||
vi.resetAllMocks()
|
||||
vi.clearAllTimers()
|
||||
|
||||
// Mock all dependencies
|
||||
mockExecutionDependencies()
|
||||
|
||||
// Reset mock behaviors to default for each test
|
||||
vi.doMock('@/lib/workflows/db-helpers', () => ({
|
||||
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
|
||||
blocks: {},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isFromNormalizedTables: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
hasProcessedMessageMock.mockResolvedValue(false)
|
||||
markMessageAsProcessedMock.mockResolvedValue(true)
|
||||
acquireLockMock.mockResolvedValue(true)
|
||||
@@ -159,12 +163,10 @@ describe('Webhook Trigger API Route', () => {
|
||||
processGenericDeduplicationMock.mockResolvedValue(null)
|
||||
processWebhookMock.mockResolvedValue(new Response('Webhook processed', { status: 200 }))
|
||||
|
||||
// Restore original crypto.randomUUID if it was mocked
|
||||
if ((global as any).crypto?.randomUUID) {
|
||||
vi.spyOn(crypto, 'randomUUID').mockRestore()
|
||||
}
|
||||
|
||||
// Mock crypto.randomUUID to return predictable values
|
||||
vi.spyOn(crypto, 'randomUUID').mockReturnValue('mock-uuid-12345')
|
||||
})
|
||||
|
||||
@@ -263,7 +265,6 @@ describe('Webhook Trigger API Route', () => {
|
||||
workflow: {
|
||||
id: 'workflow-id',
|
||||
userId: 'user-id',
|
||||
state: sampleWorkflowState,
|
||||
},
|
||||
},
|
||||
])
|
||||
@@ -355,7 +356,6 @@ describe('Webhook Trigger API Route', () => {
|
||||
workflow: {
|
||||
id: 'workflow-id',
|
||||
userId: 'user-id',
|
||||
state: sampleWorkflowState,
|
||||
},
|
||||
},
|
||||
])
|
||||
@@ -409,7 +409,6 @@ describe('Webhook Trigger API Route', () => {
|
||||
workflow: {
|
||||
id: 'workflow-id',
|
||||
userId: 'user-id',
|
||||
state: sampleWorkflowState,
|
||||
},
|
||||
},
|
||||
])
|
||||
@@ -482,7 +481,6 @@ describe('Webhook Trigger API Route', () => {
|
||||
workflow: {
|
||||
id: 'workflow-id',
|
||||
userId: 'user-id',
|
||||
state: sampleWorkflowState,
|
||||
},
|
||||
},
|
||||
])
|
||||
@@ -553,7 +551,6 @@ describe('Webhook Trigger API Route', () => {
|
||||
workflow: {
|
||||
id: 'workflow-id',
|
||||
userId: 'user-id',
|
||||
state: sampleWorkflowState,
|
||||
},
|
||||
},
|
||||
])
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
processWebhook,
|
||||
processWhatsAppDeduplication,
|
||||
} from '@/lib/webhooks/utils'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
import { db } from '@/db'
|
||||
import { webhook, workflow } from '@/db/schema'
|
||||
|
||||
@@ -187,6 +188,24 @@ export async function POST(
|
||||
foundWebhook = webhooks[0].webhook
|
||||
foundWorkflow = webhooks[0].workflow
|
||||
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(foundWorkflow.id)
|
||||
|
||||
if (!normalizedData) {
|
||||
logger.error(`[${requestId}] No normalized data found for webhook workflow ${foundWorkflow.id}`)
|
||||
return new NextResponse('Workflow data not found in normalized tables', { status: 500 })
|
||||
}
|
||||
|
||||
// Construct state from normalized data only (execution-focused, no frontend state fields)
|
||||
foundWorkflow.state = {
|
||||
blocks: normalizedData.blocks,
|
||||
edges: normalizedData.edges,
|
||||
loops: normalizedData.loops,
|
||||
parallels: normalizedData.parallels,
|
||||
lastSaved: Date.now(),
|
||||
isDeployed: foundWorkflow.isDeployed || false,
|
||||
deployedAt: foundWorkflow.deployedAt,
|
||||
}
|
||||
|
||||
// Special handling for Telegram webhooks to work around middleware User-Agent checks
|
||||
if (foundWebhook.provider === 'telegram') {
|
||||
// Log detailed information about the request for debugging
|
||||
|
||||
@@ -31,6 +31,27 @@ describe('Workflow Deployment API Route', () => {
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/workflows/db-helpers', () => ({
|
||||
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
|
||||
blocks: {
|
||||
'block-1': {
|
||||
id: 'block-1',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
outputs: {},
|
||||
data: {},
|
||||
},
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isFromNormalizedTables: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.doMock('../../middleware', () => ({
|
||||
validateWorkflowAccess: vi.fn().mockResolvedValue({
|
||||
workflow: {
|
||||
@@ -74,6 +95,7 @@ describe('Workflow Deployment API Route', () => {
|
||||
isDeployed: false,
|
||||
deployedAt: null,
|
||||
userId: 'user-id',
|
||||
deployedState: null,
|
||||
},
|
||||
]),
|
||||
}),
|
||||
@@ -129,7 +151,6 @@ describe('Workflow Deployment API Route', () => {
|
||||
}),
|
||||
}),
|
||||
})
|
||||
// Mock normalized table queries (blocks, edges, subflows)
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([
|
||||
@@ -216,7 +237,6 @@ describe('Workflow Deployment API Route', () => {
|
||||
}),
|
||||
}),
|
||||
})
|
||||
// Mock normalized table queries (blocks, edges, subflows)
|
||||
.mockReturnValueOnce({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockResolvedValue([
|
||||
|
||||
@@ -32,7 +32,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
isDeployed: workflow.isDeployed,
|
||||
deployedAt: workflow.deployedAt,
|
||||
userId: workflow.userId,
|
||||
state: workflow.state,
|
||||
deployedState: workflow.deployedState,
|
||||
})
|
||||
.from(workflow)
|
||||
@@ -93,11 +92,25 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
// Check if the workflow has meaningful changes that would require redeployment
|
||||
let needsRedeployment = false
|
||||
if (workflowData.deployedState) {
|
||||
const { hasWorkflowChanged } = await import('@/lib/workflows/utils')
|
||||
needsRedeployment = hasWorkflowChanged(
|
||||
workflowData.state as any,
|
||||
workflowData.deployedState as any
|
||||
)
|
||||
// Load current state from normalized tables for comparison
|
||||
const { loadWorkflowFromNormalizedTables } = await import('@/lib/workflows/db-helpers')
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(id)
|
||||
|
||||
if (normalizedData) {
|
||||
// Convert normalized data to WorkflowState format for comparison
|
||||
const currentState = {
|
||||
blocks: normalizedData.blocks,
|
||||
edges: normalizedData.edges,
|
||||
loops: normalizedData.loops,
|
||||
parallels: normalizedData.parallels,
|
||||
}
|
||||
|
||||
const { hasWorkflowChanged } = await import('@/lib/workflows/utils')
|
||||
needsRedeployment = hasWorkflowChanged(
|
||||
currentState as any,
|
||||
workflowData.deployedState as any
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully retrieved deployment info: ${id}`)
|
||||
@@ -126,11 +139,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
return createErrorResponse(validation.error.message, validation.error.status)
|
||||
}
|
||||
|
||||
// Get the workflow to find the user
|
||||
// Get the workflow to find the user (removed deprecated state column)
|
||||
const workflowData = await db
|
||||
.select({
|
||||
userId: workflow.userId,
|
||||
state: workflow.state,
|
||||
})
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, id))
|
||||
|
||||
@@ -24,45 +24,54 @@ describe('Workflow Execution API Route', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules()
|
||||
|
||||
// Mock workflow middleware
|
||||
vi.doMock('@/app/api/workflows/middleware', () => ({
|
||||
validateWorkflowAccess: vi.fn().mockResolvedValue({
|
||||
workflow: {
|
||||
id: 'workflow-id',
|
||||
userId: 'user-id',
|
||||
state: {
|
||||
blocks: {
|
||||
'starter-id': {
|
||||
id: 'starter-id',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
enabled: true,
|
||||
},
|
||||
'agent-id': {
|
||||
id: 'agent-id',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
position: { x: 300, y: 100 },
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
edges: [
|
||||
{
|
||||
id: 'edge-1',
|
||||
source: 'starter-id',
|
||||
target: 'agent-id',
|
||||
sourceHandle: 'source',
|
||||
targetHandle: 'target',
|
||||
},
|
||||
],
|
||||
loops: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
// Reset execute mock to track calls
|
||||
vi.doMock('@/lib/workflows/db-helpers', () => ({
|
||||
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
|
||||
blocks: {
|
||||
'starter-id': {
|
||||
id: 'starter-id',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
outputs: {},
|
||||
data: {},
|
||||
},
|
||||
'agent-id': {
|
||||
id: 'agent-id',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
position: { x: 300, y: 100 },
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
outputs: {},
|
||||
data: {},
|
||||
},
|
||||
},
|
||||
edges: [
|
||||
{
|
||||
id: 'edge-1',
|
||||
source: 'starter-id',
|
||||
target: 'agent-id',
|
||||
sourceHandle: 'source',
|
||||
targetHandle: 'target',
|
||||
},
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isFromNormalizedTables: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
executeMock = vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
output: {
|
||||
@@ -76,14 +85,12 @@ describe('Workflow Execution API Route', () => {
|
||||
},
|
||||
})
|
||||
|
||||
// Mock executor
|
||||
vi.doMock('@/executor', () => ({
|
||||
Executor: vi.fn().mockImplementation(() => ({
|
||||
execute: executeMock,
|
||||
})),
|
||||
}))
|
||||
|
||||
// Mock environment variables
|
||||
vi.doMock('@/lib/utils', () => ({
|
||||
decryptSecret: vi.fn().mockResolvedValue({
|
||||
decrypted: 'decrypted-secret-value',
|
||||
@@ -92,13 +99,11 @@ describe('Workflow Execution API Route', () => {
|
||||
getRotatingApiKey: vi.fn().mockReturnValue('rotated-api-key'),
|
||||
}))
|
||||
|
||||
// Mock logger
|
||||
vi.doMock('@/lib/logs/execution-logger', () => ({
|
||||
persistExecutionLogs: vi.fn().mockResolvedValue(undefined),
|
||||
persistExecutionError: vi.fn().mockResolvedValue(undefined),
|
||||
}))
|
||||
|
||||
// Mock trace spans
|
||||
vi.doMock('@/lib/logs/trace-spans', () => ({
|
||||
buildTraceSpans: vi.fn().mockReturnValue({
|
||||
traceSpans: [],
|
||||
@@ -106,13 +111,11 @@ describe('Workflow Execution API Route', () => {
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock workflow run counts
|
||||
vi.doMock('@/lib/workflows/utils', () => ({
|
||||
updateWorkflowRunCounts: vi.fn().mockResolvedValue(undefined),
|
||||
workflowHasResponseBlock: vi.fn().mockReturnValue(false),
|
||||
}))
|
||||
|
||||
// Mock database
|
||||
vi.doMock('@/db', () => {
|
||||
const mockDb = {
|
||||
select: vi.fn().mockImplementation(() => ({
|
||||
@@ -140,7 +143,6 @@ describe('Workflow Execution API Route', () => {
|
||||
return { db: mockDb }
|
||||
})
|
||||
|
||||
// Mock Serializer
|
||||
vi.doMock('@/serializer', () => ({
|
||||
Serializer: vi.fn().mockImplementation(() => ({
|
||||
serializeWorkflow: vi.fn().mockReturnValue({
|
||||
@@ -162,49 +164,37 @@ describe('Workflow Execution API Route', () => {
|
||||
* Simulates direct execution with URL-based parameters
|
||||
*/
|
||||
it('should execute workflow with GET request successfully', async () => {
|
||||
// Create a mock request with query parameters
|
||||
const req = createMockRequest('GET')
|
||||
|
||||
// Create params similar to what Next.js would provide
|
||||
const params = Promise.resolve({ id: 'workflow-id' })
|
||||
|
||||
// Import the handler after mocks are set up
|
||||
const { GET } = await import('./route')
|
||||
|
||||
// Call the handler
|
||||
const response = await GET(req, { params })
|
||||
|
||||
// Get the actual status code - in some implementations this might not be 200
|
||||
// Based on the current implementation, validate the response exists
|
||||
expect(response).toBeDefined()
|
||||
|
||||
// Try to parse the response body
|
||||
let data
|
||||
try {
|
||||
data = await response.json()
|
||||
} catch (e) {
|
||||
// If we can't parse JSON, the response may not be what we expect
|
||||
console.error('Response could not be parsed as JSON:', await response.text())
|
||||
throw e
|
||||
}
|
||||
|
||||
// If status is 200, verify success structure
|
||||
if (response.status === 200) {
|
||||
expect(data).toHaveProperty('success', true)
|
||||
expect(data).toHaveProperty('output')
|
||||
expect(data.output).toHaveProperty('response')
|
||||
}
|
||||
|
||||
// Verify middleware was called
|
||||
const validateWorkflowAccess = (await import('@/app/api/workflows/middleware'))
|
||||
.validateWorkflowAccess
|
||||
expect(validateWorkflowAccess).toHaveBeenCalledWith(expect.any(Object), 'workflow-id')
|
||||
|
||||
// Verify executor was initialized
|
||||
const Executor = (await import('@/executor')).Executor
|
||||
expect(Executor).toHaveBeenCalled()
|
||||
|
||||
// Verify execute was called with undefined input (GET requests don't have body)
|
||||
expect(executeMock).toHaveBeenCalledWith('workflow-id')
|
||||
})
|
||||
|
||||
@@ -213,59 +203,45 @@ describe('Workflow Execution API Route', () => {
|
||||
* Simulates execution with a JSON body containing parameters
|
||||
*/
|
||||
it('should execute workflow with POST request successfully', async () => {
|
||||
// Create request body with custom inputs
|
||||
const requestBody = {
|
||||
inputs: {
|
||||
message: 'Test input message',
|
||||
},
|
||||
}
|
||||
|
||||
// Create a mock request with the request body
|
||||
const req = createMockRequest('POST', requestBody)
|
||||
|
||||
// Create params similar to what Next.js would provide
|
||||
const params = Promise.resolve({ id: 'workflow-id' })
|
||||
|
||||
// Import the handler after mocks are set up
|
||||
const { POST } = await import('./route')
|
||||
|
||||
// Call the handler
|
||||
const response = await POST(req, { params })
|
||||
|
||||
// Ensure response exists
|
||||
expect(response).toBeDefined()
|
||||
|
||||
// Try to parse the response body
|
||||
let data
|
||||
try {
|
||||
data = await response.json()
|
||||
} catch (e) {
|
||||
// If we can't parse JSON, the response may not be what we expect
|
||||
console.error('Response could not be parsed as JSON:', await response.text())
|
||||
throw e
|
||||
}
|
||||
|
||||
// If status is 200, verify success structure
|
||||
if (response.status === 200) {
|
||||
expect(data).toHaveProperty('success', true)
|
||||
expect(data).toHaveProperty('output')
|
||||
expect(data.output).toHaveProperty('response')
|
||||
}
|
||||
|
||||
// Verify middleware was called
|
||||
const validateWorkflowAccess = (await import('@/app/api/workflows/middleware'))
|
||||
.validateWorkflowAccess
|
||||
expect(validateWorkflowAccess).toHaveBeenCalledWith(expect.any(Object), 'workflow-id')
|
||||
|
||||
// Verify executor was constructed
|
||||
const Executor = (await import('@/executor')).Executor
|
||||
expect(Executor).toHaveBeenCalled()
|
||||
|
||||
// Verify execute was called with the input body
|
||||
expect(executeMock).toHaveBeenCalledWith('workflow-id')
|
||||
|
||||
// Updated expectations to match actual implementation
|
||||
// The structure should match: serializedWorkflow, processedBlockStates, decryptedEnvVars, processedInput, workflowVariables
|
||||
expect(Executor).toHaveBeenCalledWith(
|
||||
expect.anything(), // serializedWorkflow
|
||||
expect.anything(), // processedBlockStates
|
||||
@@ -282,7 +258,6 @@ describe('Workflow Execution API Route', () => {
|
||||
* Test POST execution with structured input matching the input format
|
||||
*/
|
||||
it('should execute workflow with structured input matching the input format', async () => {
|
||||
// Create structured input matching the expected input format
|
||||
const structuredInput = {
|
||||
firstName: 'John',
|
||||
age: 30,
|
||||
@@ -291,27 +266,20 @@ describe('Workflow Execution API Route', () => {
|
||||
tags: ['test', 'api'],
|
||||
}
|
||||
|
||||
// Create a mock request with the structured input
|
||||
const req = createMockRequest('POST', structuredInput)
|
||||
|
||||
// Create params similar to what Next.js would provide
|
||||
const params = Promise.resolve({ id: 'workflow-id' })
|
||||
|
||||
// Import the handler after mocks are set up
|
||||
const { POST } = await import('./route')
|
||||
|
||||
// Call the handler
|
||||
const response = await POST(req, { params })
|
||||
|
||||
// Ensure response exists and is successful
|
||||
expect(response).toBeDefined()
|
||||
expect(response.status).toBe(200)
|
||||
|
||||
// Parse the response body
|
||||
const data = await response.json()
|
||||
expect(data).toHaveProperty('success', true)
|
||||
|
||||
// Verify the executor was constructed with the structured input - updated to match implementation
|
||||
const Executor = (await import('@/executor')).Executor
|
||||
expect(Executor).toHaveBeenCalledWith(
|
||||
expect.anything(), // serializedWorkflow
|
||||
@@ -478,39 +446,51 @@ describe('Workflow Execution API Route', () => {
|
||||
workflow: {
|
||||
id: 'workflow-with-vars-id',
|
||||
userId: 'user-id',
|
||||
state: {
|
||||
blocks: {
|
||||
'starter-id': {
|
||||
id: 'starter-id',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
enabled: true,
|
||||
},
|
||||
'agent-id': {
|
||||
id: 'agent-id',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
position: { x: 300, y: 100 },
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
edges: [
|
||||
{
|
||||
id: 'edge-1',
|
||||
source: 'starter-id',
|
||||
target: 'agent-id',
|
||||
sourceHandle: 'source',
|
||||
targetHandle: 'target',
|
||||
},
|
||||
],
|
||||
loops: {},
|
||||
},
|
||||
variables: workflowVariables,
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock normalized tables helper for this specific test
|
||||
vi.doMock('@/lib/workflows/db-helpers', () => ({
|
||||
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
|
||||
blocks: {
|
||||
'starter-id': {
|
||||
id: 'starter-id',
|
||||
type: 'starter',
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
outputs: {},
|
||||
data: {},
|
||||
},
|
||||
'agent-id': {
|
||||
id: 'agent-id',
|
||||
type: 'agent',
|
||||
name: 'Agent',
|
||||
position: { x: 300, y: 100 },
|
||||
enabled: true,
|
||||
subBlocks: {},
|
||||
outputs: {},
|
||||
data: {},
|
||||
},
|
||||
},
|
||||
edges: [
|
||||
{
|
||||
id: 'edge-1',
|
||||
source: 'starter-id',
|
||||
target: 'agent-id',
|
||||
sourceHandle: 'source',
|
||||
targetHandle: 'target',
|
||||
},
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isFromNormalizedTables: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Create a constructor mock to capture the arguments
|
||||
const executorConstructorMock = vi.fn().mockImplementation(() => ({
|
||||
execute: vi.fn().mockResolvedValue({
|
||||
|
||||
@@ -7,6 +7,7 @@ import { persistExecutionError, persistExecutionLogs } from '@/lib/logs/executio
|
||||
import { buildTraceSpans } from '@/lib/logs/trace-spans'
|
||||
import { checkServerSideUsageLimits } from '@/lib/usage-monitor'
|
||||
import { decryptSecret } from '@/lib/utils'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
import {
|
||||
createHttpResponseFromBlock,
|
||||
updateWorkflowRunCounts,
|
||||
@@ -94,19 +95,34 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
|
||||
runningExecutions.add(executionKey)
|
||||
logger.info(`[${requestId}] Starting workflow execution: ${workflowId}`)
|
||||
|
||||
// Use the deployed state if available, otherwise fall back to current state
|
||||
const workflowState = workflow.deployedState || workflow.state
|
||||
// Load workflow data from normalized tables
|
||||
logger.debug(`[${requestId}] Loading workflow ${workflowId} from normalized tables`)
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
|
||||
if (!workflow.deployedState) {
|
||||
logger.warn(
|
||||
`[${requestId}] No deployed state found for workflow: ${workflowId}, using current state`
|
||||
)
|
||||
let blocks: Record<string, any>
|
||||
let edges: any[]
|
||||
let loops: Record<string, any>
|
||||
let parallels: Record<string, any>
|
||||
|
||||
if (normalizedData) {
|
||||
// Use normalized data as primary source
|
||||
;({ blocks, edges, loops, parallels } = normalizedData)
|
||||
logger.info(`[${requestId}] Using normalized tables for workflow execution: ${workflowId}`)
|
||||
} else {
|
||||
logger.info(`[${requestId}] Using deployed state for workflow execution: ${workflowId}`)
|
||||
}
|
||||
// Fallback to deployed state if available (for legacy workflows)
|
||||
logger.warn(
|
||||
`[${requestId}] No normalized data found, falling back to deployed state for workflow: ${workflowId}`
|
||||
)
|
||||
|
||||
const state = workflowState as WorkflowState
|
||||
const { blocks, edges, loops, parallels } = state
|
||||
if (!workflow.deployedState) {
|
||||
throw new Error(
|
||||
`Workflow ${workflowId} has no deployed state and no normalized data available`
|
||||
)
|
||||
}
|
||||
|
||||
const deployedState = workflow.deployedState as WorkflowState
|
||||
;({ blocks, edges, loops, parallels } = deployedState)
|
||||
}
|
||||
|
||||
// Use the same execution flow as in scheduled executions
|
||||
const mergedStates = mergeSubblockState(blocks)
|
||||
|
||||
@@ -104,7 +104,7 @@ async function createWorkspace(userId: string, name: string) {
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
// Create "Workflow 1" for the workspace with start block
|
||||
// Create initial workflow for the workspace with start block
|
||||
const starterId = crypto.randomUUID()
|
||||
const initialState = {
|
||||
blocks: {
|
||||
@@ -170,7 +170,7 @@ async function createWorkspace(userId: string, name: string) {
|
||||
userId,
|
||||
workspaceId,
|
||||
folderId: null,
|
||||
name: 'Workflow 1',
|
||||
name: 'default-agent',
|
||||
description: 'Your first workflow - start building here!',
|
||||
state: initialState,
|
||||
color: '#3972F6',
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { AlertCircle, Loader2, X } from 'lucide-react'
|
||||
import { AlertCircle, ChevronDown, ChevronUp, Loader2, X } from 'lucide-react'
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogAction,
|
||||
@@ -16,6 +16,7 @@ import { Button } from '@/components/ui/button'
|
||||
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import type { ChunkData, DocumentData } from '@/stores/knowledge/store'
|
||||
|
||||
@@ -28,6 +29,12 @@ interface EditChunkModalProps {
|
||||
isOpen: boolean
|
||||
onClose: () => void
|
||||
onChunkUpdate?: (updatedChunk: ChunkData) => void
|
||||
// New props for navigation
|
||||
allChunks?: ChunkData[]
|
||||
currentPage?: number
|
||||
totalPages?: number
|
||||
onNavigateToChunk?: (chunk: ChunkData) => void
|
||||
onNavigateToPage?: (page: number, selectChunk: 'first' | 'last') => Promise<void>
|
||||
}
|
||||
|
||||
export function EditChunkModal({
|
||||
@@ -37,11 +44,18 @@ export function EditChunkModal({
|
||||
isOpen,
|
||||
onClose,
|
||||
onChunkUpdate,
|
||||
allChunks = [],
|
||||
currentPage = 1,
|
||||
totalPages = 1,
|
||||
onNavigateToChunk,
|
||||
onNavigateToPage,
|
||||
}: EditChunkModalProps) {
|
||||
const [editedContent, setEditedContent] = useState(chunk?.content || '')
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [isNavigating, setIsNavigating] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
|
||||
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
|
||||
|
||||
// Check if there are unsaved changes
|
||||
const hasUnsavedChanges = editedContent !== (chunk?.content || '')
|
||||
@@ -53,6 +67,13 @@ export function EditChunkModal({
|
||||
}
|
||||
}, [chunk?.id, chunk?.content])
|
||||
|
||||
// Find current chunk index in the current page
|
||||
const currentChunkIndex = chunk ? allChunks.findIndex((c) => c.id === chunk.id) : -1
|
||||
|
||||
// Calculate navigation availability
|
||||
const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
|
||||
const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages
|
||||
|
||||
const handleSaveContent = async () => {
|
||||
if (!chunk || !document) return
|
||||
|
||||
@@ -82,7 +103,6 @@ export function EditChunkModal({
|
||||
|
||||
if (result.success && onChunkUpdate) {
|
||||
onChunkUpdate(result.data)
|
||||
onClose()
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error updating chunk:', err)
|
||||
@@ -92,8 +112,51 @@ export function EditChunkModal({
|
||||
}
|
||||
}
|
||||
|
||||
const navigateToChunk = async (direction: 'prev' | 'next') => {
|
||||
if (!chunk || isNavigating) return
|
||||
|
||||
try {
|
||||
setIsNavigating(true)
|
||||
|
||||
if (direction === 'prev') {
|
||||
if (currentChunkIndex > 0) {
|
||||
// Navigate to previous chunk in current page
|
||||
const prevChunk = allChunks[currentChunkIndex - 1]
|
||||
onNavigateToChunk?.(prevChunk)
|
||||
} else if (currentPage > 1) {
|
||||
// Load previous page and navigate to last chunk
|
||||
await onNavigateToPage?.(currentPage - 1, 'last')
|
||||
}
|
||||
} else {
|
||||
if (currentChunkIndex < allChunks.length - 1) {
|
||||
// Navigate to next chunk in current page
|
||||
const nextChunk = allChunks[currentChunkIndex + 1]
|
||||
onNavigateToChunk?.(nextChunk)
|
||||
} else if (currentPage < totalPages) {
|
||||
// Load next page and navigate to first chunk
|
||||
await onNavigateToPage?.(currentPage + 1, 'first')
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`Error navigating ${direction}:`, err)
|
||||
setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
|
||||
} finally {
|
||||
setIsNavigating(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleNavigate = (direction: 'prev' | 'next') => {
|
||||
if (hasUnsavedChanges) {
|
||||
setPendingNavigation(() => () => navigateToChunk(direction))
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
void navigateToChunk(direction)
|
||||
}
|
||||
}
|
||||
|
||||
const handleCloseAttempt = () => {
|
||||
if (hasUnsavedChanges && !isSaving) {
|
||||
setPendingNavigation(null)
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
onClose()
|
||||
@@ -102,7 +165,12 @@ export function EditChunkModal({
|
||||
|
||||
const handleConfirmDiscard = () => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
onClose()
|
||||
if (pendingNavigation) {
|
||||
void pendingNavigation()
|
||||
setPendingNavigation(null)
|
||||
} else {
|
||||
onClose()
|
||||
}
|
||||
}
|
||||
|
||||
const isFormValid = editedContent.trim().length > 0 && editedContent.trim().length <= 10000
|
||||
@@ -118,7 +186,59 @@ export function EditChunkModal({
|
||||
>
|
||||
<DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
<div className='flex items-center gap-3'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
|
||||
{/* Navigation Controls */}
|
||||
<div className='flex items-center gap-1'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('prev')}
|
||||
disabled={!canNavigatePrev || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronUp className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Previous chunk{' '}
|
||||
{currentPage > 1 && currentChunkIndex === 0 ? '(previous page)' : ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('next')}
|
||||
disabled={!canNavigateNext || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronDown className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Next chunk{' '}
|
||||
{currentPage < totalPages && currentChunkIndex === allChunks.length - 1
|
||||
? '(next page)'
|
||||
: ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='icon'
|
||||
@@ -142,7 +262,7 @@ export function EditChunkModal({
|
||||
{document?.filename || 'Unknown Document'}
|
||||
</p>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
Editing chunk #{chunk.chunkIndex}
|
||||
Editing chunk #{chunk.chunkIndex} • Page {currentPage} of {totalPages}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -167,7 +287,7 @@ export function EditChunkModal({
|
||||
onChange={(e) => setEditedContent(e.target.value)}
|
||||
placeholder='Enter chunk content...'
|
||||
className='flex-1 resize-none'
|
||||
disabled={isSaving}
|
||||
disabled={isSaving || isNavigating}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
@@ -176,12 +296,16 @@ export function EditChunkModal({
|
||||
{/* Footer */}
|
||||
<div className='mt-auto border-t px-6 pt-4 pb-6'>
|
||||
<div className='flex justify-between'>
|
||||
<Button variant='outline' onClick={handleCloseAttempt} disabled={isSaving}>
|
||||
<Button
|
||||
variant='outline'
|
||||
onClick={handleCloseAttempt}
|
||||
disabled={isSaving || isNavigating}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleSaveContent}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges || isNavigating}
|
||||
className='bg-[#701FFC] font-[480] text-primary-foreground shadow-[0_0_0_0_#701FFC] transition-all duration-200 hover:bg-[#6518E6] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
||||
>
|
||||
{isSaving ? (
|
||||
@@ -205,12 +329,19 @@ export function EditChunkModal({
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Unsaved Changes</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
You have unsaved changes to this chunk content. Are you sure you want to discard your
|
||||
changes and close the editor?
|
||||
You have unsaved changes to this chunk content.
|
||||
{pendingNavigation
|
||||
? ' Do you want to discard your changes and navigate to the next chunk?'
|
||||
: ' Are you sure you want to discard your changes and close the editor?'}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel onClick={() => setShowUnsavedChangesAlert(false)}>
|
||||
<AlertDialogCancel
|
||||
onClick={() => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
setPendingNavigation(null)
|
||||
}}
|
||||
>
|
||||
Keep Editing
|
||||
</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
|
||||
@@ -767,6 +767,30 @@ export function Document({
|
||||
updateChunk(updatedChunk.id, updatedChunk)
|
||||
setSelectedChunk(updatedChunk)
|
||||
}}
|
||||
allChunks={chunks}
|
||||
currentPage={currentPage}
|
||||
totalPages={totalPages}
|
||||
onNavigateToChunk={(chunk: ChunkData) => {
|
||||
setSelectedChunk(chunk)
|
||||
}}
|
||||
onNavigateToPage={async (page: number, selectChunk: 'first' | 'last') => {
|
||||
await goToPage(page)
|
||||
|
||||
const checkAndSelectChunk = () => {
|
||||
if (!isLoadingChunks && chunks.length > 0) {
|
||||
if (selectChunk === 'first') {
|
||||
setSelectedChunk(chunks[0])
|
||||
} else {
|
||||
setSelectedChunk(chunks[chunks.length - 1])
|
||||
}
|
||||
} else {
|
||||
// Retry after a short delay if chunks aren't loaded yet
|
||||
setTimeout(checkAndSelectChunk, 100)
|
||||
}
|
||||
}
|
||||
|
||||
setTimeout(checkAndSelectChunk, 0)
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Create Chunk Modal */}
|
||||
|
||||
@@ -36,16 +36,11 @@ import { useKnowledgeBase, useKnowledgeBaseDocuments } from '@/hooks/use-knowled
|
||||
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import { useSidebarStore } from '@/stores/sidebar/store'
|
||||
import { KnowledgeHeader } from '../components/knowledge-header/knowledge-header'
|
||||
import { useKnowledgeUpload } from '../hooks/use-knowledge-upload'
|
||||
import { KnowledgeBaseLoading } from './components/knowledge-base-loading/knowledge-base-loading'
|
||||
|
||||
const logger = createLogger('KnowledgeBase')
|
||||
|
||||
interface ProcessedDocumentResponse {
|
||||
documentId: string
|
||||
filename: string
|
||||
status: string
|
||||
}
|
||||
|
||||
interface KnowledgeBaseProps {
|
||||
id: string
|
||||
knowledgeBaseName?: string
|
||||
@@ -145,17 +140,32 @@ export function KnowledgeBase({
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
const [isBulkOperating, setIsBulkOperating] = useState(false)
|
||||
const [isUploading, setIsUploading] = useState(false)
|
||||
const [uploadError, setUploadError] = useState<{
|
||||
message: string
|
||||
timestamp: number
|
||||
} | null>(null)
|
||||
const [uploadProgress, setUploadProgress] = useState<{
|
||||
stage: 'idle' | 'uploading' | 'processing' | 'completing'
|
||||
filesCompleted: number
|
||||
totalFiles: number
|
||||
currentFile?: string
|
||||
}>({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
|
||||
const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
|
||||
onUploadComplete: async (uploadedFiles) => {
|
||||
const pendingDocuments: DocumentData[] = uploadedFiles.map((file, index) => ({
|
||||
id: `temp-${Date.now()}-${index}`,
|
||||
knowledgeBaseId: id,
|
||||
filename: file.filename,
|
||||
fileUrl: file.fileUrl,
|
||||
fileSize: file.fileSize,
|
||||
mimeType: file.mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}))
|
||||
|
||||
useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
|
||||
|
||||
await refreshDocuments()
|
||||
},
|
||||
})
|
||||
const router = useRouter()
|
||||
const fileInputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
@@ -240,11 +250,11 @@ export function KnowledgeBase({
|
||||
useEffect(() => {
|
||||
if (uploadError) {
|
||||
const timer = setTimeout(() => {
|
||||
setUploadError(null)
|
||||
clearError()
|
||||
}, 8000)
|
||||
return () => clearTimeout(timer)
|
||||
}
|
||||
}, [uploadError])
|
||||
}, [uploadError, clearError])
|
||||
|
||||
// Filter documents based on search query
|
||||
const filteredDocuments = documents.filter((doc) =>
|
||||
@@ -448,153 +458,18 @@ export function KnowledgeBase({
|
||||
const files = e.target.files
|
||||
if (!files || files.length === 0) return
|
||||
|
||||
interface UploadedFile {
|
||||
filename: string
|
||||
fileUrl: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
}
|
||||
|
||||
try {
|
||||
setIsUploading(true)
|
||||
setUploadError(null)
|
||||
setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })
|
||||
|
||||
// Upload all files and start processing
|
||||
const uploadedFiles: UploadedFile[] = []
|
||||
const fileArray = Array.from(files)
|
||||
|
||||
for (const [index, file] of fileArray.entries()) {
|
||||
setUploadProgress((prev) => ({ ...prev, currentFile: file.name, filesCompleted: index }))
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
|
||||
const uploadResponse = await fetch('/api/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json()
|
||||
throw new Error(`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
const uploadResult = await uploadResponse.json()
|
||||
|
||||
// Validate upload result structure
|
||||
if (!uploadResult.path) {
|
||||
throw new Error(`Invalid upload response for ${file.name}: missing file path`)
|
||||
}
|
||||
|
||||
uploadedFiles.push({
|
||||
filename: file.name,
|
||||
fileUrl: uploadResult.path.startsWith('http')
|
||||
? uploadResult.path
|
||||
: `${window.location.origin}${uploadResult.path}`,
|
||||
fileSize: file.size,
|
||||
mimeType: file.type,
|
||||
})
|
||||
}
|
||||
|
||||
setUploadProgress((prev) => ({
|
||||
...prev,
|
||||
stage: 'processing',
|
||||
filesCompleted: fileArray.length,
|
||||
}))
|
||||
|
||||
// Start async document processing
|
||||
const processResponse = await fetch(`/api/knowledge/${id}/documents`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
documents: uploadedFiles,
|
||||
processingOptions: {
|
||||
chunkSize: knowledgeBase?.chunkingConfig?.maxSize || 1024,
|
||||
minCharactersPerChunk: knowledgeBase?.chunkingConfig?.minSize || 100,
|
||||
chunkOverlap: knowledgeBase?.chunkingConfig?.overlap || 200,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}),
|
||||
const chunkingConfig = knowledgeBase?.chunkingConfig
|
||||
await uploadFiles(Array.from(files), id, {
|
||||
chunkSize: chunkingConfig?.maxSize || 1024,
|
||||
minCharactersPerChunk: chunkingConfig?.minSize || 100,
|
||||
chunkOverlap: chunkingConfig?.overlap || 200,
|
||||
recipe: 'default',
|
||||
})
|
||||
|
||||
if (!processResponse.ok) {
|
||||
const errorData = await processResponse.json()
|
||||
throw new Error(
|
||||
`Failed to start document processing: ${errorData.error || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Validate process result structure
|
||||
if (!processResult.success) {
|
||||
throw new Error(`Document processing failed: ${processResult.error || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
if (!processResult.data || !processResult.data.documentsCreated) {
|
||||
throw new Error('Invalid processing response: missing document data')
|
||||
}
|
||||
|
||||
// Create pending document objects and add them to the store immediately
|
||||
const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
|
||||
(doc: ProcessedDocumentResponse, index: number) => {
|
||||
if (!doc.documentId || !doc.filename) {
|
||||
logger.error(`Invalid document data received:`, doc)
|
||||
throw new Error(
|
||||
`Invalid document data for ${uploadedFiles[index]?.filename || 'unknown file'}`
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
id: doc.documentId,
|
||||
knowledgeBaseId: id,
|
||||
filename: doc.filename,
|
||||
fileUrl: uploadedFiles[index].fileUrl,
|
||||
fileSize: uploadedFiles[index].fileSize,
|
||||
mimeType: uploadedFiles[index].mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
// Add pending documents to store for immediate UI update
|
||||
useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
|
||||
|
||||
logger.info(`Successfully started processing ${uploadedFiles.length} documents`)
|
||||
|
||||
setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))
|
||||
|
||||
// Trigger a refresh to ensure documents are properly loaded
|
||||
await refreshDocuments()
|
||||
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
} catch (err) {
|
||||
logger.error('Error uploading documents:', err)
|
||||
|
||||
const errorMessage =
|
||||
err instanceof Error ? err.message : 'Unknown error occurred during upload'
|
||||
setUploadError({
|
||||
message: errorMessage,
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
|
||||
// Show user-friendly error message in console for debugging
|
||||
console.error('Document upload failed:', errorMessage)
|
||||
} catch (error) {
|
||||
logger.error('Error uploading files:', error)
|
||||
// Error handling is managed by the upload hook
|
||||
} finally {
|
||||
setIsUploading(false)
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
// Reset the file input
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.value = ''
|
||||
@@ -995,7 +870,7 @@ export function KnowledgeBase({
|
||||
</tr>
|
||||
))
|
||||
) : (
|
||||
filteredDocuments.map((doc, index) => {
|
||||
filteredDocuments.map((doc) => {
|
||||
const isSelected = selectedDocuments.has(doc.id)
|
||||
const statusDisplay = getStatusDisplay(doc)
|
||||
// const processingTime = getProcessingTime(doc)
|
||||
@@ -1254,7 +1129,7 @@ export function KnowledgeBase({
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => setUploadError(null)}
|
||||
onClick={() => clearError()}
|
||||
className='flex-shrink-0 rounded-sm opacity-70 hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring'
|
||||
>
|
||||
<X className='h-4 w-4' />
|
||||
|
||||
@@ -13,8 +13,8 @@ import { Label } from '@/components/ui/label'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
|
||||
import type { DocumentData, KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
import { useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
import { useKnowledgeUpload } from '../../hooks/use-knowledge-upload'
|
||||
|
||||
const logger = createLogger('CreateModal')
|
||||
|
||||
@@ -29,12 +29,6 @@ const ACCEPTED_FILE_TYPES = [
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
||||
]
|
||||
|
||||
interface ProcessedDocumentResponse {
|
||||
documentId: string
|
||||
filename: string
|
||||
status: string
|
||||
}
|
||||
|
||||
interface FileWithPreview extends File {
|
||||
preview: string
|
||||
}
|
||||
@@ -89,6 +83,12 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
const scrollContainerRef = useRef<HTMLDivElement>(null)
|
||||
const dropZoneRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
const { uploadFiles } = useKnowledgeUpload({
|
||||
onUploadComplete: (uploadedFiles) => {
|
||||
logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
|
||||
},
|
||||
})
|
||||
|
||||
// Cleanup file preview URLs when component unmounts to prevent memory leaks
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
@@ -235,19 +235,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
|
||||
}
|
||||
|
||||
// Helper function to create uploadedFiles array from file uploads
|
||||
const createUploadedFile = (
|
||||
filename: string,
|
||||
fileUrl: string,
|
||||
fileSize: number,
|
||||
mimeType: string
|
||||
) => ({
|
||||
filename,
|
||||
fileUrl: fileUrl.startsWith('http') ? fileUrl : `${window.location.origin}${fileUrl}`,
|
||||
fileSize,
|
||||
mimeType,
|
||||
})
|
||||
|
||||
const onSubmit = async (data: FormValues) => {
|
||||
setIsSubmitting(true)
|
||||
setSubmitStatus(null)
|
||||
@@ -285,138 +272,14 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
|
||||
const newKnowledgeBase = result.data
|
||||
|
||||
// If files are uploaded, upload them and start processing
|
||||
if (files.length > 0) {
|
||||
// First, upload all files to get their URLs
|
||||
interface UploadedFile {
|
||||
filename: string
|
||||
fileUrl: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
}
|
||||
|
||||
const uploadedFiles: UploadedFile[] = []
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
const presignedResponse = await fetch('/api/files/presigned', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
fileName: file.name,
|
||||
contentType: file.type,
|
||||
fileSize: file.size,
|
||||
}),
|
||||
})
|
||||
|
||||
const presignedData = await presignedResponse.json()
|
||||
|
||||
if (presignedResponse.ok && presignedData.directUploadSupported) {
|
||||
const uploadHeaders: Record<string, string> = {
|
||||
'Content-Type': file.type,
|
||||
}
|
||||
|
||||
// Add Azure-specific headers if provided
|
||||
if (presignedData.uploadHeaders) {
|
||||
Object.assign(uploadHeaders, presignedData.uploadHeaders)
|
||||
}
|
||||
|
||||
const uploadResponse = await fetch(presignedData.presignedUrl, {
|
||||
method: 'PUT',
|
||||
headers: uploadHeaders, // Use the merged headers
|
||||
body: file,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
throw new Error(
|
||||
`Direct upload failed: ${uploadResponse.status} ${uploadResponse.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
uploadedFiles.push(
|
||||
createUploadedFile(file.name, presignedData.fileInfo.path, file.size, file.type)
|
||||
)
|
||||
} else {
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
|
||||
const uploadResponse = await fetch('/api/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json()
|
||||
throw new Error(
|
||||
`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const uploadResult = await uploadResponse.json()
|
||||
uploadedFiles.push(
|
||||
createUploadedFile(file.name, uploadResult.path, file.size, file.type)
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to upload ${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Start async document processing
|
||||
const processResponse = await fetch(`/api/knowledge/${newKnowledgeBase.id}/documents`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
documents: uploadedFiles,
|
||||
processingOptions: {
|
||||
chunkSize: data.maxChunkSize,
|
||||
minCharactersPerChunk: data.minChunkSize,
|
||||
chunkOverlap: data.overlapSize,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}),
|
||||
const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, {
|
||||
chunkSize: data.maxChunkSize,
|
||||
minCharactersPerChunk: data.minChunkSize,
|
||||
chunkOverlap: data.overlapSize,
|
||||
recipe: 'default',
|
||||
})
|
||||
|
||||
if (!processResponse.ok) {
|
||||
throw new Error('Failed to start document processing')
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Create pending document objects and add them to the store immediately
|
||||
if (processResult.success && processResult.data.documentsCreated) {
|
||||
const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
|
||||
(doc: ProcessedDocumentResponse, index: number) => ({
|
||||
id: doc.documentId,
|
||||
knowledgeBaseId: newKnowledgeBase.id,
|
||||
filename: doc.filename,
|
||||
fileUrl: uploadedFiles[index].fileUrl,
|
||||
fileSize: uploadedFiles[index].fileSize,
|
||||
mimeType: uploadedFiles[index].mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
})
|
||||
)
|
||||
|
||||
// Add pending documents to store for immediate UI update
|
||||
useKnowledgeStore.getState().addPendingDocuments(newKnowledgeBase.id, pendingDocuments)
|
||||
}
|
||||
|
||||
// Update the knowledge base object with the correct document count
|
||||
newKnowledgeBase.docCount = uploadedFiles.length
|
||||
|
||||
|
||||
@@ -0,0 +1,352 @@
|
||||
import { useState } from 'react'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
|
||||
const logger = createLogger('KnowledgeUpload')
|
||||
|
||||
export interface UploadedFile {
|
||||
filename: string
|
||||
fileUrl: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
}
|
||||
|
||||
export interface UploadProgress {
|
||||
stage: 'idle' | 'uploading' | 'processing' | 'completing'
|
||||
filesCompleted: number
|
||||
totalFiles: number
|
||||
currentFile?: string
|
||||
}
|
||||
|
||||
export interface UploadError {
|
||||
message: string
|
||||
timestamp: number
|
||||
code?: string
|
||||
details?: any
|
||||
}
|
||||
|
||||
export interface ProcessingOptions {
|
||||
chunkSize?: number
|
||||
minCharactersPerChunk?: number
|
||||
chunkOverlap?: number
|
||||
recipe?: string
|
||||
}
|
||||
|
||||
export interface UseKnowledgeUploadOptions {
|
||||
onUploadComplete?: (uploadedFiles: UploadedFile[]) => void
|
||||
onError?: (error: UploadError) => void
|
||||
}
|
||||
|
||||
class KnowledgeUploadError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public code: string,
|
||||
public details?: any
|
||||
) {
|
||||
super(message)
|
||||
this.name = 'KnowledgeUploadError'
|
||||
}
|
||||
}
|
||||
|
||||
class PresignedUrlError extends KnowledgeUploadError {
|
||||
constructor(message: string, details?: any) {
|
||||
super(message, 'PRESIGNED_URL_ERROR', details)
|
||||
}
|
||||
}
|
||||
|
||||
class DirectUploadError extends KnowledgeUploadError {
|
||||
constructor(message: string, details?: any) {
|
||||
super(message, 'DIRECT_UPLOAD_ERROR', details)
|
||||
}
|
||||
}
|
||||
|
||||
class ProcessingError extends KnowledgeUploadError {
|
||||
constructor(message: string, details?: any) {
|
||||
super(message, 'PROCESSING_ERROR', details)
|
||||
}
|
||||
}
|
||||
|
||||
export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
const [isUploading, setIsUploading] = useState(false)
|
||||
const [uploadProgress, setUploadProgress] = useState<UploadProgress>({
|
||||
stage: 'idle',
|
||||
filesCompleted: 0,
|
||||
totalFiles: 0,
|
||||
})
|
||||
const [uploadError, setUploadError] = useState<UploadError | null>(null)
|
||||
|
||||
const createUploadedFile = (
|
||||
filename: string,
|
||||
fileUrl: string,
|
||||
fileSize: number,
|
||||
mimeType: string
|
||||
): UploadedFile => ({
|
||||
filename,
|
||||
fileUrl,
|
||||
fileSize,
|
||||
mimeType,
|
||||
})
|
||||
|
||||
const createErrorFromException = (error: unknown, defaultMessage: string): UploadError => {
|
||||
if (error instanceof KnowledgeUploadError) {
|
||||
return {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
details: error.details,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
return {
|
||||
message: error.message,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
message: defaultMessage,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
const uploadFiles = async (
|
||||
files: File[],
|
||||
knowledgeBaseId: string,
|
||||
processingOptions: ProcessingOptions = {}
|
||||
): Promise<UploadedFile[]> => {
|
||||
if (files.length === 0) {
|
||||
throw new KnowledgeUploadError('No files provided for upload', 'NO_FILES')
|
||||
}
|
||||
|
||||
if (!knowledgeBaseId?.trim()) {
|
||||
throw new KnowledgeUploadError('Knowledge base ID is required', 'INVALID_KB_ID')
|
||||
}
|
||||
|
||||
try {
|
||||
setIsUploading(true)
|
||||
setUploadError(null)
|
||||
setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })
|
||||
|
||||
const uploadedFiles: UploadedFile[] = []
|
||||
|
||||
// Upload all files using presigned URLs
|
||||
for (const [index, file] of files.entries()) {
|
||||
setUploadProgress((prev) => ({
|
||||
...prev,
|
||||
currentFile: file.name,
|
||||
filesCompleted: index,
|
||||
}))
|
||||
|
||||
try {
|
||||
// Get presigned URL
|
||||
const presignedResponse = await fetch('/api/files/presigned?type=knowledge-base', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
fileName: file.name,
|
||||
contentType: file.type,
|
||||
fileSize: file.size,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!presignedResponse.ok) {
|
||||
let errorDetails: any = null
|
||||
try {
|
||||
errorDetails = await presignedResponse.json()
|
||||
} catch {
|
||||
// Ignore JSON parsing errors
|
||||
}
|
||||
|
||||
throw new PresignedUrlError(
|
||||
`Failed to get presigned URL for ${file.name}: ${presignedResponse.status} ${presignedResponse.statusText}`,
|
||||
errorDetails
|
||||
)
|
||||
}
|
||||
|
||||
const presignedData = await presignedResponse.json()
|
||||
|
||||
if (presignedData.directUploadSupported) {
|
||||
// Use presigned URL for direct upload
|
||||
const uploadHeaders: Record<string, string> = {
|
||||
'Content-Type': file.type,
|
||||
}
|
||||
|
||||
// Add Azure-specific headers if provided
|
||||
if (presignedData.uploadHeaders) {
|
||||
Object.assign(uploadHeaders, presignedData.uploadHeaders)
|
||||
}
|
||||
|
||||
const uploadResponse = await fetch(presignedData.presignedUrl, {
|
||||
method: 'PUT',
|
||||
headers: uploadHeaders,
|
||||
body: file,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
throw new DirectUploadError(
|
||||
`Direct upload failed for ${file.name}: ${uploadResponse.status} ${uploadResponse.statusText}`,
|
||||
{ uploadResponse: uploadResponse.statusText }
|
||||
)
|
||||
}
|
||||
|
||||
// Convert relative path to full URL for schema validation
|
||||
const fullFileUrl = presignedData.fileInfo.path.startsWith('http')
|
||||
? presignedData.fileInfo.path
|
||||
: `${window.location.origin}${presignedData.fileInfo.path}`
|
||||
|
||||
uploadedFiles.push(createUploadedFile(file.name, fullFileUrl, file.size, file.type))
|
||||
} else {
|
||||
// Fallback to traditional upload through API route
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
|
||||
const uploadResponse = await fetch('/api/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
let errorData: any = null
|
||||
try {
|
||||
errorData = await uploadResponse.json()
|
||||
} catch {
|
||||
// Ignore JSON parsing errors
|
||||
}
|
||||
|
||||
throw new DirectUploadError(
|
||||
`Failed to upload ${file.name}: ${errorData?.error || 'Unknown error'}`,
|
||||
errorData
|
||||
)
|
||||
}
|
||||
|
||||
const uploadResult = await uploadResponse.json()
|
||||
|
||||
// Validate upload result structure
|
||||
if (!uploadResult.path) {
|
||||
throw new DirectUploadError(
|
||||
`Invalid upload response for ${file.name}: missing file path`,
|
||||
uploadResult
|
||||
)
|
||||
}
|
||||
|
||||
uploadedFiles.push(
|
||||
createUploadedFile(
|
||||
file.name,
|
||||
uploadResult.path.startsWith('http')
|
||||
? uploadResult.path
|
||||
: `${window.location.origin}${uploadResult.path}`,
|
||||
file.size,
|
||||
file.type
|
||||
)
|
||||
)
|
||||
}
|
||||
} catch (fileError) {
|
||||
logger.error(`Error uploading file ${file.name}:`, fileError)
|
||||
throw fileError // Re-throw to be caught by outer try-catch
|
||||
}
|
||||
}
|
||||
|
||||
setUploadProgress((prev) => ({ ...prev, stage: 'processing' }))
|
||||
|
||||
// Start async document processing
|
||||
const processPayload = {
|
||||
documents: uploadedFiles,
|
||||
processingOptions: {
|
||||
chunkSize: processingOptions.chunkSize || 1024,
|
||||
minCharactersPerChunk: processingOptions.minCharactersPerChunk || 100,
|
||||
chunkOverlap: processingOptions.chunkOverlap || 200,
|
||||
recipe: processingOptions.recipe || 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}
|
||||
|
||||
const processResponse = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(processPayload),
|
||||
})
|
||||
|
||||
if (!processResponse.ok) {
|
||||
let errorData: any = null
|
||||
try {
|
||||
errorData = await processResponse.json()
|
||||
} catch {
|
||||
// Ignore JSON parsing errors
|
||||
}
|
||||
|
||||
logger.error('Document processing failed:', {
|
||||
status: processResponse.status,
|
||||
error: errorData,
|
||||
uploadedFiles: uploadedFiles.map((f) => ({
|
||||
filename: f.filename,
|
||||
fileUrl: f.fileUrl,
|
||||
fileSize: f.fileSize,
|
||||
mimeType: f.mimeType,
|
||||
})),
|
||||
})
|
||||
|
||||
throw new ProcessingError(
|
||||
`Failed to start document processing: ${errorData?.error || errorData?.message || 'Unknown error'}`,
|
||||
errorData
|
||||
)
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Validate process result structure
|
||||
if (!processResult.success) {
|
||||
throw new ProcessingError(
|
||||
`Document processing failed: ${processResult.error || 'Unknown error'}`,
|
||||
processResult
|
||||
)
|
||||
}
|
||||
|
||||
if (!processResult.data || !processResult.data.documentsCreated) {
|
||||
throw new ProcessingError(
|
||||
'Invalid processing response: missing document data',
|
||||
processResult
|
||||
)
|
||||
}
|
||||
|
||||
setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))
|
||||
|
||||
logger.info(`Successfully started processing ${uploadedFiles.length} documents`)
|
||||
|
||||
// Call success callback
|
||||
options.onUploadComplete?.(uploadedFiles)
|
||||
|
||||
return uploadedFiles
|
||||
} catch (err) {
|
||||
logger.error('Error uploading documents:', err)
|
||||
|
||||
const error = createErrorFromException(err, 'Unknown error occurred during upload')
|
||||
setUploadError(error)
|
||||
options.onError?.(error)
|
||||
|
||||
// Show user-friendly error message in console for debugging
|
||||
console.error('Document upload failed:', error.message)
|
||||
|
||||
throw err
|
||||
} finally {
|
||||
setIsUploading(false)
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
}
|
||||
}
|
||||
|
||||
const clearError = () => {
|
||||
setUploadError(null)
|
||||
}
|
||||
|
||||
return {
|
||||
isUploading,
|
||||
uploadProgress,
|
||||
uploadError,
|
||||
uploadFiles,
|
||||
clearError,
|
||||
}
|
||||
}
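
A minimal usage sketch for the upload hook above. The hook name, import path, and the argument order of `uploadFiles` are not shown in this hunk and are assumptions; only the returned fields (`isUploading`, `uploadProgress`, `uploadError`, `uploadFiles`, `clearError`), the progress shape, and the processing-option keys are taken from the code above.

// Hypothetical consumer of the hook above; names marked as assumed are illustrative only.
import { useKnowledgeUpload } from '@/hooks/use-knowledge-upload' // assumed hook name and path

export function KnowledgeBaseUploader({ knowledgeBaseId }: { knowledgeBaseId: string }) {
  const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
    onUploadComplete: (files) => console.info(`Queued ${files.length} documents for processing`),
    onError: (error) => console.error(error.message),
  })

  const handleFiles = async (files: File[]) => {
    clearError()
    try {
      // Option keys mirror the fallbacks applied in the hook (1024 / 100 / 200 / 'default');
      // passing them here assumes uploadFiles accepts (files, knowledgeBaseId, options).
      await uploadFiles(files, knowledgeBaseId, { chunkSize: 1024, chunkOverlap: 200 })
    } catch {
      // Errors are already surfaced through uploadError; nothing extra to do here.
    }
  }

  return (
    <div>
      <input
        type='file'
        multiple
        disabled={isUploading}
        onChange={(e) => handleFiles(Array.from(e.target.files ?? []))}
      />
      {isUploading && (
        <p>
          {uploadProgress.stage} ({uploadProgress.filesCompleted}/{uploadProgress.totalFiles})
        </p>
      )}
      {uploadError && <p role='alert'>{uploadError.message}</p>}
    </div>
  )
}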
|
||||
@@ -13,7 +13,7 @@ import {
|
||||
} from '@/components/ui/command'
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
|
||||
|
||||
interface DocumentData {
|
||||
id: string
|
||||
@@ -50,45 +50,25 @@ export function DocumentSelector({
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
}: DocumentSelectorProps) {
|
||||
const { getValue, setValue } = useSubBlockStore()
|
||||
|
||||
const [documents, setDocuments] = useState<DocumentData[]>([])
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [open, setOpen] = useState(false)
|
||||
const [selectedDocument, setSelectedDocument] = useState<DocumentData | null>(null)
|
||||
const [initialFetchDone, setInitialFetchDone] = useState(false)
|
||||
const [selectedId, setSelectedId] = useState('')
|
||||
|
||||
// Get the current value from the store
|
||||
const storeValue = getValue(blockId, subBlock.id)
|
||||
// Use the proper hook to get the current value and setter
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
|
||||
// Get the knowledge base ID from the same block's knowledgeBaseId subblock
|
||||
const knowledgeBaseId = getValue(blockId, 'knowledgeBaseId')
|
||||
const [knowledgeBaseId] = useSubBlockValue(blockId, 'knowledgeBaseId')
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
|
||||
// Initialize selectedId with the effective value
|
||||
useEffect(() => {
|
||||
if (isPreview && previewValue !== undefined) {
|
||||
setSelectedId(previewValue || '')
|
||||
} else {
|
||||
setSelectedId(value || '')
|
||||
}
|
||||
}, [value, isPreview, previewValue])
|
||||
|
||||
// Update local state when external value changes
|
||||
useEffect(() => {
|
||||
const currentValue = isPreview ? previewValue : value
|
||||
setSelectedId(currentValue || '')
|
||||
}, [value, isPreview, previewValue])
|
||||
|
||||
// Fetch documents for the selected knowledge base
|
||||
const fetchDocuments = useCallback(async () => {
|
||||
if (!knowledgeBaseId) {
|
||||
setDocuments([])
|
||||
setError('No knowledge base selected')
|
||||
setInitialFetchDone(true)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -109,39 +89,12 @@ export function DocumentSelector({
|
||||
|
||||
const fetchedDocuments = result.data || []
|
||||
setDocuments(fetchedDocuments)
|
||||
setInitialFetchDone(true)
|
||||
|
||||
// Auto-selection logic: if we have a valid selection, keep it
|
||||
// If there's only one document, select it
|
||||
// If we have a value but it's not in the documents, reset it
|
||||
if (selectedId && !fetchedDocuments.some((doc: DocumentData) => doc.id === selectedId)) {
|
||||
setSelectedId('')
|
||||
if (!isPreview) {
|
||||
setValue(blockId, subBlock.id, '')
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
(!selectedId || !fetchedDocuments.some((doc: DocumentData) => doc.id === selectedId)) &&
|
||||
fetchedDocuments.length > 0
|
||||
) {
|
||||
if (fetchedDocuments.length === 1) {
|
||||
// If only one document, auto-select it
|
||||
const singleDoc = fetchedDocuments[0]
|
||||
setSelectedId(singleDoc.id)
|
||||
setSelectedDocument(singleDoc)
|
||||
if (!isPreview) {
|
||||
setValue(blockId, subBlock.id, singleDoc.id)
|
||||
}
|
||||
onDocumentSelect?.(singleDoc.id)
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if ((err as Error).name === 'AbortError') return
|
||||
setError((err as Error).message)
|
||||
setDocuments([])
|
||||
}
|
||||
}, [knowledgeBaseId, selectedId, setValue, blockId, subBlock.id, isPreview, onDocumentSelect])
|
||||
}, [knowledgeBaseId])
|
||||
|
||||
// Handle dropdown open/close - fetch documents when opening
|
||||
const handleOpenChange = (isOpen: boolean) => {
|
||||
@@ -160,50 +113,34 @@ export function DocumentSelector({
|
||||
if (isPreview) return
|
||||
|
||||
setSelectedDocument(document)
|
||||
setSelectedId(document.id)
|
||||
|
||||
if (!isPreview) {
|
||||
setValue(blockId, subBlock.id, document.id)
|
||||
}
|
||||
|
||||
setStoreValue(document.id)
|
||||
onDocumentSelect?.(document.id)
|
||||
setOpen(false)
|
||||
}
|
||||
|
||||
// Sync selected document with value prop
|
||||
useEffect(() => {
|
||||
if (selectedId && documents.length > 0) {
|
||||
const docInfo = documents.find((doc) => doc.id === selectedId)
|
||||
if (docInfo) {
|
||||
setSelectedDocument(docInfo)
|
||||
} else {
|
||||
setSelectedDocument(null)
|
||||
}
|
||||
} else if (!selectedId) {
|
||||
if (value && documents.length > 0) {
|
||||
const docInfo = documents.find((doc) => doc.id === value)
|
||||
setSelectedDocument(docInfo || null)
|
||||
} else {
|
||||
setSelectedDocument(null)
|
||||
}
|
||||
}, [selectedId, documents])
|
||||
}, [value, documents])
|
||||
|
||||
// Reset documents when knowledge base changes
|
||||
useEffect(() => {
|
||||
if (knowledgeBaseId) {
|
||||
setDocuments([])
|
||||
setSelectedDocument(null)
|
||||
setSelectedId('')
|
||||
setInitialFetchDone(false)
|
||||
setError(null)
|
||||
if (!isPreview) {
|
||||
setValue(blockId, subBlock.id, '')
|
||||
}
|
||||
}
|
||||
}, [knowledgeBaseId, blockId, subBlock.id, setValue, isPreview])
|
||||
setDocuments([])
|
||||
setSelectedDocument(null)
|
||||
setError(null)
|
||||
}, [knowledgeBaseId])
|
||||
|
||||
// Fetch documents when knowledge base is available and we haven't fetched yet
|
||||
// Fetch documents when knowledge base is available
|
||||
useEffect(() => {
|
||||
if (knowledgeBaseId && !initialFetchDone && !isPreview) {
|
||||
if (knowledgeBaseId && !isPreview) {
|
||||
fetchDocuments()
|
||||
}
|
||||
}, [knowledgeBaseId, initialFetchDone, fetchDocuments, isPreview])
|
||||
}, [knowledgeBaseId, isPreview, fetchDocuments])
|
||||
|
||||
const formatDocumentName = (document: DocumentData) => {
|
||||
return document.filename
|
||||
@@ -297,7 +234,7 @@ export function DocumentSelector({
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{document.id === selectedId && <Check className='ml-auto h-4 w-4' />}
|
||||
{document.id === value && <Check className='ml-auto h-4 w-4' />}
|
||||
</CommandItem>
|
||||
))}
|
||||
</CommandGroup>
|
||||
|
||||
@@ -4,8 +4,10 @@ import { useEffect, useState } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { env } from '@/lib/env'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
|
||||
import type { ConfluenceFileInfo } from './components/confluence-file-selector'
|
||||
import { ConfluenceFileSelector } from './components/confluence-file-selector'
|
||||
import type { DiscordChannelInfo } from './components/discord-channel-selector'
|
||||
@@ -36,8 +38,12 @@ export function FileSelectorInput({
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
}: FileSelectorInputProps) {
|
||||
const { getValue, setValue } = useSubBlockStore()
|
||||
const { getValue } = useSubBlockStore()
|
||||
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
|
||||
const { activeWorkflowId } = useWorkflowRegistry()
|
||||
|
||||
// Use the proper hook to get the current value and setter
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
const [selectedFileId, setSelectedFileId] = useState<string>('')
|
||||
const [_fileInfo, setFileInfo] = useState<FileInfo | ConfluenceFileInfo | null>(null)
|
||||
const [selectedIssueId, setSelectedIssueId] = useState<string>('')
|
||||
@@ -64,7 +70,7 @@ export function FileSelectorInput({
|
||||
const serverId = isDiscord ? (getValue(blockId, 'serverId') as string) || '' : ''
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : getValue(blockId, subBlock.id)
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
|
||||
// Get the current value from the store or prop value if in preview mode
|
||||
useEffect(() => {
|
||||
@@ -115,19 +121,19 @@ export function FileSelectorInput({
|
||||
const handleFileChange = (fileId: string, info?: any) => {
|
||||
setSelectedFileId(fileId)
|
||||
setFileInfo(info || null)
|
||||
setValue(blockId, subBlock.id, fileId)
|
||||
setStoreValue(fileId)
|
||||
}
|
||||
|
||||
// Handle issue selection
|
||||
const handleIssueChange = (issueKey: string, info?: JiraIssueInfo) => {
|
||||
setSelectedIssueId(issueKey)
|
||||
setIssueInfo(info || null)
|
||||
setValue(blockId, subBlock.id, issueKey)
|
||||
setStoreValue(issueKey)
|
||||
|
||||
// Clear the fields when a new issue is selected
|
||||
if (isJira) {
|
||||
setValue(blockId, 'summary', '')
|
||||
setValue(blockId, 'description', '')
|
||||
collaborativeSetSubblockValue(blockId, 'summary', '')
|
||||
collaborativeSetSubblockValue(blockId, 'description', '')
|
||||
}
|
||||
}
|
||||
|
||||
@@ -135,14 +141,14 @@ export function FileSelectorInput({
|
||||
const handleChannelChange = (channelId: string, info?: DiscordChannelInfo) => {
|
||||
setSelectedChannelId(channelId)
|
||||
setChannelInfo(info || null)
|
||||
setValue(blockId, subBlock.id, channelId)
|
||||
setStoreValue(channelId)
|
||||
}
|
||||
|
||||
// Handle calendar selection
|
||||
const handleCalendarChange = (calendarId: string, info?: GoogleCalendarInfo) => {
|
||||
setSelectedCalendarId(calendarId)
|
||||
setCalendarInfo(info || null)
|
||||
setValue(blockId, subBlock.id, calendarId)
|
||||
setStoreValue(calendarId)
|
||||
}
|
||||
|
||||
// For Google Drive
|
||||
@@ -337,7 +343,7 @@ export function FileSelectorInput({
|
||||
onChange={(value, info) => {
|
||||
setSelectedMessageId(value)
|
||||
setMessageInfo(info || null)
|
||||
setValue(blockId, subBlock.id, value)
|
||||
collaborativeSetSubblockValue(blockId, subBlock.id, value)
|
||||
}}
|
||||
provider='microsoft-teams'
|
||||
requiredScopes={subBlock.requiredScopes || []}
|
||||
|
||||
@@ -6,7 +6,6 @@ import { Button } from '@/components/ui/button'
|
||||
import { Progress } from '@/components/ui/progress'
|
||||
import { useNotificationStore } from '@/stores/notifications/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import { useSubBlockValue } from '../hooks/use-sub-block-value'
|
||||
|
||||
@@ -297,16 +296,10 @@ export function FileUpload({
|
||||
const newFiles = Array.from(uniqueFiles.values())
|
||||
|
||||
setStoreValue(newFiles)
|
||||
|
||||
// Make sure to update the subblock store value for the workflow execution
|
||||
useSubBlockStore.getState().setValue(blockId, subBlockId, newFiles)
|
||||
useWorkflowStore.getState().triggerUpdate()
|
||||
} else {
|
||||
// For single file: Replace with last uploaded file
|
||||
setStoreValue(uploadedFiles[0] || null)
|
||||
|
||||
// Make sure to update the subblock store value for the workflow execution
|
||||
useSubBlockStore.getState().setValue(blockId, subBlockId, uploadedFiles[0] || null)
|
||||
useWorkflowStore.getState().triggerUpdate()
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -362,17 +355,9 @@ export function FileUpload({
|
||||
const filesArray = Array.isArray(value) ? value : value ? [value] : []
|
||||
const updatedFiles = filesArray.filter((f) => f.path !== file.path)
|
||||
setStoreValue(updatedFiles.length > 0 ? updatedFiles : null)
|
||||
|
||||
// Make sure to update the subblock store value for the workflow execution
|
||||
useSubBlockStore
|
||||
.getState()
|
||||
.setValue(blockId, subBlockId, updatedFiles.length > 0 ? updatedFiles : null)
|
||||
} else {
|
||||
// For single file: Clear the value
|
||||
setStoreValue(null)
|
||||
|
||||
// Make sure to update the subblock store
|
||||
useSubBlockStore.getState().setValue(blockId, subBlockId, null)
|
||||
}
|
||||
|
||||
useWorkflowStore.getState().triggerUpdate()
|
||||
@@ -413,7 +398,6 @@ export function FileUpload({
|
||||
|
||||
// Clear input state immediately for better UX
|
||||
setStoreValue(null)
|
||||
useSubBlockStore.getState().setValue(blockId, subBlockId, null)
|
||||
useWorkflowStore.getState().triggerUpdate()
|
||||
|
||||
if (fileInputRef.current) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { Check, ChevronDown, RefreshCw, X } from 'lucide-react'
|
||||
import { PackageSearchIcon } from '@/components/icons'
|
||||
import { Button } from '@/components/ui/button'
|
||||
@@ -15,7 +15,7 @@ import {
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { type KnowledgeBaseData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useSubBlockValue } from '../../../sub-block/hooks/use-sub-block-value'
|
||||
|
||||
interface KnowledgeBaseSelectorProps {
|
||||
blockId: string
|
||||
@@ -36,23 +36,39 @@ export function KnowledgeBaseSelector({
|
||||
}: KnowledgeBaseSelectorProps) {
|
||||
const { getKnowledgeBasesList, knowledgeBasesList, loadingKnowledgeBasesList } =
|
||||
useKnowledgeStore()
|
||||
const { getValue, setValue } = useSubBlockStore()
|
||||
|
||||
const [knowledgeBases, setKnowledgeBases] = useState<KnowledgeBaseData[]>([])
|
||||
const [loading, setLoading] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [open, setOpen] = useState(false)
|
||||
const [selectedKnowledgeBases, setSelectedKnowledgeBases] = useState<KnowledgeBaseData[]>([])
|
||||
const [initialFetchDone, setInitialFetchDone] = useState(false)
|
||||
|
||||
// Get the current value from the store
|
||||
const storeValue = getValue(blockId, subBlock.id)
|
||||
// Use the proper hook to get the current value and setter - this prevents infinite loops
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
|
||||
const isMultiSelect = subBlock.multiSelect === true
|
||||
|
||||
// Compute selected knowledge bases directly from value - no local state to avoid loops
|
||||
const selectedKnowledgeBases = useMemo(() => {
|
||||
if (value && knowledgeBases.length > 0) {
|
||||
const selectedIds =
|
||||
typeof value === 'string'
|
||||
? value.includes(',')
|
||||
? value
|
||||
.split(',')
|
||||
.map((id) => id.trim())
|
||||
.filter((id) => id.length > 0)
|
||||
: [value]
|
||||
: []
|
||||
|
||||
return knowledgeBases.filter((kb) => selectedIds.includes(kb.id))
|
||||
}
|
||||
return []
|
||||
}, [value, knowledgeBases])
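
The memo above fixes the storage format for this subblock value: a single knowledge base id is stored bare, and multiple selections are stored as one comma-separated string. A standalone sketch of that round trip follows; the helper names are illustrative and not part of the codebase.

// Parse mirrors the useMemo above; serialize mirrors the valueToStore logic in the
// multi-select handlers further down.
function parseKnowledgeBaseIds(value: string | null): string[] {
  if (!value) return []
  return value.includes(',')
    ? value
        .split(',')
        .map((id) => id.trim())
        .filter((id) => id.length > 0)
    : [value]
}

function serializeKnowledgeBaseIds(ids: string[]): string {
  // A single id is stored bare; several ids are joined with commas.
  return ids.length === 1 ? ids[0] : ids.join(',')
}

// parseKnowledgeBaseIds('kb_a, kb_b') -> ['kb_a', 'kb_b']
// serializeKnowledgeBaseIds(['kb_a']) -> 'kb_a'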
|
||||
|
||||
// Fetch knowledge bases
|
||||
const fetchKnowledgeBases = useCallback(async () => {
|
||||
setLoading(true)
|
||||
@@ -87,11 +103,8 @@ export function KnowledgeBaseSelector({
|
||||
const handleSelectSingleKnowledgeBase = (knowledgeBase: KnowledgeBaseData) => {
|
||||
if (isPreview) return
|
||||
|
||||
setSelectedKnowledgeBases([knowledgeBase])
|
||||
|
||||
if (!isPreview) {
|
||||
setValue(blockId, subBlock.id, knowledgeBase.id)
|
||||
}
|
||||
// Use the hook's setter which handles collaborative updates
|
||||
setStoreValue(knowledgeBase.id)
|
||||
|
||||
onKnowledgeBaseSelect?.(knowledgeBase.id)
|
||||
setOpen(false)
|
||||
@@ -112,15 +125,13 @@ export function KnowledgeBaseSelector({
|
||||
newSelected = [...selectedKnowledgeBases, knowledgeBase]
|
||||
}
|
||||
|
||||
setSelectedKnowledgeBases(newSelected)
|
||||
const selectedIds = newSelected.map((kb) => kb.id)
|
||||
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')
|
||||
|
||||
if (!isPreview) {
|
||||
const selectedIds = newSelected.map((kb) => kb.id)
|
||||
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')
|
||||
setValue(blockId, subBlock.id, valueToStore)
|
||||
}
|
||||
// Use the hook's setter which handles collaborative updates
|
||||
setStoreValue(valueToStore)
|
||||
|
||||
onKnowledgeBaseSelect?.(newSelected.map((kb) => kb.id))
|
||||
onKnowledgeBaseSelect?.(selectedIds)
|
||||
}
|
||||
|
||||
// Remove selected knowledge base (for multi-select tags)
|
||||
@@ -128,37 +139,15 @@ export function KnowledgeBaseSelector({
|
||||
if (isPreview) return
|
||||
|
||||
const newSelected = selectedKnowledgeBases.filter((kb) => kb.id !== knowledgeBaseId)
|
||||
setSelectedKnowledgeBases(newSelected)
|
||||
const selectedIds = newSelected.map((kb) => kb.id)
|
||||
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')
|
||||
|
||||
if (!isPreview) {
|
||||
const selectedIds = newSelected.map((kb) => kb.id)
|
||||
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')
|
||||
setValue(blockId, subBlock.id, valueToStore)
|
||||
}
|
||||
// Use the hook's setter which handles collaborative updates
|
||||
setStoreValue(valueToStore)
|
||||
|
||||
onKnowledgeBaseSelect?.(newSelected.map((kb) => kb.id))
|
||||
onKnowledgeBaseSelect?.(selectedIds)
|
||||
}
|
||||
|
||||
// Sync selected knowledge bases with value prop
|
||||
useEffect(() => {
|
||||
if (value && knowledgeBases.length > 0) {
|
||||
const selectedIds =
|
||||
typeof value === 'string'
|
||||
? value.includes(',')
|
||||
? value
|
||||
.split(',')
|
||||
.map((id) => id.trim())
|
||||
.filter((id) => id.length > 0)
|
||||
: [value]
|
||||
: []
|
||||
|
||||
const selectedKbs = knowledgeBases.filter((kb) => selectedIds.includes(kb.id))
|
||||
setSelectedKnowledgeBases(selectedKbs)
|
||||
} else if (!value) {
|
||||
setSelectedKnowledgeBases([])
|
||||
}
|
||||
}, [value, knowledgeBases])
|
||||
|
||||
// Use cached data if available
|
||||
useEffect(() => {
|
||||
if (knowledgeBasesList.length > 0 && !initialFetchDone) {
|
||||
@@ -172,6 +161,7 @@ export function KnowledgeBaseSelector({
|
||||
if (
|
||||
value &&
|
||||
selectedKnowledgeBases.length === 0 &&
|
||||
knowledgeBases.length === 0 &&
|
||||
!loading &&
|
||||
!initialFetchDone &&
|
||||
!isPreview
|
||||
@@ -181,6 +171,7 @@ export function KnowledgeBaseSelector({
|
||||
}, [
|
||||
value,
|
||||
selectedKnowledgeBases.length,
|
||||
knowledgeBases.length,
|
||||
loading,
|
||||
initialFetchDone,
|
||||
fetchKnowledgeBases,
|
||||
|
||||
@@ -3,7 +3,9 @@
|
||||
import { useEffect, useState } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
|
||||
import { type DiscordServerInfo, DiscordServerSelector } from './components/discord-server-selector'
|
||||
import { type JiraProjectInfo, JiraProjectSelector } from './components/jira-project-selector'
|
||||
import { type LinearProjectInfo, LinearProjectSelector } from './components/linear-project-selector'
|
||||
@@ -26,10 +28,14 @@ export function ProjectSelectorInput({
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
}: ProjectSelectorInputProps) {
|
||||
const { getValue, setValue } = useSubBlockStore()
|
||||
const { getValue } = useSubBlockStore()
|
||||
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
|
||||
const [selectedProjectId, setSelectedProjectId] = useState<string>('')
|
||||
const [_projectInfo, setProjectInfo] = useState<JiraProjectInfo | DiscordServerInfo | null>(null)
|
||||
|
||||
// Use the proper hook to get the current value and setter
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
|
||||
// Get provider-specific values
|
||||
const provider = subBlock.provider || 'jira'
|
||||
const isDiscord = provider === 'discord'
|
||||
@@ -58,21 +64,21 @@ export function ProjectSelectorInput({
|
||||
) => {
|
||||
setSelectedProjectId(projectId)
|
||||
setProjectInfo(info || null)
|
||||
setValue(blockId, subBlock.id, projectId)
|
||||
setStoreValue(projectId)
|
||||
|
||||
// Clear the issue-related fields when a new project is selected
|
||||
if (provider === 'jira') {
|
||||
setValue(blockId, 'summary', '')
|
||||
setValue(blockId, 'description', '')
|
||||
setValue(blockId, 'issueKey', '')
|
||||
collaborativeSetSubblockValue(blockId, 'summary', '')
|
||||
collaborativeSetSubblockValue(blockId, 'description', '')
|
||||
collaborativeSetSubblockValue(blockId, 'issueKey', '')
|
||||
} else if (provider === 'discord') {
|
||||
setValue(blockId, 'channelId', '')
|
||||
collaborativeSetSubblockValue(blockId, 'channelId', '')
|
||||
} else if (provider === 'linear') {
|
||||
if (subBlock.id === 'teamId') {
|
||||
setValue(blockId, 'teamId', projectId)
|
||||
setValue(blockId, 'projectId', '')
|
||||
collaborativeSetSubblockValue(blockId, 'teamId', projectId)
|
||||
collaborativeSetSubblockValue(blockId, 'projectId', '')
|
||||
} else if (subBlock.id === 'projectId') {
|
||||
setValue(blockId, 'projectId', projectId)
|
||||
collaborativeSetSubblockValue(blockId, 'projectId', projectId)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -26,10 +26,10 @@ interface ScheduleConfigProps {
|
||||
|
||||
export function ScheduleConfig({
|
||||
blockId,
|
||||
subBlockId,
|
||||
subBlockId: _subBlockId,
|
||||
isConnecting,
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
previewValue: _previewValue,
|
||||
disabled = false,
|
||||
}: ScheduleConfigProps) {
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
@@ -56,13 +56,7 @@ export function ScheduleConfig({
|
||||
|
||||
// Get the startWorkflow value to determine if scheduling is enabled
|
||||
// and expose the setter so we can update it
|
||||
const [startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')
|
||||
const isScheduleEnabled = startWorkflow === 'schedule'
|
||||
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const [_startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')
|
||||
|
||||
// Function to check if schedule exists in the database
|
||||
const checkSchedule = async () => {
|
||||
@@ -110,10 +104,17 @@ export function ScheduleConfig({
|
||||
|
||||
// Check for schedule on mount and when relevant dependencies change
|
||||
useEffect(() => {
|
||||
// Always check for schedules regardless of the UI setting
|
||||
// This ensures we detect schedules even when the UI is set to manual
|
||||
checkSchedule()
|
||||
}, [workflowId, scheduleType, isModalOpen, refreshCounter])
|
||||
// Only check for schedules when workflowId changes or modal opens
|
||||
// Avoid checking on every scheduleType change to prevent excessive API calls
|
||||
if (workflowId && (isModalOpen || refreshCounter > 0)) {
|
||||
checkSchedule()
|
||||
}
|
||||
|
||||
// Cleanup function to reset loading state
|
||||
return () => {
|
||||
setIsLoading(false)
|
||||
}
|
||||
}, [workflowId, isModalOpen, refreshCounter])
|
||||
|
||||
// Format the schedule information for display
|
||||
const getScheduleInfo = () => {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useCallback, useEffect, useRef } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { getProviderFromModel } from '@/providers/utils'
|
||||
import { useGeneralStore } from '@/stores/settings/general/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
@@ -167,6 +168,8 @@ export function useSubBlockValue<T = any>(
|
||||
subBlockId: string,
|
||||
triggerWorkflowUpdate = false
|
||||
): readonly [T | null, (value: T) => void] {
|
||||
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
|
||||
|
||||
const blockType = useWorkflowStore(
|
||||
useCallback((state) => state.blocks?.[blockId]?.type, [blockId])
|
||||
)
|
||||
@@ -228,25 +231,24 @@ export function useSubBlockValue<T = any>(
|
||||
storeApiKeyValue(blockId, blockType, modelValue, newValue, storeValue)
|
||||
}
|
||||
|
||||
// Update the subblock store directly
|
||||
useSubBlockStore.getState().setValue(blockId, subBlockId, valueCopy)
|
||||
|
||||
// Dispatch event to trigger socket emission only (not store update)
|
||||
const event = new CustomEvent('update-subblock-value', {
|
||||
detail: {
|
||||
blockId,
|
||||
subBlockId,
|
||||
value: valueCopy,
|
||||
},
|
||||
})
|
||||
window.dispatchEvent(event)
|
||||
// Use collaborative function which handles both local store update and socket emission
|
||||
collaborativeSetSubblockValue(blockId, subBlockId, valueCopy)
|
||||
|
||||
if (triggerWorkflowUpdate) {
|
||||
useWorkflowStore.getState().triggerUpdate()
|
||||
}
|
||||
}
|
||||
},
|
||||
[blockId, subBlockId, blockType, isApiKey, storeValue, triggerWorkflowUpdate, modelValue]
|
||||
[
|
||||
blockId,
|
||||
subBlockId,
|
||||
blockType,
|
||||
isApiKey,
|
||||
storeValue,
|
||||
triggerWorkflowUpdate,
|
||||
modelValue,
|
||||
collaborativeSetSubblockValue,
|
||||
]
|
||||
)
|
||||
|
||||
// Initialize valueRef on first render
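
The selector diffs above replace direct `getValue`/`setValue` calls on the subblock store with this hook, so every write goes through `collaborativeSetSubblockValue`. A sketch of the consumption pattern; the component name and relative import path are illustrative, while the hook signature matches the definition above.

import { useSubBlockValue } from '../hooks/use-sub-block-value' // assumed relative path

function ChannelIdInput({ blockId }: { blockId: string }) {
  // Read the current subblock value; the setter updates the local store and emits
  // the collaborative/socket update in a single call.
  const [channelId, setChannelId] = useSubBlockValue<string>(blockId, 'channelId')

  return <input value={channelId ?? ''} onChange={(e) => setChannelId(e.target.value)} />
}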
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { BookOpen, Code, Info, RectangleHorizontal, RectangleVertical } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
|
||||
import { Badge } from '@/components/ui/badge'
|
||||
import { Button } from '@/components/ui/button'
|
||||
@@ -83,6 +84,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
const isActiveBlock = useExecutionStore((state) => state.activeBlockIds.has(id))
|
||||
const isActive = dataIsActive || isActiveBlock
|
||||
|
||||
// Get the current workflow ID from URL params instead of global state
|
||||
// This prevents race conditions when switching workflows rapidly
|
||||
const params = useParams()
|
||||
const currentWorkflowId = params.workflowId as string
|
||||
|
||||
const reactivateSchedule = async (scheduleId: string) => {
|
||||
try {
|
||||
const response = await fetch(`/api/schedules/${scheduleId}`, {
|
||||
@@ -94,7 +100,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
fetchScheduleInfo()
|
||||
// Use the current workflow ID from params instead of global state
|
||||
if (currentWorkflowId) {
|
||||
fetchScheduleInfo(currentWorkflowId)
|
||||
}
|
||||
} else {
|
||||
console.error('Failed to reactivate schedule')
|
||||
}
|
||||
@@ -103,11 +112,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
}
|
||||
}
|
||||
|
||||
const fetchScheduleInfo = async () => {
|
||||
const fetchScheduleInfo = async (workflowId: string) => {
|
||||
if (!workflowId) return
|
||||
|
||||
try {
|
||||
setIsLoadingScheduleInfo(true)
|
||||
const workflowId = useWorkflowRegistry.getState().activeWorkflowId
|
||||
if (!workflowId) return
|
||||
|
||||
const response = await fetch(`/api/schedules?workflowId=${workflowId}&mode=schedule`, {
|
||||
cache: 'no-store',
|
||||
@@ -176,12 +185,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (type === 'starter') {
|
||||
fetchScheduleInfo()
|
||||
if (type === 'starter' && currentWorkflowId) {
|
||||
fetchScheduleInfo(currentWorkflowId)
|
||||
} else {
|
||||
setScheduleInfo(null)
|
||||
setIsLoadingScheduleInfo(false) // Reset loading state when not a starter block
|
||||
}
|
||||
}, [type])
|
||||
|
||||
// Cleanup function to reset loading state when component unmounts or workflow changes
|
||||
return () => {
|
||||
setIsLoadingScheduleInfo(false)
|
||||
}
|
||||
}, [type, currentWorkflowId])
|
||||
|
||||
// Get webhook information for the tooltip
|
||||
useEffect(() => {
|
||||
|
||||
@@ -419,6 +419,7 @@ const WorkflowContent = React.memo(() => {
|
||||
}
|
||||
|
||||
const { type } = event.detail
|
||||
console.log('🛠️ Adding block from toolbar:', type)
|
||||
|
||||
if (!type) return
|
||||
if (type === 'connectionBlock') return
|
||||
@@ -439,32 +440,42 @@ const WorkflowContent = React.memo(() => {
|
||||
y: window.innerHeight / 2,
|
||||
})
|
||||
|
||||
// Add the container node directly to canvas with default dimensions
|
||||
addBlock(id, type, name, centerPosition, {
|
||||
width: 500,
|
||||
height: 300,
|
||||
type: type === 'loop' ? 'loopNode' : 'parallelNode',
|
||||
})
|
||||
|
||||
// Auto-connect logic for container nodes
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled) {
|
||||
const closestBlock = findClosestOutput(centerPosition)
|
||||
if (closestBlock) {
|
||||
// Get appropriate source handle
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
|
||||
addEdge({
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the container node directly to canvas with default dimensions and auto-connect edge
|
||||
addBlock(
|
||||
id,
|
||||
type,
|
||||
name,
|
||||
centerPosition,
|
||||
{
|
||||
width: 500,
|
||||
height: 300,
|
||||
type: type === 'loop' ? 'loopNode' : 'parallelNode',
|
||||
},
|
||||
undefined,
|
||||
undefined,
|
||||
autoConnectEdge
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
@@ -486,27 +497,30 @@ const WorkflowContent = React.memo(() => {
|
||||
Object.values(blocks).filter((b) => b.type === type).length + 1
|
||||
}`
|
||||
|
||||
// Add the block to the workflow
|
||||
addBlock(id, type, name, centerPosition)
|
||||
|
||||
// Auto-connect logic
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled && type !== 'starter') {
|
||||
const closestBlock = findClosestOutput(centerPosition)
|
||||
console.log('🎯 Closest block found:', closestBlock)
|
||||
if (closestBlock) {
|
||||
// Get appropriate source handle
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
|
||||
addEdge({
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
console.log('✅ Auto-connect edge created:', autoConnectEdge)
|
||||
}
|
||||
}
|
||||
|
||||
// Add the block to the workflow with auto-connect edge
|
||||
addBlock(id, type, name, centerPosition, undefined, undefined, undefined, autoConnectEdge)
|
||||
}
|
||||
|
||||
window.addEventListener('add-block-from-toolbar', handleAddBlockFromToolbar as EventListener)
|
||||
@@ -583,30 +597,40 @@ const WorkflowContent = React.memo(() => {
|
||||
// Resize the parent container to fit the new child container
|
||||
resizeLoopNodesWrapper()
|
||||
} else {
|
||||
// Add the container node directly to canvas with default dimensions
|
||||
addBlock(id, data.type, name, position, {
|
||||
width: 500,
|
||||
height: 300,
|
||||
type: data.type === 'loop' ? 'loopNode' : 'parallelNode',
|
||||
})
|
||||
|
||||
// Auto-connect the container to the closest node on the canvas
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled) {
|
||||
const closestBlock = findClosestOutput(position)
|
||||
if (closestBlock) {
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
|
||||
addEdge({
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add the container node directly to canvas with default dimensions and auto-connect edge
|
||||
addBlock(
|
||||
id,
|
||||
data.type,
|
||||
name,
|
||||
position,
|
||||
{
|
||||
width: 500,
|
||||
height: 300,
|
||||
type: data.type === 'loop' ? 'loopNode' : 'parallelNode',
|
||||
},
|
||||
undefined,
|
||||
undefined,
|
||||
autoConnectEdge
|
||||
)
|
||||
}
|
||||
|
||||
return
|
||||
@@ -706,26 +730,27 @@ const WorkflowContent = React.memo(() => {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Regular canvas drop
|
||||
addBlock(id, data.type, name, position)
|
||||
|
||||
// Regular auto-connect logic
|
||||
const isAutoConnectEnabled = useGeneralStore.getState().isAutoConnectEnabled
|
||||
let autoConnectEdge
|
||||
if (isAutoConnectEnabled && data.type !== 'starter') {
|
||||
const closestBlock = findClosestOutput(position)
|
||||
if (closestBlock) {
|
||||
const sourceHandle = determineSourceHandle(closestBlock)
|
||||
|
||||
addEdge({
|
||||
autoConnectEdge = {
|
||||
id: crypto.randomUUID(),
|
||||
source: closestBlock.id,
|
||||
target: id,
|
||||
sourceHandle,
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Regular canvas drop with auto-connect edge
|
||||
addBlock(id, data.type, name, position, undefined, undefined, undefined, autoConnectEdge)
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error dropping block:', { err })
|
||||
|
||||
@@ -15,9 +15,14 @@ import { useFolderStore } from '@/stores/folders/store'
|
||||
interface CreateMenuProps {
|
||||
onCreateWorkflow: (folderId?: string) => void
|
||||
isCollapsed?: boolean
|
||||
isCreatingWorkflow?: boolean
|
||||
}
|
||||
|
||||
export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
|
||||
export function CreateMenu({
|
||||
onCreateWorkflow,
|
||||
isCollapsed,
|
||||
isCreatingWorkflow = false,
|
||||
}: CreateMenuProps) {
|
||||
const [showFolderDialog, setShowFolderDialog] = useState(false)
|
||||
const [folderName, setFolderName] = useState('')
|
||||
const [isCreating, setIsCreating] = useState(false)
|
||||
@@ -73,6 +78,7 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
|
||||
onClick={handleCreateWorkflow}
|
||||
onMouseEnter={() => setIsHoverOpen(true)}
|
||||
onMouseLeave={() => setIsHoverOpen(false)}
|
||||
disabled={isCreatingWorkflow}
|
||||
>
|
||||
<Plus
|
||||
className={cn(
|
||||
@@ -101,11 +107,17 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
|
||||
onCloseAutoFocus={(e) => e.preventDefault()}
|
||||
>
|
||||
<button
|
||||
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
|
||||
className={cn(
|
||||
'flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors',
|
||||
isCreatingWorkflow
|
||||
? 'cursor-not-allowed opacity-50'
|
||||
: 'hover:bg-accent hover:text-accent-foreground'
|
||||
)}
|
||||
onClick={handleCreateWorkflow}
|
||||
disabled={isCreatingWorkflow}
|
||||
>
|
||||
<File className='h-4 w-4' />
|
||||
New Workflow
|
||||
{isCreatingWorkflow ? 'Creating...' : 'New Workflow'}
|
||||
</button>
|
||||
<button
|
||||
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
|
||||
|
||||
@@ -41,6 +41,9 @@ export function Sidebar() {
|
||||
const { isPending: sessionLoading } = useSession()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const isLoading = workflowsLoading || sessionLoading
|
||||
|
||||
// Add state to prevent multiple simultaneous workflow creations
|
||||
const [isCreatingWorkflow, setIsCreatingWorkflow] = useState(false)
|
||||
const router = useRouter()
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
@@ -108,7 +111,14 @@ export function Sidebar() {
|
||||
|
||||
// Create workflow handler
|
||||
const handleCreateWorkflow = async (folderId?: string) => {
|
||||
// Prevent multiple simultaneous workflow creations
|
||||
if (isCreatingWorkflow) {
|
||||
logger.info('Workflow creation already in progress, ignoring request')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
setIsCreatingWorkflow(true)
|
||||
const id = await createWorkflow({
|
||||
workspaceId: workspaceId || undefined,
|
||||
folderId: folderId || undefined,
|
||||
@@ -116,6 +126,8 @@ export function Sidebar() {
|
||||
router.push(`/workspace/${workspaceId}/w/${id}`)
|
||||
} catch (error) {
|
||||
logger.error('Error creating workflow:', error)
|
||||
} finally {
|
||||
setIsCreatingWorkflow(false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -173,7 +185,11 @@ export function Sidebar() {
|
||||
{isLoading ? <Skeleton className='h-4 w-16' /> : 'Workflows'}
|
||||
</h2>
|
||||
{!isCollapsed && !isLoading && (
|
||||
<CreateMenu onCreateWorkflow={handleCreateWorkflow} isCollapsed={false} />
|
||||
<CreateMenu
|
||||
onCreateWorkflow={handleCreateWorkflow}
|
||||
isCollapsed={false}
|
||||
isCreatingWorkflow={isCreatingWorkflow}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<FolderTree
|
||||
|
||||
@@ -6,13 +6,13 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
name: 'Knowledge',
|
||||
description: 'Use vector search',
|
||||
longDescription:
|
||||
'Perform semantic vector search across one or more knowledge bases or upload new chunks to documents. Uses advanced AI embeddings to understand meaning and context for search operations.',
|
||||
'Perform semantic vector search across knowledge bases, upload individual chunks to existing documents, or create new documents from text content. Uses advanced AI embeddings to understand meaning and context for search operations.',
|
||||
bgColor: '#00B0B0',
|
||||
icon: PackageSearchIcon,
|
||||
category: 'blocks',
|
||||
docsLink: 'https://docs.simstudio.ai/blocks/knowledge',
|
||||
tools: {
|
||||
access: ['knowledge_search', 'knowledge_upload_chunk'],
|
||||
access: ['knowledge_search', 'knowledge_upload_chunk', 'knowledge_create_document'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
@@ -20,6 +20,8 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
return 'knowledge_search'
|
||||
case 'upload_chunk':
|
||||
return 'knowledge_upload_chunk'
|
||||
case 'create_document':
|
||||
return 'knowledge_create_document'
|
||||
default:
|
||||
return 'knowledge_search'
|
||||
}
|
||||
@@ -53,6 +55,7 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
options: [
|
||||
{ label: 'Search', id: 'search' },
|
||||
{ label: 'Upload Chunk', id: 'upload_chunk' },
|
||||
{ label: 'Create Document', id: 'create_document' },
|
||||
],
|
||||
value: () => 'search',
|
||||
},
|
||||
@@ -72,7 +75,7 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
layout: 'full',
|
||||
placeholder: 'Select knowledge base',
|
||||
multiSelect: false,
|
||||
condition: { field: 'operation', value: 'upload_chunk' },
|
||||
condition: { field: 'operation', value: ['upload_chunk', 'create_document'] },
|
||||
},
|
||||
{
|
||||
id: 'query',
|
||||
@@ -107,5 +110,22 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
rows: 6,
|
||||
condition: { field: 'operation', value: 'upload_chunk' },
|
||||
},
|
||||
{
|
||||
id: 'name',
|
||||
title: 'Document Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter document name',
|
||||
condition: { field: 'operation', value: ['create_document'] },
|
||||
},
|
||||
{
|
||||
id: 'content',
|
||||
title: 'Document Content',
|
||||
type: 'long-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter the document content',
|
||||
rows: 6,
|
||||
condition: { field: 'operation', value: ['create_document'] },
|
||||
},
|
||||
],
|
||||
}
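
Taken together, the `tools.config.tool` resolver and the conditional subblocks above define the contract for the new operation: `create_document` needs `knowledgeBaseId`, `name`, and `content`. A standalone sketch of that mapping; the type and function names are illustrative.

type KnowledgeBlockParams = {
  operation: 'search' | 'upload_chunk' | 'create_document'
  knowledgeBaseId?: string
  name?: string
  content?: string
}

// Mirrors the switch in the block config above.
function resolveKnowledgeTool(params: KnowledgeBlockParams): string {
  switch (params.operation) {
    case 'search':
      return 'knowledge_search'
    case 'upload_chunk':
      return 'knowledge_upload_chunk'
    case 'create_document':
      return 'knowledge_create_document'
    default:
      return 'knowledge_search'
  }
}

// resolveKnowledgeTool({
//   operation: 'create_document',
//   knowledgeBaseId: 'kb_123', // hypothetical id
//   name: 'Release notes',
//   content: 'Plain text that will be chunked and embedded',
// }) === 'knowledge_create_document'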
|
||||
|
||||
apps/sim/db/migrations/0048_flawless_ultron.sql (new file, 9 lines)
@@ -0,0 +1,9 @@
ALTER TABLE "user_stats" ADD COLUMN "current_usage_limit" numeric DEFAULT '5' NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "usage_limit_set_by" text;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "usage_limit_updated_at" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "current_period_cost" numeric DEFAULT '0' NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "billing_period_start" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "billing_period_end" timestamp;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "last_period_cost" numeric DEFAULT '0';--> statement-breakpoint
CREATE INDEX "subscription_reference_status_idx" ON "subscription" USING btree ("reference_id","status");--> statement-breakpoint
ALTER TABLE "subscription" ADD CONSTRAINT "check_enterprise_metadata" CHECK (plan != 'enterprise' OR (metadata IS NOT NULL AND (metadata->>'perSeatAllowance' IS NOT NULL OR metadata->>'totalAllowance' IS NOT NULL)));
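
For reference, an application-side sketch of the invariants this migration encodes: the new `user_stats` columns track a per-period spend against a usage limit, and `check_enterprise_metadata` requires enterprise subscriptions to carry either a per-seat or a total allowance. The helpers below are illustrative only and do not exist in the codebase.

interface UserStatsUsage {
  currentUsageLimit: string // numeric columns typically arrive as strings from the pg driver
  currentPeriodCost: string
}

function remainingUsageBudget(stats: UserStatsUsage): number {
  return Number(stats.currentUsageLimit) - Number(stats.currentPeriodCost)
}

// Mirrors check_enterprise_metadata: non-enterprise plans always pass; enterprise plans
// must provide perSeatAllowance or totalAllowance in their metadata.
function satisfiesEnterpriseMetadataCheck(
  plan: string,
  metadata: Record<string, unknown> | null
): boolean {
  if (plan !== 'enterprise') return true
  return metadata != null && (metadata.perSeatAllowance != null || metadata.totalAllowance != null)
}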
|
||||
apps/sim/db/migrations/0049_fancy_cardiac.sql (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
CREATE TABLE "workflow_execution_blocks" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"execution_id" text NOT NULL,
|
||||
"workflow_id" text NOT NULL,
|
||||
"block_id" text NOT NULL,
|
||||
"block_name" text,
|
||||
"block_type" text NOT NULL,
|
||||
"started_at" timestamp NOT NULL,
|
||||
"ended_at" timestamp,
|
||||
"duration_ms" integer,
|
||||
"status" text NOT NULL,
|
||||
"error_message" text,
|
||||
"error_stack_trace" text,
|
||||
"input_data" jsonb,
|
||||
"output_data" jsonb,
|
||||
"cost_input" numeric(10, 6),
|
||||
"cost_output" numeric(10, 6),
|
||||
"cost_total" numeric(10, 6),
|
||||
"tokens_prompt" integer,
|
||||
"tokens_completion" integer,
|
||||
"tokens_total" integer,
|
||||
"model_used" text,
|
||||
"metadata" jsonb,
|
||||
"created_at" timestamp DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "workflow_execution_logs" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"workflow_id" text NOT NULL,
|
||||
"execution_id" text NOT NULL,
|
||||
"state_snapshot_id" text NOT NULL,
|
||||
"level" text NOT NULL,
|
||||
"message" text NOT NULL,
|
||||
"trigger" text NOT NULL,
|
||||
"started_at" timestamp NOT NULL,
|
||||
"ended_at" timestamp,
|
||||
"total_duration_ms" integer,
|
||||
"block_count" integer DEFAULT 0 NOT NULL,
|
||||
"success_count" integer DEFAULT 0 NOT NULL,
|
||||
"error_count" integer DEFAULT 0 NOT NULL,
|
||||
"skipped_count" integer DEFAULT 0 NOT NULL,
|
||||
"total_cost" numeric(10, 6),
|
||||
"total_input_cost" numeric(10, 6),
|
||||
"total_output_cost" numeric(10, 6),
|
||||
"total_tokens" integer,
|
||||
"metadata" jsonb DEFAULT '{}' NOT NULL,
|
||||
"created_at" timestamp DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "workflow_execution_snapshots" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"workflow_id" text NOT NULL,
|
||||
"state_hash" text NOT NULL,
|
||||
"state_data" jsonb NOT NULL,
|
||||
"created_at" timestamp DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_blocks" ADD CONSTRAINT "workflow_execution_blocks_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_logs" ADD CONSTRAINT "workflow_execution_logs_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_logs" ADD CONSTRAINT "workflow_execution_logs_state_snapshot_id_workflow_execution_snapshots_id_fk" FOREIGN KEY ("state_snapshot_id") REFERENCES "public"."workflow_execution_snapshots"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_snapshots" ADD CONSTRAINT "workflow_execution_snapshots_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_execution_id_idx" ON "workflow_execution_blocks" USING btree ("execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_workflow_id_idx" ON "workflow_execution_blocks" USING btree ("workflow_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_block_id_idx" ON "workflow_execution_blocks" USING btree ("block_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_status_idx" ON "workflow_execution_blocks" USING btree ("status");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_duration_idx" ON "workflow_execution_blocks" USING btree ("duration_ms");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_cost_idx" ON "workflow_execution_blocks" USING btree ("cost_total");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_workflow_execution_idx" ON "workflow_execution_blocks" USING btree ("workflow_id","execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_execution_status_idx" ON "workflow_execution_blocks" USING btree ("execution_id","status");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_started_at_idx" ON "workflow_execution_blocks" USING btree ("started_at");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_workflow_id_idx" ON "workflow_execution_logs" USING btree ("workflow_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_execution_id_idx" ON "workflow_execution_logs" USING btree ("execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_trigger_idx" ON "workflow_execution_logs" USING btree ("trigger");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_level_idx" ON "workflow_execution_logs" USING btree ("level");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_started_at_idx" ON "workflow_execution_logs" USING btree ("started_at");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_cost_idx" ON "workflow_execution_logs" USING btree ("total_cost");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_duration_idx" ON "workflow_execution_logs" USING btree ("total_duration_ms");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "workflow_execution_logs_execution_id_unique" ON "workflow_execution_logs" USING btree ("execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_snapshots_workflow_id_idx" ON "workflow_execution_snapshots" USING btree ("workflow_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_snapshots_hash_idx" ON "workflow_execution_snapshots" USING btree ("state_hash");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "workflow_snapshots_workflow_hash_idx" ON "workflow_execution_snapshots" USING btree ("workflow_id","state_hash");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_snapshots_created_at_idx" ON "workflow_execution_snapshots" USING btree ("created_at");
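
The unique (workflow_id, state_hash) index above lets identical workflow states be stored once and shared by many execution logs through state_snapshot_id. A sketch of the write path that index supports; the hashing helper and the db client interface are hypothetical, not the application's actual implementation.

import { createHash } from 'crypto'

function hashWorkflowState(state: unknown): string {
  // A stable stringification would be needed in practice; JSON.stringify keeps the sketch short.
  return createHash('sha256').update(JSON.stringify(state)).digest('hex')
}

async function getOrCreateSnapshot(
  db: {
    findSnapshot: (workflowId: string, stateHash: string) => Promise<{ id: string } | null>
    insertSnapshot: (row: { workflowId: string; stateHash: string; stateData: unknown }) => Promise<{ id: string }>
  },
  workflowId: string,
  state: unknown
): Promise<string> {
  const stateHash = hashWorkflowState(state)
  // Reuse an existing snapshot when the same workflow state was already captured.
  const existing = await db.findSnapshot(workflowId, stateHash)
  if (existing) return existing.id
  const created = await db.insertSnapshot({ workflowId, stateHash, stateData: state })
  return created.id
}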
|
||||
apps/sim/db/migrations/meta/0048_snapshot.json (new file, 3751 lines; diff suppressed because it is too large)
apps/sim/db/migrations/meta/0049_snapshot.json (new file, 4461 lines; diff suppressed because it is too large)
@@ -330,6 +330,20 @@
|
||||
"when": 1750794256278,
|
||||
"tag": "0047_new_triathlon",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 48,
|
||||
"version": "7",
|
||||
"when": 1751422991828,
|
||||
"tag": "0048_flawless_ultron",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 49,
|
||||
"version": "7",
|
||||
"when": 1751430703326,
|
||||
"tag": "0049_fancy_cardiac",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -116,6 +116,7 @@ export const workflow = pgTable('workflow', {
|
||||
folderId: text('folder_id').references(() => workflowFolder.id, { onDelete: 'set null' }),
|
||||
name: text('name').notNull(),
|
||||
description: text('description'),
|
||||
// DEPRECATED: Use normalized tables (workflow_blocks, workflow_edges, workflow_subflows) instead
|
||||
state: json('state').notNull(),
|
||||
color: text('color').notNull().default('#3972F6'),
|
||||
lastSynced: timestamp('last_synced').notNull(),
|
||||
@@ -132,58 +133,43 @@ export const workflow = pgTable('workflow', {
|
||||
marketplaceData: json('marketplace_data'),
|
||||
})
|
||||
|
||||
// New normalized workflow tables
|
||||
export const workflowBlocks = pgTable(
|
||||
'workflow_blocks',
|
||||
{
|
||||
// Primary identification
|
||||
id: text('id').primaryKey(), // Block UUID from the current JSON structure
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
|
||||
// Block properties (from current BlockState interface)
|
||||
type: text('type').notNull(), // e.g., 'starter', 'agent', 'api', 'function'
|
||||
name: text('name').notNull(), // Display name of the block
|
||||
type: text('type').notNull(), // 'starter', 'agent', 'api', 'function'
|
||||
name: text('name').notNull(),
|
||||
|
||||
// Position coordinates (from position.x, position.y)
|
||||
positionX: decimal('position_x').notNull(), // X coordinate on canvas
|
||||
positionY: decimal('position_y').notNull(), // Y coordinate on canvas
|
||||
positionX: decimal('position_x').notNull(),
|
||||
positionY: decimal('position_y').notNull(),
|
||||
|
||||
// Block behavior flags (from current BlockState)
|
||||
enabled: boolean('enabled').notNull().default(true), // Whether block is active
|
||||
horizontalHandles: boolean('horizontal_handles').notNull().default(true), // UI layout preference
|
||||
isWide: boolean('is_wide').notNull().default(false), // Whether block uses wide layout
|
||||
advancedMode: boolean('advanced_mode').notNull().default(false), // Whether block is in advanced mode
|
||||
height: decimal('height').notNull().default('0'), // Custom height override
|
||||
enabled: boolean('enabled').notNull().default(true),
|
||||
horizontalHandles: boolean('horizontal_handles').notNull().default(true),
|
||||
isWide: boolean('is_wide').notNull().default(false),
|
||||
advancedMode: boolean('advanced_mode').notNull().default(false),
|
||||
height: decimal('height').notNull().default('0'),
|
||||
|
||||
// Block data (keeping JSON for flexibility as current system does)
|
||||
subBlocks: jsonb('sub_blocks').notNull().default('{}'), // All subblock configurations
|
||||
outputs: jsonb('outputs').notNull().default('{}'), // Output type definitions
|
||||
data: jsonb('data').default('{}'), // Additional block-specific data
|
||||
subBlocks: jsonb('sub_blocks').notNull().default('{}'),
|
||||
outputs: jsonb('outputs').notNull().default('{}'),
|
||||
data: jsonb('data').default('{}'),
|
||||
|
||||
// Hierarchy support (for loop/parallel child blocks)
|
||||
parentId: text('parent_id'), // Self-reference handled by foreign key constraint in migration
|
||||
extent: text('extent'), // 'parent' or null - for ReactFlow parent constraint
|
||||
parentId: text('parent_id'),
|
||||
extent: text('extent'), // 'parent' or null
|
||||
|
||||
// Timestamps
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
updatedAt: timestamp('updated_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
// Primary access pattern: get all blocks for a workflow
|
||||
workflowIdIdx: index('workflow_blocks_workflow_id_idx').on(table.workflowId),
|
||||
|
||||
// For finding child blocks of a parent (loop/parallel containers)
|
||||
parentIdIdx: index('workflow_blocks_parent_id_idx').on(table.parentId),
|
||||
|
||||
// Composite index for efficient parent-child queries
|
||||
workflowParentIdx: index('workflow_blocks_workflow_parent_idx').on(
|
||||
table.workflowId,
|
||||
table.parentId
|
||||
),
|
||||
|
||||
// For block type filtering/analytics
|
||||
workflowTypeIdx: index('workflow_blocks_workflow_type_idx').on(table.workflowId, table.type),
|
||||
})
|
||||
)
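
A read-path sketch against the normalized table above: load a workflow's top-level blocks, then the children of a loop/parallel container, which is the query the workflow_blocks_workflow_parent_idx composite index serves. The db and schema import paths are assumptions; eq, and, and isNull are standard drizzle-orm operators.

import { and, eq, isNull } from 'drizzle-orm'
import { db } from '@/db' // assumed path to the drizzle client
import { workflowBlocks } from '@/db/schema' // assumed export location

// Top-level blocks are rows with no parent container.
async function loadTopLevelBlocks(workflowId: string) {
  return db
    .select()
    .from(workflowBlocks)
    .where(and(eq(workflowBlocks.workflowId, workflowId), isNull(workflowBlocks.parentId)))
}

// Children of a loop/parallel container, scoped to the same workflow.
async function loadContainerChildren(workflowId: string, containerId: string) {
  return db
    .select()
    .from(workflowBlocks)
    .where(and(eq(workflowBlocks.workflowId, workflowId), eq(workflowBlocks.parentId, containerId)))
}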
|
||||
@@ -191,36 +177,26 @@ export const workflowBlocks = pgTable(
|
||||
export const workflowEdges = pgTable(
|
||||
'workflow_edges',
|
||||
{
|
||||
// Primary identification
|
||||
id: text('id').primaryKey(), // Edge UUID from ReactFlow
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
|
||||
// Connection definition (from ReactFlow Edge interface)
|
||||
sourceBlockId: text('source_block_id')
|
||||
.notNull()
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }), // Source block ID
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }),
|
||||
targetBlockId: text('target_block_id')
|
||||
.notNull()
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }), // Target block ID
|
||||
sourceHandle: text('source_handle'), // Specific output handle (optional)
|
||||
targetHandle: text('target_handle'), // Specific input handle (optional)
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }),
|
||||
sourceHandle: text('source_handle'),
|
||||
targetHandle: text('target_handle'),
|
||||
|
||||
// Timestamps
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
// Primary access pattern: get all edges for a workflow
|
||||
workflowIdIdx: index('workflow_edges_workflow_id_idx').on(table.workflowId),
|
||||
|
||||
// For finding outgoing connections from a block
|
||||
sourceBlockIdx: index('workflow_edges_source_block_idx').on(table.sourceBlockId),
|
||||
|
||||
// For finding incoming connections to a block
|
||||
targetBlockIdx: index('workflow_edges_target_block_idx').on(table.targetBlockId),
|
||||
|
||||
// For comprehensive workflow topology queries
|
||||
workflowSourceIdx: index('workflow_edges_workflow_source_idx').on(
|
||||
table.workflowId,
|
||||
table.sourceBlockId
|
||||
@@ -235,25 +211,19 @@ export const workflowEdges = pgTable(
|
||||
export const workflowSubflows = pgTable(
|
||||
'workflow_subflows',
|
||||
{
|
||||
// Primary identification
|
||||
id: text('id').primaryKey(), // Subflow UUID (currently loop/parallel ID)
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
|
||||
// Subflow type and configuration
|
||||
type: text('type').notNull(), // 'loop' or 'parallel' (extensible for future types)
|
||||
config: jsonb('config').notNull().default('{}'), // Type-specific configuration
|
||||
type: text('type').notNull(), // 'loop' or 'parallel'
|
||||
config: jsonb('config').notNull().default('{}'),
|
||||
|
||||
// Timestamps
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
updatedAt: timestamp('updated_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
// Primary access pattern: get all subflows for a workflow
|
||||
workflowIdIdx: index('workflow_subflows_workflow_id_idx').on(table.workflowId),
|
||||
|
||||
// For filtering by subflow type
|
||||
workflowTypeIdx: index('workflow_subflows_workflow_type_idx').on(table.workflowId, table.type),
|
||||
})
|
||||
)
|
||||
@@ -272,14 +242,136 @@ export const workflowLogs = pgTable('workflow_logs', {
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
executionId: text('execution_id'),
|
||||
level: text('level').notNull(), // e.g. "info", "error", etc.
|
||||
level: text('level').notNull(), // "info", "error", etc.
|
||||
message: text('message').notNull(),
|
||||
duration: text('duration'), // Store as text to allow 'NA' for errors
|
||||
trigger: text('trigger'), // e.g. "api", "schedule", "manual"
|
||||
trigger: text('trigger'), // "api", "schedule", "manual"
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
metadata: json('metadata'), // Optional JSON field for storing additional context like tool calls
|
||||
metadata: json('metadata'),
|
||||
})
|
||||
|
||||
export const workflowExecutionSnapshots = pgTable(
  'workflow_execution_snapshots',
  {
    id: text('id').primaryKey(),
    workflowId: text('workflow_id')
      .notNull()
      .references(() => workflow.id, { onDelete: 'cascade' }),
    stateHash: text('state_hash').notNull(),
    stateData: jsonb('state_data').notNull(),
    createdAt: timestamp('created_at').notNull().defaultNow(),
  },
  (table) => ({
    workflowIdIdx: index('workflow_snapshots_workflow_id_idx').on(table.workflowId),
    stateHashIdx: index('workflow_snapshots_hash_idx').on(table.stateHash),
    workflowHashUnique: uniqueIndex('workflow_snapshots_workflow_hash_idx').on(
      table.workflowId,
      table.stateHash
    ),
    createdAtIdx: index('workflow_snapshots_created_at_idx').on(table.createdAt),
  })
)
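The `state_hash` column together with the `workflow_snapshots_workflow_hash_idx` unique index lets identical workflow states be deduplicated per workflow. The actual hashing used by the service is not shown in this diff, so the canonicalization below is an assumption; it is only a minimal sketch of how a deterministic hash over a JSON state could be produced.

```typescript
import { createHash } from 'crypto'

// Illustrative only: recursively sort object keys so the hash is key-order independent.
function canonicalize(value: unknown): unknown {
  if (Array.isArray(value)) return value.map(canonicalize)
  if (value && typeof value === 'object') {
    return Object.keys(value as Record<string, unknown>)
      .sort()
      .reduce<Record<string, unknown>>((acc, key) => {
        acc[key] = canonicalize((value as Record<string, unknown>)[key])
        return acc
      }, {})
  }
  return value
}

// Hypothetical computeStateHash: SHA-256 over the canonical JSON form of the state.
function computeStateHash(state: unknown): string {
  return createHash('sha256').update(JSON.stringify(canonicalize(state))).digest('hex')
}
```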
|
||||
|
||||
export const workflowExecutionLogs = pgTable(
|
||||
'workflow_execution_logs',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
executionId: text('execution_id').notNull(),
|
||||
stateSnapshotId: text('state_snapshot_id')
|
||||
.notNull()
|
||||
.references(() => workflowExecutionSnapshots.id),
|
||||
|
||||
level: text('level').notNull(), // 'info', 'error'
|
||||
message: text('message').notNull(),
|
||||
trigger: text('trigger').notNull(), // 'api', 'webhook', 'schedule', 'manual', 'chat'
|
||||
|
||||
startedAt: timestamp('started_at').notNull(),
|
||||
endedAt: timestamp('ended_at'),
|
||||
totalDurationMs: integer('total_duration_ms'),
|
||||
|
||||
blockCount: integer('block_count').notNull().default(0),
|
||||
successCount: integer('success_count').notNull().default(0),
|
||||
errorCount: integer('error_count').notNull().default(0),
|
||||
skippedCount: integer('skipped_count').notNull().default(0),
|
||||
|
||||
totalCost: decimal('total_cost', { precision: 10, scale: 6 }),
|
||||
totalInputCost: decimal('total_input_cost', { precision: 10, scale: 6 }),
|
||||
totalOutputCost: decimal('total_output_cost', { precision: 10, scale: 6 }),
|
||||
totalTokens: integer('total_tokens'),
|
||||
|
||||
metadata: jsonb('metadata').notNull().default('{}'),
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
workflowIdIdx: index('workflow_execution_logs_workflow_id_idx').on(table.workflowId),
|
||||
executionIdIdx: index('workflow_execution_logs_execution_id_idx').on(table.executionId),
|
||||
triggerIdx: index('workflow_execution_logs_trigger_idx').on(table.trigger),
|
||||
levelIdx: index('workflow_execution_logs_level_idx').on(table.level),
|
||||
startedAtIdx: index('workflow_execution_logs_started_at_idx').on(table.startedAt),
|
||||
costIdx: index('workflow_execution_logs_cost_idx').on(table.totalCost),
|
||||
durationIdx: index('workflow_execution_logs_duration_idx').on(table.totalDurationMs),
|
||||
executionIdUnique: uniqueIndex('workflow_execution_logs_execution_id_unique').on(
|
||||
table.executionId
|
||||
),
|
||||
})
|
||||
)
|
||||
|
||||
export const workflowExecutionBlocks = pgTable(
|
||||
'workflow_execution_blocks',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
executionId: text('execution_id').notNull(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
blockId: text('block_id').notNull(),
|
||||
blockName: text('block_name'),
|
||||
blockType: text('block_type').notNull(),
|
||||
|
||||
startedAt: timestamp('started_at').notNull(),
|
||||
endedAt: timestamp('ended_at'),
|
||||
durationMs: integer('duration_ms'),
|
||||
|
||||
status: text('status').notNull(), // 'success', 'error', 'skipped'
|
||||
errorMessage: text('error_message'),
|
||||
errorStackTrace: text('error_stack_trace'),
|
||||
|
||||
inputData: jsonb('input_data'),
|
||||
outputData: jsonb('output_data'),
|
||||
|
||||
costInput: decimal('cost_input', { precision: 10, scale: 6 }),
|
||||
costOutput: decimal('cost_output', { precision: 10, scale: 6 }),
|
||||
costTotal: decimal('cost_total', { precision: 10, scale: 6 }),
|
||||
tokensPrompt: integer('tokens_prompt'),
|
||||
tokensCompletion: integer('tokens_completion'),
|
||||
tokensTotal: integer('tokens_total'),
|
||||
modelUsed: text('model_used'),
|
||||
|
||||
metadata: jsonb('metadata'),
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
executionIdIdx: index('execution_blocks_execution_id_idx').on(table.executionId),
|
||||
workflowIdIdx: index('execution_blocks_workflow_id_idx').on(table.workflowId),
|
||||
blockIdIdx: index('execution_blocks_block_id_idx').on(table.blockId),
|
||||
statusIdx: index('execution_blocks_status_idx').on(table.status),
|
||||
durationIdx: index('execution_blocks_duration_idx').on(table.durationMs),
|
||||
costIdx: index('execution_blocks_cost_idx').on(table.costTotal),
|
||||
workflowExecutionIdx: index('execution_blocks_workflow_execution_idx').on(
|
||||
table.workflowId,
|
||||
table.executionId
|
||||
),
|
||||
executionStatusIdx: index('execution_blocks_execution_status_idx').on(
|
||||
table.executionId,
|
||||
table.status
|
||||
),
|
||||
startedAtIdx: index('execution_blocks_started_at_idx').on(table.startedAt),
|
||||
})
|
||||
)
|
||||
|
||||
export const environment = pgTable('environment', {
|
||||
id: text('id').primaryKey(), // Use the user id as the key
|
||||
userId: text('user_id')
|
||||
@@ -400,6 +492,14 @@ export const userStats = pgTable('user_stats', {
|
||||
totalChatExecutions: integer('total_chat_executions').notNull().default(0),
|
||||
totalTokensUsed: integer('total_tokens_used').notNull().default(0),
|
||||
totalCost: decimal('total_cost').notNull().default('0'),
|
||||
currentUsageLimit: decimal('current_usage_limit').notNull().default('5'), // Default $5 for free plan
|
||||
usageLimitSetBy: text('usage_limit_set_by'), // User ID who set the limit (for team admin tracking)
|
||||
usageLimitUpdatedAt: timestamp('usage_limit_updated_at').defaultNow(),
|
||||
// Billing period tracking
|
||||
currentPeriodCost: decimal('current_period_cost').notNull().default('0'), // Usage in current billing period
|
||||
billingPeriodStart: timestamp('billing_period_start').defaultNow(), // When current billing period started
|
||||
billingPeriodEnd: timestamp('billing_period_end'), // When current billing period ends
|
||||
lastPeriodCost: decimal('last_period_cost').default('0'), // Usage from previous billing period
|
||||
lastActive: timestamp('last_active').notNull().defaultNow(),
|
||||
})
|
||||
|
||||
@@ -415,21 +515,34 @@ export const customTools = pgTable('custom_tools', {
|
||||
updatedAt: timestamp('updated_at').notNull().defaultNow(),
|
||||
})
|
||||
|
||||
export const subscription = pgTable('subscription', {
|
||||
id: text('id').primaryKey(),
|
||||
plan: text('plan').notNull(),
|
||||
referenceId: text('reference_id').notNull(),
|
||||
stripeCustomerId: text('stripe_customer_id'),
|
||||
stripeSubscriptionId: text('stripe_subscription_id'),
|
||||
status: text('status'),
|
||||
periodStart: timestamp('period_start'),
|
||||
periodEnd: timestamp('period_end'),
|
||||
cancelAtPeriodEnd: boolean('cancel_at_period_end'),
|
||||
seats: integer('seats'),
|
||||
trialStart: timestamp('trial_start'),
|
||||
trialEnd: timestamp('trial_end'),
|
||||
metadata: json('metadata'),
|
||||
})
|
||||
export const subscription = pgTable(
|
||||
'subscription',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
plan: text('plan').notNull(),
|
||||
referenceId: text('reference_id').notNull(),
|
||||
stripeCustomerId: text('stripe_customer_id'),
|
||||
stripeSubscriptionId: text('stripe_subscription_id'),
|
||||
status: text('status'),
|
||||
periodStart: timestamp('period_start'),
|
||||
periodEnd: timestamp('period_end'),
|
||||
cancelAtPeriodEnd: boolean('cancel_at_period_end'),
|
||||
seats: integer('seats'),
|
||||
trialStart: timestamp('trial_start'),
|
||||
trialEnd: timestamp('trial_end'),
|
||||
metadata: json('metadata'),
|
||||
},
|
||||
(table) => ({
|
||||
referenceStatusIdx: index('subscription_reference_status_idx').on(
|
||||
table.referenceId,
|
||||
table.status
|
||||
),
|
||||
enterpriseMetadataCheck: check(
|
||||
'check_enterprise_metadata',
|
||||
sql`plan != 'enterprise' OR (metadata IS NOT NULL AND (metadata->>'perSeatAllowance' IS NOT NULL OR metadata->>'totalAllowance' IS NOT NULL))`
|
||||
),
|
||||
})
|
||||
)
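The `check_enterprise_metadata` constraint only bites for enterprise plans: a row with `plan = 'enterprise'` must carry either a `perSeatAllowance` or a `totalAllowance` key in its JSON metadata, while other plans may leave metadata empty. A hypothetical row that would satisfy the constraint (all values below are illustrative, not taken from the diff):

```typescript
// Illustrative values only.
const enterpriseSubscription = {
  id: 'sub_123',
  plan: 'enterprise',
  referenceId: 'org_456',
  status: 'active',
  seats: 25,
  metadata: { perSeatAllowance: 40 }, // or { totalAllowance: 1000 }
}
```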
|
||||
|
||||
export const chat = pgTable(
|
||||
'chat',
|
||||
@@ -484,7 +597,7 @@ export const member = pgTable('member', {
|
||||
organizationId: text('organization_id')
|
||||
.notNull()
|
||||
.references(() => organization.id, { onDelete: 'cascade' }),
|
||||
role: text('role').notNull(),
|
||||
role: text('role').notNull(), // 'admin' or 'member' - team-level permissions only
|
||||
createdAt: timestamp('created_at').defaultNow().notNull(),
|
||||
})
|
||||
|
||||
|
||||
@@ -77,6 +77,8 @@ describe('FunctionBlockHandler', () => {
|
||||
code: inputs.code,
|
||||
timeout: inputs.timeout,
|
||||
envVars: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
_context: { workflowId: mockContext.workflowId },
|
||||
}
|
||||
const expectedOutput: BlockOutput = { response: { result: 'Success' } }
|
||||
@@ -100,6 +102,8 @@ describe('FunctionBlockHandler', () => {
|
||||
code: expectedCode,
|
||||
timeout: inputs.timeout,
|
||||
envVars: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
_context: { workflowId: mockContext.workflowId },
|
||||
}
|
||||
const expectedOutput: BlockOutput = { response: { result: 'Success' } }
|
||||
@@ -116,6 +120,8 @@ describe('FunctionBlockHandler', () => {
|
||||
code: inputs.code,
|
||||
timeout: 5000, // Default timeout
|
||||
envVars: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
_context: { workflowId: mockContext.workflowId },
|
||||
}
|
||||
|
||||
|
||||
@@ -23,12 +23,29 @@ export class FunctionBlockHandler implements BlockHandler {
|
||||
? inputs.code.map((c: { content: string }) => c.content).join('\n')
|
||||
: inputs.code
|
||||
|
||||
// Extract block data for variable resolution
|
||||
const blockData: Record<string, any> = {}
|
||||
const blockNameMapping: Record<string, string> = {}
|
||||
|
||||
for (const [blockId, blockState] of context.blockStates.entries()) {
|
||||
if (blockState.output) {
|
||||
blockData[blockId] = blockState.output
|
||||
|
||||
// Try to find the block name from the workflow
|
||||
const workflowBlock = context.workflow?.blocks?.find((b) => b.id === blockId)
|
||||
if (workflowBlock?.metadata?.name) {
|
||||
blockNameMapping[workflowBlock.metadata.name] = blockId
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Directly use the function_execute tool which calls the API route
|
||||
logger.info(`Executing function block via API route: ${block.id}`)
|
||||
const result = await executeTool('function_execute', {
|
||||
code: codeContent,
|
||||
timeout: inputs.timeout || 5000,
|
||||
envVars: context.environmentVariables || {},
|
||||
blockData: blockData, // Pass block data for variable resolution
|
||||
blockNameMapping: blockNameMapping, // Pass block name to ID mapping
|
||||
_context: { workflowId: context.workflowId },
|
||||
})
|
||||
|
||||
|
||||
@@ -145,7 +145,7 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
|
||||
logger.info(`Loaded child workflow: ${workflowData.name} (${workflowId})`)
|
||||
|
||||
// Extract the workflow state
|
||||
// Extract the workflow state (API returns normalized data in state field)
|
||||
const workflowState = workflowData.state
|
||||
|
||||
if (!workflowState || !workflowState.blocks) {
|
||||
@@ -153,7 +153,7 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
return null
|
||||
}
|
||||
|
||||
// Use blocks directly since DB format should match UI format
|
||||
// Use blocks directly since API returns data from normalized tables
|
||||
const serializedWorkflow = this.serializer.serializeWorkflow(
|
||||
workflowState.blocks,
|
||||
workflowState.edges || [],
|
||||
|
||||
@@ -668,4 +668,238 @@ describe('Executor', () => {
|
||||
expect(createContextSpy).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Dependency checking logic tests
|
||||
*/
|
||||
describe('dependency checking', () => {
|
||||
test('should handle multi-input blocks with inactive sources correctly', () => {
|
||||
// Create workflow with router -> multiple APIs -> single agent
|
||||
const routerWorkflow = {
|
||||
blocks: [
|
||||
{
|
||||
id: 'start',
|
||||
metadata: { id: 'starter', name: 'Start' },
|
||||
config: { params: {} },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'router',
|
||||
metadata: { id: 'router', name: 'Router' },
|
||||
config: { params: { prompt: 'test', model: 'gpt-4' } },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api1',
|
||||
metadata: { id: 'api', name: 'API 1' },
|
||||
config: { params: { url: 'http://api1.com', method: 'GET' } },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api2',
|
||||
metadata: { id: 'api', name: 'API 2' },
|
||||
config: { params: { url: 'http://api2.com', method: 'GET' } },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'agent',
|
||||
metadata: { id: 'agent', name: 'Agent' },
|
||||
config: { params: { model: 'gpt-4', userPrompt: 'test' } },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [
|
||||
{ source: 'start', target: 'router' },
|
||||
{ source: 'router', target: 'api1' },
|
||||
{ source: 'router', target: 'api2' },
|
||||
{ source: 'api1', target: 'agent' },
|
||||
{ source: 'api2', target: 'agent' },
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
}
|
||||
|
||||
const executor = new Executor(routerWorkflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
// Mock context simulating: router selected api1, api1 executed, api2 not in active path
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: {
|
||||
router: new Map([['router', 'api1']]),
|
||||
condition: new Map(),
|
||||
},
|
||||
activeExecutionPath: new Set(['start', 'router', 'api1', 'agent']),
|
||||
workflow: routerWorkflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['start', 'router', 'api1'])
|
||||
|
||||
// Test agent's dependencies
|
||||
const agentConnections = [
|
||||
{ source: 'api1', target: 'agent', sourceHandle: 'source' },
|
||||
{ source: 'api2', target: 'agent', sourceHandle: 'source' },
|
||||
]
|
||||
|
||||
const dependenciesMet = checkDependencies(agentConnections, executedBlocks, mockContext)
|
||||
|
||||
// Both dependencies should be met:
|
||||
// - api1: in active path AND executed = met
|
||||
// - api2: NOT in active path = automatically met
|
||||
expect(dependenciesMet).toBe(true)
|
||||
})
|
||||
|
||||
test('should prioritize special connection types over active path check', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: { router: new Map(), condition: new Map() },
|
||||
activeExecutionPath: new Set(['block1']), // block2 not in active path
|
||||
completedLoops: new Set(),
|
||||
workflow: workflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['block1'])
|
||||
|
||||
// Test error connection (should be handled before active path check)
|
||||
const errorConnections = [{ source: 'block2', target: 'block3', sourceHandle: 'error' }]
|
||||
|
||||
// Mock block2 with error state
|
||||
mockContext.blockStates.set('block2', {
|
||||
output: { error: 'test error' },
|
||||
})
|
||||
|
||||
// Even though block2 is not in active path, error connection should be handled specially
|
||||
const errorDepsResult = checkDependencies(errorConnections, new Set(['block2']), mockContext)
|
||||
expect(errorDepsResult).toBe(true) // source executed + has error = dependency met
|
||||
|
||||
// Test loop connection
|
||||
const loopConnections = [
|
||||
{ source: 'block2', target: 'block3', sourceHandle: 'loop-end-source' },
|
||||
]
|
||||
|
||||
mockContext.completedLoops.add('block2')
|
||||
const loopDepsResult = checkDependencies(loopConnections, new Set(['block2']), mockContext)
|
||||
expect(loopDepsResult).toBe(true) // loop completed = dependency met
|
||||
})
|
||||
|
||||
test('should handle router decisions correctly in dependency checking', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
// Add router block to workflow
|
||||
workflow.blocks.push({
|
||||
id: 'router1',
|
||||
metadata: { id: 'router', name: 'Router' },
|
||||
config: { params: {} },
|
||||
enabled: true,
|
||||
})
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: {
|
||||
router: new Map([['router1', 'target1']]), // router selected target1
|
||||
condition: new Map(),
|
||||
},
|
||||
activeExecutionPath: new Set(['router1', 'target1', 'target2']),
|
||||
workflow: workflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['router1'])
|
||||
|
||||
// Test selected target
|
||||
const selectedConnections = [{ source: 'router1', target: 'target1', sourceHandle: 'source' }]
|
||||
const selectedResult = checkDependencies(selectedConnections, executedBlocks, mockContext)
|
||||
expect(selectedResult).toBe(true) // router executed + target selected = dependency met
|
||||
|
||||
// Test non-selected target
|
||||
const nonSelectedConnections = [
|
||||
{ source: 'router1', target: 'target2', sourceHandle: 'source' },
|
||||
]
|
||||
const nonSelectedResult = checkDependencies(
|
||||
nonSelectedConnections,
|
||||
executedBlocks,
|
||||
mockContext
|
||||
)
|
||||
expect(nonSelectedResult).toBe(true) // router executed + target NOT selected = dependency auto-met
|
||||
})
|
||||
|
||||
test('should handle condition decisions correctly in dependency checking', () => {
|
||||
const conditionWorkflow = createWorkflowWithCondition()
|
||||
const executor = new Executor(conditionWorkflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: {
|
||||
router: new Map(),
|
||||
condition: new Map([['condition1', 'true']]), // condition selected true path
|
||||
},
|
||||
activeExecutionPath: new Set(['condition1', 'trueTarget']),
|
||||
workflow: conditionWorkflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['condition1'])
|
||||
|
||||
// Test selected condition path
|
||||
const trueConnections = [
|
||||
{ source: 'condition1', target: 'trueTarget', sourceHandle: 'condition-true' },
|
||||
]
|
||||
const trueResult = checkDependencies(trueConnections, executedBlocks, mockContext)
|
||||
expect(trueResult).toBe(true)
|
||||
|
||||
// Test non-selected condition path
|
||||
const falseConnections = [
|
||||
{ source: 'condition1', target: 'falseTarget', sourceHandle: 'condition-false' },
|
||||
]
|
||||
const falseResult = checkDependencies(falseConnections, executedBlocks, mockContext)
|
||||
expect(falseResult).toBe(true) // condition executed + path NOT selected = dependency auto-met
|
||||
})
|
||||
|
||||
test('should handle regular sequential dependencies correctly', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: { router: new Map(), condition: new Map() },
|
||||
activeExecutionPath: new Set(['block1', 'block2']),
|
||||
workflow: workflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['block1'])
|
||||
|
||||
// Test normal sequential dependency
|
||||
const normalConnections = [{ source: 'block1', target: 'block2', sourceHandle: 'source' }]
|
||||
|
||||
// Without error
|
||||
const normalResult = checkDependencies(normalConnections, executedBlocks, mockContext)
|
||||
expect(normalResult).toBe(true) // source executed + no error = dependency met
|
||||
|
||||
// With error should fail regular connection
|
||||
mockContext.blockStates.set('block1', {
|
||||
output: { error: 'test error' },
|
||||
})
|
||||
const errorResult = checkDependencies(normalConnections, executedBlocks, mockContext)
|
||||
expect(errorResult).toBe(false) // source executed + has error = regular dependency not met
|
||||
})
|
||||
|
||||
test('should handle empty dependency list', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = createMockContext()
|
||||
const executedBlocks = new Set<string>()
|
||||
|
||||
// Empty connections should return true
|
||||
const result = checkDependencies([], executedBlocks, mockContext)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -877,6 +877,9 @@ export class Executor {
    insideParallel?: string,
    iterationIndex?: number
  ): boolean {
    if (incomingConnections.length === 0) {
      return true
    }
    // Check if this is a loop block
    const isLoopBlock = incomingConnections.some((conn) => {
      const sourceBlock = this.actualWorkflow.blocks.find((b) => b.id === conn.source)
@@ -994,6 +997,12 @@ export class Executor {
        return sourceExecuted && conn.target === selectedTarget
      }

      // If source is not in active path, consider this dependency met
      // This allows blocks with multiple inputs to execute even if some inputs are from inactive paths
      if (!context.activeExecutionPath.has(conn.source)) {
        return true
      }

      // For error connections, check if the source had an error
      if (conn.sourceHandle === 'error') {
        return sourceExecuted && hasSourceError
@@ -1004,12 +1013,6 @@ export class Executor {
        return sourceExecuted && !hasSourceError
      }

      // If source is not in active path, consider this dependency met
      // This allows blocks with multiple inputs to execute even if some inputs are from inactive paths
      if (!context.activeExecutionPath.has(conn.source)) {
        return true
      }

      // For regular blocks, dependency is met if source is executed
      return sourceExecuted
    })
|
||||
|
||||
@@ -408,4 +408,206 @@ describe('PathTracker', () => {
|
||||
}).not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Router downstream path activation', () => {
|
||||
beforeEach(() => {
|
||||
// Create router workflow with downstream connections
|
||||
mockWorkflow = {
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'router1',
|
||||
metadata: { id: 'router', name: 'Router' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'router', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api1',
|
||||
metadata: { id: 'api', name: 'API 1' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'api', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api2',
|
||||
metadata: { id: 'api', name: 'API 2' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'api', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'agent1',
|
||||
metadata: { id: 'agent', name: 'Agent' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'agent', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [
|
||||
{ source: 'router1', target: 'api1' },
|
||||
{ source: 'router1', target: 'api2' },
|
||||
{ source: 'api1', target: 'agent1' },
|
||||
{ source: 'api2', target: 'agent1' },
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
}
|
||||
|
||||
pathTracker = new PathTracker(mockWorkflow)
|
||||
mockContext = {
|
||||
workflowId: 'test-router-workflow',
|
||||
blockStates: new Map(),
|
||||
blockLogs: [],
|
||||
metadata: { duration: 0 },
|
||||
environmentVariables: {},
|
||||
decisions: { router: new Map(), condition: new Map() },
|
||||
loopIterations: new Map(),
|
||||
loopItems: new Map(),
|
||||
completedLoops: new Set(),
|
||||
executedBlocks: new Set(),
|
||||
activeExecutionPath: new Set(),
|
||||
workflow: mockWorkflow,
|
||||
}
|
||||
})
|
||||
|
||||
it('should activate downstream paths when router selects a target', () => {
|
||||
// Mock router output selecting api1
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
// Update paths for router
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
|
||||
// Both api1 and agent1 should be activated (downstream from api1)
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
|
||||
|
||||
// api2 should NOT be activated (not selected by router)
|
||||
expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle multiple levels of downstream connections', () => {
|
||||
// Add another level to test deep activation
|
||||
mockWorkflow.blocks.push({
|
||||
id: 'finalStep',
|
||||
metadata: { id: 'api', name: 'Final Step' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'api', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
})
|
||||
mockWorkflow.connections.push({ source: 'agent1', target: 'finalStep' })
|
||||
|
||||
pathTracker = new PathTracker(mockWorkflow)
|
||||
|
||||
// Mock router output selecting api1
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
|
||||
// All downstream blocks should be activated
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('finalStep')).toBe(true)
|
||||
|
||||
// Non-selected path should not be activated
|
||||
expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
|
||||
})
|
||||
|
||||
it('should not create infinite loops in cyclic workflows', () => {
|
||||
// Add a cycle to test loop prevention
|
||||
mockWorkflow.connections.push({ source: 'agent1', target: 'api1' })
|
||||
pathTracker = new PathTracker(mockWorkflow)
|
||||
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
// This should not throw or cause infinite recursion
|
||||
expect(() => {
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
}).not.toThrow()
|
||||
|
||||
// Both api1 and agent1 should still be activated
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
|
||||
})
|
||||
|
||||
it('should handle router with no downstream connections', () => {
|
||||
// Create isolated router
|
||||
const isolatedWorkflow = {
|
||||
...mockWorkflow,
|
||||
connections: [
|
||||
{ source: 'router1', target: 'api1' },
|
||||
{ source: 'router1', target: 'api2' },
|
||||
// Remove downstream connections from api1/api2
|
||||
],
|
||||
}
|
||||
pathTracker = new PathTracker(isolatedWorkflow)
|
||||
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
|
||||
// Only the selected target should be activated
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -165,10 +165,28 @@ export class PathTracker {
      if (selectedPath) {
        context.decisions.router.set(block.id, selectedPath)
        context.activeExecutionPath.add(selectedPath)

        this.activateDownstreamPaths(selectedPath, context)

        logger.info(`Router ${block.id} selected path: ${selectedPath}`)
      }
    }

  /**
   * Recursively activate downstream paths from a block
   */
  private activateDownstreamPaths(blockId: string, context: ExecutionContext): void {
    const outgoingConnections = this.getOutgoingConnections(blockId)

    for (const conn of outgoingConnections) {
      if (!context.activeExecutionPath.has(conn.target)) {
        context.activeExecutionPath.add(conn.target)

        this.activateDownstreamPaths(conn.target, context)
      }
    }
  }
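The recursion terminates on cyclic graphs because a target is only visited when it is not yet in `activeExecutionPath`, which doubles as the visited set. For reference, the same traversal can be written iteratively; this is only a sketch under the assumption that `getOutgoingConnections` returns objects with a `target` field, not the author's implementation.

```typescript
// Illustrative, non-recursive equivalent of activateDownstreamPaths.
private activateDownstreamPathsIteratively(blockId: string, context: ExecutionContext): void {
  const stack = this.getOutgoingConnections(blockId).map((conn) => conn.target)
  while (stack.length > 0) {
    const target = stack.pop()!
    if (context.activeExecutionPath.has(target)) continue // already activated: skip cycles
    context.activeExecutionPath.add(target)
    for (const conn of this.getOutgoingConnections(target)) {
      stack.push(conn.target)
    }
  }
}
```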
|
||||
|
||||
/**
|
||||
* Update paths for condition blocks
|
||||
*/
|
||||
@@ -219,9 +237,7 @@ export class PathTracker {
|
||||
const isPartOfLoop = blockLoops.length > 0
|
||||
|
||||
for (const conn of outgoingConnections) {
|
||||
if (
|
||||
this.shouldActivateConnection(conn, block.id, hasError, isPartOfLoop, blockLoops, context)
|
||||
) {
|
||||
if (this.shouldActivateConnection(conn, hasError, isPartOfLoop, blockLoops, context)) {
|
||||
context.activeExecutionPath.add(conn.target)
|
||||
}
|
||||
}
|
||||
@@ -253,7 +269,6 @@ export class PathTracker {
|
||||
*/
|
||||
private shouldActivateConnection(
|
||||
conn: SerializedConnection,
|
||||
sourceBlockId: string,
|
||||
hasError: boolean,
|
||||
isPartOfLoop: boolean,
|
||||
blockLoops: Array<{ id: string; loop: any }>,
|
||||
|
||||
@@ -593,6 +593,7 @@ export class InputResolver {
|
||||
isInTemplateLiteral
|
||||
)
|
||||
} else {
|
||||
// The function execution API will handle variable resolution within code strings
|
||||
formattedValue =
|
||||
typeof replacementValue === 'object'
|
||||
? JSON.stringify(replacementValue)
|
||||
|
||||
@@ -91,6 +91,10 @@ export function useCollaborativeWorkflow() {
|
||||
payload.parentId,
|
||||
payload.extent
|
||||
)
|
||||
// Handle auto-connect edge if present
|
||||
if (payload.autoConnectEdge) {
|
||||
workflowStore.addEdge(payload.autoConnectEdge)
|
||||
}
|
||||
break
|
||||
case 'update-position': {
|
||||
// Apply position update only if it's newer than the last applied timestamp
|
||||
@@ -164,6 +168,10 @@ export function useCollaborativeWorkflow() {
|
||||
payload.parentId,
|
||||
payload.extent
|
||||
)
|
||||
// Handle auto-connect edge if present
|
||||
if (payload.autoConnectEdge) {
|
||||
workflowStore.addEdge(payload.autoConnectEdge)
|
||||
}
|
||||
break
|
||||
}
|
||||
} else if (target === 'edge') {
|
||||
@@ -284,7 +292,8 @@ export function useCollaborativeWorkflow() {
|
||||
position: Position,
|
||||
data?: Record<string, any>,
|
||||
parentId?: string,
|
||||
extent?: 'parent'
|
||||
extent?: 'parent',
|
||||
autoConnectEdge?: Edge
|
||||
) => {
|
||||
// Create complete block data upfront using the same logic as the store
|
||||
const blockConfig = getBlock(type)
|
||||
@@ -306,10 +315,14 @@ export function useCollaborativeWorkflow() {
|
||||
height: 0,
|
||||
parentId,
|
||||
extent,
|
||||
autoConnectEdge, // Include edge data for atomic operation
|
||||
}
|
||||
|
||||
// Apply locally first
|
||||
workflowStore.addBlock(id, type, name, position, data, parentId, extent)
|
||||
if (autoConnectEdge) {
|
||||
workflowStore.addEdge(autoConnectEdge)
|
||||
}
|
||||
|
||||
// Then broadcast to other clients with complete block data
|
||||
if (!isApplyingRemoteChange.current) {
|
||||
@@ -354,10 +367,14 @@ export function useCollaborativeWorkflow() {
|
||||
height: 0, // Default height, will be set by the UI
|
||||
parentId,
|
||||
extent,
|
||||
autoConnectEdge, // Include edge data for atomic operation
|
||||
}
|
||||
|
||||
// Apply locally first
|
||||
workflowStore.addBlock(id, type, name, position, data, parentId, extent)
|
||||
if (autoConnectEdge) {
|
||||
workflowStore.addEdge(autoConnectEdge)
|
||||
}
|
||||
|
||||
// Then broadcast to other clients with complete block data
|
||||
if (!isApplyingRemoteChange.current) {
|
||||
|
||||
@@ -339,8 +339,36 @@ async function parseWithFileParser(
|
||||
try {
|
||||
let content: string
|
||||
|
||||
if (fileUrl.startsWith('http://') || fileUrl.startsWith('https://')) {
|
||||
// Download and parse remote file with timeout
|
||||
if (fileUrl.startsWith('data:')) {
|
||||
logger.info(`Processing data URI for: ${filename}`)
|
||||
|
||||
try {
|
||||
const [header, base64Data] = fileUrl.split(',')
|
||||
if (!base64Data) {
|
||||
throw new Error('Invalid data URI format')
|
||||
}
|
||||
|
||||
if (header.includes('base64')) {
|
||||
const buffer = Buffer.from(base64Data, 'base64')
|
||||
content = buffer.toString('utf8')
|
||||
} else {
|
||||
content = decodeURIComponent(base64Data)
|
||||
}
|
||||
|
||||
if (mimeType === 'text/plain') {
|
||||
logger.info(`Data URI processed successfully for text content: ${filename}`)
|
||||
} else {
|
||||
const extension = filename.split('.').pop()?.toLowerCase() || 'txt'
|
||||
const buffer = Buffer.from(base64Data, 'base64')
|
||||
const result = await parseBuffer(buffer, extension)
|
||||
content = result.content
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to process data URI: ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
} else if (fileUrl.startsWith('http://') || fileUrl.startsWith('https://')) {
|
||||
const controller = new AbortController()
|
||||
const timeoutId = setTimeout(() => controller.abort(), TIMEOUTS.FILE_DOWNLOAD)
|
||||
|
||||
@@ -354,7 +382,6 @@ async function parseWithFileParser(
|
||||
|
||||
const buffer = Buffer.from(await response.arrayBuffer())
|
||||
|
||||
// Extract file extension from filename
|
||||
const extension = filename.split('.').pop()?.toLowerCase() || ''
|
||||
if (!extension) {
|
||||
throw new Error(`Could not determine file extension from filename: ${filename}`)
|
||||
|
||||
apps/sim/lib/logs/types.ts (new file, 380 lines)
@@ -0,0 +1,380 @@
|
||||
import type { Edge } from 'reactflow'
|
||||
import type { BlockLog, NormalizedBlockOutput } from '@/executor/types'
|
||||
import type { DeploymentStatus } from '@/stores/workflows/registry/types'
|
||||
import type { Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
export type { WorkflowState, Loop, Parallel, DeploymentStatus }
|
||||
export type WorkflowEdge = Edge
|
||||
export type { NormalizedBlockOutput, BlockLog }
|
||||
|
||||
export interface PricingInfo {
|
||||
input: number
|
||||
output: number
|
||||
cachedInput?: number
|
||||
updatedAt: string
|
||||
}
|
||||
|
||||
export interface TokenUsage {
|
||||
prompt: number
|
||||
completion: number
|
||||
total: number
|
||||
}
|
||||
|
||||
export interface CostBreakdown {
|
||||
input: number
|
||||
output: number
|
||||
total: number
|
||||
tokens: TokenUsage
|
||||
model: string
|
||||
pricing: PricingInfo
|
||||
}
|
||||
|
||||
export interface ToolCall {
|
||||
name: string
|
||||
duration: number
|
||||
startTime: string
|
||||
endTime: string
|
||||
status: 'success' | 'error'
|
||||
input: Record<string, unknown>
|
||||
output: Record<string, unknown>
|
||||
error?: string
|
||||
}
|
||||
|
||||
export type BlockInputData = Record<string, any>
|
||||
export type BlockOutputData = NormalizedBlockOutput | null
|
||||
|
||||
export interface ExecutionEnvironment {
|
||||
variables: Record<string, string>
|
||||
workflowId: string
|
||||
executionId: string
|
||||
userId: string
|
||||
workspaceId: string
|
||||
}
|
||||
|
||||
export interface ExecutionTrigger {
|
||||
type: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
|
||||
source: string
|
||||
data?: Record<string, unknown>
|
||||
timestamp: string
|
||||
}
|
||||
|
||||
export interface ExecutionStatus {
|
||||
status: 'running' | 'completed' | 'failed' | 'cancelled'
|
||||
startedAt: string
|
||||
endedAt?: string
|
||||
durationMs?: number
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionSnapshot {
|
||||
id: string
|
||||
workflowId: string
|
||||
stateHash: string
|
||||
stateData: WorkflowState
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export type WorkflowExecutionSnapshotInsert = Omit<WorkflowExecutionSnapshot, 'createdAt'>
|
||||
export type WorkflowExecutionSnapshotSelect = WorkflowExecutionSnapshot
|
||||
|
||||
export interface WorkflowExecutionLog {
|
||||
id: string
|
||||
workflowId: string
|
||||
executionId: string
|
||||
stateSnapshotId: string
|
||||
level: 'info' | 'error'
|
||||
message: string
|
||||
trigger: ExecutionTrigger['type']
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
totalDurationMs: number
|
||||
blockCount: number
|
||||
successCount: number
|
||||
errorCount: number
|
||||
skippedCount: number
|
||||
totalCost: number
|
||||
totalInputCost: number
|
||||
totalOutputCost: number
|
||||
totalTokens: number
|
||||
primaryModel: string
|
||||
metadata: {
|
||||
environment: ExecutionEnvironment
|
||||
trigger: ExecutionTrigger
|
||||
traceSpans?: TraceSpan[]
|
||||
errorDetails?: {
|
||||
blockId: string
|
||||
blockName: string
|
||||
error: string
|
||||
stackTrace?: string
|
||||
}
|
||||
}
|
||||
duration?: string
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export type WorkflowExecutionLogInsert = Omit<WorkflowExecutionLog, 'id' | 'createdAt'>
|
||||
export type WorkflowExecutionLogSelect = WorkflowExecutionLog
|
||||
|
||||
export interface BlockExecutionLog {
|
||||
id: string
|
||||
executionId: string
|
||||
workflowId: string
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
status: 'success' | 'error' | 'skipped'
|
||||
errorMessage?: string
|
||||
errorStackTrace?: string
|
||||
inputData: BlockInputData
|
||||
outputData: BlockOutputData
|
||||
cost: CostBreakdown | null
|
||||
metadata: {
|
||||
toolCalls?: ToolCall[]
|
||||
iterationIndex?: number
|
||||
virtualBlockId?: string
|
||||
parentBlockId?: string
|
||||
environmentSnapshot?: Record<string, string>
|
||||
}
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export type BlockExecutionLogInsert = Omit<BlockExecutionLog, 'id' | 'createdAt'>
|
||||
export type BlockExecutionLogSelect = BlockExecutionLog
|
||||
|
||||
export interface TraceSpan {
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
duration: number
|
||||
startTime: string
|
||||
endTime: string
|
||||
children?: TraceSpan[]
|
||||
toolCalls?: ToolCall[]
|
||||
status?: 'success' | 'error'
|
||||
tokens?: number
|
||||
relativeStartMs?: number
|
||||
blockId?: string
|
||||
input?: Record<string, unknown>
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionSummary {
|
||||
id: string
|
||||
workflowId: string
|
||||
workflowName: string
|
||||
executionId: string
|
||||
trigger: ExecutionTrigger['type']
|
||||
status: ExecutionStatus['status']
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
blockStats: {
|
||||
total: number
|
||||
success: number
|
||||
error: number
|
||||
skipped: number
|
||||
}
|
||||
costSummary: {
|
||||
total: number
|
||||
inputCost: number
|
||||
outputCost: number
|
||||
tokens: number
|
||||
primaryModel: string
|
||||
}
|
||||
stateSnapshotId: string
|
||||
errorSummary?: {
|
||||
blockId: string
|
||||
blockName: string
|
||||
message: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionDetail extends WorkflowExecutionSummary {
|
||||
environment: ExecutionEnvironment
|
||||
triggerData: ExecutionTrigger
|
||||
blockExecutions: BlockExecutionSummary[]
|
||||
traceSpans: TraceSpan[]
|
||||
workflowState: WorkflowState
|
||||
}
|
||||
|
||||
export interface BlockExecutionSummary {
|
||||
id: string
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
status: BlockExecutionLog['status']
|
||||
errorMessage?: string
|
||||
cost?: CostBreakdown
|
||||
inputSummary: {
|
||||
parameterCount: number
|
||||
hasComplexData: boolean
|
||||
}
|
||||
outputSummary: {
|
||||
hasOutput: boolean
|
||||
outputType: string
|
||||
hasError: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export interface BlockExecutionDetail extends BlockExecutionSummary {
|
||||
inputData: BlockInputData
|
||||
outputData: BlockOutputData
|
||||
metadata: BlockExecutionLog['metadata']
|
||||
toolCalls?: ToolCall[]
|
||||
}
|
||||
|
||||
export interface PaginatedResponse<T> {
|
||||
data: T[]
|
||||
pagination: {
|
||||
page: number
|
||||
pageSize: number
|
||||
total: number
|
||||
totalPages: number
|
||||
hasNext: boolean
|
||||
hasPrevious: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export type WorkflowExecutionsResponse = PaginatedResponse<WorkflowExecutionSummary>
|
||||
export type BlockExecutionsResponse = PaginatedResponse<BlockExecutionSummary>
|
||||
|
||||
export interface WorkflowExecutionFilters {
|
||||
workflowIds?: string[]
|
||||
folderIds?: string[]
|
||||
triggers?: ExecutionTrigger['type'][]
|
||||
status?: ExecutionStatus['status'][]
|
||||
startDate?: string
|
||||
endDate?: string
|
||||
search?: string
|
||||
minDuration?: number
|
||||
maxDuration?: number
|
||||
minCost?: number
|
||||
maxCost?: number
|
||||
hasErrors?: boolean
|
||||
}
|
||||
|
||||
export interface PaginationParams {
|
||||
page: number
|
||||
pageSize: number
|
||||
sortBy?: 'startedAt' | 'durationMs' | 'totalCost' | 'blockCount'
|
||||
sortOrder?: 'asc' | 'desc'
|
||||
}
|
||||
|
||||
export interface LogsQueryParams extends WorkflowExecutionFilters, PaginationParams {
|
||||
includeBlockSummary?: boolean
|
||||
includeWorkflowState?: boolean
|
||||
}
|
||||
|
||||
export interface LogsError {
|
||||
code: 'EXECUTION_NOT_FOUND' | 'SNAPSHOT_NOT_FOUND' | 'INVALID_WORKFLOW_STATE' | 'STORAGE_ERROR'
|
||||
message: string
|
||||
details?: Record<string, unknown>
|
||||
}
|
||||
|
||||
export interface ValidationError {
|
||||
field: string
|
||||
message: string
|
||||
value: unknown
|
||||
}
|
||||
|
||||
export class LogsServiceError extends Error {
|
||||
public code: LogsError['code']
|
||||
public details?: Record<string, unknown>
|
||||
|
||||
constructor(message: string, code: LogsError['code'], details?: Record<string, unknown>) {
|
||||
super(message)
|
||||
this.name = 'LogsServiceError'
|
||||
this.code = code
|
||||
this.details = details
|
||||
}
|
||||
}
|
||||
|
||||
export interface DatabaseOperationResult<T> {
|
||||
success: boolean
|
||||
data?: T
|
||||
error?: LogsServiceError
|
||||
}
|
||||
|
||||
export interface BatchInsertResult<T> {
|
||||
inserted: T[]
|
||||
failed: Array<{
|
||||
item: T
|
||||
error: string
|
||||
}>
|
||||
totalAttempted: number
|
||||
totalSucceeded: number
|
||||
totalFailed: number
|
||||
}
|
||||
|
||||
export interface SnapshotService {
|
||||
createSnapshot(workflowId: string, state: WorkflowState): Promise<WorkflowExecutionSnapshot>
|
||||
getSnapshot(id: string): Promise<WorkflowExecutionSnapshot | null>
|
||||
getSnapshotByHash(workflowId: string, hash: string): Promise<WorkflowExecutionSnapshot | null>
|
||||
computeStateHash(state: WorkflowState): string
|
||||
cleanupOrphanedSnapshots(olderThanDays: number): Promise<number>
|
||||
}
|
||||
|
||||
export interface SnapshotCreationResult {
|
||||
snapshot: WorkflowExecutionSnapshot
|
||||
isNew: boolean
|
||||
}
|
||||
|
||||
export interface ExecutionLoggerService {
|
||||
startWorkflowExecution(params: {
|
||||
workflowId: string
|
||||
executionId: string
|
||||
trigger: ExecutionTrigger
|
||||
environment: ExecutionEnvironment
|
||||
workflowState: WorkflowState
|
||||
}): Promise<{
|
||||
workflowLog: WorkflowExecutionLog
|
||||
snapshot: WorkflowExecutionSnapshot
|
||||
}>
|
||||
|
||||
logBlockExecution(params: {
|
||||
executionId: string
|
||||
workflowId: string
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
input: BlockInputData
|
||||
output: BlockOutputData
|
||||
timing: {
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
}
|
||||
status: BlockExecutionLog['status']
|
||||
error?: {
|
||||
message: string
|
||||
stackTrace?: string
|
||||
}
|
||||
cost?: CostBreakdown
|
||||
metadata?: BlockExecutionLog['metadata']
|
||||
}): Promise<BlockExecutionLog>
|
||||
|
||||
completeWorkflowExecution(params: {
|
||||
executionId: string
|
||||
endedAt: string
|
||||
totalDurationMs: number
|
||||
blockStats: {
|
||||
total: number
|
||||
success: number
|
||||
error: number
|
||||
skipped: number
|
||||
}
|
||||
costSummary: {
|
||||
totalCost: number
|
||||
totalInputCost: number
|
||||
totalOutputCost: number
|
||||
totalTokens: number
|
||||
primaryModel: string
|
||||
}
|
||||
finalOutput: BlockOutputData
|
||||
traceSpans?: TraceSpan[]
|
||||
}): Promise<WorkflowExecutionLog>
|
||||
}
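As a rough illustration of how a caller might satisfy these types, the object below conforms to the `ExecutionTrigger` interface declared above; the source string and data payload are assumptions for the example, not values defined by this diff.

```typescript
const trigger: ExecutionTrigger = {
  type: 'webhook',
  source: 'github', // hypothetical source identifier
  data: { event: 'push' },
  timestamp: new Date().toISOString(),
}
```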
|
||||
@@ -1,9 +1,5 @@
|
||||
// Export the storage abstraction layer
|
||||
|
||||
export * as BlobClient from './blob/blob-client'
|
||||
// Export specific storage clients for advanced use cases
|
||||
export * as S3Client from './s3/s3-client'
|
||||
// Export configuration
|
||||
export {
|
||||
BLOB_CONFIG,
|
||||
BLOB_KB_CONFIG,
|
||||
|
||||
@@ -279,15 +279,51 @@ describe('S3 Client', () => {
|
||||
})
|
||||
|
||||
describe('s3Client initialization', () => {
|
||||
it('should initialize with correct configuration', async () => {
|
||||
it('should initialize with correct configuration when credentials are available', async () => {
|
||||
// Mock env with credentials
|
||||
vi.doMock('../../env', () => ({
|
||||
env: {
|
||||
AWS_ACCESS_KEY_ID: 'test-access-key',
|
||||
AWS_SECRET_ACCESS_KEY: 'test-secret-key',
|
||||
},
|
||||
}))
|
||||
|
||||
// Re-import to get fresh module with mocked env
|
||||
vi.resetModules()
|
||||
const { getS3Client } = await import('./s3-client')
|
||||
const { S3Client } = await import('@aws-sdk/client-s3')
|
||||
|
||||
const client = getS3Client()
|
||||
|
||||
expect(client).toBeDefined()
|
||||
// Verify the client was constructed with the right configuration
|
||||
expect(S3Client).toHaveBeenCalledWith({ region: 'test-region' })
|
||||
expect(S3Client).toHaveBeenCalledWith({
|
||||
region: 'test-region',
|
||||
credentials: {
|
||||
accessKeyId: 'test-access-key',
|
||||
secretAccessKey: 'test-secret-key',
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should initialize without credentials when env vars are not available', async () => {
|
||||
vi.doMock('../../env', () => ({
|
||||
env: {
|
||||
AWS_ACCESS_KEY_ID: undefined,
|
||||
AWS_SECRET_ACCESS_KEY: undefined,
|
||||
},
|
||||
}))
|
||||
|
||||
vi.resetModules()
|
||||
const { getS3Client } = await import('./s3-client')
|
||||
const { S3Client } = await import('@aws-sdk/client-s3')
|
||||
|
||||
const client = getS3Client()
|
||||
|
||||
expect(client).toBeDefined()
|
||||
expect(S3Client).toHaveBeenCalledWith({
|
||||
region: 'test-region',
|
||||
credentials: undefined,
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -6,6 +6,7 @@ import { persistExecutionError, persistExecutionLogs } from '@/lib/logs/executio
|
||||
import { buildTraceSpans } from '@/lib/logs/trace-spans'
|
||||
import { hasProcessedMessage, markMessageAsProcessed } from '@/lib/redis'
|
||||
import { decryptSecret } from '@/lib/utils'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
|
||||
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
@@ -13,7 +14,6 @@ import { environment, userStats, webhook } from '@/db/schema'
|
||||
import { Executor } from '@/executor'
|
||||
import { Serializer } from '@/serializer'
|
||||
import { mergeSubblockStateAsync } from '@/stores/workflows/server-utils'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('WebhookUtils')
|
||||
|
||||
@@ -475,23 +475,28 @@ export async function executeWorkflowFromPayload(
|
||||
|
||||
// Returns void as errors are handled internally
|
||||
try {
|
||||
// Get the workflow state
|
||||
if (!foundWorkflow.state) {
|
||||
logger.error(`[${requestId}] TRACE: Missing workflow state`, {
|
||||
// Load workflow data from normalized tables
|
||||
logger.debug(`[${requestId}] Loading workflow ${foundWorkflow.id} from normalized tables`)
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(foundWorkflow.id)
|
||||
|
||||
if (!normalizedData) {
|
||||
logger.error(`[${requestId}] TRACE: No normalized data found for workflow`, {
|
||||
workflowId: foundWorkflow.id,
|
||||
hasState: false,
|
||||
hasNormalizedData: false,
|
||||
})
|
||||
throw new Error(`Workflow ${foundWorkflow.id} has no state`)
|
||||
throw new Error(`Workflow ${foundWorkflow.id} data not found in normalized tables`)
|
||||
}
|
||||
const state = foundWorkflow.state as WorkflowState
|
||||
const { blocks, edges, loops, parallels } = state
|
||||
|
||||
// Use normalized data for execution
|
||||
const { blocks, edges, loops, parallels } = normalizedData
|
||||
logger.info(`[${requestId}] Loaded workflow ${foundWorkflow.id} from normalized tables`)
|
||||
|
||||
// DEBUG: Log state information
|
||||
logger.debug(`[${requestId}] TRACE: Retrieved workflow state`, {
|
||||
logger.debug(`[${requestId}] TRACE: Retrieved workflow state from normalized tables`, {
|
||||
workflowId: foundWorkflow.id,
|
||||
blockCount: Object.keys(blocks || {}).length,
|
||||
edgeCount: (edges || []).length,
|
||||
loopCount: (loops || []).length,
|
||||
loopCount: Object.keys(loops || {}).length,
|
||||
})
|
||||
|
||||
logger.debug(
|
||||
|
||||
@@ -122,6 +122,7 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
||||
updatedAt: '2025-06-17',
|
||||
},
|
||||
capabilities: {
|
||||
temperature: { min: 0, max: 2 },
|
||||
toolUsageControl: true,
|
||||
},
|
||||
},
|
||||
@@ -134,6 +135,7 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
||||
updatedAt: '2025-06-17',
|
||||
},
|
||||
capabilities: {
|
||||
temperature: { min: 0, max: 2 },
|
||||
toolUsageControl: true,
|
||||
},
|
||||
},
|
||||
@@ -146,6 +148,7 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
||||
updatedAt: '2025-06-17',
|
||||
},
|
||||
capabilities: {
|
||||
temperature: { min: 0, max: 2 },
|
||||
toolUsageControl: true,
|
||||
},
|
||||
},
|
||||
|
||||
@@ -110,6 +110,9 @@ describe('Model Capabilities', () => {
|
||||
it.concurrent('should return true for models that support temperature', () => {
|
||||
const supportedModels = [
|
||||
'gpt-4o',
|
||||
'gpt-4.1',
|
||||
'gpt-4.1-mini',
|
||||
'gpt-4.1-nano',
|
||||
'gemini-2.5-flash',
|
||||
'claude-sonnet-4-0',
|
||||
'claude-opus-4-0',
|
||||
@@ -139,10 +142,6 @@ describe('Model Capabilities', () => {
|
||||
'deepseek-r1',
|
||||
// Chat models that don't support temperature
|
||||
'deepseek-chat',
|
||||
// GPT-4.1 family models that don't support temperature
|
||||
'gpt-4.1',
|
||||
'gpt-4.1-nano',
|
||||
'gpt-4.1-mini',
|
||||
'azure/gpt-4.1',
|
||||
'azure/model-router',
|
||||
]
|
||||
|
||||
@@ -29,6 +29,34 @@ const db = socketDb
|
||||
// Constants
|
||||
const DEFAULT_LOOP_ITERATIONS = 5
|
||||
|
||||
/**
 * Shared function to handle auto-connect edge insertion
 * @param tx - Database transaction
 * @param workflowId - The workflow ID
 * @param autoConnectEdge - The auto-connect edge data
 * @param logger - Logger instance
 */
async function insertAutoConnectEdge(
  tx: any,
  workflowId: string,
  autoConnectEdge: any,
  logger: any
) {
  if (!autoConnectEdge) return

  await tx.insert(workflowEdges).values({
    id: autoConnectEdge.id,
    workflowId,
    sourceBlockId: autoConnectEdge.source,
    targetBlockId: autoConnectEdge.target,
    sourceHandle: autoConnectEdge.sourceHandle || null,
    targetHandle: autoConnectEdge.targetHandle || null,
  })
  logger.debug(
    `Added auto-connect edge ${autoConnectEdge.id}: ${autoConnectEdge.source} -> ${autoConnectEdge.target}`
  )
}
|
||||
|
||||
// Enum for subflow types
|
||||
enum SubflowType {
|
||||
LOOP = 'loop',
|
||||
@@ -246,6 +274,9 @@ async function handleBlockOperationTx(
|
||||
}
|
||||
|
||||
await tx.insert(workflowBlocks).values(insertData)
|
||||
|
||||
// Handle auto-connect edge if present
|
||||
await insertAutoConnectEdge(tx, workflowId, payload.autoConnectEdge, logger)
|
||||
} catch (insertError) {
|
||||
logger.error(`[SERVER] ❌ Failed to insert block ${payload.id}:`, insertError)
|
||||
throw insertError
|
||||
@@ -592,6 +623,9 @@ async function handleBlockOperationTx(
|
||||
}
|
||||
|
||||
await tx.insert(workflowBlocks).values(insertData)
|
||||
|
||||
// Handle auto-connect edge if present
|
||||
await insertAutoConnectEdge(tx, workflowId, payload.autoConnectEdge, logger)
|
||||
} catch (insertError) {
|
||||
logger.error(`[SERVER] ❌ Failed to insert duplicated block ${payload.id}:`, insertError)
|
||||
throw insertError
|
||||
|
||||
@@ -279,6 +279,32 @@ describe('Socket Server Index Integration', () => {
|
||||
expect(() => WorkflowOperationSchema.parse(validOperation)).not.toThrow()
|
||||
})
|
||||
|
||||
it.concurrent('should validate block operations with autoConnectEdge', async () => {
|
||||
const { WorkflowOperationSchema } = await import('./validation/schemas')
|
||||
|
||||
const validOperationWithAutoEdge = {
|
||||
operation: 'add',
|
||||
target: 'block',
|
||||
payload: {
|
||||
id: 'test-block',
|
||||
type: 'action',
|
||||
name: 'Test Block',
|
||||
position: { x: 100, y: 200 },
|
||||
autoConnectEdge: {
|
||||
id: 'auto-edge-123',
|
||||
source: 'source-block',
|
||||
target: 'test-block',
|
||||
sourceHandle: 'output',
|
||||
targetHandle: 'target',
|
||||
type: 'workflowEdge',
|
||||
},
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
|
||||
expect(() => WorkflowOperationSchema.parse(validOperationWithAutoEdge)).not.toThrow()
|
||||
})
|
||||
|
||||
it.concurrent('should validate edge operations', async () => {
|
||||
const { WorkflowOperationSchema } = await import('./validation/schemas')
|
||||
|
||||
|
||||
@@ -5,6 +5,16 @@ const PositionSchema = z.object({
|
||||
y: z.number(),
|
||||
})
|
||||
|
||||
// Schema for auto-connect edge data
|
||||
const AutoConnectEdgeSchema = z.object({
|
||||
id: z.string(),
|
||||
source: z.string(),
|
||||
target: z.string(),
|
||||
sourceHandle: z.string().nullable().optional(),
|
||||
targetHandle: z.string().nullable().optional(),
|
||||
type: z.string().optional(),
|
||||
})
|
||||
|
||||
export const BlockOperationSchema = z.object({
|
||||
operation: z.enum([
|
||||
'add',
|
||||
@@ -35,6 +45,7 @@ export const BlockOperationSchema = z.object({
|
||||
isWide: z.boolean().optional(),
|
||||
advancedMode: z.boolean().optional(),
|
||||
height: z.number().optional(),
|
||||
autoConnectEdge: AutoConnectEdgeSchema.optional(), // Add support for auto-connect edges
|
||||
}),
|
||||
timestamp: z.number(),
|
||||
})
|
||||
@@ -69,4 +80,4 @@ export const WorkflowOperationSchema = z.union([
|
||||
SubflowOperationSchema,
|
||||
])
|
||||
|
||||
export { PositionSchema }
|
||||
export { PositionSchema, AutoConnectEdgeSchema }
|
||||
|
||||
@@ -43,9 +43,6 @@ async function initializeApplication(): Promise<void> {
|
||||
// Mark data as initialized only after sync managers have loaded data from DB
|
||||
dataInitialized = true
|
||||
|
||||
// Register cleanup
|
||||
window.addEventListener('beforeunload', handleBeforeUnload)
|
||||
|
||||
// Log initialization timing information
|
||||
const initDuration = Date.now() - initStartTime
|
||||
logger.info(`Application initialization completed in ${initDuration}ms`)
|
||||
|
||||
@@ -4,8 +4,11 @@ import { createLogger } from '@/lib/logs/console-logger'
|
||||
const logger = createLogger('KnowledgeStore')
|
||||
|
||||
export interface ChunkingConfig {
|
||||
chunkSize?: number
|
||||
minCharactersPerChunk?: number
|
||||
maxSize: number
|
||||
minSize: number
|
||||
overlap: number
|
||||
chunkSize?: number // Legacy support
|
||||
minCharactersPerChunk?: number // Legacy support
|
||||
recipe?: string
|
||||
lang?: string
|
||||
strategy?: 'recursive' | 'semantic' | 'sentence' | 'paragraph'
|
||||
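For reference, a config object that satisfies the updated interface could look like the sketch below; the values are illustrative examples, not defaults pulled from the codebase.

// Illustrative ChunkingConfig (example values, not project defaults)
const exampleConfig: ChunkingConfig = {
  maxSize: 1024,              // new required field: upper bound on chunk size
  minSize: 100,               // new required field: lower bound on chunk size
  overlap: 200,               // new required field: characters shared between chunks
  chunkSize: 1024,            // legacy field, still accepted
  minCharactersPerChunk: 100, // legacy field, still accepted
  recipe: 'default',
  lang: 'en',
  strategy: 'recursive',
}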
@@ -463,75 +466,65 @@ export const useKnowledgeStore = create<KnowledgeStore>((set, get) => ({
|
||||
throw new Error(result.error || 'Failed to fetch documents')
|
||||
}
|
||||
|
||||
const documents = result.data
|
||||
const serverDocuments = result.data
|
||||
|
||||
set((state) => {
|
||||
// Merge with existing documents, being smart about when to use server data vs local optimistic updates
|
||||
const currentDocuments = state.documents[knowledgeBaseId] || []
|
||||
|
||||
// For each fetched document, decide whether to use server data or preserve local state
|
||||
const mergedDocuments = documents.map((fetchedDoc: DocumentData) => {
|
||||
const existingDoc = currentDocuments.find((doc) => doc.id === fetchedDoc.id)
|
||||
// Create a map of server documents by filename for quick lookup
|
||||
const serverDocumentsByFilename = new Map()
|
||||
serverDocuments.forEach((doc: DocumentData) => {
|
||||
serverDocumentsByFilename.set(doc.filename, doc)
|
||||
})
|
||||
|
||||
if (!existingDoc) {
|
||||
// New document from server, use it as-is
|
||||
return fetchedDoc
|
||||
// Filter out temporary documents that now have real server equivalents
|
||||
const filteredCurrentDocs = currentDocuments.filter((doc) => {
|
||||
// If this is a temporary document (starts with temp-) and a server document exists with the same filename
|
||||
if (doc.id.startsWith('temp-') && serverDocumentsByFilename.has(doc.filename)) {
|
||||
return false // Remove the temporary document
|
||||
}
|
||||
|
||||
// If processing status is different, generally prefer server data for these transitions:
|
||||
if (existingDoc.processingStatus !== fetchedDoc.processingStatus) {
|
||||
// Always allow these status progressions from server:
|
||||
// pending -> processing, pending -> completed, pending -> failed
|
||||
// processing -> completed, processing -> failed
|
||||
const allowedTransitions = [
|
||||
{ from: 'pending', to: 'processing' },
|
||||
{ from: 'pending', to: 'completed' },
|
||||
{ from: 'pending', to: 'failed' },
|
||||
{ from: 'processing', to: 'completed' },
|
||||
{ from: 'processing', to: 'failed' },
|
||||
]
|
||||
|
||||
const transition = allowedTransitions.find(
|
||||
(t) => t.from === existingDoc.processingStatus && t.to === fetchedDoc.processingStatus
|
||||
)
|
||||
|
||||
if (transition) {
|
||||
return fetchedDoc
|
||||
// If this is a real document that still exists on the server, keep it for merging
|
||||
if (!doc.id.startsWith('temp-')) {
|
||||
const serverDoc = serverDocuments.find((sDoc: DocumentData) => sDoc.id === doc.id)
|
||||
if (serverDoc) {
|
||||
return false // Will be replaced by server version in merge below
|
||||
}
|
||||
}
|
||||
|
||||
const existingHasTimestamps =
|
||||
existingDoc.processingStartedAt || existingDoc.processingCompletedAt
|
||||
const fetchedHasTimestamps =
|
||||
fetchedDoc.processingStartedAt || fetchedDoc.processingCompletedAt
|
||||
|
||||
if (fetchedHasTimestamps && !existingHasTimestamps) {
|
||||
return fetchedDoc
|
||||
}
|
||||
|
||||
// If the server document has updated stats (chunk count, token count, etc.), use it
|
||||
if (
|
||||
fetchedDoc.processingStatus === 'completed' &&
|
||||
(fetchedDoc.chunkCount !== existingDoc.chunkCount ||
|
||||
fetchedDoc.tokenCount !== existingDoc.tokenCount ||
|
||||
fetchedDoc.characterCount !== existingDoc.characterCount)
|
||||
) {
|
||||
return fetchedDoc
|
||||
}
|
||||
|
||||
// Otherwise, preserve the existing document (keeps optimistic updates)
|
||||
return existingDoc
|
||||
// Keep temporary documents that don't have server equivalents yet
|
||||
return true
|
||||
})
|
||||
|
||||
// Add any new documents that weren't in the existing set
|
||||
const newDocuments = documents.filter(
|
||||
(fetchedDoc: DocumentData) => !currentDocuments.find((doc) => doc.id === fetchedDoc.id)
|
||||
)
|
||||
// Merge server documents with any remaining local documents
|
||||
const mergedDocuments = serverDocuments.map((serverDoc: DocumentData) => {
|
||||
const existingDoc = currentDocuments.find((doc) => doc.id === serverDoc.id)
|
||||
|
||||
if (!existingDoc) {
|
||||
// New document from server, use it as-is
|
||||
return serverDoc
|
||||
}
|
||||
|
||||
// Merge logic for existing documents (prefer server data for most fields)
|
||||
return {
|
||||
...existingDoc,
|
||||
...serverDoc,
|
||||
// Preserve any local optimistic updates that haven't been reflected on server yet
|
||||
...(existingDoc.processingStatus !== serverDoc.processingStatus &&
|
||||
['pending', 'processing'].includes(existingDoc.processingStatus) &&
|
||||
!serverDoc.processingStartedAt
|
||||
? { processingStatus: existingDoc.processingStatus }
|
||||
: {}),
|
||||
}
|
||||
})
|
||||
|
||||
// Add any remaining temporary documents that don't have server equivalents
|
||||
const finalDocuments = [...mergedDocuments, ...filteredCurrentDocs]
|
||||
|
||||
return {
|
||||
documents: {
|
||||
...state.documents,
|
||||
[knowledgeBaseId]: [...mergedDocuments, ...newDocuments],
|
||||
[knowledgeBaseId]: finalDocuments,
|
||||
},
|
||||
loadingDocuments: new Set(
|
||||
[...state.loadingDocuments].filter((loadingId) => loadingId !== knowledgeBaseId)
|
||||
@@ -540,7 +533,7 @@ export const useKnowledgeStore = create<KnowledgeStore>((set, get) => ({
|
||||
})
|
||||
|
||||
logger.info(`Documents refreshed for knowledge base: ${knowledgeBaseId}`)
|
||||
return documents
|
||||
return serverDocuments
|
||||
} catch (error) {
|
||||
logger.error(`Error refreshing documents for knowledge base ${knowledgeBaseId}:`, error)
|
||||
|
||||
|
||||
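To make the reconciliation above concrete, here is a hedged sketch of the filename-based matching; the document shapes and IDs are illustrative, not taken from real data.

// Hypothetical local state before a refresh
const currentDocuments = [
  { id: 'temp-1', filename: 'notes.txt', processingStatus: 'pending' },   // optimistic upload
  { id: 'doc-42', filename: 'spec.pdf', processingStatus: 'processing' }, // real document
]

// Hypothetical server response after the upload completes
const serverDocuments = [
  { id: 'doc-77', filename: 'notes.txt', processingStatus: 'completed' },
  { id: 'doc-42', filename: 'spec.pdf', processingStatus: 'completed' },
]

// 'temp-1' is dropped because a server document with the same filename now exists;
// 'doc-42' is replaced by its server version; the merged list ends up as [doc-77, doc-42].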
@@ -432,7 +432,7 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
let workflowState: any
|
||||
|
||||
if (workflowData?.state) {
|
||||
// Use the state from the database
|
||||
// API returns normalized data in state
|
||||
workflowState = {
|
||||
blocks: workflowData.state.blocks || {},
|
||||
edges: workflowData.state.edges || [],
|
||||
@@ -448,9 +448,18 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: workflowData.state,
|
||||
state: {
|
||||
blocks: workflowData.state.blocks || {},
|
||||
edges: workflowData.state.edges || [],
|
||||
loops: workflowData.state.loops || {},
|
||||
parallels: workflowData.state.parallels || {},
|
||||
isDeployed: workflowData.isDeployed || false,
|
||||
deployedAt: workflowData.deployedAt
|
||||
? new Date(workflowData.deployedAt)
|
||||
: undefined,
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Loaded from database',
|
||||
action: 'Loaded from database (normalized tables)',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
@@ -548,9 +557,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
* @returns The ID of the newly created workflow
|
||||
*/
|
||||
createWorkflow: async (options = {}) => {
|
||||
const { workflows } = get()
|
||||
const id = crypto.randomUUID()
|
||||
|
||||
// Use provided workspace ID (must be provided since we no longer track active workspace)
|
||||
const workspaceId = options.workspaceId
|
||||
|
||||
@@ -561,292 +567,259 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
}
|
||||
|
||||
logger.info(`Creating new workflow in workspace: ${workspaceId || 'none'}`)
|
||||
// Generate workflow metadata with appropriate name and color
|
||||
const newWorkflow: WorkflowMetadata = {
|
||||
id,
|
||||
name: options.name || generateUniqueName(workflows),
|
||||
lastModified: new Date(),
|
||||
description: options.description || 'New workflow',
|
||||
color: options.marketplaceId ? '#808080' : getNextWorkflowColor(workflows), // Gray for marketplace imports
|
||||
marketplaceData: options.marketplaceId
|
||||
? { id: options.marketplaceId, status: 'temp' as const }
|
||||
: undefined,
|
||||
workspaceId, // Associate with workspace
|
||||
folderId: options.folderId || null, // Associate with folder if provided
|
||||
}
|
||||
|
||||
let initialState: any
|
||||
|
||||
// If this is a marketplace import with existing state
|
||||
if (options.marketplaceId && options.marketplaceState) {
|
||||
initialState = {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Imported from marketplace',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
logger.info(`Created workflow from marketplace: ${options.marketplaceId}`)
|
||||
} else {
|
||||
// Create starter block for new workflow
|
||||
const starterId = crypto.randomUUID()
|
||||
const starterBlock = {
|
||||
id: starterId,
|
||||
type: 'starter' as const,
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
subBlocks: {
|
||||
startWorkflow: {
|
||||
id: 'startWorkflow',
|
||||
type: 'dropdown' as const,
|
||||
value: 'manual',
|
||||
},
|
||||
webhookPath: {
|
||||
id: 'webhookPath',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
webhookSecret: {
|
||||
id: 'webhookSecret',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
scheduleType: {
|
||||
id: 'scheduleType',
|
||||
type: 'dropdown' as const,
|
||||
value: 'daily',
|
||||
},
|
||||
minutesInterval: {
|
||||
id: 'minutesInterval',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
minutesStartingAt: {
|
||||
id: 'minutesStartingAt',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
hourlyMinute: {
|
||||
id: 'hourlyMinute',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
dailyTime: {
|
||||
id: 'dailyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
weeklyDay: {
|
||||
id: 'weeklyDay',
|
||||
type: 'dropdown' as const,
|
||||
value: 'MON',
|
||||
},
|
||||
weeklyDayTime: {
|
||||
id: 'weeklyDayTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyDay: {
|
||||
id: 'monthlyDay',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyTime: {
|
||||
id: 'monthlyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
cronExpression: {
|
||||
id: 'cronExpression',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
timezone: {
|
||||
id: 'timezone',
|
||||
type: 'dropdown' as const,
|
||||
value: 'UTC',
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
response: {
|
||||
type: {
|
||||
input: 'any',
|
||||
},
|
||||
},
|
||||
},
|
||||
enabled: true,
|
||||
horizontalHandles: true,
|
||||
isWide: false,
|
||||
height: 0,
|
||||
}
|
||||
|
||||
initialState = {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Initial state',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
// Add workflow to registry first
|
||||
set((state) => ({
|
||||
workflows: {
|
||||
...state.workflows,
|
||||
[id]: newWorkflow,
|
||||
},
|
||||
error: null,
|
||||
}))
|
||||
|
||||
// Initialize subblock values if this is a marketplace import
|
||||
if (options.marketplaceId && options.marketplaceState?.blocks) {
|
||||
useSubBlockStore.getState().initializeFromWorkflow(id, options.marketplaceState.blocks)
|
||||
}
|
||||
|
||||
// Initialize subblock values to ensure they're available for sync
|
||||
if (!options.marketplaceId) {
|
||||
// For non-marketplace workflows, initialize subblock values from the starter block
|
||||
const subblockValues: Record<string, Record<string, any>> = {}
|
||||
const blocks = initialState.blocks as Record<string, BlockState>
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
subblockValues[blockId] = {}
|
||||
for (const [subblockId, subblock] of Object.entries(block.subBlocks)) {
|
||||
subblockValues[blockId][subblockId] = (subblock as any).value
|
||||
}
|
||||
}
|
||||
|
||||
// Update the subblock store with the initial values
|
||||
useSubBlockStore.setState((state) => ({
|
||||
workflowValues: {
|
||||
...state.workflowValues,
|
||||
[id]: subblockValues,
|
||||
},
|
||||
}))
|
||||
}
|
||||
|
||||
// Properly set as active workflow and initialize state
|
||||
set({ activeWorkflowId: id })
|
||||
useWorkflowStore.setState(initialState)
|
||||
|
||||
// Immediately persist the new workflow to the database using dedicated endpoint
|
||||
const persistWorkflow = async () => {
|
||||
try {
|
||||
const response = await fetch('/api/workflows', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
name: newWorkflow.name,
|
||||
description: newWorkflow.description,
|
||||
color: newWorkflow.color,
|
||||
workspaceId: newWorkflow.workspaceId,
|
||||
folderId: newWorkflow.folderId,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json()
|
||||
throw new Error(
|
||||
`Failed to create workflow: ${errorData.error || response.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
const createdWorkflow = await response.json()
|
||||
logger.info(`Successfully created workflow ${createdWorkflow.id} on server`)
|
||||
|
||||
// Update the local workflow ID to match the server-generated one
|
||||
if (createdWorkflow.id !== id) {
|
||||
logger.info(`Updating local workflow ID from ${id} to ${createdWorkflow.id}`)
|
||||
|
||||
// Update registry with server ID
|
||||
set((state) => {
|
||||
const { [id]: oldWorkflow, ...otherWorkflows } = state.workflows
|
||||
return {
|
||||
workflows: {
|
||||
...otherWorkflows,
|
||||
[createdWorkflow.id]: {
|
||||
...oldWorkflow,
|
||||
id: createdWorkflow.id,
|
||||
},
|
||||
},
|
||||
activeWorkflowId: createdWorkflow.id,
|
||||
}
|
||||
})
|
||||
|
||||
// Return the server ID for the caller
|
||||
return createdWorkflow.id
|
||||
}
|
||||
|
||||
return id
|
||||
} catch (error) {
|
||||
logger.error(`Failed to create new workflow ${id}:`, error)
|
||||
throw error // Re-throw to handle in calling code
|
||||
}
|
||||
}
|
||||
|
||||
// Persist synchronously to ensure workflow exists before Socket.IO operations
|
||||
let finalId = id
|
||||
// Create the workflow on the server first to get the server-generated ID
|
||||
try {
|
||||
finalId = await persistWorkflow()
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`Critical: Failed to persist new workflow ${id}, Socket.IO operations may fail:`,
|
||||
error
|
||||
const response = await fetch('/api/workflows', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
name: options.name || generateUniqueName(),
|
||||
description: options.description || 'New workflow',
|
||||
color: options.marketplaceId ? '#808080' : getNextWorkflowColor(),
|
||||
workspaceId,
|
||||
folderId: options.folderId || null,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json()
|
||||
throw new Error(`Failed to create workflow: ${errorData.error || response.statusText}`)
|
||||
}
|
||||
|
||||
const createdWorkflow = await response.json()
|
||||
const serverWorkflowId = createdWorkflow.id
|
||||
|
||||
logger.info(`Successfully created workflow ${serverWorkflowId} on server`)
|
||||
|
||||
// Generate workflow metadata with server-generated ID
|
||||
const newWorkflow: WorkflowMetadata = {
|
||||
id: serverWorkflowId,
|
||||
name: createdWorkflow.name,
|
||||
lastModified: new Date(),
|
||||
description: createdWorkflow.description,
|
||||
color: createdWorkflow.color,
|
||||
marketplaceData: options.marketplaceId
|
||||
? { id: options.marketplaceId, status: 'temp' as const }
|
||||
: undefined,
|
||||
workspaceId,
|
||||
folderId: createdWorkflow.folderId,
|
||||
}
|
||||
|
||||
let initialState: any
|
||||
|
||||
// If this is a marketplace import with existing state
|
||||
if (options.marketplaceId && options.marketplaceState) {
|
||||
initialState = {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Imported from marketplace',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
logger.info(`Created workflow from marketplace: ${options.marketplaceId}`)
|
||||
} else {
|
||||
// Create starter block for new workflow
|
||||
const starterId = crypto.randomUUID()
|
||||
const starterBlock = {
|
||||
id: starterId,
|
||||
type: 'starter' as const,
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
subBlocks: {
|
||||
startWorkflow: {
|
||||
id: 'startWorkflow',
|
||||
type: 'dropdown' as const,
|
||||
value: 'manual',
|
||||
},
|
||||
webhookPath: {
|
||||
id: 'webhookPath',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
webhookSecret: {
|
||||
id: 'webhookSecret',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
scheduleType: {
|
||||
id: 'scheduleType',
|
||||
type: 'dropdown' as const,
|
||||
value: 'daily',
|
||||
},
|
||||
minutesInterval: {
|
||||
id: 'minutesInterval',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
minutesStartingAt: {
|
||||
id: 'minutesStartingAt',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
hourlyMinute: {
|
||||
id: 'hourlyMinute',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
dailyTime: {
|
||||
id: 'dailyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
weeklyDay: {
|
||||
id: 'weeklyDay',
|
||||
type: 'dropdown' as const,
|
||||
value: 'MON',
|
||||
},
|
||||
weeklyDayTime: {
|
||||
id: 'weeklyDayTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyDay: {
|
||||
id: 'monthlyDay',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyTime: {
|
||||
id: 'monthlyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
cronExpression: {
|
||||
id: 'cronExpression',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
timezone: {
|
||||
id: 'timezone',
|
||||
type: 'dropdown' as const,
|
||||
value: 'UTC',
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
response: {
|
||||
type: {
|
||||
input: 'any',
|
||||
},
|
||||
},
|
||||
},
|
||||
enabled: true,
|
||||
horizontalHandles: true,
|
||||
isWide: false,
|
||||
height: 0,
|
||||
}
|
||||
|
||||
initialState = {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Initial state',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
// Add workflow to registry with server-generated ID
|
||||
set((state) => ({
|
||||
workflows: {
|
||||
...state.workflows,
|
||||
[serverWorkflowId]: newWorkflow,
|
||||
},
|
||||
error: null,
|
||||
}))
|
||||
|
||||
// Initialize subblock values if this is a marketplace import
|
||||
if (options.marketplaceId && options.marketplaceState?.blocks) {
|
||||
useSubBlockStore
|
||||
.getState()
|
||||
.initializeFromWorkflow(serverWorkflowId, options.marketplaceState.blocks)
|
||||
}
|
||||
|
||||
// Initialize subblock values to ensure they're available for sync
|
||||
if (!options.marketplaceId) {
|
||||
// For non-marketplace workflows, initialize subblock values from the starter block
|
||||
const subblockValues: Record<string, Record<string, any>> = {}
|
||||
const blocks = initialState.blocks as Record<string, BlockState>
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
subblockValues[blockId] = {}
|
||||
for (const [subblockId, subblock] of Object.entries(block.subBlocks)) {
|
||||
subblockValues[blockId][subblockId] = (subblock as any).value
|
||||
}
|
||||
}
|
||||
|
||||
// Update the subblock store with the initial values
|
||||
useSubBlockStore.setState((state) => ({
|
||||
workflowValues: {
|
||||
...state.workflowValues,
|
||||
[serverWorkflowId]: subblockValues,
|
||||
},
|
||||
}))
|
||||
}
|
||||
|
||||
// Don't set as active workflow here - let the navigation/URL change handle that
|
||||
// This prevents race conditions and flickering
|
||||
logger.info(
|
||||
`Created new workflow with ID ${serverWorkflowId} in workspace ${workspaceId || 'none'}`
|
||||
)
|
||||
// Don't throw - allow workflow creation to continue in memory
|
||||
|
||||
return serverWorkflowId
|
||||
} catch (error) {
|
||||
logger.error(`Failed to create new workflow:`, error)
|
||||
set({
|
||||
error: `Failed to create workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
})
|
||||
throw error
|
||||
}
|
||||
|
||||
logger.info(`Created new workflow with ID ${finalId} in workspace ${workspaceId || 'none'}`)
|
||||
|
||||
return finalId
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -857,16 +830,15 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
state: any,
|
||||
metadata: Partial<WorkflowMetadata>
|
||||
) => {
|
||||
const { workflows } = get()
|
||||
const id = crypto.randomUUID()
|
||||
|
||||
// Generate workflow metadata with marketplace properties
|
||||
const newWorkflow: WorkflowMetadata = {
|
||||
id,
|
||||
name: metadata.name || 'Marketplace workflow',
|
||||
name: metadata.name || generateUniqueName(),
|
||||
lastModified: new Date(),
|
||||
description: metadata.description || 'Imported from marketplace',
|
||||
color: metadata.color || getNextWorkflowColor(workflows),
|
||||
color: metadata.color || getNextWorkflowColor(),
|
||||
marketplaceData: { id: marketplaceId, status: 'temp' as const },
|
||||
}
|
||||
|
||||
@@ -1022,7 +994,7 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
name: `${sourceWorkflow.name} (Copy)`,
|
||||
lastModified: new Date(),
|
||||
description: sourceWorkflow.description,
|
||||
color: getNextWorkflowColor(workflows),
|
||||
color: getNextWorkflowColor(),
|
||||
workspaceId, // Include the workspaceId in the new workflow
|
||||
folderId: sourceWorkflow.folderId, // Include the folderId from source workflow
|
||||
// Do not copy marketplace data
|
||||
|
||||
@@ -1,62 +1,247 @@
|
||||
import type { WorkflowMetadata } from './types'
|
||||
|
||||
// Available workflow colors
|
||||
export const WORKFLOW_COLORS = [
|
||||
'#3972F6',
|
||||
'#F639DD',
|
||||
'#F6B539',
|
||||
'#8139F6',
|
||||
'#39B54A',
|
||||
'#39B5AB',
|
||||
'#F66839',
|
||||
// Original colors
|
||||
'#3972F6', // Blue
|
||||
'#F639DD', // Pink/Magenta
|
||||
'#F6B539', // Orange/Yellow
|
||||
'#8139F6', // Purple
|
||||
'#39B54A', // Green
|
||||
'#39B5AB', // Teal
|
||||
'#F66839', // Red/Orange
|
||||
|
||||
// Additional vibrant blues
|
||||
'#2E5BFF', // Bright Blue
|
||||
'#4A90FF', // Sky Blue
|
||||
'#1E40AF', // Deep Blue
|
||||
'#0EA5E9', // Cyan Blue
|
||||
'#3B82F6', // Royal Blue
|
||||
'#6366F1', // Indigo
|
||||
'#1D4ED8', // Electric Blue
|
||||
|
||||
// Additional vibrant purples
|
||||
'#A855F7', // Bright Purple
|
||||
'#C084FC', // Light Purple
|
||||
'#7C3AED', // Deep Purple
|
||||
'#9333EA', // Violet
|
||||
'#8B5CF6', // Medium Purple
|
||||
'#6D28D9', // Dark Purple
|
||||
'#5B21B6', // Deep Violet
|
||||
|
||||
// Additional vibrant pinks/magentas
|
||||
'#EC4899', // Hot Pink
|
||||
'#F97316', // Pink Orange
|
||||
'#E11D48', // Rose
|
||||
'#BE185D', // Deep Pink
|
||||
'#DB2777', // Pink Red
|
||||
'#F472B6', // Light Pink
|
||||
'#F59E0B', // Amber Pink
|
||||
|
||||
// Additional vibrant greens
|
||||
'#10B981', // Emerald
|
||||
'#059669', // Green Teal
|
||||
'#16A34A', // Forest Green
|
||||
'#22C55E', // Lime Green
|
||||
'#84CC16', // Yellow Green
|
||||
'#65A30D', // Olive Green
|
||||
'#15803D', // Dark Green
|
||||
|
||||
// Additional vibrant teals/cyans
|
||||
'#06B6D4', // Cyan
|
||||
'#0891B2', // Dark Cyan
|
||||
'#0E7490', // Teal Blue
|
||||
'#14B8A6', // Turquoise
|
||||
'#0D9488', // Dark Teal
|
||||
'#047857', // Sea Green
|
||||
'#059669', // Mint Green
|
||||
|
||||
// Additional vibrant oranges/reds
|
||||
'#EA580C', // Bright Orange
|
||||
'#DC2626', // Red
|
||||
'#B91C1C', // Dark Red
|
||||
'#EF4444', // Light Red
|
||||
'#F97316', // Orange
|
||||
'#FB923C', // Light Orange
|
||||
'#FDBA74', // Peach
|
||||
|
||||
// Additional vibrant yellows/golds
|
||||
'#FBBF24', // Gold
|
||||
'#F59E0B', // Amber
|
||||
'#D97706', // Dark Amber
|
||||
'#92400E', // Bronze
|
||||
'#EAB308', // Yellow
|
||||
'#CA8A04', // Dark Yellow
|
||||
'#A16207', // Mustard
|
||||
|
||||
// Additional unique vibrant colors
|
||||
'#FF6B6B', // Coral
|
||||
'#4ECDC4', // Mint
|
||||
'#45B7D1', // Light Blue
|
||||
'#96CEB4', // Sage
|
||||
'#FFEAA7', // Cream
|
||||
'#DDA0DD', // Plum
|
||||
'#98D8C8', // Seafoam
|
||||
'#F7DC6F', // Banana
|
||||
'#BB8FCE', // Lavender
|
||||
'#85C1E9', // Baby Blue
|
||||
'#F8C471', // Peach
|
||||
'#82E0AA', // Light Green
|
||||
'#F1948A', // Salmon
|
||||
'#D7BDE2', // Lilac
]
|
||||
|
||||
// Generates a unique name for a new workflow
|
||||
export function generateUniqueName(existingWorkflows: Record<string, WorkflowMetadata>): string {
|
||||
// Extract numbers from existing workflow names using regex
|
||||
const numbers = Object.values(existingWorkflows)
|
||||
.map((w) => {
|
||||
const match = w.name.match(/Workflow (\d+)/)
|
||||
return match ? Number.parseInt(match[1]) : 0
|
||||
})
|
||||
.filter((n) => n > 0)
|
||||
// Random adjectives and nouns for generating creative workflow names
|
||||
const ADJECTIVES = [
|
||||
'Blazing',
|
||||
'Crystal',
|
||||
'Golden',
|
||||
'Silver',
|
||||
'Mystic',
|
||||
'Cosmic',
|
||||
'Electric',
|
||||
'Frozen',
|
||||
'Burning',
|
||||
'Shining',
|
||||
'Dancing',
|
||||
'Flying',
|
||||
'Roaring',
|
||||
'Whispering',
|
||||
'Glowing',
|
||||
'Sparkling',
|
||||
'Thunder',
|
||||
'Lightning',
|
||||
'Storm',
|
||||
'Ocean',
|
||||
'Mountain',
|
||||
'Forest',
|
||||
'Desert',
|
||||
'Arctic',
|
||||
'Tropical',
|
||||
'Midnight',
|
||||
'Dawn',
|
||||
'Sunset',
|
||||
'Rainbow',
|
||||
'Diamond',
|
||||
'Ruby',
|
||||
'Emerald',
|
||||
'Sapphire',
|
||||
'Pearl',
|
||||
'Jade',
|
||||
'Amber',
|
||||
'Coral',
|
||||
'Ivory',
|
||||
'Obsidian',
|
||||
'Marble',
|
||||
'Velvet',
|
||||
'Silk',
|
||||
'Satin',
|
||||
'Linen',
|
||||
'Cotton',
|
||||
'Wool',
|
||||
'Cashmere',
|
||||
'Denim',
|
||||
'Neon',
|
||||
'Pastel',
|
||||
'Vibrant',
|
||||
'Muted',
|
||||
'Bold',
|
||||
'Subtle',
|
||||
'Bright',
|
||||
'Dark',
|
||||
]
|
||||
|
||||
if (numbers.length === 0) {
|
||||
return 'Workflow 1'
|
||||
}
|
||||
const NOUNS = [
|
||||
'Phoenix',
|
||||
'Dragon',
|
||||
'Eagle',
|
||||
'Wolf',
|
||||
'Lion',
|
||||
'Tiger',
|
||||
'Panther',
|
||||
'Falcon',
|
||||
'Hawk',
|
||||
'Raven',
|
||||
'Swan',
|
||||
'Dove',
|
||||
'Butterfly',
|
||||
'Firefly',
|
||||
'Dragonfly',
|
||||
'Hummingbird',
|
||||
'Galaxy',
|
||||
'Nebula',
|
||||
'Comet',
|
||||
'Meteor',
|
||||
'Star',
|
||||
'Moon',
|
||||
'Sun',
|
||||
'Planet',
|
||||
'Asteroid',
|
||||
'Constellation',
|
||||
'Aurora',
|
||||
'Eclipse',
|
||||
'Solstice',
|
||||
'Equinox',
|
||||
'Horizon',
|
||||
'Zenith',
|
||||
'Castle',
|
||||
'Tower',
|
||||
'Bridge',
|
||||
'Garden',
|
||||
'Fountain',
|
||||
'Palace',
|
||||
'Temple',
|
||||
'Cathedral',
|
||||
'Lighthouse',
|
||||
'Windmill',
|
||||
'Waterfall',
|
||||
'Canyon',
|
||||
'Valley',
|
||||
'Peak',
|
||||
'Ridge',
|
||||
'Cliff',
|
||||
'Ocean',
|
||||
'River',
|
||||
'Lake',
|
||||
'Stream',
|
||||
'Pond',
|
||||
'Bay',
|
||||
'Cove',
|
||||
'Harbor',
|
||||
'Island',
|
||||
'Peninsula',
|
||||
'Archipelago',
|
||||
'Atoll',
|
||||
'Reef',
|
||||
'Lagoon',
|
||||
'Fjord',
|
||||
'Delta',
|
||||
'Cake',
|
||||
'Cookie',
|
||||
'Muffin',
|
||||
'Cupcake',
|
||||
'Pie',
|
||||
'Tart',
|
||||
'Brownie',
|
||||
'Donut',
|
||||
'Pancake',
|
||||
'Waffle',
|
||||
'Croissant',
|
||||
'Bagel',
|
||||
'Pretzel',
|
||||
'Biscuit',
|
||||
'Scone',
|
||||
'Crumpet',
|
||||
]
|
||||
|
||||
// Find the maximum number and add 1
|
||||
const nextNumber = Math.max(...numbers) + 1
|
||||
return `Workflow ${nextNumber}`
|
||||
// Generates a random name for a new workflow
|
||||
export function generateUniqueName(): string {
|
||||
const adjective = ADJECTIVES[Math.floor(Math.random() * ADJECTIVES.length)]
|
||||
const noun = NOUNS[Math.floor(Math.random() * NOUNS.length)]
|
||||
return `${adjective.toLowerCase()}-${noun.toLowerCase()}`
|
||||
}
|
||||
|
||||
// Determines the next color to use for a new workflow based on the color of the newest workflow
|
||||
export function getNextWorkflowColor(existingWorkflows: Record<string, WorkflowMetadata>): string {
|
||||
const workflowArray = Object.values(existingWorkflows)
|
||||
|
||||
if (workflowArray.length === 0) {
|
||||
return WORKFLOW_COLORS[0]
|
||||
}
|
||||
|
||||
// Sort workflows by lastModified date (newest first)
|
||||
const sortedWorkflows = [...workflowArray].sort((a, b) => {
|
||||
const dateA =
|
||||
a.lastModified instanceof Date ? a.lastModified.getTime() : new Date(a.lastModified).getTime()
|
||||
const dateB =
|
||||
b.lastModified instanceof Date ? b.lastModified.getTime() : new Date(b.lastModified).getTime()
|
||||
return dateB - dateA
|
||||
})
|
||||
|
||||
// Get the newest workflow (first in sorted array)
|
||||
const newestWorkflow = sortedWorkflows[0]
|
||||
|
||||
// Find the index of the newest workflow's color, defaulting to -1 if undefined
|
||||
const currentColorIndex = newestWorkflow?.color
|
||||
? WORKFLOW_COLORS.indexOf(newestWorkflow.color)
|
||||
: -1
|
||||
|
||||
// Get next color index, wrapping around to 0 if we reach the end
|
||||
const nextColorIndex = (currentColorIndex + 1) % WORKFLOW_COLORS.length
|
||||
|
||||
return WORKFLOW_COLORS[nextColorIndex]
|
||||
// Generates a random color for a new workflow
|
||||
export function getNextWorkflowColor(): string {
|
||||
// Simply return a random color from the available colors
|
||||
return WORKFLOW_COLORS[Math.floor(Math.random() * WORKFLOW_COLORS.length)]
|
||||
}
|
||||
|
||||
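Assuming the new parameterless helpers above, callers no longer pass the existing-workflow map; a minimal usage sketch follows (the import path is assumed, and the outputs shown are just examples of the random results).

// Import path is assumed for illustration
import { WORKFLOW_COLORS, generateUniqueName, getNextWorkflowColor } from './utils'

const name = generateUniqueName()    // e.g. 'blazing-phoenix'
const color = getNextWorkflowColor() // e.g. '#3972F6'

console.log(WORKFLOW_COLORS.includes(color)) // always true: the color comes from the palette
console.log(name)                            // lowercased adjective-noun pair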
@@ -50,6 +50,8 @@ describe('Function Execute Tool', () => {
|
||||
expect(body).toEqual({
|
||||
code: 'return 42',
|
||||
envVars: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
isCustomTool: false,
|
||||
timeout: 5000,
|
||||
workflowId: undefined,
|
||||
@@ -73,6 +75,8 @@ describe('Function Execute Tool', () => {
|
||||
code: 'const x = 40;\nconst y = 2;\nreturn x + y;',
|
||||
timeout: 10000,
|
||||
envVars: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
isCustomTool: false,
|
||||
workflowId: undefined,
|
||||
})
|
||||
@@ -87,6 +91,8 @@ describe('Function Execute Tool', () => {
|
||||
code: 'return 42',
|
||||
timeout: 10000,
|
||||
envVars: {},
|
||||
blockData: {},
|
||||
blockNameMapping: {},
|
||||
isCustomTool: false,
|
||||
workflowId: undefined,
|
||||
})
|
||||
@@ -158,6 +164,197 @@ describe('Function Execute Tool', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('Enhanced Error Handling', () => {
|
||||
test('should handle enhanced syntax error with line content', async () => {
|
||||
// Setup enhanced error response with debug information
|
||||
tester.setup(
|
||||
{
|
||||
success: false,
|
||||
error:
|
||||
'Syntax Error: Line 3: `description: "This has a missing closing quote` - Invalid or unexpected token (Check for missing quotes, brackets, or semicolons)',
|
||||
output: {
|
||||
result: null,
|
||||
stdout: '',
|
||||
executionTime: 5,
|
||||
},
|
||||
debug: {
|
||||
line: 3,
|
||||
column: undefined,
|
||||
errorType: 'SyntaxError',
|
||||
lineContent: 'description: "This has a missing closing quote',
|
||||
stack: 'user-function.js:5\n description: "This has a missing closing quote\n...',
|
||||
},
|
||||
},
|
||||
{ ok: false, status: 500 }
|
||||
)
|
||||
|
||||
// Execute the tool with syntax error
|
||||
const result = await tester.execute({
|
||||
code: 'const obj = {\n name: "test",\n description: "This has a missing closing quote\n};\nreturn obj;',
|
||||
})
|
||||
|
||||
// Check enhanced error handling
|
||||
expect(result.success).toBe(false)
|
||||
expect(result.error).toContain('Syntax Error')
|
||||
expect(result.error).toContain('Line 3')
|
||||
expect(result.error).toContain('description: "This has a missing closing quote')
|
||||
expect(result.error).toContain('Invalid or unexpected token')
|
||||
expect(result.error).toContain('(Check for missing quotes, brackets, or semicolons)')
|
||||
})
|
||||
|
||||
test('should handle enhanced runtime error with line and column', async () => {
|
||||
// Setup enhanced runtime error response
|
||||
tester.setup(
|
||||
{
|
||||
success: false,
|
||||
error:
|
||||
"Type Error: Line 2:16: `return obj.someMethod();` - Cannot read properties of null (reading 'someMethod')",
|
||||
output: {
|
||||
result: null,
|
||||
stdout: 'ERROR: {}\n',
|
||||
executionTime: 12,
|
||||
},
|
||||
debug: {
|
||||
line: 2,
|
||||
column: 16,
|
||||
errorType: 'TypeError',
|
||||
lineContent: 'return obj.someMethod();',
|
||||
stack: 'TypeError: Cannot read properties of null...',
|
||||
},
|
||||
},
|
||||
{ ok: false, status: 500 }
|
||||
)
|
||||
|
||||
// Execute the tool with runtime error
|
||||
const result = await tester.execute({
|
||||
code: 'const obj = null;\nreturn obj.someMethod();',
|
||||
})
|
||||
|
||||
// Check enhanced error handling
|
||||
expect(result.success).toBe(false)
|
||||
expect(result.error).toContain('Type Error')
|
||||
expect(result.error).toContain('Line 2:16')
|
||||
expect(result.error).toContain('return obj.someMethod();')
|
||||
expect(result.error).toContain('Cannot read properties of null')
|
||||
})
|
||||
|
||||
test('should handle enhanced error information in tool response', async () => {
|
||||
// Setup enhanced error response with full debug info
|
||||
tester.setup(
|
||||
{
|
||||
success: false,
|
||||
error: 'Reference Error: Line 1: `return undefinedVar` - undefinedVar is not defined',
|
||||
output: {
|
||||
result: null,
|
||||
stdout: '',
|
||||
executionTime: 3,
|
||||
},
|
||||
debug: {
|
||||
line: 1,
|
||||
column: 7,
|
||||
errorType: 'ReferenceError',
|
||||
lineContent: 'return undefinedVar',
|
||||
stack: 'ReferenceError: undefinedVar is not defined...',
|
||||
},
|
||||
},
|
||||
{ ok: false, status: 500 }
|
||||
)
|
||||
|
||||
// Execute the tool with reference error
|
||||
const result = await tester.execute({
|
||||
code: 'return undefinedVar',
|
||||
})
|
||||
|
||||
// Check that the tool properly captures enhanced error
|
||||
expect(result.success).toBe(false)
|
||||
expect(result.error).toBe(
|
||||
'Reference Error: Line 1: `return undefinedVar` - undefinedVar is not defined'
|
||||
)
|
||||
})
|
||||
|
||||
test('should preserve debug information in error object', async () => {
|
||||
// Setup enhanced error response
|
||||
tester.setup(
|
||||
{
|
||||
success: false,
|
||||
error: 'Syntax Error: Line 2 - Invalid syntax',
|
||||
debug: {
|
||||
line: 2,
|
||||
column: 5,
|
||||
errorType: 'SyntaxError',
|
||||
lineContent: 'invalid syntax here',
|
||||
stack: 'SyntaxError: Invalid syntax...',
|
||||
},
|
||||
},
|
||||
{ ok: false, status: 500 }
|
||||
)
|
||||
|
||||
// Execute the tool
|
||||
const result = await tester.execute({
|
||||
code: 'valid line\ninvalid syntax here',
|
||||
})
|
||||
|
||||
// Check that enhanced error information is available
|
||||
expect(result.success).toBe(false)
|
||||
expect(result.error).toBe('Syntax Error: Line 2 - Invalid syntax')
|
||||
|
||||
// Note: In this test framework, debug information would be available
|
||||
// in the response object, but the tool transforms it into the error message
|
||||
})
|
||||
|
||||
test('should handle enhanced error without line information', async () => {
|
||||
// Setup error response without line information
|
||||
tester.setup(
|
||||
{
|
||||
success: false,
|
||||
error: 'Generic error message',
|
||||
debug: {
|
||||
errorType: 'Error',
|
||||
stack: 'Error: Generic error message...',
|
||||
},
|
||||
},
|
||||
{ ok: false, status: 500 }
|
||||
)
|
||||
|
||||
// Execute the tool
|
||||
const result = await tester.execute({
|
||||
code: 'return "test";',
|
||||
})
|
||||
|
||||
// Check error handling without enhanced line info
|
||||
expect(result.success).toBe(false)
|
||||
expect(result.error).toBe('Generic error message')
|
||||
})
|
||||
|
||||
test('should provide line-specific error message when available', async () => {
|
||||
// Setup enhanced error response with line info
|
||||
tester.setup(
|
||||
{
|
||||
success: false,
|
||||
error:
|
||||
'Type Error: Line 5:20: `obj.nonExistentMethod()` - obj.nonExistentMethod is not a function',
|
||||
debug: {
|
||||
line: 5,
|
||||
column: 20,
|
||||
errorType: 'TypeError',
|
||||
lineContent: 'obj.nonExistentMethod()',
|
||||
},
|
||||
},
|
||||
{ ok: false, status: 500 }
|
||||
)
|
||||
|
||||
// Execute the tool
|
||||
const result = await tester.execute({
|
||||
code: 'const obj = {};\nobj.nonExistentMethod();',
|
||||
})
|
||||
|
||||
// Check that enhanced error message is provided
|
||||
expect(result.success).toBe(false)
|
||||
expect(result.error).toContain('Line 5:20')
|
||||
expect(result.error).toContain('obj.nonExistentMethod()')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
test('should handle empty code input', async () => {
|
||||
// Execute with empty code - this should still pass through to the API
|
||||
|
||||
@@ -28,6 +28,18 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
|
||||
description: 'Environment variables to make available during execution',
|
||||
default: {},
|
||||
},
|
||||
blockData: {
|
||||
type: 'object',
|
||||
required: false,
|
||||
description: 'Block output data for variable resolution',
|
||||
default: {},
|
||||
},
|
||||
blockNameMapping: {
|
||||
type: 'object',
|
||||
required: false,
|
||||
description: 'Mapping of block names to block IDs',
|
||||
default: {},
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -45,6 +57,8 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
|
||||
code: codeContent,
|
||||
timeout: params.timeout || DEFAULT_TIMEOUT,
|
||||
envVars: params.envVars || {},
|
||||
blockData: params.blockData || {},
|
||||
blockNameMapping: params.blockNameMapping || {},
|
||||
workflowId: params._context?.workflowId,
|
||||
isCustomTool: params.isCustomTool || false,
|
||||
}
|
||||
@@ -56,7 +70,21 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
|
||||
const result = await response.json()
|
||||
|
||||
if (!response.ok || !result.success) {
|
||||
throw new Error(result.error || 'Code execution failed')
|
||||
// Create enhanced error with debug information if available
|
||||
const error = new Error(result.error || 'Code execution failed')
|
||||
|
||||
// Add debug information to the error object if available
|
||||
if (result.debug) {
|
||||
Object.assign(error, {
|
||||
line: result.debug.line,
|
||||
column: result.debug.column,
|
||||
errorType: result.debug.errorType,
|
||||
stack: result.debug.stack,
|
||||
enhancedError: true,
|
||||
})
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -69,6 +97,10 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
|
||||
},
|
||||
|
||||
transformError: (error: any) => {
|
||||
// If we have enhanced error information, create a more detailed message
|
||||
if (error.enhancedError && error.line) {
|
||||
return `Line ${error.line}${error.column ? `:${error.column}` : ''} - ${error.message}`
|
||||
}
|
||||
return error.message || 'Code execution failed'
|
||||
},
|
||||
}
|
||||
|
||||
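As a rough illustration of what the new transformError produces when the executor returns debug info, the error object below mirrors the fields attached in transformResponse; the values are made up.

// Hypothetical enhanced error, shaped like the one built in transformResponse above
const err = Object.assign(new Error("Cannot read properties of null (reading 'someMethod')"), {
  enhancedError: true,
  line: 2,
  column: 16,
})

// transformError(err) would return:
// "Line 2:16 - Cannot read properties of null (reading 'someMethod')"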
@@ -5,6 +5,8 @@ export interface CodeExecutionInput {
|
||||
timeout?: number
|
||||
memoryLimit?: number
|
||||
envVars?: Record<string, string>
|
||||
blockData?: Record<string, any>
|
||||
blockNameMapping?: Record<string, string>
|
||||
_context?: {
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
apps/sim/tools/knowledge/create_document.ts (new file, 173 lines)
@@ -0,0 +1,173 @@
|
||||
import type { ToolConfig } from '../types'
|
||||
import type { KnowledgeCreateDocumentResponse } from './types'
|
||||
|
||||
export const knowledgeCreateDocumentTool: ToolConfig<any, KnowledgeCreateDocumentResponse> = {
|
||||
id: 'knowledge_create_document',
|
||||
name: 'Knowledge Create Document',
|
||||
description: 'Create a new document in a knowledge base',
|
||||
version: '1.0.0',
|
||||
params: {
|
||||
knowledgeBaseId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
description: 'ID of the knowledge base containing the document',
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
description: 'Name of the document',
|
||||
},
|
||||
content: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
description: 'Content of the document',
|
||||
},
|
||||
},
|
||||
request: {
|
||||
url: (params) => `/api/knowledge/${params.knowledgeBaseId}/documents`,
|
||||
method: 'POST',
|
||||
headers: () => ({
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => {
|
||||
const textContent = params.content?.trim()
|
||||
const documentName = params.name?.trim()
|
||||
|
||||
if (!documentName || documentName.length === 0) {
|
||||
throw new Error('Document name is required')
|
||||
}
|
||||
if (documentName.length > 255) {
|
||||
throw new Error('Document name must be 255 characters or less')
|
||||
}
|
||||
if (/[<>:"/\\|?*]/.test(documentName)) {
|
||||
throw new Error('Document name contains invalid characters. Avoid: < > : " / \\ | ? *')
|
||||
}
|
||||
if (!textContent || textContent.length < 10) {
|
||||
throw new Error('Document content must be at least 10 characters long')
|
||||
}
|
||||
if (textContent.length > 1000000) {
|
||||
throw new Error('Document content exceeds maximum size of 1MB')
|
||||
}
|
||||
|
||||
const contentBytes = new TextEncoder().encode(textContent).length
|
||||
|
||||
const utf8Bytes = new TextEncoder().encode(textContent)
|
||||
const base64Content =
|
||||
typeof Buffer !== 'undefined'
|
||||
? Buffer.from(textContent, 'utf8').toString('base64')
|
||||
: btoa(String.fromCharCode(...utf8Bytes))
|
||||
|
||||
const dataUri = `data:text/plain;base64,${base64Content}`
|
||||
|
||||
const documents = [
|
||||
{
|
||||
filename: documentName.endsWith('.txt') ? documentName : `${documentName}.txt`,
|
||||
fileUrl: dataUri,
|
||||
fileSize: contentBytes,
|
||||
mimeType: 'text/plain',
|
||||
},
|
||||
]
|
||||
|
||||
return {
|
||||
documents: documents,
|
||||
processingOptions: {
|
||||
chunkSize: 1024,
|
||||
minCharactersPerChunk: 100,
|
||||
chunkOverlap: 200,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}
|
||||
},
|
||||
isInternalRoute: true,
|
||||
},
|
||||
transformResponse: async (response): Promise<KnowledgeCreateDocumentResponse> => {
|
||||
try {
|
||||
const result = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMessage = result.error?.message || result.message || 'Failed to create document'
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
const data = result.data || result
|
||||
const documentsCreated = data.documentsCreated || []
|
||||
|
||||
// Handle multiple documents response
|
||||
const uploadCount = documentsCreated.length
|
||||
const firstDocument = documentsCreated[0]
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
data: {
|
||||
id: firstDocument?.documentId || firstDocument?.id || '',
|
||||
name:
|
||||
uploadCount > 1 ? `${uploadCount} documents` : firstDocument?.filename || 'Unknown',
|
||||
type: 'document',
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
enabled: true,
|
||||
},
|
||||
message:
|
||||
uploadCount > 1
|
||||
? `Successfully created ${uploadCount} documents in knowledge base`
|
||||
: `Successfully created document in knowledge base`,
|
||||
documentId: firstDocument?.documentId || firstDocument?.id || '',
|
||||
},
|
||||
}
|
||||
} catch (error: any) {
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
data: {
|
||||
id: '',
|
||||
name: '',
|
||||
type: '',
|
||||
enabled: true,
|
||||
createdAt: '',
|
||||
updatedAt: '',
|
||||
},
|
||||
message: `Failed to create document: ${error.message || 'Unknown error'}`,
|
||||
documentId: '',
|
||||
},
|
||||
error: `Failed to create document: ${error.message || 'Unknown error'}`,
|
||||
}
|
||||
}
|
||||
},
|
||||
transformError: async (error): Promise<KnowledgeCreateDocumentResponse> => {
|
||||
let errorMessage = 'Failed to create document'
|
||||
|
||||
if (error.message) {
|
||||
if (error.message.includes('Document name')) {
|
||||
errorMessage = `Document name error: ${error.message}`
|
||||
} else if (error.message.includes('Document content')) {
|
||||
errorMessage = `Document content error: ${error.message}`
|
||||
} else if (error.message.includes('invalid characters')) {
|
||||
errorMessage = `${error.message}. Please use a valid filename.`
|
||||
} else if (error.message.includes('maximum size')) {
|
||||
errorMessage = `${error.message}. Consider breaking large content into smaller documents.`
|
||||
} else {
|
||||
errorMessage = `Failed to create document: ${error.message}`
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
data: {
|
||||
id: '',
|
||||
name: '',
|
||||
type: '',
|
||||
enabled: true,
|
||||
createdAt: '',
|
||||
updatedAt: '',
|
||||
},
|
||||
message: errorMessage,
|
||||
documentId: '',
|
||||
},
|
||||
error: errorMessage,
|
||||
}
|
||||
},
|
||||
}
|
||||
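For illustration, the request body the tool builds for a short plain-text document would look roughly like this; the filename, byte count, and base64 payload are made-up examples, while the processingOptions mirror the hard-coded values above.

// Sketch of the POST body sent to /api/knowledge/<knowledgeBaseId>/documents
const exampleBody = {
  documents: [
    {
      filename: 'meeting-notes.txt',               // '.txt' is appended when missing
      fileUrl: 'data:text/plain;base64,SGVsbG8=',  // UTF-8 content encoded as a data URI (truncated example)
      fileSize: 1342,                              // byte length of the UTF-8 content
      mimeType: 'text/plain',
    },
  ],
  processingOptions: {
    chunkSize: 1024,
    minCharactersPerChunk: 100,
    chunkOverlap: 200,
    recipe: 'default',
    lang: 'en',
  },
  bulk: true,
}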
@@ -1,4 +1,5 @@
|
||||
import { knowledgeCreateDocumentTool } from './create_document'
|
||||
import { knowledgeSearchTool } from './search'
|
||||
import { knowledgeUploadChunkTool } from './upload_chunk'
|
||||
|
||||
export { knowledgeSearchTool, knowledgeUploadChunkTool }
|
||||
export { knowledgeSearchTool, knowledgeUploadChunkTool, knowledgeCreateDocumentTool }
|
||||
|
||||
@@ -49,3 +49,22 @@ export interface KnowledgeUploadChunkParams {
|
||||
content: string
|
||||
enabled?: boolean
|
||||
}
|
||||
|
||||
export interface KnowledgeCreateDocumentResult {
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
enabled: boolean
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
}
|
||||
|
||||
export interface KnowledgeCreateDocumentResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
data: KnowledgeCreateDocumentResult
|
||||
message: string
|
||||
documentId: string
|
||||
}
|
||||
error?: string
|
||||
}
|
||||
|
||||
@@ -53,7 +53,11 @@ import { contactsTool as hubspotContacts } from './hubspot/contacts'
|
||||
import { huggingfaceChatTool } from './huggingface'
|
||||
import { readUrlTool } from './jina'
|
||||
import { jiraBulkRetrieveTool, jiraRetrieveTool, jiraUpdateTool, jiraWriteTool } from './jira'
|
||||
import { knowledgeSearchTool, knowledgeUploadChunkTool } from './knowledge'
|
||||
import {
|
||||
knowledgeCreateDocumentTool,
|
||||
knowledgeSearchTool,
|
||||
knowledgeUploadChunkTool,
|
||||
} from './knowledge'
|
||||
import { linearCreateIssueTool, linearReadIssuesTool } from './linear'
|
||||
import { linkupSearchTool } from './linkup'
|
||||
import { mem0AddMemoriesTool, mem0GetMemoriesTool, mem0SearchMemoriesTool } from './mem0'
|
||||
@@ -191,6 +195,7 @@ export const tools: Record<string, ToolConfig> = {
|
||||
memory_delete: memoryDeleteTool,
|
||||
knowledge_search: knowledgeSearchTool,
|
||||
knowledge_upload_chunk: knowledgeUploadChunkTool,
|
||||
knowledge_create_document: knowledgeCreateDocumentTool,
|
||||
elevenlabs_tts: elevenLabsTtsTool,
|
||||
s3_get_object: s3GetObjectTool,
|
||||
telegram_message: telegramMessageTool,
|
||||
|
||||
@@ -1,6 +1,23 @@
|
||||
import type { ToolConfig } from '../types'
|
||||
import type { TelegramMessageParams, TelegramMessageResponse } from './types'
|
||||
|
||||
// Helper function to convert basic markdown to HTML
|
||||
function convertMarkdownToHTML(text: string): string {
|
||||
return (
|
||||
text
|
||||
// Bold: **text** or __text__ -> <b>text</b>
|
||||
.replace(/\*\*(.*?)\*\*/g, '<b>$1</b>')
|
||||
.replace(/__(.*?)__/g, '<b>$1</b>')
|
||||
// Italic: *text* or _text_ -> <i>text</i>
|
||||
.replace(/\*(.*?)\*/g, '<i>$1</i>')
|
||||
.replace(/_(.*?)_/g, '<i>$1</i>')
|
||||
// Code: `text` -> <code>text</code>
|
||||
.replace(/`(.*?)`/g, '<code>$1</code>')
|
||||
// Links: [text](url) -> <a href="url">text</a>
|
||||
.replace(/\[([^\]]+)\]\(([^)]+)\)/g, '<a href="$2">$1</a>')
|
||||
)
|
||||
}
|
||||
|
||||
export const telegramMessageTool: ToolConfig<TelegramMessageParams, TelegramMessageResponse> = {
|
||||
id: 'telegram_message',
|
||||
name: 'Telegram Message',
|
||||
@@ -36,7 +53,8 @@ export const telegramMessageTool: ToolConfig<TelegramMessageParams, TelegramMess
|
||||
}),
|
||||
body: (params: TelegramMessageParams) => ({
|
||||
chat_id: params.chatId,
|
||||
text: params.text,
|
||||
text: convertMarkdownToHTML(params.text),
|
||||
parse_mode: 'HTML',
|
||||
}),
|
||||
},
|
||||
|
||||
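A quick illustration of the conversion applied to the outgoing text; convertMarkdownToHTML is module-private, so the input and expected output are shown inline rather than imported, and the sample text is made up.

// Sample outgoing message text
const input = '**Deploy finished**: see `build.log` or [the dashboard](https://example.com)'

// convertMarkdownToHTML(input) yields:
// '<b>Deploy finished</b>: see <code>build.log</code> or <a href="https://example.com">the dashboard</a>'
// which is then sent to the Telegram API with parse_mode: 'HTML'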