Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 15:38:00 -05:00)

Compare commits: 2 commits

| Author | SHA1 | Date |
|---|---|---|
|  | 016cd6750c |  |
|  | 3b982533d1 |  |
@@ -764,6 +764,20 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
    bucket: 'test-s3-bucket',
    region: 'us-east-1',
  },
  S3_KB_CONFIG: {
    bucket: 'test-s3-kb-bucket',
    region: 'us-east-1',
  },
  BLOB_CONFIG: {
    accountName: 'testaccount',
    accountKey: 'testkey',
    containerName: 'test-container',
  },
  BLOB_KB_CONFIG: {
    accountName: 'testaccount',
    accountKey: 'testkey',
    containerName: 'test-kb-container',
  },
}))

vi.doMock('@aws-sdk/client-s3', () => ({

@@ -806,6 +820,11 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
    accountKey: 'testkey',
    containerName: 'test-container',
  },
  BLOB_KB_CONFIG: {
    accountName: 'testaccount',
    accountKey: 'testkey',
    containerName: 'test-kb-container',
  },
}))

vi.doMock('@azure/storage-blob', () => ({

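These mocked configs give tests a knowledge-base bucket and container that are distinct from the general-purpose ones. A minimal sketch of a test that relies on them, assuming `createStorageProviderMocks` registers a `vi.doMock` for `@/lib/uploads/setup` (the module the presigned route imports these configs from) and that the helper lives in a local test-utils module:

```ts
import { describe, expect, it } from 'vitest'
import { createStorageProviderMocks } from './test-utils' // helper location is an assumption

describe('storage provider mocks', () => {
  it('exposes a separate bucket for knowledge-base uploads', async () => {
    createStorageProviderMocks()

    // Import after mocking so the module under test sees the mocked configs
    const { S3_CONFIG, S3_KB_CONFIG } = await import('@/lib/uploads/setup')

    expect(S3_CONFIG.bucket).toBe('test-s3-bucket')
    expect(S3_KB_CONFIG.bucket).toBe('test-s3-kb-bucket')
  })
})
```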
@@ -39,8 +39,9 @@ describe('/api/files/presigned', () => {
    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
    expect(data.error).toBe('Direct uploads are only available when cloud storage is enabled')
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
    expect(data.directUploadSupported).toBe(false)
  })

@@ -64,7 +65,8 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('Missing fileName or contentType')
    expect(data.error).toBe('fileName is required and cannot be empty')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should return error when contentType is missing', async () => {

@@ -87,7 +89,59 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('Missing fileName or contentType')
    expect(data.error).toBe('contentType is required and cannot be empty')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should return error when fileSize is invalid', async () => {
    setupFileApiMocks({
      cloudEnabled: true,
      storageProvider: 's3',
    })

    const { POST } = await import('./route')

    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
      method: 'POST',
      body: JSON.stringify({
        fileName: 'test.txt',
        contentType: 'text/plain',
        fileSize: 0,
      }),
    })

    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('fileSize must be a positive number')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should return error when file size exceeds limit', async () => {
    setupFileApiMocks({
      cloudEnabled: true,
      storageProvider: 's3',
    })

    const { POST } = await import('./route')

    const largeFileSize = 150 * 1024 * 1024 // 150MB (exceeds 100MB limit)
    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
      method: 'POST',
      body: JSON.stringify({
        fileName: 'large-file.txt',
        contentType: 'text/plain',
        fileSize: largeFileSize,
      }),
    })

    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toContain('exceeds maximum allowed size')
    expect(data.code).toBe('VALIDATION_ERROR')
  })

  it('should generate S3 presigned URL successfully', async () => {

@@ -122,6 +176,34 @@ describe('/api/files/presigned', () => {
    expect(data.directUploadSupported).toBe(true)
  })

  it('should generate knowledge-base S3 presigned URL with kb prefix', async () => {
    setupFileApiMocks({
      cloudEnabled: true,
      storageProvider: 's3',
    })

    const { POST } = await import('./route')

    const request = new NextRequest(
      'http://localhost:3000/api/files/presigned?type=knowledge-base',
      {
        method: 'POST',
        body: JSON.stringify({
          fileName: 'knowledge-doc.pdf',
          contentType: 'application/pdf',
          fileSize: 2048,
        }),
      }
    )

    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(200)
    expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
    expect(data.directUploadSupported).toBe(true)
  })

  it('should generate Azure Blob presigned URL successfully', async () => {
    setupFileApiMocks({
      cloudEnabled: true,

@@ -182,8 +264,9 @@ describe('/api/files/presigned', () => {
    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(400)
    expect(data.error).toBe('Unknown storage provider')
    expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
    expect(data.error).toBe('Unknown storage provider: unknown') // Updated error message
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
    expect(data.directUploadSupported).toBe(false)
  })

@@ -225,8 +308,10 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(500)
    expect(data.error).toBe('Error')
    expect(data.message).toBe('S3 service unavailable')
    expect(data.error).toBe(
      'Failed to generate S3 presigned URL - check AWS credentials and permissions'
    ) // Updated error message
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
  })

  it('should handle Azure Blob errors gracefully', async () => {

@@ -269,8 +354,8 @@ describe('/api/files/presigned', () => {
    const data = await response.json()

    expect(response.status).toBe(500)
    expect(data.error).toBe('Error')
    expect(data.message).toBe('Azure service unavailable')
    expect(data.error).toBe('Failed to generate Azure Blob presigned URL') // Updated error message
    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
  })

  it('should handle malformed JSON gracefully', async () => {

@@ -289,9 +374,9 @@ describe('/api/files/presigned', () => {
    const response = await POST(request)
    const data = await response.json()

    expect(response.status).toBe(500)
    expect(data.error).toBe('SyntaxError')
    expect(data.message).toContain('Unexpected token')
    expect(response.status).toBe(400) // Changed from 500 to 400 (ValidationError)
    expect(data.error).toBe('Invalid JSON in request body') // Updated error message
    expect(data.code).toBe('VALIDATION_ERROR')
  })
})

@@ -6,7 +6,7 @@ import { createLogger } from '@/lib/logs/console-logger'
import { getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
import { getBlobServiceClient } from '@/lib/uploads/blob/blob-client'
import { getS3Client, sanitizeFilenameForMetadata } from '@/lib/uploads/s3/s3-client'
import { BLOB_CONFIG, S3_CONFIG } from '@/lib/uploads/setup'
import { BLOB_CONFIG, BLOB_KB_CONFIG, S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
import { createErrorResponse, createOptionsResponse } from '../utils'

const logger = createLogger('PresignedUploadAPI')

@@ -17,124 +17,148 @@ interface PresignedUrlRequest {
  fileSize: number
}

type UploadType = 'general' | 'knowledge-base'

class PresignedUrlError extends Error {
  constructor(
    message: string,
    public code: string,
    public statusCode = 400
  ) {
    super(message)
    this.name = 'PresignedUrlError'
  }
}

class StorageConfigError extends PresignedUrlError {
  constructor(message: string) {
    super(message, 'STORAGE_CONFIG_ERROR', 500)
  }
}

class ValidationError extends PresignedUrlError {
  constructor(message: string) {
    super(message, 'VALIDATION_ERROR', 400)
  }
}

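With this hierarchy, every failure surfaces as a JSON envelope carrying `error`, a machine-readable `code`, and `directUploadSupported: false` (see the catch block below), with validation problems reported as 400 and configuration problems as 500. A hedged sketch of how a caller could branch on that envelope; the endpoint and field names come from this route, the wrapper function is illustrative:

```ts
// Illustrative client-side handling of the error envelope returned by POST /api/files/presigned
async function requestPresignedUrl(fileName: string, contentType: string, fileSize: number) {
  const res = await fetch('/api/files/presigned', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileName, contentType, fileSize }),
  })
  const body = await res.json()
  if (res.ok) return body

  if (body.code === 'VALIDATION_ERROR') {
    // 400: the request itself is wrong (empty fileName, non-positive fileSize, malformed JSON, ...)
    throw new Error(`Invalid upload request: ${body.error}`)
  }
  if (body.code === 'STORAGE_CONFIG_ERROR') {
    // 500: cloud storage is disabled or misconfigured; callers can fall back to /api/files/upload
    throw new Error(`Storage unavailable: ${body.error}`)
  }
  throw new Error(body.error ?? 'Failed to get presigned URL')
}
```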
export async function POST(request: NextRequest) {
  try {
    // Parse the request body
    const data: PresignedUrlRequest = await request.json()
    const { fileName, contentType, fileSize } = data

    if (!fileName || !contentType) {
      return NextResponse.json({ error: 'Missing fileName or contentType' }, { status: 400 })
    let data: PresignedUrlRequest
    try {
      data = await request.json()
    } catch {
      throw new ValidationError('Invalid JSON in request body')
    }

    // Only proceed if cloud storage is enabled
    const { fileName, contentType, fileSize } = data

    if (!fileName?.trim()) {
      throw new ValidationError('fileName is required and cannot be empty')
    }
    if (!contentType?.trim()) {
      throw new ValidationError('contentType is required and cannot be empty')
    }
    if (!fileSize || fileSize <= 0) {
      throw new ValidationError('fileSize must be a positive number')
    }

    const MAX_FILE_SIZE = 100 * 1024 * 1024
    if (fileSize > MAX_FILE_SIZE) {
      throw new ValidationError(
        `File size (${fileSize} bytes) exceeds maximum allowed size (${MAX_FILE_SIZE} bytes)`
      )
    }

    const uploadTypeParam = request.nextUrl.searchParams.get('type')
    const uploadType: UploadType =
      uploadTypeParam === 'knowledge-base' ? 'knowledge-base' : 'general'

    if (!isUsingCloudStorage()) {
      return NextResponse.json(
        {
          error: 'Direct uploads are only available when cloud storage is enabled',
          directUploadSupported: false,
        },
        { status: 400 }
      throw new StorageConfigError(
        'Direct uploads are only available when cloud storage is enabled'
      )
    }

    const storageProvider = getStorageProvider()
    logger.info(`Generating ${uploadType} presigned URL for ${fileName} using ${storageProvider}`)

    switch (storageProvider) {
      case 's3':
        return await handleS3PresignedUrl(fileName, contentType, fileSize)
        return await handleS3PresignedUrl(fileName, contentType, fileSize, uploadType)
      case 'blob':
        return await handleBlobPresignedUrl(fileName, contentType, fileSize)
        return await handleBlobPresignedUrl(fileName, contentType, fileSize, uploadType)
      default:
        return NextResponse.json(
          {
            error: 'Unknown storage provider',
            directUploadSupported: false,
          },
          { status: 400 }
        )
        throw new StorageConfigError(`Unknown storage provider: ${storageProvider}`)
    }
  } catch (error) {
    logger.error('Error generating presigned URL:', error)

    if (error instanceof PresignedUrlError) {
      return NextResponse.json(
        {
          error: error.message,
          code: error.code,
          directUploadSupported: false,
        },
        { status: error.statusCode }
      )
    }

    return createErrorResponse(
      error instanceof Error ? error : new Error('Failed to generate presigned URL')
    )
  }
}

async function handleS3PresignedUrl(fileName: string, contentType: string, fileSize: number) {
  // Create a unique key for the file
  const safeFileName = fileName.replace(/\s+/g, '-')
  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`

  // Sanitize the original filename for S3 metadata to prevent header errors
  const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)

  // Create the S3 command
  const command = new PutObjectCommand({
    Bucket: S3_CONFIG.bucket,
    Key: uniqueKey,
    ContentType: contentType,
    Metadata: {
      originalName: sanitizedOriginalName,
      uploadedAt: new Date().toISOString(),
    },
  })

  // Generate the presigned URL
  const presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })

  // Create a path for API to serve the file
  const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`

  logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)

  return NextResponse.json({
    presignedUrl,
    fileInfo: {
      path: servePath,
      key: uniqueKey,
      name: fileName,
      size: fileSize,
      type: contentType,
    },
    directUploadSupported: true,
  })
}

async function handleBlobPresignedUrl(fileName: string, contentType: string, fileSize: number) {
  // Create a unique key for the file
  const safeFileName = fileName.replace(/\s+/g, '-')
  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`

async function handleS3PresignedUrl(
  fileName: string,
  contentType: string,
  fileSize: number,
  uploadType: UploadType
) {
  try {
    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
    const config = uploadType === 'knowledge-base' ? S3_KB_CONFIG : S3_CONFIG

    // Generate SAS token for upload (write permission)
    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
      await import('@azure/storage-blob')

    const sasOptions = {
      containerName: BLOB_CONFIG.containerName,
      blobName: uniqueKey,
      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
    if (!config.bucket || !config.region) {
      throw new StorageConfigError(`S3 configuration missing for ${uploadType} uploads`)
    }

    const sasToken = generateBlobSASQueryParameters(
      sasOptions,
      new StorageSharedKeyCredential(BLOB_CONFIG.accountName, BLOB_CONFIG.accountKey || '')
    ).toString()
    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`

    const presignedUrl = `${blockBlobClient.url}?${sasToken}`
    const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)

    // Create a path for API to serve the file
    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
    const metadata: Record<string, string> = {
      originalName: sanitizedOriginalName,
      uploadedAt: new Date().toISOString(),
    }

    logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
    if (uploadType === 'knowledge-base') {
      metadata.purpose = 'knowledge-base'
    }

    const command = new PutObjectCommand({
      Bucket: config.bucket,
      Key: uniqueKey,
      ContentType: contentType,
      Metadata: metadata,
    })

    let presignedUrl: string
    try {
      presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
    } catch (s3Error) {
      logger.error('Failed to generate S3 presigned URL:', s3Error)
      throw new StorageConfigError(
        'Failed to generate S3 presigned URL - check AWS credentials and permissions'
      )
    }

    const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`

    logger.info(`Generated ${uploadType} S3 presigned URL for ${fileName} (${uniqueKey})`)

    return NextResponse.json({
      presignedUrl,

@@ -146,22 +170,103 @@ async function handleBlobPresignedUrl(fileName: string, contentType: string, fil
        type: contentType,
      },
      directUploadSupported: true,
      uploadHeaders: {
        'x-ms-blob-type': 'BlockBlob',
        'x-ms-blob-content-type': contentType,
        'x-ms-meta-originalname': encodeURIComponent(fileName),
        'x-ms-meta-uploadedat': new Date().toISOString(),
      },
    })
  } catch (error) {
    logger.error('Error generating Blob presigned URL:', error)
    return createErrorResponse(
      error instanceof Error ? error : new Error('Failed to generate Blob presigned URL')
    )
    if (error instanceof PresignedUrlError) {
      throw error
    }
    logger.error('Error in S3 presigned URL generation:', error)
    throw new StorageConfigError('Failed to generate S3 presigned URL')
  }
}

async function handleBlobPresignedUrl(
  fileName: string,
  contentType: string,
  fileSize: number,
  uploadType: UploadType
) {
  try {
    const config = uploadType === 'knowledge-base' ? BLOB_KB_CONFIG : BLOB_CONFIG

    if (
      !config.accountName ||
      !config.containerName ||
      (!config.accountKey && !config.connectionString)
    ) {
      throw new StorageConfigError(`Azure Blob configuration missing for ${uploadType} uploads`)
    }

    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`

    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(config.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)

    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
      await import('@azure/storage-blob')

    const sasOptions = {
      containerName: config.containerName,
      blobName: uniqueKey,
      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
    }

    let sasToken: string
    try {
      sasToken = generateBlobSASQueryParameters(
        sasOptions,
        new StorageSharedKeyCredential(config.accountName, config.accountKey || '')
      ).toString()
    } catch (blobError) {
      logger.error('Failed to generate Azure Blob SAS token:', blobError)
      throw new StorageConfigError(
        'Failed to generate Azure Blob SAS token - check Azure credentials and permissions'
      )
    }

    const presignedUrl = `${blockBlobClient.url}?${sasToken}`

    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`

    logger.info(`Generated ${uploadType} Azure Blob presigned URL for ${fileName} (${uniqueKey})`)

    const uploadHeaders: Record<string, string> = {
      'x-ms-blob-type': 'BlockBlob',
      'x-ms-blob-content-type': contentType,
      'x-ms-meta-originalname': encodeURIComponent(fileName),
      'x-ms-meta-uploadedat': new Date().toISOString(),
    }

    if (uploadType === 'knowledge-base') {
      uploadHeaders['x-ms-meta-purpose'] = 'knowledge-base'
    }

    return NextResponse.json({
      presignedUrl,
      fileInfo: {
        path: servePath,
        key: uniqueKey,
        name: fileName,
        size: fileSize,
        type: contentType,
      },
      directUploadSupported: true,
      uploadHeaders,
    })
  } catch (error) {
    if (error instanceof PresignedUrlError) {
      throw error
    }
    logger.error('Error in Azure Blob presigned URL generation:', error)
    throw new StorageConfigError('Failed to generate Azure Blob presigned URL')
  }
}

// Handle preflight requests
export async function OPTIONS() {
  return createOptionsResponse()
}

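Taken together, a successful response gives the browser everything needed for a direct upload: the `presignedUrl` to PUT to, `uploadHeaders` for the Azure case, and `fileInfo.path` for serving the file afterwards. A condensed sketch of the client half of that flow, assuming the response shape above (the knowledge-base variant only adds `?type=knowledge-base` to the URL):

```ts
// Sketch: direct browser upload against the response shape returned by this route
async function directUpload(file: File): Promise<string> {
  const presignedRes = await fetch('/api/files/presigned', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileName: file.name, contentType: file.type, fileSize: file.size }),
  })
  const presigned = await presignedRes.json()
  if (!presignedRes.ok || !presigned.directUploadSupported) {
    throw new Error(presigned.error ?? 'Direct upload not available')
  }

  // S3 only needs Content-Type; Azure also needs the x-ms-* headers returned by the route
  const headers: Record<string, string> = {
    'Content-Type': file.type,
    ...(presigned.uploadHeaders ?? {}),
  }

  const putRes = await fetch(presigned.presignedUrl, { method: 'PUT', headers, body: file })
  if (!putRes.ok) throw new Error(`Upload failed: ${putRes.status} ${putRes.statusText}`)

  // Relative serve path, e.g. /api/files/serve/s3/<encoded-key>
  return presigned.fileInfo.path
}
```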
@@ -1,7 +1,8 @@
import { readFile } from 'fs/promises'
import type { NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
import { downloadFile, getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
import { BLOB_KB_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
import '@/lib/uploads/setup.server'

import {

@@ -16,6 +17,19 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('FilesServeAPI')

async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = []
    readableStream.on('data', (data) => {
      chunks.push(data instanceof Buffer ? data : Buffer.from(data))
    })
    readableStream.on('end', () => {
      resolve(Buffer.concat(chunks))
    })
    readableStream.on('error', reject)
  })
}

/**
 * Main API route handler for serving files
 */

@@ -85,12 +99,65 @@ async function handleLocalFile(filename: string): Promise<NextResponse> {
  }
}

async function downloadKBFile(cloudKey: string): Promise<Buffer> {
  const storageProvider = getStorageProvider()

  if (storageProvider === 'blob') {
    logger.info(`Downloading KB file from Azure Blob Storage: ${cloudKey}`)
    // Use KB-specific blob configuration
    const { getBlobServiceClient } = await import('@/lib/uploads/blob/blob-client')
    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(BLOB_KB_CONFIG.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(cloudKey)

    const downloadBlockBlobResponse = await blockBlobClient.download()
    if (!downloadBlockBlobResponse.readableStreamBody) {
      throw new Error('Failed to get readable stream from blob download')
    }

    // Convert stream to buffer
    return await streamToBuffer(downloadBlockBlobResponse.readableStreamBody)
  }

  if (storageProvider === 's3') {
    logger.info(`Downloading KB file from S3: ${cloudKey}`)
    // Use KB-specific S3 configuration
    const { getS3Client } = await import('@/lib/uploads/s3/s3-client')
    const { GetObjectCommand } = await import('@aws-sdk/client-s3')

    const s3Client = getS3Client()
    const command = new GetObjectCommand({
      Bucket: S3_KB_CONFIG.bucket,
      Key: cloudKey,
    })

    const response = await s3Client.send(command)
    if (!response.Body) {
      throw new Error('No body in S3 response')
    }

    // Convert stream to buffer using the same method as the regular S3 client
    const stream = response.Body as any
    return new Promise<Buffer>((resolve, reject) => {
      const chunks: Buffer[] = []
      stream.on('data', (chunk: Buffer) => chunks.push(chunk))
      stream.on('end', () => resolve(Buffer.concat(chunks)))
      stream.on('error', reject)
    })
  }

  throw new Error(`Unsupported storage provider for KB files: ${storageProvider}`)
}

/**
 * Proxy cloud file through our server
 */
async function handleCloudProxy(cloudKey: string): Promise<NextResponse> {
  try {
    const fileBuffer = await downloadFile(cloudKey)
    // Check if this is a KB file (starts with 'kb/')
    const isKBFile = cloudKey.startsWith('kb/')

    const fileBuffer = isKBFile ? await downloadKBFile(cloudKey) : await downloadFile(cloudKey)

    // Extract the original filename from the key (last part after last /)
    const originalFilename = cloudKey.split('/').pop() || 'download'

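The `kb/` prefix written by the presigned route is what `handleCloudProxy` keys off here: knowledge-base files are fetched through `downloadKBFile` with the `S3_KB_CONFIG`/`BLOB_KB_CONFIG` settings, while everything else goes through the default `downloadFile` path. A tiny worked example of that routing decision (the key values are illustrative):

```ts
// Mirrors the check in handleCloudProxy above
const isKBFile = (cloudKey: string) => cloudKey.startsWith('kb/')

isKBFile('kb/1736450000000-uuid-report.pdf') // true  -> downloadKBFile (KB bucket/container)
isKBFile('1736450000000-uuid-avatar.png')    // false -> downloadFile (default storage config)
```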
@@ -104,7 +104,7 @@ async function createWorkspace(userId: string, name: string) {
    updatedAt: now,
  })

  // Create "Workflow 1" for the workspace with start block
  // Create initial workflow for the workspace with start block
  const starterId = crypto.randomUUID()
  const initialState = {
    blocks: {

@@ -170,7 +170,7 @@ async function createWorkspace(userId: string, name: string) {
    userId,
    workspaceId,
    folderId: null,
    name: 'Workflow 1',
    name: 'default-agent',
    description: 'Your first workflow - start building here!',
    state: initialState,
    color: '#3972F6',

@@ -1,7 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { AlertCircle, Loader2, X } from 'lucide-react'
|
||||
import { AlertCircle, ChevronDown, ChevronUp, Loader2, X } from 'lucide-react'
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogAction,
|
||||
@@ -16,6 +16,7 @@ import { Button } from '@/components/ui/button'
|
||||
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import type { ChunkData, DocumentData } from '@/stores/knowledge/store'
|
||||
|
||||
@@ -28,6 +29,12 @@ interface EditChunkModalProps {
|
||||
isOpen: boolean
|
||||
onClose: () => void
|
||||
onChunkUpdate?: (updatedChunk: ChunkData) => void
|
||||
// New props for navigation
|
||||
allChunks?: ChunkData[]
|
||||
currentPage?: number
|
||||
totalPages?: number
|
||||
onNavigateToChunk?: (chunk: ChunkData) => void
|
||||
onNavigateToPage?: (page: number, selectChunk: 'first' | 'last') => Promise<void>
|
||||
}
|
||||
|
||||
export function EditChunkModal({
|
||||
@@ -37,11 +44,18 @@ export function EditChunkModal({
|
||||
isOpen,
|
||||
onClose,
|
||||
onChunkUpdate,
|
||||
allChunks = [],
|
||||
currentPage = 1,
|
||||
totalPages = 1,
|
||||
onNavigateToChunk,
|
||||
onNavigateToPage,
|
||||
}: EditChunkModalProps) {
|
||||
const [editedContent, setEditedContent] = useState(chunk?.content || '')
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [isNavigating, setIsNavigating] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
|
||||
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
|
||||
|
||||
// Check if there are unsaved changes
|
||||
const hasUnsavedChanges = editedContent !== (chunk?.content || '')
|
||||
@@ -53,6 +67,13 @@ export function EditChunkModal({
|
||||
}
|
||||
}, [chunk?.id, chunk?.content])
|
||||
|
||||
// Find current chunk index in the current page
|
||||
const currentChunkIndex = chunk ? allChunks.findIndex((c) => c.id === chunk.id) : -1
|
||||
|
||||
// Calculate navigation availability
|
||||
const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
|
||||
const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages
|
||||
|
||||
const handleSaveContent = async () => {
|
||||
if (!chunk || !document) return
|
||||
|
||||
@@ -82,7 +103,6 @@ export function EditChunkModal({
|
||||
|
||||
if (result.success && onChunkUpdate) {
|
||||
onChunkUpdate(result.data)
|
||||
onClose()
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error updating chunk:', err)
|
||||
@@ -92,8 +112,51 @@ export function EditChunkModal({
|
||||
}
|
||||
}
|
||||
|
||||
const navigateToChunk = async (direction: 'prev' | 'next') => {
|
||||
if (!chunk || isNavigating) return
|
||||
|
||||
try {
|
||||
setIsNavigating(true)
|
||||
|
||||
if (direction === 'prev') {
|
||||
if (currentChunkIndex > 0) {
|
||||
// Navigate to previous chunk in current page
|
||||
const prevChunk = allChunks[currentChunkIndex - 1]
|
||||
onNavigateToChunk?.(prevChunk)
|
||||
} else if (currentPage > 1) {
|
||||
// Load previous page and navigate to last chunk
|
||||
await onNavigateToPage?.(currentPage - 1, 'last')
|
||||
}
|
||||
} else {
|
||||
if (currentChunkIndex < allChunks.length - 1) {
|
||||
// Navigate to next chunk in current page
|
||||
const nextChunk = allChunks[currentChunkIndex + 1]
|
||||
onNavigateToChunk?.(nextChunk)
|
||||
} else if (currentPage < totalPages) {
|
||||
// Load next page and navigate to first chunk
|
||||
await onNavigateToPage?.(currentPage + 1, 'first')
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`Error navigating ${direction}:`, err)
|
||||
setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
|
||||
} finally {
|
||||
setIsNavigating(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleNavigate = (direction: 'prev' | 'next') => {
|
||||
if (hasUnsavedChanges) {
|
||||
setPendingNavigation(() => () => navigateToChunk(direction))
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
void navigateToChunk(direction)
|
||||
}
|
||||
}
|
||||
|
||||
const handleCloseAttempt = () => {
|
||||
if (hasUnsavedChanges && !isSaving) {
|
||||
setPendingNavigation(null)
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
onClose()
|
||||
@@ -102,7 +165,12 @@ export function EditChunkModal({
|
||||
|
||||
const handleConfirmDiscard = () => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
onClose()
|
||||
if (pendingNavigation) {
|
||||
void pendingNavigation()
|
||||
setPendingNavigation(null)
|
||||
} else {
|
||||
onClose()
|
||||
}
|
||||
}
|
||||
|
||||
const isFormValid = editedContent.trim().length > 0 && editedContent.trim().length <= 10000
|
||||
@@ -118,7 +186,59 @@ export function EditChunkModal({
|
||||
>
|
||||
<DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
<div className='flex items-center gap-3'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
|
||||
{/* Navigation Controls */}
|
||||
<div className='flex items-center gap-1'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('prev')}
|
||||
disabled={!canNavigatePrev || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronUp className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Previous chunk{' '}
|
||||
{currentPage > 1 && currentChunkIndex === 0 ? '(previous page)' : ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('next')}
|
||||
disabled={!canNavigateNext || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronDown className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Next chunk{' '}
|
||||
{currentPage < totalPages && currentChunkIndex === allChunks.length - 1
|
||||
? '(next page)'
|
||||
: ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='icon'
|
||||
@@ -142,7 +262,7 @@ export function EditChunkModal({
|
||||
{document?.filename || 'Unknown Document'}
|
||||
</p>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
Editing chunk #{chunk.chunkIndex}
|
||||
Editing chunk #{chunk.chunkIndex} • Page {currentPage} of {totalPages}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -167,7 +287,7 @@ export function EditChunkModal({
|
||||
onChange={(e) => setEditedContent(e.target.value)}
|
||||
placeholder='Enter chunk content...'
|
||||
className='flex-1 resize-none'
|
||||
disabled={isSaving}
|
||||
disabled={isSaving || isNavigating}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
@@ -176,12 +296,16 @@ export function EditChunkModal({
|
||||
{/* Footer */}
|
||||
<div className='mt-auto border-t px-6 pt-4 pb-6'>
|
||||
<div className='flex justify-between'>
|
||||
<Button variant='outline' onClick={handleCloseAttempt} disabled={isSaving}>
|
||||
<Button
|
||||
variant='outline'
|
||||
onClick={handleCloseAttempt}
|
||||
disabled={isSaving || isNavigating}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleSaveContent}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges || isNavigating}
|
||||
className='bg-[#701FFC] font-[480] text-primary-foreground shadow-[0_0_0_0_#701FFC] transition-all duration-200 hover:bg-[#6518E6] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
||||
>
|
||||
{isSaving ? (
|
||||
@@ -205,12 +329,19 @@ export function EditChunkModal({
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Unsaved Changes</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
You have unsaved changes to this chunk content. Are you sure you want to discard your
|
||||
changes and close the editor?
|
||||
You have unsaved changes to this chunk content.
|
||||
{pendingNavigation
|
||||
? ' Do you want to discard your changes and navigate to the next chunk?'
|
||||
: ' Are you sure you want to discard your changes and close the editor?'}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel onClick={() => setShowUnsavedChangesAlert(false)}>
|
||||
<AlertDialogCancel
|
||||
onClick={() => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
setPendingNavigation(null)
|
||||
}}
|
||||
>
|
||||
Keep Editing
|
||||
</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
|
||||
@@ -767,6 +767,30 @@ export function Document({
            updateChunk(updatedChunk.id, updatedChunk)
            setSelectedChunk(updatedChunk)
          }}
          allChunks={chunks}
          currentPage={currentPage}
          totalPages={totalPages}
          onNavigateToChunk={(chunk: ChunkData) => {
            setSelectedChunk(chunk)
          }}
          onNavigateToPage={async (page: number, selectChunk: 'first' | 'last') => {
            await goToPage(page)

            const checkAndSelectChunk = () => {
              if (!isLoadingChunks && chunks.length > 0) {
                if (selectChunk === 'first') {
                  setSelectedChunk(chunks[0])
                } else {
                  setSelectedChunk(chunks[chunks.length - 1])
                }
              } else {
                // Retry after a short delay if chunks aren't loaded yet
                setTimeout(checkAndSelectChunk, 100)
              }
            }

            setTimeout(checkAndSelectChunk, 0)
          }}
        />

        {/* Create Chunk Modal */}

@@ -36,16 +36,11 @@ import { useKnowledgeBase, useKnowledgeBaseDocuments } from '@/hooks/use-knowled
|
||||
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import { useSidebarStore } from '@/stores/sidebar/store'
|
||||
import { KnowledgeHeader } from '../components/knowledge-header/knowledge-header'
|
||||
import { useKnowledgeUpload } from '../hooks/use-knowledge-upload'
|
||||
import { KnowledgeBaseLoading } from './components/knowledge-base-loading/knowledge-base-loading'
|
||||
|
||||
const logger = createLogger('KnowledgeBase')
|
||||
|
||||
interface ProcessedDocumentResponse {
|
||||
documentId: string
|
||||
filename: string
|
||||
status: string
|
||||
}
|
||||
|
||||
interface KnowledgeBaseProps {
|
||||
id: string
|
||||
knowledgeBaseName?: string
|
||||
@@ -145,17 +140,32 @@ export function KnowledgeBase({
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
const [isBulkOperating, setIsBulkOperating] = useState(false)
|
||||
const [isUploading, setIsUploading] = useState(false)
|
||||
const [uploadError, setUploadError] = useState<{
|
||||
message: string
|
||||
timestamp: number
|
||||
} | null>(null)
|
||||
const [uploadProgress, setUploadProgress] = useState<{
|
||||
stage: 'idle' | 'uploading' | 'processing' | 'completing'
|
||||
filesCompleted: number
|
||||
totalFiles: number
|
||||
currentFile?: string
|
||||
}>({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
|
||||
const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
|
||||
onUploadComplete: async (uploadedFiles) => {
|
||||
const pendingDocuments: DocumentData[] = uploadedFiles.map((file, index) => ({
|
||||
id: `temp-${Date.now()}-${index}`,
|
||||
knowledgeBaseId: id,
|
||||
filename: file.filename,
|
||||
fileUrl: file.fileUrl,
|
||||
fileSize: file.fileSize,
|
||||
mimeType: file.mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}))
|
||||
|
||||
useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
|
||||
|
||||
await refreshDocuments()
|
||||
},
|
||||
})
|
||||
const router = useRouter()
|
||||
const fileInputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
@@ -240,11 +250,11 @@ export function KnowledgeBase({
|
||||
useEffect(() => {
|
||||
if (uploadError) {
|
||||
const timer = setTimeout(() => {
|
||||
setUploadError(null)
|
||||
clearError()
|
||||
}, 8000)
|
||||
return () => clearTimeout(timer)
|
||||
}
|
||||
}, [uploadError])
|
||||
}, [uploadError, clearError])
|
||||
|
||||
// Filter documents based on search query
|
||||
const filteredDocuments = documents.filter((doc) =>
|
||||
@@ -448,153 +458,18 @@ export function KnowledgeBase({
|
||||
const files = e.target.files
|
||||
if (!files || files.length === 0) return
|
||||
|
||||
interface UploadedFile {
|
||||
filename: string
|
||||
fileUrl: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
}
|
||||
|
||||
try {
|
||||
setIsUploading(true)
|
||||
setUploadError(null)
|
||||
setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })
|
||||
|
||||
// Upload all files and start processing
|
||||
const uploadedFiles: UploadedFile[] = []
|
||||
const fileArray = Array.from(files)
|
||||
|
||||
for (const [index, file] of fileArray.entries()) {
|
||||
setUploadProgress((prev) => ({ ...prev, currentFile: file.name, filesCompleted: index }))
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
|
||||
const uploadResponse = await fetch('/api/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json()
|
||||
throw new Error(`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
const uploadResult = await uploadResponse.json()
|
||||
|
||||
// Validate upload result structure
|
||||
if (!uploadResult.path) {
|
||||
throw new Error(`Invalid upload response for ${file.name}: missing file path`)
|
||||
}
|
||||
|
||||
uploadedFiles.push({
|
||||
filename: file.name,
|
||||
fileUrl: uploadResult.path.startsWith('http')
|
||||
? uploadResult.path
|
||||
: `${window.location.origin}${uploadResult.path}`,
|
||||
fileSize: file.size,
|
||||
mimeType: file.type,
|
||||
})
|
||||
}
|
||||
|
||||
setUploadProgress((prev) => ({
|
||||
...prev,
|
||||
stage: 'processing',
|
||||
filesCompleted: fileArray.length,
|
||||
}))
|
||||
|
||||
// Start async document processing
|
||||
const processResponse = await fetch(`/api/knowledge/${id}/documents`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
documents: uploadedFiles,
|
||||
processingOptions: {
|
||||
chunkSize: knowledgeBase?.chunkingConfig?.maxSize || 1024,
|
||||
minCharactersPerChunk: knowledgeBase?.chunkingConfig?.minSize || 100,
|
||||
chunkOverlap: knowledgeBase?.chunkingConfig?.overlap || 200,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}),
|
||||
const chunkingConfig = knowledgeBase?.chunkingConfig
|
||||
await uploadFiles(Array.from(files), id, {
|
||||
chunkSize: chunkingConfig?.maxSize || 1024,
|
||||
minCharactersPerChunk: chunkingConfig?.minSize || 100,
|
||||
chunkOverlap: chunkingConfig?.overlap || 200,
|
||||
recipe: 'default',
|
||||
})
|
||||
|
||||
if (!processResponse.ok) {
|
||||
const errorData = await processResponse.json()
|
||||
throw new Error(
|
||||
`Failed to start document processing: ${errorData.error || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Validate process result structure
|
||||
if (!processResult.success) {
|
||||
throw new Error(`Document processing failed: ${processResult.error || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
if (!processResult.data || !processResult.data.documentsCreated) {
|
||||
throw new Error('Invalid processing response: missing document data')
|
||||
}
|
||||
|
||||
// Create pending document objects and add them to the store immediately
|
||||
const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
|
||||
(doc: ProcessedDocumentResponse, index: number) => {
|
||||
if (!doc.documentId || !doc.filename) {
|
||||
logger.error(`Invalid document data received:`, doc)
|
||||
throw new Error(
|
||||
`Invalid document data for ${uploadedFiles[index]?.filename || 'unknown file'}`
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
id: doc.documentId,
|
||||
knowledgeBaseId: id,
|
||||
filename: doc.filename,
|
||||
fileUrl: uploadedFiles[index].fileUrl,
|
||||
fileSize: uploadedFiles[index].fileSize,
|
||||
mimeType: uploadedFiles[index].mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
// Add pending documents to store for immediate UI update
|
||||
useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
|
||||
|
||||
logger.info(`Successfully started processing ${uploadedFiles.length} documents`)
|
||||
|
||||
setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))
|
||||
|
||||
// Trigger a refresh to ensure documents are properly loaded
|
||||
await refreshDocuments()
|
||||
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
} catch (err) {
|
||||
logger.error('Error uploading documents:', err)
|
||||
|
||||
const errorMessage =
|
||||
err instanceof Error ? err.message : 'Unknown error occurred during upload'
|
||||
setUploadError({
|
||||
message: errorMessage,
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
|
||||
// Show user-friendly error message in console for debugging
|
||||
console.error('Document upload failed:', errorMessage)
|
||||
} catch (error) {
|
||||
logger.error('Error uploading files:', error)
|
||||
// Error handling is managed by the upload hook
|
||||
} finally {
|
||||
setIsUploading(false)
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
// Reset the file input
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.value = ''
|
||||
@@ -995,7 +870,7 @@ export function KnowledgeBase({
|
||||
</tr>
|
||||
))
|
||||
) : (
|
||||
filteredDocuments.map((doc, index) => {
|
||||
filteredDocuments.map((doc) => {
|
||||
const isSelected = selectedDocuments.has(doc.id)
|
||||
const statusDisplay = getStatusDisplay(doc)
|
||||
// const processingTime = getProcessingTime(doc)
|
||||
@@ -1254,7 +1129,7 @@ export function KnowledgeBase({
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => setUploadError(null)}
|
||||
onClick={() => clearError()}
|
||||
className='flex-shrink-0 rounded-sm opacity-70 hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring'
|
||||
>
|
||||
<X className='h-4 w-4' />
|
||||
|
||||
@@ -13,8 +13,8 @@ import { Label } from '@/components/ui/label'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
|
||||
import type { DocumentData, KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
import { useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
import { useKnowledgeUpload } from '../../hooks/use-knowledge-upload'
|
||||
|
||||
const logger = createLogger('CreateModal')
|
||||
|
||||
@@ -29,12 +29,6 @@ const ACCEPTED_FILE_TYPES = [
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
||||
]
|
||||
|
||||
interface ProcessedDocumentResponse {
|
||||
documentId: string
|
||||
filename: string
|
||||
status: string
|
||||
}
|
||||
|
||||
interface FileWithPreview extends File {
|
||||
preview: string
|
||||
}
|
||||
@@ -89,6 +83,12 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
const scrollContainerRef = useRef<HTMLDivElement>(null)
|
||||
const dropZoneRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
const { uploadFiles } = useKnowledgeUpload({
|
||||
onUploadComplete: (uploadedFiles) => {
|
||||
logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
|
||||
},
|
||||
})
|
||||
|
||||
// Cleanup file preview URLs when component unmounts to prevent memory leaks
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
@@ -235,19 +235,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
|
||||
}
|
||||
|
||||
// Helper function to create uploadedFiles array from file uploads
|
||||
const createUploadedFile = (
|
||||
filename: string,
|
||||
fileUrl: string,
|
||||
fileSize: number,
|
||||
mimeType: string
|
||||
) => ({
|
||||
filename,
|
||||
fileUrl: fileUrl.startsWith('http') ? fileUrl : `${window.location.origin}${fileUrl}`,
|
||||
fileSize,
|
||||
mimeType,
|
||||
})
|
||||
|
||||
const onSubmit = async (data: FormValues) => {
|
||||
setIsSubmitting(true)
|
||||
setSubmitStatus(null)
|
||||
@@ -285,138 +272,14 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
|
||||
const newKnowledgeBase = result.data
|
||||
|
||||
// If files are uploaded, upload them and start processing
|
||||
if (files.length > 0) {
|
||||
// First, upload all files to get their URLs
|
||||
interface UploadedFile {
|
||||
filename: string
|
||||
fileUrl: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
}
|
||||
|
||||
const uploadedFiles: UploadedFile[] = []
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
const presignedResponse = await fetch('/api/files/presigned', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
fileName: file.name,
|
||||
contentType: file.type,
|
||||
fileSize: file.size,
|
||||
}),
|
||||
})
|
||||
|
||||
const presignedData = await presignedResponse.json()
|
||||
|
||||
if (presignedResponse.ok && presignedData.directUploadSupported) {
|
||||
const uploadHeaders: Record<string, string> = {
|
||||
'Content-Type': file.type,
|
||||
}
|
||||
|
||||
// Add Azure-specific headers if provided
|
||||
if (presignedData.uploadHeaders) {
|
||||
Object.assign(uploadHeaders, presignedData.uploadHeaders)
|
||||
}
|
||||
|
||||
const uploadResponse = await fetch(presignedData.presignedUrl, {
|
||||
method: 'PUT',
|
||||
headers: uploadHeaders, // Use the merged headers
|
||||
body: file,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
throw new Error(
|
||||
`Direct upload failed: ${uploadResponse.status} ${uploadResponse.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
uploadedFiles.push(
|
||||
createUploadedFile(file.name, presignedData.fileInfo.path, file.size, file.type)
|
||||
)
|
||||
} else {
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
|
||||
const uploadResponse = await fetch('/api/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json()
|
||||
throw new Error(
|
||||
`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const uploadResult = await uploadResponse.json()
|
||||
uploadedFiles.push(
|
||||
createUploadedFile(file.name, uploadResult.path, file.size, file.type)
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to upload ${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Start async document processing
|
||||
const processResponse = await fetch(`/api/knowledge/${newKnowledgeBase.id}/documents`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
documents: uploadedFiles,
|
||||
processingOptions: {
|
||||
chunkSize: data.maxChunkSize,
|
||||
minCharactersPerChunk: data.minChunkSize,
|
||||
chunkOverlap: data.overlapSize,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}),
|
||||
const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, {
|
||||
chunkSize: data.maxChunkSize,
|
||||
minCharactersPerChunk: data.minChunkSize,
|
||||
chunkOverlap: data.overlapSize,
|
||||
recipe: 'default',
|
||||
})
|
||||
|
||||
if (!processResponse.ok) {
|
||||
throw new Error('Failed to start document processing')
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Create pending document objects and add them to the store immediately
|
||||
if (processResult.success && processResult.data.documentsCreated) {
|
||||
const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
|
||||
(doc: ProcessedDocumentResponse, index: number) => ({
|
||||
id: doc.documentId,
|
||||
knowledgeBaseId: newKnowledgeBase.id,
|
||||
filename: doc.filename,
|
||||
fileUrl: uploadedFiles[index].fileUrl,
|
||||
fileSize: uploadedFiles[index].fileSize,
|
||||
mimeType: uploadedFiles[index].mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
})
|
||||
)
|
||||
|
||||
// Add pending documents to store for immediate UI update
|
||||
useKnowledgeStore.getState().addPendingDocuments(newKnowledgeBase.id, pendingDocuments)
|
||||
}
|
||||
|
||||
// Update the knowledge base object with the correct document count
|
||||
newKnowledgeBase.docCount = uploadedFiles.length
|
||||
|
||||
|
||||
@@ -0,0 +1,352 @@
import { useState } from 'react'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('KnowledgeUpload')

export interface UploadedFile {
  filename: string
  fileUrl: string
  fileSize: number
  mimeType: string
}

export interface UploadProgress {
  stage: 'idle' | 'uploading' | 'processing' | 'completing'
  filesCompleted: number
  totalFiles: number
  currentFile?: string
}

export interface UploadError {
  message: string
  timestamp: number
  code?: string
  details?: any
}

export interface ProcessingOptions {
  chunkSize?: number
  minCharactersPerChunk?: number
  chunkOverlap?: number
  recipe?: string
}

export interface UseKnowledgeUploadOptions {
  onUploadComplete?: (uploadedFiles: UploadedFile[]) => void
  onError?: (error: UploadError) => void
}

class KnowledgeUploadError extends Error {
  constructor(
    message: string,
    public code: string,
    public details?: any
  ) {
    super(message)
    this.name = 'KnowledgeUploadError'
  }
}

class PresignedUrlError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PRESIGNED_URL_ERROR', details)
  }
}

class DirectUploadError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'DIRECT_UPLOAD_ERROR', details)
  }
}

class ProcessingError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PROCESSING_ERROR', details)
  }
}

export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
  const [isUploading, setIsUploading] = useState(false)
  const [uploadProgress, setUploadProgress] = useState<UploadProgress>({
    stage: 'idle',
    filesCompleted: 0,
    totalFiles: 0,
  })
  const [uploadError, setUploadError] = useState<UploadError | null>(null)

  const createUploadedFile = (
    filename: string,
    fileUrl: string,
    fileSize: number,
    mimeType: string
  ): UploadedFile => ({
    filename,
    fileUrl,
    fileSize,
    mimeType,
  })

  const createErrorFromException = (error: unknown, defaultMessage: string): UploadError => {
    if (error instanceof KnowledgeUploadError) {
      return {
        message: error.message,
        code: error.code,
        details: error.details,
        timestamp: Date.now(),
      }
    }

    if (error instanceof Error) {
      return {
        message: error.message,
        timestamp: Date.now(),
      }
    }

    return {
      message: defaultMessage,
      timestamp: Date.now(),
    }
  }

  const uploadFiles = async (
    files: File[],
    knowledgeBaseId: string,
    processingOptions: ProcessingOptions = {}
  ): Promise<UploadedFile[]> => {
    if (files.length === 0) {
      throw new KnowledgeUploadError('No files provided for upload', 'NO_FILES')
    }

    if (!knowledgeBaseId?.trim()) {
      throw new KnowledgeUploadError('Knowledge base ID is required', 'INVALID_KB_ID')
    }

    try {
      setIsUploading(true)
      setUploadError(null)
      setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })

      const uploadedFiles: UploadedFile[] = []

      // Upload all files using presigned URLs
      for (const [index, file] of files.entries()) {
        setUploadProgress((prev) => ({
          ...prev,
          currentFile: file.name,
          filesCompleted: index,
        }))

        try {
          // Get presigned URL
          const presignedResponse = await fetch('/api/files/presigned?type=knowledge-base', {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({
              fileName: file.name,
              contentType: file.type,
              fileSize: file.size,
            }),
          })

          if (!presignedResponse.ok) {
            let errorDetails: any = null
            try {
              errorDetails = await presignedResponse.json()
            } catch {
              // Ignore JSON parsing errors
            }

            throw new PresignedUrlError(
              `Failed to get presigned URL for ${file.name}: ${presignedResponse.status} ${presignedResponse.statusText}`,
              errorDetails
            )
          }

          const presignedData = await presignedResponse.json()

          if (presignedData.directUploadSupported) {
            // Use presigned URL for direct upload
            const uploadHeaders: Record<string, string> = {
              'Content-Type': file.type,
            }

            // Add Azure-specific headers if provided
            if (presignedData.uploadHeaders) {
              Object.assign(uploadHeaders, presignedData.uploadHeaders)
            }

            const uploadResponse = await fetch(presignedData.presignedUrl, {
              method: 'PUT',
              headers: uploadHeaders,
              body: file,
            })

            if (!uploadResponse.ok) {
              throw new DirectUploadError(
                `Direct upload failed for ${file.name}: ${uploadResponse.status} ${uploadResponse.statusText}`,
                { uploadResponse: uploadResponse.statusText }
              )
            }

            // Convert relative path to full URL for schema validation
            const fullFileUrl = presignedData.fileInfo.path.startsWith('http')
              ? presignedData.fileInfo.path
              : `${window.location.origin}${presignedData.fileInfo.path}`

            uploadedFiles.push(createUploadedFile(file.name, fullFileUrl, file.size, file.type))
          } else {
            // Fallback to traditional upload through API route
            const formData = new FormData()
            formData.append('file', file)

            const uploadResponse = await fetch('/api/files/upload', {
              method: 'POST',
              body: formData,
            })

            if (!uploadResponse.ok) {
              let errorData: any = null
              try {
                errorData = await uploadResponse.json()
              } catch {
                // Ignore JSON parsing errors
              }

              throw new DirectUploadError(
                `Failed to upload ${file.name}: ${errorData?.error || 'Unknown error'}`,
                errorData
              )
            }

            const uploadResult = await uploadResponse.json()

            // Validate upload result structure
            if (!uploadResult.path) {
              throw new DirectUploadError(
                `Invalid upload response for ${file.name}: missing file path`,
                uploadResult
              )
            }

            uploadedFiles.push(
              createUploadedFile(
                file.name,
                uploadResult.path.startsWith('http')
                  ? uploadResult.path
                  : `${window.location.origin}${uploadResult.path}`,
                file.size,
                file.type
              )
            )
          }
        } catch (fileError) {
          logger.error(`Error uploading file ${file.name}:`, fileError)
          throw fileError // Re-throw to be caught by outer try-catch
        }
      }

      setUploadProgress((prev) => ({ ...prev, stage: 'processing' }))

      // Start async document processing
      const processPayload = {
        documents: uploadedFiles,
        processingOptions: {
          chunkSize: processingOptions.chunkSize || 1024,
          minCharactersPerChunk: processingOptions.minCharactersPerChunk || 100,
          chunkOverlap: processingOptions.chunkOverlap || 200,
          recipe: processingOptions.recipe || 'default',
          lang: 'en',
        },
        bulk: true,
      }

      const processResponse = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(processPayload),
      })

      if (!processResponse.ok) {
        let errorData: any = null
|
||||
try {
|
||||
errorData = await processResponse.json()
|
||||
} catch {
|
||||
// Ignore JSON parsing errors
|
||||
}
|
||||
|
||||
logger.error('Document processing failed:', {
|
||||
status: processResponse.status,
|
||||
error: errorData,
|
||||
uploadedFiles: uploadedFiles.map((f) => ({
|
||||
filename: f.filename,
|
||||
fileUrl: f.fileUrl,
|
||||
fileSize: f.fileSize,
|
||||
mimeType: f.mimeType,
|
||||
})),
|
||||
})
|
||||
|
||||
throw new ProcessingError(
|
||||
`Failed to start document processing: ${errorData?.error || errorData?.message || 'Unknown error'}`,
|
||||
errorData
|
||||
)
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Validate process result structure
|
||||
if (!processResult.success) {
|
||||
throw new ProcessingError(
|
||||
`Document processing failed: ${processResult.error || 'Unknown error'}`,
|
||||
processResult
|
||||
)
|
||||
}
|
||||
|
||||
if (!processResult.data || !processResult.data.documentsCreated) {
|
||||
throw new ProcessingError(
|
||||
'Invalid processing response: missing document data',
|
||||
processResult
|
||||
)
|
||||
}
|
||||
|
||||
setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))
|
||||
|
||||
logger.info(`Successfully started processing ${uploadedFiles.length} documents`)
|
||||
|
||||
// Call success callback
|
||||
options.onUploadComplete?.(uploadedFiles)
|
||||
|
||||
return uploadedFiles
|
||||
} catch (err) {
|
||||
logger.error('Error uploading documents:', err)
|
||||
|
||||
const error = createErrorFromException(err, 'Unknown error occurred during upload')
|
||||
setUploadError(error)
|
||||
options.onError?.(error)
|
||||
|
||||
// Show user-friendly error message in console for debugging
|
||||
console.error('Document upload failed:', error.message)
|
||||
|
||||
throw err
|
||||
} finally {
|
||||
setIsUploading(false)
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
}
|
||||
}
|
||||
|
||||
const clearError = () => {
|
||||
setUploadError(null)
|
||||
}
|
||||
|
||||
return {
|
||||
isUploading,
|
||||
uploadProgress,
|
||||
uploadError,
|
||||
uploadFiles,
|
||||
clearError,
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { Check, ChevronDown, FileText } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import {
|
||||
@@ -13,7 +13,6 @@ import {
|
||||
} from '@/components/ui/command'
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
|
||||
|
||||
interface DocumentData {
|
||||
@@ -51,19 +50,16 @@ export function DocumentSelector({
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
}: DocumentSelectorProps) {
|
||||
const { getValue } = useSubBlockStore()
|
||||
|
||||
const [documents, setDocuments] = useState<DocumentData[]>([])
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [open, setOpen] = useState(false)
|
||||
const [selectedDocument, setSelectedDocument] = useState<DocumentData | null>(null)
|
||||
const [initialFetchDone, setInitialFetchDone] = useState(false)
|
||||
|
||||
// Use the proper hook to get the current value and setter
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
|
||||
// Get the knowledge base ID from the same block's knowledgeBaseId subblock - memoize to prevent re-renders
|
||||
const knowledgeBaseId = useMemo(() => getValue(blockId, 'knowledgeBaseId'), [getValue, blockId])
|
||||
// Get the knowledge base ID from the same block's knowledgeBaseId subblock
|
||||
const [knowledgeBaseId] = useSubBlockValue(blockId, 'knowledgeBaseId')
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
@@ -73,7 +69,6 @@ export function DocumentSelector({
|
||||
if (!knowledgeBaseId) {
|
||||
setDocuments([])
|
||||
setError('No knowledge base selected')
|
||||
setInitialFetchDone(true)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -94,7 +89,6 @@ export function DocumentSelector({
|
||||
|
||||
const fetchedDocuments = result.data || []
|
||||
setDocuments(fetchedDocuments)
|
||||
setInitialFetchDone(true)
|
||||
} catch (err) {
|
||||
if ((err as Error).name === 'AbortError') return
|
||||
setError((err as Error).message)
|
||||
@@ -138,16 +132,15 @@ export function DocumentSelector({
|
||||
useEffect(() => {
|
||||
setDocuments([])
|
||||
setSelectedDocument(null)
|
||||
setInitialFetchDone(false)
|
||||
setError(null)
|
||||
}, [knowledgeBaseId])
|
||||
|
||||
// Fetch documents when knowledge base is available and we haven't fetched yet
|
||||
// Fetch documents when knowledge base is available
|
||||
useEffect(() => {
|
||||
if (knowledgeBaseId && !initialFetchDone && !isPreview) {
|
||||
if (knowledgeBaseId && !isPreview) {
|
||||
fetchDocuments()
|
||||
}
|
||||
}, [knowledgeBaseId, initialFetchDone, isPreview, fetchDocuments])
|
||||
}, [knowledgeBaseId, isPreview, fetchDocuments])
|
||||
|
||||
const formatDocumentName = (document: DocumentData) => {
|
||||
return document.filename
|
||||
|
||||
@@ -26,10 +26,10 @@ interface ScheduleConfigProps {
|
||||
|
||||
export function ScheduleConfig({
|
||||
blockId,
|
||||
subBlockId,
|
||||
subBlockId: _subBlockId,
|
||||
isConnecting,
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
previewValue: _previewValue,
|
||||
disabled = false,
|
||||
}: ScheduleConfigProps) {
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
@@ -56,13 +56,7 @@ export function ScheduleConfig({
|
||||
|
||||
// Get the startWorkflow value to determine if scheduling is enabled
|
||||
// and expose the setter so we can update it
|
||||
const [startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')
|
||||
const isScheduleEnabled = startWorkflow === 'schedule'
|
||||
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
|
||||
|
||||
// Use preview value when in preview mode, otherwise use store value
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const [_startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')
|
||||
|
||||
// Function to check if schedule exists in the database
|
||||
const checkSchedule = async () => {
|
||||
@@ -110,10 +104,17 @@ export function ScheduleConfig({
|
||||
|
||||
// Check for schedule on mount and when relevant dependencies change
|
||||
useEffect(() => {
|
||||
// Always check for schedules regardless of the UI setting
|
||||
// This ensures we detect schedules even when the UI is set to manual
|
||||
checkSchedule()
|
||||
}, [workflowId, scheduleType, isModalOpen, refreshCounter])
|
||||
// Only check for schedules when workflowId changes or modal opens
|
||||
// Avoid checking on every scheduleType change to prevent excessive API calls
|
||||
if (workflowId && (isModalOpen || refreshCounter > 0)) {
|
||||
checkSchedule()
|
||||
}
|
||||
|
||||
// Cleanup function to reset loading state
|
||||
return () => {
|
||||
setIsLoading(false)
|
||||
}
|
||||
}, [workflowId, isModalOpen, refreshCounter])
|
||||
|
||||
// Format the schedule information for display
|
||||
const getScheduleInfo = () => {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { BookOpen, Code, Info, RectangleHorizontal, RectangleVertical } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
|
||||
import { Badge } from '@/components/ui/badge'
|
||||
import { Button } from '@/components/ui/button'
|
||||
@@ -83,6 +84,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
const isActiveBlock = useExecutionStore((state) => state.activeBlockIds.has(id))
|
||||
const isActive = dataIsActive || isActiveBlock
|
||||
|
||||
// Get the current workflow ID from URL params instead of global state
|
||||
// This prevents race conditions when switching workflows rapidly
|
||||
const params = useParams()
|
||||
const currentWorkflowId = params.workflowId as string
|
||||
|
||||
const reactivateSchedule = async (scheduleId: string) => {
|
||||
try {
|
||||
const response = await fetch(`/api/schedules/${scheduleId}`, {
|
||||
@@ -94,7 +100,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
fetchScheduleInfo()
|
||||
// Use the current workflow ID from params instead of global state
|
||||
if (currentWorkflowId) {
|
||||
fetchScheduleInfo(currentWorkflowId)
|
||||
}
|
||||
} else {
|
||||
console.error('Failed to reactivate schedule')
|
||||
}
|
||||
@@ -103,11 +112,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
}
|
||||
}
|
||||
|
||||
const fetchScheduleInfo = async () => {
|
||||
const fetchScheduleInfo = async (workflowId: string) => {
|
||||
if (!workflowId) return
|
||||
|
||||
try {
|
||||
setIsLoadingScheduleInfo(true)
|
||||
const workflowId = useWorkflowRegistry.getState().activeWorkflowId
|
||||
if (!workflowId) return
|
||||
|
||||
const response = await fetch(`/api/schedules?workflowId=${workflowId}&mode=schedule`, {
|
||||
cache: 'no-store',
|
||||
@@ -176,12 +185,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (type === 'starter') {
|
||||
fetchScheduleInfo()
|
||||
if (type === 'starter' && currentWorkflowId) {
|
||||
fetchScheduleInfo(currentWorkflowId)
|
||||
} else {
|
||||
setScheduleInfo(null)
|
||||
setIsLoadingScheduleInfo(false) // Reset loading state when not a starter block
|
||||
}
|
||||
}, [type])
|
||||
|
||||
// Cleanup function to reset loading state when component unmounts or workflow changes
|
||||
return () => {
|
||||
setIsLoadingScheduleInfo(false)
|
||||
}
|
||||
}, [type, currentWorkflowId])
|
||||
|
||||
// Get webhook information for the tooltip
|
||||
useEffect(() => {
|
||||
|
||||
@@ -15,9 +15,14 @@ import { useFolderStore } from '@/stores/folders/store'
|
||||
interface CreateMenuProps {
|
||||
onCreateWorkflow: (folderId?: string) => void
|
||||
isCollapsed?: boolean
|
||||
isCreatingWorkflow?: boolean
|
||||
}
|
||||
|
||||
export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
|
||||
export function CreateMenu({
|
||||
onCreateWorkflow,
|
||||
isCollapsed,
|
||||
isCreatingWorkflow = false,
|
||||
}: CreateMenuProps) {
|
||||
const [showFolderDialog, setShowFolderDialog] = useState(false)
|
||||
const [folderName, setFolderName] = useState('')
|
||||
const [isCreating, setIsCreating] = useState(false)
|
||||
@@ -73,6 +78,7 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
|
||||
onClick={handleCreateWorkflow}
|
||||
onMouseEnter={() => setIsHoverOpen(true)}
|
||||
onMouseLeave={() => setIsHoverOpen(false)}
|
||||
disabled={isCreatingWorkflow}
|
||||
>
|
||||
<Plus
|
||||
className={cn(
|
||||
@@ -101,11 +107,17 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
|
||||
onCloseAutoFocus={(e) => e.preventDefault()}
|
||||
>
|
||||
<button
|
||||
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
|
||||
className={cn(
|
||||
'flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors',
|
||||
isCreatingWorkflow
|
||||
? 'cursor-not-allowed opacity-50'
|
||||
: 'hover:bg-accent hover:text-accent-foreground'
|
||||
)}
|
||||
onClick={handleCreateWorkflow}
|
||||
disabled={isCreatingWorkflow}
|
||||
>
|
||||
<File className='h-4 w-4' />
|
||||
New Workflow
|
||||
{isCreatingWorkflow ? 'Creating...' : 'New Workflow'}
|
||||
</button>
|
||||
<button
|
||||
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
|
||||
|
||||
@@ -41,6 +41,9 @@ export function Sidebar() {
|
||||
const { isPending: sessionLoading } = useSession()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const isLoading = workflowsLoading || sessionLoading
|
||||
|
||||
// Add state to prevent multiple simultaneous workflow creations
|
||||
const [isCreatingWorkflow, setIsCreatingWorkflow] = useState(false)
|
||||
const router = useRouter()
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
@@ -108,7 +111,14 @@ export function Sidebar() {
|
||||
|
||||
// Create workflow handler
|
||||
const handleCreateWorkflow = async (folderId?: string) => {
|
||||
// Prevent multiple simultaneous workflow creations
|
||||
if (isCreatingWorkflow) {
|
||||
logger.info('Workflow creation already in progress, ignoring request')
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
setIsCreatingWorkflow(true)
|
||||
const id = await createWorkflow({
|
||||
workspaceId: workspaceId || undefined,
|
||||
folderId: folderId || undefined,
|
||||
@@ -116,6 +126,8 @@ export function Sidebar() {
|
||||
router.push(`/workspace/${workspaceId}/w/${id}`)
|
||||
} catch (error) {
|
||||
logger.error('Error creating workflow:', error)
|
||||
} finally {
|
||||
setIsCreatingWorkflow(false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -173,7 +185,11 @@ export function Sidebar() {
|
||||
{isLoading ? <Skeleton className='h-4 w-16' /> : 'Workflows'}
|
||||
</h2>
|
||||
{!isCollapsed && !isLoading && (
|
||||
<CreateMenu onCreateWorkflow={handleCreateWorkflow} isCollapsed={false} />
|
||||
<CreateMenu
|
||||
onCreateWorkflow={handleCreateWorkflow}
|
||||
isCollapsed={false}
|
||||
isCreatingWorkflow={isCreatingWorkflow}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<FolderTree
|
||||
|
||||
apps/sim/db/migrations/0048_flawless_ultron.sql (new file, 9 lines)
@@ -0,0 +1,9 @@
ALTER TABLE "user_stats" ADD COLUMN "current_usage_limit" numeric DEFAULT '5' NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "usage_limit_set_by" text;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "usage_limit_updated_at" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "current_period_cost" numeric DEFAULT '0' NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "billing_period_start" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "billing_period_end" timestamp;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "last_period_cost" numeric DEFAULT '0';--> statement-breakpoint
CREATE INDEX "subscription_reference_status_idx" ON "subscription" USING btree ("reference_id","status");--> statement-breakpoint
ALTER TABLE "subscription" ADD CONSTRAINT "check_enterprise_metadata" CHECK (plan != 'enterprise' OR (metadata IS NOT NULL AND (metadata->>'perSeatAllowance' IS NOT NULL OR metadata->>'totalAllowance' IS NOT NULL)));
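The new check constraint only bites for enterprise plans: such rows must carry metadata naming either a per-seat or a total allowance. A purely illustrative TypeScript value that would satisfy it (amounts are made up):

// Illustrative metadata for an enterprise subscription row; either key alone
// is enough for check_enterprise_metadata to pass.
const enterpriseMetadata = {
  perSeatAllowance: 200,
  // totalAllowance: 5000, // alternatively, one org-wide allowance
}
// Non-enterprise plans are unaffected and may keep metadata NULL.
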
apps/sim/db/migrations/0049_fancy_cardiac.sql (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
CREATE TABLE "workflow_execution_blocks" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"execution_id" text NOT NULL,
|
||||
"workflow_id" text NOT NULL,
|
||||
"block_id" text NOT NULL,
|
||||
"block_name" text,
|
||||
"block_type" text NOT NULL,
|
||||
"started_at" timestamp NOT NULL,
|
||||
"ended_at" timestamp,
|
||||
"duration_ms" integer,
|
||||
"status" text NOT NULL,
|
||||
"error_message" text,
|
||||
"error_stack_trace" text,
|
||||
"input_data" jsonb,
|
||||
"output_data" jsonb,
|
||||
"cost_input" numeric(10, 6),
|
||||
"cost_output" numeric(10, 6),
|
||||
"cost_total" numeric(10, 6),
|
||||
"tokens_prompt" integer,
|
||||
"tokens_completion" integer,
|
||||
"tokens_total" integer,
|
||||
"model_used" text,
|
||||
"metadata" jsonb,
|
||||
"created_at" timestamp DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "workflow_execution_logs" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"workflow_id" text NOT NULL,
|
||||
"execution_id" text NOT NULL,
|
||||
"state_snapshot_id" text NOT NULL,
|
||||
"level" text NOT NULL,
|
||||
"message" text NOT NULL,
|
||||
"trigger" text NOT NULL,
|
||||
"started_at" timestamp NOT NULL,
|
||||
"ended_at" timestamp,
|
||||
"total_duration_ms" integer,
|
||||
"block_count" integer DEFAULT 0 NOT NULL,
|
||||
"success_count" integer DEFAULT 0 NOT NULL,
|
||||
"error_count" integer DEFAULT 0 NOT NULL,
|
||||
"skipped_count" integer DEFAULT 0 NOT NULL,
|
||||
"total_cost" numeric(10, 6),
|
||||
"total_input_cost" numeric(10, 6),
|
||||
"total_output_cost" numeric(10, 6),
|
||||
"total_tokens" integer,
|
||||
"metadata" jsonb DEFAULT '{}' NOT NULL,
|
||||
"created_at" timestamp DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
CREATE TABLE "workflow_execution_snapshots" (
|
||||
"id" text PRIMARY KEY NOT NULL,
|
||||
"workflow_id" text NOT NULL,
|
||||
"state_hash" text NOT NULL,
|
||||
"state_data" jsonb NOT NULL,
|
||||
"created_at" timestamp DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_blocks" ADD CONSTRAINT "workflow_execution_blocks_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_logs" ADD CONSTRAINT "workflow_execution_logs_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_logs" ADD CONSTRAINT "workflow_execution_logs_state_snapshot_id_workflow_execution_snapshots_id_fk" FOREIGN KEY ("state_snapshot_id") REFERENCES "public"."workflow_execution_snapshots"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_execution_snapshots" ADD CONSTRAINT "workflow_execution_snapshots_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_execution_id_idx" ON "workflow_execution_blocks" USING btree ("execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_workflow_id_idx" ON "workflow_execution_blocks" USING btree ("workflow_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_block_id_idx" ON "workflow_execution_blocks" USING btree ("block_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_status_idx" ON "workflow_execution_blocks" USING btree ("status");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_duration_idx" ON "workflow_execution_blocks" USING btree ("duration_ms");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_cost_idx" ON "workflow_execution_blocks" USING btree ("cost_total");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_workflow_execution_idx" ON "workflow_execution_blocks" USING btree ("workflow_id","execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_execution_status_idx" ON "workflow_execution_blocks" USING btree ("execution_id","status");--> statement-breakpoint
|
||||
CREATE INDEX "execution_blocks_started_at_idx" ON "workflow_execution_blocks" USING btree ("started_at");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_workflow_id_idx" ON "workflow_execution_logs" USING btree ("workflow_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_execution_id_idx" ON "workflow_execution_logs" USING btree ("execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_trigger_idx" ON "workflow_execution_logs" USING btree ("trigger");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_level_idx" ON "workflow_execution_logs" USING btree ("level");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_started_at_idx" ON "workflow_execution_logs" USING btree ("started_at");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_cost_idx" ON "workflow_execution_logs" USING btree ("total_cost");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_execution_logs_duration_idx" ON "workflow_execution_logs" USING btree ("total_duration_ms");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "workflow_execution_logs_execution_id_unique" ON "workflow_execution_logs" USING btree ("execution_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_snapshots_workflow_id_idx" ON "workflow_execution_snapshots" USING btree ("workflow_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_snapshots_hash_idx" ON "workflow_execution_snapshots" USING btree ("state_hash");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "workflow_snapshots_workflow_hash_idx" ON "workflow_execution_snapshots" USING btree ("workflow_id","state_hash");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_snapshots_created_at_idx" ON "workflow_execution_snapshots" USING btree ("created_at");
|
||||
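Snapshots are deduplicated by the unique (workflow_id, state_hash) index, so an execution log row can point at an existing snapshot instead of storing the state again. A rough drizzle-style sketch of that write path, assuming a configured db instance and the workflowExecutionSnapshots / workflowExecutionLogs table objects exported from the schema further down (IDs, hashing, and field values are placeholders):

import { createHash, randomUUID } from 'crypto'
import { and, eq } from 'drizzle-orm'

// Sketch only: `db` and the table objects are assumed to be imported from the app.
async function recordExecution(
  workflowId: string,
  executionId: string,
  state: Record<string, unknown>
) {
  const stateHash = createHash('sha256').update(JSON.stringify(state)).digest('hex')

  // Reuse an existing snapshot for this workflow/state pair when one exists.
  const existing = await db
    .select({ id: workflowExecutionSnapshots.id })
    .from(workflowExecutionSnapshots)
    .where(
      and(
        eq(workflowExecutionSnapshots.workflowId, workflowId),
        eq(workflowExecutionSnapshots.stateHash, stateHash)
      )
    )

  const snapshotId = existing[0]?.id ?? randomUUID()
  if (!existing[0]) {
    await db
      .insert(workflowExecutionSnapshots)
      .values({ id: snapshotId, workflowId, stateHash, stateData: state })
  }

  await db.insert(workflowExecutionLogs).values({
    id: randomUUID(),
    workflowId,
    executionId,
    stateSnapshotId: snapshotId,
    level: 'info',
    message: 'Execution completed',
    trigger: 'manual',
    startedAt: new Date(),
  })
}
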
apps/sim/db/migrations/meta/0048_snapshot.json (new file, 3751 lines; diff suppressed because it is too large)
apps/sim/db/migrations/meta/0049_snapshot.json (new file, 4461 lines; diff suppressed because it is too large)
@@ -330,6 +330,20 @@
      "when": 1750794256278,
      "tag": "0047_new_triathlon",
      "breakpoints": true
    },
    {
      "idx": 48,
      "version": "7",
      "when": 1751422991828,
      "tag": "0048_flawless_ultron",
      "breakpoints": true
    },
    {
      "idx": 49,
      "version": "7",
      "when": 1751430703326,
      "tag": "0049_fancy_cardiac",
      "breakpoints": true
    }
  ]
}

@@ -133,58 +133,43 @@ export const workflow = pgTable('workflow', {
|
||||
marketplaceData: json('marketplace_data'),
|
||||
})
|
||||
|
||||
// New normalized workflow tables
|
||||
export const workflowBlocks = pgTable(
|
||||
'workflow_blocks',
|
||||
{
|
||||
// Primary identification
|
||||
id: text('id').primaryKey(), // Block UUID from the current JSON structure
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
|
||||
// Block properties (from current BlockState interface)
|
||||
type: text('type').notNull(), // e.g., 'starter', 'agent', 'api', 'function'
|
||||
name: text('name').notNull(), // Display name of the block
|
||||
type: text('type').notNull(), // 'starter', 'agent', 'api', 'function'
|
||||
name: text('name').notNull(),
|
||||
|
||||
// Position coordinates (from position.x, position.y)
|
||||
positionX: decimal('position_x').notNull(), // X coordinate on canvas
|
||||
positionY: decimal('position_y').notNull(), // Y coordinate on canvas
|
||||
positionX: decimal('position_x').notNull(),
|
||||
positionY: decimal('position_y').notNull(),
|
||||
|
||||
// Block behavior flags (from current BlockState)
|
||||
enabled: boolean('enabled').notNull().default(true), // Whether block is active
|
||||
horizontalHandles: boolean('horizontal_handles').notNull().default(true), // UI layout preference
|
||||
isWide: boolean('is_wide').notNull().default(false), // Whether block uses wide layout
|
||||
advancedMode: boolean('advanced_mode').notNull().default(false), // Whether block is in advanced mode
|
||||
height: decimal('height').notNull().default('0'), // Custom height override
|
||||
enabled: boolean('enabled').notNull().default(true),
|
||||
horizontalHandles: boolean('horizontal_handles').notNull().default(true),
|
||||
isWide: boolean('is_wide').notNull().default(false),
|
||||
advancedMode: boolean('advanced_mode').notNull().default(false),
|
||||
height: decimal('height').notNull().default('0'),
|
||||
|
||||
// Block data (keeping JSON for flexibility as current system does)
|
||||
subBlocks: jsonb('sub_blocks').notNull().default('{}'), // All subblock configurations
|
||||
outputs: jsonb('outputs').notNull().default('{}'), // Output type definitions
|
||||
data: jsonb('data').default('{}'), // Additional block-specific data
|
||||
subBlocks: jsonb('sub_blocks').notNull().default('{}'),
|
||||
outputs: jsonb('outputs').notNull().default('{}'),
|
||||
data: jsonb('data').default('{}'),
|
||||
|
||||
// Hierarchy support (for loop/parallel child blocks)
|
||||
parentId: text('parent_id'), // Self-reference handled by foreign key constraint in migration
|
||||
extent: text('extent'), // 'parent' or null - for ReactFlow parent constraint
|
||||
parentId: text('parent_id'),
|
||||
extent: text('extent'), // 'parent' or null
|
||||
|
||||
// Timestamps
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
updatedAt: timestamp('updated_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
// Primary access pattern: get all blocks for a workflow
|
||||
workflowIdIdx: index('workflow_blocks_workflow_id_idx').on(table.workflowId),
|
||||
|
||||
// For finding child blocks of a parent (loop/parallel containers)
|
||||
parentIdIdx: index('workflow_blocks_parent_id_idx').on(table.parentId),
|
||||
|
||||
// Composite index for efficient parent-child queries
|
||||
workflowParentIdx: index('workflow_blocks_workflow_parent_idx').on(
|
||||
table.workflowId,
|
||||
table.parentId
|
||||
),
|
||||
|
||||
// For block type filtering/analytics
|
||||
workflowTypeIdx: index('workflow_blocks_workflow_type_idx').on(table.workflowId, table.type),
|
||||
})
|
||||
)
|
||||
@@ -192,36 +177,26 @@ export const workflowBlocks = pgTable(
|
||||
export const workflowEdges = pgTable(
|
||||
'workflow_edges',
|
||||
{
|
||||
// Primary identification
|
||||
id: text('id').primaryKey(), // Edge UUID from ReactFlow
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
|
||||
// Connection definition (from ReactFlow Edge interface)
|
||||
sourceBlockId: text('source_block_id')
|
||||
.notNull()
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }), // Source block ID
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }),
|
||||
targetBlockId: text('target_block_id')
|
||||
.notNull()
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }), // Target block ID
|
||||
sourceHandle: text('source_handle'), // Specific output handle (optional)
|
||||
targetHandle: text('target_handle'), // Specific input handle (optional)
|
||||
.references(() => workflowBlocks.id, { onDelete: 'cascade' }),
|
||||
sourceHandle: text('source_handle'),
|
||||
targetHandle: text('target_handle'),
|
||||
|
||||
// Timestamps
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
// Primary access pattern: get all edges for a workflow
|
||||
workflowIdIdx: index('workflow_edges_workflow_id_idx').on(table.workflowId),
|
||||
|
||||
// For finding outgoing connections from a block
|
||||
sourceBlockIdx: index('workflow_edges_source_block_idx').on(table.sourceBlockId),
|
||||
|
||||
// For finding incoming connections to a block
|
||||
targetBlockIdx: index('workflow_edges_target_block_idx').on(table.targetBlockId),
|
||||
|
||||
// For comprehensive workflow topology queries
|
||||
workflowSourceIdx: index('workflow_edges_workflow_source_idx').on(
|
||||
table.workflowId,
|
||||
table.sourceBlockId
|
||||
@@ -236,25 +211,19 @@ export const workflowEdges = pgTable(
|
||||
export const workflowSubflows = pgTable(
|
||||
'workflow_subflows',
|
||||
{
|
||||
// Primary identification
|
||||
id: text('id').primaryKey(), // Subflow UUID (currently loop/parallel ID)
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
|
||||
// Subflow type and configuration
|
||||
type: text('type').notNull(), // 'loop' or 'parallel' (extensible for future types)
|
||||
config: jsonb('config').notNull().default('{}'), // Type-specific configuration
|
||||
type: text('type').notNull(), // 'loop' or 'parallel'
|
||||
config: jsonb('config').notNull().default('{}'),
|
||||
|
||||
// Timestamps
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
updatedAt: timestamp('updated_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
// Primary access pattern: get all subflows for a workflow
|
||||
workflowIdIdx: index('workflow_subflows_workflow_id_idx').on(table.workflowId),
|
||||
|
||||
// For filtering by subflow type
|
||||
workflowTypeIdx: index('workflow_subflows_workflow_type_idx').on(table.workflowId, table.type),
|
||||
})
|
||||
)
|
||||
@@ -273,14 +242,136 @@ export const workflowLogs = pgTable('workflow_logs', {
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
executionId: text('execution_id'),
|
||||
level: text('level').notNull(), // e.g. "info", "error", etc.
|
||||
level: text('level').notNull(), // "info", "error", etc.
|
||||
message: text('message').notNull(),
|
||||
duration: text('duration'), // Store as text to allow 'NA' for errors
|
||||
trigger: text('trigger'), // e.g. "api", "schedule", "manual"
|
||||
trigger: text('trigger'), // "api", "schedule", "manual"
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
metadata: json('metadata'), // Optional JSON field for storing additional context like tool calls
|
||||
metadata: json('metadata'),
|
||||
})
|
||||
|
||||
export const workflowExecutionSnapshots = pgTable(
|
||||
'workflow_execution_snapshots',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
stateHash: text('state_hash').notNull(),
|
||||
stateData: jsonb('state_data').notNull(),
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
workflowIdIdx: index('workflow_snapshots_workflow_id_idx').on(table.workflowId),
|
||||
stateHashIdx: index('workflow_snapshots_hash_idx').on(table.stateHash),
|
||||
workflowHashUnique: uniqueIndex('workflow_snapshots_workflow_hash_idx').on(
|
||||
table.workflowId,
|
||||
table.stateHash
|
||||
),
|
||||
createdAtIdx: index('workflow_snapshots_created_at_idx').on(table.createdAt),
|
||||
})
|
||||
)
|
||||
|
||||
export const workflowExecutionLogs = pgTable(
|
||||
'workflow_execution_logs',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
executionId: text('execution_id').notNull(),
|
||||
stateSnapshotId: text('state_snapshot_id')
|
||||
.notNull()
|
||||
.references(() => workflowExecutionSnapshots.id),
|
||||
|
||||
level: text('level').notNull(), // 'info', 'error'
|
||||
message: text('message').notNull(),
|
||||
trigger: text('trigger').notNull(), // 'api', 'webhook', 'schedule', 'manual', 'chat'
|
||||
|
||||
startedAt: timestamp('started_at').notNull(),
|
||||
endedAt: timestamp('ended_at'),
|
||||
totalDurationMs: integer('total_duration_ms'),
|
||||
|
||||
blockCount: integer('block_count').notNull().default(0),
|
||||
successCount: integer('success_count').notNull().default(0),
|
||||
errorCount: integer('error_count').notNull().default(0),
|
||||
skippedCount: integer('skipped_count').notNull().default(0),
|
||||
|
||||
totalCost: decimal('total_cost', { precision: 10, scale: 6 }),
|
||||
totalInputCost: decimal('total_input_cost', { precision: 10, scale: 6 }),
|
||||
totalOutputCost: decimal('total_output_cost', { precision: 10, scale: 6 }),
|
||||
totalTokens: integer('total_tokens'),
|
||||
|
||||
metadata: jsonb('metadata').notNull().default('{}'),
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
workflowIdIdx: index('workflow_execution_logs_workflow_id_idx').on(table.workflowId),
|
||||
executionIdIdx: index('workflow_execution_logs_execution_id_idx').on(table.executionId),
|
||||
triggerIdx: index('workflow_execution_logs_trigger_idx').on(table.trigger),
|
||||
levelIdx: index('workflow_execution_logs_level_idx').on(table.level),
|
||||
startedAtIdx: index('workflow_execution_logs_started_at_idx').on(table.startedAt),
|
||||
costIdx: index('workflow_execution_logs_cost_idx').on(table.totalCost),
|
||||
durationIdx: index('workflow_execution_logs_duration_idx').on(table.totalDurationMs),
|
||||
executionIdUnique: uniqueIndex('workflow_execution_logs_execution_id_unique').on(
|
||||
table.executionId
|
||||
),
|
||||
})
|
||||
)
|
||||
|
||||
export const workflowExecutionBlocks = pgTable(
|
||||
'workflow_execution_blocks',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
executionId: text('execution_id').notNull(),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
blockId: text('block_id').notNull(),
|
||||
blockName: text('block_name'),
|
||||
blockType: text('block_type').notNull(),
|
||||
|
||||
startedAt: timestamp('started_at').notNull(),
|
||||
endedAt: timestamp('ended_at'),
|
||||
durationMs: integer('duration_ms'),
|
||||
|
||||
status: text('status').notNull(), // 'success', 'error', 'skipped'
|
||||
errorMessage: text('error_message'),
|
||||
errorStackTrace: text('error_stack_trace'),
|
||||
|
||||
inputData: jsonb('input_data'),
|
||||
outputData: jsonb('output_data'),
|
||||
|
||||
costInput: decimal('cost_input', { precision: 10, scale: 6 }),
|
||||
costOutput: decimal('cost_output', { precision: 10, scale: 6 }),
|
||||
costTotal: decimal('cost_total', { precision: 10, scale: 6 }),
|
||||
tokensPrompt: integer('tokens_prompt'),
|
||||
tokensCompletion: integer('tokens_completion'),
|
||||
tokensTotal: integer('tokens_total'),
|
||||
modelUsed: text('model_used'),
|
||||
|
||||
metadata: jsonb('metadata'),
|
||||
createdAt: timestamp('created_at').notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
executionIdIdx: index('execution_blocks_execution_id_idx').on(table.executionId),
|
||||
workflowIdIdx: index('execution_blocks_workflow_id_idx').on(table.workflowId),
|
||||
blockIdIdx: index('execution_blocks_block_id_idx').on(table.blockId),
|
||||
statusIdx: index('execution_blocks_status_idx').on(table.status),
|
||||
durationIdx: index('execution_blocks_duration_idx').on(table.durationMs),
|
||||
costIdx: index('execution_blocks_cost_idx').on(table.costTotal),
|
||||
workflowExecutionIdx: index('execution_blocks_workflow_execution_idx').on(
|
||||
table.workflowId,
|
||||
table.executionId
|
||||
),
|
||||
executionStatusIdx: index('execution_blocks_execution_status_idx').on(
|
||||
table.executionId,
|
||||
table.status
|
||||
),
|
||||
startedAtIdx: index('execution_blocks_started_at_idx').on(table.startedAt),
|
||||
})
|
||||
)
|
||||
|
||||
export const environment = pgTable('environment', {
|
||||
id: text('id').primaryKey(), // Use the user id as the key
|
||||
userId: text('user_id')
|
||||
@@ -401,6 +492,14 @@ export const userStats = pgTable('user_stats', {
|
||||
totalChatExecutions: integer('total_chat_executions').notNull().default(0),
|
||||
totalTokensUsed: integer('total_tokens_used').notNull().default(0),
|
||||
totalCost: decimal('total_cost').notNull().default('0'),
|
||||
currentUsageLimit: decimal('current_usage_limit').notNull().default('5'), // Default $5 for free plan
|
||||
usageLimitSetBy: text('usage_limit_set_by'), // User ID who set the limit (for team admin tracking)
|
||||
usageLimitUpdatedAt: timestamp('usage_limit_updated_at').defaultNow(),
|
||||
// Billing period tracking
|
||||
currentPeriodCost: decimal('current_period_cost').notNull().default('0'), // Usage in current billing period
|
||||
billingPeriodStart: timestamp('billing_period_start').defaultNow(), // When current billing period started
|
||||
billingPeriodEnd: timestamp('billing_period_end'), // When current billing period ends
|
||||
lastPeriodCost: decimal('last_period_cost').default('0'), // Usage from previous billing period
|
||||
lastActive: timestamp('last_active').notNull().defaultNow(),
|
||||
})
|
||||
|
||||
@@ -416,21 +515,34 @@ export const customTools = pgTable('custom_tools', {
|
||||
updatedAt: timestamp('updated_at').notNull().defaultNow(),
|
||||
})
|
||||
|
||||
export const subscription = pgTable('subscription', {
|
||||
id: text('id').primaryKey(),
|
||||
plan: text('plan').notNull(),
|
||||
referenceId: text('reference_id').notNull(),
|
||||
stripeCustomerId: text('stripe_customer_id'),
|
||||
stripeSubscriptionId: text('stripe_subscription_id'),
|
||||
status: text('status'),
|
||||
periodStart: timestamp('period_start'),
|
||||
periodEnd: timestamp('period_end'),
|
||||
cancelAtPeriodEnd: boolean('cancel_at_period_end'),
|
||||
seats: integer('seats'),
|
||||
trialStart: timestamp('trial_start'),
|
||||
trialEnd: timestamp('trial_end'),
|
||||
metadata: json('metadata'),
|
||||
})
|
||||
export const subscription = pgTable(
|
||||
'subscription',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
plan: text('plan').notNull(),
|
||||
referenceId: text('reference_id').notNull(),
|
||||
stripeCustomerId: text('stripe_customer_id'),
|
||||
stripeSubscriptionId: text('stripe_subscription_id'),
|
||||
status: text('status'),
|
||||
periodStart: timestamp('period_start'),
|
||||
periodEnd: timestamp('period_end'),
|
||||
cancelAtPeriodEnd: boolean('cancel_at_period_end'),
|
||||
seats: integer('seats'),
|
||||
trialStart: timestamp('trial_start'),
|
||||
trialEnd: timestamp('trial_end'),
|
||||
metadata: json('metadata'),
|
||||
},
|
||||
(table) => ({
|
||||
referenceStatusIdx: index('subscription_reference_status_idx').on(
|
||||
table.referenceId,
|
||||
table.status
|
||||
),
|
||||
enterpriseMetadataCheck: check(
|
||||
'check_enterprise_metadata',
|
||||
sql`plan != 'enterprise' OR (metadata IS NOT NULL AND (metadata->>'perSeatAllowance' IS NOT NULL OR metadata->>'totalAllowance' IS NOT NULL))`
|
||||
),
|
||||
})
|
||||
)
|
||||
|
||||
export const chat = pgTable(
|
||||
'chat',
|
||||
@@ -485,7 +597,7 @@ export const member = pgTable('member', {
|
||||
organizationId: text('organization_id')
|
||||
.notNull()
|
||||
.references(() => organization.id, { onDelete: 'cascade' }),
|
||||
role: text('role').notNull(),
|
||||
role: text('role').notNull(), // 'admin' or 'member' - team-level permissions only
|
||||
createdAt: timestamp('created_at').defaultNow().notNull(),
|
||||
})
|
||||
|
||||
|
||||
@@ -668,4 +668,238 @@ describe('Executor', () => {
|
||||
expect(createContextSpy).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
/**
|
||||
* Dependency checking logic tests
|
||||
*/
|
||||
describe('dependency checking', () => {
|
||||
test('should handle multi-input blocks with inactive sources correctly', () => {
|
||||
// Create workflow with router -> multiple APIs -> single agent
|
||||
const routerWorkflow = {
|
||||
blocks: [
|
||||
{
|
||||
id: 'start',
|
||||
metadata: { id: 'starter', name: 'Start' },
|
||||
config: { params: {} },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'router',
|
||||
metadata: { id: 'router', name: 'Router' },
|
||||
config: { params: { prompt: 'test', model: 'gpt-4' } },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api1',
|
||||
metadata: { id: 'api', name: 'API 1' },
|
||||
config: { params: { url: 'http://api1.com', method: 'GET' } },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api2',
|
||||
metadata: { id: 'api', name: 'API 2' },
|
||||
config: { params: { url: 'http://api2.com', method: 'GET' } },
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'agent',
|
||||
metadata: { id: 'agent', name: 'Agent' },
|
||||
config: { params: { model: 'gpt-4', userPrompt: 'test' } },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [
|
||||
{ source: 'start', target: 'router' },
|
||||
{ source: 'router', target: 'api1' },
|
||||
{ source: 'router', target: 'api2' },
|
||||
{ source: 'api1', target: 'agent' },
|
||||
{ source: 'api2', target: 'agent' },
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
}
|
||||
|
||||
const executor = new Executor(routerWorkflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
// Mock context simulating: router selected api1, api1 executed, api2 not in active path
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: {
|
||||
router: new Map([['router', 'api1']]),
|
||||
condition: new Map(),
|
||||
},
|
||||
activeExecutionPath: new Set(['start', 'router', 'api1', 'agent']),
|
||||
workflow: routerWorkflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['start', 'router', 'api1'])
|
||||
|
||||
// Test agent's dependencies
|
||||
const agentConnections = [
|
||||
{ source: 'api1', target: 'agent', sourceHandle: 'source' },
|
||||
{ source: 'api2', target: 'agent', sourceHandle: 'source' },
|
||||
]
|
||||
|
||||
const dependenciesMet = checkDependencies(agentConnections, executedBlocks, mockContext)
|
||||
|
||||
// Both dependencies should be met:
|
||||
// - api1: in active path AND executed = met
|
||||
// - api2: NOT in active path = automatically met
|
||||
expect(dependenciesMet).toBe(true)
|
||||
})
|
||||
|
||||
test('should prioritize special connection types over active path check', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: { router: new Map(), condition: new Map() },
|
||||
activeExecutionPath: new Set(['block1']), // block2 not in active path
|
||||
completedLoops: new Set(),
|
||||
workflow: workflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['block1'])
|
||||
|
||||
// Test error connection (should be handled before active path check)
|
||||
const errorConnections = [{ source: 'block2', target: 'block3', sourceHandle: 'error' }]
|
||||
|
||||
// Mock block2 with error state
|
||||
mockContext.blockStates.set('block2', {
|
||||
output: { error: 'test error' },
|
||||
})
|
||||
|
||||
// Even though block2 is not in active path, error connection should be handled specially
|
||||
const errorDepsResult = checkDependencies(errorConnections, new Set(['block2']), mockContext)
|
||||
expect(errorDepsResult).toBe(true) // source executed + has error = dependency met
|
||||
|
||||
// Test loop connection
|
||||
const loopConnections = [
|
||||
{ source: 'block2', target: 'block3', sourceHandle: 'loop-end-source' },
|
||||
]
|
||||
|
||||
mockContext.completedLoops.add('block2')
|
||||
const loopDepsResult = checkDependencies(loopConnections, new Set(['block2']), mockContext)
|
||||
expect(loopDepsResult).toBe(true) // loop completed = dependency met
|
||||
})
|
||||
|
||||
test('should handle router decisions correctly in dependency checking', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
// Add router block to workflow
|
||||
workflow.blocks.push({
|
||||
id: 'router1',
|
||||
metadata: { id: 'router', name: 'Router' },
|
||||
config: { params: {} },
|
||||
enabled: true,
|
||||
})
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: {
|
||||
router: new Map([['router1', 'target1']]), // router selected target1
|
||||
condition: new Map(),
|
||||
},
|
||||
activeExecutionPath: new Set(['router1', 'target1', 'target2']),
|
||||
workflow: workflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['router1'])
|
||||
|
||||
// Test selected target
|
||||
const selectedConnections = [{ source: 'router1', target: 'target1', sourceHandle: 'source' }]
|
||||
const selectedResult = checkDependencies(selectedConnections, executedBlocks, mockContext)
|
||||
expect(selectedResult).toBe(true) // router executed + target selected = dependency met
|
||||
|
||||
// Test non-selected target
|
||||
const nonSelectedConnections = [
|
||||
{ source: 'router1', target: 'target2', sourceHandle: 'source' },
|
||||
]
|
||||
const nonSelectedResult = checkDependencies(
|
||||
nonSelectedConnections,
|
||||
executedBlocks,
|
||||
mockContext
|
||||
)
|
||||
expect(nonSelectedResult).toBe(true) // router executed + target NOT selected = dependency auto-met
|
||||
})
|
||||
|
||||
test('should handle condition decisions correctly in dependency checking', () => {
|
||||
const conditionWorkflow = createWorkflowWithCondition()
|
||||
const executor = new Executor(conditionWorkflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: {
|
||||
router: new Map(),
|
||||
condition: new Map([['condition1', 'true']]), // condition selected true path
|
||||
},
|
||||
activeExecutionPath: new Set(['condition1', 'trueTarget']),
|
||||
workflow: conditionWorkflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['condition1'])
|
||||
|
||||
// Test selected condition path
|
||||
const trueConnections = [
|
||||
{ source: 'condition1', target: 'trueTarget', sourceHandle: 'condition-true' },
|
||||
]
|
||||
const trueResult = checkDependencies(trueConnections, executedBlocks, mockContext)
|
||||
expect(trueResult).toBe(true)
|
||||
|
||||
// Test non-selected condition path
|
||||
const falseConnections = [
|
||||
{ source: 'condition1', target: 'falseTarget', sourceHandle: 'condition-false' },
|
||||
]
|
||||
const falseResult = checkDependencies(falseConnections, executedBlocks, mockContext)
|
||||
expect(falseResult).toBe(true) // condition executed + path NOT selected = dependency auto-met
|
||||
})
|
||||
|
||||
test('should handle regular sequential dependencies correctly', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = {
|
||||
blockStates: new Map(),
|
||||
decisions: { router: new Map(), condition: new Map() },
|
||||
activeExecutionPath: new Set(['block1', 'block2']),
|
||||
workflow: workflow,
|
||||
} as any
|
||||
|
||||
const executedBlocks = new Set(['block1'])
|
||||
|
||||
// Test normal sequential dependency
|
||||
const normalConnections = [{ source: 'block1', target: 'block2', sourceHandle: 'source' }]
|
||||
|
||||
// Without error
|
||||
const normalResult = checkDependencies(normalConnections, executedBlocks, mockContext)
|
||||
expect(normalResult).toBe(true) // source executed + no error = dependency met
|
||||
|
||||
// With error should fail regular connection
|
||||
mockContext.blockStates.set('block1', {
|
||||
output: { error: 'test error' },
|
||||
})
|
||||
const errorResult = checkDependencies(normalConnections, executedBlocks, mockContext)
|
||||
expect(errorResult).toBe(false) // source executed + has error = regular dependency not met
|
||||
})
|
||||
|
||||
test('should handle empty dependency list', () => {
|
||||
const workflow = createMinimalWorkflow()
|
||||
const executor = new Executor(workflow)
|
||||
const checkDependencies = (executor as any).checkDependencies.bind(executor)
|
||||
|
||||
const mockContext = createMockContext()
|
||||
const executedBlocks = new Set<string>()
|
||||
|
||||
// Empty connections should return true
|
||||
const result = checkDependencies([], executedBlocks, mockContext)
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -877,6 +877,9 @@ export class Executor {
    insideParallel?: string,
    iterationIndex?: number
  ): boolean {
    if (incomingConnections.length === 0) {
      return true
    }
    // Check if this is a loop block
    const isLoopBlock = incomingConnections.some((conn) => {
      const sourceBlock = this.actualWorkflow.blocks.find((b) => b.id === conn.source)
@@ -994,6 +997,12 @@ export class Executor {
        return sourceExecuted && conn.target === selectedTarget
      }

      // If source is not in active path, consider this dependency met
      // This allows blocks with multiple inputs to execute even if some inputs are from inactive paths
      if (!context.activeExecutionPath.has(conn.source)) {
        return true
      }

      // For error connections, check if the source had an error
      if (conn.sourceHandle === 'error') {
        return sourceExecuted && hasSourceError
@@ -1004,12 +1013,6 @@ export class Executor {
        return sourceExecuted && !hasSourceError
      }

      // If source is not in active path, consider this dependency met
      // This allows blocks with multiple inputs to execute even if some inputs are from inactive paths
      if (!context.activeExecutionPath.has(conn.source)) {
        return true
      }

      // For regular blocks, dependency is met if source is executed
      return sourceExecuted
    })
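Net effect of the two hunks above: the inactive-source short-circuit now runs before the error and regular-handle checks, so a block with several inputs no longer waits on sources the router never activated. A simplified standalone sketch of that ordering (types trimmed; the real method also resolves router, condition, and loop handles first):

interface Conn {
  source: string
  target: string
  sourceHandle?: string
}

interface PathContext {
  activeExecutionPath: Set<string>
  erroredBlocks: Set<string>
}

// Sketch only: mirrors the ordering introduced above, not the full method.
function dependencyMet(conn: Conn, executedBlocks: Set<string>, ctx: PathContext): boolean {
  const sourceExecuted = executedBlocks.has(conn.source)
  const hasSourceError = ctx.erroredBlocks.has(conn.source)

  // 1. Sources outside the active path never block execution.
  if (!ctx.activeExecutionPath.has(conn.source)) {
    return true
  }

  // 2. Error handles fire only when the executed source actually errored.
  if (conn.sourceHandle === 'error') {
    return sourceExecuted && hasSourceError
  }

  // 3. Regular handles need an executed, error-free source.
  return sourceExecuted && !hasSourceError
}
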
|
||||
|
||||
@@ -408,4 +408,206 @@ describe('PathTracker', () => {
|
||||
}).not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Router downstream path activation', () => {
|
||||
beforeEach(() => {
|
||||
// Create router workflow with downstream connections
|
||||
mockWorkflow = {
|
||||
version: '1.0',
|
||||
blocks: [
|
||||
{
|
||||
id: 'router1',
|
||||
metadata: { id: 'router', name: 'Router' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'router', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api1',
|
||||
metadata: { id: 'api', name: 'API 1' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'api', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'api2',
|
||||
metadata: { id: 'api', name: 'API 2' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'api', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
id: 'agent1',
|
||||
metadata: { id: 'agent', name: 'Agent' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'agent', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [
|
||||
{ source: 'router1', target: 'api1' },
|
||||
{ source: 'router1', target: 'api2' },
|
||||
{ source: 'api1', target: 'agent1' },
|
||||
{ source: 'api2', target: 'agent1' },
|
||||
],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
}
|
||||
|
||||
pathTracker = new PathTracker(mockWorkflow)
|
||||
mockContext = {
|
||||
workflowId: 'test-router-workflow',
|
||||
blockStates: new Map(),
|
||||
blockLogs: [],
|
||||
metadata: { duration: 0 },
|
||||
environmentVariables: {},
|
||||
decisions: { router: new Map(), condition: new Map() },
|
||||
loopIterations: new Map(),
|
||||
loopItems: new Map(),
|
||||
completedLoops: new Set(),
|
||||
executedBlocks: new Set(),
|
||||
activeExecutionPath: new Set(),
|
||||
workflow: mockWorkflow,
|
||||
}
|
||||
})
|
||||
|
||||
it('should activate downstream paths when router selects a target', () => {
|
||||
// Mock router output selecting api1
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
// Update paths for router
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
|
||||
// Both api1 and agent1 should be activated (downstream from api1)
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
|
||||
|
||||
// api2 should NOT be activated (not selected by router)
|
||||
expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle multiple levels of downstream connections', () => {
|
||||
// Add another level to test deep activation
|
||||
mockWorkflow.blocks.push({
|
||||
id: 'finalStep',
|
||||
metadata: { id: 'api', name: 'Final Step' },
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: 'api', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
})
|
||||
mockWorkflow.connections.push({ source: 'agent1', target: 'finalStep' })
|
||||
|
||||
pathTracker = new PathTracker(mockWorkflow)
|
||||
|
||||
// Mock router output selecting api1
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
|
||||
// All downstream blocks should be activated
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('finalStep')).toBe(true)
|
||||
|
||||
// Non-selected path should not be activated
|
||||
expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
|
||||
})
|
||||
|
||||
it('should not create infinite loops in cyclic workflows', () => {
|
||||
// Add a cycle to test loop prevention
|
||||
mockWorkflow.connections.push({ source: 'agent1', target: 'api1' })
|
||||
pathTracker = new PathTracker(mockWorkflow)
|
||||
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
// This should not throw or cause infinite recursion
|
||||
expect(() => {
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
}).not.toThrow()
|
||||
|
||||
// Both api1 and agent1 should still be activated
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
|
||||
})
|
||||
|
||||
it('should handle router with no downstream connections', () => {
|
||||
// Create isolated router
|
||||
const isolatedWorkflow = {
|
||||
...mockWorkflow,
|
||||
connections: [
|
||||
{ source: 'router1', target: 'api1' },
|
||||
{ source: 'router1', target: 'api2' },
|
||||
// Remove downstream connections from api1/api2
|
||||
],
|
||||
}
|
||||
pathTracker = new PathTracker(isolatedWorkflow)
|
||||
|
||||
mockContext.blockStates.set('router1', {
|
||||
output: {
|
||||
response: {
|
||||
selectedPath: {
|
||||
blockId: 'api1',
|
||||
blockType: 'api',
|
||||
blockTitle: 'API 1',
|
||||
},
|
||||
},
|
||||
},
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
|
||||
pathTracker.updateExecutionPaths(['router1'], mockContext)
|
||||
|
||||
// Only the selected target should be activated
|
||||
expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
|
||||
expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
|
||||
expect(mockContext.activeExecutionPath.has('agent1')).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -165,10 +165,28 @@ export class PathTracker {
    if (selectedPath) {
      context.decisions.router.set(block.id, selectedPath)
      context.activeExecutionPath.add(selectedPath)

      this.activateDownstreamPaths(selectedPath, context)

      logger.info(`Router ${block.id} selected path: ${selectedPath}`)
    }
  }

  /**
   * Recursively activate downstream paths from a block
   */
  private activateDownstreamPaths(blockId: string, context: ExecutionContext): void {
    const outgoingConnections = this.getOutgoingConnections(blockId)

    for (const conn of outgoingConnections) {
      if (!context.activeExecutionPath.has(conn.target)) {
        context.activeExecutionPath.add(conn.target)

        this.activateDownstreamPaths(conn.target, context)
      }
    }
  }
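
A minimal standalone sketch of the same guard-before-recurse pattern, using the cyclic workflow from the tests above (illustrative names only; this is not part of the PathTracker implementation):

interface Conn {
  source: string
  target: string
}

function activateFrom(start: string, conns: Conn[], active: Set<string>): void {
  for (const conn of conns.filter((c) => c.source === start)) {
    // The membership check doubles as a visited-set, so a cycle such as
    // api1 -> agent1 -> api1 is walked at most once per block.
    if (!active.has(conn.target)) {
      active.add(conn.target)
      activateFrom(conn.target, conns, active)
    }
  }
}

// Mirrors the cyclic test case: router selects api1, and agent1 loops back to api1.
const conns: Conn[] = [
  { source: 'router1', target: 'api1' },
  { source: 'api1', target: 'agent1' },
  { source: 'agent1', target: 'api1' },
]
const active = new Set<string>(['api1']) // the selected path is added before recursing
activateFrom('api1', conns, active)
// active now holds 'api1' and 'agent1'; recursion stops when 'api1' is revisited
// because the set already contains it.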
|
||||
|
||||
/**
|
||||
* Update paths for condition blocks
|
||||
*/
|
||||
@@ -219,9 +237,7 @@ export class PathTracker {
|
||||
const isPartOfLoop = blockLoops.length > 0
|
||||
|
||||
for (const conn of outgoingConnections) {
|
||||
if (
|
||||
this.shouldActivateConnection(conn, block.id, hasError, isPartOfLoop, blockLoops, context)
|
||||
) {
|
||||
if (this.shouldActivateConnection(conn, hasError, isPartOfLoop, blockLoops, context)) {
|
||||
context.activeExecutionPath.add(conn.target)
|
||||
}
|
||||
}
|
||||
@@ -253,7 +269,6 @@ export class PathTracker {
|
||||
*/
|
||||
private shouldActivateConnection(
|
||||
conn: SerializedConnection,
|
||||
sourceBlockId: string,
|
||||
hasError: boolean,
|
||||
isPartOfLoop: boolean,
|
||||
blockLoops: Array<{ id: string; loop: any }>,
|
||||
|
||||
apps/sim/lib/logs/types.ts (new file, 380 lines)
@@ -0,0 +1,380 @@
|
||||
import type { Edge } from 'reactflow'
|
||||
import type { BlockLog, NormalizedBlockOutput } from '@/executor/types'
|
||||
import type { DeploymentStatus } from '@/stores/workflows/registry/types'
|
||||
import type { Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
export type { WorkflowState, Loop, Parallel, DeploymentStatus }
|
||||
export type WorkflowEdge = Edge
|
||||
export type { NormalizedBlockOutput, BlockLog }
|
||||
|
||||
export interface PricingInfo {
|
||||
input: number
|
||||
output: number
|
||||
cachedInput?: number
|
||||
updatedAt: string
|
||||
}
|
||||
|
||||
export interface TokenUsage {
|
||||
prompt: number
|
||||
completion: number
|
||||
total: number
|
||||
}
|
||||
|
||||
export interface CostBreakdown {
|
||||
input: number
|
||||
output: number
|
||||
total: number
|
||||
tokens: TokenUsage
|
||||
model: string
|
||||
pricing: PricingInfo
|
||||
}
|
||||
|
||||
export interface ToolCall {
|
||||
name: string
|
||||
duration: number
|
||||
startTime: string
|
||||
endTime: string
|
||||
status: 'success' | 'error'
|
||||
input: Record<string, unknown>
|
||||
output: Record<string, unknown>
|
||||
error?: string
|
||||
}
|
||||
|
||||
export type BlockInputData = Record<string, any>
|
||||
export type BlockOutputData = NormalizedBlockOutput | null
|
||||
|
||||
export interface ExecutionEnvironment {
|
||||
variables: Record<string, string>
|
||||
workflowId: string
|
||||
executionId: string
|
||||
userId: string
|
||||
workspaceId: string
|
||||
}
|
||||
|
||||
export interface ExecutionTrigger {
|
||||
type: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
|
||||
source: string
|
||||
data?: Record<string, unknown>
|
||||
timestamp: string
|
||||
}
|
||||
|
||||
export interface ExecutionStatus {
|
||||
status: 'running' | 'completed' | 'failed' | 'cancelled'
|
||||
startedAt: string
|
||||
endedAt?: string
|
||||
durationMs?: number
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionSnapshot {
|
||||
id: string
|
||||
workflowId: string
|
||||
stateHash: string
|
||||
stateData: WorkflowState
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export type WorkflowExecutionSnapshotInsert = Omit<WorkflowExecutionSnapshot, 'createdAt'>
|
||||
export type WorkflowExecutionSnapshotSelect = WorkflowExecutionSnapshot
|
||||
|
||||
export interface WorkflowExecutionLog {
|
||||
id: string
|
||||
workflowId: string
|
||||
executionId: string
|
||||
stateSnapshotId: string
|
||||
level: 'info' | 'error'
|
||||
message: string
|
||||
trigger: ExecutionTrigger['type']
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
totalDurationMs: number
|
||||
blockCount: number
|
||||
successCount: number
|
||||
errorCount: number
|
||||
skippedCount: number
|
||||
totalCost: number
|
||||
totalInputCost: number
|
||||
totalOutputCost: number
|
||||
totalTokens: number
|
||||
primaryModel: string
|
||||
metadata: {
|
||||
environment: ExecutionEnvironment
|
||||
trigger: ExecutionTrigger
|
||||
traceSpans?: TraceSpan[]
|
||||
errorDetails?: {
|
||||
blockId: string
|
||||
blockName: string
|
||||
error: string
|
||||
stackTrace?: string
|
||||
}
|
||||
}
|
||||
duration?: string
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export type WorkflowExecutionLogInsert = Omit<WorkflowExecutionLog, 'id' | 'createdAt'>
|
||||
export type WorkflowExecutionLogSelect = WorkflowExecutionLog
|
||||
|
||||
export interface BlockExecutionLog {
|
||||
id: string
|
||||
executionId: string
|
||||
workflowId: string
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
status: 'success' | 'error' | 'skipped'
|
||||
errorMessage?: string
|
||||
errorStackTrace?: string
|
||||
inputData: BlockInputData
|
||||
outputData: BlockOutputData
|
||||
cost: CostBreakdown | null
|
||||
metadata: {
|
||||
toolCalls?: ToolCall[]
|
||||
iterationIndex?: number
|
||||
virtualBlockId?: string
|
||||
parentBlockId?: string
|
||||
environmentSnapshot?: Record<string, string>
|
||||
}
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export type BlockExecutionLogInsert = Omit<BlockExecutionLog, 'id' | 'createdAt'>
|
||||
export type BlockExecutionLogSelect = BlockExecutionLog
|
||||
|
||||
export interface TraceSpan {
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
duration: number
|
||||
startTime: string
|
||||
endTime: string
|
||||
children?: TraceSpan[]
|
||||
toolCalls?: ToolCall[]
|
||||
status?: 'success' | 'error'
|
||||
tokens?: number
|
||||
relativeStartMs?: number
|
||||
blockId?: string
|
||||
input?: Record<string, unknown>
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionSummary {
|
||||
id: string
|
||||
workflowId: string
|
||||
workflowName: string
|
||||
executionId: string
|
||||
trigger: ExecutionTrigger['type']
|
||||
status: ExecutionStatus['status']
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
blockStats: {
|
||||
total: number
|
||||
success: number
|
||||
error: number
|
||||
skipped: number
|
||||
}
|
||||
costSummary: {
|
||||
total: number
|
||||
inputCost: number
|
||||
outputCost: number
|
||||
tokens: number
|
||||
primaryModel: string
|
||||
}
|
||||
stateSnapshotId: string
|
||||
errorSummary?: {
|
||||
blockId: string
|
||||
blockName: string
|
||||
message: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionDetail extends WorkflowExecutionSummary {
|
||||
environment: ExecutionEnvironment
|
||||
triggerData: ExecutionTrigger
|
||||
blockExecutions: BlockExecutionSummary[]
|
||||
traceSpans: TraceSpan[]
|
||||
workflowState: WorkflowState
|
||||
}
|
||||
|
||||
export interface BlockExecutionSummary {
|
||||
id: string
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
status: BlockExecutionLog['status']
|
||||
errorMessage?: string
|
||||
cost?: CostBreakdown
|
||||
inputSummary: {
|
||||
parameterCount: number
|
||||
hasComplexData: boolean
|
||||
}
|
||||
outputSummary: {
|
||||
hasOutput: boolean
|
||||
outputType: string
|
||||
hasError: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export interface BlockExecutionDetail extends BlockExecutionSummary {
|
||||
inputData: BlockInputData
|
||||
outputData: BlockOutputData
|
||||
metadata: BlockExecutionLog['metadata']
|
||||
toolCalls?: ToolCall[]
|
||||
}
|
||||
|
||||
export interface PaginatedResponse<T> {
|
||||
data: T[]
|
||||
pagination: {
|
||||
page: number
|
||||
pageSize: number
|
||||
total: number
|
||||
totalPages: number
|
||||
hasNext: boolean
|
||||
hasPrevious: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export type WorkflowExecutionsResponse = PaginatedResponse<WorkflowExecutionSummary>
|
||||
export type BlockExecutionsResponse = PaginatedResponse<BlockExecutionSummary>
|
||||
|
||||
export interface WorkflowExecutionFilters {
|
||||
workflowIds?: string[]
|
||||
folderIds?: string[]
|
||||
triggers?: ExecutionTrigger['type'][]
|
||||
status?: ExecutionStatus['status'][]
|
||||
startDate?: string
|
||||
endDate?: string
|
||||
search?: string
|
||||
minDuration?: number
|
||||
maxDuration?: number
|
||||
minCost?: number
|
||||
maxCost?: number
|
||||
hasErrors?: boolean
|
||||
}
|
||||
|
||||
export interface PaginationParams {
|
||||
page: number
|
||||
pageSize: number
|
||||
sortBy?: 'startedAt' | 'durationMs' | 'totalCost' | 'blockCount'
|
||||
sortOrder?: 'asc' | 'desc'
|
||||
}
|
||||
|
||||
export interface LogsQueryParams extends WorkflowExecutionFilters, PaginationParams {
|
||||
includeBlockSummary?: boolean
|
||||
includeWorkflowState?: boolean
|
||||
}
|
||||
|
||||
export interface LogsError {
|
||||
code: 'EXECUTION_NOT_FOUND' | 'SNAPSHOT_NOT_FOUND' | 'INVALID_WORKFLOW_STATE' | 'STORAGE_ERROR'
|
||||
message: string
|
||||
details?: Record<string, unknown>
|
||||
}
|
||||
|
||||
export interface ValidationError {
|
||||
field: string
|
||||
message: string
|
||||
value: unknown
|
||||
}
|
||||
|
||||
export class LogsServiceError extends Error {
|
||||
public code: LogsError['code']
|
||||
public details?: Record<string, unknown>
|
||||
|
||||
constructor(message: string, code: LogsError['code'], details?: Record<string, unknown>) {
|
||||
super(message)
|
||||
this.name = 'LogsServiceError'
|
||||
this.code = code
|
||||
this.details = details
|
||||
}
|
||||
}
|
||||
|
||||
export interface DatabaseOperationResult<T> {
|
||||
success: boolean
|
||||
data?: T
|
||||
error?: LogsServiceError
|
||||
}
|
||||
|
||||
export interface BatchInsertResult<T> {
|
||||
inserted: T[]
|
||||
failed: Array<{
|
||||
item: T
|
||||
error: string
|
||||
}>
|
||||
totalAttempted: number
|
||||
totalSucceeded: number
|
||||
totalFailed: number
|
||||
}
|
||||
|
||||
export interface SnapshotService {
|
||||
createSnapshot(workflowId: string, state: WorkflowState): Promise<WorkflowExecutionSnapshot>
|
||||
getSnapshot(id: string): Promise<WorkflowExecutionSnapshot | null>
|
||||
getSnapshotByHash(workflowId: string, hash: string): Promise<WorkflowExecutionSnapshot | null>
|
||||
computeStateHash(state: WorkflowState): string
|
||||
cleanupOrphanedSnapshots(olderThanDays: number): Promise<number>
|
||||
}
|
||||
|
||||
export interface SnapshotCreationResult {
|
||||
snapshot: WorkflowExecutionSnapshot
|
||||
isNew: boolean
|
||||
}
|
||||
|
||||
export interface ExecutionLoggerService {
|
||||
startWorkflowExecution(params: {
|
||||
workflowId: string
|
||||
executionId: string
|
||||
trigger: ExecutionTrigger
|
||||
environment: ExecutionEnvironment
|
||||
workflowState: WorkflowState
|
||||
}): Promise<{
|
||||
workflowLog: WorkflowExecutionLog
|
||||
snapshot: WorkflowExecutionSnapshot
|
||||
}>
|
||||
|
||||
logBlockExecution(params: {
|
||||
executionId: string
|
||||
workflowId: string
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
input: BlockInputData
|
||||
output: BlockOutputData
|
||||
timing: {
|
||||
startedAt: string
|
||||
endedAt: string
|
||||
durationMs: number
|
||||
}
|
||||
status: BlockExecutionLog['status']
|
||||
error?: {
|
||||
message: string
|
||||
stackTrace?: string
|
||||
}
|
||||
cost?: CostBreakdown
|
||||
metadata?: BlockExecutionLog['metadata']
|
||||
}): Promise<BlockExecutionLog>
|
||||
|
||||
completeWorkflowExecution(params: {
|
||||
executionId: string
|
||||
endedAt: string
|
||||
totalDurationMs: number
|
||||
blockStats: {
|
||||
total: number
|
||||
success: number
|
||||
error: number
|
||||
skipped: number
|
||||
}
|
||||
costSummary: {
|
||||
totalCost: number
|
||||
totalInputCost: number
|
||||
totalOutputCost: number
|
||||
totalTokens: number
|
||||
primaryModel: string
|
||||
}
|
||||
finalOutput: BlockOutputData
|
||||
traceSpans?: TraceSpan[]
|
||||
}): Promise<WorkflowExecutionLog>
|
||||
}
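
A hypothetical usage sketch of ExecutionLoggerService over one execution lifecycle. The `loggerService` instance and every literal value are illustrative; only the method names, parameter shapes, and field names come from the interface and types above, which are assumed to be imported.

async function runWithLogging(loggerService: ExecutionLoggerService) {
  const trigger: ExecutionTrigger = {
    type: 'manual',
    source: 'editor',
    timestamp: new Date().toISOString(),
  }

  // 1. Open the workflow-level log and persist a state snapshot.
  const { workflowLog } = await loggerService.startWorkflowExecution({
    workflowId: 'wf-123',
    executionId: 'exec-456',
    trigger,
    environment: {
      variables: {},
      workflowId: 'wf-123',
      executionId: 'exec-456',
      userId: 'user-1',
      workspaceId: 'ws-1',
    },
    workflowState: {} as WorkflowState, // placeholder; a real serialized state in practice
  })

  // 2. Record each block as it finishes.
  await loggerService.logBlockExecution({
    executionId: 'exec-456',
    workflowId: 'wf-123',
    blockId: 'block-1',
    blockName: 'Start',
    blockType: 'starter',
    input: {},
    output: null,
    timing: {
      startedAt: workflowLog.startedAt,
      endedAt: new Date().toISOString(),
      durationMs: 12,
    },
    status: 'success',
  })

  // 3. Close the workflow-level log with aggregate stats and costs.
  await loggerService.completeWorkflowExecution({
    executionId: 'exec-456',
    endedAt: new Date().toISOString(),
    totalDurationMs: 12,
    blockStats: { total: 1, success: 1, error: 0, skipped: 0 },
    costSummary: {
      totalCost: 0,
      totalInputCost: 0,
      totalOutputCost: 0,
      totalTokens: 0,
      primaryModel: 'none',
    },
    finalOutput: null,
  })
}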
|
||||
@@ -1,9 +1,5 @@
|
||||
// Export the storage abstraction layer
|
||||
|
||||
export * as BlobClient from './blob/blob-client'
|
||||
// Export specific storage clients for advanced use cases
|
||||
export * as S3Client from './s3/s3-client'
|
||||
// Export configuration
|
||||
export {
|
||||
BLOB_CONFIG,
|
||||
BLOB_KB_CONFIG,
|
||||
|
||||
@@ -279,15 +279,51 @@ describe('S3 Client', () => {
|
||||
})
|
||||
|
||||
describe('s3Client initialization', () => {
|
||||
it('should initialize with correct configuration', async () => {
|
||||
it('should initialize with correct configuration when credentials are available', async () => {
|
||||
// Mock env with credentials
|
||||
vi.doMock('../../env', () => ({
|
||||
env: {
|
||||
AWS_ACCESS_KEY_ID: 'test-access-key',
|
||||
AWS_SECRET_ACCESS_KEY: 'test-secret-key',
|
||||
},
|
||||
}))
|
||||
|
||||
// Re-import to get fresh module with mocked env
|
||||
vi.resetModules()
|
||||
const { getS3Client } = await import('./s3-client')
|
||||
const { S3Client } = await import('@aws-sdk/client-s3')
|
||||
|
||||
const client = getS3Client()
|
||||
|
||||
expect(client).toBeDefined()
|
||||
// Verify the client was constructed with the right configuration
|
||||
expect(S3Client).toHaveBeenCalledWith({ region: 'test-region' })
|
||||
expect(S3Client).toHaveBeenCalledWith({
|
||||
region: 'test-region',
|
||||
credentials: {
|
||||
accessKeyId: 'test-access-key',
|
||||
secretAccessKey: 'test-secret-key',
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should initialize without credentials when env vars are not available', async () => {
|
||||
vi.doMock('../../env', () => ({
|
||||
env: {
|
||||
AWS_ACCESS_KEY_ID: undefined,
|
||||
AWS_SECRET_ACCESS_KEY: undefined,
|
||||
},
|
||||
}))
|
||||
|
||||
vi.resetModules()
|
||||
const { getS3Client } = await import('./s3-client')
|
||||
const { S3Client } = await import('@aws-sdk/client-s3')
|
||||
|
||||
const client = getS3Client()
|
||||
|
||||
expect(client).toBeDefined()
|
||||
expect(S3Client).toHaveBeenCalledWith({
|
||||
region: 'test-region',
|
||||
credentials: undefined,
|
||||
})
|
||||
})
|
||||
})
|
||||
})
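
A sketch of the initializer shape these two tests imply: a lazily created, module-cached client that only passes explicit credentials when both env vars are present. This is an assumption inferred from the assertions, not the actual s3-client source; the config import path is hypothetical.

import { S3Client } from '@aws-sdk/client-s3'
import { env } from '../../env'
import { S3_CONFIG } from './s3-config' // hypothetical config module providing the region

let s3Client: S3Client | null = null

export function getS3Client(): S3Client {
  if (!s3Client) {
    s3Client = new S3Client({
      region: S3_CONFIG.region,
      // Pass explicit credentials only when both env vars are set; otherwise
      // leave `credentials` undefined so the default provider chain applies.
      credentials:
        env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY
          ? {
              accessKeyId: env.AWS_ACCESS_KEY_ID,
              secretAccessKey: env.AWS_SECRET_ACCESS_KEY,
            }
          : undefined,
    })
  }
  return s3Client
}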
|
||||
|
||||
@@ -43,9 +43,6 @@ async function initializeApplication(): Promise<void> {
|
||||
// Mark data as initialized only after sync managers have loaded data from DB
|
||||
dataInitialized = true
|
||||
|
||||
// Register cleanup
|
||||
window.addEventListener('beforeunload', handleBeforeUnload)
|
||||
|
||||
// Log initialization timing information
|
||||
const initDuration = Date.now() - initStartTime
|
||||
logger.info(`Application initialization completed in ${initDuration}ms`)
|
||||
|
||||
@@ -4,8 +4,11 @@ import { createLogger } from '@/lib/logs/console-logger'
|
||||
const logger = createLogger('KnowledgeStore')
|
||||
|
||||
export interface ChunkingConfig {
|
||||
chunkSize?: number
|
||||
minCharactersPerChunk?: number
|
||||
maxSize: number
|
||||
minSize: number
|
||||
overlap: number
|
||||
chunkSize?: number // Legacy support
|
||||
minCharactersPerChunk?: number // Legacy support
|
||||
recipe?: string
|
||||
lang?: string
|
||||
strategy?: 'recursive' | 'semantic' | 'sentence' | 'paragraph'
|
||||
@@ -463,75 +466,65 @@ export const useKnowledgeStore = create<KnowledgeStore>((set, get) => ({
|
||||
throw new Error(result.error || 'Failed to fetch documents')
|
||||
}
|
||||
|
||||
const documents = result.data
|
||||
const serverDocuments = result.data
|
||||
|
||||
set((state) => {
|
||||
// Merge with existing documents, being smart about when to use server data vs local optimistic updates
|
||||
const currentDocuments = state.documents[knowledgeBaseId] || []
|
||||
|
||||
// For each fetched document, decide whether to use server data or preserve local state
|
||||
const mergedDocuments = documents.map((fetchedDoc: DocumentData) => {
|
||||
const existingDoc = currentDocuments.find((doc) => doc.id === fetchedDoc.id)
|
||||
// Create a map of server documents by filename for quick lookup
|
||||
const serverDocumentsByFilename = new Map()
|
||||
serverDocuments.forEach((doc: DocumentData) => {
|
||||
serverDocumentsByFilename.set(doc.filename, doc)
|
||||
})
|
||||
|
||||
if (!existingDoc) {
|
||||
// New document from server, use it as-is
|
||||
return fetchedDoc
|
||||
// Filter out temporary documents that now have real server equivalents
|
||||
const filteredCurrentDocs = currentDocuments.filter((doc) => {
|
||||
// If this is a temporary document (starts with temp-) and a server document exists with the same filename
|
||||
if (doc.id.startsWith('temp-') && serverDocumentsByFilename.has(doc.filename)) {
|
||||
return false // Remove the temporary document
|
||||
}
|
||||
|
||||
// If processing status is different, generally prefer server data for these transitions:
|
||||
if (existingDoc.processingStatus !== fetchedDoc.processingStatus) {
|
||||
// Always allow these status progressions from server:
|
||||
// pending -> processing, pending -> completed, pending -> failed
|
||||
// processing -> completed, processing -> failed
|
||||
const allowedTransitions = [
|
||||
{ from: 'pending', to: 'processing' },
|
||||
{ from: 'pending', to: 'completed' },
|
||||
{ from: 'pending', to: 'failed' },
|
||||
{ from: 'processing', to: 'completed' },
|
||||
{ from: 'processing', to: 'failed' },
|
||||
]
|
||||
|
||||
const transition = allowedTransitions.find(
|
||||
(t) => t.from === existingDoc.processingStatus && t.to === fetchedDoc.processingStatus
|
||||
)
|
||||
|
||||
if (transition) {
|
||||
return fetchedDoc
|
||||
// If this is a real document that still exists on the server, keep it for merging
|
||||
if (!doc.id.startsWith('temp-')) {
|
||||
const serverDoc = serverDocuments.find((sDoc: DocumentData) => sDoc.id === doc.id)
|
||||
if (serverDoc) {
|
||||
return false // Will be replaced by server version in merge below
|
||||
}
|
||||
}
|
||||
|
||||
const existingHasTimestamps =
|
||||
existingDoc.processingStartedAt || existingDoc.processingCompletedAt
|
||||
const fetchedHasTimestamps =
|
||||
fetchedDoc.processingStartedAt || fetchedDoc.processingCompletedAt
|
||||
|
||||
if (fetchedHasTimestamps && !existingHasTimestamps) {
|
||||
return fetchedDoc
|
||||
}
|
||||
|
||||
// If the server document has updated stats (chunk count, token count, etc.), use it
|
||||
if (
|
||||
fetchedDoc.processingStatus === 'completed' &&
|
||||
(fetchedDoc.chunkCount !== existingDoc.chunkCount ||
|
||||
fetchedDoc.tokenCount !== existingDoc.tokenCount ||
|
||||
fetchedDoc.characterCount !== existingDoc.characterCount)
|
||||
) {
|
||||
return fetchedDoc
|
||||
}
|
||||
|
||||
// Otherwise, preserve the existing document (keeps optimistic updates)
|
||||
return existingDoc
|
||||
// Keep temporary documents that don't have server equivalents yet
|
||||
return true
|
||||
})
|
||||
|
||||
// Add any new documents that weren't in the existing set
|
||||
const newDocuments = documents.filter(
|
||||
(fetchedDoc: DocumentData) => !currentDocuments.find((doc) => doc.id === fetchedDoc.id)
|
||||
)
|
||||
// Merge server documents with any remaining local documents
|
||||
const mergedDocuments = serverDocuments.map((serverDoc: DocumentData) => {
|
||||
const existingDoc = currentDocuments.find((doc) => doc.id === serverDoc.id)
|
||||
|
||||
if (!existingDoc) {
|
||||
// New document from server, use it as-is
|
||||
return serverDoc
|
||||
}
|
||||
|
||||
// Merge logic for existing documents (prefer server data for most fields)
|
||||
return {
|
||||
...existingDoc,
|
||||
...serverDoc,
|
||||
// Preserve any local optimistic updates that haven't been reflected on server yet
|
||||
...(existingDoc.processingStatus !== serverDoc.processingStatus &&
|
||||
['pending', 'processing'].includes(existingDoc.processingStatus) &&
|
||||
!serverDoc.processingStartedAt
|
||||
? { processingStatus: existingDoc.processingStatus }
|
||||
: {}),
|
||||
}
|
||||
})
|
||||
|
||||
// Add any remaining temporary documents that don't have server equivalents
|
||||
const finalDocuments = [...mergedDocuments, ...filteredCurrentDocs]
|
||||
|
||||
return {
|
||||
documents: {
|
||||
...state.documents,
|
||||
[knowledgeBaseId]: [...mergedDocuments, ...newDocuments],
|
||||
[knowledgeBaseId]: finalDocuments,
|
||||
},
|
||||
loadingDocuments: new Set(
|
||||
[...state.loadingDocuments].filter((loadingId) => loadingId !== knowledgeBaseId)
|
||||
@@ -540,7 +533,7 @@ export const useKnowledgeStore = create<KnowledgeStore>((set, get) => ({
|
||||
})
|
||||
|
||||
logger.info(`Documents refreshed for knowledge base: ${knowledgeBaseId}`)
|
||||
return documents
|
||||
return serverDocuments
|
||||
} catch (error) {
|
||||
logger.error(`Error refreshing documents for knowledge base ${knowledgeBaseId}:`, error)
|
||||
|
||||
|
||||
@@ -557,9 +557,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
* @returns The ID of the newly created workflow
|
||||
*/
|
||||
createWorkflow: async (options = {}) => {
|
||||
const { workflows } = get()
|
||||
const id = crypto.randomUUID()
|
||||
|
||||
// Use provided workspace ID (must be provided since we no longer track active workspace)
|
||||
const workspaceId = options.workspaceId
|
||||
|
||||
@@ -570,292 +567,259 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
}
|
||||
|
||||
logger.info(`Creating new workflow in workspace: ${workspaceId || 'none'}`)
|
||||
// Generate workflow metadata with appropriate name and color
|
||||
const newWorkflow: WorkflowMetadata = {
|
||||
id,
|
||||
name: options.name || generateUniqueName(workflows),
|
||||
lastModified: new Date(),
|
||||
description: options.description || 'New workflow',
|
||||
color: options.marketplaceId ? '#808080' : getNextWorkflowColor(workflows), // Gray for marketplace imports
|
||||
marketplaceData: options.marketplaceId
|
||||
? { id: options.marketplaceId, status: 'temp' as const }
|
||||
: undefined,
|
||||
workspaceId, // Associate with workspace
|
||||
folderId: options.folderId || null, // Associate with folder if provided
|
||||
}
|
||||
|
||||
let initialState: any
|
||||
|
||||
// If this is a marketplace import with existing state
|
||||
if (options.marketplaceId && options.marketplaceState) {
|
||||
initialState = {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Imported from marketplace',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
logger.info(`Created workflow from marketplace: ${options.marketplaceId}`)
|
||||
} else {
|
||||
// Create starter block for new workflow
|
||||
const starterId = crypto.randomUUID()
|
||||
const starterBlock = {
|
||||
id: starterId,
|
||||
type: 'starter' as const,
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
subBlocks: {
|
||||
startWorkflow: {
|
||||
id: 'startWorkflow',
|
||||
type: 'dropdown' as const,
|
||||
value: 'manual',
|
||||
},
|
||||
webhookPath: {
|
||||
id: 'webhookPath',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
webhookSecret: {
|
||||
id: 'webhookSecret',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
scheduleType: {
|
||||
id: 'scheduleType',
|
||||
type: 'dropdown' as const,
|
||||
value: 'daily',
|
||||
},
|
||||
minutesInterval: {
|
||||
id: 'minutesInterval',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
minutesStartingAt: {
|
||||
id: 'minutesStartingAt',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
hourlyMinute: {
|
||||
id: 'hourlyMinute',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
dailyTime: {
|
||||
id: 'dailyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
weeklyDay: {
|
||||
id: 'weeklyDay',
|
||||
type: 'dropdown' as const,
|
||||
value: 'MON',
|
||||
},
|
||||
weeklyDayTime: {
|
||||
id: 'weeklyDayTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyDay: {
|
||||
id: 'monthlyDay',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyTime: {
|
||||
id: 'monthlyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
cronExpression: {
|
||||
id: 'cronExpression',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
timezone: {
|
||||
id: 'timezone',
|
||||
type: 'dropdown' as const,
|
||||
value: 'UTC',
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
response: {
|
||||
type: {
|
||||
input: 'any',
|
||||
},
|
||||
},
|
||||
},
|
||||
enabled: true,
|
||||
horizontalHandles: true,
|
||||
isWide: false,
|
||||
height: 0,
|
||||
}
|
||||
|
||||
initialState = {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Initial state',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
// Add workflow to registry first
|
||||
set((state) => ({
|
||||
workflows: {
|
||||
...state.workflows,
|
||||
[id]: newWorkflow,
|
||||
},
|
||||
error: null,
|
||||
}))
|
||||
|
||||
// Initialize subblock values if this is a marketplace import
|
||||
if (options.marketplaceId && options.marketplaceState?.blocks) {
|
||||
useSubBlockStore.getState().initializeFromWorkflow(id, options.marketplaceState.blocks)
|
||||
}
|
||||
|
||||
// Initialize subblock values to ensure they're available for sync
|
||||
if (!options.marketplaceId) {
|
||||
// For non-marketplace workflows, initialize subblock values from the starter block
|
||||
const subblockValues: Record<string, Record<string, any>> = {}
|
||||
const blocks = initialState.blocks as Record<string, BlockState>
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
subblockValues[blockId] = {}
|
||||
for (const [subblockId, subblock] of Object.entries(block.subBlocks)) {
|
||||
subblockValues[blockId][subblockId] = (subblock as any).value
|
||||
}
|
||||
}
|
||||
|
||||
// Update the subblock store with the initial values
|
||||
useSubBlockStore.setState((state) => ({
|
||||
workflowValues: {
|
||||
...state.workflowValues,
|
||||
[id]: subblockValues,
|
||||
},
|
||||
}))
|
||||
}
|
||||
|
||||
// Properly set as active workflow and initialize state
|
||||
set({ activeWorkflowId: id })
|
||||
useWorkflowStore.setState(initialState)
|
||||
|
||||
// Immediately persist the new workflow to the database using dedicated endpoint
|
||||
const persistWorkflow = async () => {
|
||||
try {
|
||||
const response = await fetch('/api/workflows', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
name: newWorkflow.name,
|
||||
description: newWorkflow.description,
|
||||
color: newWorkflow.color,
|
||||
workspaceId: newWorkflow.workspaceId,
|
||||
folderId: newWorkflow.folderId,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json()
|
||||
throw new Error(
|
||||
`Failed to create workflow: ${errorData.error || response.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
const createdWorkflow = await response.json()
|
||||
logger.info(`Successfully created workflow ${createdWorkflow.id} on server`)
|
||||
|
||||
// Update the local workflow ID to match the server-generated one
|
||||
if (createdWorkflow.id !== id) {
|
||||
logger.info(`Updating local workflow ID from ${id} to ${createdWorkflow.id}`)
|
||||
|
||||
// Update registry with server ID
|
||||
set((state) => {
|
||||
const { [id]: oldWorkflow, ...otherWorkflows } = state.workflows
|
||||
return {
|
||||
workflows: {
|
||||
...otherWorkflows,
|
||||
[createdWorkflow.id]: {
|
||||
...oldWorkflow,
|
||||
id: createdWorkflow.id,
|
||||
},
|
||||
},
|
||||
activeWorkflowId: createdWorkflow.id,
|
||||
}
|
||||
})
|
||||
|
||||
// Return the server ID for the caller
|
||||
return createdWorkflow.id
|
||||
}
|
||||
|
||||
return id
|
||||
} catch (error) {
|
||||
logger.error(`Failed to create new workflow ${id}:`, error)
|
||||
throw error // Re-throw to handle in calling code
|
||||
}
|
||||
}
|
||||
|
||||
// Persist synchronously to ensure workflow exists before Socket.IO operations
|
||||
let finalId = id
|
||||
// Create the workflow on the server first to get the server-generated ID
|
||||
try {
|
||||
finalId = await persistWorkflow()
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`Critical: Failed to persist new workflow ${id}, Socket.IO operations may fail:`,
|
||||
error
|
||||
const response = await fetch('/api/workflows', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
name: options.name || generateUniqueName(),
|
||||
description: options.description || 'New workflow',
|
||||
color: options.marketplaceId ? '#808080' : getNextWorkflowColor(),
|
||||
workspaceId,
|
||||
folderId: options.folderId || null,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json()
|
||||
throw new Error(`Failed to create workflow: ${errorData.error || response.statusText}`)
|
||||
}
|
||||
|
||||
const createdWorkflow = await response.json()
|
||||
const serverWorkflowId = createdWorkflow.id
|
||||
|
||||
logger.info(`Successfully created workflow ${serverWorkflowId} on server`)
|
||||
|
||||
// Generate workflow metadata with server-generated ID
|
||||
const newWorkflow: WorkflowMetadata = {
|
||||
id: serverWorkflowId,
|
||||
name: createdWorkflow.name,
|
||||
lastModified: new Date(),
|
||||
description: createdWorkflow.description,
|
||||
color: createdWorkflow.color,
|
||||
marketplaceData: options.marketplaceId
|
||||
? { id: options.marketplaceId, status: 'temp' as const }
|
||||
: undefined,
|
||||
workspaceId,
|
||||
folderId: createdWorkflow.folderId,
|
||||
}
|
||||
|
||||
let initialState: any
|
||||
|
||||
// If this is a marketplace import with existing state
|
||||
if (options.marketplaceId && options.marketplaceState) {
|
||||
initialState = {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: options.marketplaceState.blocks || {},
|
||||
edges: options.marketplaceState.edges || [],
|
||||
loops: options.marketplaceState.loops || {},
|
||||
parallels: options.marketplaceState.parallels || {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Imported from marketplace',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
logger.info(`Created workflow from marketplace: ${options.marketplaceId}`)
|
||||
} else {
|
||||
// Create starter block for new workflow
|
||||
const starterId = crypto.randomUUID()
|
||||
const starterBlock = {
|
||||
id: starterId,
|
||||
type: 'starter' as const,
|
||||
name: 'Start',
|
||||
position: { x: 100, y: 100 },
|
||||
subBlocks: {
|
||||
startWorkflow: {
|
||||
id: 'startWorkflow',
|
||||
type: 'dropdown' as const,
|
||||
value: 'manual',
|
||||
},
|
||||
webhookPath: {
|
||||
id: 'webhookPath',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
webhookSecret: {
|
||||
id: 'webhookSecret',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
scheduleType: {
|
||||
id: 'scheduleType',
|
||||
type: 'dropdown' as const,
|
||||
value: 'daily',
|
||||
},
|
||||
minutesInterval: {
|
||||
id: 'minutesInterval',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
minutesStartingAt: {
|
||||
id: 'minutesStartingAt',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
hourlyMinute: {
|
||||
id: 'hourlyMinute',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
dailyTime: {
|
||||
id: 'dailyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
weeklyDay: {
|
||||
id: 'weeklyDay',
|
||||
type: 'dropdown' as const,
|
||||
value: 'MON',
|
||||
},
|
||||
weeklyDayTime: {
|
||||
id: 'weeklyDayTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyDay: {
|
||||
id: 'monthlyDay',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
monthlyTime: {
|
||||
id: 'monthlyTime',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
cronExpression: {
|
||||
id: 'cronExpression',
|
||||
type: 'short-input' as const,
|
||||
value: '',
|
||||
},
|
||||
timezone: {
|
||||
id: 'timezone',
|
||||
type: 'dropdown' as const,
|
||||
value: 'UTC',
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
response: {
|
||||
type: {
|
||||
input: 'any',
|
||||
},
|
||||
},
|
||||
},
|
||||
enabled: true,
|
||||
horizontalHandles: true,
|
||||
isWide: false,
|
||||
height: 0,
|
||||
}
|
||||
|
||||
initialState = {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
deploymentStatuses: {}, // Initialize empty deployment statuses map
|
||||
workspaceId, // Include workspace ID in the state object
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
state: {
|
||||
blocks: {
|
||||
[starterId]: starterBlock,
|
||||
},
|
||||
edges: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
isDeployed: false,
|
||||
deployedAt: undefined,
|
||||
workspaceId, // Include workspace ID in history
|
||||
},
|
||||
timestamp: Date.now(),
|
||||
action: 'Initial state',
|
||||
subblockValues: {},
|
||||
},
|
||||
future: [],
|
||||
},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
// Add workflow to registry with server-generated ID
|
||||
set((state) => ({
|
||||
workflows: {
|
||||
...state.workflows,
|
||||
[serverWorkflowId]: newWorkflow,
|
||||
},
|
||||
error: null,
|
||||
}))
|
||||
|
||||
// Initialize subblock values if this is a marketplace import
|
||||
if (options.marketplaceId && options.marketplaceState?.blocks) {
|
||||
useSubBlockStore
|
||||
.getState()
|
||||
.initializeFromWorkflow(serverWorkflowId, options.marketplaceState.blocks)
|
||||
}
|
||||
|
||||
// Initialize subblock values to ensure they're available for sync
|
||||
if (!options.marketplaceId) {
|
||||
// For non-marketplace workflows, initialize subblock values from the starter block
|
||||
const subblockValues: Record<string, Record<string, any>> = {}
|
||||
const blocks = initialState.blocks as Record<string, BlockState>
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
subblockValues[blockId] = {}
|
||||
for (const [subblockId, subblock] of Object.entries(block.subBlocks)) {
|
||||
subblockValues[blockId][subblockId] = (subblock as any).value
|
||||
}
|
||||
}
|
||||
|
||||
// Update the subblock store with the initial values
|
||||
useSubBlockStore.setState((state) => ({
|
||||
workflowValues: {
|
||||
...state.workflowValues,
|
||||
[serverWorkflowId]: subblockValues,
|
||||
},
|
||||
}))
|
||||
}
|
||||
|
||||
// Don't set as active workflow here - let the navigation/URL change handle that
|
||||
// This prevents race conditions and flickering
|
||||
logger.info(
|
||||
`Created new workflow with ID ${serverWorkflowId} in workspace ${workspaceId || 'none'}`
|
||||
)
|
||||
// Don't throw - allow workflow creation to continue in memory
|
||||
|
||||
return serverWorkflowId
|
||||
} catch (error) {
|
||||
logger.error(`Failed to create new workflow:`, error)
|
||||
set({
|
||||
error: `Failed to create workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
})
|
||||
throw error
|
||||
}
|
||||
|
||||
logger.info(`Created new workflow with ID ${finalId} in workspace ${workspaceId || 'none'}`)
|
||||
|
||||
return finalId
|
||||
},
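
A condensed sketch of the server-first creation order the rewritten createWorkflow follows (illustrative only; the full implementation above also seeds history, subblock values, and error state):

async function createWorkflowServerFirst(options: { workspaceId?: string }) {
  // 1. Create the row on the server first so the ID is authoritative.
  const res = await fetch('/api/workflows', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ name: 'my workflow', workspaceId: options.workspaceId }),
  })
  if (!res.ok) throw new Error('Failed to create workflow')
  const { id: serverWorkflowId } = await res.json()

  // 2. Only then register local state under the server-generated ID, so later
  //    Socket.IO operations never reference an ID the server does not know about.
  return serverWorkflowId
}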
|
||||
|
||||
/**
|
||||
@@ -866,16 +830,15 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
state: any,
|
||||
metadata: Partial<WorkflowMetadata>
|
||||
) => {
|
||||
const { workflows } = get()
|
||||
const id = crypto.randomUUID()
|
||||
|
||||
// Generate workflow metadata with marketplace properties
|
||||
const newWorkflow: WorkflowMetadata = {
|
||||
id,
|
||||
name: metadata.name || 'Marketplace workflow',
|
||||
name: metadata.name || generateUniqueName(),
|
||||
lastModified: new Date(),
|
||||
description: metadata.description || 'Imported from marketplace',
|
||||
color: metadata.color || getNextWorkflowColor(workflows),
|
||||
color: metadata.color || getNextWorkflowColor(),
|
||||
marketplaceData: { id: marketplaceId, status: 'temp' as const },
|
||||
}
|
||||
|
||||
@@ -1031,7 +994,7 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
|
||||
name: `${sourceWorkflow.name} (Copy)`,
|
||||
lastModified: new Date(),
|
||||
description: sourceWorkflow.description,
|
||||
color: getNextWorkflowColor(workflows),
|
||||
color: getNextWorkflowColor(),
|
||||
workspaceId, // Include the workspaceId in the new workflow
|
||||
folderId: sourceWorkflow.folderId, // Include the folderId from source workflow
|
||||
// Do not copy marketplace data
|
||||
|
||||
@@ -1,62 +1,247 @@
|
||||
import type { WorkflowMetadata } from './types'
|
||||
|
||||
// Available workflow colors
|
||||
export const WORKFLOW_COLORS = [
|
||||
'#3972F6',
|
||||
'#F639DD',
|
||||
'#F6B539',
|
||||
'#8139F6',
|
||||
'#39B54A',
|
||||
'#39B5AB',
|
||||
'#F66839',
|
||||
// Original colors
|
||||
'#3972F6', // Blue
|
||||
'#F639DD', // Pink/Magenta
|
||||
'#F6B539', // Orange/Yellow
|
||||
'#8139F6', // Purple
|
||||
'#39B54A', // Green
|
||||
'#39B5AB', // Teal
|
||||
'#F66839', // Red/Orange
|
||||
|
||||
// Additional vibrant blues
|
||||
'#2E5BFF', // Bright Blue
|
||||
'#4A90FF', // Sky Blue
|
||||
'#1E40AF', // Deep Blue
|
||||
'#0EA5E9', // Cyan Blue
|
||||
'#3B82F6', // Royal Blue
|
||||
'#6366F1', // Indigo
|
||||
'#1D4ED8', // Electric Blue
|
||||
|
||||
// Additional vibrant purples
|
||||
'#A855F7', // Bright Purple
|
||||
'#C084FC', // Light Purple
|
||||
'#7C3AED', // Deep Purple
|
||||
'#9333EA', // Violet
|
||||
'#8B5CF6', // Medium Purple
|
||||
'#6D28D9', // Dark Purple
|
||||
'#5B21B6', // Deep Violet
|
||||
|
||||
// Additional vibrant pinks/magentas
|
||||
'#EC4899', // Hot Pink
|
||||
'#F97316', // Pink Orange
|
||||
'#E11D48', // Rose
|
||||
'#BE185D', // Deep Pink
|
||||
'#DB2777', // Pink Red
|
||||
'#F472B6', // Light Pink
|
||||
'#F59E0B', // Amber Pink
|
||||
|
||||
// Additional vibrant greens
|
||||
'#10B981', // Emerald
|
||||
'#059669', // Green Teal
|
||||
'#16A34A', // Forest Green
|
||||
'#22C55E', // Lime Green
|
||||
'#84CC16', // Yellow Green
|
||||
'#65A30D', // Olive Green
|
||||
'#15803D', // Dark Green
|
||||
|
||||
// Additional vibrant teals/cyans
|
||||
'#06B6D4', // Cyan
|
||||
'#0891B2', // Dark Cyan
|
||||
'#0E7490', // Teal Blue
|
||||
'#14B8A6', // Turquoise
|
||||
'#0D9488', // Dark Teal
|
||||
'#047857', // Sea Green
|
||||
'#059669', // Mint Green
|
||||
|
||||
// Additional vibrant oranges/reds
|
||||
'#EA580C', // Bright Orange
|
||||
'#DC2626', // Red
|
||||
'#B91C1C', // Dark Red
|
||||
'#EF4444', // Light Red
|
||||
'#F97316', // Orange
|
||||
'#FB923C', // Light Orange
|
||||
'#FDBA74', // Peach
|
||||
|
||||
// Additional vibrant yellows/golds
|
||||
'#FBBF24', // Gold
|
||||
'#F59E0B', // Amber
|
||||
'#D97706', // Dark Amber
|
||||
'#92400E', // Bronze
|
||||
'#EAB308', // Yellow
|
||||
'#CA8A04', // Dark Yellow
|
||||
'#A16207', // Mustard
|
||||
|
||||
// Additional unique vibrant colors
|
||||
'#FF6B6B', // Coral
|
||||
'#4ECDC4', // Mint
|
||||
'#45B7D1', // Light Blue
|
||||
'#96CEB4', // Sage
|
||||
'#FFEAA7', // Cream
|
||||
'#DDA0DD', // Plum
|
||||
'#98D8C8', // Seafoam
|
||||
'#F7DC6F', // Banana
|
||||
'#BB8FCE', // Lavender
|
||||
'#85C1E9', // Baby Blue
|
||||
'#F8C471', // Peach
|
||||
'#82E0AA', // Light Green
|
||||
'#F1948A', // Salmon
|
||||
'#D7BDE2', // Lilac
|
||||
'#D7BDE2', // Lilac
|
||||
]
|
||||
|
||||
// Generates a unique name for a new workflow
|
||||
export function generateUniqueName(existingWorkflows: Record<string, WorkflowMetadata>): string {
|
||||
// Extract numbers from existing workflow names using regex
|
||||
const numbers = Object.values(existingWorkflows)
|
||||
.map((w) => {
|
||||
const match = w.name.match(/Workflow (\d+)/)
|
||||
return match ? Number.parseInt(match[1]) : 0
|
||||
})
|
||||
.filter((n) => n > 0)
|
||||
// Random adjectives and nouns for generating creative workflow names
|
||||
const ADJECTIVES = [
|
||||
'Blazing',
|
||||
'Crystal',
|
||||
'Golden',
|
||||
'Silver',
|
||||
'Mystic',
|
||||
'Cosmic',
|
||||
'Electric',
|
||||
'Frozen',
|
||||
'Burning',
|
||||
'Shining',
|
||||
'Dancing',
|
||||
'Flying',
|
||||
'Roaring',
|
||||
'Whispering',
|
||||
'Glowing',
|
||||
'Sparkling',
|
||||
'Thunder',
|
||||
'Lightning',
|
||||
'Storm',
|
||||
'Ocean',
|
||||
'Mountain',
|
||||
'Forest',
|
||||
'Desert',
|
||||
'Arctic',
|
||||
'Tropical',
|
||||
'Midnight',
|
||||
'Dawn',
|
||||
'Sunset',
|
||||
'Rainbow',
|
||||
'Diamond',
|
||||
'Ruby',
|
||||
'Emerald',
|
||||
'Sapphire',
|
||||
'Pearl',
|
||||
'Jade',
|
||||
'Amber',
|
||||
'Coral',
|
||||
'Ivory',
|
||||
'Obsidian',
|
||||
'Marble',
|
||||
'Velvet',
|
||||
'Silk',
|
||||
'Satin',
|
||||
'Linen',
|
||||
'Cotton',
|
||||
'Wool',
|
||||
'Cashmere',
|
||||
'Denim',
|
||||
'Neon',
|
||||
'Pastel',
|
||||
'Vibrant',
|
||||
'Muted',
|
||||
'Bold',
|
||||
'Subtle',
|
||||
'Bright',
|
||||
'Dark',
|
||||
]
|
||||
|
||||
if (numbers.length === 0) {
|
||||
return 'Workflow 1'
|
||||
}
|
||||
const NOUNS = [
|
||||
'Phoenix',
|
||||
'Dragon',
|
||||
'Eagle',
|
||||
'Wolf',
|
||||
'Lion',
|
||||
'Tiger',
|
||||
'Panther',
|
||||
'Falcon',
|
||||
'Hawk',
|
||||
'Raven',
|
||||
'Swan',
|
||||
'Dove',
|
||||
'Butterfly',
|
||||
'Firefly',
|
||||
'Dragonfly',
|
||||
'Hummingbird',
|
||||
'Galaxy',
|
||||
'Nebula',
|
||||
'Comet',
|
||||
'Meteor',
|
||||
'Star',
|
||||
'Moon',
|
||||
'Sun',
|
||||
'Planet',
|
||||
'Asteroid',
|
||||
'Constellation',
|
||||
'Aurora',
|
||||
'Eclipse',
|
||||
'Solstice',
|
||||
'Equinox',
|
||||
'Horizon',
|
||||
'Zenith',
|
||||
'Castle',
|
||||
'Tower',
|
||||
'Bridge',
|
||||
'Garden',
|
||||
'Fountain',
|
||||
'Palace',
|
||||
'Temple',
|
||||
'Cathedral',
|
||||
'Lighthouse',
|
||||
'Windmill',
|
||||
'Waterfall',
|
||||
'Canyon',
|
||||
'Valley',
|
||||
'Peak',
|
||||
'Ridge',
|
||||
'Cliff',
|
||||
'Ocean',
|
||||
'River',
|
||||
'Lake',
|
||||
'Stream',
|
||||
'Pond',
|
||||
'Bay',
|
||||
'Cove',
|
||||
'Harbor',
|
||||
'Island',
|
||||
'Peninsula',
|
||||
'Archipelago',
|
||||
'Atoll',
|
||||
'Reef',
|
||||
'Lagoon',
|
||||
'Fjord',
|
||||
'Delta',
|
||||
'Cake',
|
||||
'Cookie',
|
||||
'Muffin',
|
||||
'Cupcake',
|
||||
'Pie',
|
||||
'Tart',
|
||||
'Brownie',
|
||||
'Donut',
|
||||
'Pancake',
|
||||
'Waffle',
|
||||
'Croissant',
|
||||
'Bagel',
|
||||
'Pretzel',
|
||||
'Biscuit',
|
||||
'Scone',
|
||||
'Crumpet',
|
||||
]
|
||||
|
||||
// Find the maximum number and add 1
|
||||
const nextNumber = Math.max(...numbers) + 1
|
||||
return `Workflow ${nextNumber}`
|
||||
// Generates a random name for a new workflow
|
||||
export function generateUniqueName(): string {
|
||||
const adjective = ADJECTIVES[Math.floor(Math.random() * ADJECTIVES.length)]
|
||||
const noun = NOUNS[Math.floor(Math.random() * NOUNS.length)]
|
||||
return `${adjective.toLowerCase()}-${noun.toLowerCase()}`
|
||||
}
|
||||
|
||||
// Determines the next color to use for a new workflow based on the color of the newest workflow
|
||||
export function getNextWorkflowColor(existingWorkflows: Record<string, WorkflowMetadata>): string {
|
||||
const workflowArray = Object.values(existingWorkflows)
|
||||
|
||||
if (workflowArray.length === 0) {
|
||||
return WORKFLOW_COLORS[0]
|
||||
}
|
||||
|
||||
// Sort workflows by lastModified date (newest first)
|
||||
const sortedWorkflows = [...workflowArray].sort((a, b) => {
|
||||
const dateA =
|
||||
a.lastModified instanceof Date ? a.lastModified.getTime() : new Date(a.lastModified).getTime()
|
||||
const dateB =
|
||||
b.lastModified instanceof Date ? b.lastModified.getTime() : new Date(b.lastModified).getTime()
|
||||
return dateB - dateA
|
||||
})
|
||||
|
||||
// Get the newest workflow (first in sorted array)
|
||||
const newestWorkflow = sortedWorkflows[0]
|
||||
|
||||
// Find the index of the newest workflow's color, defaulting to -1 if undefined
|
||||
const currentColorIndex = newestWorkflow?.color
|
||||
? WORKFLOW_COLORS.indexOf(newestWorkflow.color)
|
||||
: -1
|
||||
|
||||
// Get next color index, wrapping around to 0 if we reach the end
|
||||
const nextColorIndex = (currentColorIndex + 1) % WORKFLOW_COLORS.length
|
||||
|
||||
return WORKFLOW_COLORS[nextColorIndex]
|
||||
// Generates a random color for a new workflow
|
||||
export function getNextWorkflowColor(): string {
|
||||
// Simply return a random color from the available colors
|
||||
return WORKFLOW_COLORS[Math.floor(Math.random() * WORKFLOW_COLORS.length)]
|
||||
}
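
Example output of the new helpers (values vary per call, since both now pick at random):

const name = generateUniqueName() // e.g. 'cosmic-falcon'
const color = getNextWorkflowColor() // e.g. '#8139F6'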

@@ -1,6 +1,23 @@
import type { ToolConfig } from '../types'
import type { TelegramMessageParams, TelegramMessageResponse } from './types'

// Helper function to convert basic markdown to HTML
function convertMarkdownToHTML(text: string): string {
  return (
    text
      // Bold: **text** or __text__ -> <b>text</b>
      .replace(/\*\*(.*?)\*\*/g, '<b>$1</b>')
      .replace(/__(.*?)__/g, '<b>$1</b>')
      // Italic: *text* or _text_ -> <i>text</i>
      .replace(/\*(.*?)\*/g, '<i>$1</i>')
      .replace(/_(.*?)_/g, '<i>$1</i>')
      // Code: `text` -> <code>text</code>
      .replace(/`(.*?)`/g, '<code>$1</code>')
      // Links: [text](url) -> <a href="url">text</a>
      .replace(/\[([^\]]+)\]\(([^)]+)\)/g, '<a href="$2">$1</a>')
  )
}

export const telegramMessageTool: ToolConfig<TelegramMessageParams, TelegramMessageResponse> = {
  id: 'telegram_message',
  name: 'Telegram Message',
@@ -36,7 +53,8 @@ export const telegramMessageTool: ToolConfig<TelegramMessageParams, TelegramMess
    }),
    body: (params: TelegramMessageParams) => ({
      chat_id: params.chatId,
      text: params.text,
      text: convertMarkdownToHTML(params.text),
      parse_mode: 'HTML',
    }),
  },
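
A worked example (illustrative input) of what the request body now sends after the conversion:

const input = 'Check **docs** at [site](https://example.com) and `npm run dev`'
convertMarkdownToHTML(input)
// => 'Check <b>docs</b> at <a href="https://example.com">site</a> and <code>npm run dev</code>'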