Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 15:38:00 -05:00)

Compare commits — 1 commit

| Author | SHA1 | Date |
|---|---|---|
| | 016cd6750c | |
@@ -764,6 +764,20 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
      bucket: 'test-s3-bucket',
      region: 'us-east-1',
    },
    S3_KB_CONFIG: {
      bucket: 'test-s3-kb-bucket',
      region: 'us-east-1',
    },
    BLOB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-container',
    },
    BLOB_KB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-kb-container',
    },
  }))

  vi.doMock('@aws-sdk/client-s3', () => ({

@@ -806,6 +820,11 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
      accountKey: 'testkey',
      containerName: 'test-container',
    },
    BLOB_KB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-kb-container',
    },
  }))

  vi.doMock('@azure/storage-blob', () => ({
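For orientation, a minimal sketch of how the KB config constants are stubbed before the route is imported, assuming Vitest's `vi.doMock` as used above (the helper name `mockUploadSetup` is illustrative; the repo's `createStorageProviderMocks` also stubs the S3 and Azure Blob client modules):

```ts
import { vi } from 'vitest'

// Illustrative helper mirroring the mock above.
export function mockUploadSetup() {
  vi.doMock('@/lib/uploads/setup', () => ({
    S3_CONFIG: { bucket: 'test-s3-bucket', region: 'us-east-1' },
    S3_KB_CONFIG: { bucket: 'test-s3-kb-bucket', region: 'us-east-1' },
    BLOB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-container',
    },
    BLOB_KB_CONFIG: {
      accountName: 'testaccount',
      accountKey: 'testkey',
      containerName: 'test-kb-container',
    },
  }))
}
```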
@@ -39,8 +39,9 @@ describe('/api/files/presigned', () => {
      const response = await POST(request)
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
      expect(data.error).toBe('Direct uploads are only available when cloud storage is enabled')
      expect(data.code).toBe('STORAGE_CONFIG_ERROR')
      expect(data.directUploadSupported).toBe(false)
    })

@@ -64,7 +65,8 @@ describe('/api/files/presigned', () => {
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('Missing fileName or contentType')
      expect(data.error).toBe('fileName is required and cannot be empty')
      expect(data.code).toBe('VALIDATION_ERROR')
    })

    it('should return error when contentType is missing', async () => {
@@ -87,7 +89,59 @@ describe('/api/files/presigned', () => {
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('Missing fileName or contentType')
      expect(data.error).toBe('contentType is required and cannot be empty')
      expect(data.code).toBe('VALIDATION_ERROR')
    })

    it('should return error when fileSize is invalid', async () => {
      setupFileApiMocks({
        cloudEnabled: true,
        storageProvider: 's3',
      })

      const { POST } = await import('./route')

      const request = new NextRequest('http://localhost:3000/api/files/presigned', {
        method: 'POST',
        body: JSON.stringify({
          fileName: 'test.txt',
          contentType: 'text/plain',
          fileSize: 0,
        }),
      })

      const response = await POST(request)
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('fileSize must be a positive number')
      expect(data.code).toBe('VALIDATION_ERROR')
    })

    it('should return error when file size exceeds limit', async () => {
      setupFileApiMocks({
        cloudEnabled: true,
        storageProvider: 's3',
      })

      const { POST } = await import('./route')

      const largeFileSize = 150 * 1024 * 1024 // 150MB (exceeds 100MB limit)
      const request = new NextRequest('http://localhost:3000/api/files/presigned', {
        method: 'POST',
        body: JSON.stringify({
          fileName: 'large-file.txt',
          contentType: 'text/plain',
          fileSize: largeFileSize,
        }),
      })

      const response = await POST(request)
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toContain('exceeds maximum allowed size')
      expect(data.code).toBe('VALIDATION_ERROR')
    })
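These validation tests mirror the route's input contract (non-empty fileName and contentType, a positive fileSize, 100MB cap). A hedged sketch of a client-side pre-check that avoids an obviously doomed round trip; the function name is illustrative and the route remains the authoritative validator:

```ts
const MAX_FILE_SIZE = 100 * 1024 * 1024 // matches the server-side 100MB limit

// Illustrative helper; returns an error message or null if the request looks valid.
function validatePresignedRequest(
  fileName: string,
  contentType: string,
  fileSize: number
): string | null {
  if (!fileName?.trim()) return 'fileName is required and cannot be empty'
  if (!contentType?.trim()) return 'contentType is required and cannot be empty'
  if (!fileSize || fileSize <= 0) return 'fileSize must be a positive number'
  if (fileSize > MAX_FILE_SIZE) {
    return `File size (${fileSize} bytes) exceeds maximum allowed size (${MAX_FILE_SIZE} bytes)`
  }
  return null
}
```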
    it('should generate S3 presigned URL successfully', async () => {
@@ -122,6 +176,34 @@ describe('/api/files/presigned', () => {
      expect(data.directUploadSupported).toBe(true)
    })

    it('should generate knowledge-base S3 presigned URL with kb prefix', async () => {
      setupFileApiMocks({
        cloudEnabled: true,
        storageProvider: 's3',
      })

      const { POST } = await import('./route')

      const request = new NextRequest(
        'http://localhost:3000/api/files/presigned?type=knowledge-base',
        {
          method: 'POST',
          body: JSON.stringify({
            fileName: 'knowledge-doc.pdf',
            contentType: 'application/pdf',
            fileSize: 2048,
          }),
        }
      )

      const response = await POST(request)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
      expect(data.directUploadSupported).toBe(true)
    })

    it('should generate Azure Blob presigned URL successfully', async () => {
      setupFileApiMocks({
        cloudEnabled: true,
@@ -182,8 +264,9 @@ describe('/api/files/presigned', () => {
      const response = await POST(request)
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data.error).toBe('Unknown storage provider')
      expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
      expect(data.error).toBe('Unknown storage provider: unknown') // Updated error message
      expect(data.code).toBe('STORAGE_CONFIG_ERROR')
      expect(data.directUploadSupported).toBe(false)
    })

@@ -225,8 +308,10 @@ describe('/api/files/presigned', () => {
      const data = await response.json()

      expect(response.status).toBe(500)
      expect(data.error).toBe('Error')
      expect(data.message).toBe('S3 service unavailable')
      expect(data.error).toBe(
        'Failed to generate S3 presigned URL - check AWS credentials and permissions'
      ) // Updated error message
      expect(data.code).toBe('STORAGE_CONFIG_ERROR')
    })

    it('should handle Azure Blob errors gracefully', async () => {
@@ -269,8 +354,8 @@ describe('/api/files/presigned', () => {
      const data = await response.json()

      expect(response.status).toBe(500)
      expect(data.error).toBe('Error')
      expect(data.message).toBe('Azure service unavailable')
      expect(data.error).toBe('Failed to generate Azure Blob presigned URL') // Updated error message
      expect(data.code).toBe('STORAGE_CONFIG_ERROR')
    })

    it('should handle malformed JSON gracefully', async () => {
@@ -289,9 +374,9 @@ describe('/api/files/presigned', () => {
      const response = await POST(request)
      const data = await response.json()

      expect(response.status).toBe(500)
      expect(data.error).toBe('SyntaxError')
      expect(data.message).toContain('Unexpected token')
      expect(response.status).toBe(400) // Changed from 500 to 400 (ValidationError)
      expect(data.error).toBe('Invalid JSON in request body') // Updated error message
      expect(data.code).toBe('VALIDATION_ERROR')
    })
  })
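Taken together, the assertions above describe the response envelope the route now returns. A sketch of the shapes the tests rely on; the field names come straight from the assertions, but the interfaces themselves are illustrative and are not declared in the route:

```ts
// Success response (HTTP 200)
interface PresignedUrlSuccess {
  presignedUrl: string
  fileInfo: { path: string; key: string; name: string; size: number; type: string }
  directUploadSupported: true
  uploadHeaders?: Record<string, string> // returned for Azure Blob uploads only
}

// Error response (HTTP 400 for VALIDATION_ERROR, HTTP 500 for STORAGE_CONFIG_ERROR)
interface PresignedUrlFailure {
  error: string
  code: 'VALIDATION_ERROR' | 'STORAGE_CONFIG_ERROR'
  directUploadSupported: false
}
```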
@@ -6,7 +6,7 @@ import { createLogger } from '@/lib/logs/console-logger'
import { getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
import { getBlobServiceClient } from '@/lib/uploads/blob/blob-client'
import { getS3Client, sanitizeFilenameForMetadata } from '@/lib/uploads/s3/s3-client'
import { BLOB_CONFIG, S3_CONFIG } from '@/lib/uploads/setup'
import { BLOB_CONFIG, BLOB_KB_CONFIG, S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
import { createErrorResponse, createOptionsResponse } from '../utils'

const logger = createLogger('PresignedUploadAPI')
@@ -17,124 +17,148 @@ interface PresignedUrlRequest {
  fileSize: number
}

type UploadType = 'general' | 'knowledge-base'

class PresignedUrlError extends Error {
  constructor(
    message: string,
    public code: string,
    public statusCode = 400
  ) {
    super(message)
    this.name = 'PresignedUrlError'
  }
}

class StorageConfigError extends PresignedUrlError {
  constructor(message: string) {
    super(message, 'STORAGE_CONFIG_ERROR', 500)
  }
}

class ValidationError extends PresignedUrlError {
  constructor(message: string) {
    super(message, 'VALIDATION_ERROR', 400)
  }
}

export async function POST(request: NextRequest) {
  try {
    // Parse the request body
    const data: PresignedUrlRequest = await request.json()
    const { fileName, contentType, fileSize } = data

    if (!fileName || !contentType) {
      return NextResponse.json({ error: 'Missing fileName or contentType' }, { status: 400 })
    let data: PresignedUrlRequest
    try {
      data = await request.json()
    } catch {
      throw new ValidationError('Invalid JSON in request body')
    }

    // Only proceed if cloud storage is enabled
    const { fileName, contentType, fileSize } = data

    if (!fileName?.trim()) {
      throw new ValidationError('fileName is required and cannot be empty')
    }
    if (!contentType?.trim()) {
      throw new ValidationError('contentType is required and cannot be empty')
    }
    if (!fileSize || fileSize <= 0) {
      throw new ValidationError('fileSize must be a positive number')
    }

    const MAX_FILE_SIZE = 100 * 1024 * 1024
    if (fileSize > MAX_FILE_SIZE) {
      throw new ValidationError(
        `File size (${fileSize} bytes) exceeds maximum allowed size (${MAX_FILE_SIZE} bytes)`
      )
    }

    const uploadTypeParam = request.nextUrl.searchParams.get('type')
    const uploadType: UploadType =
      uploadTypeParam === 'knowledge-base' ? 'knowledge-base' : 'general'

    if (!isUsingCloudStorage()) {
      return NextResponse.json(
        {
          error: 'Direct uploads are only available when cloud storage is enabled',
          directUploadSupported: false,
        },
        { status: 400 }
      throw new StorageConfigError(
        'Direct uploads are only available when cloud storage is enabled'
      )
    }

    const storageProvider = getStorageProvider()
    logger.info(`Generating ${uploadType} presigned URL for ${fileName} using ${storageProvider}`)

    switch (storageProvider) {
      case 's3':
        return await handleS3PresignedUrl(fileName, contentType, fileSize)
        return await handleS3PresignedUrl(fileName, contentType, fileSize, uploadType)
      case 'blob':
        return await handleBlobPresignedUrl(fileName, contentType, fileSize)
        return await handleBlobPresignedUrl(fileName, contentType, fileSize, uploadType)
      default:
        return NextResponse.json(
          {
            error: 'Unknown storage provider',
            directUploadSupported: false,
          },
          { status: 400 }
        )
        throw new StorageConfigError(`Unknown storage provider: ${storageProvider}`)
    }
  } catch (error) {
    logger.error('Error generating presigned URL:', error)

    if (error instanceof PresignedUrlError) {
      return NextResponse.json(
        {
          error: error.message,
          code: error.code,
          directUploadSupported: false,
        },
        { status: error.statusCode }
      )
    }

    return createErrorResponse(
      error instanceof Error ? error : new Error('Failed to generate presigned URL')
    )
  }
}
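Before the provider-specific handlers below, a minimal sketch of how a browser client consumes this endpoint, assuming the response shapes described above (the knowledge-base upload hook later in this diff follows the same pattern; the function name here is illustrative):

```ts
async function uploadViaPresignedUrl(file: File, type: 'general' | 'knowledge-base' = 'general') {
  const query = type === 'knowledge-base' ? '?type=knowledge-base' : ''
  const res = await fetch(`/api/files/presigned${query}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ fileName: file.name, contentType: file.type, fileSize: file.size }),
  })
  const data = await res.json()
  if (!res.ok || !data.directUploadSupported) {
    throw new Error(`${data.code ?? 'ERROR'}: ${data.error ?? 'presigned URL request failed'}`)
  }

  // PUT directly to the storage provider; Azure needs the extra headers returned by the route.
  const put = await fetch(data.presignedUrl, {
    method: 'PUT',
    headers: { 'Content-Type': file.type, ...(data.uploadHeaders ?? {}) },
    body: file,
  })
  if (!put.ok) throw new Error(`Direct upload failed: ${put.status}`)

  return data.fileInfo // { path, key, name, size, type }
}
```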
async function handleS3PresignedUrl(fileName: string, contentType: string, fileSize: number) {
  // Create a unique key for the file
  const safeFileName = fileName.replace(/\s+/g, '-')
  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`

  // Sanitize the original filename for S3 metadata to prevent header errors
  const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)

  // Create the S3 command
  const command = new PutObjectCommand({
    Bucket: S3_CONFIG.bucket,
    Key: uniqueKey,
    ContentType: contentType,
    Metadata: {
      originalName: sanitizedOriginalName,
      uploadedAt: new Date().toISOString(),
    },
  })

  // Generate the presigned URL
  const presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })

  // Create a path for API to serve the file
  const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`

  logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)

  return NextResponse.json({
    presignedUrl,
    fileInfo: {
      path: servePath,
      key: uniqueKey,
      name: fileName,
      size: fileSize,
      type: contentType,
    },
    directUploadSupported: true,
  })
}

async function handleBlobPresignedUrl(fileName: string, contentType: string, fileSize: number) {
  // Create a unique key for the file
  const safeFileName = fileName.replace(/\s+/g, '-')
  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`

async function handleS3PresignedUrl(
  fileName: string,
  contentType: string,
  fileSize: number,
  uploadType: UploadType
) {
  try {
    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
    const config = uploadType === 'knowledge-base' ? S3_KB_CONFIG : S3_CONFIG

    // Generate SAS token for upload (write permission)
    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
      await import('@azure/storage-blob')

    const sasOptions = {
      containerName: BLOB_CONFIG.containerName,
      blobName: uniqueKey,
      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
    if (!config.bucket || !config.region) {
      throw new StorageConfigError(`S3 configuration missing for ${uploadType} uploads`)
    }

    const sasToken = generateBlobSASQueryParameters(
      sasOptions,
      new StorageSharedKeyCredential(BLOB_CONFIG.accountName, BLOB_CONFIG.accountKey || '')
    ).toString()
    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`

    const presignedUrl = `${blockBlobClient.url}?${sasToken}`
    const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)

    // Create a path for API to serve the file
    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
    const metadata: Record<string, string> = {
      originalName: sanitizedOriginalName,
      uploadedAt: new Date().toISOString(),
    }

    logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
    if (uploadType === 'knowledge-base') {
      metadata.purpose = 'knowledge-base'
    }

    const command = new PutObjectCommand({
      Bucket: config.bucket,
      Key: uniqueKey,
      ContentType: contentType,
      Metadata: metadata,
    })

    let presignedUrl: string
    try {
      presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
    } catch (s3Error) {
      logger.error('Failed to generate S3 presigned URL:', s3Error)
      throw new StorageConfigError(
        'Failed to generate S3 presigned URL - check AWS credentials and permissions'
      )
    }

    const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`

    logger.info(`Generated ${uploadType} S3 presigned URL for ${fileName} (${uniqueKey})`)

    return NextResponse.json({
      presignedUrl,
@@ -146,22 +170,103 @@ async function handleBlobPresignedUrl(fileName: string, contentType: string, fil
      type: contentType,
      },
      directUploadSupported: true,
      uploadHeaders: {
        'x-ms-blob-type': 'BlockBlob',
        'x-ms-blob-content-type': contentType,
        'x-ms-meta-originalname': encodeURIComponent(fileName),
        'x-ms-meta-uploadedat': new Date().toISOString(),
      },
    })
  } catch (error) {
    logger.error('Error generating Blob presigned URL:', error)
    return createErrorResponse(
      error instanceof Error ? error : new Error('Failed to generate Blob presigned URL')
    )
    if (error instanceof PresignedUrlError) {
      throw error
    }
    logger.error('Error in S3 presigned URL generation:', error)
    throw new StorageConfigError('Failed to generate S3 presigned URL')
  }
}
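For the download direction, a hedged sketch of issuing a time-limited read URL with the same SDK primitives. This is illustrative only: the serve route later in this diff streams KB objects through the server instead, and the `@aws-sdk/s3-request-presigner` import is an assumption about where `getSignedUrl` comes from:

```ts
import { GetObjectCommand } from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
import { getS3Client } from '@/lib/uploads/s3/s3-client'
import { S3_KB_CONFIG } from '@/lib/uploads/setup'

// Illustrative: a read-scoped presigned URL for a KB object, expiring in 1 hour.
async function getKbDownloadUrl(key: string): Promise<string> {
  const command = new GetObjectCommand({ Bucket: S3_KB_CONFIG.bucket, Key: key })
  return getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
}
```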
async function handleBlobPresignedUrl(
  fileName: string,
  contentType: string,
  fileSize: number,
  uploadType: UploadType
) {
  try {
    const config = uploadType === 'knowledge-base' ? BLOB_KB_CONFIG : BLOB_CONFIG

    if (
      !config.accountName ||
      !config.containerName ||
      (!config.accountKey && !config.connectionString)
    ) {
      throw new StorageConfigError(`Azure Blob configuration missing for ${uploadType} uploads`)
    }

    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`

    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(config.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)

    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
      await import('@azure/storage-blob')

    const sasOptions = {
      containerName: config.containerName,
      blobName: uniqueKey,
      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
    }

    let sasToken: string
    try {
      sasToken = generateBlobSASQueryParameters(
        sasOptions,
        new StorageSharedKeyCredential(config.accountName, config.accountKey || '')
      ).toString()
    } catch (blobError) {
      logger.error('Failed to generate Azure Blob SAS token:', blobError)
      throw new StorageConfigError(
        'Failed to generate Azure Blob SAS token - check Azure credentials and permissions'
      )
    }

    const presignedUrl = `${blockBlobClient.url}?${sasToken}`

    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`

    logger.info(`Generated ${uploadType} Azure Blob presigned URL for ${fileName} (${uniqueKey})`)

    const uploadHeaders: Record<string, string> = {
      'x-ms-blob-type': 'BlockBlob',
      'x-ms-blob-content-type': contentType,
      'x-ms-meta-originalname': encodeURIComponent(fileName),
      'x-ms-meta-uploadedat': new Date().toISOString(),
    }

    if (uploadType === 'knowledge-base') {
      uploadHeaders['x-ms-meta-purpose'] = 'knowledge-base'
    }

    return NextResponse.json({
      presignedUrl,
      fileInfo: {
        path: servePath,
        key: uniqueKey,
        name: fileName,
        size: fileSize,
        type: contentType,
      },
      directUploadSupported: true,
      uploadHeaders,
    })
  } catch (error) {
    if (error instanceof PresignedUrlError) {
      throw error
    }
    logger.error('Error in Azure Blob presigned URL generation:', error)
    throw new StorageConfigError('Failed to generate Azure Blob presigned URL')
  }
}
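Similarly hedged, the read-side counterpart for Azure would scope the SAS to `'r'` instead of `'w'`. Illustrative only; the serve route downloads blobs through the server rather than handing out read SAS URLs:

```ts
import {
  BlobSASPermissions,
  generateBlobSASQueryParameters,
  StorageSharedKeyCredential,
} from '@azure/storage-blob'
import { getBlobServiceClient } from '@/lib/uploads/blob/blob-client'
import { BLOB_KB_CONFIG } from '@/lib/uploads/setup'

// Illustrative: a one-hour read SAS URL for a KB blob.
function getKbBlobDownloadUrl(blobName: string): string {
  const containerClient = getBlobServiceClient().getContainerClient(BLOB_KB_CONFIG.containerName)
  const sasToken = generateBlobSASQueryParameters(
    {
      containerName: BLOB_KB_CONFIG.containerName,
      blobName,
      permissions: BlobSASPermissions.parse('r'), // read instead of write
      startsOn: new Date(),
      expiresOn: new Date(Date.now() + 3600 * 1000),
    },
    new StorageSharedKeyCredential(BLOB_KB_CONFIG.accountName, BLOB_KB_CONFIG.accountKey || '')
  ).toString()
  return `${containerClient.getBlockBlobClient(blobName).url}?${sasToken}`
}
```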
// Handle preflight requests
export async function OPTIONS() {
  return createOptionsResponse()
}
@@ -1,7 +1,8 @@
import { readFile } from 'fs/promises'
import type { NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
import { downloadFile, getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
import { BLOB_KB_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
import '@/lib/uploads/setup.server'

import {
@@ -16,6 +17,19 @@ export const dynamic = 'force-dynamic'

const logger = createLogger('FilesServeAPI')

async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = []
    readableStream.on('data', (data) => {
      chunks.push(data instanceof Buffer ? data : Buffer.from(data))
    })
    readableStream.on('end', () => {
      resolve(Buffer.concat(chunks))
    })
    readableStream.on('error', reject)
  })
}

/**
 * Main API route handler for serving files
 */
@@ -85,12 +99,65 @@ async function handleLocalFile(filename: string): Promise<NextResponse> {
  }
}

async function downloadKBFile(cloudKey: string): Promise<Buffer> {
  const storageProvider = getStorageProvider()

  if (storageProvider === 'blob') {
    logger.info(`Downloading KB file from Azure Blob Storage: ${cloudKey}`)
    // Use KB-specific blob configuration
    const { getBlobServiceClient } = await import('@/lib/uploads/blob/blob-client')
    const blobServiceClient = getBlobServiceClient()
    const containerClient = blobServiceClient.getContainerClient(BLOB_KB_CONFIG.containerName)
    const blockBlobClient = containerClient.getBlockBlobClient(cloudKey)

    const downloadBlockBlobResponse = await blockBlobClient.download()
    if (!downloadBlockBlobResponse.readableStreamBody) {
      throw new Error('Failed to get readable stream from blob download')
    }

    // Convert stream to buffer
    return await streamToBuffer(downloadBlockBlobResponse.readableStreamBody)
  }

  if (storageProvider === 's3') {
    logger.info(`Downloading KB file from S3: ${cloudKey}`)
    // Use KB-specific S3 configuration
    const { getS3Client } = await import('@/lib/uploads/s3/s3-client')
    const { GetObjectCommand } = await import('@aws-sdk/client-s3')

    const s3Client = getS3Client()
    const command = new GetObjectCommand({
      Bucket: S3_KB_CONFIG.bucket,
      Key: cloudKey,
    })

    const response = await s3Client.send(command)
    if (!response.Body) {
      throw new Error('No body in S3 response')
    }

    // Convert stream to buffer using the same method as the regular S3 client
    const stream = response.Body as any
    return new Promise<Buffer>((resolve, reject) => {
      const chunks: Buffer[] = []
      stream.on('data', (chunk: Buffer) => chunks.push(chunk))
      stream.on('end', () => resolve(Buffer.concat(chunks)))
      stream.on('error', reject)
    })
  }

  throw new Error(`Unsupported storage provider for KB files: ${storageProvider}`)
}
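As an aside, recent versions of the AWS SDK v3 expose a convenience method on the response body that can stand in for the manual chunk accumulation above. Whether it is available depends on the SDK version in use, so this is a hedged alternative rather than a required change:

```ts
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'

// Hedged alternative: relies on the SDK's Body exposing transformToByteArray().
async function getObjectAsBuffer(client: S3Client, bucket: string, key: string): Promise<Buffer> {
  const response = await client.send(new GetObjectCommand({ Bucket: bucket, Key: key }))
  if (!response.Body) {
    throw new Error('No body in S3 response')
  }
  return Buffer.from(await response.Body.transformToByteArray())
}
```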
/**
 * Proxy cloud file through our server
 */
async function handleCloudProxy(cloudKey: string): Promise<NextResponse> {
  try {
    const fileBuffer = await downloadFile(cloudKey)
    // Check if this is a KB file (starts with 'kb/')
    const isKBFile = cloudKey.startsWith('kb/')

    const fileBuffer = isKBFile ? await downloadKBFile(cloudKey) : await downloadFile(cloudKey)

    // Extract the original filename from the key (last part after last /)
    const originalFilename = cloudKey.split('/').pop() || 'download'
@@ -104,7 +104,7 @@ async function createWorkspace(userId: string, name: string) {
    updatedAt: now,
  })

  // Create "Workflow 1" for the workspace with start block
  // Create initial workflow for the workspace with start block
  const starterId = crypto.randomUUID()
  const initialState = {
    blocks: {
@@ -170,7 +170,7 @@ async function createWorkspace(userId: string, name: string) {
    userId,
    workspaceId,
    folderId: null,
    name: 'Workflow 1',
    name: 'default-agent',
    description: 'Your first workflow - start building here!',
    state: initialState,
    color: '#3972F6',
@@ -1,7 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { AlertCircle, Loader2, X } from 'lucide-react'
|
||||
import { AlertCircle, ChevronDown, ChevronUp, Loader2, X } from 'lucide-react'
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogAction,
|
||||
@@ -16,6 +16,7 @@ import { Button } from '@/components/ui/button'
|
||||
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import type { ChunkData, DocumentData } from '@/stores/knowledge/store'
|
||||
|
||||
@@ -28,6 +29,12 @@ interface EditChunkModalProps {
|
||||
isOpen: boolean
|
||||
onClose: () => void
|
||||
onChunkUpdate?: (updatedChunk: ChunkData) => void
|
||||
// New props for navigation
|
||||
allChunks?: ChunkData[]
|
||||
currentPage?: number
|
||||
totalPages?: number
|
||||
onNavigateToChunk?: (chunk: ChunkData) => void
|
||||
onNavigateToPage?: (page: number, selectChunk: 'first' | 'last') => Promise<void>
|
||||
}
|
||||
|
||||
export function EditChunkModal({
|
||||
@@ -37,11 +44,18 @@ export function EditChunkModal({
|
||||
isOpen,
|
||||
onClose,
|
||||
onChunkUpdate,
|
||||
allChunks = [],
|
||||
currentPage = 1,
|
||||
totalPages = 1,
|
||||
onNavigateToChunk,
|
||||
onNavigateToPage,
|
||||
}: EditChunkModalProps) {
|
||||
const [editedContent, setEditedContent] = useState(chunk?.content || '')
|
||||
const [isSaving, setIsSaving] = useState(false)
|
||||
const [isNavigating, setIsNavigating] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
|
||||
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
|
||||
|
||||
// Check if there are unsaved changes
|
||||
const hasUnsavedChanges = editedContent !== (chunk?.content || '')
|
||||
@@ -53,6 +67,13 @@ export function EditChunkModal({
|
||||
}
|
||||
}, [chunk?.id, chunk?.content])
|
||||
  // Find current chunk index in the current page
  const currentChunkIndex = chunk ? allChunks.findIndex((c) => c.id === chunk.id) : -1

  // Calculate navigation availability
  const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
  const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages
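A quick illustration of this availability rule, extracted into a pure function (names and values are illustrative, not part of the component):

```ts
// Mirrors the canNavigatePrev / canNavigateNext expressions above.
function navAvailability(index: number, pageLength: number, page: number, totalPages: number) {
  return {
    canNavigatePrev: index > 0 || page > 1,
    canNavigateNext: index < pageLength - 1 || page < totalPages,
  }
}

// e.g. first chunk on page 2 of 3: "previous" crosses to the prior page, "next" stays on this page
const example = navAvailability(0, 10, 2, 3) // => { canNavigatePrev: true, canNavigateNext: true }
```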
|
||||
const handleSaveContent = async () => {
|
||||
if (!chunk || !document) return
|
||||
|
||||
@@ -82,7 +103,6 @@ export function EditChunkModal({
|
||||
|
||||
if (result.success && onChunkUpdate) {
|
||||
onChunkUpdate(result.data)
|
||||
onClose()
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Error updating chunk:', err)
|
||||
@@ -92,8 +112,51 @@ export function EditChunkModal({
|
||||
}
|
||||
}
|
||||
|
||||
const navigateToChunk = async (direction: 'prev' | 'next') => {
|
||||
if (!chunk || isNavigating) return
|
||||
|
||||
try {
|
||||
setIsNavigating(true)
|
||||
|
||||
if (direction === 'prev') {
|
||||
if (currentChunkIndex > 0) {
|
||||
// Navigate to previous chunk in current page
|
||||
const prevChunk = allChunks[currentChunkIndex - 1]
|
||||
onNavigateToChunk?.(prevChunk)
|
||||
} else if (currentPage > 1) {
|
||||
// Load previous page and navigate to last chunk
|
||||
await onNavigateToPage?.(currentPage - 1, 'last')
|
||||
}
|
||||
} else {
|
||||
if (currentChunkIndex < allChunks.length - 1) {
|
||||
// Navigate to next chunk in current page
|
||||
const nextChunk = allChunks[currentChunkIndex + 1]
|
||||
onNavigateToChunk?.(nextChunk)
|
||||
} else if (currentPage < totalPages) {
|
||||
// Load next page and navigate to first chunk
|
||||
await onNavigateToPage?.(currentPage + 1, 'first')
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`Error navigating ${direction}:`, err)
|
||||
setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
|
||||
} finally {
|
||||
setIsNavigating(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleNavigate = (direction: 'prev' | 'next') => {
|
||||
if (hasUnsavedChanges) {
|
||||
setPendingNavigation(() => () => navigateToChunk(direction))
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
void navigateToChunk(direction)
|
||||
}
|
||||
}
|
||||
|
||||
const handleCloseAttempt = () => {
|
||||
if (hasUnsavedChanges && !isSaving) {
|
||||
setPendingNavigation(null)
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
onClose()
|
||||
@@ -102,7 +165,12 @@ export function EditChunkModal({
|
||||
|
||||
const handleConfirmDiscard = () => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
onClose()
|
||||
if (pendingNavigation) {
|
||||
void pendingNavigation()
|
||||
setPendingNavigation(null)
|
||||
} else {
|
||||
onClose()
|
||||
}
|
||||
}
|
||||
|
||||
const isFormValid = editedContent.trim().length > 0 && editedContent.trim().length <= 10000
|
||||
@@ -118,7 +186,59 @@ export function EditChunkModal({
|
||||
>
|
||||
<DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
<div className='flex items-center gap-3'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
|
||||
{/* Navigation Controls */}
|
||||
<div className='flex items-center gap-1'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('prev')}
|
||||
disabled={!canNavigatePrev || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronUp className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Previous chunk{' '}
|
||||
{currentPage > 1 && currentChunkIndex === 0 ? '(previous page)' : ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('next')}
|
||||
disabled={!canNavigateNext || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronDown className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Next chunk{' '}
|
||||
{currentPage < totalPages && currentChunkIndex === allChunks.length - 1
|
||||
? '(next page)'
|
||||
: ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='icon'
|
||||
@@ -142,7 +262,7 @@ export function EditChunkModal({
|
||||
{document?.filename || 'Unknown Document'}
|
||||
</p>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
Editing chunk #{chunk.chunkIndex}
|
||||
Editing chunk #{chunk.chunkIndex} • Page {currentPage} of {totalPages}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -167,7 +287,7 @@ export function EditChunkModal({
|
||||
onChange={(e) => setEditedContent(e.target.value)}
|
||||
placeholder='Enter chunk content...'
|
||||
className='flex-1 resize-none'
|
||||
disabled={isSaving}
|
||||
disabled={isSaving || isNavigating}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
@@ -176,12 +296,16 @@ export function EditChunkModal({
|
||||
{/* Footer */}
|
||||
<div className='mt-auto border-t px-6 pt-4 pb-6'>
|
||||
<div className='flex justify-between'>
|
||||
<Button variant='outline' onClick={handleCloseAttempt} disabled={isSaving}>
|
||||
<Button
|
||||
variant='outline'
|
||||
onClick={handleCloseAttempt}
|
||||
disabled={isSaving || isNavigating}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleSaveContent}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges || isNavigating}
|
||||
className='bg-[#701FFC] font-[480] text-primary-foreground shadow-[0_0_0_0_#701FFC] transition-all duration-200 hover:bg-[#6518E6] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
||||
>
|
||||
{isSaving ? (
|
||||
@@ -205,12 +329,19 @@ export function EditChunkModal({
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Unsaved Changes</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
You have unsaved changes to this chunk content. Are you sure you want to discard your
|
||||
changes and close the editor?
|
||||
You have unsaved changes to this chunk content.
|
||||
{pendingNavigation
|
||||
? ' Do you want to discard your changes and navigate to the next chunk?'
|
||||
: ' Are you sure you want to discard your changes and close the editor?'}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel onClick={() => setShowUnsavedChangesAlert(false)}>
|
||||
<AlertDialogCancel
|
||||
onClick={() => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
setPendingNavigation(null)
|
||||
}}
|
||||
>
|
||||
Keep Editing
|
||||
</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
|
||||
@@ -767,6 +767,30 @@ export function Document({
|
||||
updateChunk(updatedChunk.id, updatedChunk)
|
||||
setSelectedChunk(updatedChunk)
|
||||
}}
|
||||
allChunks={chunks}
|
||||
currentPage={currentPage}
|
||||
totalPages={totalPages}
|
||||
onNavigateToChunk={(chunk: ChunkData) => {
|
||||
setSelectedChunk(chunk)
|
||||
}}
|
||||
onNavigateToPage={async (page: number, selectChunk: 'first' | 'last') => {
|
||||
await goToPage(page)
|
||||
|
||||
const checkAndSelectChunk = () => {
|
||||
if (!isLoadingChunks && chunks.length > 0) {
|
||||
if (selectChunk === 'first') {
|
||||
setSelectedChunk(chunks[0])
|
||||
} else {
|
||||
setSelectedChunk(chunks[chunks.length - 1])
|
||||
}
|
||||
} else {
|
||||
// Retry after a short delay if chunks aren't loaded yet
|
||||
setTimeout(checkAndSelectChunk, 100)
|
||||
}
|
||||
}
|
||||
|
||||
setTimeout(checkAndSelectChunk, 0)
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Create Chunk Modal */}
|
||||
|
||||
@@ -36,16 +36,11 @@ import { useKnowledgeBase, useKnowledgeBaseDocuments } from '@/hooks/use-knowled
|
||||
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import { useSidebarStore } from '@/stores/sidebar/store'
|
||||
import { KnowledgeHeader } from '../components/knowledge-header/knowledge-header'
|
||||
import { useKnowledgeUpload } from '../hooks/use-knowledge-upload'
|
||||
import { KnowledgeBaseLoading } from './components/knowledge-base-loading/knowledge-base-loading'
|
||||
|
||||
const logger = createLogger('KnowledgeBase')
|
||||
|
||||
interface ProcessedDocumentResponse {
|
||||
documentId: string
|
||||
filename: string
|
||||
status: string
|
||||
}
|
||||
|
||||
interface KnowledgeBaseProps {
|
||||
id: string
|
||||
knowledgeBaseName?: string
|
||||
@@ -145,17 +140,32 @@ export function KnowledgeBase({
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
const [isBulkOperating, setIsBulkOperating] = useState(false)
|
||||
const [isUploading, setIsUploading] = useState(false)
|
||||
const [uploadError, setUploadError] = useState<{
|
||||
message: string
|
||||
timestamp: number
|
||||
} | null>(null)
|
||||
const [uploadProgress, setUploadProgress] = useState<{
|
||||
stage: 'idle' | 'uploading' | 'processing' | 'completing'
|
||||
filesCompleted: number
|
||||
totalFiles: number
|
||||
currentFile?: string
|
||||
}>({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
|
||||
const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
|
||||
onUploadComplete: async (uploadedFiles) => {
|
||||
const pendingDocuments: DocumentData[] = uploadedFiles.map((file, index) => ({
|
||||
id: `temp-${Date.now()}-${index}`,
|
||||
knowledgeBaseId: id,
|
||||
filename: file.filename,
|
||||
fileUrl: file.fileUrl,
|
||||
fileSize: file.fileSize,
|
||||
mimeType: file.mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}))
|
||||
|
||||
useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
|
||||
|
||||
await refreshDocuments()
|
||||
},
|
||||
})
|
||||
const router = useRouter()
|
||||
const fileInputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
@@ -240,11 +250,11 @@ export function KnowledgeBase({
|
||||
useEffect(() => {
|
||||
if (uploadError) {
|
||||
const timer = setTimeout(() => {
|
||||
setUploadError(null)
|
||||
clearError()
|
||||
}, 8000)
|
||||
return () => clearTimeout(timer)
|
||||
}
|
||||
}, [uploadError])
|
||||
}, [uploadError, clearError])
|
||||
|
||||
// Filter documents based on search query
|
||||
const filteredDocuments = documents.filter((doc) =>
|
||||
@@ -448,153 +458,18 @@ export function KnowledgeBase({
|
||||
const files = e.target.files
|
||||
if (!files || files.length === 0) return
|
||||
|
||||
interface UploadedFile {
|
||||
filename: string
|
||||
fileUrl: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
}
|
||||
|
||||
try {
|
||||
setIsUploading(true)
|
||||
setUploadError(null)
|
||||
setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })
|
||||
|
||||
// Upload all files and start processing
|
||||
const uploadedFiles: UploadedFile[] = []
|
||||
const fileArray = Array.from(files)
|
||||
|
||||
for (const [index, file] of fileArray.entries()) {
|
||||
setUploadProgress((prev) => ({ ...prev, currentFile: file.name, filesCompleted: index }))
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
|
||||
const uploadResponse = await fetch('/api/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json()
|
||||
throw new Error(`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
const uploadResult = await uploadResponse.json()
|
||||
|
||||
// Validate upload result structure
|
||||
if (!uploadResult.path) {
|
||||
throw new Error(`Invalid upload response for ${file.name}: missing file path`)
|
||||
}
|
||||
|
||||
uploadedFiles.push({
|
||||
filename: file.name,
|
||||
fileUrl: uploadResult.path.startsWith('http')
|
||||
? uploadResult.path
|
||||
: `${window.location.origin}${uploadResult.path}`,
|
||||
fileSize: file.size,
|
||||
mimeType: file.type,
|
||||
})
|
||||
}
|
||||
|
||||
setUploadProgress((prev) => ({
|
||||
...prev,
|
||||
stage: 'processing',
|
||||
filesCompleted: fileArray.length,
|
||||
}))
|
||||
|
||||
// Start async document processing
|
||||
const processResponse = await fetch(`/api/knowledge/${id}/documents`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
documents: uploadedFiles,
|
||||
processingOptions: {
|
||||
chunkSize: knowledgeBase?.chunkingConfig?.maxSize || 1024,
|
||||
minCharactersPerChunk: knowledgeBase?.chunkingConfig?.minSize || 100,
|
||||
chunkOverlap: knowledgeBase?.chunkingConfig?.overlap || 200,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}),
|
||||
const chunkingConfig = knowledgeBase?.chunkingConfig
|
||||
await uploadFiles(Array.from(files), id, {
|
||||
chunkSize: chunkingConfig?.maxSize || 1024,
|
||||
minCharactersPerChunk: chunkingConfig?.minSize || 100,
|
||||
chunkOverlap: chunkingConfig?.overlap || 200,
|
||||
recipe: 'default',
|
||||
})
|
||||
|
||||
if (!processResponse.ok) {
|
||||
const errorData = await processResponse.json()
|
||||
throw new Error(
|
||||
`Failed to start document processing: ${errorData.error || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Validate process result structure
|
||||
if (!processResult.success) {
|
||||
throw new Error(`Document processing failed: ${processResult.error || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
if (!processResult.data || !processResult.data.documentsCreated) {
|
||||
throw new Error('Invalid processing response: missing document data')
|
||||
}
|
||||
|
||||
// Create pending document objects and add them to the store immediately
|
||||
const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
|
||||
(doc: ProcessedDocumentResponse, index: number) => {
|
||||
if (!doc.documentId || !doc.filename) {
|
||||
logger.error(`Invalid document data received:`, doc)
|
||||
throw new Error(
|
||||
`Invalid document data for ${uploadedFiles[index]?.filename || 'unknown file'}`
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
id: doc.documentId,
|
||||
knowledgeBaseId: id,
|
||||
filename: doc.filename,
|
||||
fileUrl: uploadedFiles[index].fileUrl,
|
||||
fileSize: uploadedFiles[index].fileSize,
|
||||
mimeType: uploadedFiles[index].mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
// Add pending documents to store for immediate UI update
|
||||
useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
|
||||
|
||||
logger.info(`Successfully started processing ${uploadedFiles.length} documents`)
|
||||
|
||||
setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))
|
||||
|
||||
// Trigger a refresh to ensure documents are properly loaded
|
||||
await refreshDocuments()
|
||||
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
} catch (err) {
|
||||
logger.error('Error uploading documents:', err)
|
||||
|
||||
const errorMessage =
|
||||
err instanceof Error ? err.message : 'Unknown error occurred during upload'
|
||||
setUploadError({
|
||||
message: errorMessage,
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
|
||||
// Show user-friendly error message in console for debugging
|
||||
console.error('Document upload failed:', errorMessage)
|
||||
} catch (error) {
|
||||
logger.error('Error uploading files:', error)
|
||||
// Error handling is managed by the upload hook
|
||||
} finally {
|
||||
setIsUploading(false)
|
||||
setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
|
||||
// Reset the file input
|
||||
if (fileInputRef.current) {
|
||||
fileInputRef.current.value = ''
|
||||
@@ -995,7 +870,7 @@ export function KnowledgeBase({
|
||||
</tr>
|
||||
))
|
||||
) : (
|
||||
filteredDocuments.map((doc, index) => {
|
||||
filteredDocuments.map((doc) => {
|
||||
const isSelected = selectedDocuments.has(doc.id)
|
||||
const statusDisplay = getStatusDisplay(doc)
|
||||
// const processingTime = getProcessingTime(doc)
|
||||
@@ -1254,7 +1129,7 @@ export function KnowledgeBase({
|
||||
</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => setUploadError(null)}
|
||||
onClick={() => clearError()}
|
||||
className='flex-shrink-0 rounded-sm opacity-70 hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring'
|
||||
>
|
||||
<X className='h-4 w-4' />
|
||||
|
||||
@@ -13,8 +13,8 @@ import { Label } from '@/components/ui/label'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
|
||||
import type { DocumentData, KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
import { useKnowledgeStore } from '@/stores/knowledge/store'
|
||||
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
|
||||
import { useKnowledgeUpload } from '../../hooks/use-knowledge-upload'
|
||||
|
||||
const logger = createLogger('CreateModal')
|
||||
|
||||
@@ -29,12 +29,6 @@ const ACCEPTED_FILE_TYPES = [
|
||||
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
|
||||
]
|
||||
|
||||
interface ProcessedDocumentResponse {
|
||||
documentId: string
|
||||
filename: string
|
||||
status: string
|
||||
}
|
||||
|
||||
interface FileWithPreview extends File {
|
||||
preview: string
|
||||
}
|
||||
@@ -89,6 +83,12 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
const scrollContainerRef = useRef<HTMLDivElement>(null)
|
||||
const dropZoneRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
const { uploadFiles } = useKnowledgeUpload({
|
||||
onUploadComplete: (uploadedFiles) => {
|
||||
logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
|
||||
},
|
||||
})
|
||||
|
||||
// Cleanup file preview URLs when component unmounts to prevent memory leaks
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
@@ -235,19 +235,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
|
||||
}
|
||||
|
||||
// Helper function to create uploadedFiles array from file uploads
|
||||
const createUploadedFile = (
|
||||
filename: string,
|
||||
fileUrl: string,
|
||||
fileSize: number,
|
||||
mimeType: string
|
||||
) => ({
|
||||
filename,
|
||||
fileUrl: fileUrl.startsWith('http') ? fileUrl : `${window.location.origin}${fileUrl}`,
|
||||
fileSize,
|
||||
mimeType,
|
||||
})
|
||||
|
||||
const onSubmit = async (data: FormValues) => {
|
||||
setIsSubmitting(true)
|
||||
setSubmitStatus(null)
|
||||
@@ -285,138 +272,14 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
|
||||
|
||||
const newKnowledgeBase = result.data
|
||||
|
||||
// If files are uploaded, upload them and start processing
|
||||
if (files.length > 0) {
|
||||
// First, upload all files to get their URLs
|
||||
interface UploadedFile {
|
||||
filename: string
|
||||
fileUrl: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
}
|
||||
|
||||
const uploadedFiles: UploadedFile[] = []
|
||||
|
||||
for (const file of files) {
|
||||
try {
|
||||
const presignedResponse = await fetch('/api/files/presigned', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
fileName: file.name,
|
||||
contentType: file.type,
|
||||
fileSize: file.size,
|
||||
}),
|
||||
})
|
||||
|
||||
const presignedData = await presignedResponse.json()
|
||||
|
||||
if (presignedResponse.ok && presignedData.directUploadSupported) {
|
||||
const uploadHeaders: Record<string, string> = {
|
||||
'Content-Type': file.type,
|
||||
}
|
||||
|
||||
// Add Azure-specific headers if provided
|
||||
if (presignedData.uploadHeaders) {
|
||||
Object.assign(uploadHeaders, presignedData.uploadHeaders)
|
||||
}
|
||||
|
||||
const uploadResponse = await fetch(presignedData.presignedUrl, {
|
||||
method: 'PUT',
|
||||
headers: uploadHeaders, // Use the merged headers
|
||||
body: file,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
throw new Error(
|
||||
`Direct upload failed: ${uploadResponse.status} ${uploadResponse.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
uploadedFiles.push(
|
||||
createUploadedFile(file.name, presignedData.fileInfo.path, file.size, file.type)
|
||||
)
|
||||
} else {
|
||||
const formData = new FormData()
|
||||
formData.append('file', file)
|
||||
|
||||
const uploadResponse = await fetch('/api/files/upload', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json()
|
||||
throw new Error(
|
||||
`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const uploadResult = await uploadResponse.json()
|
||||
uploadedFiles.push(
|
||||
createUploadedFile(file.name, uploadResult.path, file.size, file.type)
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to upload ${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Start async document processing
|
||||
const processResponse = await fetch(`/api/knowledge/${newKnowledgeBase.id}/documents`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
documents: uploadedFiles,
|
||||
processingOptions: {
|
||||
chunkSize: data.maxChunkSize,
|
||||
minCharactersPerChunk: data.minChunkSize,
|
||||
chunkOverlap: data.overlapSize,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
bulk: true,
|
||||
}),
|
||||
const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, {
|
||||
chunkSize: data.maxChunkSize,
|
||||
minCharactersPerChunk: data.minChunkSize,
|
||||
chunkOverlap: data.overlapSize,
|
||||
recipe: 'default',
|
||||
})
|
||||
|
||||
if (!processResponse.ok) {
|
||||
throw new Error('Failed to start document processing')
|
||||
}
|
||||
|
||||
const processResult = await processResponse.json()
|
||||
|
||||
// Create pending document objects and add them to the store immediately
|
||||
if (processResult.success && processResult.data.documentsCreated) {
|
||||
const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
|
||||
(doc: ProcessedDocumentResponse, index: number) => ({
|
||||
id: doc.documentId,
|
||||
knowledgeBaseId: newKnowledgeBase.id,
|
||||
filename: doc.filename,
|
||||
fileUrl: uploadedFiles[index].fileUrl,
|
||||
fileSize: uploadedFiles[index].fileSize,
|
||||
mimeType: uploadedFiles[index].mimeType,
|
||||
chunkCount: 0,
|
||||
tokenCount: 0,
|
||||
characterCount: 0,
|
||||
processingStatus: 'pending' as const,
|
||||
processingStartedAt: null,
|
||||
processingCompletedAt: null,
|
||||
processingError: null,
|
||||
enabled: true,
|
||||
uploadedAt: new Date().toISOString(),
|
||||
})
|
||||
)
|
||||
|
||||
// Add pending documents to store for immediate UI update
|
||||
useKnowledgeStore.getState().addPendingDocuments(newKnowledgeBase.id, pendingDocuments)
|
||||
}
|
||||
|
||||
// Update the knowledge base object with the correct document count
|
||||
newKnowledgeBase.docCount = uploadedFiles.length
|
||||
|
||||
|
||||
@@ -0,0 +1,352 @@
import { useState } from 'react'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('KnowledgeUpload')

export interface UploadedFile {
  filename: string
  fileUrl: string
  fileSize: number
  mimeType: string
}

export interface UploadProgress {
  stage: 'idle' | 'uploading' | 'processing' | 'completing'
  filesCompleted: number
  totalFiles: number
  currentFile?: string
}

export interface UploadError {
  message: string
  timestamp: number
  code?: string
  details?: any
}

export interface ProcessingOptions {
  chunkSize?: number
  minCharactersPerChunk?: number
  chunkOverlap?: number
  recipe?: string
}

export interface UseKnowledgeUploadOptions {
  onUploadComplete?: (uploadedFiles: UploadedFile[]) => void
  onError?: (error: UploadError) => void
}
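Before the implementation, a sketch of how a component consumes this hook, matching the destructuring shown in the KnowledgeBase component earlier in this diff. The component name, handler, and import path are illustrative:

```tsx
import { useKnowledgeUpload } from '../hooks/use-knowledge-upload' // path as used by KnowledgeBase

function UploadButton({ knowledgeBaseId }: { knowledgeBaseId: string }) {
  const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
    onUploadComplete: (files) => console.log(`uploaded ${files.length} file(s)`),
  })

  const onFilesSelected = async (files: File[]) => {
    clearError()
    await uploadFiles(files, knowledgeBaseId, { chunkSize: 1024, chunkOverlap: 200 })
  }

  return (
    <div>
      {isUploading && (
        <span>
          {uploadProgress.stage}: {uploadProgress.filesCompleted}/{uploadProgress.totalFiles}
        </span>
      )}
      {uploadError && <span>{uploadError.message}</span>}
      <input
        type='file'
        multiple
        onChange={(e) => onFilesSelected(Array.from(e.target.files ?? []))}
      />
    </div>
  )
}
```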
class KnowledgeUploadError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
    public code: string,
    public details?: any
  ) {
    super(message)
    this.name = 'KnowledgeUploadError'
  }
}

class PresignedUrlError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PRESIGNED_URL_ERROR', details)
  }
}

class DirectUploadError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'DIRECT_UPLOAD_ERROR', details)
  }
}

class ProcessingError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PROCESSING_ERROR', details)
  }
}

export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
  const [isUploading, setIsUploading] = useState(false)
  const [uploadProgress, setUploadProgress] = useState<UploadProgress>({
    stage: 'idle',
    filesCompleted: 0,
    totalFiles: 0,
  })
  const [uploadError, setUploadError] = useState<UploadError | null>(null)

  const createUploadedFile = (
    filename: string,
    fileUrl: string,
    fileSize: number,
    mimeType: string
  ): UploadedFile => ({
    filename,
    fileUrl,
    fileSize,
    mimeType,
  })

  const createErrorFromException = (error: unknown, defaultMessage: string): UploadError => {
    if (error instanceof KnowledgeUploadError) {
      return {
        message: error.message,
        code: error.code,
        details: error.details,
        timestamp: Date.now(),
      }
    }

    if (error instanceof Error) {
      return {
        message: error.message,
        timestamp: Date.now(),
      }
    }

    return {
      message: defaultMessage,
      timestamp: Date.now(),
    }
  }

  const uploadFiles = async (
    files: File[],
    knowledgeBaseId: string,
    processingOptions: ProcessingOptions = {}
  ): Promise<UploadedFile[]> => {
    if (files.length === 0) {
      throw new KnowledgeUploadError('No files provided for upload', 'NO_FILES')
    }

    if (!knowledgeBaseId?.trim()) {
      throw new KnowledgeUploadError('Knowledge base ID is required', 'INVALID_KB_ID')
    }

    try {
      setIsUploading(true)
      setUploadError(null)
      setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })

      const uploadedFiles: UploadedFile[] = []

      // Upload all files using presigned URLs
      for (const [index, file] of files.entries()) {
        setUploadProgress((prev) => ({
          ...prev,
          currentFile: file.name,
          filesCompleted: index,
        }))

        try {
          // Get presigned URL
          const presignedResponse = await fetch('/api/files/presigned?type=knowledge-base', {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({
              fileName: file.name,
              contentType: file.type,
              fileSize: file.size,
            }),
          })

          if (!presignedResponse.ok) {
            let errorDetails: any = null
            try {
              errorDetails = await presignedResponse.json()
            } catch {
              // Ignore JSON parsing errors
            }

            throw new PresignedUrlError(
              `Failed to get presigned URL for ${file.name}: ${presignedResponse.status} ${presignedResponse.statusText}`,
              errorDetails
            )
          }

          const presignedData = await presignedResponse.json()

          if (presignedData.directUploadSupported) {
            // Use presigned URL for direct upload
            const uploadHeaders: Record<string, string> = {
              'Content-Type': file.type,
            }

            // Add Azure-specific headers if provided
            if (presignedData.uploadHeaders) {
              Object.assign(uploadHeaders, presignedData.uploadHeaders)
            }

            const uploadResponse = await fetch(presignedData.presignedUrl, {
              method: 'PUT',
              headers: uploadHeaders,
              body: file,
            })

            if (!uploadResponse.ok) {
              throw new DirectUploadError(
                `Direct upload failed for ${file.name}: ${uploadResponse.status} ${uploadResponse.statusText}`,
                { uploadResponse: uploadResponse.statusText }
              )
            }

            // Convert relative path to full URL for schema validation
            const fullFileUrl = presignedData.fileInfo.path.startsWith('http')
              ? presignedData.fileInfo.path
              : `${window.location.origin}${presignedData.fileInfo.path}`

            uploadedFiles.push(createUploadedFile(file.name, fullFileUrl, file.size, file.type))
          } else {
            // Fallback to traditional upload through API route
            const formData = new FormData()
            formData.append('file', file)

            const uploadResponse = await fetch('/api/files/upload', {
              method: 'POST',
              body: formData,
            })

            if (!uploadResponse.ok) {
              let errorData: any = null
              try {
                errorData = await uploadResponse.json()
              } catch {
                // Ignore JSON parsing errors
              }

              throw new DirectUploadError(
                `Failed to upload ${file.name}: ${errorData?.error || 'Unknown error'}`,
                errorData
              )
            }

            const uploadResult = await uploadResponse.json()

            // Validate upload result structure
            if (!uploadResult.path) {
              throw new DirectUploadError(
                `Invalid upload response for ${file.name}: missing file path`,
                uploadResult
              )
            }

            uploadedFiles.push(
              createUploadedFile(
                file.name,
                uploadResult.path.startsWith('http')
                  ? uploadResult.path
                  : `${window.location.origin}${uploadResult.path}`,
                file.size,
                file.type
              )
            )
          }
        } catch (fileError) {
          logger.error(`Error uploading file ${file.name}:`, fileError)
          throw fileError // Re-throw to be caught by outer try-catch
        }
      }

      setUploadProgress((prev) => ({ ...prev, stage: 'processing' }))

      // Start async document processing
      const processPayload = {
        documents: uploadedFiles,
        processingOptions: {
          chunkSize: processingOptions.chunkSize || 1024,
          minCharactersPerChunk: processingOptions.minCharactersPerChunk || 100,
          chunkOverlap: processingOptions.chunkOverlap || 200,
          recipe: processingOptions.recipe || 'default',
          lang: 'en',
        },
        bulk: true,
      }

      const processResponse = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(processPayload),
      })

      if (!processResponse.ok) {
        let errorData: any = null
        try {
          errorData = await processResponse.json()
        } catch {
          // Ignore JSON parsing errors
        }

        logger.error('Document processing failed:', {
          status: processResponse.status,
          error: errorData,
          uploadedFiles: uploadedFiles.map((f) => ({
            filename: f.filename,
            fileUrl: f.fileUrl,
            fileSize: f.fileSize,
            mimeType: f.mimeType,
          })),
        })

        throw new ProcessingError(
          `Failed to start document processing: ${errorData?.error || errorData?.message || 'Unknown error'}`,
          errorData
        )
      }

      const processResult = await processResponse.json()

      // Validate process result structure
      if (!processResult.success) {
        throw new ProcessingError(
          `Document processing failed: ${processResult.error || 'Unknown error'}`,
          processResult
        )
      }

      if (!processResult.data || !processResult.data.documentsCreated) {
        throw new ProcessingError(
          'Invalid processing response: missing document data',
          processResult
        )
      }

      setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))

      logger.info(`Successfully started processing ${uploadedFiles.length} documents`)

      // Call success callback
      options.onUploadComplete?.(uploadedFiles)

      return uploadedFiles
    } catch (err) {
      logger.error('Error uploading documents:', err)

      const error = createErrorFromException(err, 'Unknown error occurred during upload')
      setUploadError(error)
      options.onError?.(error)

      // Show user-friendly error message in console for debugging
      console.error('Document upload failed:', error.message)

      throw err
    } finally {
      setIsUploading(false)
      setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
    }
  }

  const clearError = () => {
    setUploadError(null)
  }

  return {
    isUploading,
    uploadProgress,
    uploadError,
    uploadFiles,
    clearError,
  }
}
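For orientation, a minimal sketch of how a component might consume this hook. The component name and the hook's import path are hypothetical, not part of the change above; the callbacks and return values match the hook as shown.

// Hypothetical consumer of useKnowledgeUpload (illustration only)
import type { ChangeEvent } from 'react'
import { useKnowledgeUpload } from '@/hooks/use-knowledge-upload' // assumed path

function KnowledgeBaseUploader({ knowledgeBaseId }: { knowledgeBaseId: string }) {
  const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
    onUploadComplete: (files) => console.log(`Queued ${files.length} documents for processing`),
    onError: (error) => console.error(error.code, error.message),
  })

  const handleChange = async (e: ChangeEvent<HTMLInputElement>) => {
    clearError()
    const files = Array.from(e.target.files ?? [])
    if (files.length === 0) return
    // Omitted options fall back to the defaults above (chunkSize 1024, overlap 200, recipe 'default')
    await uploadFiles(files, knowledgeBaseId, { chunkSize: 512 })
  }

  return (
    <div>
      <input type='file' multiple disabled={isUploading} onChange={handleChange} />
      {isUploading && (
        <span>
          {uploadProgress.stage}: {uploadProgress.filesCompleted}/{uploadProgress.totalFiles}
        </span>
      )}
      {uploadError && <span>{uploadError.message}</span>}
    </div>
  )
}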
@@ -26,10 +26,10 @@ interface ScheduleConfigProps {

export function ScheduleConfig({
  blockId,
  subBlockId,
  subBlockId: _subBlockId,
  isConnecting,
  isPreview = false,
  previewValue,
  previewValue: _previewValue,
  disabled = false,
}: ScheduleConfigProps) {
  const [error, setError] = useState<string | null>(null)
@@ -56,13 +56,7 @@ export function ScheduleConfig({

  // Get the startWorkflow value to determine if scheduling is enabled
  // and expose the setter so we can update it
  const [startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')
  const isScheduleEnabled = startWorkflow === 'schedule'

  const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)

  // Use preview value when in preview mode, otherwise use store value
  const value = isPreview ? previewValue : storeValue
  const [_startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')

  // Function to check if schedule exists in the database
  const checkSchedule = async () => {
@@ -110,10 +104,17 @@ export function ScheduleConfig({

  // Check for schedule on mount and when relevant dependencies change
  useEffect(() => {
    // Always check for schedules regardless of the UI setting
    // This ensures we detect schedules even when the UI is set to manual
    checkSchedule()
  }, [workflowId, scheduleType, isModalOpen, refreshCounter])
    // Only check for schedules when workflowId changes or modal opens
    // Avoid checking on every scheduleType change to prevent excessive API calls
    if (workflowId && (isModalOpen || refreshCounter > 0)) {
      checkSchedule()
    }

    // Cleanup function to reset loading state
    return () => {
      setIsLoading(false)
    }
  }, [workflowId, isModalOpen, refreshCounter])

  // Format the schedule information for display
  const getScheduleInfo = () => {
@@ -1,5 +1,6 @@
import { useEffect, useRef, useState } from 'react'
import { BookOpen, Code, Info, RectangleHorizontal, RectangleVertical } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
@@ -83,6 +84,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
  const isActiveBlock = useExecutionStore((state) => state.activeBlockIds.has(id))
  const isActive = dataIsActive || isActiveBlock

  // Get the current workflow ID from URL params instead of global state
  // This prevents race conditions when switching workflows rapidly
  const params = useParams()
  const currentWorkflowId = params.workflowId as string

  const reactivateSchedule = async (scheduleId: string) => {
    try {
      const response = await fetch(`/api/schedules/${scheduleId}`, {
@@ -94,7 +100,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
      })

      if (response.ok) {
        fetchScheduleInfo()
        // Use the current workflow ID from params instead of global state
        if (currentWorkflowId) {
          fetchScheduleInfo(currentWorkflowId)
        }
      } else {
        console.error('Failed to reactivate schedule')
      }
@@ -103,11 +112,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
    }
  }

  const fetchScheduleInfo = async () => {
  const fetchScheduleInfo = async (workflowId: string) => {
    if (!workflowId) return

    try {
      setIsLoadingScheduleInfo(true)
      const workflowId = useWorkflowRegistry.getState().activeWorkflowId
      if (!workflowId) return

      const response = await fetch(`/api/schedules?workflowId=${workflowId}&mode=schedule`, {
        cache: 'no-store',
@@ -176,12 +185,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
  }

  useEffect(() => {
    if (type === 'starter') {
      fetchScheduleInfo()
    if (type === 'starter' && currentWorkflowId) {
      fetchScheduleInfo(currentWorkflowId)
    } else {
      setScheduleInfo(null)
      setIsLoadingScheduleInfo(false) // Reset loading state when not a starter block
    }
  }, [type])

    // Cleanup function to reset loading state when component unmounts or workflow changes
    return () => {
      setIsLoadingScheduleInfo(false)
    }
  }, [type, currentWorkflowId])

  // Get webhook information for the tooltip
  useEffect(() => {
@@ -15,9 +15,14 @@ import { useFolderStore } from '@/stores/folders/store'
interface CreateMenuProps {
  onCreateWorkflow: (folderId?: string) => void
  isCollapsed?: boolean
  isCreatingWorkflow?: boolean
}

export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
export function CreateMenu({
  onCreateWorkflow,
  isCollapsed,
  isCreatingWorkflow = false,
}: CreateMenuProps) {
  const [showFolderDialog, setShowFolderDialog] = useState(false)
  const [folderName, setFolderName] = useState('')
  const [isCreating, setIsCreating] = useState(false)
@@ -73,6 +78,7 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
        onClick={handleCreateWorkflow}
        onMouseEnter={() => setIsHoverOpen(true)}
        onMouseLeave={() => setIsHoverOpen(false)}
        disabled={isCreatingWorkflow}
      >
        <Plus
          className={cn(
@@ -101,11 +107,17 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
        onCloseAutoFocus={(e) => e.preventDefault()}
      >
        <button
          className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
          className={cn(
            'flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors',
            isCreatingWorkflow
              ? 'cursor-not-allowed opacity-50'
              : 'hover:bg-accent hover:text-accent-foreground'
          )}
          onClick={handleCreateWorkflow}
          disabled={isCreatingWorkflow}
        >
          <File className='h-4 w-4' />
          New Workflow
          {isCreatingWorkflow ? 'Creating...' : 'New Workflow'}
        </button>
        <button
          className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
@@ -41,6 +41,9 @@ export function Sidebar() {
  const { isPending: sessionLoading } = useSession()
  const userPermissions = useUserPermissionsContext()
  const isLoading = workflowsLoading || sessionLoading

  // Add state to prevent multiple simultaneous workflow creations
  const [isCreatingWorkflow, setIsCreatingWorkflow] = useState(false)
  const router = useRouter()
  const params = useParams()
  const workspaceId = params.workspaceId as string
@@ -108,7 +111,14 @@ export function Sidebar() {

  // Create workflow handler
  const handleCreateWorkflow = async (folderId?: string) => {
    // Prevent multiple simultaneous workflow creations
    if (isCreatingWorkflow) {
      logger.info('Workflow creation already in progress, ignoring request')
      return
    }

    try {
      setIsCreatingWorkflow(true)
      const id = await createWorkflow({
        workspaceId: workspaceId || undefined,
        folderId: folderId || undefined,
@@ -116,6 +126,8 @@ export function Sidebar() {
      router.push(`/workspace/${workspaceId}/w/${id}`)
    } catch (error) {
      logger.error('Error creating workflow:', error)
    } finally {
      setIsCreatingWorkflow(false)
    }
  }

@@ -173,7 +185,11 @@ export function Sidebar() {
          {isLoading ? <Skeleton className='h-4 w-16' /> : 'Workflows'}
        </h2>
        {!isCollapsed && !isLoading && (
          <CreateMenu onCreateWorkflow={handleCreateWorkflow} isCollapsed={false} />
          <CreateMenu
            onCreateWorkflow={handleCreateWorkflow}
            isCollapsed={false}
            isCreatingWorkflow={isCreatingWorkflow}
          />
        )}
      </div>
      <FolderTree
@@ -1,9 +1,5 @@
// Export the storage abstraction layer

export * as BlobClient from './blob/blob-client'
// Export specific storage clients for advanced use cases
export * as S3Client from './s3/s3-client'
// Export configuration
export {
  BLOB_CONFIG,
  BLOB_KB_CONFIG,
@@ -279,15 +279,51 @@ describe('S3 Client', () => {
  })

  describe('s3Client initialization', () => {
    it('should initialize with correct configuration', async () => {
    it('should initialize with correct configuration when credentials are available', async () => {
      // Mock env with credentials
      vi.doMock('../../env', () => ({
        env: {
          AWS_ACCESS_KEY_ID: 'test-access-key',
          AWS_SECRET_ACCESS_KEY: 'test-secret-key',
        },
      }))

      // Re-import to get fresh module with mocked env
      vi.resetModules()
      const { getS3Client } = await import('./s3-client')
      const { S3Client } = await import('@aws-sdk/client-s3')

      const client = getS3Client()

      expect(client).toBeDefined()
      // Verify the client was constructed with the right configuration
      expect(S3Client).toHaveBeenCalledWith({ region: 'test-region' })
      expect(S3Client).toHaveBeenCalledWith({
        region: 'test-region',
        credentials: {
          accessKeyId: 'test-access-key',
          secretAccessKey: 'test-secret-key',
        },
      })
    })

    it('should initialize without credentials when env vars are not available', async () => {
      vi.doMock('../../env', () => ({
        env: {
          AWS_ACCESS_KEY_ID: undefined,
          AWS_SECRET_ACCESS_KEY: undefined,
        },
      }))

      vi.resetModules()
      const { getS3Client } = await import('./s3-client')
      const { S3Client } = await import('@aws-sdk/client-s3')

      const client = getS3Client()

      expect(client).toBeDefined()
      expect(S3Client).toHaveBeenCalledWith({
        region: 'test-region',
        credentials: undefined,
      })
    })
  })
})
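The tests above assume a lazily constructed client that only attaches static credentials when both env vars are set. A minimal sketch of such a getS3Client follows; the real module in lib/uploads may source its region differently, and the S3_CONFIG import path here is hypothetical.

// Sketch only: lazy S3 client with optional static credentials
import { S3Client } from '@aws-sdk/client-s3'
import { env } from '../../env'
import { S3_CONFIG } from '../setup' // hypothetical config import

let s3Client: S3Client | null = null

export function getS3Client(): S3Client {
  if (!s3Client) {
    s3Client = new S3Client({
      region: S3_CONFIG.region,
      credentials:
        env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY
          ? {
              accessKeyId: env.AWS_ACCESS_KEY_ID,
              secretAccessKey: env.AWS_SECRET_ACCESS_KEY,
            }
          : undefined,
    })
  }
  return s3Client
}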
@@ -4,8 +4,11 @@ import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('KnowledgeStore')

export interface ChunkingConfig {
  chunkSize?: number
  minCharactersPerChunk?: number
  maxSize: number
  minSize: number
  overlap: number
  chunkSize?: number // Legacy support
  minCharactersPerChunk?: number // Legacy support
  recipe?: string
  lang?: string
  strategy?: 'recursive' | 'semantic' | 'sentence' | 'paragraph'
@@ -463,75 +466,65 @@ export const useKnowledgeStore = create<KnowledgeStore>((set, get) => ({
        throw new Error(result.error || 'Failed to fetch documents')
      }

      const documents = result.data
      const serverDocuments = result.data

      set((state) => {
        // Merge with existing documents, being smart about when to use server data vs local optimistic updates
        const currentDocuments = state.documents[knowledgeBaseId] || []

        // For each fetched document, decide whether to use server data or preserve local state
        const mergedDocuments = documents.map((fetchedDoc: DocumentData) => {
          const existingDoc = currentDocuments.find((doc) => doc.id === fetchedDoc.id)
        // Create a map of server documents by filename for quick lookup
        const serverDocumentsByFilename = new Map()
        serverDocuments.forEach((doc: DocumentData) => {
          serverDocumentsByFilename.set(doc.filename, doc)
        })

          if (!existingDoc) {
            // New document from server, use it as-is
            return fetchedDoc
        // Filter out temporary documents that now have real server equivalents
        const filteredCurrentDocs = currentDocuments.filter((doc) => {
          // If this is a temporary document (starts with temp-) and a server document exists with the same filename
          if (doc.id.startsWith('temp-') && serverDocumentsByFilename.has(doc.filename)) {
            return false // Remove the temporary document
          }

          // If processing status is different, generally prefer server data for these transitions:
          if (existingDoc.processingStatus !== fetchedDoc.processingStatus) {
            // Always allow these status progressions from server:
            // pending -> processing, pending -> completed, pending -> failed
            // processing -> completed, processing -> failed
            const allowedTransitions = [
              { from: 'pending', to: 'processing' },
              { from: 'pending', to: 'completed' },
              { from: 'pending', to: 'failed' },
              { from: 'processing', to: 'completed' },
              { from: 'processing', to: 'failed' },
            ]

            const transition = allowedTransitions.find(
              (t) => t.from === existingDoc.processingStatus && t.to === fetchedDoc.processingStatus
            )

            if (transition) {
              return fetchedDoc
          // If this is a real document that still exists on the server, keep it for merging
          if (!doc.id.startsWith('temp-')) {
            const serverDoc = serverDocuments.find((sDoc: DocumentData) => sDoc.id === doc.id)
            if (serverDoc) {
              return false // Will be replaced by server version in merge below
            }
          }

          const existingHasTimestamps =
            existingDoc.processingStartedAt || existingDoc.processingCompletedAt
          const fetchedHasTimestamps =
            fetchedDoc.processingStartedAt || fetchedDoc.processingCompletedAt

          if (fetchedHasTimestamps && !existingHasTimestamps) {
            return fetchedDoc
          }

          // If the server document has updated stats (chunk count, token count, etc.), use it
          if (
            fetchedDoc.processingStatus === 'completed' &&
            (fetchedDoc.chunkCount !== existingDoc.chunkCount ||
              fetchedDoc.tokenCount !== existingDoc.tokenCount ||
              fetchedDoc.characterCount !== existingDoc.characterCount)
          ) {
            return fetchedDoc
          }

          // Otherwise, preserve the existing document (keeps optimistic updates)
          return existingDoc
          // Keep temporary documents that don't have server equivalents yet
          return true
        })

        // Add any new documents that weren't in the existing set
        const newDocuments = documents.filter(
          (fetchedDoc: DocumentData) => !currentDocuments.find((doc) => doc.id === fetchedDoc.id)
        )
        // Merge server documents with any remaining local documents
        const mergedDocuments = serverDocuments.map((serverDoc: DocumentData) => {
          const existingDoc = currentDocuments.find((doc) => doc.id === serverDoc.id)

          if (!existingDoc) {
            // New document from server, use it as-is
            return serverDoc
          }

          // Merge logic for existing documents (prefer server data for most fields)
          return {
            ...existingDoc,
            ...serverDoc,
            // Preserve any local optimistic updates that haven't been reflected on server yet
            ...(existingDoc.processingStatus !== serverDoc.processingStatus &&
            ['pending', 'processing'].includes(existingDoc.processingStatus) &&
            !serverDoc.processingStartedAt
              ? { processingStatus: existingDoc.processingStatus }
              : {}),
          }
        })

        // Add any remaining temporary documents that don't have server equivalents
        const finalDocuments = [...mergedDocuments, ...filteredCurrentDocs]

        return {
          documents: {
            ...state.documents,
            [knowledgeBaseId]: [...mergedDocuments, ...newDocuments],
            [knowledgeBaseId]: finalDocuments,
          },
          loadingDocuments: new Set(
            [...state.loadingDocuments].filter((loadingId) => loadingId !== knowledgeBaseId)
@@ -540,7 +533,7 @@ export const useKnowledgeStore = create<KnowledgeStore>((set, get) => ({
      })

      logger.info(`Documents refreshed for knowledge base: ${knowledgeBaseId}`)
      return documents
      return serverDocuments
    } catch (error) {
      logger.error(`Error refreshing documents for knowledge base ${knowledgeBaseId}:`, error)
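Because the old and new merge strategies are interleaved in the hunk above, the new reconciliation rule is easy to misread. As a standalone illustration only (trimmed types, not the store's actual code), the intent is:

// Simplified illustration of the new merge rule
interface Doc {
  id: string
  filename: string
  processingStatus: string
  processingStartedAt?: string
}

function reconcileDocuments(local: Doc[], server: Doc[]): Doc[] {
  const serverByFilename = new Map<string, Doc>()
  for (const doc of server) serverByFilename.set(doc.filename, doc)

  // Keep only local docs the server does not yet account for: optimistic 'temp-' entries
  // whose file is still unknown server-side. Real docs are re-supplied by the merge below.
  const leftovers = local.filter((doc) => {
    if (doc.id.startsWith('temp-') && serverByFilename.has(doc.filename)) return false
    if (!doc.id.startsWith('temp-') && server.some((s) => s.id === doc.id)) return false
    return true
  })

  // Server data wins field by field, except an in-flight local status is preserved until
  // the server reports that processing has actually started.
  const merged = server.map((serverDoc) => {
    const existing = local.find((d) => d.id === serverDoc.id)
    if (!existing) return serverDoc
    const keepLocalStatus =
      existing.processingStatus !== serverDoc.processingStatus &&
      ['pending', 'processing'].includes(existing.processingStatus) &&
      !serverDoc.processingStartedAt
    return {
      ...existing,
      ...serverDoc,
      ...(keepLocalStatus ? { processingStatus: existing.processingStatus } : {}),
    }
  })

  return [...merged, ...leftovers]
}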
@@ -557,9 +557,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
     * @returns The ID of the newly created workflow
     */
    createWorkflow: async (options = {}) => {
      const { workflows } = get()
      const id = crypto.randomUUID()

      // Use provided workspace ID (must be provided since we no longer track active workspace)
      const workspaceId = options.workspaceId
@@ -570,292 +567,259 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
      }

      logger.info(`Creating new workflow in workspace: ${workspaceId || 'none'}`)
      // Generate workflow metadata with appropriate name and color
      const newWorkflow: WorkflowMetadata = {
        id,
        name: options.name || generateUniqueName(workflows),
        lastModified: new Date(),
        description: options.description || 'New workflow',
        color: options.marketplaceId ? '#808080' : getNextWorkflowColor(workflows), // Gray for marketplace imports
        marketplaceData: options.marketplaceId
          ? { id: options.marketplaceId, status: 'temp' as const }
          : undefined,
        workspaceId, // Associate with workspace
        folderId: options.folderId || null, // Associate with folder if provided
      }

      let initialState: any

      // If this is a marketplace import with existing state
      if (options.marketplaceId && options.marketplaceState) {
        initialState = {
          blocks: options.marketplaceState.blocks || {},
          edges: options.marketplaceState.edges || [],
          loops: options.marketplaceState.loops || {},
          parallels: options.marketplaceState.parallels || {},
          isDeployed: false,
          deployedAt: undefined,
          deploymentStatuses: {}, // Initialize empty deployment statuses map
          workspaceId, // Include workspace ID in the state object
          history: {
            past: [],
            present: {
              state: {
                blocks: options.marketplaceState.blocks || {},
                edges: options.marketplaceState.edges || [],
                loops: options.marketplaceState.loops || {},
                parallels: options.marketplaceState.parallels || {},
                isDeployed: false,
                deployedAt: undefined,
                workspaceId, // Include workspace ID in history
              },
              timestamp: Date.now(),
              action: 'Imported from marketplace',
              subblockValues: {},
            },
            future: [],
          },
          lastSaved: Date.now(),
        }

        logger.info(`Created workflow from marketplace: ${options.marketplaceId}`)
      } else {
        // Create starter block for new workflow
        const starterId = crypto.randomUUID()
        const starterBlock = {
          id: starterId,
          type: 'starter' as const,
          name: 'Start',
          position: { x: 100, y: 100 },
          subBlocks: {
            startWorkflow: {
              id: 'startWorkflow',
              type: 'dropdown' as const,
              value: 'manual',
            },
            webhookPath: {
              id: 'webhookPath',
              type: 'short-input' as const,
              value: '',
            },
            webhookSecret: {
              id: 'webhookSecret',
              type: 'short-input' as const,
              value: '',
            },
            scheduleType: {
              id: 'scheduleType',
              type: 'dropdown' as const,
              value: 'daily',
            },
            minutesInterval: {
              id: 'minutesInterval',
              type: 'short-input' as const,
              value: '',
            },
            minutesStartingAt: {
              id: 'minutesStartingAt',
              type: 'short-input' as const,
              value: '',
            },
            hourlyMinute: {
              id: 'hourlyMinute',
              type: 'short-input' as const,
              value: '',
            },
            dailyTime: {
              id: 'dailyTime',
              type: 'short-input' as const,
              value: '',
            },
            weeklyDay: {
              id: 'weeklyDay',
              type: 'dropdown' as const,
              value: 'MON',
            },
            weeklyDayTime: {
              id: 'weeklyDayTime',
              type: 'short-input' as const,
              value: '',
            },
            monthlyDay: {
              id: 'monthlyDay',
              type: 'short-input' as const,
              value: '',
            },
            monthlyTime: {
              id: 'monthlyTime',
              type: 'short-input' as const,
              value: '',
            },
            cronExpression: {
              id: 'cronExpression',
              type: 'short-input' as const,
              value: '',
            },
            timezone: {
              id: 'timezone',
              type: 'dropdown' as const,
              value: 'UTC',
            },
          },
          outputs: {
            response: {
              type: {
                input: 'any',
              },
            },
          },
          enabled: true,
          horizontalHandles: true,
          isWide: false,
          height: 0,
        }

        initialState = {
          blocks: {
            [starterId]: starterBlock,
          },
          edges: [],
          loops: {},
          parallels: {},
          isDeployed: false,
          deployedAt: undefined,
          deploymentStatuses: {}, // Initialize empty deployment statuses map
          workspaceId, // Include workspace ID in the state object
          history: {
            past: [],
            present: {
              state: {
                blocks: {
                  [starterId]: starterBlock,
                },
                edges: [],
                loops: {},
                parallels: {},
                isDeployed: false,
                deployedAt: undefined,
                workspaceId, // Include workspace ID in history
              },
              timestamp: Date.now(),
              action: 'Initial state',
              subblockValues: {},
            },
            future: [],
          },
          lastSaved: Date.now(),
        }
      }

      // Add workflow to registry first
      set((state) => ({
        workflows: {
          ...state.workflows,
          [id]: newWorkflow,
        },
        error: null,
      }))

      // Initialize subblock values if this is a marketplace import
      if (options.marketplaceId && options.marketplaceState?.blocks) {
        useSubBlockStore.getState().initializeFromWorkflow(id, options.marketplaceState.blocks)
      }

      // Initialize subblock values to ensure they're available for sync
      if (!options.marketplaceId) {
        // For non-marketplace workflows, initialize subblock values from the starter block
        const subblockValues: Record<string, Record<string, any>> = {}
        const blocks = initialState.blocks as Record<string, BlockState>
        for (const [blockId, block] of Object.entries(blocks)) {
          subblockValues[blockId] = {}
          for (const [subblockId, subblock] of Object.entries(block.subBlocks)) {
            subblockValues[blockId][subblockId] = (subblock as any).value
          }
        }

        // Update the subblock store with the initial values
        useSubBlockStore.setState((state) => ({
          workflowValues: {
            ...state.workflowValues,
            [id]: subblockValues,
          },
        }))
      }

      // Properly set as active workflow and initialize state
      set({ activeWorkflowId: id })
      useWorkflowStore.setState(initialState)

      // Immediately persist the new workflow to the database using dedicated endpoint
      const persistWorkflow = async () => {
        try {
          const response = await fetch('/api/workflows', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
              name: newWorkflow.name,
              description: newWorkflow.description,
              color: newWorkflow.color,
              workspaceId: newWorkflow.workspaceId,
              folderId: newWorkflow.folderId,
            }),
          })

          if (!response.ok) {
            const errorData = await response.json()
            throw new Error(
              `Failed to create workflow: ${errorData.error || response.statusText}`
            )
          }

          const createdWorkflow = await response.json()
          logger.info(`Successfully created workflow ${createdWorkflow.id} on server`)

          // Update the local workflow ID to match the server-generated one
          if (createdWorkflow.id !== id) {
            logger.info(`Updating local workflow ID from ${id} to ${createdWorkflow.id}`)

            // Update registry with server ID
            set((state) => {
              const { [id]: oldWorkflow, ...otherWorkflows } = state.workflows
              return {
                workflows: {
                  ...otherWorkflows,
                  [createdWorkflow.id]: {
                    ...oldWorkflow,
                    id: createdWorkflow.id,
                  },
                },
                activeWorkflowId: createdWorkflow.id,
              }
            })

            // Return the server ID for the caller
            return createdWorkflow.id
          }

          return id
        } catch (error) {
          logger.error(`Failed to create new workflow ${id}:`, error)
          throw error // Re-throw to handle in calling code
        }
      }

      // Persist synchronously to ensure workflow exists before Socket.IO operations
      let finalId = id
      // Create the workflow on the server first to get the server-generated ID
      try {
        finalId = await persistWorkflow()
      } catch (error) {
        logger.error(
          `Critical: Failed to persist new workflow ${id}, Socket.IO operations may fail:`,
          error
        const response = await fetch('/api/workflows', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            name: options.name || generateUniqueName(),
            description: options.description || 'New workflow',
            color: options.marketplaceId ? '#808080' : getNextWorkflowColor(),
            workspaceId,
            folderId: options.folderId || null,
          }),
        })

        if (!response.ok) {
          const errorData = await response.json()
          throw new Error(`Failed to create workflow: ${errorData.error || response.statusText}`)
        }

        const createdWorkflow = await response.json()
        const serverWorkflowId = createdWorkflow.id

        logger.info(`Successfully created workflow ${serverWorkflowId} on server`)

        // Generate workflow metadata with server-generated ID
        const newWorkflow: WorkflowMetadata = {
          id: serverWorkflowId,
          name: createdWorkflow.name,
          lastModified: new Date(),
          description: createdWorkflow.description,
          color: createdWorkflow.color,
          marketplaceData: options.marketplaceId
            ? { id: options.marketplaceId, status: 'temp' as const }
            : undefined,
          workspaceId,
          folderId: createdWorkflow.folderId,
        }

        let initialState: any

        // If this is a marketplace import with existing state
        if (options.marketplaceId && options.marketplaceState) {
          initialState = {
            blocks: options.marketplaceState.blocks || {},
            edges: options.marketplaceState.edges || [],
            loops: options.marketplaceState.loops || {},
            parallels: options.marketplaceState.parallels || {},
            isDeployed: false,
            deployedAt: undefined,
            deploymentStatuses: {}, // Initialize empty deployment statuses map
            workspaceId, // Include workspace ID in the state object
            history: {
              past: [],
              present: {
                state: {
                  blocks: options.marketplaceState.blocks || {},
                  edges: options.marketplaceState.edges || [],
                  loops: options.marketplaceState.loops || {},
                  parallels: options.marketplaceState.parallels || {},
                  isDeployed: false,
                  deployedAt: undefined,
                  workspaceId, // Include workspace ID in history
                },
                timestamp: Date.now(),
                action: 'Imported from marketplace',
                subblockValues: {},
              },
              future: [],
            },
            lastSaved: Date.now(),
          }

          logger.info(`Created workflow from marketplace: ${options.marketplaceId}`)
        } else {
          // Create starter block for new workflow
          const starterId = crypto.randomUUID()
          const starterBlock = {
            id: starterId,
            type: 'starter' as const,
            name: 'Start',
            position: { x: 100, y: 100 },
            subBlocks: {
              startWorkflow: {
                id: 'startWorkflow',
                type: 'dropdown' as const,
                value: 'manual',
              },
              webhookPath: {
                id: 'webhookPath',
                type: 'short-input' as const,
                value: '',
              },
              webhookSecret: {
                id: 'webhookSecret',
                type: 'short-input' as const,
                value: '',
              },
              scheduleType: {
                id: 'scheduleType',
                type: 'dropdown' as const,
                value: 'daily',
              },
              minutesInterval: {
                id: 'minutesInterval',
                type: 'short-input' as const,
                value: '',
              },
              minutesStartingAt: {
                id: 'minutesStartingAt',
                type: 'short-input' as const,
                value: '',
              },
              hourlyMinute: {
                id: 'hourlyMinute',
                type: 'short-input' as const,
                value: '',
              },
              dailyTime: {
                id: 'dailyTime',
                type: 'short-input' as const,
                value: '',
              },
              weeklyDay: {
                id: 'weeklyDay',
                type: 'dropdown' as const,
                value: 'MON',
              },
              weeklyDayTime: {
                id: 'weeklyDayTime',
                type: 'short-input' as const,
                value: '',
              },
              monthlyDay: {
                id: 'monthlyDay',
                type: 'short-input' as const,
                value: '',
              },
              monthlyTime: {
                id: 'monthlyTime',
                type: 'short-input' as const,
                value: '',
              },
              cronExpression: {
                id: 'cronExpression',
                type: 'short-input' as const,
                value: '',
              },
              timezone: {
                id: 'timezone',
                type: 'dropdown' as const,
                value: 'UTC',
              },
            },
            outputs: {
              response: {
                type: {
                  input: 'any',
                },
              },
            },
            enabled: true,
            horizontalHandles: true,
            isWide: false,
            height: 0,
          }

          initialState = {
            blocks: {
              [starterId]: starterBlock,
            },
            edges: [],
            loops: {},
            parallels: {},
            isDeployed: false,
            deployedAt: undefined,
            deploymentStatuses: {}, // Initialize empty deployment statuses map
            workspaceId, // Include workspace ID in the state object
            history: {
              past: [],
              present: {
                state: {
                  blocks: {
                    [starterId]: starterBlock,
                  },
                  edges: [],
                  loops: {},
                  parallels: {},
                  isDeployed: false,
                  deployedAt: undefined,
                  workspaceId, // Include workspace ID in history
                },
                timestamp: Date.now(),
                action: 'Initial state',
                subblockValues: {},
              },
              future: [],
            },
            lastSaved: Date.now(),
          }
        }

        // Add workflow to registry with server-generated ID
        set((state) => ({
          workflows: {
            ...state.workflows,
            [serverWorkflowId]: newWorkflow,
          },
          error: null,
        }))

        // Initialize subblock values if this is a marketplace import
        if (options.marketplaceId && options.marketplaceState?.blocks) {
          useSubBlockStore
            .getState()
            .initializeFromWorkflow(serverWorkflowId, options.marketplaceState.blocks)
        }

        // Initialize subblock values to ensure they're available for sync
        if (!options.marketplaceId) {
          // For non-marketplace workflows, initialize subblock values from the starter block
          const subblockValues: Record<string, Record<string, any>> = {}
          const blocks = initialState.blocks as Record<string, BlockState>
          for (const [blockId, block] of Object.entries(blocks)) {
            subblockValues[blockId] = {}
            for (const [subblockId, subblock] of Object.entries(block.subBlocks)) {
              subblockValues[blockId][subblockId] = (subblock as any).value
            }
          }

          // Update the subblock store with the initial values
          useSubBlockStore.setState((state) => ({
            workflowValues: {
              ...state.workflowValues,
              [serverWorkflowId]: subblockValues,
            },
          }))
        }

        // Don't set as active workflow here - let the navigation/URL change handle that
        // This prevents race conditions and flickering
        logger.info(
          `Created new workflow with ID ${serverWorkflowId} in workspace ${workspaceId || 'none'}`
        )
        // Don't throw - allow workflow creation to continue in memory

        return serverWorkflowId
      } catch (error) {
        logger.error(`Failed to create new workflow:`, error)
        set({
          error: `Failed to create workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
        })
        throw error
      }

      logger.info(`Created new workflow with ID ${finalId} in workspace ${workspaceId || 'none'}`)

      return finalId
    },

    /**
@@ -866,16 +830,15 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
      state: any,
      metadata: Partial<WorkflowMetadata>
    ) => {
      const { workflows } = get()
      const id = crypto.randomUUID()

      // Generate workflow metadata with marketplace properties
      const newWorkflow: WorkflowMetadata = {
        id,
        name: metadata.name || 'Marketplace workflow',
        name: metadata.name || generateUniqueName(),
        lastModified: new Date(),
        description: metadata.description || 'Imported from marketplace',
        color: metadata.color || getNextWorkflowColor(workflows),
        color: metadata.color || getNextWorkflowColor(),
        marketplaceData: { id: marketplaceId, status: 'temp' as const },
      }

@@ -1031,7 +994,7 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
        name: `${sourceWorkflow.name} (Copy)`,
        lastModified: new Date(),
        description: sourceWorkflow.description,
        color: getNextWorkflowColor(workflows),
        color: getNextWorkflowColor(),
        workspaceId, // Include the workspaceId in the new workflow
        folderId: sourceWorkflow.folderId, // Include the folderId from source workflow
        // Do not copy marketplace data
@@ -1,62 +1,247 @@
import type { WorkflowMetadata } from './types'

// Available workflow colors
export const WORKFLOW_COLORS = [
  '#3972F6',
  '#F639DD',
  '#F6B539',
  '#8139F6',
  '#39B54A',
  '#39B5AB',
  '#F66839',
  // Original colors
  '#3972F6', // Blue
  '#F639DD', // Pink/Magenta
  '#F6B539', // Orange/Yellow
  '#8139F6', // Purple
  '#39B54A', // Green
  '#39B5AB', // Teal
  '#F66839', // Red/Orange

  // Additional vibrant blues
  '#2E5BFF', // Bright Blue
  '#4A90FF', // Sky Blue
  '#1E40AF', // Deep Blue
  '#0EA5E9', // Cyan Blue
  '#3B82F6', // Royal Blue
  '#6366F1', // Indigo
  '#1D4ED8', // Electric Blue

  // Additional vibrant purples
  '#A855F7', // Bright Purple
  '#C084FC', // Light Purple
  '#7C3AED', // Deep Purple
  '#9333EA', // Violet
  '#8B5CF6', // Medium Purple
  '#6D28D9', // Dark Purple
  '#5B21B6', // Deep Violet

  // Additional vibrant pinks/magentas
  '#EC4899', // Hot Pink
  '#F97316', // Pink Orange
  '#E11D48', // Rose
  '#BE185D', // Deep Pink
  '#DB2777', // Pink Red
  '#F472B6', // Light Pink
  '#F59E0B', // Amber Pink

  // Additional vibrant greens
  '#10B981', // Emerald
  '#059669', // Green Teal
  '#16A34A', // Forest Green
  '#22C55E', // Lime Green
  '#84CC16', // Yellow Green
  '#65A30D', // Olive Green
  '#15803D', // Dark Green

  // Additional vibrant teals/cyans
  '#06B6D4', // Cyan
  '#0891B2', // Dark Cyan
  '#0E7490', // Teal Blue
  '#14B8A6', // Turquoise
  '#0D9488', // Dark Teal
  '#047857', // Sea Green
  '#059669', // Mint Green

  // Additional vibrant oranges/reds
  '#EA580C', // Bright Orange
  '#DC2626', // Red
  '#B91C1C', // Dark Red
  '#EF4444', // Light Red
  '#F97316', // Orange
  '#FB923C', // Light Orange
  '#FDBA74', // Peach

  // Additional vibrant yellows/golds
  '#FBBF24', // Gold
  '#F59E0B', // Amber
  '#D97706', // Dark Amber
  '#92400E', // Bronze
  '#EAB308', // Yellow
  '#CA8A04', // Dark Yellow
  '#A16207', // Mustard

  // Additional unique vibrant colors
  '#FF6B6B', // Coral
  '#4ECDC4', // Mint
  '#45B7D1', // Light Blue
  '#96CEB4', // Sage
  '#FFEAA7', // Cream
  '#DDA0DD', // Plum
  '#98D8C8', // Seafoam
  '#F7DC6F', // Banana
  '#BB8FCE', // Lavender
  '#85C1E9', // Baby Blue
  '#F8C471', // Peach
  '#82E0AA', // Light Green
  '#F1948A', // Salmon
  '#D7BDE2', // Lilac
  '#D7BDE2', // Lilac
]
// Generates a unique name for a new workflow
export function generateUniqueName(existingWorkflows: Record<string, WorkflowMetadata>): string {
  // Extract numbers from existing workflow names using regex
  const numbers = Object.values(existingWorkflows)
    .map((w) => {
      const match = w.name.match(/Workflow (\d+)/)
      return match ? Number.parseInt(match[1]) : 0
    })
    .filter((n) => n > 0)
// Random adjectives and nouns for generating creative workflow names
const ADJECTIVES = [
  'Blazing',
  'Crystal',
  'Golden',
  'Silver',
  'Mystic',
  'Cosmic',
  'Electric',
  'Frozen',
  'Burning',
  'Shining',
  'Dancing',
  'Flying',
  'Roaring',
  'Whispering',
  'Glowing',
  'Sparkling',
  'Thunder',
  'Lightning',
  'Storm',
  'Ocean',
  'Mountain',
  'Forest',
  'Desert',
  'Arctic',
  'Tropical',
  'Midnight',
  'Dawn',
  'Sunset',
  'Rainbow',
  'Diamond',
  'Ruby',
  'Emerald',
  'Sapphire',
  'Pearl',
  'Jade',
  'Amber',
  'Coral',
  'Ivory',
  'Obsidian',
  'Marble',
  'Velvet',
  'Silk',
  'Satin',
  'Linen',
  'Cotton',
  'Wool',
  'Cashmere',
  'Denim',
  'Neon',
  'Pastel',
  'Vibrant',
  'Muted',
  'Bold',
  'Subtle',
  'Bright',
  'Dark',
]

  if (numbers.length === 0) {
    return 'Workflow 1'
  }
const NOUNS = [
  'Phoenix',
  'Dragon',
  'Eagle',
  'Wolf',
  'Lion',
  'Tiger',
  'Panther',
  'Falcon',
  'Hawk',
  'Raven',
  'Swan',
  'Dove',
  'Butterfly',
  'Firefly',
  'Dragonfly',
  'Hummingbird',
  'Galaxy',
  'Nebula',
  'Comet',
  'Meteor',
  'Star',
  'Moon',
  'Sun',
  'Planet',
  'Asteroid',
  'Constellation',
  'Aurora',
  'Eclipse',
  'Solstice',
  'Equinox',
  'Horizon',
  'Zenith',
  'Castle',
  'Tower',
  'Bridge',
  'Garden',
  'Fountain',
  'Palace',
  'Temple',
  'Cathedral',
  'Lighthouse',
  'Windmill',
  'Waterfall',
  'Canyon',
  'Valley',
  'Peak',
  'Ridge',
  'Cliff',
  'Ocean',
  'River',
  'Lake',
  'Stream',
  'Pond',
  'Bay',
  'Cove',
  'Harbor',
  'Island',
  'Peninsula',
  'Archipelago',
  'Atoll',
  'Reef',
  'Lagoon',
  'Fjord',
  'Delta',
  'Cake',
  'Cookie',
  'Muffin',
  'Cupcake',
  'Pie',
  'Tart',
  'Brownie',
  'Donut',
  'Pancake',
  'Waffle',
  'Croissant',
  'Bagel',
  'Pretzel',
  'Biscuit',
  'Scone',
  'Crumpet',
]

  // Find the maximum number and add 1
  const nextNumber = Math.max(...numbers) + 1
  return `Workflow ${nextNumber}`
// Generates a random name for a new workflow
export function generateUniqueName(): string {
  const adjective = ADJECTIVES[Math.floor(Math.random() * ADJECTIVES.length)]
  const noun = NOUNS[Math.floor(Math.random() * NOUNS.length)]
  return `${adjective.toLowerCase()}-${noun.toLowerCase()}`
}

// Determines the next color to use for a new workflow based on the color of the newest workflow
export function getNextWorkflowColor(existingWorkflows: Record<string, WorkflowMetadata>): string {
  const workflowArray = Object.values(existingWorkflows)

  if (workflowArray.length === 0) {
    return WORKFLOW_COLORS[0]
  }

  // Sort workflows by lastModified date (newest first)
  const sortedWorkflows = [...workflowArray].sort((a, b) => {
    const dateA =
      a.lastModified instanceof Date ? a.lastModified.getTime() : new Date(a.lastModified).getTime()
    const dateB =
      b.lastModified instanceof Date ? b.lastModified.getTime() : new Date(b.lastModified).getTime()
    return dateB - dateA
  })

  // Get the newest workflow (first in sorted array)
  const newestWorkflow = sortedWorkflows[0]

  // Find the index of the newest workflow's color, defaulting to -1 if undefined
  const currentColorIndex = newestWorkflow?.color
    ? WORKFLOW_COLORS.indexOf(newestWorkflow.color)
    : -1

  // Get next color index, wrapping around to 0 if we reach the end
  const nextColorIndex = (currentColorIndex + 1) % WORKFLOW_COLORS.length

  return WORKFLOW_COLORS[nextColorIndex]
// Generates a random color for a new workflow
export function getNextWorkflowColor(): string {
  // Simply return a random color from the available colors
  return WORKFLOW_COLORS[Math.floor(Math.random() * WORKFLOW_COLORS.length)]
}
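The rewritten helpers are deliberately stateless: they no longer take the registry's workflow map. A quick illustration of what they return (results are random by design, so the values below are examples only):

// Illustration only; assumes the utils are imported from the workflow registry's utils module
const name = generateUniqueName() // e.g. 'cosmic-lighthouse' or 'golden-waffle'
const color = getNextWorkflowColor() // e.g. '#8139F6', sampled uniformly from WORKFLOW_COLORS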
@@ -1,6 +1,23 @@
import type { ToolConfig } from '../types'
import type { TelegramMessageParams, TelegramMessageResponse } from './types'

// Helper function to convert basic markdown to HTML
function convertMarkdownToHTML(text: string): string {
  return (
    text
      // Bold: **text** or __text__ -> <b>text</b>
      .replace(/\*\*(.*?)\*\*/g, '<b>$1</b>')
      .replace(/__(.*?)__/g, '<b>$1</b>')
      // Italic: *text* or _text_ -> <i>text</i>
      .replace(/\*(.*?)\*/g, '<i>$1</i>')
      .replace(/_(.*?)_/g, '<i>$1</i>')
      // Code: `text` -> <code>text</code>
      .replace(/`(.*?)`/g, '<code>$1</code>')
      // Links: [text](url) -> <a href="url">text</a>
      .replace(/\[([^\]]+)\]\(([^)]+)\)/g, '<a href="$2">$1</a>')
  )
}

export const telegramMessageTool: ToolConfig<TelegramMessageParams, TelegramMessageResponse> = {
  id: 'telegram_message',
  name: 'Telegram Message',
@@ -36,7 +53,8 @@ export const telegramMessageTool: ToolConfig<TelegramMessageParams, TelegramMess
    }),
    body: (params: TelegramMessageParams) => ({
      chat_id: params.chatId,
      text: params.text,
      text: convertMarkdownToHTML(params.text),
      parse_mode: 'HTML',
    }),
  },
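As a quick sanity check of the conversion, an illustrative call (not part of the diff) and the string the regexes above would produce:

// Illustration of the markdown-to-HTML conversion applied to the outgoing text
convertMarkdownToHTML('Deploy **done** - see [logs](https://example.com) and run `sim deploy`')
// -> 'Deploy <b>done</b> - see <a href="https://example.com">logs</a> and run <code>sim deploy</code>'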