Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 15:38:00 -05:00)
Compare commits
7 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 78b5ae7b3d | |
| | 016cd6750c | |
| | 3b982533d1 | |
| | 1604ce4d7c | |
| | 86168f1a87 | |
| | 5d7fc5382c | |
| | 7a5aeadbb7 | |
@@ -66,17 +66,17 @@ Define the data to pass to the child workflow:
 - **Single Variable Input**: Select a variable or block output to pass to the child workflow
 - **Variable References**: Use `<variable.name>` to reference workflow variables
-- **Block References**: Use `<blockName.response.field>` to reference outputs from previous blocks
-- **Automatic Mapping**: The selected data is automatically available as `start.response.input` in the child workflow
+- **Block References**: Use `<blockName.field>` to reference outputs from previous blocks
+- **Automatic Mapping**: The selected data is automatically available as `start.input` in the child workflow
 - **Optional**: The input field is optional - child workflows can run without input data
 - **Type Preservation**: Variable types (strings, numbers, objects, etc.) are preserved when passed to the child workflow
 
 ### Examples of Input References
 
 - `<variable.customerData>` - Pass a workflow variable
-- `<dataProcessor.response.result>` - Pass the result from a previous block
-- `<start.response.input>` - Pass the original workflow input
-- `<apiCall.response.data.user>` - Pass a specific field from an API response
+- `<dataProcessor.result>` - Pass the result from a previous block
+- `<start.input>` - Pass the original workflow input
+- `<apiCall.data.user>` - Pass a specific field from an API response
 
 ### Execution Context
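
A minimal sketch of how a flattened `<blockName.field>` reference could be walked against prior block outputs. The `resolveReference` name and the `outputs` shape are illustrative only, not Sim Studio's actual resolver API:

```typescript
// Sketch: resolve "<blockName.field.subField>" against a map of block outputs.
type BlockOutputs = Record<string, unknown>

function resolveReference(tag: string, outputs: Record<string, BlockOutputs>): unknown {
  // Strip the angle brackets: "<dataProcessor.result>" -> "dataProcessor.result"
  const [blockName, ...fields] = tag.replace(/^<|>$/g, '').split('.')
  let value: unknown = outputs[blockName]
  for (const field of fields) {
    if (value == null || typeof value !== 'object') return undefined
    value = (value as Record<string, unknown>)[field]
  }
  return value
}

// With the flattened output format, `<start.input>` reads the child
// workflow's input directly (no `.response` hop).
const outputs = { start: { input: { email: 'a@b.co' } } }
console.log(resolveReference('<start.input>', outputs)) // { email: 'a@b.co' }
```
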
@@ -109,7 +109,7 @@ To prevent infinite recursion and ensure system stability, the Workflow block in
     <strong>Workflow ID</strong>: The identifier of the workflow to execute
   </li>
   <li>
-    <strong>Input Variable</strong>: Variable or block reference to pass to the child workflow (e.g., `<variable.name>` or `<block.response.field>`)
+    <strong>Input Variable</strong>: Variable or block reference to pass to the child workflow (e.g., `<variable.name>` or `<block.field>`)
   </li>
 </ul>
 </Tab>
@@ -150,23 +150,23 @@ blocks:
   - type: workflow
     name: "Setup Customer Account"
     workflowId: "account-setup-workflow"
-    input: "<Validate Customer Data.response.result>"
+    input: "<Validate Customer Data.result>"
 
   - type: workflow
     name: "Send Welcome Email"
     workflowId: "welcome-email-workflow"
-    input: "<Setup Customer Account.response.result.accountDetails>"
+    input: "<Setup Customer Account.result.accountDetails>"
 ```
 
 ### Child Workflow: Customer Validation
 ```yaml
 # Reusable customer validation workflow
-# Access the input data using: start.response.input
+# Access the input data using: start.input
 blocks:
   - type: function
     name: "Validate Email"
     code: |
-      const customerData = start.response.input;
+      const customerData = start.input;
       const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
       return emailRegex.test(customerData.email);
@@ -174,7 +174,7 @@ blocks:
     name: "Check Credit Score"
     url: "https://api.creditcheck.com/score"
     method: "POST"
-    body: "<start.response.input>"
+    body: "<start.input>"
 ```
 
 ### Variable Reference Examples
@@ -184,13 +184,13 @@ blocks:
 input: "<variable.customerInfo>"
 
 # Using block outputs
-input: "<dataProcessor.response.cleanedData>"
+input: "<dataProcessor.cleanedData>"
 
 # Using nested object properties
-input: "<apiCall.response.data.user.profile>"
+input: "<apiCall.data.user.profile>"
 
 # Using array elements (if supported by the resolver)
-input: "<listProcessor.response.items[0]>"
+input: "<listProcessor.items[0]>"
 ```
 
 ## Access Control and Permissions
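
Since the docs only promise array elements "if supported by the resolver", here is a hedged sketch of splitting a reference path into object keys and numeric indices so `<listProcessor.items[0]>` could be walked; `parseSegments` is a hypothetical helper, not the shipped resolver:

```typescript
// Sketch: "listProcessor.items[0]" -> ['listProcessor', 'items', 0]
function parseSegments(path: string): (string | number)[] {
  const segments: (string | number)[] = []
  for (const part of path.split('.')) {
    const match = part.match(/^([^[\]]+)((\[\d+\])*)$/)
    if (!match) continue
    segments.push(match[1]) // the key, e.g. "items"
    for (const idx of match[2].matchAll(/\[(\d+)\]/g)) {
      segments.push(Number(idx[1])) // each index, e.g. 0
    }
  }
  return segments
}

console.log(parseSegments('listProcessor.items[0]')) // ['listProcessor', 'items', 0]
```
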
@@ -49,7 +49,7 @@ In Sim Studio, the Knowledge Base block enables your agents to perform intellige
 
 ## Usage Instructions
 
-Perform semantic vector search across one or more knowledge bases or upload new chunks to documents. Uses advanced AI embeddings to understand meaning and context for search operations.
+Perform semantic vector search across knowledge bases, upload individual chunks to existing documents, or create new documents from text content. Uses advanced AI embeddings to understand meaning and context for search operations.
@@ -100,6 +100,25 @@ Upload a new chunk to a document in a knowledge base
 | `createdAt` | string |
 | `updatedAt` | string |
 
+### `knowledge_create_document`
+
+Create a new document in a knowledge base
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
+| `name` | string | Yes | Name of the document |
+| `content` | string | Yes | Content of the document |
+
+#### Output
+
+| Parameter | Type |
+| --------- | ---- |
+| `data` | string |
+| `name` | string |
+
 ## Block Configuration
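
A hedged sketch of invoking the new `knowledge_create_document` tool. Only the parameter and output names come from the tables above; the `/api/tools/...` endpoint and `createDocument` wrapper are assumptions for illustration, and Sim Studio's real tool dispatch may differ:

```typescript
// Sketch: typed wrapper around the knowledge_create_document tool.
interface CreateDocumentInput {
  knowledgeBaseId: string // ID of the target knowledge base
  name: string // Name of the document
  content: string // Content of the document
}

interface CreateDocumentOutput {
  data: string
  name: string
}

async function createDocument(input: CreateDocumentInput): Promise<CreateDocumentOutput> {
  // Hypothetical transport; the real dispatch path is not shown in this diff.
  const res = await fetch('/api/tools/knowledge_create_document', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(input),
  })
  if (!res.ok) throw new Error(`knowledge_create_document failed: ${res.status}`)
  return res.json()
}
```
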
@@ -93,7 +93,7 @@ export const sampleWorkflowState = {
         webhookPath: { id: 'webhookPath', type: 'short-input', value: '' },
       },
       outputs: {
-        response: { type: { input: 'any' } },
+        input: 'any',
       },
       enabled: true,
       horizontalHandles: true,
@@ -111,7 +111,7 @@ export const sampleWorkflowState = {
           type: 'long-input',
           value: 'You are a helpful assistant',
         },
-        context: { id: 'context', type: 'short-input', value: '<start.response.input>' },
+        context: { id: 'context', type: 'short-input', value: '<start.input>' },
         model: { id: 'model', type: 'dropdown', value: 'gpt-4o' },
         apiKey: { id: 'apiKey', type: 'short-input', value: '{{OPENAI_API_KEY}}' },
       },
@@ -138,6 +138,7 @@ export const sampleWorkflowState = {
     },
   ],
   loops: {},
+  parallels: {},
   lastSaved: Date.now(),
   isDeployed: false,
 }
@@ -764,6 +765,20 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
       bucket: 'test-s3-bucket',
       region: 'us-east-1',
     },
+    S3_KB_CONFIG: {
+      bucket: 'test-s3-kb-bucket',
+      region: 'us-east-1',
+    },
     BLOB_CONFIG: {
       accountName: 'testaccount',
       accountKey: 'testkey',
       containerName: 'test-container',
     },
+    BLOB_KB_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-kb-container',
+    },
   }))
 
   vi.doMock('@aws-sdk/client-s3', () => ({
@@ -806,6 +821,11 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
       accountKey: 'testkey',
       containerName: 'test-container',
     },
+    BLOB_KB_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-kb-container',
+    },
   }))
 
   vi.doMock('@azure/storage-blob', () => ({
@@ -241,7 +241,7 @@ describe('Chat Subdomain API Route', () => {
   })
 
   describe('POST endpoint', () => {
-    it('should handle authentication requests without messages', async () => {
+    it('should handle authentication requests without input', async () => {
       const req = createMockRequest('POST', { password: 'test-password' })
       const params = Promise.resolve({ subdomain: 'password-protected-chat' })
@@ -257,7 +257,7 @@ describe('Chat Subdomain API Route', () => {
       expect(mockSetChatAuthCookie).toHaveBeenCalled()
     })
 
-    it('should return 400 for requests without message', async () => {
+    it('should return 400 for requests without input', async () => {
       const req = createMockRequest('POST', {})
       const params = Promise.resolve({ subdomain: 'test-chat' })
@@ -269,7 +269,7 @@ describe('Chat Subdomain API Route', () => {
 
       const data = await response.json()
       expect(data).toHaveProperty('error')
-      expect(data).toHaveProperty('message', 'No message provided')
+      expect(data).toHaveProperty('message', 'No input provided')
     })
 
     it('should return 401 for unauthorized access', async () => {
@@ -279,7 +279,7 @@ describe('Chat Subdomain API Route', () => {
         error: 'Authentication required',
       }))
 
-      const req = createMockRequest('POST', { message: 'Hello' })
+      const req = createMockRequest('POST', { input: 'Hello' })
       const params = Promise.resolve({ subdomain: 'protected-chat' })
 
       const { POST } = await import('./route')
@@ -342,7 +342,7 @@ describe('Chat Subdomain API Route', () => {
         }
       })
 
-      const req = createMockRequest('POST', { message: 'Hello' })
+      const req = createMockRequest('POST', { input: 'Hello' })
       const params = Promise.resolve({ subdomain: 'test-chat' })
 
       const { POST } = await import('./route')
@@ -357,7 +357,7 @@ describe('Chat Subdomain API Route', () => {
     })
 
     it('should return streaming response for valid chat messages', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world', conversationId: 'conv-123' })
+      const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
       const params = Promise.resolve({ subdomain: 'test-chat' })
 
      const { POST } = await import('./route')
@@ -374,7 +374,7 @@ describe('Chat Subdomain API Route', () => {
     })
 
     it('should handle streaming response body correctly', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world' })
+      const req = createMockRequest('POST', { input: 'Hello world' })
       const params = Promise.resolve({ subdomain: 'test-chat' })
 
       const { POST } = await import('./route')
@@ -404,7 +404,7 @@ describe('Chat Subdomain API Route', () => {
         throw new Error('Execution failed')
       })
 
-      const req = createMockRequest('POST', { message: 'Trigger error' })
+      const req = createMockRequest('POST', { input: 'Trigger error' })
       const params = Promise.resolve({ subdomain: 'test-chat' })
 
       const { POST } = await import('./route')
@@ -444,7 +444,7 @@ describe('Chat Subdomain API Route', () => {
 
     it('should pass conversationId to executeWorkflowForChat when provided', async () => {
       const req = createMockRequest('POST', {
-        message: 'Hello world',
+        input: 'Hello world',
         conversationId: 'test-conversation-123',
       })
       const params = Promise.resolve({ subdomain: 'test-chat' })
@@ -461,7 +461,7 @@ describe('Chat Subdomain API Route', () => {
     })
 
     it('should handle missing conversationId gracefully', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world' })
+      const req = createMockRequest('POST', { input: 'Hello world' })
       const params = Promise.resolve({ subdomain: 'test-chat' })
 
       const { POST } = await import('./route')
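
Every test above registers its mocks and only then pulls the handler in with `await import('./route')`. A minimal sketch of why that ordering matters, assuming Vitest (whose `vi.doMock`, unlike `vi.mock`, is not hoisted to the top of the file); the mocked module shape is illustrative:

```typescript
import { expect, it, vi } from 'vitest'

it('uses the mock registered before the dynamic import', async () => {
  // doMock only affects modules imported AFTER this call.
  vi.doMock('@/lib/uploads', () => ({
    isUsingCloudStorage: () => true, // assumed module shape, for illustration
  }))

  // Import after doMock so the consumer picks up the mocked module.
  const { isUsingCloudStorage } = await import('@/lib/uploads')
  expect(isUsingCloudStorage()).toBe(true)
})
```
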
@@ -72,11 +72,11 @@ export async function POST(
   }
 
   // Use the already parsed body
-  const { message, password, email, conversationId } = parsedBody
+  const { input, password, email, conversationId } = parsedBody
 
-  // If this is an authentication request (has password or email but no message),
+  // If this is an authentication request (has password or email but no input),
   // set auth cookie and return success
-  if ((password || email) && !message) {
+  if ((password || email) && !input) {
     const response = addCorsHeaders(createSuccessResponse({ authenticated: true }), request)
 
     // Set authentication cookie
@@ -86,8 +86,8 @@ export async function POST(
   }
 
   // For chat messages, create regular response
-  if (!message) {
-    return addCorsHeaders(createErrorResponse('No message provided', 400), request)
+  if (!input) {
+    return addCorsHeaders(createErrorResponse('No input provided', 400), request)
   }
 
   // Get the workflow for this chat
@@ -105,8 +105,8 @@ export async function POST(
   }
 
   try {
-    // Execute workflow with structured input (message + conversationId for context)
-    const result = await executeWorkflowForChat(deployment.id, message, conversationId)
+    // Execute workflow with structured input (input + conversationId for context)
+    const result = await executeWorkflowForChat(deployment.id, input, conversationId)
 
     // The result is always a ReadableStream that we can pipe to the client
     const streamResponse = new NextResponse(result, {
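
A minimal client sketch against the renamed contract: the POST body now carries `input` (formerly `message`), and a successful response streams. The subdomain URL and path are illustrative:

```typescript
// Sketch: send chat input to a deployed chat subdomain and drain the stream.
async function sendChatInput(input: string, conversationId?: string): Promise<string> {
  const res = await fetch('https://my-chat.example.com/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ input, conversationId }),
  })
  if (res.status === 400) throw new Error('No input provided')
  if (!res.body) throw new Error('Expected a streaming body')

  // Read the ReadableStream chunk by chunk.
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let text = ''
  for (;;) {
    const { done, value } = await reader.read()
    if (done) break
    text += decoder.decode(value, { stream: true })
  }
  return text
}
```
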
@@ -128,10 +128,10 @@ export async function validateChatAuth(
     return { authorized: false, error: 'Password is required' }
   }
 
-  const { password, message } = parsedBody
+  const { password, input } = parsedBody
 
   // If this is a chat message, not an auth attempt
-  if (message && !password) {
+  if (input && !password) {
     return { authorized: false, error: 'auth_required_password' }
   }
@@ -170,10 +170,10 @@ export async function validateChatAuth(
     return { authorized: false, error: 'Email is required' }
   }
 
-  const { email, message } = parsedBody
+  const { email, input } = parsedBody
 
   // If this is a chat message, not an auth attempt
-  if (message && !email) {
+  if (input && !email) {
     return { authorized: false, error: 'auth_required_email' }
   }
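
Both `validateChatAuth` branches share one disambiguation rule: a body carrying `input` but no credential is a chat request, not an auth attempt. A condensed sketch of that rule (the `classify` helper is mine; the field names mirror the code above):

```typescript
// Sketch: the auth-vs-chat decision used by both branches.
type ParsedBody = { input?: string; password?: string; email?: string }

function classify(body: ParsedBody, authType: 'password' | 'email'): 'auth' | 'chat' {
  const credential = authType === 'password' ? body.password : body.email
  // input without a credential means "chat message arrived unauthenticated"
  return body.input && !credential ? 'chat' : 'auth'
}

console.log(classify({ input: 'Hello' }, 'password')) // 'chat' -> auth_required_password
console.log(classify({ password: 'pw' }, 'password')) // 'auth'
```
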
@@ -211,17 +211,17 @@ export async function validateChatAuth(
 /**
  * Executes a workflow for a chat request and returns the formatted output.
  *
- * When workflows reference <start.response.input>, they receive a structured JSON
- * containing both the message and conversationId for maintaining chat context.
+ * When workflows reference <start.input>, they receive the input directly.
+ * The conversationId is available at <start.conversationId> for maintaining chat context.
  *
  * @param chatId - Chat deployment identifier
- * @param message - User's chat message
+ * @param input - User's chat input
  * @param conversationId - Optional ID for maintaining conversation context
  * @returns Workflow execution result formatted for the chat interface
  */
 export async function executeWorkflowForChat(
   chatId: string,
-  message: string,
+  input: string,
   conversationId?: string
 ): Promise<any> {
   const requestId = crypto.randomUUID().slice(0, 8)
@@ -292,12 +292,12 @@ export async function executeWorkflowForChat(
 
   logger.debug(`[${requestId}] Using ${outputBlockIds.length} output blocks for extraction`)
 
-  // Find the workflow
+  // Find the workflow (deployedState is NOT deprecated - needed for chat execution)
   const workflowResult = await db
     .select({
       state: workflow.state,
-      deployedState: workflow.deployedState,
       isDeployed: workflow.isDeployed,
+      deployedState: workflow.deployedState,
       variables: workflow.variables,
     })
     .from(workflow)
     .where(eq(workflow.id, workflowId))
@@ -308,9 +308,14 @@ export async function executeWorkflowForChat(
     throw new Error('Workflow not available')
   }
 
-  // Use deployed state for execution
-  const state = workflowResult[0].deployedState || workflowResult[0].state
-  const { blocks, edges, loops, parallels } = state as WorkflowState
+  // For chat execution, use ONLY the deployed state (no fallback)
+  if (!workflowResult[0].deployedState) {
+    throw new Error(`Workflow must be deployed to be available for chat`)
+  }
+
+  // Use deployed state for chat execution (this is the stable, deployed version)
+  const deployedState = workflowResult[0].deployedState as WorkflowState
+  const { blocks, edges, loops, parallels } = deployedState
 
   // Prepare for execution, similar to use-workflow-execution.ts
   const mergedStates = mergeSubblockState(blocks)
@@ -344,16 +349,13 @@ export async function executeWorkflowForChat(
     logger.warn(`[${requestId}] Could not fetch environment variables:`, error)
   }
 
   // Get workflow variables
   let workflowVariables = {}
   try {
-    // The workflow state may contain variables
-    const workflowState = state as any
-    if (workflowState.variables) {
+    if (workflowResult[0].variables) {
       workflowVariables =
-        typeof workflowState.variables === 'string'
-          ? JSON.parse(workflowState.variables)
-          : workflowState.variables
+        typeof workflowResult[0].variables === 'string'
+          ? JSON.parse(workflowResult[0].variables)
+          : workflowResult[0].variables
     }
   } catch (error) {
     logger.warn(`[${requestId}] Could not parse workflow variables:`, error)
@@ -443,7 +445,7 @@ export async function executeWorkflowForChat(
     workflow: serializedWorkflow,
     currentBlockStates: processedBlockStates,
     envVarValues: decryptedEnvVars,
-    workflowInput: { input: message, conversationId },
+    workflowInput: { input: input, conversationId },
     workflowVariables,
     contextExtensions: {
       stream: true,
@@ -461,8 +463,8 @@ export async function executeWorkflowForChat(
   if (result && 'success' in result) {
     result.logs?.forEach((log: BlockLog) => {
       if (streamedContent.has(log.blockId)) {
-        if (log.output?.response) {
-          log.output.response.content = streamedContent.get(log.blockId)
+        if (log.output) {
+          log.output.content = streamedContent.get(log.blockId)
         }
       }
     })
@@ -239,7 +239,7 @@ Example Scenario:
 User Prompt: "Fetch user data from an API. Use the User ID passed in as 'userId' and an API Key stored as the 'SERVICE_API_KEY' environment variable."
 
 Generated Code:
-const userId = <block.response.content>; // Correct: Accessing input parameter without quotes
+const userId = <block.content>; // Correct: Accessing input parameter without quotes
 const apiKey = {{SERVICE_API_KEY}}; // Correct: Accessing environment variable without quotes
 const url = \`https://api.example.com/users/\${userId}\`;
@@ -273,7 +273,7 @@ Do not include import/require statements unless absolutely necessary and they ar
 Do not include markdown formatting or explanations.
 Output only the raw TypeScript code. Use modern TypeScript features where appropriate. Do not use semicolons.
 Example:
-const userId = <block.response.content> as string
+const userId = <block.content> as string
 const apiKey = {{SERVICE_API_KEY}}
 const response = await fetch(\`https://api.example.com/users/\${userId}\`, { headers: { Authorization: \`Bearer \${apiKey}\` } })
 if (!response.ok) {
@@ -39,8 +39,9 @@ describe('/api/files/presigned', () => {
     const response = await POST(request)
     const data = await response.json()
 
-    expect(response.status).toBe(400)
+    expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
     expect(data.error).toBe('Direct uploads are only available when cloud storage is enabled')
+    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
     expect(data.directUploadSupported).toBe(false)
   })
@@ -64,7 +65,8 @@ describe('/api/files/presigned', () => {
     const data = await response.json()
 
     expect(response.status).toBe(400)
-    expect(data.error).toBe('Missing fileName or contentType')
+    expect(data.error).toBe('fileName is required and cannot be empty')
+    expect(data.code).toBe('VALIDATION_ERROR')
   })
 
   it('should return error when contentType is missing', async () => {
@@ -87,7 +89,59 @@ describe('/api/files/presigned', () => {
     const data = await response.json()
 
     expect(response.status).toBe(400)
-    expect(data.error).toBe('Missing fileName or contentType')
+    expect(data.error).toBe('contentType is required and cannot be empty')
+    expect(data.code).toBe('VALIDATION_ERROR')
   })
 
+  it('should return error when fileSize is invalid', async () => {
+    setupFileApiMocks({
+      cloudEnabled: true,
+      storageProvider: 's3',
+    })
+
+    const { POST } = await import('./route')
+
+    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
+      method: 'POST',
+      body: JSON.stringify({
+        fileName: 'test.txt',
+        contentType: 'text/plain',
+        fileSize: 0,
+      }),
+    })
+
+    const response = await POST(request)
+    const data = await response.json()
+
+    expect(response.status).toBe(400)
+    expect(data.error).toBe('fileSize must be a positive number')
+    expect(data.code).toBe('VALIDATION_ERROR')
+  })
+
+  it('should return error when file size exceeds limit', async () => {
+    setupFileApiMocks({
+      cloudEnabled: true,
+      storageProvider: 's3',
+    })
+
+    const { POST } = await import('./route')
+
+    const largeFileSize = 150 * 1024 * 1024 // 150MB (exceeds 100MB limit)
+    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
+      method: 'POST',
+      body: JSON.stringify({
+        fileName: 'large-file.txt',
+        contentType: 'text/plain',
+        fileSize: largeFileSize,
+      }),
+    })
+
+    const response = await POST(request)
+    const data = await response.json()
+
+    expect(response.status).toBe(400)
+    expect(data.error).toContain('exceeds maximum allowed size')
+    expect(data.code).toBe('VALIDATION_ERROR')
+  })
+
   it('should generate S3 presigned URL successfully', async () => {
@@ -122,6 +176,34 @@ describe('/api/files/presigned', () => {
     expect(data.directUploadSupported).toBe(true)
   })
 
+  it('should generate knowledge-base S3 presigned URL with kb prefix', async () => {
+    setupFileApiMocks({
+      cloudEnabled: true,
+      storageProvider: 's3',
+    })
+
+    const { POST } = await import('./route')
+
+    const request = new NextRequest(
+      'http://localhost:3000/api/files/presigned?type=knowledge-base',
+      {
+        method: 'POST',
+        body: JSON.stringify({
+          fileName: 'knowledge-doc.pdf',
+          contentType: 'application/pdf',
+          fileSize: 2048,
+        }),
+      }
+    )
+
+    const response = await POST(request)
+    const data = await response.json()
+
+    expect(response.status).toBe(200)
+    expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
+    expect(data.directUploadSupported).toBe(true)
+  })
+
   it('should generate Azure Blob presigned URL successfully', async () => {
     setupFileApiMocks({
       cloudEnabled: true,
@@ -182,8 +264,9 @@ describe('/api/files/presigned', () => {
     const response = await POST(request)
     const data = await response.json()
 
-    expect(response.status).toBe(400)
-    expect(data.error).toBe('Unknown storage provider')
+    expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
+    expect(data.error).toBe('Unknown storage provider: unknown') // Updated error message
+    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
     expect(data.directUploadSupported).toBe(false)
   })
@@ -225,8 +308,10 @@ describe('/api/files/presigned', () => {
     const data = await response.json()
 
     expect(response.status).toBe(500)
-    expect(data.error).toBe('Error')
-    expect(data.message).toBe('S3 service unavailable')
+    expect(data.error).toBe(
+      'Failed to generate S3 presigned URL - check AWS credentials and permissions'
+    ) // Updated error message
+    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
   })
 
   it('should handle Azure Blob errors gracefully', async () => {
@@ -269,8 +354,8 @@ describe('/api/files/presigned', () => {
     const data = await response.json()
 
     expect(response.status).toBe(500)
-    expect(data.error).toBe('Error')
-    expect(data.message).toBe('Azure service unavailable')
+    expect(data.error).toBe('Failed to generate Azure Blob presigned URL') // Updated error message
+    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
   })
 
   it('should handle malformed JSON gracefully', async () => {
@@ -289,9 +374,9 @@ describe('/api/files/presigned', () => {
     const response = await POST(request)
     const data = await response.json()
 
-    expect(response.status).toBe(500)
-    expect(data.error).toBe('SyntaxError')
-    expect(data.message).toContain('Unexpected token')
+    expect(response.status).toBe(400) // Changed from 500 to 400 (ValidationError)
+    expect(data.error).toBe('Invalid JSON in request body') // Updated error message
+    expect(data.code).toBe('VALIDATION_ERROR')
   })
 })
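
The updated tests pin down a single error envelope for this route. A sketch of that contract as a type plus guard; the shape is read off the assertions above, while the type and function names are mine:

```typescript
// Sketch: the error body these tests expect from /api/files/presigned.
interface PresignedErrorBody {
  error: string
  code: 'VALIDATION_ERROR' | 'STORAGE_CONFIG_ERROR'
  directUploadSupported: false
}

function isPresignedError(body: unknown): body is PresignedErrorBody {
  return (
    typeof body === 'object' &&
    body !== null &&
    typeof (body as { error?: unknown }).error === 'string' &&
    typeof (body as { code?: unknown }).code === 'string'
  )
}
```
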
@@ -6,7 +6,7 @@ import { createLogger } from '@/lib/logs/console-logger'
 import { getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
 import { getBlobServiceClient } from '@/lib/uploads/blob/blob-client'
 import { getS3Client, sanitizeFilenameForMetadata } from '@/lib/uploads/s3/s3-client'
-import { BLOB_CONFIG, S3_CONFIG } from '@/lib/uploads/setup'
+import { BLOB_CONFIG, BLOB_KB_CONFIG, S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
 import { createErrorResponse, createOptionsResponse } from '../utils'
 
 const logger = createLogger('PresignedUploadAPI')
@@ -17,124 +17,148 @@ interface PresignedUrlRequest {
   fileSize: number
 }
 
+type UploadType = 'general' | 'knowledge-base'
+
+class PresignedUrlError extends Error {
+  constructor(
+    message: string,
+    public code: string,
+    public statusCode = 400
+  ) {
+    super(message)
+    this.name = 'PresignedUrlError'
+  }
+}
+
+class StorageConfigError extends PresignedUrlError {
+  constructor(message: string) {
+    super(message, 'STORAGE_CONFIG_ERROR', 500)
+  }
+}
+
+class ValidationError extends PresignedUrlError {
+  constructor(message: string) {
+    super(message, 'VALIDATION_ERROR', 400)
+  }
+}
+
 export async function POST(request: NextRequest) {
   try {
-    // Parse the request body
-    const data: PresignedUrlRequest = await request.json()
-    const { fileName, contentType, fileSize } = data
-
-    if (!fileName || !contentType) {
-      return NextResponse.json({ error: 'Missing fileName or contentType' }, { status: 400 })
+    let data: PresignedUrlRequest
+    try {
+      data = await request.json()
+    } catch {
+      throw new ValidationError('Invalid JSON in request body')
     }
 
-    // Only proceed if cloud storage is enabled
+    const { fileName, contentType, fileSize } = data
+
+    if (!fileName?.trim()) {
+      throw new ValidationError('fileName is required and cannot be empty')
+    }
+    if (!contentType?.trim()) {
+      throw new ValidationError('contentType is required and cannot be empty')
+    }
+    if (!fileSize || fileSize <= 0) {
+      throw new ValidationError('fileSize must be a positive number')
+    }
+
+    const MAX_FILE_SIZE = 100 * 1024 * 1024
+    if (fileSize > MAX_FILE_SIZE) {
+      throw new ValidationError(
+        `File size (${fileSize} bytes) exceeds maximum allowed size (${MAX_FILE_SIZE} bytes)`
+      )
+    }
+
+    const uploadTypeParam = request.nextUrl.searchParams.get('type')
+    const uploadType: UploadType =
+      uploadTypeParam === 'knowledge-base' ? 'knowledge-base' : 'general'
+
     if (!isUsingCloudStorage()) {
-      return NextResponse.json(
-        {
-          error: 'Direct uploads are only available when cloud storage is enabled',
-          directUploadSupported: false,
-        },
-        { status: 400 }
+      throw new StorageConfigError(
+        'Direct uploads are only available when cloud storage is enabled'
       )
     }
 
     const storageProvider = getStorageProvider()
+    logger.info(`Generating ${uploadType} presigned URL for ${fileName} using ${storageProvider}`)
 
     switch (storageProvider) {
       case 's3':
-        return await handleS3PresignedUrl(fileName, contentType, fileSize)
+        return await handleS3PresignedUrl(fileName, contentType, fileSize, uploadType)
       case 'blob':
-        return await handleBlobPresignedUrl(fileName, contentType, fileSize)
+        return await handleBlobPresignedUrl(fileName, contentType, fileSize, uploadType)
       default:
-        return NextResponse.json(
-          {
-            error: 'Unknown storage provider',
-            directUploadSupported: false,
-          },
-          { status: 400 }
-        )
+        throw new StorageConfigError(`Unknown storage provider: ${storageProvider}`)
     }
   } catch (error) {
     logger.error('Error generating presigned URL:', error)
 
+    if (error instanceof PresignedUrlError) {
+      return NextResponse.json(
+        {
+          error: error.message,
+          code: error.code,
+          directUploadSupported: false,
+        },
+        { status: error.statusCode }
+      )
+    }
+
     return createErrorResponse(
       error instanceof Error ? error : new Error('Failed to generate presigned URL')
     )
   }
 }
 
-async function handleS3PresignedUrl(fileName: string, contentType: string, fileSize: number) {
-  // Create a unique key for the file
-  const safeFileName = fileName.replace(/\s+/g, '-')
-  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`
-
-  // Sanitize the original filename for S3 metadata to prevent header errors
-  const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)
-
-  // Create the S3 command
-  const command = new PutObjectCommand({
-    Bucket: S3_CONFIG.bucket,
-    Key: uniqueKey,
-    ContentType: contentType,
-    Metadata: {
-      originalName: sanitizedOriginalName,
-      uploadedAt: new Date().toISOString(),
-    },
-  })
-
-  // Generate the presigned URL
-  const presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
-
-  // Create a path for API to serve the file
-  const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
-
-  logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
-
-  return NextResponse.json({
-    presignedUrl,
-    fileInfo: {
-      path: servePath,
-      key: uniqueKey,
-      name: fileName,
-      size: fileSize,
-      type: contentType,
-    },
-    directUploadSupported: true,
-  })
-}
-
-async function handleBlobPresignedUrl(fileName: string, contentType: string, fileSize: number) {
-  // Create a unique key for the file
-  const safeFileName = fileName.replace(/\s+/g, '-')
-  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`
-
+async function handleS3PresignedUrl(
+  fileName: string,
+  contentType: string,
+  fileSize: number,
+  uploadType: UploadType
+) {
   try {
-    const blobServiceClient = getBlobServiceClient()
-    const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
-    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
+    const config = uploadType === 'knowledge-base' ? S3_KB_CONFIG : S3_CONFIG
 
-    // Generate SAS token for upload (write permission)
-    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
-      await import('@azure/storage-blob')
-
-    const sasOptions = {
-      containerName: BLOB_CONFIG.containerName,
-      blobName: uniqueKey,
-      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
-      startsOn: new Date(),
-      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
+    if (!config.bucket || !config.region) {
+      throw new StorageConfigError(`S3 configuration missing for ${uploadType} uploads`)
     }
 
-    const sasToken = generateBlobSASQueryParameters(
-      sasOptions,
-      new StorageSharedKeyCredential(BLOB_CONFIG.accountName, BLOB_CONFIG.accountKey || '')
-    ).toString()
+    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
+    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
+    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`
 
-    const presignedUrl = `${blockBlobClient.url}?${sasToken}`
+    const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)
 
-    // Create a path for API to serve the file
-    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
+    const metadata: Record<string, string> = {
+      originalName: sanitizedOriginalName,
+      uploadedAt: new Date().toISOString(),
+    }
 
-    logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
+    if (uploadType === 'knowledge-base') {
+      metadata.purpose = 'knowledge-base'
+    }
+
+    const command = new PutObjectCommand({
+      Bucket: config.bucket,
+      Key: uniqueKey,
+      ContentType: contentType,
+      Metadata: metadata,
+    })
+
+    let presignedUrl: string
+    try {
+      presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
+    } catch (s3Error) {
+      logger.error('Failed to generate S3 presigned URL:', s3Error)
+      throw new StorageConfigError(
+        'Failed to generate S3 presigned URL - check AWS credentials and permissions'
+      )
+    }
+
+    const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
+
+    logger.info(`Generated ${uploadType} S3 presigned URL for ${fileName} (${uniqueKey})`)
 
     return NextResponse.json({
       presignedUrl,
@@ -146,22 +170,103 @@ async function handleBlobPresignedUrl(fileName: string, contentType: string, fil
       type: contentType,
     },
     directUploadSupported: true,
-      uploadHeaders: {
-        'x-ms-blob-type': 'BlockBlob',
-        'x-ms-blob-content-type': contentType,
-        'x-ms-meta-originalname': encodeURIComponent(fileName),
-        'x-ms-meta-uploadedat': new Date().toISOString(),
-      },
     })
   } catch (error) {
-    logger.error('Error generating Blob presigned URL:', error)
-    return createErrorResponse(
-      error instanceof Error ? error : new Error('Failed to generate Blob presigned URL')
-    )
+    if (error instanceof PresignedUrlError) {
+      throw error
+    }
+    logger.error('Error in S3 presigned URL generation:', error)
+    throw new StorageConfigError('Failed to generate S3 presigned URL')
   }
 }
 
+async function handleBlobPresignedUrl(
+  fileName: string,
+  contentType: string,
+  fileSize: number,
+  uploadType: UploadType
+) {
+  try {
+    const config = uploadType === 'knowledge-base' ? BLOB_KB_CONFIG : BLOB_CONFIG
+
+    if (
+      !config.accountName ||
+      !config.containerName ||
+      (!config.accountKey && !config.connectionString)
+    ) {
+      throw new StorageConfigError(`Azure Blob configuration missing for ${uploadType} uploads`)
+    }
+
+    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
+    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
+    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`
+
+    const blobServiceClient = getBlobServiceClient()
+    const containerClient = blobServiceClient.getContainerClient(config.containerName)
+    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
+
+    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
+      await import('@azure/storage-blob')
+
+    const sasOptions = {
+      containerName: config.containerName,
+      blobName: uniqueKey,
+      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
+      startsOn: new Date(),
+      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
+    }
+
+    let sasToken: string
+    try {
+      sasToken = generateBlobSASQueryParameters(
+        sasOptions,
+        new StorageSharedKeyCredential(config.accountName, config.accountKey || '')
+      ).toString()
+    } catch (blobError) {
+      logger.error('Failed to generate Azure Blob SAS token:', blobError)
+      throw new StorageConfigError(
+        'Failed to generate Azure Blob SAS token - check Azure credentials and permissions'
+      )
+    }
+
+    const presignedUrl = `${blockBlobClient.url}?${sasToken}`
+
+    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
+
+    logger.info(`Generated ${uploadType} Azure Blob presigned URL for ${fileName} (${uniqueKey})`)
+
+    const uploadHeaders: Record<string, string> = {
+      'x-ms-blob-type': 'BlockBlob',
+      'x-ms-blob-content-type': contentType,
+      'x-ms-meta-originalname': encodeURIComponent(fileName),
+      'x-ms-meta-uploadedat': new Date().toISOString(),
+    }
+
+    if (uploadType === 'knowledge-base') {
+      uploadHeaders['x-ms-meta-purpose'] = 'knowledge-base'
+    }
+
+    return NextResponse.json({
+      presignedUrl,
+      fileInfo: {
+        path: servePath,
+        key: uniqueKey,
+        name: fileName,
+        size: fileSize,
+        type: contentType,
+      },
+      directUploadSupported: true,
+      uploadHeaders,
+    })
+  } catch (error) {
+    if (error instanceof PresignedUrlError) {
+      throw error
+    }
+    logger.error('Error in Azure Blob presigned URL generation:', error)
+    throw new StorageConfigError('Failed to generate Azure Blob presigned URL')
+  }
+}
+
 // Handle preflight requests
 export async function OPTIONS() {
   return createOptionsResponse()
 }
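
A client-side sketch of consuming this endpoint end to end: request a presigned URL, then PUT the bytes straight to storage. For Azure the response's `uploadHeaders` must be echoed on the PUT; for S3 only the Content-Type is needed (the spread is then a no-op). The relative URL and the `File` source are illustrative:

```typescript
// Sketch: two-step direct upload against the presigned endpoint above.
async function directUpload(file: File, type: 'general' | 'knowledge-base' = 'general') {
  const res = await fetch(`/api/files/presigned?type=${type}`, {
    method: 'POST',
    body: JSON.stringify({
      fileName: file.name,
      contentType: file.type,
      fileSize: file.size,
    }),
  })
  const data = await res.json()
  if (!res.ok || !data.directUploadSupported) {
    throw new Error(`${data.error} (${data.code})`)
  }

  // uploadHeaders is returned for Azure Blob; absent for S3.
  const put = await fetch(data.presignedUrl, {
    method: 'PUT',
    headers: { 'Content-Type': file.type, ...(data.uploadHeaders ?? {}) },
    body: file,
  })
  if (!put.ok) throw new Error(`Upload failed: ${put.status}`)
  return data.fileInfo // { path, key, name, size, type }
}
```
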
@@ -1,7 +1,8 @@
 import { readFile } from 'fs/promises'
 import type { NextRequest, NextResponse } from 'next/server'
 import { createLogger } from '@/lib/logs/console-logger'
-import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
+import { downloadFile, getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
+import { BLOB_KB_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
 import '@/lib/uploads/setup.server'
 
 import {
@@ -16,6 +17,19 @@ export const dynamic = 'force-dynamic'
 
 const logger = createLogger('FilesServeAPI')
 
+async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
+  return new Promise((resolve, reject) => {
+    const chunks: Buffer[] = []
+    readableStream.on('data', (data) => {
+      chunks.push(data instanceof Buffer ? data : Buffer.from(data))
+    })
+    readableStream.on('end', () => {
+      resolve(Buffer.concat(chunks))
+    })
+    readableStream.on('error', reject)
+  })
+}
+
 /**
  * Main API route handler for serving files
 */
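
A usage sketch for the helper above: it accepts any `NodeJS.ReadableStream`, not just Azure blob downloads. The `declare` stands in for the implementation shown in the diff, and the file path is illustrative:

```typescript
import { createReadStream } from 'fs'

declare function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer>

async function readLocalFile(path: string): Promise<Buffer> {
  // Drain a local file stream into a single Buffer.
  return streamToBuffer(createReadStream(path))
}
```
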
@@ -85,12 +99,65 @@ async function handleLocalFile(filename: string): Promise<NextResponse> {
   }
 }
 
+async function downloadKBFile(cloudKey: string): Promise<Buffer> {
+  const storageProvider = getStorageProvider()
+
+  if (storageProvider === 'blob') {
+    logger.info(`Downloading KB file from Azure Blob Storage: ${cloudKey}`)
+    // Use KB-specific blob configuration
+    const { getBlobServiceClient } = await import('@/lib/uploads/blob/blob-client')
+    const blobServiceClient = getBlobServiceClient()
+    const containerClient = blobServiceClient.getContainerClient(BLOB_KB_CONFIG.containerName)
+    const blockBlobClient = containerClient.getBlockBlobClient(cloudKey)
+
+    const downloadBlockBlobResponse = await blockBlobClient.download()
+    if (!downloadBlockBlobResponse.readableStreamBody) {
+      throw new Error('Failed to get readable stream from blob download')
+    }
+
+    // Convert stream to buffer
+    return await streamToBuffer(downloadBlockBlobResponse.readableStreamBody)
+  }
+
+  if (storageProvider === 's3') {
+    logger.info(`Downloading KB file from S3: ${cloudKey}`)
+    // Use KB-specific S3 configuration
+    const { getS3Client } = await import('@/lib/uploads/s3/s3-client')
+    const { GetObjectCommand } = await import('@aws-sdk/client-s3')
+
+    const s3Client = getS3Client()
+    const command = new GetObjectCommand({
+      Bucket: S3_KB_CONFIG.bucket,
+      Key: cloudKey,
+    })
+
+    const response = await s3Client.send(command)
+    if (!response.Body) {
+      throw new Error('No body in S3 response')
+    }
+
+    // Convert stream to buffer using the same method as the regular S3 client
+    const stream = response.Body as any
+    return new Promise<Buffer>((resolve, reject) => {
+      const chunks: Buffer[] = []
+      stream.on('data', (chunk: Buffer) => chunks.push(chunk))
+      stream.on('end', () => resolve(Buffer.concat(chunks)))
+      stream.on('error', reject)
+    })
+  }
+
+  throw new Error(`Unsupported storage provider for KB files: ${storageProvider}`)
+}
+
 /**
  * Proxy cloud file through our server
  */
 async function handleCloudProxy(cloudKey: string): Promise<NextResponse> {
   try {
-    const fileBuffer = await downloadFile(cloudKey)
+    // Check if this is a KB file (starts with 'kb/')
+    const isKBFile = cloudKey.startsWith('kb/')
+
+    const fileBuffer = isKBFile ? await downloadKBFile(cloudKey) : await downloadFile(cloudKey)
 
     // Extract the original filename from the key (last part after last /)
     const originalFilename = cloudKey.split('/').pop() || 'download'
@@ -391,6 +391,225 @@ describe('Function Execute API Route', () => {
     })
   })
 
+  describe('Enhanced Error Handling', () => {
+    it('should provide detailed syntax error with line content', async () => {
+      // Mock VM Script to throw a syntax error
+      const mockScript = vi.fn().mockImplementation(() => {
+        const error = new Error('Invalid or unexpected token')
+        error.name = 'SyntaxError'
+        error.stack = `user-function.js:5
+description: "This has a missing closing quote
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+SyntaxError: Invalid or unexpected token
+    at new Script (node:vm:117:7)
+    at POST (/path/to/route.ts:123:24)`
+        throw error
+      })
+
+      vi.doMock('vm', () => ({
+        createContext: mockCreateContext,
+        Script: mockScript,
+      }))
+
+      const req = createMockRequest('POST', {
+        code: 'const obj = {\n name: "test",\n description: "This has a missing closing quote\n};\nreturn obj;',
+        timeout: 5000,
+      })
+
+      const { POST } = await import('./route')
+      const response = await POST(req)
+      const data = await response.json()
+
+      expect(response.status).toBe(500)
+      expect(data.success).toBe(false)
+      expect(data.error).toContain('Syntax Error')
+      expect(data.error).toContain('Line 3')
+      expect(data.error).toContain('description: "This has a missing closing quote')
+      expect(data.error).toContain('Invalid or unexpected token')
+      expect(data.error).toContain('(Check for missing quotes, brackets, or semicolons)')
+
+      // Check debug information
+      expect(data.debug).toBeDefined()
+      expect(data.debug.line).toBe(3)
+      expect(data.debug.errorType).toBe('SyntaxError')
+      expect(data.debug.lineContent).toBe('description: "This has a missing closing quote')
+    })
+
+    it('should provide detailed runtime error with line and column', async () => {
+      // Create the error object first
+      const runtimeError = new Error("Cannot read properties of null (reading 'someMethod')")
+      runtimeError.name = 'TypeError'
+      runtimeError.stack = `TypeError: Cannot read properties of null (reading 'someMethod')
+    at user-function.js:4:16
+    at user-function.js:9:3
+    at Script.runInContext (node:vm:147:14)`
+
+      // Mock successful script creation but runtime error
+      const mockScript = vi.fn().mockImplementation(() => ({
+        runInContext: vi.fn().mockRejectedValue(runtimeError),
+      }))
+
+      vi.doMock('vm', () => ({
+        createContext: mockCreateContext,
+        Script: mockScript,
+      }))
+
+      const req = createMockRequest('POST', {
+        code: 'const obj = null;\nreturn obj.someMethod();',
+        timeout: 5000,
+      })
+
+      const { POST } = await import('./route')
+      const response = await POST(req)
+      const data = await response.json()
+
+      expect(response.status).toBe(500)
+      expect(data.success).toBe(false)
+      expect(data.error).toContain('Type Error')
+      expect(data.error).toContain('Line 2')
+      expect(data.error).toContain('return obj.someMethod();')
+      expect(data.error).toContain('Cannot read properties of null')
+
+      // Check debug information
+      expect(data.debug).toBeDefined()
+      expect(data.debug.line).toBe(2)
+      expect(data.debug.column).toBe(16)
+      expect(data.debug.errorType).toBe('TypeError')
+      expect(data.debug.lineContent).toBe('return obj.someMethod();')
+    })
+
+    it('should handle ReferenceError with enhanced details', async () => {
+      // Create the error object first
+      const referenceError = new Error('undefinedVariable is not defined')
+      referenceError.name = 'ReferenceError'
+      referenceError.stack = `ReferenceError: undefinedVariable is not defined
+    at user-function.js:4:8
+    at Script.runInContext (node:vm:147:14)`
+
+      const mockScript = vi.fn().mockImplementation(() => ({
+        runInContext: vi.fn().mockRejectedValue(referenceError),
+      }))
+
+      vi.doMock('vm', () => ({
+        createContext: mockCreateContext,
+        Script: mockScript,
+      }))
+
+      const req = createMockRequest('POST', {
+        code: 'const x = 42;\nreturn undefinedVariable + x;',
+        timeout: 5000,
+      })
+
+      const { POST } = await import('./route')
+      const response = await POST(req)
+      const data = await response.json()
+
+      expect(response.status).toBe(500)
+      expect(data.success).toBe(false)
+      expect(data.error).toContain('Reference Error')
+      expect(data.error).toContain('Line 2')
+      expect(data.error).toContain('return undefinedVariable + x;')
+      expect(data.error).toContain('undefinedVariable is not defined')
+    })
+
+    it('should handle errors without line content gracefully', async () => {
+      const mockScript = vi.fn().mockImplementation(() => {
+        const error = new Error('Generic error without stack trace')
+        error.name = 'Error'
+        // No stack trace
+        throw error
+      })
+
+      vi.doMock('vm', () => ({
+        createContext: mockCreateContext,
+        Script: mockScript,
+      }))
+
+      const req = createMockRequest('POST', {
+        code: 'return "test";',
+        timeout: 5000,
+      })
+
+      const { POST } = await import('./route')
+      const response = await POST(req)
+      const data = await response.json()
+
+      expect(response.status).toBe(500)
+      expect(data.success).toBe(false)
+      expect(data.error).toBe('Generic error without stack trace')
+
+      // Should still have debug info, but without line details
+      expect(data.debug).toBeDefined()
+      expect(data.debug.errorType).toBe('Error')
+      expect(data.debug.line).toBeUndefined()
+      expect(data.debug.lineContent).toBeUndefined()
+    })
+
+    it('should extract line numbers from different stack trace formats', async () => {
+      const mockScript = vi.fn().mockImplementation(() => {
+        const error = new Error('Test error')
+        error.name = 'Error'
+        error.stack = `Error: Test error
+    at user-function.js:7:25
+    at async function
+    at Script.runInContext (node:vm:147:14)`
+        throw error
+      })
+
+      vi.doMock('vm', () => ({
+        createContext: mockCreateContext,
+        Script: mockScript,
+      }))
+
+      const req = createMockRequest('POST', {
+        code: 'const a = 1;\nconst b = 2;\nconst c = 3;\nconst d = 4;\nreturn a + b + c + d;',
+        timeout: 5000,
+      })
+
+      const { POST } = await import('./route')
+      const response = await POST(req)
+      const data = await response.json()
+
+      expect(response.status).toBe(500)
+      expect(data.success).toBe(false)
+
+      // Line 7 in VM should map to line 5 in user code (7 - 3 + 1 = 5)
+      expect(data.debug.line).toBe(5)
+      expect(data.debug.column).toBe(25)
+      expect(data.debug.lineContent).toBe('return a + b + c + d;')
+    })
+
+    it('should provide helpful suggestions for common syntax errors', async () => {
+      const mockScript = vi.fn().mockImplementation(() => {
+        const error = new Error('Unexpected end of input')
+        error.name = 'SyntaxError'
+        error.stack = 'user-function.js:4\nSyntaxError: Unexpected end of input'
+        throw error
+      })
+
+      vi.doMock('vm', () => ({
+        createContext: mockCreateContext,
+        Script: mockScript,
+      }))
+
+      const req = createMockRequest('POST', {
+        code: 'const obj = {\n name: "test"\n// Missing closing brace',
+        timeout: 5000,
+      })
+
+      const { POST } = await import('./route')
+      const response = await POST(req)
+      const data = await response.json()
+
+      expect(response.status).toBe(500)
+      expect(data.success).toBe(false)
+      expect(data.error).toContain('Syntax Error')
+      expect(data.error).toContain('Unexpected end of input')
+      expect(data.error).toContain('(Check for missing closing brackets or braces)')
+    })
+  })
+
   describe('Utility Functions', () => {
     it('should properly escape regex special characters', async () => {
       // This tests the escapeRegExp function indirectly
@@ -8,6 +8,210 @@ export const maxDuration = 60
|
||||
|
||||
const logger = createLogger('FunctionExecuteAPI')
|
||||
|
||||
/**
|
||||
* Enhanced error information interface
|
||||
*/
|
||||
interface EnhancedError {
|
||||
message: string
|
||||
line?: number
|
||||
column?: number
|
||||
stack?: string
|
||||
name: string
|
||||
originalError: any
|
||||
lineContent?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract enhanced error information from VM execution errors
|
||||
*/
|
||||
function extractEnhancedError(
|
||||
error: any,
|
||||
userCodeStartLine: number,
|
||||
userCode?: string
|
||||
): EnhancedError {
|
||||
const enhanced: EnhancedError = {
|
||||
message: error.message || 'Unknown error',
|
||||
name: error.name || 'Error',
|
||||
originalError: error,
|
||||
}
|
||||
|
||||
if (error.stack) {
|
||||
enhanced.stack = error.stack
|
||||
|
||||
// Parse stack trace to extract line and column information
|
||||
// Handle both compilation errors and runtime errors
|
||||
const stackLines: string[] = error.stack.split('\n')
|
||||
|
||||
for (const line of stackLines) {
|
||||
// Pattern 1: Compilation errors - "user-function.js:6"
|
||||
let match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
|
||||
// Pattern 2: Runtime errors - "at user-function.js:5:12"
|
||||
if (!match) {
|
||||
match = line.match(/at\s+user-function\.js:(\d+):(\d+)/)
|
||||
}
|
||||
|
||||
// Pattern 3: Generic patterns for any line containing our filename
|
||||
if (!match) {
|
||||
match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
}
|
||||
|
||||
if (match) {
|
||||
const stackLine = Number.parseInt(match[1], 10)
|
||||
const stackColumn = match[2] ? Number.parseInt(match[2], 10) : undefined
|
||||
|
||||
// Adjust line number to account for wrapper code
|
||||
// The user code starts at a specific line in our wrapper
|
||||
const adjustedLine = stackLine - userCodeStartLine + 1
|
||||
|
||||
// Check if this is a syntax error in wrapper code caused by incomplete user code
|
||||
const isWrapperSyntaxError =
|
||||
stackLine > userCodeStartLine &&
|
||||
error.name === 'SyntaxError' &&
|
||||
(error.message.includes('Unexpected token') ||
|
||||
error.message.includes('Unexpected end of input'))
|
||||
|
||||
if (isWrapperSyntaxError && userCode) {
|
||||
// Map wrapper syntax errors to the last line of user code
|
||||
const codeLines = userCode.split('\n')
|
||||
const lastUserLine = codeLines.length
|
||||
enhanced.line = lastUserLine
|
||||
enhanced.column = codeLines[lastUserLine - 1]?.length || 0
|
||||
enhanced.lineContent = codeLines[lastUserLine - 1]?.trim()
|
||||
break
|
||||
}
|
||||
|
||||
if (adjustedLine > 0) {
|
||||
enhanced.line = adjustedLine
|
||||
enhanced.column = stackColumn
|
||||
|
||||
// Extract the actual line content from user code
|
||||
if (userCode) {
|
||||
const codeLines = userCode.split('\n')
|
||||
if (adjustedLine <= codeLines.length) {
|
||||
enhanced.lineContent = codeLines[adjustedLine - 1]?.trim()
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if (stackLine <= userCodeStartLine) {
|
||||
// Error is in wrapper code itself
|
||||
enhanced.line = stackLine
|
||||
enhanced.column = stackColumn
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up stack trace to show user-relevant information
|
||||
const cleanedStackLines: string[] = stackLines
|
||||
.filter(
|
||||
(line: string) =>
|
||||
line.includes('user-function.js') ||
|
||||
(!line.includes('vm.js') && !line.includes('internal/'))
|
||||
)
|
||||
.map((line: string) => line.replace(/\s+at\s+/, ' at '))
|
||||
|
||||
if (cleanedStackLines.length > 0) {
|
||||
enhanced.stack = cleanedStackLines.join('\n')
|
||||
}
|
||||
}
|
||||
|
||||
// Keep original message without adding error type prefix
|
||||
// The error type will be added later in createUserFriendlyErrorMessage
|
||||
|
||||
return enhanced
|
||||
}

/**
 * Create a detailed error message for users
 */
function createUserFriendlyErrorMessage(
enhanced: EnhancedError,
requestId: string,
userCode?: string
): string {
let errorMessage = enhanced.message

// Add line and column information if available
if (enhanced.line !== undefined) {
let lineInfo = `Line ${enhanced.line}${enhanced.column !== undefined ? `:${enhanced.column}` : ''}`

// Add the actual line content if available
if (enhanced.lineContent) {
lineInfo += `: \`${enhanced.lineContent}\``
}

errorMessage = `${lineInfo} - ${errorMessage}`
} else {
// If no line number, try to extract it from stack trace for display
if (enhanced.stack) {
const stackMatch = enhanced.stack.match(/user-function\.js:(\d+)(?::(\d+))?/)
if (stackMatch) {
const line = Number.parseInt(stackMatch[1], 10)
const column = stackMatch[2] ? Number.parseInt(stackMatch[2], 10) : undefined
let lineInfo = `Line ${line}${column ? `:${column}` : ''}`

// Try to get line content if we have userCode
if (userCode) {
const codeLines = userCode.split('\n')
// Note: stackMatch gives us VM line number, need to adjust
// This is a fallback case, so we might not have perfect line mapping
if (line <= codeLines.length) {
const lineContent = codeLines[line - 1]?.trim()
if (lineContent) {
lineInfo += `: \`${lineContent}\``
}
}
}

errorMessage = `${lineInfo} - ${errorMessage}`
}
}
}

// Add error type prefix with consistent naming
if (enhanced.name !== 'Error') {
const errorTypePrefix =
enhanced.name === 'SyntaxError'
? 'Syntax Error'
: enhanced.name === 'TypeError'
? 'Type Error'
: enhanced.name === 'ReferenceError'
? 'Reference Error'
: enhanced.name

// Only add prefix if not already present
if (!errorMessage.toLowerCase().includes(errorTypePrefix.toLowerCase())) {
errorMessage = `${errorTypePrefix}: ${errorMessage}`
}
}

// For syntax errors, provide additional context
if (enhanced.name === 'SyntaxError') {
if (errorMessage.includes('Invalid or unexpected token')) {
errorMessage += ' (Check for missing quotes, brackets, or semicolons)'
} else if (errorMessage.includes('Unexpected end of input')) {
errorMessage += ' (Check for missing closing brackets or braces)'
} else if (errorMessage.includes('Unexpected token')) {
// Check if this might be due to incomplete code
if (
enhanced.lineContent &&
((enhanced.lineContent.includes('(') && !enhanced.lineContent.includes(')')) ||
(enhanced.lineContent.includes('[') && !enhanced.lineContent.includes(']')) ||
(enhanced.lineContent.includes('{') && !enhanced.lineContent.includes('}')))
) {
errorMessage += ' (Check for missing closing parentheses, brackets, or braces)'
} else {
errorMessage += ' (Check your syntax)'
}
}
}

return errorMessage
}
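As a worked example with hypothetical values, the message this function assembles takes the following shape:

```ts
const enhanced = {
  name: 'ReferenceError',
  message: 'x is not defined',
  line: 2,
  column: 7,
  lineContent: 'return x + 1',
}
// Location is prepended first:  "Line 2:7: `return x + 1` - x is not defined"
// Then the type prefix:         "Reference Error: Line 2:7: `return x + 1` - x is not defined"
```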

/**
 * Resolves environment variables and tags in code
 * @param code - Code with variables
@@ -121,6 +325,8 @@ export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const startTime = Date.now()
let stdout = ''
let userCodeStartLine = 3 // Default value for error reporting
let resolvedCode = '' // Store resolved code for error reporting

try {
const body = await req.json()
@@ -149,13 +355,15 @@ export async function POST(req: NextRequest) {
})

// Resolve variables in the code with workflow environment variables
const { resolvedCode, contextVariables } = resolveCodeVariables(
const codeResolution = resolveCodeVariables(
code,
executionParams,
envVars,
blockData,
blockNameMapping
)
resolvedCode = codeResolution.resolvedCode
const contextVariables = codeResolution.contextVariables

const executionMethod = 'vm' // Default execution method

@@ -301,16 +509,12 @@ export async function POST(req: NextRequest) {
// timeout,
// displayErrors: true,
// })
// logger.info(`[${requestId}] VM execution result`, {
// result,
// stdout,
// })
// }
// } else {
logger.info(`[${requestId}] Using VM for code execution`, {
resolvedCode,
executionParams,
envVars,
hasEnvVars: Object.keys(envVars).length > 0,
})

// Create a secure context with console logging
@@ -336,28 +540,40 @@ export async function POST(req: NextRequest) {
},
})

const script = new Script(`
(async () => {
try {
${
isCustomTool
? `// For custom tools, make parameters directly accessible
${Object.keys(executionParams)
.map((key) => `const ${key} = params.${key};`)
.join('\n ')}`
: ''
}
${resolvedCode}
} catch (error) {
console.error(error);
throw error;
}
})()
`)
// Calculate line offset for user code to provide accurate error reporting
const wrapperLines = ['(async () => {', ' try {']

// Add custom tool parameter declarations if needed
if (isCustomTool) {
wrapperLines.push(' // For custom tools, make parameters directly accessible')
Object.keys(executionParams).forEach((key) => {
wrapperLines.push(` const ${key} = params.${key};`)
})
}

userCodeStartLine = wrapperLines.length + 1 // +1 because user code starts on next line

// Build the complete script with proper formatting for line numbers
const fullScript = [
...wrapperLines,
` ${resolvedCode.split('\n').join('\n ')}`, // Indent user code
' } catch (error) {',
' console.error(error);',
' throw error;',
' }',
'})()',
].join('\n')

const script = new Script(fullScript, {
filename: 'user-function.js', // This filename will appear in stack traces
lineOffset: 0, // Start line numbering from 0
columnOffset: 0, // Start column numbering from 0
})

const result = await script.runInContext(context, {
timeout,
displayErrors: true,
breakOnSigint: true, // Allow breaking on SIGINT for better debugging
})
// }
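A small sketch of the arithmetic this wrapper enables, assuming no custom-tool parameter declarations:

```ts
// wrapperLines is ['(async () => {', '  try {'], so user code begins on script line 3.
const userCodeStartLine = 2 + 1 // 3
// A VM frame reported at user-function.js:4 therefore maps to user line:
const adjustedLine = 4 - userCodeStartLine + 1 // 2
```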

@@ -384,14 +600,40 @@ export async function POST(req: NextRequest) {
executionTime,
})

const enhancedError = extractEnhancedError(error, userCodeStartLine, resolvedCode)
const userFriendlyErrorMessage = createUserFriendlyErrorMessage(
enhancedError,
requestId,
resolvedCode
)

// Log enhanced error details for debugging
logger.error(`[${requestId}] Enhanced error details`, {
originalMessage: error.message,
enhancedMessage: userFriendlyErrorMessage,
line: enhancedError.line,
column: enhancedError.column,
lineContent: enhancedError.lineContent,
errorType: enhancedError.name,
userCodeStartLine,
})

const errorResponse = {
success: false,
error: error.message || 'Code execution failed',
error: userFriendlyErrorMessage,
output: {
result: null,
stdout,
executionTime,
},
// Include debug information in development or for debugging
debug: {
line: enhancedError.line,
column: enhancedError.column,
errorType: enhancedError.name,
lineContent: enhancedError.lineContent,
stack: enhancedError.stack,
},
}

return NextResponse.json(errorResponse, { status: 500 })

@@ -137,24 +137,22 @@ export async function POST(request: NextRequest) {
const safeExecutionData = {
success: executionData.success,
output: {
response: {
// Sanitize content to remove non-ASCII characters that would cause ByteString errors
content: executionData.output?.response?.content
? String(executionData.output.response.content).replace(/[\u0080-\uFFFF]/g, '')
: '',
model: executionData.output?.response?.model,
tokens: executionData.output?.response?.tokens || {
prompt: 0,
completion: 0,
total: 0,
},
// Sanitize any potential Unicode characters in tool calls
toolCalls: executionData.output?.response?.toolCalls
? sanitizeToolCalls(executionData.output.response.toolCalls)
: undefined,
providerTiming: executionData.output?.response?.providerTiming,
cost: executionData.output?.response?.cost,
// Sanitize content to remove non-ASCII characters that would cause ByteString errors
content: executionData.output?.content
? String(executionData.output.content).replace(/[\u0080-\uFFFF]/g, '')
: '',
model: executionData.output?.model,
tokens: executionData.output?.tokens || {
prompt: 0,
completion: 0,
total: 0,
},
// Sanitize any potential Unicode characters in tool calls
toolCalls: executionData.output?.toolCalls
? sanitizeToolCalls(executionData.output.toolCalls)
: undefined,
providerTiming: executionData.output?.providerTiming,
cost: executionData.output?.cost,
},
error: executionData.error,
logs: [], // Strip logs from header to avoid encoding issues
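The stripping above exists because HTTP header values must be ByteStrings; the regex conservatively drops every code point from U+0080 upward. A minimal sketch of the effect:

```ts
const sanitized = 'résumé ✓'.replace(/[\u0080-\uFFFF]/g, '') // 'rsum '
```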

@@ -46,11 +46,19 @@ const formatResponse = (responseData: any, status = 200) => {
 */
const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
const errorMessage = error instanceof Error ? error.message : String(error)
const errorStack = error instanceof Error ? error.stack : undefined

logger.error('Creating error response', {
errorMessage,
status,
stack: process.env.NODE_ENV === 'development' ? errorStack : undefined,
})

return formatResponse(
{
success: false,
error: errorMessage,
stack: process.env.NODE_ENV === 'development' ? errorStack : undefined,
...additionalData,
},
status
@@ -67,6 +75,7 @@ export async function GET(request: Request) {
const requestId = crypto.randomUUID().slice(0, 8)

if (!targetUrl) {
logger.error(`[${requestId}] Missing 'url' parameter`)
return createErrorResponse("Missing 'url' parameter", 400)
}

@@ -126,6 +135,10 @@ export async function GET(request: Request) {
: response.statusText || `HTTP error ${response.status}`
: undefined

if (!response.ok) {
logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`)
}

// Return the proxied response
return formatResponse({
success: response.ok,
@@ -139,6 +152,7 @@ export async function GET(request: Request) {
logger.error(`[${requestId}] Proxy GET request failed`, {
url: targetUrl,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})

return createErrorResponse(error)
@@ -151,22 +165,40 @@ export async function POST(request: Request) {
const startTimeISO = startTime.toISOString()

try {
const { toolId, params } = await request.json()
// Parse request body
let requestBody
try {
requestBody = await request.json()
} catch (parseError) {
logger.error(`[${requestId}] Failed to parse request body`, {
error: parseError instanceof Error ? parseError.message : String(parseError),
})
throw new Error('Invalid JSON in request body')
}

logger.debug(`[${requestId}] Proxy request for tool`, {
toolId,
hasParams: !!params && Object.keys(params).length > 0,
})
const { toolId, params } = requestBody

if (!toolId) {
logger.error(`[${requestId}] Missing toolId in request`)
throw new Error('Missing toolId in request')
}

logger.info(`[${requestId}] Processing tool: ${toolId}`)

// Get tool
const tool = getTool(toolId)

if (!tool) {
logger.error(`[${requestId}] Tool not found: ${toolId}`)
throw new Error(`Tool not found: ${toolId}`)
}

// Validate the tool and its parameters
try {
validateToolRequest(toolId, tool, params)
} catch (error) {
logger.warn(`[${requestId}] Tool validation failed`, {
toolId,
error: error instanceof Error ? error.message : String(error),
} catch (validationError) {
logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, {
error: validationError instanceof Error ? validationError.message : String(validationError),
})

// Add timing information even to error responses
@@ -174,23 +206,18 @@ export async function POST(request: Request) {
const endTimeISO = endTime.toISOString()
const duration = endTime.getTime() - startTime.getTime()

return createErrorResponse(error, 400, {
return createErrorResponse(validationError, 400, {
startTime: startTimeISO,
endTime: endTimeISO,
duration,
})
}
if (!tool) {
logger.error(`[${requestId}] Tool not found`, { toolId })
throw new Error(`Tool not found: ${toolId}`)
}

// Use executeTool with skipProxy=true to prevent recursive proxy calls, and skipPostProcess=true to prevent duplicate post-processing
// Execute tool
const result = await executeTool(toolId, params, true, true)

if (!result.success) {
logger.warn(`[${requestId}] Tool execution failed`, {
toolId,
logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, {
error: result.error || 'Unknown error',
})

@@ -217,9 +244,13 @@ export async function POST(request: Request) {
}
// Fallback
throw new Error('Tool returned an error')
} catch (e) {
if (e instanceof Error) {
throw e
} catch (transformError) {
logger.error(`[${requestId}] Error transformation failed for ${toolId}`, {
error:
transformError instanceof Error ? transformError.message : String(transformError),
})
if (transformError instanceof Error) {
throw transformError
}
throw new Error('Tool returned an error')
}
@@ -246,12 +277,7 @@ export async function POST(request: Request) {
},
}

logger.info(`[${requestId}] Tool executed successfully`, {
toolId,
duration,
startTime: startTimeISO,
endTime: endTimeISO,
})
logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`)

// Return the response with CORS headers
return formatResponse(responseWithTimingData)
@@ -259,6 +285,7 @@ export async function POST(request: Request) {
logger.error(`[${requestId}] Proxy request failed`, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
name: error instanceof Error ? error.name : undefined,
})

// Add timing information even to error responses

@@ -17,6 +17,17 @@ describe('Scheduled Workflow Execution API Route', () => {

mockExecutionDependencies()

// Mock the normalized tables helper
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
blocks: sampleWorkflowState.blocks,
edges: sampleWorkflowState.edges || [],
loops: sampleWorkflowState.loops || {},
parallels: sampleWorkflowState.parallels || {},
isFromNormalizedTables: true,
}),
}))

vi.doMock('croner', () => ({
Cron: vi.fn().mockImplementation(() => ({
nextRun: vi.fn().mockReturnValue(new Date(Date.now() + 60000)), // Next run in 1 minute

@@ -14,13 +14,13 @@ import {
} from '@/lib/schedules/utils'
import { checkServerSideUsageLimits } from '@/lib/usage-monitor'
import { decryptSecret } from '@/lib/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { db } from '@/db'
import { environment, userStats, workflow, workflowSchedule } from '@/db/schema'
import { Executor } from '@/executor'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

// Add dynamic export to prevent caching
export const dynamic = 'force-dynamic'
@@ -149,8 +149,27 @@ export async function GET(req: NextRequest) {
continue
}

const state = workflowRecord.state as WorkflowState
const { blocks, edges, loops, parallels } = state
// Load workflow data from normalized tables (no fallback to deprecated state column)
logger.debug(
`[${requestId}] Loading workflow ${schedule.workflowId} from normalized tables`
)
const normalizedData = await loadWorkflowFromNormalizedTables(schedule.workflowId)

if (!normalizedData) {
logger.error(
`[${requestId}] No normalized data found for scheduled workflow ${schedule.workflowId}`
)
throw new Error(`Workflow data not found in normalized tables for ${schedule.workflowId}`)
}

// Use normalized data only
const blocks = normalizedData.blocks
const edges = normalizedData.edges
const loops = normalizedData.loops
const parallels = normalizedData.parallels
logger.info(
`[${requestId}] Loaded scheduled workflow ${schedule.workflowId} from normalized tables`
)

const mergedStates = mergeSubblockState(blocks)
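For context, the shape that loadWorkflowFromNormalizedTables resolves with, inferred from the call sites and test mocks in this diff (the authoritative type lives in @/lib/workflows/db-helpers):

```ts
interface NormalizedWorkflowData {
  blocks: Record<string, any>
  edges: any[]
  loops: Record<string, any>
  parallels: Record<string, any>
  isFromNormalizedTables: boolean
}
```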

@@ -405,9 +424,13 @@ export async function GET(req: NextRequest) {
.limit(1)

if (workflowRecord) {
const state = workflowRecord.state as WorkflowState
const { blocks } = state
nextRunAt = calculateNextRunTime(schedule, blocks)
const normalizedData = await loadWorkflowFromNormalizedTables(schedule.workflowId)

if (!normalizedData) {
nextRunAt = new Date(now.getTime() + 24 * 60 * 60 * 1000)
} else {
nextRunAt = calculateNextRunTime(schedule, normalizedData.blocks)
}
} else {
nextRunAt = new Date(now.getTime() + 24 * 60 * 60 * 1000)
}

@@ -14,6 +14,7 @@ const SettingsSchema = z.object({
debugMode: z.boolean().optional(),
autoConnect: z.boolean().optional(),
autoFillEnvVars: z.boolean().optional(),
autoPan: z.boolean().optional(),
telemetryEnabled: z.boolean().optional(),
telemetryNotifiedUser: z.boolean().optional(),
emailPreferences: z
@@ -32,6 +33,7 @@ const defaultSettings = {
debugMode: false,
autoConnect: true,
autoFillEnvVars: true,
autoPan: true,
telemetryEnabled: true,
telemetryNotifiedUser: false,
emailPreferences: {},
@@ -65,6 +67,7 @@ export async function GET() {
debugMode: userSettings.debugMode,
autoConnect: userSettings.autoConnect,
autoFillEnvVars: userSettings.autoFillEnvVars,
autoPan: userSettings.autoPan,
telemetryEnabled: userSettings.telemetryEnabled,
telemetryNotifiedUser: userSettings.telemetryNotifiedUser,
emailPreferences: userSettings.emailPreferences ?? {},

@@ -5,11 +5,7 @@ import { NextRequest } from 'next/server'
 * @vitest-environment node
 */
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockExecutionDependencies,
sampleWorkflowState,
} from '@/app/api/__test-utils__/utils'
import { createMockRequest, mockExecutionDependencies } from '@/app/api/__test-utils__/utils'

// Define mock functions at the top level to be used in mocks
const hasProcessedMessageMock = vi.fn().mockResolvedValue(false)
@@ -148,10 +144,18 @@ describe('Webhook Trigger API Route', () => {
vi.resetAllMocks()
vi.clearAllTimers()

// Mock all dependencies
mockExecutionDependencies()

// Reset mock behaviors to default for each test
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
blocks: {},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}),
}))

hasProcessedMessageMock.mockResolvedValue(false)
markMessageAsProcessedMock.mockResolvedValue(true)
acquireLockMock.mockResolvedValue(true)
@@ -159,12 +163,10 @@ describe('Webhook Trigger API Route', () => {
processGenericDeduplicationMock.mockResolvedValue(null)
processWebhookMock.mockResolvedValue(new Response('Webhook processed', { status: 200 }))

// Restore original crypto.randomUUID if it was mocked
if ((global as any).crypto?.randomUUID) {
vi.spyOn(crypto, 'randomUUID').mockRestore()
}

// Mock crypto.randomUUID to return predictable values
vi.spyOn(crypto, 'randomUUID').mockReturnValue('mock-uuid-12345')
})

@@ -263,7 +265,6 @@ describe('Webhook Trigger API Route', () => {
workflow: {
id: 'workflow-id',
userId: 'user-id',
state: sampleWorkflowState,
},
},
])
@@ -355,7 +356,6 @@ describe('Webhook Trigger API Route', () => {
workflow: {
id: 'workflow-id',
userId: 'user-id',
state: sampleWorkflowState,
},
},
])
@@ -409,7 +409,6 @@ describe('Webhook Trigger API Route', () => {
workflow: {
id: 'workflow-id',
userId: 'user-id',
state: sampleWorkflowState,
},
},
])
@@ -482,7 +481,6 @@ describe('Webhook Trigger API Route', () => {
workflow: {
id: 'workflow-id',
userId: 'user-id',
state: sampleWorkflowState,
},
},
])
@@ -553,7 +551,6 @@ describe('Webhook Trigger API Route', () => {
workflow: {
id: 'workflow-id',
userId: 'user-id',
state: sampleWorkflowState,
},
},
])

@@ -12,6 +12,7 @@ import {
processWebhook,
processWhatsAppDeduplication,
} from '@/lib/webhooks/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
import { webhook, workflow } from '@/db/schema'

@@ -187,6 +188,24 @@ export async function POST(
foundWebhook = webhooks[0].webhook
foundWorkflow = webhooks[0].workflow

const normalizedData = await loadWorkflowFromNormalizedTables(foundWorkflow.id)

if (!normalizedData) {
logger.error(`[${requestId}] No normalized data found for webhook workflow ${foundWorkflow.id}`)
return new NextResponse('Workflow data not found in normalized tables', { status: 500 })
}

// Construct state from normalized data only (execution-focused, no frontend state fields)
foundWorkflow.state = {
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
lastSaved: Date.now(),
isDeployed: foundWorkflow.isDeployed || false,
deployedAt: foundWorkflow.deployedAt,
}

// Special handling for Telegram webhooks to work around middleware User-Agent checks
if (foundWebhook.provider === 'telegram') {
// Log detailed information about the request for debugging

@@ -31,6 +31,48 @@ describe('Workflow Deployment API Route', () => {
}),
}))

// Mock serializer
vi.doMock('@/serializer', () => ({
serializeWorkflow: vi.fn().mockReturnValue({
version: '1.0',
blocks: [
{
id: 'block-1',
metadata: { id: 'starter', name: 'Start' },
position: { x: 100, y: 100 },
config: { tool: 'starter', params: {} },
inputs: {},
outputs: {},
enabled: true,
},
],
connections: [],
loops: {},
parallels: {},
}),
}))

vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
blocks: {
'block-1': {
id: 'block-1',
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
enabled: true,
subBlocks: {},
outputs: {},
data: {},
},
},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}),
}))

vi.doMock('../../middleware', () => ({
validateWorkflowAccess: vi.fn().mockResolvedValue({
workflow: {
@@ -54,6 +96,80 @@ describe('Workflow Deployment API Route', () => {
})
}),
}))

// Mock the database schema module
vi.doMock('@/db/schema', () => ({
workflow: {},
apiKey: {},
workflowBlocks: {},
workflowEdges: {},
workflowSubflows: {},
}))

// Mock drizzle-orm operators
vi.doMock('drizzle-orm', () => ({
eq: vi.fn((field, value) => ({ field, value, type: 'eq' })),
and: vi.fn((...conditions) => ({ conditions, type: 'and' })),
}))

// Mock the database module with proper chainable query builder
let selectCallCount = 0
vi.doMock('@/db', () => ({
db: {
select: vi.fn().mockImplementation(() => {
selectCallCount++
return {
from: vi.fn().mockImplementation(() => ({
where: vi.fn().mockImplementation(() => ({
limit: vi.fn().mockImplementation(() => {
// First call: workflow lookup (should return workflow)
if (selectCallCount === 1) {
return Promise.resolve([{ userId: 'user-id', id: 'workflow-id' }])
}
// Second call: blocks lookup
if (selectCallCount === 2) {
return Promise.resolve([
{
id: 'block-1',
type: 'starter',
name: 'Start',
positionX: '100',
positionY: '100',
enabled: true,
subBlocks: {},
data: {},
},
])
}
// Third call: edges lookup
if (selectCallCount === 3) {
return Promise.resolve([])
}
// Fourth call: subflows lookup
if (selectCallCount === 4) {
return Promise.resolve([])
}
// Fifth call: API key lookup (should return empty for new key test)
if (selectCallCount === 5) {
return Promise.resolve([])
}
// Default: empty array
return Promise.resolve([])
}),
})),
})),
}
}),
insert: vi.fn().mockImplementation(() => ({
values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
})),
update: vi.fn().mockImplementation(() => ({
set: vi.fn().mockImplementation(() => ({
where: vi.fn().mockResolvedValue([]),
})),
})),
},
}))
})
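The chainable mock above is order-coupled; each successive db.select() resolves a different fixture. Summarized as a sketch:

```ts
// selectCallCount → fixture
// 1 → workflow row      [{ userId: 'user-id', id: 'workflow-id' }]
// 2 → workflow blocks   [{ id: 'block-1', type: 'starter', ... }]
// 3 → edges             []
// 4 → subflows          []
// 5 → API key lookup    [] (forces the "create new key" path)
```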

afterEach(() => {
@@ -74,6 +190,7 @@ describe('Workflow Deployment API Route', () => {
isDeployed: false,
deployedAt: null,
userId: 'user-id',
deployedState: null,
},
]),
}),
@@ -104,16 +221,7 @@ describe('Workflow Deployment API Route', () => {
 * This should generate a new API key
 */
it('should create new API key when deploying workflow for user with no API key', async () => {
const mockInsert = vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue(undefined),
})

const mockUpdate = vi.fn().mockReturnValue({
set: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([{ id: 'workflow-id' }]),
}),
})

// Override the global mock for this specific test
vi.doMock('@/db', () => ({
db: {
select: vi
@@ -121,15 +229,10 @@ describe('Workflow Deployment API Route', () => {
.mockReturnValueOnce({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
limit: vi.fn().mockResolvedValue([
{
userId: 'user-id',
},
]),
limit: vi.fn().mockResolvedValue([{ userId: 'user-id', id: 'workflow-id' }]),
}),
}),
})
// Mock normalized table queries (blocks, edges, subflows)
.mockReturnValueOnce({
from: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([
@@ -163,8 +266,14 @@ describe('Workflow Deployment API Route', () => {
}),
}),
}),
insert: mockInsert,
update: mockUpdate,
insert: vi.fn().mockImplementation(() => ({
values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
})),
update: vi.fn().mockImplementation(() => ({
set: vi.fn().mockImplementation(() => ({
where: vi.fn().mockResolvedValue([]),
})),
})),
},
}))

@@ -183,9 +292,6 @@ describe('Workflow Deployment API Route', () => {
expect(data).toHaveProperty('apiKey', 'sim_testkeygenerated12345')
expect(data).toHaveProperty('isDeployed', true)
expect(data).toHaveProperty('deployedAt')

expect(mockInsert).toHaveBeenCalled()
expect(mockUpdate).toHaveBeenCalled()
})

/**
@@ -193,14 +299,7 @@ describe('Workflow Deployment API Route', () => {
 * This should use the existing API key
 */
it('should use existing API key when deploying workflow', async () => {
const mockInsert = vi.fn()

const mockUpdate = vi.fn().mockReturnValue({
set: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([{ id: 'workflow-id' }]),
}),
})

// Override the global mock for this specific test
vi.doMock('@/db', () => ({
db: {
select: vi
@@ -208,15 +307,10 @@ describe('Workflow Deployment API Route', () => {
.mockReturnValueOnce({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
limit: vi.fn().mockResolvedValue([
{
userId: 'user-id',
},
]),
limit: vi.fn().mockResolvedValue([{ userId: 'user-id', id: 'workflow-id' }]),
}),
}),
})
// Mock normalized table queries (blocks, edges, subflows)
.mockReturnValueOnce({
from: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([
@@ -246,16 +340,18 @@ describe('Workflow Deployment API Route', () => {
.mockReturnValueOnce({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
limit: vi.fn().mockResolvedValue([
{
key: 'sim_existingtestapikey12345',
},
]), // Existing API key
limit: vi.fn().mockResolvedValue([{ key: 'sim_existingtestapikey12345' }]), // Existing API key
}),
}),
}),
insert: mockInsert,
update: mockUpdate,
insert: vi.fn().mockImplementation(() => ({
values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
})),
update: vi.fn().mockImplementation(() => ({
set: vi.fn().mockImplementation(() => ({
where: vi.fn().mockResolvedValue([]),
})),
})),
},
}))

@@ -273,9 +369,6 @@ describe('Workflow Deployment API Route', () => {

expect(data).toHaveProperty('apiKey', 'sim_existingtestapikey12345')
expect(data).toHaveProperty('isDeployed', true)

expect(mockInsert).not.toHaveBeenCalled()
expect(mockUpdate).toHaveBeenCalled()
})

/**

@@ -32,7 +32,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
isDeployed: workflow.isDeployed,
deployedAt: workflow.deployedAt,
userId: workflow.userId,
state: workflow.state,
deployedState: workflow.deployedState,
})
.from(workflow)
@@ -93,11 +92,25 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
// Check if the workflow has meaningful changes that would require redeployment
let needsRedeployment = false
if (workflowData.deployedState) {
const { hasWorkflowChanged } = await import('@/lib/workflows/utils')
needsRedeployment = hasWorkflowChanged(
workflowData.state as any,
workflowData.deployedState as any
)
// Load current state from normalized tables for comparison
const { loadWorkflowFromNormalizedTables } = await import('@/lib/workflows/db-helpers')
const normalizedData = await loadWorkflowFromNormalizedTables(id)

if (normalizedData) {
// Convert normalized data to WorkflowState format for comparison
const currentState = {
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
}

const { hasWorkflowChanged } = await import('@/lib/workflows/utils')
needsRedeployment = hasWorkflowChanged(
currentState as any,
workflowData.deployedState as any
)
}
}
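In effect, the block above now compares the live graph rebuilt from normalized tables against the deploy-time snapshot; hasWorkflowChanged is assumed to deep-compare the two states:

```ts
// Condensed: needsRedeployment is true only when a deployed snapshot exists,
// normalized data loads, and the two states differ:
// needsRedeployment = deployedState && normalizedData
//   && hasWorkflowChanged(stateFromNormalizedTables, deployedState)
```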

logger.info(`[${requestId}] Successfully retrieved deployment info: ${id}`)
@@ -130,7 +143,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const workflowData = await db
.select({
userId: workflow.userId,
state: workflow.state,
})
.from(workflow)
.where(eq(workflow.id, id))

@@ -24,45 +24,54 @@ describe('Workflow Execution API Route', () => {
beforeEach(() => {
vi.resetModules()

// Mock workflow middleware
vi.doMock('@/app/api/workflows/middleware', () => ({
validateWorkflowAccess: vi.fn().mockResolvedValue({
workflow: {
id: 'workflow-id',
userId: 'user-id',
state: {
blocks: {
'starter-id': {
id: 'starter-id',
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
enabled: true,
},
'agent-id': {
id: 'agent-id',
type: 'agent',
name: 'Agent',
position: { x: 300, y: 100 },
enabled: true,
},
},
edges: [
{
id: 'edge-1',
source: 'starter-id',
target: 'agent-id',
sourceHandle: 'source',
targetHandle: 'target',
},
],
loops: {},
},
},
}),
}))

// Reset execute mock to track calls
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
blocks: {
'starter-id': {
id: 'starter-id',
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
enabled: true,
subBlocks: {},
outputs: {},
data: {},
},
'agent-id': {
id: 'agent-id',
type: 'agent',
name: 'Agent',
position: { x: 300, y: 100 },
enabled: true,
subBlocks: {},
outputs: {},
data: {},
},
},
edges: [
{
id: 'edge-1',
source: 'starter-id',
target: 'agent-id',
sourceHandle: 'source',
targetHandle: 'target',
},
],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}),
}))

executeMock = vi.fn().mockResolvedValue({
success: true,
output: {
@@ -76,14 +85,12 @@ describe('Workflow Execution API Route', () => {
},
})

// Mock executor
vi.doMock('@/executor', () => ({
Executor: vi.fn().mockImplementation(() => ({
execute: executeMock,
})),
}))

// Mock environment variables
vi.doMock('@/lib/utils', () => ({
decryptSecret: vi.fn().mockResolvedValue({
decrypted: 'decrypted-secret-value',
@@ -92,13 +99,11 @@ describe('Workflow Execution API Route', () => {
getRotatingApiKey: vi.fn().mockReturnValue('rotated-api-key'),
}))

// Mock logger
vi.doMock('@/lib/logs/execution-logger', () => ({
persistExecutionLogs: vi.fn().mockResolvedValue(undefined),
persistExecutionError: vi.fn().mockResolvedValue(undefined),
}))

// Mock trace spans
vi.doMock('@/lib/logs/trace-spans', () => ({
buildTraceSpans: vi.fn().mockReturnValue({
traceSpans: [],
@@ -106,13 +111,11 @@ describe('Workflow Execution API Route', () => {
}),
}))

// Mock workflow run counts
vi.doMock('@/lib/workflows/utils', () => ({
updateWorkflowRunCounts: vi.fn().mockResolvedValue(undefined),
workflowHasResponseBlock: vi.fn().mockReturnValue(false),
}))

// Mock database
vi.doMock('@/db', () => {
const mockDb = {
select: vi.fn().mockImplementation(() => ({
@@ -140,7 +143,6 @@ describe('Workflow Execution API Route', () => {
return { db: mockDb }
})

// Mock Serializer
vi.doMock('@/serializer', () => ({
Serializer: vi.fn().mockImplementation(() => ({
serializeWorkflow: vi.fn().mockReturnValue({
@@ -162,49 +164,37 @@ describe('Workflow Execution API Route', () => {
 * Simulates direct execution with URL-based parameters
 */
it('should execute workflow with GET request successfully', async () => {
// Create a mock request with query parameters
const req = createMockRequest('GET')

// Create params similar to what Next.js would provide
const params = Promise.resolve({ id: 'workflow-id' })

// Import the handler after mocks are set up
const { GET } = await import('./route')

// Call the handler
const response = await GET(req, { params })

// Get the actual status code - in some implementations this might not be 200
// Based on the current implementation, validate the response exists
expect(response).toBeDefined()

// Try to parse the response body
let data
try {
data = await response.json()
} catch (e) {
// If we can't parse JSON, the response may not be what we expect
console.error('Response could not be parsed as JSON:', await response.text())
throw e
}

// If status is 200, verify success structure
if (response.status === 200) {
expect(data).toHaveProperty('success', true)
expect(data).toHaveProperty('output')
expect(data.output).toHaveProperty('response')
}

// Verify middleware was called
const validateWorkflowAccess = (await import('@/app/api/workflows/middleware'))
.validateWorkflowAccess
expect(validateWorkflowAccess).toHaveBeenCalledWith(expect.any(Object), 'workflow-id')

// Verify executor was initialized
const Executor = (await import('@/executor')).Executor
expect(Executor).toHaveBeenCalled()

// Verify execute was called with undefined input (GET requests don't have body)
expect(executeMock).toHaveBeenCalledWith('workflow-id')
})

@@ -213,67 +203,50 @@ describe('Workflow Execution API Route', () => {
 * Simulates execution with a JSON body containing parameters
 */
it('should execute workflow with POST request successfully', async () => {
// Create request body with custom inputs
const requestBody = {
inputs: {
message: 'Test input message',
},
}

// Create a mock request with the request body
const req = createMockRequest('POST', requestBody)

// Create params similar to what Next.js would provide
const params = Promise.resolve({ id: 'workflow-id' })

// Import the handler after mocks are set up
const { POST } = await import('./route')

// Call the handler
const response = await POST(req, { params })

// Ensure response exists
expect(response).toBeDefined()

// Try to parse the response body
let data
try {
data = await response.json()
} catch (e) {
// If we can't parse JSON, the response may not be what we expect
console.error('Response could not be parsed as JSON:', await response.text())
throw e
}

// If status is 200, verify success structure
if (response.status === 200) {
expect(data).toHaveProperty('success', true)
expect(data).toHaveProperty('output')
expect(data.output).toHaveProperty('response')
}

// Verify middleware was called
const validateWorkflowAccess = (await import('@/app/api/workflows/middleware'))
.validateWorkflowAccess
expect(validateWorkflowAccess).toHaveBeenCalledWith(expect.any(Object), 'workflow-id')

// Verify executor was constructed
const Executor = (await import('@/executor')).Executor
expect(Executor).toHaveBeenCalled()

// Verify execute was called with the input body
expect(executeMock).toHaveBeenCalledWith('workflow-id')

// Updated expectations to match actual implementation
// The structure should match: serializedWorkflow, processedBlockStates, decryptedEnvVars, processedInput, workflowVariables
expect(Executor).toHaveBeenCalledWith(
expect.anything(), // serializedWorkflow
expect.anything(), // processedBlockStates
expect.anything(), // decryptedEnvVars
expect.objectContaining({
// processedInput
input: requestBody,
}),
requestBody, // processedInput (direct input, not wrapped)
expect.anything() // workflowVariables
)
})
@@ -282,7 +255,6 @@ describe('Workflow Execution API Route', () => {
 * Test POST execution with structured input matching the input format
 */
it('should execute workflow with structured input matching the input format', async () => {
// Create structured input matching the expected input format
const structuredInput = {
firstName: 'John',
age: 30,
@@ -291,36 +263,26 @@ describe('Workflow Execution API Route', () => {
tags: ['test', 'api'],
}

// Create a mock request with the structured input
const req = createMockRequest('POST', structuredInput)

// Create params similar to what Next.js would provide
const params = Promise.resolve({ id: 'workflow-id' })

// Import the handler after mocks are set up
const { POST } = await import('./route')

// Call the handler
const response = await POST(req, { params })

// Ensure response exists and is successful
expect(response).toBeDefined()
expect(response.status).toBe(200)

// Parse the response body
const data = await response.json()
expect(data).toHaveProperty('success', true)

// Verify the executor was constructed with the structured input - updated to match implementation
const Executor = (await import('@/executor')).Executor
expect(Executor).toHaveBeenCalledWith(
expect.anything(), // serializedWorkflow
expect.anything(), // processedBlockStates
expect.anything(), // decryptedEnvVars
expect.objectContaining({
// processedInput
input: structuredInput,
}),
structuredInput, // processedInput (direct input, not wrapped)
expect.anything() // workflowVariables
)
})
@@ -478,39 +440,51 @@ describe('Workflow Execution API Route', () => {
workflow: {
id: 'workflow-with-vars-id',
userId: 'user-id',
state: {
blocks: {
'starter-id': {
id: 'starter-id',
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
enabled: true,
},
'agent-id': {
id: 'agent-id',
type: 'agent',
name: 'Agent',
position: { x: 300, y: 100 },
enabled: true,
},
},
edges: [
{
id: 'edge-1',
source: 'starter-id',
target: 'agent-id',
sourceHandle: 'source',
targetHandle: 'target',
},
],
loops: {},
},
variables: workflowVariables,
},
}),
}))

// Mock normalized tables helper for this specific test
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
blocks: {
'starter-id': {
id: 'starter-id',
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
enabled: true,
subBlocks: {},
outputs: {},
data: {},
},
'agent-id': {
id: 'agent-id',
type: 'agent',
name: 'Agent',
position: { x: 300, y: 100 },
enabled: true,
subBlocks: {},
outputs: {},
data: {},
},
},
edges: [
{
id: 'edge-1',
source: 'starter-id',
target: 'agent-id',
sourceHandle: 'source',
targetHandle: 'target',
},
],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}),
}))

// Create a constructor mock to capture the arguments
const executorConstructorMock = vi.fn().mockImplementation(() => ({
execute: vi.fn().mockResolvedValue({

@@ -7,6 +7,7 @@ import { persistExecutionError, persistExecutionLogs } from '@/lib/logs/executio
import { buildTraceSpans } from '@/lib/logs/trace-spans'
import { checkServerSideUsageLimits } from '@/lib/usage-monitor'
import { decryptSecret } from '@/lib/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import {
createHttpResponseFromBlock,
updateWorkflowRunCounts,
@@ -76,37 +77,45 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
input ? JSON.stringify(input, null, 2) : 'No input provided'
)

// Validate and structure input for maximum compatibility
let processedInput = input
if (input && typeof input === 'object') {
// Ensure input is properly structured for the starter block
if (input.input === undefined) {
// If input is not already nested, structure it properly
processedInput = { input: input }
logger.info(
`[${requestId}] Restructured input for workflow:`,
JSON.stringify(processedInput, null, 2)
)
}
}
// Use input directly for API workflows
const processedInput = input
logger.info(
`[${requestId}] Using input directly for workflow:`,
JSON.stringify(processedInput, null, 2)
)

try {
runningExecutions.add(executionKey)
logger.info(`[${requestId}] Starting workflow execution: ${workflowId}`)

// Use the deployed state if available, otherwise fall back to current state
const workflowState = workflow.deployedState || workflow.state
// Load workflow data from normalized tables
logger.debug(`[${requestId}] Loading workflow ${workflowId} from normalized tables`)
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

if (!workflow.deployedState) {
logger.warn(
`[${requestId}] No deployed state found for workflow: ${workflowId}, using current state`
)
let blocks: Record<string, any>
let edges: any[]
let loops: Record<string, any>
let parallels: Record<string, any>

if (normalizedData) {
// Use normalized data as primary source
;({ blocks, edges, loops, parallels } = normalizedData)
logger.info(`[${requestId}] Using normalized tables for workflow execution: ${workflowId}`)
} else {
logger.info(`[${requestId}] Using deployed state for workflow execution: ${workflowId}`)
}
// Fallback to deployed state if available (for legacy workflows)
logger.warn(
`[${requestId}] No normalized data found, falling back to deployed state for workflow: ${workflowId}`
)

const state = workflowState as WorkflowState
const { blocks, edges, loops, parallels } = state
if (!workflow.deployedState) {
throw new Error(
`Workflow ${workflowId} has no deployed state and no normalized data available`
)
}

const deployedState = workflow.deployedState as WorkflowState
;({ blocks, edges, loops, parallels } = deployedState)
}

// Use the same execution flow as in scheduled executions
const mergedStates = mergeSubblockState(blocks)
@@ -365,13 +374,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] No request body provided`)
}

// Don't double-nest the input if it's already structured
// Pass the raw body directly as input for API workflows
const hasContent = Object.keys(body).length > 0
const input = hasContent ? { input: body } : {}
const input = hasContent ? body : {}

logger.info(`[${requestId}] Input passed to workflow:`, JSON.stringify(input, null, 2))

// Execute workflow with the structured input
// Execute workflow with the raw input
const result = await executeWorkflow(validation.workflow, requestId, input)

// Check if the workflow execution contains a response block output

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { verifyInternalToken } from '@/lib/auth/internal'
import { createLogger } from '@/lib/logs/console-logger'
import { getUserEntityPermissions, hasAdminPermission } from '@/lib/permissions/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
@@ -28,14 +29,29 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
const { id: workflowId } = await params

try {
// Get the session
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
// Check for internal JWT token for server-side calls
const authHeader = request.headers.get('authorization')
let isInternalCall = false

if (authHeader?.startsWith('Bearer ')) {
const token = authHeader.split(' ')[1]
isInternalCall = await verifyInternalToken(token)
}

const userId = session.user.id
let userId: string | null = null

if (isInternalCall) {
// For internal calls, we'll skip user-specific access checks
logger.info(`[${requestId}] Internal API call for workflow ${workflowId}`)
} else {
// Get the session for regular user calls
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
userId = session.user.id
}

// Fetch the workflow
const workflowData = await db
@@ -52,26 +68,31 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
// Check if user has access to this workflow
let hasAccess = false

// Case 1: User owns the workflow
if (workflowData.userId === userId) {
if (isInternalCall) {
// Internal calls have full access
hasAccess = true
}

// Case 2: Workflow belongs to a workspace the user has permissions for
if (!hasAccess && workflowData.workspaceId) {
const userPermission = await getUserEntityPermissions(
userId,
'workspace',
workflowData.workspaceId
)
if (userPermission !== null) {
} else {
// Case 1: User owns the workflow
if (workflowData.userId === userId) {
hasAccess = true
}
}

if (!hasAccess) {
logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
// Case 2: Workflow belongs to a workspace the user has permissions for
if (!hasAccess && workflowData.workspaceId && userId) {
const userPermission = await getUserEntityPermissions(
userId,
'workspace',
workflowData.workspaceId
)
if (userPermission !== null) {
hasAccess = true
}
}

if (!hasAccess) {
logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
}

// Try to load from normalized tables first

@@ -104,7 +104,7 @@ async function createWorkspace(userId: string, name: string) {
updatedAt: now,
})

// Create "Workflow 1" for the workspace with start block
// Create initial workflow for the workspace with start block
const starterId = crypto.randomUUID()
const initialState = {
blocks: {
@@ -170,7 +170,7 @@ async function createWorkspace(userId: string, name: string) {
userId,
workspaceId,
folderId: null,
name: 'Workflow 1',
name: 'default-agent',
description: 'Your first workflow - start building here!',
state: initialState,
color: '#3972F6',

@@ -297,7 +297,7 @@ export default function ChatClient({ subdomain }: { subdomain: string }) {
try {
// Send structured payload to maintain chat context
const payload = {
message:
input:
typeof userMessage.content === 'string'
? userMessage.content
: JSON.stringify(userMessage.content),

@@ -1,7 +1,7 @@
'use client'

import { useEffect, useState } from 'react'
import { AlertCircle, Loader2, X } from 'lucide-react'
import { AlertCircle, ChevronDown, ChevronUp, Loader2, X } from 'lucide-react'
import {
AlertDialog,
AlertDialogAction,
@@ -16,6 +16,7 @@ import { Button } from '@/components/ui/button'
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
import { Label } from '@/components/ui/label'
import { Textarea } from '@/components/ui/textarea'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console-logger'
import type { ChunkData, DocumentData } from '@/stores/knowledge/store'

@@ -28,6 +29,12 @@ interface EditChunkModalProps {
isOpen: boolean
onClose: () => void
onChunkUpdate?: (updatedChunk: ChunkData) => void
// New props for navigation
allChunks?: ChunkData[]
currentPage?: number
totalPages?: number
onNavigateToChunk?: (chunk: ChunkData) => void
onNavigateToPage?: (page: number, selectChunk: 'first' | 'last') => Promise<void>
}

export function EditChunkModal({
@@ -37,11 +44,18 @@ export function EditChunkModal({
isOpen,
onClose,
onChunkUpdate,
allChunks = [],
currentPage = 1,
totalPages = 1,
onNavigateToChunk,
onNavigateToPage,
}: EditChunkModalProps) {
const [editedContent, setEditedContent] = useState(chunk?.content || '')
const [isSaving, setIsSaving] = useState(false)
const [isNavigating, setIsNavigating] = useState(false)
const [error, setError] = useState<string | null>(null)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)

// Check if there are unsaved changes
const hasUnsavedChanges = editedContent !== (chunk?.content || '')
@@ -53,6 +67,13 @@ export function EditChunkModal({
}
}, [chunk?.id, chunk?.content])

// Find current chunk index in the current page
const currentChunkIndex = chunk ? allChunks.findIndex((c) => c.id === chunk.id) : -1

// Calculate navigation availability
const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages

const handleSaveContent = async () => {
if (!chunk || !document) return

@@ -82,7 +103,6 @@ export function EditChunkModal({

if (result.success && onChunkUpdate) {
onChunkUpdate(result.data)
onClose()
}
} catch (err) {
logger.error('Error updating chunk:', err)
@@ -92,8 +112,51 @@ export function EditChunkModal({
}
}

const navigateToChunk = async (direction: 'prev' | 'next') => {
if (!chunk || isNavigating) return

try {
setIsNavigating(true)

if (direction === 'prev') {
if (currentChunkIndex > 0) {
// Navigate to previous chunk in current page
const prevChunk = allChunks[currentChunkIndex - 1]
onNavigateToChunk?.(prevChunk)
} else if (currentPage > 1) {
// Load previous page and navigate to last chunk
await onNavigateToPage?.(currentPage - 1, 'last')
}
} else {
if (currentChunkIndex < allChunks.length - 1) {
// Navigate to next chunk in current page
const nextChunk = allChunks[currentChunkIndex + 1]
onNavigateToChunk?.(nextChunk)
} else if (currentPage < totalPages) {
// Load next page and navigate to first chunk
await onNavigateToPage?.(currentPage + 1, 'first')
}
}
} catch (err) {
logger.error(`Error navigating ${direction}:`, err)
setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
} finally {
setIsNavigating(false)
}
}
||||
|
||||
const handleNavigate = (direction: 'prev' | 'next') => {
|
||||
if (hasUnsavedChanges) {
|
||||
setPendingNavigation(() => () => navigateToChunk(direction))
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
void navigateToChunk(direction)
|
||||
}
|
||||
}
|
||||
|
||||
const handleCloseAttempt = () => {
|
||||
if (hasUnsavedChanges && !isSaving) {
|
||||
setPendingNavigation(null)
|
||||
setShowUnsavedChangesAlert(true)
|
||||
} else {
|
||||
onClose()
|
||||
@@ -102,7 +165,12 @@ export function EditChunkModal({
|
||||
|
||||
const handleConfirmDiscard = () => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
onClose()
|
||||
if (pendingNavigation) {
|
||||
void pendingNavigation()
|
||||
setPendingNavigation(null)
|
||||
} else {
|
||||
onClose()
|
||||
}
|
||||
}
|
||||
|
||||
const isFormValid = editedContent.trim().length > 0 && editedContent.trim().length <= 10000
|
||||
@@ -118,7 +186,59 @@ export function EditChunkModal({
|
||||
>
|
||||
<DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
<div className='flex items-center gap-3'>
|
||||
<DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
|
||||
|
||||
{/* Navigation Controls */}
|
||||
<div className='flex items-center gap-1'>
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('prev')}
|
||||
disabled={!canNavigatePrev || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronUp className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Previous chunk{' '}
|
||||
{currentPage > 1 && currentChunkIndex === 0 ? '(previous page)' : ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip>
|
||||
<TooltipTrigger
|
||||
asChild
|
||||
onFocus={(e) => e.preventDefault()}
|
||||
onBlur={(e) => e.preventDefault()}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => handleNavigate('next')}
|
||||
disabled={!canNavigateNext || isNavigating || isSaving}
|
||||
className='h-8 w-8 p-0'
|
||||
>
|
||||
<ChevronDown className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='bottom'>
|
||||
Next chunk{' '}
|
||||
{currentPage < totalPages && currentChunkIndex === allChunks.length - 1
|
||||
? '(next page)'
|
||||
: ''}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='icon'
|
||||
@@ -142,7 +262,7 @@ export function EditChunkModal({
|
||||
{document?.filename || 'Unknown Document'}
|
||||
</p>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
Editing chunk #{chunk.chunkIndex}
|
||||
Editing chunk #{chunk.chunkIndex} • Page {currentPage} of {totalPages}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -167,7 +287,7 @@ export function EditChunkModal({
|
||||
onChange={(e) => setEditedContent(e.target.value)}
|
||||
placeholder='Enter chunk content...'
|
||||
className='flex-1 resize-none'
|
||||
disabled={isSaving}
|
||||
disabled={isSaving || isNavigating}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
@@ -176,12 +296,16 @@ export function EditChunkModal({
|
||||
{/* Footer */}
|
||||
<div className='mt-auto border-t px-6 pt-4 pb-6'>
|
||||
<div className='flex justify-between'>
|
||||
<Button variant='outline' onClick={handleCloseAttempt} disabled={isSaving}>
|
||||
<Button
|
||||
variant='outline'
|
||||
onClick={handleCloseAttempt}
|
||||
disabled={isSaving || isNavigating}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleSaveContent}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges}
|
||||
disabled={!isFormValid || isSaving || !hasUnsavedChanges || isNavigating}
|
||||
className='bg-[#701FFC] font-[480] text-primary-foreground shadow-[0_0_0_0_#701FFC] transition-all duration-200 hover:bg-[#6518E6] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
||||
>
|
||||
{isSaving ? (
|
||||
@@ -205,12 +329,19 @@ export function EditChunkModal({
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Unsaved Changes</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
You have unsaved changes to this chunk content. Are you sure you want to discard your
|
||||
changes and close the editor?
|
||||
You have unsaved changes to this chunk content.
|
||||
{pendingNavigation
|
||||
? ' Do you want to discard your changes and navigate to the next chunk?'
|
||||
: ' Are you sure you want to discard your changes and close the editor?'}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel onClick={() => setShowUnsavedChangesAlert(false)}>
|
||||
<AlertDialogCancel
|
||||
onClick={() => {
|
||||
setShowUnsavedChangesAlert(false)
|
||||
setPendingNavigation(null)
|
||||
}}
|
||||
>
|
||||
Keep Editing
|
||||
</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
|
||||
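One detail worth calling out in `handleNavigate` above: storing a callback in React state requires the double arrow, because a bare function passed to a state setter is treated as a functional update and invoked immediately. A minimal sketch of the pattern in isolation (hook name invented):

```typescript
import { useState } from 'react'

// Sketch of the pending-action pattern used by handleNavigate/handleConfirmDiscard.
// React state setters treat a function argument as an updater (prev => next),
// so storing a callback requires wrapping it: the outer arrow is the updater,
// the inner arrow is the value that ends up in state.
export function usePendingAction() {
  const [pending, setPending] = useState<(() => void) | null>(null)

  const defer = (action: () => void) => setPending(() => action)
  const runPending = () => {
    pending?.()
    setPending(null)
  }
  const cancelPending = () => setPending(null)

  return { hasPending: pending !== null, defer, runPending, cancelPending }
}
```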
@@ -767,6 +767,30 @@ export function Document({
            updateChunk(updatedChunk.id, updatedChunk)
            setSelectedChunk(updatedChunk)
          }}
          allChunks={chunks}
          currentPage={currentPage}
          totalPages={totalPages}
          onNavigateToChunk={(chunk: ChunkData) => {
            setSelectedChunk(chunk)
          }}
          onNavigateToPage={async (page: number, selectChunk: 'first' | 'last') => {
            await goToPage(page)

            const checkAndSelectChunk = () => {
              if (!isLoadingChunks && chunks.length > 0) {
                if (selectChunk === 'first') {
                  setSelectedChunk(chunks[0])
                } else {
                  setSelectedChunk(chunks[chunks.length - 1])
                }
              } else {
                // Retry after a short delay if chunks aren't loaded yet
                setTimeout(checkAndSelectChunk, 100)
              }
            }

            setTimeout(checkAndSelectChunk, 0)
          }}
        />

        {/* Create Chunk Modal */}
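The `checkAndSelectChunk` callback above polls on a 100 ms timer because `goToPage` resolves before the chunk store finishes loading. A generalized sketch of that retry shape, with one assumption made explicit in the comment:

```typescript
// Generalized form of the retry used above. This sketch returns a cancel
// function, which the inline version does not have — there, a poll started
// just before the modal unmounts will still fire its timeout.
function pollUntil(ready: () => boolean, action: () => void, intervalMs = 100): () => void {
  let cancelled = false
  const tick = () => {
    if (cancelled) return
    if (ready()) {
      action()
    } else {
      setTimeout(tick, intervalMs)
    }
  }
  setTimeout(tick, 0)
  return () => {
    cancelled = true
  }
}
```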
@@ -36,16 +36,11 @@ import { useKnowledgeBase, useKnowledgeBaseDocuments } from '@/hooks/use-knowled
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
import { useSidebarStore } from '@/stores/sidebar/store'
import { KnowledgeHeader } from '../components/knowledge-header/knowledge-header'
import { useKnowledgeUpload } from '../hooks/use-knowledge-upload'
import { KnowledgeBaseLoading } from './components/knowledge-base-loading/knowledge-base-loading'

const logger = createLogger('KnowledgeBase')

interface ProcessedDocumentResponse {
  documentId: string
  filename: string
  status: string
}

interface KnowledgeBaseProps {
  id: string
  knowledgeBaseName?: string
@@ -145,17 +140,32 @@ export function KnowledgeBase({
  const [showDeleteDialog, setShowDeleteDialog] = useState(false)
  const [isDeleting, setIsDeleting] = useState(false)
  const [isBulkOperating, setIsBulkOperating] = useState(false)
  const [isUploading, setIsUploading] = useState(false)
  const [uploadError, setUploadError] = useState<{
    message: string
    timestamp: number
  } | null>(null)
  const [uploadProgress, setUploadProgress] = useState<{
    stage: 'idle' | 'uploading' | 'processing' | 'completing'
    filesCompleted: number
    totalFiles: number
    currentFile?: string
  }>({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })

  const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
    onUploadComplete: async (uploadedFiles) => {
      const pendingDocuments: DocumentData[] = uploadedFiles.map((file, index) => ({
        id: `temp-${Date.now()}-${index}`,
        knowledgeBaseId: id,
        filename: file.filename,
        fileUrl: file.fileUrl,
        fileSize: file.fileSize,
        mimeType: file.mimeType,
        chunkCount: 0,
        tokenCount: 0,
        characterCount: 0,
        processingStatus: 'pending' as const,
        processingStartedAt: null,
        processingCompletedAt: null,
        processingError: null,
        enabled: true,
        uploadedAt: new Date().toISOString(),
      }))

      useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)

      await refreshDocuments()
    },
  })
  const router = useRouter()
  const fileInputRef = useRef<HTMLInputElement>(null)

@@ -240,11 +250,11 @@ export function KnowledgeBase({
  useEffect(() => {
    if (uploadError) {
      const timer = setTimeout(() => {
        setUploadError(null)
        clearError()
      }, 8000)
      return () => clearTimeout(timer)
    }
  }, [uploadError])
  }, [uploadError, clearError])

  // Filter documents based on search query
  const filteredDocuments = documents.filter((doc) =>
@@ -448,153 +458,18 @@ export function KnowledgeBase({
    const files = e.target.files
    if (!files || files.length === 0) return

    interface UploadedFile {
      filename: string
      fileUrl: string
      fileSize: number
      mimeType: string
    }

    try {
      setIsUploading(true)
      setUploadError(null)
      setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })

      // Upload all files and start processing
      const uploadedFiles: UploadedFile[] = []
      const fileArray = Array.from(files)

      for (const [index, file] of fileArray.entries()) {
        setUploadProgress((prev) => ({ ...prev, currentFile: file.name, filesCompleted: index }))
        const formData = new FormData()
        formData.append('file', file)

        const uploadResponse = await fetch('/api/files/upload', {
          method: 'POST',
          body: formData,
        })

        if (!uploadResponse.ok) {
          const errorData = await uploadResponse.json()
          throw new Error(`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`)
        }

        const uploadResult = await uploadResponse.json()

        // Validate upload result structure
        if (!uploadResult.path) {
          throw new Error(`Invalid upload response for ${file.name}: missing file path`)
        }

        uploadedFiles.push({
          filename: file.name,
          fileUrl: uploadResult.path.startsWith('http')
            ? uploadResult.path
            : `${window.location.origin}${uploadResult.path}`,
          fileSize: file.size,
          mimeType: file.type,
        })
      }

      setUploadProgress((prev) => ({
        ...prev,
        stage: 'processing',
        filesCompleted: fileArray.length,
      }))

      // Start async document processing
      const processResponse = await fetch(`/api/knowledge/${id}/documents`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          documents: uploadedFiles,
          processingOptions: {
            chunkSize: knowledgeBase?.chunkingConfig?.maxSize || 1024,
            minCharactersPerChunk: knowledgeBase?.chunkingConfig?.minSize || 100,
            chunkOverlap: knowledgeBase?.chunkingConfig?.overlap || 200,
            recipe: 'default',
            lang: 'en',
          },
          bulk: true,
        }),
      const chunkingConfig = knowledgeBase?.chunkingConfig
      await uploadFiles(Array.from(files), id, {
        chunkSize: chunkingConfig?.maxSize || 1024,
        minCharactersPerChunk: chunkingConfig?.minSize || 100,
        chunkOverlap: chunkingConfig?.overlap || 200,
        recipe: 'default',
      })

      if (!processResponse.ok) {
        const errorData = await processResponse.json()
        throw new Error(
          `Failed to start document processing: ${errorData.error || 'Unknown error'}`
        )
      }

      const processResult = await processResponse.json()

      // Validate process result structure
      if (!processResult.success) {
        throw new Error(`Document processing failed: ${processResult.error || 'Unknown error'}`)
      }

      if (!processResult.data || !processResult.data.documentsCreated) {
        throw new Error('Invalid processing response: missing document data')
      }

      // Create pending document objects and add them to the store immediately
      const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
        (doc: ProcessedDocumentResponse, index: number) => {
          if (!doc.documentId || !doc.filename) {
            logger.error(`Invalid document data received:`, doc)
            throw new Error(
              `Invalid document data for ${uploadedFiles[index]?.filename || 'unknown file'}`
            )
          }

          return {
            id: doc.documentId,
            knowledgeBaseId: id,
            filename: doc.filename,
            fileUrl: uploadedFiles[index].fileUrl,
            fileSize: uploadedFiles[index].fileSize,
            mimeType: uploadedFiles[index].mimeType,
            chunkCount: 0,
            tokenCount: 0,
            characterCount: 0,
            processingStatus: 'pending' as const,
            processingStartedAt: null,
            processingCompletedAt: null,
            processingError: null,
            enabled: true,
            uploadedAt: new Date().toISOString(),
          }
        }
      )

      // Add pending documents to store for immediate UI update
      useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)

      logger.info(`Successfully started processing ${uploadedFiles.length} documents`)

      setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))

      // Trigger a refresh to ensure documents are properly loaded
      await refreshDocuments()

      setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
    } catch (err) {
      logger.error('Error uploading documents:', err)

      const errorMessage =
        err instanceof Error ? err.message : 'Unknown error occurred during upload'
      setUploadError({
        message: errorMessage,
        timestamp: Date.now(),
      })

      // Show user-friendly error message in console for debugging
      console.error('Document upload failed:', errorMessage)
    } catch (error) {
      logger.error('Error uploading files:', error)
      // Error handling is managed by the upload hook
    } finally {
      setIsUploading(false)
      setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
      // Reset the file input
      if (fileInputRef.current) {
        fileInputRef.current.value = ''
@@ -995,7 +870,7 @@ export function KnowledgeBase({
              </tr>
            ))
          ) : (
            filteredDocuments.map((doc, index) => {
            filteredDocuments.map((doc) => {
              const isSelected = selectedDocuments.has(doc.id)
              const statusDisplay = getStatusDisplay(doc)
              // const processingTime = getProcessingTime(doc)
@@ -1254,7 +1129,7 @@ export function KnowledgeBase({
              </p>
            </div>
            <button
              onClick={() => setUploadError(null)}
              onClick={() => clearError()}
              className='flex-shrink-0 rounded-sm opacity-70 hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring'
            >
              <X className='h-4 w-4' />
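The `onUploadComplete` callback above applies an optimistic update: placeholder documents with synthetic `temp-` ids are written into the store so rows render immediately, and `refreshDocuments()` later swaps in server state. A sketch of the placeholder factory, with field shapes taken from this diff and import paths assumed from it:

```typescript
import type { DocumentData } from '@/stores/knowledge/store'
import type { UploadedFile } from '../hooks/use-knowledge-upload'

// Placeholder factory for the optimistic update shown above. The synthetic
// id exists only for rendering; refreshDocuments() replaces these rows with
// server-assigned documents once processing is registered.
const makePendingDocument = (
  knowledgeBaseId: string,
  file: UploadedFile,
  index: number
): DocumentData => ({
  id: `temp-${Date.now()}-${index}`,
  knowledgeBaseId,
  filename: file.filename,
  fileUrl: file.fileUrl,
  fileSize: file.fileSize,
  mimeType: file.mimeType,
  chunkCount: 0,
  tokenCount: 0,
  characterCount: 0,
  processingStatus: 'pending' as const,
  processingStartedAt: null,
  processingCompletedAt: null,
  processingError: null,
  enabled: true,
  uploadedAt: new Date().toISOString(),
})
```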
@@ -13,8 +13,8 @@ import { Label } from '@/components/ui/label'
import { Textarea } from '@/components/ui/textarea'
import { createLogger } from '@/lib/logs/console-logger'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
import type { DocumentData, KnowledgeBaseData } from '@/stores/knowledge/store'
import { useKnowledgeStore } from '@/stores/knowledge/store'
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
import { useKnowledgeUpload } from '../../hooks/use-knowledge-upload'

const logger = createLogger('CreateModal')

@@ -29,12 +29,6 @@ const ACCEPTED_FILE_TYPES = [
  'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
]

interface ProcessedDocumentResponse {
  documentId: string
  filename: string
  status: string
}

interface FileWithPreview extends File {
  preview: string
}
@@ -89,6 +83,12 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
  const scrollContainerRef = useRef<HTMLDivElement>(null)
  const dropZoneRef = useRef<HTMLDivElement>(null)

  const { uploadFiles } = useKnowledgeUpload({
    onUploadComplete: (uploadedFiles) => {
      logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
    },
  })

  // Cleanup file preview URLs when component unmounts to prevent memory leaks
  useEffect(() => {
    return () => {
@@ -235,19 +235,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
    return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
  }

  // Helper function to create uploadedFiles array from file uploads
  const createUploadedFile = (
    filename: string,
    fileUrl: string,
    fileSize: number,
    mimeType: string
  ) => ({
    filename,
    fileUrl: fileUrl.startsWith('http') ? fileUrl : `${window.location.origin}${fileUrl}`,
    fileSize,
    mimeType,
  })

  const onSubmit = async (data: FormValues) => {
    setIsSubmitting(true)
    setSubmitStatus(null)
@@ -285,138 +272,14 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea

      const newKnowledgeBase = result.data

      // If files are uploaded, upload them and start processing
      if (files.length > 0) {
        // First, upload all files to get their URLs
        interface UploadedFile {
          filename: string
          fileUrl: string
          fileSize: number
          mimeType: string
        }

        const uploadedFiles: UploadedFile[] = []

        for (const file of files) {
          try {
            const presignedResponse = await fetch('/api/files/presigned', {
              method: 'POST',
              headers: {
                'Content-Type': 'application/json',
              },
              body: JSON.stringify({
                fileName: file.name,
                contentType: file.type,
                fileSize: file.size,
              }),
            })

            const presignedData = await presignedResponse.json()

            if (presignedResponse.ok && presignedData.directUploadSupported) {
              const uploadHeaders: Record<string, string> = {
                'Content-Type': file.type,
              }

              // Add Azure-specific headers if provided
              if (presignedData.uploadHeaders) {
                Object.assign(uploadHeaders, presignedData.uploadHeaders)
              }

              const uploadResponse = await fetch(presignedData.presignedUrl, {
                method: 'PUT',
                headers: uploadHeaders, // Use the merged headers
                body: file,
              })

              if (!uploadResponse.ok) {
                throw new Error(
                  `Direct upload failed: ${uploadResponse.status} ${uploadResponse.statusText}`
                )
              }

              uploadedFiles.push(
                createUploadedFile(file.name, presignedData.fileInfo.path, file.size, file.type)
              )
            } else {
              const formData = new FormData()
              formData.append('file', file)

              const uploadResponse = await fetch('/api/files/upload', {
                method: 'POST',
                body: formData,
              })

              if (!uploadResponse.ok) {
                const errorData = await uploadResponse.json()
                throw new Error(
                  `Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`
                )
              }

              const uploadResult = await uploadResponse.json()
              uploadedFiles.push(
                createUploadedFile(file.name, uploadResult.path, file.size, file.type)
              )
            }
          } catch (error) {
            throw new Error(
              `Failed to upload ${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`
            )
          }
        }

        // Start async document processing
        const processResponse = await fetch(`/api/knowledge/${newKnowledgeBase.id}/documents`, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            documents: uploadedFiles,
            processingOptions: {
              chunkSize: data.maxChunkSize,
              minCharactersPerChunk: data.minChunkSize,
              chunkOverlap: data.overlapSize,
              recipe: 'default',
              lang: 'en',
            },
            bulk: true,
          }),
        const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, {
          chunkSize: data.maxChunkSize,
          minCharactersPerChunk: data.minChunkSize,
          chunkOverlap: data.overlapSize,
          recipe: 'default',
        })

        if (!processResponse.ok) {
          throw new Error('Failed to start document processing')
        }

        const processResult = await processResponse.json()

        // Create pending document objects and add them to the store immediately
        if (processResult.success && processResult.data.documentsCreated) {
          const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
            (doc: ProcessedDocumentResponse, index: number) => ({
              id: doc.documentId,
              knowledgeBaseId: newKnowledgeBase.id,
              filename: doc.filename,
              fileUrl: uploadedFiles[index].fileUrl,
              fileSize: uploadedFiles[index].fileSize,
              mimeType: uploadedFiles[index].mimeType,
              chunkCount: 0,
              tokenCount: 0,
              characterCount: 0,
              processingStatus: 'pending' as const,
              processingStartedAt: null,
              processingCompletedAt: null,
              processingError: null,
              enabled: true,
              uploadedAt: new Date().toISOString(),
            })
          )

          // Add pending documents to store for immediate UI update
          useKnowledgeStore.getState().addPendingDocuments(newKnowledgeBase.id, pendingDocuments)
        }

        // Update the knowledge base object with the correct document count
        newKnowledgeBase.docCount = uploadedFiles.length
@@ -0,0 +1,352 @@
import { useState } from 'react'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('KnowledgeUpload')

export interface UploadedFile {
  filename: string
  fileUrl: string
  fileSize: number
  mimeType: string
}

export interface UploadProgress {
  stage: 'idle' | 'uploading' | 'processing' | 'completing'
  filesCompleted: number
  totalFiles: number
  currentFile?: string
}

export interface UploadError {
  message: string
  timestamp: number
  code?: string
  details?: any
}

export interface ProcessingOptions {
  chunkSize?: number
  minCharactersPerChunk?: number
  chunkOverlap?: number
  recipe?: string
}

export interface UseKnowledgeUploadOptions {
  onUploadComplete?: (uploadedFiles: UploadedFile[]) => void
  onError?: (error: UploadError) => void
}

class KnowledgeUploadError extends Error {
  constructor(
    message: string,
    public code: string,
    public details?: any
  ) {
    super(message)
    this.name = 'KnowledgeUploadError'
  }
}

class PresignedUrlError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PRESIGNED_URL_ERROR', details)
  }
}

class DirectUploadError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'DIRECT_UPLOAD_ERROR', details)
  }
}

class ProcessingError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PROCESSING_ERROR', details)
  }
}

export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
  const [isUploading, setIsUploading] = useState(false)
  const [uploadProgress, setUploadProgress] = useState<UploadProgress>({
    stage: 'idle',
    filesCompleted: 0,
    totalFiles: 0,
  })
  const [uploadError, setUploadError] = useState<UploadError | null>(null)

  const createUploadedFile = (
    filename: string,
    fileUrl: string,
    fileSize: number,
    mimeType: string
  ): UploadedFile => ({
    filename,
    fileUrl,
    fileSize,
    mimeType,
  })

  const createErrorFromException = (error: unknown, defaultMessage: string): UploadError => {
    if (error instanceof KnowledgeUploadError) {
      return {
        message: error.message,
        code: error.code,
        details: error.details,
        timestamp: Date.now(),
      }
    }

    if (error instanceof Error) {
      return {
        message: error.message,
        timestamp: Date.now(),
      }
    }

    return {
      message: defaultMessage,
      timestamp: Date.now(),
    }
  }

  const uploadFiles = async (
    files: File[],
    knowledgeBaseId: string,
    processingOptions: ProcessingOptions = {}
  ): Promise<UploadedFile[]> => {
    if (files.length === 0) {
      throw new KnowledgeUploadError('No files provided for upload', 'NO_FILES')
    }

    if (!knowledgeBaseId?.trim()) {
      throw new KnowledgeUploadError('Knowledge base ID is required', 'INVALID_KB_ID')
    }

    try {
      setIsUploading(true)
      setUploadError(null)
      setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })

      const uploadedFiles: UploadedFile[] = []

      // Upload all files using presigned URLs
      for (const [index, file] of files.entries()) {
        setUploadProgress((prev) => ({
          ...prev,
          currentFile: file.name,
          filesCompleted: index,
        }))

        try {
          // Get presigned URL
          const presignedResponse = await fetch('/api/files/presigned?type=knowledge-base', {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({
              fileName: file.name,
              contentType: file.type,
              fileSize: file.size,
            }),
          })

          if (!presignedResponse.ok) {
            let errorDetails: any = null
            try {
              errorDetails = await presignedResponse.json()
            } catch {
              // Ignore JSON parsing errors
            }

            throw new PresignedUrlError(
              `Failed to get presigned URL for ${file.name}: ${presignedResponse.status} ${presignedResponse.statusText}`,
              errorDetails
            )
          }

          const presignedData = await presignedResponse.json()

          if (presignedData.directUploadSupported) {
            // Use presigned URL for direct upload
            const uploadHeaders: Record<string, string> = {
              'Content-Type': file.type,
            }

            // Add Azure-specific headers if provided
            if (presignedData.uploadHeaders) {
              Object.assign(uploadHeaders, presignedData.uploadHeaders)
            }

            const uploadResponse = await fetch(presignedData.presignedUrl, {
              method: 'PUT',
              headers: uploadHeaders,
              body: file,
            })

            if (!uploadResponse.ok) {
              throw new DirectUploadError(
                `Direct upload failed for ${file.name}: ${uploadResponse.status} ${uploadResponse.statusText}`,
                { uploadResponse: uploadResponse.statusText }
              )
            }

            // Convert relative path to full URL for schema validation
            const fullFileUrl = presignedData.fileInfo.path.startsWith('http')
              ? presignedData.fileInfo.path
              : `${window.location.origin}${presignedData.fileInfo.path}`

            uploadedFiles.push(createUploadedFile(file.name, fullFileUrl, file.size, file.type))
          } else {
            // Fallback to traditional upload through API route
            const formData = new FormData()
            formData.append('file', file)

            const uploadResponse = await fetch('/api/files/upload', {
              method: 'POST',
              body: formData,
            })

            if (!uploadResponse.ok) {
              let errorData: any = null
              try {
                errorData = await uploadResponse.json()
              } catch {
                // Ignore JSON parsing errors
              }

              throw new DirectUploadError(
                `Failed to upload ${file.name}: ${errorData?.error || 'Unknown error'}`,
                errorData
              )
            }

            const uploadResult = await uploadResponse.json()

            // Validate upload result structure
            if (!uploadResult.path) {
              throw new DirectUploadError(
                `Invalid upload response for ${file.name}: missing file path`,
                uploadResult
              )
            }

            uploadedFiles.push(
              createUploadedFile(
                file.name,
                uploadResult.path.startsWith('http')
                  ? uploadResult.path
                  : `${window.location.origin}${uploadResult.path}`,
                file.size,
                file.type
              )
            )
          }
        } catch (fileError) {
          logger.error(`Error uploading file ${file.name}:`, fileError)
          throw fileError // Re-throw to be caught by outer try-catch
        }
      }

      setUploadProgress((prev) => ({ ...prev, stage: 'processing' }))

      // Start async document processing
      const processPayload = {
        documents: uploadedFiles,
        processingOptions: {
          chunkSize: processingOptions.chunkSize || 1024,
          minCharactersPerChunk: processingOptions.minCharactersPerChunk || 100,
          chunkOverlap: processingOptions.chunkOverlap || 200,
          recipe: processingOptions.recipe || 'default',
          lang: 'en',
        },
        bulk: true,
      }

      const processResponse = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(processPayload),
      })

      if (!processResponse.ok) {
        let errorData: any = null
        try {
          errorData = await processResponse.json()
        } catch {
          // Ignore JSON parsing errors
        }

        logger.error('Document processing failed:', {
          status: processResponse.status,
          error: errorData,
          uploadedFiles: uploadedFiles.map((f) => ({
            filename: f.filename,
            fileUrl: f.fileUrl,
            fileSize: f.fileSize,
            mimeType: f.mimeType,
          })),
        })

        throw new ProcessingError(
          `Failed to start document processing: ${errorData?.error || errorData?.message || 'Unknown error'}`,
          errorData
        )
      }

      const processResult = await processResponse.json()

      // Validate process result structure
      if (!processResult.success) {
        throw new ProcessingError(
          `Document processing failed: ${processResult.error || 'Unknown error'}`,
          processResult
        )
      }

      if (!processResult.data || !processResult.data.documentsCreated) {
        throw new ProcessingError(
          'Invalid processing response: missing document data',
          processResult
        )
      }

      setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))

      logger.info(`Successfully started processing ${uploadedFiles.length} documents`)

      // Call success callback
      options.onUploadComplete?.(uploadedFiles)

      return uploadedFiles
    } catch (err) {
      logger.error('Error uploading documents:', err)

      const error = createErrorFromException(err, 'Unknown error occurred during upload')
      setUploadError(error)
      options.onError?.(error)

      // Show user-friendly error message in console for debugging
      console.error('Document upload failed:', error.message)

      throw err
    } finally {
      setIsUploading(false)
      setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
    }
  }

  const clearError = () => {
    setUploadError(null)
  }

  return {
    isUploading,
    uploadProgress,
    uploadError,
    uploadFiles,
    clearError,
  }
}
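A hedged usage sketch for the hook above; the wrapper hook and handler names are invented, while the return shape, options object, and error `code` values come from this file:

```typescript
import { useKnowledgeUpload } from './use-knowledge-upload'

// Hypothetical consumer of useKnowledgeUpload. The error codes checked here
// are the ones defined above: PRESIGNED_URL_ERROR, DIRECT_UPLOAD_ERROR,
// PROCESSING_ERROR, plus NO_FILES / INVALID_KB_ID from input validation.
export function useDocumentUploader(knowledgeBaseId: string) {
  const { isUploading, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
    onError: (error) => {
      if (error.code === 'PROCESSING_ERROR') {
        // Files reached storage, but the processing kickoff failed.
        console.warn('Processing did not start', error.details)
      }
    },
  })

  const upload = async (files: File[]) => {
    try {
      await uploadFiles(files, knowledgeBaseId, { chunkSize: 1024 })
    } catch {
      // uploadFiles re-throws after setting uploadError; clearError() dismisses it.
    }
  }

  return { isUploading, uploadError, upload, clearError }
}
```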
@@ -140,12 +140,20 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
        result.logs?.filter((log) => !messageIdMap.has(log.blockId)) || []

      if (nonStreamingLogs.length > 0) {
        const outputsToRender = selectedOutputs.filter((outputId) =>
          nonStreamingLogs.some((log) => log.blockId === outputId.split('.')[0])
        )
        const outputsToRender = selectedOutputs.filter((outputId) => {
          // Extract block ID correctly - handle both formats:
          // - "blockId" (direct block ID)
          // - "blockId_response.result" (block ID with path)
          const blockIdForOutput = outputId.includes('_')
            ? outputId.split('_')[0]
            : outputId.split('.')[0]
          return nonStreamingLogs.some((log) => log.blockId === blockIdForOutput)
        })

        for (const outputId of outputsToRender) {
          const blockIdForOutput = outputId.split('.')[0]
          const blockIdForOutput = outputId.includes('_')
            ? outputId.split('_')[0]
            : outputId.split('.')[0]
          const path = outputId.substring(blockIdForOutput.length + 1)
          const log = nonStreamingLogs.find((l) => l.blockId === blockIdForOutput)
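The same block-id extraction now appears twice above; pulled into a helper it reads as follows (a sketch — the component keeps it inline):

```typescript
// Selected-output ids arrive in two formats; both resolve to the block id:
//   'abc123'                 -> 'abc123'   (bare block id)
//   'abc123_response.result' -> 'abc123'   (block id + output path)
// The dot split remains as a fallback for the legacy 'abc123.result' form.
function getBlockIdFromOutputId(outputId: string): string {
  return outputId.includes('_') ? outputId.split('_')[0] : outputId.split('.')[0]
}
```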
@@ -53,13 +53,41 @@ export function OutputSelect({
    const addOutput = (path: string, outputObj: any, prefix = '') => {
      const fullPath = prefix ? `${prefix}.${path}` : path

      if (typeof outputObj === 'object' && outputObj !== null) {
        // For objects, recursively add each property
      // If not an object or is null, treat as leaf node
      if (typeof outputObj !== 'object' || outputObj === null) {
        const output = {
          id: `${block.id}_${fullPath}`,
          label: `${blockName}.${fullPath}`,
          blockId: block.id,
          blockName: block.name || `Block ${block.id}`,
          blockType: block.type,
          path: fullPath,
        }
        outputs.push(output)
        return
      }

      // If has 'type' property, treat as schema definition (leaf node)
      if ('type' in outputObj && typeof outputObj.type === 'string') {
        const output = {
          id: `${block.id}_${fullPath}`,
          label: `${blockName}.${fullPath}`,
          blockId: block.id,
          blockName: block.name || `Block ${block.id}`,
          blockType: block.type,
          path: fullPath,
        }
        outputs.push(output)
        return
      }

      // For objects without type, recursively add each property
      if (!Array.isArray(outputObj)) {
        Object.entries(outputObj).forEach(([key, value]) => {
          addOutput(key, value, fullPath)
        })
      } else {
        // Add leaf node as output option
        // For arrays, treat as leaf node
        outputs.push({
          id: `${block.id}_${fullPath}`,
          label: `${blockName}.${fullPath}`,
@@ -71,10 +99,10 @@ export function OutputSelect({
        }
      }

      // Start with the response object
      if (block.outputs.response) {
        addOutput('response', block.outputs.response)
      }
      // Process all output properties directly (flattened structure)
      Object.entries(block.outputs).forEach(([key, value]) => {
        addOutput(key, value)
      })
    }
  })
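To make the new traversal concrete, a small made-up outputs object and what `addOutput` derives from it under the rules above (a string `type` property marks a schema leaf, plain objects recurse, arrays and primitives are leaves):

```typescript
// Hypothetical block.outputs value, for illustration only.
const outputs = {
  result: { type: 'string' }, // schema leaf -> path 'result'
  data: {
    user: { type: 'object' }, // schema leaf -> path 'data.user'
    tags: ['a', 'b'],         // array leaf  -> path 'data.tags'
  },
}
// With block.id = 'abc123', the dropdown entries get ids
// 'abc123_result', 'abc123_data.user', 'abc123_data.tags'
// and labels '<blockName>.result', '<blockName>.data.user', '<blockName>.data.tags'.
```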
@@ -145,11 +145,13 @@ export const Toolbar = React.memo(() => {
            {blocks.map((block) => (
              <ToolbarBlock key={block.type} config={block} disabled={!userPermissions.canEdit} />
            ))}
            {activeTab === 'blocks' && !searchQuery && (
              <>
                <LoopToolbarItem disabled={!userPermissions.canEdit} />
                <ParallelToolbarItem disabled={!userPermissions.canEdit} />
              </>
            {((activeTab === 'blocks' && !searchQuery) ||
              (searchQuery && 'loop'.includes(searchQuery.toLowerCase()))) && (
              <LoopToolbarItem disabled={!userPermissions.canEdit} />
            )}
            {((activeTab === 'blocks' && !searchQuery) ||
              (searchQuery && 'parallel'.includes(searchQuery.toLowerCase()))) && (
              <ParallelToolbarItem disabled={!userPermissions.canEdit} />
            )}
          </div>
        </div>
@@ -4,10 +4,11 @@ import {
  type ConnectedBlock,
  useBlockConnections,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-block-connections'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getBlock } from '@/blocks'

interface ConnectionBlocksProps {
  blockId: string
  horizontalHandles: boolean
  setIsConnecting: (isConnecting: boolean) => void
  isDisabled?: boolean
}
@@ -20,6 +21,7 @@ interface ResponseField {

export function ConnectionBlocks({
  blockId,
  horizontalHandles,
  setIsConnecting,
  isDisabled = false,
}: ConnectionBlocksProps) {
@@ -39,6 +41,10 @@ export function ConnectionBlocks({

    e.stopPropagation() // Prevent parent drag handlers from firing
    setIsConnecting(true)

    // If no specific field is provided, use all available output types
    const outputType = field ? field.name : connection.outputType

    e.dataTransfer.setData(
      'application/json',
      JSON.stringify({
@@ -46,9 +52,13 @@ export function ConnectionBlocks({
        connectionData: {
          id: connection.id,
          name: connection.name,
          outputType: field ? field.name : connection.outputType,
          outputType: outputType,
          sourceBlockId: connection.id,
          fieldType: field?.type,
          // Include all available output types for reference
          allOutputTypes: Array.isArray(connection.outputType)
            ? connection.outputType
            : [connection.outputType],
        },
      })
    )
@@ -59,147 +69,59 @@ export function ConnectionBlocks({
    setIsConnecting(false)
  }

  // Helper function to extract fields from JSON Schema
  const extractFieldsFromSchema = (connection: ConnectedBlock): ResponseField[] => {
    // Handle legacy format with fields array
    if (connection.responseFormat?.fields) {
      return connection.responseFormat.fields
    }

    // Handle new JSON Schema format
    const schema = connection.responseFormat?.schema || connection.responseFormat
    // Safely check if schema and properties exist
    if (
      !schema ||
      typeof schema !== 'object' ||
      !('properties' in schema) ||
      typeof schema.properties !== 'object'
    ) {
      return []
    }
    return Object.entries(schema.properties).map(([name, prop]: [string, any]) => ({
      name,
      type: Array.isArray(prop) ? 'array' : prop.type || 'string',
      description: prop.description,
    }))
  }

  // Extract fields from starter block input format
  const extractFieldsFromStarterInput = (connection: ConnectedBlock): ResponseField[] => {
    // Only process for starter blocks
    if (connection.type !== 'starter') return []

    try {
      // Get input format from subblock store
      const inputFormat = useSubBlockStore.getState().getValue(connection.id, 'inputFormat')

      // Make sure we have a valid input format
      if (!inputFormat || !Array.isArray(inputFormat) || inputFormat.length === 0) {
        return [{ name: 'input', type: 'any' }]
      }

      // Check if any fields have been configured with names
      const hasConfiguredFields = inputFormat.some(
        (field: any) => field.name && field.name.trim() !== ''
      )

      // If no fields have been configured, return the default input field
      if (!hasConfiguredFields) {
        return [{ name: 'input', type: 'any' }]
      }

      // Map input fields to response fields
      return inputFormat.map((field: any) => ({
        name: `input.${field.name}`,
        type: field.type || 'string',
        description: field.description,
      }))
    } catch (e) {
      console.error('Error extracting fields from starter input format:', e)
      return [{ name: 'input', type: 'any' }]
    }
  }

  // Deduplicate connections by ID
  const connectionMap = incomingConnections.reduce(
    (acc, connection) => {
      acc[connection.id] = connection
      return acc
    },
    {} as Record<string, ConnectedBlock>
  )

  // Sort connections by name
  const sortedConnections = Object.values(connectionMap).sort((a, b) =>
    a.name.localeCompare(b.name)
  )
  // Use connections in distance order (already sorted and deduplicated by the hook)
  const sortedConnections = incomingConnections

  // Helper function to render a connection card
  const renderConnectionCard = (connection: ConnectedBlock, field?: ResponseField) => {
    const displayName = connection.name.replace(/\s+/g, '').toLowerCase()
  const renderConnectionCard = (connection: ConnectedBlock) => {
    // Get block configuration for icon and color
    const blockConfig = getBlock(connection.type)
    const displayName = connection.name // Use the actual block name instead of transforming it
    const Icon = blockConfig?.icon
    const bgColor = blockConfig?.bgColor || '#6B7280' // Fallback to gray

    return (
      <Card
        key={`${field ? field.name : connection.id}`}
        key={`${connection.id}-${connection.name}`}
        draggable={!isDisabled}
        onDragStart={(e) => handleDragStart(e, connection, field)}
        onDragStart={(e) => handleDragStart(e, connection)}
        onDragEnd={handleDragEnd}
        className={cn(
          'group flex w-max items-center rounded-lg border bg-card p-2 shadow-sm transition-colors',
          'group flex w-max items-center gap-2 rounded-lg border bg-card p-2 shadow-sm transition-colors',
          !isDisabled
            ? 'cursor-grab hover:bg-accent/50 active:cursor-grabbing'
            : 'cursor-not-allowed opacity-60'
        )}
      >
        {/* Block icon with color */}
        {Icon && (
          <div
            className='flex h-5 w-5 flex-shrink-0 items-center justify-center rounded'
            style={{ backgroundColor: bgColor }}
          >
            <Icon className='h-3 w-3 text-white' />
          </div>
        )}
        <div className='text-sm'>
          <span className='font-medium leading-none'>{displayName}</span>
          <span className='text-muted-foreground'>
            {field
              ? `.${field.name}`
              : typeof connection.outputType === 'string'
                ? `.${connection.outputType}`
                : ''}
          </span>
        </div>
      </Card>
    )
  }

  return (
    <div className='absolute top-0 right-full flex max-h-[400px] flex-col items-end space-y-2 overflow-y-auto pr-5'>
      {sortedConnections.map((connection, index) => {
        // Special handling for starter blocks with input format
        if (connection.type === 'starter') {
          const starterFields = extractFieldsFromStarterInput(connection)
  // Generate all connection cards - one per block, not per output field
  const connectionCards: React.ReactNode[] = []

          if (starterFields.length > 0) {
            return (
              <div key={connection.id} className='space-y-2'>
                {starterFields.map((field) => renderConnectionCard(connection, field))}
              </div>
            )
          }
        }
  sortedConnections.forEach((connection) => {
    connectionCards.push(renderConnectionCard(connection))
  })

        // Regular connection handling
        return (
          <div key={`${connection.id}-${index}`} className='space-y-2'>
            {Array.isArray(connection.outputType)
              ? // Handle array of field names
                connection.outputType.map((fieldName) => {
                  // Try to find field in response format
                  const fields = extractFieldsFromSchema(connection)
                  const field = fields.find((f) => f.name === fieldName) || {
                    name: fieldName,
                    type: 'string',
                  }
  // Position and layout based on handle orientation - reverse of ports
  // When ports are horizontal: connection blocks on top, aligned to left, closest blocks on bottom row
  // When ports are vertical (default): connection blocks on left, stack vertically, aligned to right
  const containerClasses = horizontalHandles
    ? 'absolute bottom-full left-0 flex max-w-[600px] flex-wrap-reverse gap-2 pb-3'
    : 'absolute top-0 right-full flex max-h-[400px] max-w-[200px] flex-col items-end gap-2 overflow-y-auto pr-3'

                  return renderConnectionCard(connection, field)
                })
              : renderConnectionCard(connection)}
          </div>
        )
      })}
    </div>
  )
  return <div className={containerClasses}>{connectionCards}</div>
}
@@ -13,8 +13,7 @@ import {
} from '@/components/ui/command'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import type { SubBlockConfig } from '@/blocks/types'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useSubBlockValue } from '../../hooks/use-sub-block-value'

interface DocumentData {
  id: string
@@ -51,46 +50,25 @@ export function DocumentSelector({
  isPreview = false,
  previewValue,
}: DocumentSelectorProps) {
  const { getValue } = useSubBlockStore()
  const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()

  const [documents, setDocuments] = useState<DocumentData[]>([])
  const [error, setError] = useState<string | null>(null)
  const [open, setOpen] = useState(false)
  const [selectedDocument, setSelectedDocument] = useState<DocumentData | null>(null)
  const [initialFetchDone, setInitialFetchDone] = useState(false)
  const [selectedId, setSelectedId] = useState('')

  // Get the current value from the store
  const storeValue = getValue(blockId, subBlock.id)
  // Use the proper hook to get the current value and setter
  const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)

  // Get the knowledge base ID from the same block's knowledgeBaseId subblock
  const knowledgeBaseId = getValue(blockId, 'knowledgeBaseId')
  const [knowledgeBaseId] = useSubBlockValue(blockId, 'knowledgeBaseId')

  // Use preview value when in preview mode, otherwise use store value
  const value = isPreview ? previewValue : storeValue

  // Initialize selectedId with the effective value
  useEffect(() => {
    if (isPreview && previewValue !== undefined) {
      setSelectedId(previewValue || '')
    } else {
      setSelectedId(value || '')
    }
  }, [value, isPreview, previewValue])

  // Update local state when external value changes
  useEffect(() => {
    const currentValue = isPreview ? previewValue : value
    setSelectedId(currentValue || '')
  }, [value, isPreview, previewValue])

  // Fetch documents for the selected knowledge base
  const fetchDocuments = useCallback(async () => {
    if (!knowledgeBaseId) {
      setDocuments([])
      setError('No knowledge base selected')
      setInitialFetchDone(true)
      return
    }

@@ -111,47 +89,12 @@ export function DocumentSelector({

      const fetchedDocuments = result.data || []
      setDocuments(fetchedDocuments)
      setInitialFetchDone(true)

      // Auto-selection logic: if we have a valid selection, keep it
      // If there's only one document, select it
      // If we have a value but it's not in the documents, reset it
      if (selectedId && !fetchedDocuments.some((doc: DocumentData) => doc.id === selectedId)) {
        setSelectedId('')
        if (!isPreview) {
          collaborativeSetSubblockValue(blockId, subBlock.id, '')
        }
      }

      if (
        (!selectedId || !fetchedDocuments.some((doc: DocumentData) => doc.id === selectedId)) &&
        fetchedDocuments.length > 0
      ) {
        if (fetchedDocuments.length === 1) {
          // If only one document, auto-select it
          const singleDoc = fetchedDocuments[0]
          setSelectedId(singleDoc.id)
          setSelectedDocument(singleDoc)
          if (!isPreview) {
            collaborativeSetSubblockValue(blockId, subBlock.id, singleDoc.id)
          }
          onDocumentSelect?.(singleDoc.id)
        }
      }
    } catch (err) {
      if ((err as Error).name === 'AbortError') return
      setError((err as Error).message)
      setDocuments([])
    }
  }, [
    knowledgeBaseId,
    selectedId,
    collaborativeSetSubblockValue,
    blockId,
    subBlock.id,
    isPreview,
    onDocumentSelect,
  ])
  }, [knowledgeBaseId])

  // Handle dropdown open/close - fetch documents when opening
  const handleOpenChange = (isOpen: boolean) => {
@@ -170,50 +113,34 @@ export function DocumentSelector({
    if (isPreview) return

    setSelectedDocument(document)
    setSelectedId(document.id)

    if (!isPreview) {
      collaborativeSetSubblockValue(blockId, subBlock.id, document.id)
    }

    setStoreValue(document.id)
    onDocumentSelect?.(document.id)
    setOpen(false)
  }

  // Sync selected document with value prop
  useEffect(() => {
    if (selectedId && documents.length > 0) {
      const docInfo = documents.find((doc) => doc.id === selectedId)
      if (docInfo) {
        setSelectedDocument(docInfo)
      } else {
        setSelectedDocument(null)
      }
    } else if (!selectedId) {
    if (value && documents.length > 0) {
      const docInfo = documents.find((doc) => doc.id === value)
      setSelectedDocument(docInfo || null)
    } else {
      setSelectedDocument(null)
    }
  }, [selectedId, documents])
  }, [value, documents])

  // Reset documents when knowledge base changes
  useEffect(() => {
    if (knowledgeBaseId) {
      setDocuments([])
      setSelectedDocument(null)
      setSelectedId('')
      setInitialFetchDone(false)
      setError(null)
      if (!isPreview) {
        collaborativeSetSubblockValue(blockId, subBlock.id, '')
      }
    }
  }, [knowledgeBaseId, blockId, subBlock.id, collaborativeSetSubblockValue, isPreview])
    setDocuments([])
    setSelectedDocument(null)
    setError(null)
  }, [knowledgeBaseId])

  // Fetch documents when knowledge base is available and we haven't fetched yet
  // Fetch documents when knowledge base is available
  useEffect(() => {
    if (knowledgeBaseId && !initialFetchDone && !isPreview) {
    if (knowledgeBaseId && !isPreview) {
      fetchDocuments()
    }
  }, [knowledgeBaseId, initialFetchDone, fetchDocuments, isPreview])
  }, [knowledgeBaseId, isPreview, fetchDocuments])

  const formatDocumentName = (document: DocumentData) => {
    return document.filename
@@ -307,7 +234,7 @@ export function DocumentSelector({
                      </div>
                    </div>
                  </div>
                  {document.id === selectedId && <Check className='ml-auto h-4 w-4' />}
                  {document.id === value && <Check className='ml-auto h-4 w-4' />}
                </CommandItem>
              ))}
            </CommandGroup>
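The core of this refactor, sketched in isolation: one hook supplies the reactive value and a setter that already handles collaborative persistence, replacing the manual `getValue` / `collaborativeSetSubblockValue` pairing. The component and prop names below are illustrative; the hook's tuple signature matches its use above:

```typescript
import { useSubBlockValue } from '../../hooks/use-sub-block-value'

// Minimal consumer of the hook-based pattern adopted above.
function DocumentIdField({ blockId, subBlockId }: { blockId: string; subBlockId: string }) {
  const [documentId, setDocumentId] = useSubBlockValue(blockId, subBlockId)

  // Selection collapses to a single call; persistence is the hook's job.
  const select = (docId: string) => setDocumentId(docId)

  return null // rendering elided; documentId would drive the display
}
```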
@@ -7,6 +7,7 @@ import type { SubBlockConfig } from '@/blocks/types'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
import type { ConfluenceFileInfo } from './components/confluence-file-selector'
import { ConfluenceFileSelector } from './components/confluence-file-selector'
import type { DiscordChannelInfo } from './components/discord-channel-selector'
@@ -40,6 +41,9 @@ export function FileSelectorInput({
const { getValue } = useSubBlockStore()
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
const { activeWorkflowId } = useWorkflowRegistry()

// Use the proper hook to get the current value and setter
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
const [selectedFileId, setSelectedFileId] = useState<string>('')
const [_fileInfo, setFileInfo] = useState<FileInfo | ConfluenceFileInfo | null>(null)
const [selectedIssueId, setSelectedIssueId] = useState<string>('')
@@ -66,7 +70,7 @@ export function FileSelectorInput({
const serverId = isDiscord ? (getValue(blockId, 'serverId') as string) || '' : ''

// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : getValue(blockId, subBlock.id)
const value = isPreview ? previewValue : storeValue

// Get the current value from the store or prop value if in preview mode
useEffect(() => {
@@ -117,14 +121,14 @@ export function FileSelectorInput({
const handleFileChange = (fileId: string, info?: any) => {
setSelectedFileId(fileId)
setFileInfo(info || null)
collaborativeSetSubblockValue(blockId, subBlock.id, fileId)
setStoreValue(fileId)
}

// Handle issue selection
const handleIssueChange = (issueKey: string, info?: JiraIssueInfo) => {
setSelectedIssueId(issueKey)
setIssueInfo(info || null)
collaborativeSetSubblockValue(blockId, subBlock.id, issueKey)
setStoreValue(issueKey)

// Clear the fields when a new issue is selected
if (isJira) {
@@ -137,14 +141,14 @@ export function FileSelectorInput({
const handleChannelChange = (channelId: string, info?: DiscordChannelInfo) => {
setSelectedChannelId(channelId)
setChannelInfo(info || null)
collaborativeSetSubblockValue(blockId, subBlock.id, channelId)
setStoreValue(channelId)
}

// Handle calendar selection
const handleCalendarChange = (calendarId: string, info?: GoogleCalendarInfo) => {
setSelectedCalendarId(calendarId)
setCalendarInfo(info || null)
collaborativeSetSubblockValue(blockId, subBlock.id, calendarId)
setStoreValue(calendarId)
}

// For Google Drive
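Across these handlers the direct `collaborativeSetSubblockValue(blockId, subBlock.id, …)` calls are swapped for the `setStoreValue` setter returned by `useSubBlockValue`, so each call site touches a single API. A hedged, non-React sketch of that contract (the tuple shape mirrors the hook's `readonly [T | null, (value: T) => void]` signature shown later in this diff; the body is illustrative only):

```typescript
// Sketch: a value cell whose setter both updates local state and notifies
// collaborators, so call sites only ever touch one API.
type Listener = (value: unknown) => void

function createSubBlockValue<T>(initial: T, broadcast: Listener) {
  let current: T = initial
  const set = (value: T) => {
    current = value // local store update
    broadcast(value) // collaborative/socket emission
  }
  const get = () => current
  return [get, set] as const
}

// Usage mirrors the hook's tuple: read via storeValue, write via setStoreValue.
const [getFileId, setFileId] = createSubBlockValue('', (v) => console.log('sync:', v))
setFileId('file-123') // logs "sync: file-123"
console.log(getFileId()) // "file-123"
```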
@@ -4,7 +4,6 @@ import { useRef, useState } from 'react'
import { X } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Progress } from '@/components/ui/progress'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useNotificationStore } from '@/stores/notifications/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -58,7 +57,6 @@ export function FileUpload({
// Stores
const { addNotification } = useNotificationStore()
const { activeWorkflowId } = useWorkflowRegistry()
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()

// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue
@@ -298,16 +296,10 @@ export function FileUpload({
const newFiles = Array.from(uniqueFiles.values())

setStoreValue(newFiles)

// Use collaborative update for persistence
collaborativeSetSubblockValue(blockId, subBlockId, newFiles)
useWorkflowStore.getState().triggerUpdate()
} else {
// For single file: Replace with last uploaded file
setStoreValue(uploadedFiles[0] || null)

// Use collaborative update for persistence
collaborativeSetSubblockValue(blockId, subBlockId, uploadedFiles[0] || null)
useWorkflowStore.getState().triggerUpdate()
}
} catch (error) {
@@ -363,19 +355,9 @@ export function FileUpload({
const filesArray = Array.isArray(value) ? value : value ? [value] : []
const updatedFiles = filesArray.filter((f) => f.path !== file.path)
setStoreValue(updatedFiles.length > 0 ? updatedFiles : null)

// Use collaborative update for persistence
collaborativeSetSubblockValue(
blockId,
subBlockId,
updatedFiles.length > 0 ? updatedFiles : null
)
} else {
// For single file: Clear the value
setStoreValue(null)

// Use collaborative update for persistence
collaborativeSetSubblockValue(blockId, subBlockId, null)
}

useWorkflowStore.getState().triggerUpdate()
@@ -416,7 +398,6 @@ export function FileUpload({

// Clear input state immediately for better UX
setStoreValue(null)
collaborativeSetSubblockValue(blockId, subBlockId, null)
useWorkflowStore.getState().triggerUpdate()

if (fileInputRef.current) {
@@ -1,6 +1,6 @@
'use client'

import { useCallback, useEffect, useState } from 'react'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { Check, ChevronDown, RefreshCw, X } from 'lucide-react'
import { PackageSearchIcon } from '@/components/icons'
import { Button } from '@/components/ui/button'
@@ -14,9 +14,8 @@ import {
} from '@/components/ui/command'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import type { SubBlockConfig } from '@/blocks/types'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { type KnowledgeBaseData, useKnowledgeStore } from '@/stores/knowledge/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useSubBlockValue } from '../../../sub-block/hooks/use-sub-block-value'

interface KnowledgeBaseSelectorProps {
blockId: string
@@ -37,24 +36,39 @@ export function KnowledgeBaseSelector({
}: KnowledgeBaseSelectorProps) {
const { getKnowledgeBasesList, knowledgeBasesList, loadingKnowledgeBasesList } =
useKnowledgeStore()
const { getValue } = useSubBlockStore()
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()

const [knowledgeBases, setKnowledgeBases] = useState<KnowledgeBaseData[]>([])
const [loading, setLoading] = useState(false)
const [error, setError] = useState<string | null>(null)
const [open, setOpen] = useState(false)
const [selectedKnowledgeBases, setSelectedKnowledgeBases] = useState<KnowledgeBaseData[]>([])
const [initialFetchDone, setInitialFetchDone] = useState(false)

// Get the current value from the store
const storeValue = getValue(blockId, subBlock.id)
// Use the proper hook to get the current value and setter - this prevents infinite loops
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)

// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue

const isMultiSelect = subBlock.multiSelect === true

// Compute selected knowledge bases directly from value - no local state to avoid loops
const selectedKnowledgeBases = useMemo(() => {
if (value && knowledgeBases.length > 0) {
const selectedIds =
typeof value === 'string'
? value.includes(',')
? value
.split(',')
.map((id) => id.trim())
.filter((id) => id.length > 0)
: [value]
: []

return knowledgeBases.filter((kb) => selectedIds.includes(kb.id))
}
return []
}, [value, knowledgeBases])

// Fetch knowledge bases
const fetchKnowledgeBases = useCallback(async () => {
setLoading(true)
@@ -89,12 +103,8 @@ export function KnowledgeBaseSelector({
const handleSelectSingleKnowledgeBase = (knowledgeBase: KnowledgeBaseData) => {
if (isPreview) return

setSelectedKnowledgeBases([knowledgeBase])

if (!isPreview) {
// Use collaborative update for both local store and persistence
collaborativeSetSubblockValue(blockId, subBlock.id, knowledgeBase.id)
}
// Use the hook's setter which handles collaborative updates
setStoreValue(knowledgeBase.id)

onKnowledgeBaseSelect?.(knowledgeBase.id)
setOpen(false)
@@ -115,16 +125,13 @@ export function KnowledgeBaseSelector({
newSelected = [...selectedKnowledgeBases, knowledgeBase]
}

setSelectedKnowledgeBases(newSelected)
const selectedIds = newSelected.map((kb) => kb.id)
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')

if (!isPreview) {
const selectedIds = newSelected.map((kb) => kb.id)
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')
// Use collaborative update for both local store and persistence
collaborativeSetSubblockValue(blockId, subBlock.id, valueToStore)
}
// Use the hook's setter which handles collaborative updates
setStoreValue(valueToStore)

onKnowledgeBaseSelect?.(newSelected.map((kb) => kb.id))
onKnowledgeBaseSelect?.(selectedIds)
}

// Remove selected knowledge base (for multi-select tags)
@@ -132,38 +139,15 @@ export function KnowledgeBaseSelector({
if (isPreview) return

const newSelected = selectedKnowledgeBases.filter((kb) => kb.id !== knowledgeBaseId)
setSelectedKnowledgeBases(newSelected)
const selectedIds = newSelected.map((kb) => kb.id)
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')

if (!isPreview) {
const selectedIds = newSelected.map((kb) => kb.id)
const valueToStore = selectedIds.length === 1 ? selectedIds[0] : selectedIds.join(',')
// Use collaborative update for both local store and persistence
collaborativeSetSubblockValue(blockId, subBlock.id, valueToStore)
}
// Use the hook's setter which handles collaborative updates
setStoreValue(valueToStore)

onKnowledgeBaseSelect?.(newSelected.map((kb) => kb.id))
onKnowledgeBaseSelect?.(selectedIds)
}

// Sync selected knowledge bases with value prop
useEffect(() => {
if (value && knowledgeBases.length > 0) {
const selectedIds =
typeof value === 'string'
? value.includes(',')
? value
.split(',')
.map((id) => id.trim())
.filter((id) => id.length > 0)
: [value]
: []

const selectedKbs = knowledgeBases.filter((kb) => selectedIds.includes(kb.id))
setSelectedKnowledgeBases(selectedKbs)
} else if (!value) {
setSelectedKnowledgeBases([])
}
}, [value, knowledgeBases])

// Use cached data if available
useEffect(() => {
if (knowledgeBasesList.length > 0 && !initialFetchDone) {
@@ -177,6 +161,7 @@ export function KnowledgeBaseSelector({
if (
value &&
selectedKnowledgeBases.length === 0 &&
knowledgeBases.length === 0 &&
!loading &&
!initialFetchDone &&
!isPreview
@@ -186,6 +171,7 @@ export function KnowledgeBaseSelector({
}, [
value,
selectedKnowledgeBases.length,
knowledgeBases.length,
loading,
initialFetchDone,
fetchKnowledgeBases,
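The key move in this file is replacing the "sync selection with an effect + setState" pattern with a `useMemo` that derives the selection from `value` on every render, which removes the feedback loop entirely. The derivation is a pure function, sketched here outside React:

```typescript
// Sketch: parse the stored value (single id or comma-separated ids) into the
// selected subset, as a pure function suitable for useMemo.
interface KnowledgeBase {
  id: string
  name: string
}

function parseSelectedIds(value: string | null): string[] {
  if (!value) return []
  return value
    .split(',')
    .map((id) => id.trim())
    .filter((id) => id.length > 0)
}

function selectedKnowledgeBases(value: string | null, all: KnowledgeBase[]): KnowledgeBase[] {
  const ids = parseSelectedIds(value)
  return all.filter((kb) => ids.includes(kb.id))
}

// Because the result is derived on render, there is no setState-in-effect
// cycle: changing `value` changes the output, and nothing feeds back.
const bases = [{ id: 'kb1', name: 'Docs' }, { id: 'kb2', name: 'FAQs' }]
console.log(selectedKnowledgeBases('kb1, kb2', bases).map((kb) => kb.name)) // ["Docs", "FAQs"]
```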
@@ -5,6 +5,7 @@ import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/comp
import type { SubBlockConfig } from '@/blocks/types'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
import { type DiscordServerInfo, DiscordServerSelector } from './components/discord-server-selector'
import { type JiraProjectInfo, JiraProjectSelector } from './components/jira-project-selector'
import { type LinearProjectInfo, LinearProjectSelector } from './components/linear-project-selector'
@@ -32,6 +33,9 @@ export function ProjectSelectorInput({
const [selectedProjectId, setSelectedProjectId] = useState<string>('')
const [_projectInfo, setProjectInfo] = useState<JiraProjectInfo | DiscordServerInfo | null>(null)

// Use the proper hook to get the current value and setter
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)

// Get provider-specific values
const provider = subBlock.provider || 'jira'
const isDiscord = provider === 'discord'
@@ -60,7 +64,7 @@ export function ProjectSelectorInput({
) => {
setSelectedProjectId(projectId)
setProjectInfo(info || null)
collaborativeSetSubblockValue(blockId, subBlock.id, projectId)
setStoreValue(projectId)

// Clear the issue-related fields when a new project is selected
if (provider === 'jira') {
@@ -26,10 +26,10 @@ interface ScheduleConfigProps {

export function ScheduleConfig({
blockId,
subBlockId,
subBlockId: _subBlockId,
isConnecting,
isPreview = false,
previewValue,
previewValue: _previewValue,
disabled = false,
}: ScheduleConfigProps) {
const [error, setError] = useState<string | null>(null)
@@ -56,13 +56,7 @@ export function ScheduleConfig({

// Get the startWorkflow value to determine if scheduling is enabled
// and expose the setter so we can update it
const [startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')
const isScheduleEnabled = startWorkflow === 'schedule'

const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)

// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue
const [_startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')

// Function to check if schedule exists in the database
const checkSchedule = async () => {
@@ -110,10 +104,17 @@ export function ScheduleConfig({

// Check for schedule on mount and when relevant dependencies change
useEffect(() => {
// Always check for schedules regardless of the UI setting
// This ensures we detect schedules even when the UI is set to manual
checkSchedule()
}, [workflowId, scheduleType, isModalOpen, refreshCounter])
// Only check for schedules when workflowId changes or modal opens
// Avoid checking on every scheduleType change to prevent excessive API calls
if (workflowId && (isModalOpen || refreshCounter > 0)) {
checkSchedule()
}

// Cleanup function to reset loading state
return () => {
setIsLoading(false)
}
}, [workflowId, isModalOpen, refreshCounter])

// Format the schedule information for display
const getScheduleInfo = () => {
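The new effect gates the database check behind explicit triggers rather than re-running it on every `scheduleType` change. The gating condition is small enough to sketch on its own:

```typescript
// Sketch: gate an expensive check behind explicit triggers instead of
// re-running it on every dependency change.
interface ScheduleCheckDeps {
  workflowId: string | null
  isModalOpen: boolean
  refreshCounter: number
}

function shouldCheckSchedule({ workflowId, isModalOpen, refreshCounter }: ScheduleCheckDeps): boolean {
  // Only hit the API when there is a workflow to check and the user has
  // either opened the modal or explicitly asked for a refresh.
  return Boolean(workflowId) && (isModalOpen || refreshCounter > 0)
}

console.log(shouldCheckSchedule({ workflowId: 'wf1', isModalOpen: false, refreshCounter: 0 })) // false
console.log(shouldCheckSchedule({ workflowId: 'wf1', isModalOpen: true, refreshCounter: 0 })) // true
```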
@@ -1,5 +1,6 @@
import { useCallback, useEffect, useRef } from 'react'
import { isEqual } from 'lodash'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { getProviderFromModel } from '@/providers/utils'
import { useGeneralStore } from '@/stores/settings/general/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
@@ -167,6 +168,8 @@ export function useSubBlockValue<T = any>(
subBlockId: string,
triggerWorkflowUpdate = false
): readonly [T | null, (value: T) => void] {
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()

const blockType = useWorkflowStore(
useCallback((state) => state.blocks?.[blockId]?.type, [blockId])
)
@@ -228,25 +231,24 @@ export function useSubBlockValue<T = any>(
storeApiKeyValue(blockId, blockType, modelValue, newValue, storeValue)
}

// Update the subblock store directly
useSubBlockStore.getState().setValue(blockId, subBlockId, valueCopy)

// Dispatch event to trigger socket emission only (not store update)
const event = new CustomEvent('update-subblock-value', {
detail: {
blockId,
subBlockId,
value: valueCopy,
},
})
window.dispatchEvent(event)
// Use collaborative function which handles both local store update and socket emission
collaborativeSetSubblockValue(blockId, subBlockId, valueCopy)

if (triggerWorkflowUpdate) {
useWorkflowStore.getState().triggerUpdate()
}
}
},
[blockId, subBlockId, blockType, isApiKey, storeValue, triggerWorkflowUpdate, modelValue]
[
blockId,
subBlockId,
blockType,
isApiKey,
storeValue,
triggerWorkflowUpdate,
modelValue,
collaborativeSetSubblockValue,
]
)

// Initialize valueRef on first render
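This hunk is the heart of the refactor: the hook previously wrote to the store and then dispatched a window `CustomEvent` for socket emission, two halves that could get out of step. The replacement funnels both through one collaborative function. A hedged sketch of that consolidation:

```typescript
// Sketch: one entry point that performs the store write and the socket
// emission together, replacing a store write + window CustomEvent pair.
type Emit = (blockId: string, subBlockId: string, value: unknown) => void

function makeCollaborativeSetter(writeStore: Emit, emitSocket: Emit): Emit {
  return (blockId, subBlockId, value) => {
    writeStore(blockId, subBlockId, value) // local store update
    emitSocket(blockId, subBlockId, value) // remote collaborators
  }
}

// Usage: call sites can no longer update one side and forget the other.
const setSubblockValue = makeCollaborativeSetter(
  (b, s, v) => console.log('store', b, s, v),
  (b, s, v) => console.log('socket', b, s, v)
)
setSubblockValue('block-1', 'apiKey', 'redacted')
```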
@@ -1,5 +1,6 @@
import { useEffect, useRef, useState } from 'react'
import { BookOpen, Code, Info, RectangleHorizontal, RectangleVertical } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
@@ -83,6 +84,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
const isActiveBlock = useExecutionStore((state) => state.activeBlockIds.has(id))
const isActive = dataIsActive || isActiveBlock

// Get the current workflow ID from URL params instead of global state
// This prevents race conditions when switching workflows rapidly
const params = useParams()
const currentWorkflowId = params.workflowId as string

const reactivateSchedule = async (scheduleId: string) => {
try {
const response = await fetch(`/api/schedules/${scheduleId}`, {
@@ -94,7 +100,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
})

if (response.ok) {
fetchScheduleInfo()
// Use the current workflow ID from params instead of global state
if (currentWorkflowId) {
fetchScheduleInfo(currentWorkflowId)
}
} else {
console.error('Failed to reactivate schedule')
}
@@ -103,11 +112,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
}
}

const fetchScheduleInfo = async () => {
const fetchScheduleInfo = async (workflowId: string) => {
if (!workflowId) return

try {
setIsLoadingScheduleInfo(true)
const workflowId = useWorkflowRegistry.getState().activeWorkflowId
if (!workflowId) return

const response = await fetch(`/api/schedules?workflowId=${workflowId}&mode=schedule`, {
cache: 'no-store',
@@ -176,12 +185,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
}

useEffect(() => {
if (type === 'starter') {
fetchScheduleInfo()
if (type === 'starter' && currentWorkflowId) {
fetchScheduleInfo(currentWorkflowId)
} else {
setScheduleInfo(null)
setIsLoadingScheduleInfo(false) // Reset loading state when not a starter block
}
}, [type])

// Cleanup function to reset loading state when component unmounts or workflow changes
return () => {
setIsLoadingScheduleInfo(false)
}
}, [type, currentWorkflowId])

// Get webhook information for the tooltip
useEffect(() => {
@@ -436,6 +451,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
blockId={id}
setIsConnecting={setIsConnecting}
isDisabled={!userPermissions.canEdit}
horizontalHandles={horizontalHandles}
/>

{/* Input Handle - Don't show for starter blocks */}
@@ -683,7 +699,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
{Object.entries(config.outputs).map(([key, value]) => (
<div key={key} className='mb-1'>
<span className='text-muted-foreground'>{key}</span>{' '}
{typeof value.type === 'object' ? (
{typeof value === 'object' ? (
<div className='mt-1 pl-3'>
{Object.entries(value.type).map(([typeKey, typeValue]) => (
<div key={typeKey} className='flex items-start'>
@@ -697,7 +713,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
))}
</div>
) : (
<span className='text-green-500'>{value.type as string}</span>
<span className='text-green-500'>{value as string}</span>
)}
</div>
))}
@@ -1,4 +1,5 @@
import { shallow } from 'zustand/shallow'
import { BlockPathCalculator } from '@/lib/block-path-calculator'
import { createLogger } from '@/lib/logs/console-logger'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -53,63 +54,6 @@ function extractFieldsFromSchema(schema: any): Field[] {
}))
}

/**
* Finds all blocks along paths leading to the target block
* This is a reverse traversal from the target node to find all ancestors
* along connected paths
* @param edges - List of all edges in the graph
* @param targetNodeId - ID of the target block we're finding connections for
* @returns Array of unique ancestor node IDs
*/
function findAllPathNodes(edges: any[], targetNodeId: string): string[] {
// We'll use a reverse topological sort approach by tracking "distance" from target
const nodeDistances = new Map<string, number>()
const visited = new Set<string>()
const queue: [string, number][] = [[targetNodeId, 0]] // [nodeId, distance]
const pathNodes = new Set<string>()

// Build a reverse adjacency list for faster traversal
const reverseAdjList: Record<string, string[]> = {}
for (const edge of edges) {
if (!reverseAdjList[edge.target]) {
reverseAdjList[edge.target] = []
}
reverseAdjList[edge.target].push(edge.source)
}

// BFS to find all ancestors and their shortest distance from target
while (queue.length > 0) {
const [currentNodeId, distance] = queue.shift()!

if (visited.has(currentNodeId)) {
// If we've seen this node before, update its distance if this path is shorter
const currentDistance = nodeDistances.get(currentNodeId) || Number.POSITIVE_INFINITY
if (distance < currentDistance) {
nodeDistances.set(currentNodeId, distance)
}
continue
}

visited.add(currentNodeId)
nodeDistances.set(currentNodeId, distance)

// Don't add the target node itself to the results
if (currentNodeId !== targetNodeId) {
pathNodes.add(currentNodeId)
}

// Get all incoming edges from the reverse adjacency list
const incomingNodeIds = reverseAdjList[currentNodeId] || []

// Add all source nodes to the queue with incremented distance
for (const sourceId of incomingNodeIds) {
queue.push([sourceId, distance + 1])
}
}

return Array.from(pathNodes)
}

export function useBlockConnections(blockId: string) {
const { edges, blocks } = useWorkflowStore(
(state) => ({
@@ -120,7 +64,7 @@ export function useBlockConnections(blockId: string) {
)

// Find all blocks along paths leading to this block
const allPathNodeIds = findAllPathNodes(edges, blockId)
const allPathNodeIds = BlockPathCalculator.findAllPathNodes(edges, blockId)

// Map each path node to a ConnectedBlock structure
const allPathConnections = allPathNodeIds
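The deleted helper is a reverse breadth-first traversal: start at the target block, follow edges backwards, and collect every ancestor that can reach it. The diff moves it behind `BlockPathCalculator.findAllPathNodes`, but the technique is compact enough to show as a self-contained sketch (the distance bookkeeping from the original is dropped here since only the ancestor set is consumed):

```typescript
// Sketch: collect every ancestor of `target` by BFS over reversed edges.
interface Edge {
  source: string
  target: string
}

function findAncestors(edges: Edge[], target: string): string[] {
  // Reverse adjacency list: node -> nodes with an edge into it.
  const incoming = new Map<string, string[]>()
  for (const { source, target: t } of edges) {
    const list = incoming.get(t) ?? []
    list.push(source)
    incoming.set(t, list)
  }

  const visited = new Set<string>([target])
  const queue = [target]
  const ancestors: string[] = []
  while (queue.length > 0) {
    const node = queue.shift()!
    for (const parent of incoming.get(node) ?? []) {
      if (!visited.has(parent)) {
        visited.add(parent)
        ancestors.push(parent)
        queue.push(parent)
      }
    }
  }
  return ancestors
}

// a -> b -> d and c -> d: both paths reach d.
console.log(findAncestors(
  [{ source: 'a', target: 'b' }, { source: 'b', target: 'd' }, { source: 'c', target: 'd' }],
  'd'
)) // ["b", "c", "a"]
```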
@@ -82,9 +82,9 @@ export function useWorkflowExecution() {
}

// If this was a streaming response and we have the final content, update it
if (streamContent && result.output?.response && typeof streamContent === 'string') {
if (streamContent && result.output && typeof streamContent === 'string') {
// Update the content with the final streaming content
enrichedResult.output.response.content = streamContent
enrichedResult.output.content = streamContent

// Also update any block logs to include the content where appropriate
if (enrichedResult.logs) {
@@ -97,10 +97,9 @@ export function useWorkflowExecution() {
if (
isStreamingBlock &&
(log.blockType === 'agent' || log.blockType === 'router') &&
log.output?.response
) {
log.output.response.content = streamContent
}
log.output
)
log.output.content = streamContent
}
}
}
@@ -122,7 +121,7 @@ export function useWorkflowExecution() {

return executionId
} catch (error) {
logger.error('Error persisting logs:', { error })
logger.error('Error persisting logs:', error)
return executionId
}
}
@@ -215,8 +214,8 @@ export function useWorkflowExecution() {
result.logs?.forEach((log: BlockLog) => {
if (streamedContent.has(log.blockId)) {
const content = streamedContent.get(log.blockId) || ''
if (log.output?.response) {
log.output.response.content = content
if (log.output) {
log.output.content = content
}
useConsoleStore.getState().updateConsole(log.blockId, content)
}
@@ -225,9 +224,9 @@ export function useWorkflowExecution() {
controller.enqueue(
encoder.encode(`data: ${JSON.stringify({ event: 'final', data: result })}\n\n`)
)
persistLogs(executionId, result).catch((err) => {
logger.error('Error persisting logs:', { error: err })
})
persistLogs(executionId, result).catch((err) =>
logger.error('Error persisting logs:', err)
)
}
} catch (error: any) {
controller.error(error)
@@ -437,7 +436,7 @@ export function useWorkflowExecution() {

const errorResult: ExecutionResult = {
success: false,
output: { response: {} },
output: {},
error: errorMessage,
logs: [],
}
@@ -560,7 +559,7 @@ export function useWorkflowExecution() {
// Create error result
const errorResult = {
success: false,
output: { response: {} },
output: {},
error: errorMessage,
logs: debugContext.blockLogs,
}
@@ -647,7 +646,7 @@ export function useWorkflowExecution() {

let currentResult: ExecutionResult = {
success: true,
output: { response: {} },
output: {},
logs: debugContext.blockLogs,
}

@@ -743,7 +742,7 @@ export function useWorkflowExecution() {
// Create error result
const errorResult = {
success: false,
output: { response: {} },
output: {},
error: errorMessage,
logs: debugContext.blockLogs,
}
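The recurring edit in this hook drops the intermediate `response` wrapper from execution results, so `output.response.content` becomes `output.content` and empty results shrink from `{ response: {} }` to `{}`. A minimal before/after sketch with the types simplified to the fields this hook touches:

```typescript
// Sketch: the same result expressed against the old nested shape and the
// new flattened shape this PR migrates to.
interface OldResult {
  success: boolean
  output: { response: { content?: string } }
}
interface NewResult {
  success: boolean
  output: { content?: string }
}

const before: OldResult = { success: true, output: { response: { content: 'hi' } } }
const after: NewResult = { success: true, output: { content: 'hi' } }

// Call sites change from output.response.content to output.content.
console.log(before.output.response.content, after.output.content)
```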
@@ -15,9 +15,14 @@ import { useFolderStore } from '@/stores/folders/store'
interface CreateMenuProps {
onCreateWorkflow: (folderId?: string) => void
isCollapsed?: boolean
isCreatingWorkflow?: boolean
}

export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
export function CreateMenu({
onCreateWorkflow,
isCollapsed,
isCreatingWorkflow = false,
}: CreateMenuProps) {
const [showFolderDialog, setShowFolderDialog] = useState(false)
const [folderName, setFolderName] = useState('')
const [isCreating, setIsCreating] = useState(false)
@@ -73,6 +78,7 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
onClick={handleCreateWorkflow}
onMouseEnter={() => setIsHoverOpen(true)}
onMouseLeave={() => setIsHoverOpen(false)}
disabled={isCreatingWorkflow}
>
<Plus
className={cn(
@@ -101,11 +107,17 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
onCloseAutoFocus={(e) => e.preventDefault()}
>
<button
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
className={cn(
'flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors',
isCreatingWorkflow
? 'cursor-not-allowed opacity-50'
: 'hover:bg-accent hover:text-accent-foreground'
)}
onClick={handleCreateWorkflow}
disabled={isCreatingWorkflow}
>
<File className='h-4 w-4' />
New Workflow
{isCreatingWorkflow ? 'Creating...' : 'New Workflow'}
</button>
<button
className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
@@ -19,6 +19,7 @@ const TOOLTIPS = {
debugMode: 'Enable visual debugging information during execution.',
autoConnect: 'Automatically connect nodes.',
autoFillEnvVars: 'Automatically fill API keys.',
autoPan: 'Automatically pan to active blocks during workflow execution.',
}

export function General() {
@@ -30,11 +31,13 @@ export function General() {
const isAutoConnectEnabled = useGeneralStore((state) => state.isAutoConnectEnabled)
const isDebugModeEnabled = useGeneralStore((state) => state.isDebugModeEnabled)
const isAutoFillEnvVarsEnabled = useGeneralStore((state) => state.isAutoFillEnvVarsEnabled)
const isAutoPanEnabled = useGeneralStore((state) => state.isAutoPanEnabled)

const setTheme = useGeneralStore((state) => state.setTheme)
const toggleAutoConnect = useGeneralStore((state) => state.toggleAutoConnect)
const toggleDebugMode = useGeneralStore((state) => state.toggleDebugMode)
const toggleAutoFillEnvVars = useGeneralStore((state) => state.toggleAutoFillEnvVars)
const toggleAutoPan = useGeneralStore((state) => state.toggleAutoPan)
const loadSettings = useGeneralStore((state) => state.loadSettings)

useEffect(() => {
@@ -66,6 +69,12 @@ export function General() {
}
}

const handleAutoPanChange = (checked: boolean) => {
if (checked !== isAutoPanEnabled) {
toggleAutoPan()
}
}

const handleRetry = () => {
setRetryCount((prev) => prev + 1)
}
@@ -200,6 +209,35 @@ export function General() {
disabled={isLoading}
/>
</div>
<div className='flex items-center justify-between py-1'>
<div className='flex items-center gap-2'>
<Label htmlFor='auto-pan' className='font-medium'>
Auto-pan during execution
</Label>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='sm'
className='h-7 p-1 text-gray-500'
aria-label='Learn more about auto-pan feature'
disabled={isLoading}
>
<Info className='h-5 w-5' />
</Button>
</TooltipTrigger>
<TooltipContent side='top' className='max-w-[300px] p-3'>
<p className='text-sm'>{TOOLTIPS.autoPan}</p>
</TooltipContent>
</Tooltip>
</div>
<Switch
id='auto-pan'
checked={isAutoPanEnabled}
onCheckedChange={handleAutoPanChange}
disabled={isLoading}
/>
</div>
</>
)}
</div>
@@ -41,6 +41,9 @@ export function Sidebar() {
const { isPending: sessionLoading } = useSession()
const userPermissions = useUserPermissionsContext()
const isLoading = workflowsLoading || sessionLoading

// Add state to prevent multiple simultaneous workflow creations
const [isCreatingWorkflow, setIsCreatingWorkflow] = useState(false)
const router = useRouter()
const params = useParams()
const workspaceId = params.workspaceId as string
@@ -108,7 +111,14 @@ export function Sidebar() {

// Create workflow handler
const handleCreateWorkflow = async (folderId?: string) => {
// Prevent multiple simultaneous workflow creations
if (isCreatingWorkflow) {
logger.info('Workflow creation already in progress, ignoring request')
return
}

try {
setIsCreatingWorkflow(true)
const id = await createWorkflow({
workspaceId: workspaceId || undefined,
folderId: folderId || undefined,
@@ -116,6 +126,8 @@ export function Sidebar() {
router.push(`/workspace/${workspaceId}/w/${id}`)
} catch (error) {
logger.error('Error creating workflow:', error)
} finally {
setIsCreatingWorkflow(false)
}
}

@@ -173,7 +185,11 @@ export function Sidebar() {
{isLoading ? <Skeleton className='h-4 w-16' /> : 'Workflows'}
</h2>
{!isCollapsed && !isLoading && (
<CreateMenu onCreateWorkflow={handleCreateWorkflow} isCollapsed={false} />
<CreateMenu
onCreateWorkflow={handleCreateWorkflow}
isCollapsed={false}
isCreatingWorkflow={isCreatingWorkflow}
/>
)}
</div>
<FolderTree
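The sidebar change guards `handleCreateWorkflow` with an `isCreatingWorkflow` flag so rapid clicks cannot start parallel creations, releasing the flag in `finally` so errors do not leave the button stuck. The same guard, sketched as a reusable wrapper:

```typescript
// Sketch: an async handler that ignores re-entrant calls while one is in flight.
function makeSingleFlight<T>(run: () => Promise<T>) {
  let inFlight = false
  return async (): Promise<T | undefined> => {
    if (inFlight) return undefined // drop duplicate requests
    inFlight = true
    try {
      return await run()
    } finally {
      inFlight = false // always re-enable, even on error
    }
  }
}

const createOnce = makeSingleFlight(async () => 'workflow-id')
void createOnce() // runs
void createOnce() // ignored while the first call is pending
```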
@@ -332,25 +332,9 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
tools: { type: 'json', required: false },
},
outputs: {
response: {
type: {
content: 'string',
model: 'string',
tokens: 'any',
toolCalls: 'any',
},
dependsOn: {
subBlockId: 'responseFormat',
condition: {
whenEmpty: {
content: 'string',
model: 'string',
tokens: 'any',
toolCalls: 'any',
},
whenFilled: 'json',
},
},
},
content: 'string',
model: 'string',
tokens: 'any',
toolCalls: 'any',
},
}
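Every block config from here down receives the same mechanical rewrite: the `outputs.response.type` wrapper is removed and the field map moves to the top level of `outputs`. Sketched in isolation:

```typescript
// Sketch: the shape change applied to the ~40 block configs in this diff.
const outputsBefore = {
  response: {
    type: {
      content: 'string',
      model: 'string',
    },
  },
}

const outputsAfter = {
  content: 'string',
  model: 'string',
}

// Consumers now read field types directly off `outputs[key]` instead of
// unwrapping `outputs.response.type[key]`.
console.log(Object.keys(outputsBefore.response.type), Object.keys(outputsAfter))
```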
@@ -179,12 +179,8 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
},
// Output structure depends on the operation, covered by AirtableResponse union type
outputs: {
response: {
type: {
records: 'json', // Optional: for list, create, updateMultiple
record: 'json', // Optional: for get, update single
metadata: 'json', // Required: present in all responses
},
},
records: 'json', // Optional: for list, create, updateMultiple
record: 'json', // Optional: for get, update single
metadata: 'json', // Required: present in all responses
},
}

@@ -62,12 +62,8 @@ export const ApiBlock: BlockConfig<RequestResponse> = {
params: { type: 'json', required: false },
},
outputs: {
response: {
type: {
data: 'any',
status: 'number',
headers: 'json',
},
},
data: 'any',
status: 'number',
headers: 'json',
},
}

@@ -112,13 +112,9 @@ export const AutoblocksBlock: BlockConfig<AutoblocksResponse> = {
environment: { type: 'string', required: true },
},
outputs: {
response: {
type: {
promptId: 'string',
version: 'string',
renderedPrompt: 'string',
templates: 'json',
},
},
promptId: 'string',
version: 'string',
renderedPrompt: 'string',
templates: 'json',
},
}

@@ -76,13 +76,9 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
save_browser_data: { type: 'boolean', required: false },
},
outputs: {
response: {
type: {
id: 'string',
success: 'boolean',
output: 'any',
steps: 'json',
},
},
id: 'string',
success: 'boolean',
output: 'any',
steps: 'json',
},
}

@@ -50,10 +50,6 @@ Plain Text: Best for populating a table in free-form style.
data: { type: 'json', required: true },
},
outputs: {
response: {
type: {
data: 'any',
},
},
data: 'any',
},
}

@@ -37,13 +37,9 @@ export const ConditionBlock: BlockConfig<ConditionBlockOutput> = {
},
inputs: {},
outputs: {
response: {
type: {
content: 'string',
conditionResult: 'boolean',
selectedPath: 'json',
selectedConditionId: 'string',
},
},
content: 'string',
conditionResult: 'boolean',
selectedPath: 'json',
selectedConditionId: 'string',
},
}

@@ -109,14 +109,10 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
content: { type: 'string', required: false },
},
outputs: {
response: {
type: {
ts: 'string',
pageId: 'string',
content: 'string',
title: 'string',
success: 'boolean',
},
},
ts: 'string',
pageId: 'string',
content: 'string',
title: 'string',
success: 'boolean',
},
}

@@ -149,11 +149,7 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
userId: { type: 'string', required: false },
},
outputs: {
response: {
type: {
message: 'string',
data: 'any',
},
},
message: 'string',
data: 'any',
},
}

@@ -39,11 +39,7 @@ export const ElevenLabsBlock: BlockConfig<ElevenLabsBlockResponse> = {
},

outputs: {
response: {
type: {
audioUrl: 'string',
},
},
audioUrl: 'string',
},

subBlocks: [

@@ -307,25 +307,9 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
content: { type: 'string' as ParamType, required: true },
},
outputs: {
response: {
type: {
content: 'string',
model: 'string',
tokens: 'any',
cost: 'any',
},
dependsOn: {
subBlockId: 'metrics',
condition: {
whenEmpty: {
content: 'string',
model: 'string',
tokens: 'any',
cost: 'any',
},
whenFilled: 'json',
},
},
},
},
content: 'string',
model: 'string',
tokens: 'any',
cost: 'any',
} as any,
}

@@ -190,16 +190,12 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
url: { type: 'string', required: false },
},
outputs: {
response: {
type: {
// Search output
results: 'json',
// Find Similar Links output
similarLinks: 'json',
// Answer output
answer: 'string',
citations: 'json',
},
},
// Search output
results: 'json',
// Find Similar Links output
similarLinks: 'json',
// Answer output
answer: 'string',
citations: 'json',
},
}

@@ -130,11 +130,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
file: { type: 'json', required: false },
},
outputs: {
response: {
type: {
files: 'json',
combinedContent: 'string',
},
},
files: 'json',
combinedContent: 'string',
},
}

@@ -90,16 +90,12 @@ export const FirecrawlBlock: BlockConfig<FirecrawlResponse> = {
scrapeOptions: { type: 'json', required: false },
},
outputs: {
response: {
type: {
// Scrape output
markdown: 'string',
html: 'any',
metadata: 'json',
// Search output
data: 'json',
warning: 'any',
},
},
// Scrape output
markdown: 'string',
html: 'any',
metadata: 'json',
// Search output
data: 'json',
warning: 'any',
},
}

@@ -27,11 +27,7 @@ export const FunctionBlock: BlockConfig<CodeExecutionOutput> = {
timeout: { type: 'number', required: false },
},
outputs: {
response: {
type: {
result: 'any',
stdout: 'string',
},
},
result: 'any',
stdout: 'string',
},
}

@@ -167,11 +167,7 @@ export const GitHubBlock: BlockConfig<GitHubResponse> = {
branch: { type: 'string', required: false },
},
outputs: {
response: {
type: {
content: 'string',
metadata: 'json',
},
},
content: 'string',
metadata: 'json',
},
}

@@ -179,11 +179,7 @@ export const GmailBlock: BlockConfig<GmailToolResponse> = {
maxResults: { type: 'number', required: false },
},
outputs: {
response: {
type: {
content: 'string',
metadata: 'json',
},
},
content: 'string',
metadata: 'json',
},
}

@@ -87,11 +87,7 @@ export const GoogleSearchBlock: BlockConfig<GoogleSearchResponse> = {
},

outputs: {
response: {
type: {
items: 'json',
searchInformation: 'json',
} as any,
},
items: 'json',
searchInformation: 'json',
},
}

@@ -284,11 +284,7 @@ export const GoogleCalendarBlock: BlockConfig<GoogleCalendarResponse> = {
sendUpdates: { type: 'string', required: false },
},
outputs: {
response: {
type: {
content: 'string',
metadata: 'json',
},
},
content: 'string',
metadata: 'json',
},
}

@@ -181,12 +181,8 @@ export const GoogleDocsBlock: BlockConfig<GoogleDocsResponse> = {
content: { type: 'string', required: false },
},
outputs: {
response: {
type: {
content: 'string',
metadata: 'json',
updatedContent: 'boolean',
},
},
content: 'string',
metadata: 'json',
updatedContent: 'boolean',
},
}

@@ -265,11 +265,7 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
pageSize: { type: 'number', required: false },
},
outputs: {
response: {
type: {
file: 'json',
files: 'json',
},
},
file: 'json',
files: 'json',
},
}

@@ -211,16 +211,12 @@ export const GoogleSheetsBlock: BlockConfig<GoogleSheetsResponse> = {
insertDataOption: { type: 'string', required: false },
},
outputs: {
response: {
type: {
data: 'json',
metadata: 'json',
updatedRange: 'string',
updatedRows: 'number',
updatedColumns: 'number',
updatedCells: 'number',
tableRange: 'string',
},
},
data: 'json',
metadata: 'json',
updatedRange: 'string',
updatedRows: 'number',
updatedColumns: 'number',
updatedCells: 'number',
tableRange: 'string',
},
}

@@ -82,17 +82,13 @@ export const GuestyBlock: BlockConfig<GuestyReservationResponse | GuestyGuestRes
phoneNumber: { type: 'string', required: false },
},
outputs: {
response: {
type: {
id: 'string',
guest: 'json',
checkIn: 'string',
checkOut: 'string',
status: 'string',
listing: 'json',
money: 'json',
guests: 'json',
},
},
id: 'string',
guest: 'json',
checkIn: 'string',
checkOut: 'string',
status: 'string',
listing: 'json',
money: 'json',
guests: 'json',
},
}

@@ -114,12 +114,8 @@ export const HuggingFaceBlock: BlockConfig<HuggingFaceChatResponse> = {
apiKey: { type: 'string', required: true },
},
outputs: {
response: {
type: {
content: 'string',
model: 'string',
usage: 'json',
},
},
content: 'string',
model: 'string',
usage: 'json',
},
}

@@ -153,12 +153,8 @@ export const ImageGeneratorBlock: BlockConfig<DalleResponse> = {
apiKey: { type: 'string', required: true },
},
outputs: {
response: {
type: {
content: 'string',
image: 'string',
metadata: 'json',
},
},
content: 'string',
image: 'string',
metadata: 'json',
},
}

@@ -51,10 +51,6 @@ export const JinaBlock: BlockConfig<ReadUrlResponse> = {
apiKey: { type: 'string', required: true },
},
outputs: {
response: {
type: {
content: 'string',
},
},
content: 'string',
},
}

@@ -187,17 +187,13 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
issueType: { type: 'string', required: false },
},
outputs: {
response: {
type: {
ts: 'string',
issueKey: 'string',
summary: 'string',
description: 'string',
created: 'string',
updated: 'string',
success: 'boolean',
url: 'string',
},
},
ts: 'string',
issueKey: 'string',
summary: 'string',
description: 'string',
created: 'string',
updated: 'string',
success: 'boolean',
url: 'string',
},
}
@@ -6,13 +6,13 @@ export const KnowledgeBlock: BlockConfig = {
name: 'Knowledge',
description: 'Use vector search',
longDescription:
'Perform semantic vector search across one or more knowledge bases or upload new chunks to documents. Uses advanced AI embeddings to understand meaning and context for search operations.',
'Perform semantic vector search across knowledge bases, upload individual chunks to existing documents, or create new documents from text content. Uses advanced AI embeddings to understand meaning and context for search operations.',
bgColor: '#00B0B0',
icon: PackageSearchIcon,
category: 'blocks',
docsLink: 'https://docs.simstudio.ai/blocks/knowledge',
tools: {
access: ['knowledge_search', 'knowledge_upload_chunk'],
access: ['knowledge_search', 'knowledge_upload_chunk', 'knowledge_create_document'],
config: {
tool: (params) => {
switch (params.operation) {
@@ -20,6 +20,8 @@ export const KnowledgeBlock: BlockConfig = {
return 'knowledge_search'
case 'upload_chunk':
return 'knowledge_upload_chunk'
case 'create_document':
return 'knowledge_create_document'
default:
return 'knowledge_search'
}
@@ -36,13 +38,9 @@ export const KnowledgeBlock: BlockConfig = {
content: { type: 'string', required: false },
},
outputs: {
response: {
type: {
results: 'json',
query: 'string',
totalResults: 'number',
},
},
results: 'json',
query: 'string',
totalResults: 'number',
},
subBlocks: [
{
@@ -53,6 +51,7 @@ export const KnowledgeBlock: BlockConfig = {
options: [
{ label: 'Search', id: 'search' },
{ label: 'Upload Chunk', id: 'upload_chunk' },
{ label: 'Create Document', id: 'create_document' },
],
value: () => 'search',
},
@@ -72,7 +71,7 @@ export const KnowledgeBlock: BlockConfig = {
layout: 'full',
placeholder: 'Select knowledge base',
multiSelect: false,
condition: { field: 'operation', value: 'upload_chunk' },
condition: { field: 'operation', value: ['upload_chunk', 'create_document'] },
},
{
id: 'query',
@@ -107,5 +106,22 @@ export const KnowledgeBlock: BlockConfig = {
rows: 6,
condition: { field: 'operation', value: 'upload_chunk' },
},
{
id: 'name',
title: 'Document Name',
type: 'short-input',
layout: 'full',
placeholder: 'Enter document name',
condition: { field: 'operation', value: ['create_document'] },
},
{
id: 'content',
title: 'Document Content',
type: 'long-input',
layout: 'full',
placeholder: 'Enter the document content',
rows: 6,
condition: { field: 'operation', value: ['create_document'] },
},
],
}
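The Knowledge block's subblocks now use array-valued `condition.value`, so a single field (like the knowledge base selector) can render for several operations at once. A hedged sketch of how such a condition might be evaluated; the real matcher lives elsewhere in the codebase, and the semantics here are an assumption for illustration:

```typescript
// Sketch (assumed semantics): a condition matches when the current field
// value equals the configured value, or is contained in a configured array.
interface Condition {
  field: string
  value: string | string[]
}

function conditionMatches(cond: Condition, state: Record<string, string>): boolean {
  const current = state[cond.field]
  return Array.isArray(cond.value) ? cond.value.includes(current) : cond.value === current
}

const kbCondition: Condition = { field: 'operation', value: ['upload_chunk', 'create_document'] }
console.log(conditionMatches(kbCondition, { operation: 'create_document' })) // true
console.log(conditionMatches(kbCondition, { operation: 'search' })) // false
```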
@@ -99,11 +99,7 @@ export const LinearBlock: BlockConfig<LinearResponse> = {
description: { type: 'string', required: false },
},
outputs: {
response: {
type: {
issues: 'json',
issue: 'json',
},
},
issues: 'json',
issue: 'json',
},
}

@@ -63,11 +63,7 @@ export const LinkupBlock: BlockConfig<LinkupSearchToolResponse> = {
},

outputs: {
response: {
type: {
answer: 'string',
sources: 'json',
},
},
answer: 'string',
sources: 'json',
},
}

@@ -290,12 +290,8 @@ export const Mem0Block: BlockConfig<Mem0Response> = {
limit: { type: 'number', required: false },
},
outputs: {
response: {
type: {
ids: 'any',
memories: 'any',
searchResults: 'any',
},
},
ids: 'any',
memories: 'any',
searchResults: 'any',
},
}

@@ -105,12 +105,8 @@ export const MemoryBlock: BlockConfig = {
content: { type: 'string', required: false },
},
outputs: {
response: {
type: {
memories: 'any',
id: 'string',
},
},
memories: 'any',
id: 'string',
},
subBlocks: [
{

@@ -199,17 +199,13 @@ export const MicrosoftExcelBlock: BlockConfig<MicrosoftExcelResponse> = {
valueInputOption: { type: 'string', required: false },
},
outputs: {
response: {
type: {
data: 'json',
metadata: 'json',
updatedRange: 'string',
updatedRows: 'number',
updatedColumns: 'number',
updatedCells: 'number',
index: 'number',
values: 'json',
},
},
data: 'json',
metadata: 'json',
updatedRange: 'string',
updatedRows: 'number',
updatedColumns: 'number',
updatedCells: 'number',
index: 'number',
values: 'json',
},
}

@@ -169,12 +169,8 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
content: { type: 'string', required: true },
},
outputs: {
response: {
type: {
content: 'string',
metadata: 'json',
updatedContent: 'boolean',
},
},
content: 'string',
metadata: 'json',
updatedContent: 'boolean',
},
}

@@ -202,11 +202,7 @@ export const MistralParseBlock: BlockConfig<MistralParserOutput> = {
// imageMinSize: { type: 'string', required: false },
},
outputs: {
response: {
type: {
content: 'string',
metadata: 'json',
},
},
content: 'string',
metadata: 'json',
},
}

@@ -174,11 +174,7 @@ export const NotionBlock: BlockConfig<NotionResponse> = {
properties: { type: 'string', required: false },
},
outputs: {
response: {
type: {
content: 'string',
metadata: 'any',
},
},
content: 'string',
metadata: 'any',
},
}

@@ -49,12 +49,8 @@ export const OpenAIBlock: BlockConfig = {
apiKey: { type: 'string', required: true },
},
outputs: {
response: {
type: {
embeddings: 'json',
model: 'string',
usage: 'json',
},
},
embeddings: 'json',
model: 'string',
usage: 'json',
},
}

@@ -140,11 +140,7 @@ export const OutlookBlock: BlockConfig<
maxResults: { type: 'number', required: false },
},
outputs: {
response: {
type: {
message: 'string',
results: 'json',
},
},
message: 'string',
results: 'json',
},
}

@@ -106,12 +106,8 @@ export const PerplexityBlock: BlockConfig<PerplexityChatResponse> = {
apiKey: { type: 'string', required: true },
},
outputs: {
response: {
type: {
content: 'string',
model: 'string',
usage: 'json',
},
},
content: 'string',
model: 'string',
usage: 'json',
},
}

@@ -268,15 +268,11 @@ export const PineconeBlock: BlockConfig<PineconeResponse> = {
},

outputs: {
response: {
type: {
matches: 'any',
upsertedCount: 'any',
data: 'any',
model: 'any',
vector_type: 'any',
usage: 'any',
},
},
matches: 'any',
upsertedCount: 'any',
data: 'any',
model: 'any',
vector_type: 'any',
usage: 'any',
},
}

@@ -181,13 +181,9 @@ export const RedditBlock: BlockConfig<
commentLimit: { type: 'number', required: false },
},
outputs: {
response: {
type: {
subreddit: 'string',
posts: 'json',
post: 'json',
comments: 'json',
},
},
subreddit: 'string',
posts: 'json',
post: 'json',
comments: 'json',
},
}

@@ -92,12 +92,8 @@ export const ResponseBlock: BlockConfig<ResponseBlockOutput> = {
},
},
outputs: {
response: {
type: {
data: 'json',
status: 'number',
headers: 'json',
},
},
data: 'json',
status: 'number',
headers: 'json',
},
}

@@ -180,14 +180,10 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
apiKey: { type: 'string', required: true },
},
outputs: {
response: {
type: {
content: 'string',
model: 'string',
tokens: 'any',
cost: 'any',
selectedPath: 'json',
},
},
content: 'string',
model: 'string',
tokens: 'any',
cost: 'any',
selectedPath: 'json',
},
}

@@ -96,11 +96,7 @@ export const S3Block: BlockConfig<S3Response> = {
s3Uri: { type: 'string', required: true },
},
outputs: {
response: {
type: {
url: 'string',
metadata: 'json',
},
},
url: 'string',
metadata: 'json',
},
}

@@ -69,10 +69,6 @@ export const SerperBlock: BlockConfig<SearchResponse> = {
type: { type: 'string', required: false },
},
outputs: {
response: {
type: {
searchResults: 'json',
},
},
searchResults: 'json',
},
}

@@ -138,11 +138,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
text: { type: 'string', required: true },
},
outputs: {
response: {
type: {
ts: 'string',
channel: 'string',
},
},
ts: 'string',
channel: 'string',
},
}

@@ -64,10 +64,6 @@ export const StagehandBlock: BlockConfig<StagehandExtractResponse> = {
apiKey: { type: 'string', required: true },
},
outputs: {
response: {
type: {
data: 'json',
},
},
data: 'json',
},
}

@@ -83,11 +83,7 @@ export const StagehandAgentBlock: BlockConfig<StagehandAgentResponse> = {
outputSchema: { type: 'json', required: false },
},
outputs: {
response: {
type: {
agentResult: 'json',
structuredOutput: 'any',
},
},
agentResult: 'json',
structuredOutput: 'any',
},
}
@@ -1,14 +1,7 @@
import { StartIcon } from '@/components/icons'
import type { ToolResponse } from '@/tools/types'
import type { BlockConfig } from '../types'

interface StarterBlockOutput extends ToolResponse {
output: {
input: any
}
}

export const StarterBlock: BlockConfig<StarterBlockOutput> = {
export const StarterBlock: BlockConfig = {
type: 'starter',
name: 'Starter',
description: 'Start workflow',
@@ -189,11 +182,5 @@ export const StarterBlock: BlockConfig<StarterBlockOutput> = {
inputs: {
input: { type: 'json', required: false },
},
outputs: {
response: {
type: {
input: 'any',
},
},
},
outputs: {},
}

@@ -109,11 +109,7 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
data: { type: 'string', required: false, requiredForToolCall: true },
},
outputs: {
response: {
type: {
message: 'string',
results: 'json',
},
},
message: 'string',
results: 'json',
},
}

@@ -98,15 +98,11 @@ export const TavilyBlock: BlockConfig<TavilyResponse> = {
extract_depth: { type: 'string', required: false },
},
outputs: {
response: {
type: {
results: 'json',
answer: 'any',
query: 'string',
content: 'string',
title: 'string',
url: 'string',
},
},
results: 'json',
answer: 'any',
query: 'string',
content: 'string',
title: 'string',
url: 'string',
},
}

@@ -55,11 +55,7 @@ export const TelegramBlock: BlockConfig<TelegramMessageResponse> = {
text: { type: 'string', required: true },
},
outputs: {
response: {
type: {
ok: 'boolean',
result: 'json',
},
},
ok: 'boolean',
result: 'json',
},
}