Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-09 15:07:55 -05:00)
feat(deployed-chat): added file upload to workflow execute API, added to deployed chat, updated chat panel (#1588)
* feat(deployed-chat): updated chat panel UI, deployed chat and API can now accept files
* added nested tag dropdown for files
* added duplicate file validation to chat panel
* update docs & SDKs
* fixed build
* rm extraneous comments
* ack PR comments, cut multiple DB roundtrips for permissions & api key checks in api/workflows
* allow read-only users to access deployment info, but not take actions
* add downloadable file to logs for files passed in via API
* protect files/serve route that is only used client-side

---------

Co-authored-by: waleed <waleed>
@@ -593,14 +593,91 @@ async function executeClientSideWorkflow() {
     });

     console.log('Workflow result:', result);

     // Update UI with result
     document.getElementById('result')!.textContent =
       JSON.stringify(result.output, null, 2);
   } catch (error) {
     console.error('Error:', error);
   }
 }
+```
+
+### File Upload
+
+File objects are automatically detected and converted to base64 format. Include them in your input under the field name matching your workflow's API trigger input format.
+
+The SDK converts File objects to this format:
+```typescript
+{
+  type: 'file',
+  data: 'data:mime/type;base64,base64data',
+  name: 'filename',
+  mime: 'mime/type'
+}
+```
+
+Alternatively, you can manually provide files using the URL format:
+```typescript
+{
+  type: 'url',
+  data: 'https://example.com/file.pdf',
+  name: 'file.pdf',
+  mime: 'application/pdf'
+}
+```
+
+<Tabs items={['Browser', 'Node.js']}>
+<Tab value="Browser">
+```typescript
+import { SimStudioClient } from 'simstudio-ts-sdk';
+
+const client = new SimStudioClient({
+  apiKey: process.env.NEXT_PUBLIC_SIM_API_KEY!
+});
+
+// From file input
+async function handleFileUpload(event: Event) {
+  const input = event.target as HTMLInputElement;
+  const files = Array.from(input.files || []);
+
+  // Include files under the field name from your API trigger's input format
+  const result = await client.executeWorkflow('workflow-id', {
+    input: {
+      documents: files, // Must match your workflow's "files" field name
+      instructions: 'Analyze these documents'
+    }
+  });
+
+  console.log('Result:', result);
+}
+```
+</Tab>
+<Tab value="Node.js">
+```typescript
+import { SimStudioClient } from 'simstudio-ts-sdk';
+import fs from 'fs';
+
+const client = new SimStudioClient({
+  apiKey: process.env.SIM_API_KEY!
+});
+
+// Read file and create File object
+const fileBuffer = fs.readFileSync('./document.pdf');
+const file = new File([fileBuffer], 'document.pdf', {
+  type: 'application/pdf'
+});
+
+// Include files under the field name from your API trigger's input format
+const result = await client.executeWorkflow('workflow-id', {
+  input: {
+    documents: [file], // Must match your workflow's "files" field name
+    query: 'Summarize this document'
+  }
+});
+```
+</Tab>
+</Tabs>

 // Attach to button click
 document.getElementById('executeBtn')?.addEventListener('click', executeClientSideWorkflow);
@@ -22,9 +22,17 @@ The API trigger exposes your workflow as a secure HTTP endpoint. Send JSON data
 />
 </div>

-Add an **Input Format** field for each parameter. Runtime output keys mirror the schema and are also available under `<api.input>`.
+Add an **Input Format** field for each parameter. Supported types:

-Manual runs in the editor use the `value` column so you can test without sending a request. During execution the resolver populates both `<api.userId>` and `<api.input.userId>`.
+- **string** - Text values
+- **number** - Numeric values
+- **boolean** - True/false values
+- **json** - JSON objects
+- **files** - File uploads (access via `<api.fieldName[0].url>`, `<api.fieldName[0].name>`, etc.)
+
+Runtime output keys mirror the schema and are available under `<api.input>`.
+
+Manual runs in the editor use the `value` column so you can test without sending a request. During execution the resolver populates both `<api.fieldName>` and `<api.input.fieldName>`.

 ## Request Example

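Editor's aside (not part of the diff): a minimal sketch of how an Input Format maps to a request body and to block references. The field names `documents` and `instructions` are hypothetical.

```typescript
// Hypothetical Input Format: documents (files), instructions (string).
// A request body that matches it would look like:
const exampleBody = {
  documents: [
    { type: 'url', data: 'https://example.com/report.pdf', name: 'report.pdf', mime: 'application/pdf' },
  ],
  instructions: 'Summarize the attached report',
};

// Downstream blocks can then reference:
//   <api.instructions>      -> 'Summarize the attached report'
//   <api.documents[0].url>  -> signed download URL of the first file
//   <api.input>             -> the entire structured request body
```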
@@ -123,6 +131,53 @@ data: {"blockId":"agent1-uuid","chunk":" complete"}
 | `<api.field>` | Field defined in the Input Format |
 | `<api.input>` | Entire structured request body |

+### File Upload Format
+
+The API accepts files in two formats:
+
+**1. Base64-encoded files** (recommended for SDKs):
+```json
+{
+  "documents": [{
+    "type": "file",
+    "data": "data:application/pdf;base64,JVBERi0xLjQK...",
+    "name": "document.pdf",
+    "mime": "application/pdf"
+  }]
+}
+```
+- Maximum file size: 20MB per file
+- Files are uploaded to cloud storage and converted to UserFile objects with all properties
+
+**2. Direct URL references**:
+```json
+{
+  "documents": [{
+    "type": "url",
+    "data": "https://example.com/document.pdf",
+    "name": "document.pdf",
+    "mime": "application/pdf"
+  }]
+}
+```
+- File is not uploaded, URL is passed through directly
+- Useful for referencing existing files
+
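Editor's sketch for orientation, not part of the commit: posting the base64 format above to the workflow execution endpoint could look roughly like this. The host, workflow ID, and the `documents` field name are placeholders; the `/api/workflows/<id>/execute` path and `X-API-Key` header are inferred from the route code changed later in this diff.

```typescript
// Sketch: POST a base64-encoded file to an API-triggered workflow.
// Assumes the workflow's Input Format has a files-type field named "documents".
const pdfBase64 = '...'; // base64 of the file contents

const response = await fetch('https://YOUR_SIM_HOST/api/workflows/WORKFLOW_ID/execute', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'X-API-Key': process.env.SIM_API_KEY!,
  },
  body: JSON.stringify({
    documents: [
      {
        type: 'file',
        data: `data:application/pdf;base64,${pdfBase64}`,
        name: 'document.pdf',
        mime: 'application/pdf',
      },
    ],
  }),
});

console.log(await response.json());
```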
+### File Properties
+
+For files, access all properties:
+
+| Property | Description | Type |
+|----------|-------------|------|
+| `<api.fieldName[0].url>` | Signed download URL | string |
+| `<api.fieldName[0].name>` | Original filename | string |
+| `<api.fieldName[0].size>` | File size in bytes | number |
+| `<api.fieldName[0].type>` | MIME type | string |
+| `<api.fieldName[0].uploadedAt>` | Upload timestamp (ISO 8601) | string |
+| `<api.fieldName[0].expiresAt>` | URL expiry timestamp (ISO 8601) | string |
+
+For URL-referenced files, the same properties are available except `uploadedAt` and `expiresAt` since the file is not uploaded to our storage.

 If no Input Format is defined, the executor exposes the raw JSON at `<api.input>` only.

 <Callout type="warning">
@@ -24,13 +24,24 @@ The Chat trigger creates a conversational interface for your workflow. Deploy yo
 The trigger writes three fields that downstream blocks can reference:

-| Reference | Description |
-|-----------|-------------|
-| `<chat.input>` | Latest user message |
-| `<chat.conversationId>` | Conversation thread ID |
-| `<chat.files>` | Optional uploaded files |
+| Reference | Description | Type |
+|-----------|-------------|------|
+| `<chat.input>` | Latest user message | string |
+| `<chat.conversationId>` | Conversation thread ID | string |
+| `<chat.files>` | Optional uploaded files | files array |

-Files include `name`, `mimeType`, and a signed download `url`.
+### File Properties
+
+Access individual file properties using array indexing:
+
+| Property | Description | Type |
+|----------|-------------|------|
+| `<chat.files[0].url>` | Signed download URL | string |
+| `<chat.files[0].name>` | Original filename | string |
+| `<chat.files[0].size>` | File size in bytes | number |
+| `<chat.files[0].type>` | MIME type | string |
+| `<chat.files[0].uploadedAt>` | Upload timestamp (ISO 8601) | string |
+| `<chat.files[0].expiresAt>` | URL expiry timestamp (ISO 8601) | string |
+
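Another editor sketch, not part of the commit: based on the chat route changes later in this diff, a deployed-chat request that attaches a file could look roughly like the following. The chat endpoint URL is a placeholder; the shape of each `files` entry (`dataUrl`, `name`, `type`) follows the `processChatFiles` signature added in this PR.

```typescript
// Sketch: send a chat message with one attached file to a deployed chat.
const buffer = Buffer.from(await (await fetch('https://example.com/report.pdf')).arrayBuffer());
const dataUrl = `data:application/pdf;base64,${buffer.toString('base64')}`;

const res = await fetch('https://YOUR_DEPLOYED_CHAT_URL/api/chat', { // placeholder endpoint
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    input: 'Summarize the attached report',
    conversationId: 'conversation-123',
    files: [{ dataUrl, name: 'report.pdf', type: 'application/pdf' }],
  }),
});

console.log(res.status);
```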
 ## Usage Notes

@@ -1116,12 +1116,20 @@ export function createMockDatabase(options: MockDatabaseOptions = {}) {
 const createUpdateChain = () => ({
   set: vi.fn().mockImplementation(() => ({
-    where: vi.fn().mockImplementation(() => {
-      if (updateOptions.throwError) {
-        return Promise.reject(createDbError('update', updateOptions.errorMessage))
-      }
-      return Promise.resolve(updateOptions.results)
-    }),
+    where: vi.fn().mockImplementation(() => ({
+      returning: vi.fn().mockImplementation(() => {
+        if (updateOptions.throwError) {
+          return Promise.reject(createDbError('update', updateOptions.errorMessage))
+        }
+        return Promise.resolve(updateOptions.results)
+      }),
+      then: vi.fn().mockImplementation((resolve) => {
+        if (updateOptions.throwError) {
+          return Promise.reject(createDbError('update', updateOptions.errorMessage))
+        }
+        return Promise.resolve(updateOptions.results).then(resolve)
+      }),
+    })),
   })),
 })

@@ -6,6 +6,7 @@ import { createLogger } from '@/lib/logs/console/logger'
 import { generateRequestId } from '@/lib/utils'
 import {
   addCorsHeaders,
+  processChatFiles,
   setChatAuthCookie,
   validateAuthToken,
   validateChatAuth,
@@ -75,7 +76,7 @@ export async function POST(
   }

   // Use the already parsed body
-  const { input, password, email, conversationId } = parsedBody
+  const { input, password, email, conversationId, files } = parsedBody

   // If this is an authentication request (has password or email but no input),
   // set auth cookie and return success
@@ -88,8 +89,8 @@ export async function POST(
     return response
   }

-  // For chat messages, create regular response
-  if (!input) {
+  // For chat messages, create regular response (allow empty input if files are present)
+  if (!input && (!files || files.length === 0)) {
     return addCorsHeaders(createErrorResponse('No input provided', 400), request)
   }

@@ -108,7 +109,6 @@ export async function POST(
   }

   try {
-    // Transform outputConfigs to selectedOutputs format (blockId_attribute format)
     const selectedOutputs: string[] = []
     if (deployment.outputConfigs && Array.isArray(deployment.outputConfigs)) {
       for (const config of deployment.outputConfigs) {
@@ -123,11 +123,30 @@ export async function POST(
     const { SSE_HEADERS } = await import('@/lib/utils')
     const { createFilteredResult } = await import('@/app/api/workflows/[id]/execute/route')

+    const workflowInput: any = { input, conversationId }
+    if (files && Array.isArray(files) && files.length > 0) {
+      logger.debug(`[${requestId}] Processing ${files.length} attached files`)
+
+      const executionId = crypto.randomUUID()
+      const executionContext = {
+        workspaceId: deployment.userId,
+        workflowId: deployment.workflowId,
+        executionId,
+      }
+
+      const uploadedFiles = await processChatFiles(files, executionContext, requestId)
+
+      if (uploadedFiles.length > 0) {
+        workflowInput.files = uploadedFiles
+        logger.info(`[${requestId}] Successfully processed ${uploadedFiles.length} files`)
+      }
+    }
+
     const stream = await createStreamingResponse({
       requestId,
       workflow: { id: deployment.workflowId, userId: deployment.userId, isDeployed: true },
-      input: { input, conversationId }, // Format for chat_trigger
-      executingUserId: deployment.userId, // Use workflow owner's ID for chat deployments
+      input: workflowInput,
+      executingUserId: deployment.userId,
       streamConfig: {
         selectedOutputs,
         isSecureMode: true,
@@ -6,6 +6,8 @@ import { isDev } from '@/lib/environment'
 import { createLogger } from '@/lib/logs/console/logger'
 import { hasAdminPermission } from '@/lib/permissions/utils'
 import { decryptSecret } from '@/lib/utils'
+import { uploadExecutionFile } from '@/lib/workflows/execution-file-storage'
+import type { UserFile } from '@/executor/types'

 const logger = createLogger('ChatAuthUtils')

@@ -263,3 +265,61 @@ export async function validateChatAuth(
   // Unknown auth type
   return { authorized: false, error: 'Unsupported authentication type' }
 }

+/**
+ * Process and upload chat files to execution storage
+ * Handles both base64 dataUrl format and direct URL pass-through
+ */
+export async function processChatFiles(
+  files: Array<{ dataUrl?: string; url?: string; name: string; type: string }>,
+  executionContext: { workspaceId: string; workflowId: string; executionId: string },
+  requestId: string
+): Promise<UserFile[]> {
+  const uploadedFiles: UserFile[] = []
+
+  for (const file of files) {
+    try {
+      if (file.dataUrl) {
+        const dataUrlPrefix = 'data:'
+        const base64Prefix = ';base64,'
+
+        if (!file.dataUrl.startsWith(dataUrlPrefix)) {
+          logger.warn(`[${requestId}] Invalid dataUrl format for file: ${file.name}`)
+          continue
+        }
+
+        const base64Index = file.dataUrl.indexOf(base64Prefix)
+        if (base64Index === -1) {
+          logger.warn(
+            `[${requestId}] Invalid dataUrl format (no base64 marker) for file: ${file.name}`
+          )
+          continue
+        }
+
+        const mimeType = file.dataUrl.substring(dataUrlPrefix.length, base64Index)
+        const base64Data = file.dataUrl.substring(base64Index + base64Prefix.length)
+        const buffer = Buffer.from(base64Data, 'base64')
+
+        logger.debug(`[${requestId}] Uploading file to S3: ${file.name} (${buffer.length} bytes)`)
+
+        const userFile = await uploadExecutionFile(
+          executionContext,
+          buffer,
+          file.name,
+          mimeType || file.type
+        )
+
+        uploadedFiles.push(userFile)
+        logger.debug(`[${requestId}] Successfully uploaded ${file.name} with URL: ${userFile.url}`)
+      } else if (file.url) {
+        uploadedFiles.push(file as UserFile)
+        logger.debug(`[${requestId}] Using existing URL for file: ${file.name}`)
+      }
+    } catch (error) {
+      logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
+      throw new Error(`Failed to upload file: ${file.name}`)
+    }
+  }
+
+  return uploadedFiles
+}
@@ -1,10 +1,11 @@
 import { readFile } from 'fs/promises'
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextRequest } from 'next/server'
+import { NextResponse } from 'next/server'
 import { createLogger } from '@/lib/logs/console/logger'
 import { downloadFile, getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
 import { S3_KB_CONFIG } from '@/lib/uploads/setup'
 import '@/lib/uploads/setup.server'
+import { getSession } from '@/lib/auth'
 import {
   createErrorResponse,
   createFileResponse,
@@ -15,9 +16,6 @@ import {
 const logger = createLogger('FilesServeAPI')

-/**
- * Main API route handler for serving files
- */
 export async function GET(
   request: NextRequest,
   { params }: { params: Promise<{ path: string[] }> }
@@ -31,27 +29,30 @@ export async function GET(
     logger.info('File serve request:', { path })

-    // Join the path segments to get the filename or cloud key
-    const fullPath = path.join('/')
+    const session = await getSession()
+    if (!session?.user?.id) {
+      logger.warn('Unauthorized file access attempt', { path })
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }

-    // Check if this is a cloud file (path starts with 's3/' or 'blob/')
+    const userId = session.user.id
+    const fullPath = path.join('/')
     const isS3Path = path[0] === 's3'
     const isBlobPath = path[0] === 'blob'
     const isCloudPath = isS3Path || isBlobPath
+    const cloudKey = isCloudPath ? path.slice(1).join('/') : fullPath
+    const isExecutionFile = cloudKey.split('/').length >= 3 && !cloudKey.startsWith('kb/')

-    // Use cloud handler if in production, path explicitly specifies cloud storage, or we're using cloud storage
-    if (isUsingCloudStorage() || isCloudPath) {
-      // Extract the actual key (remove 's3/' or 'blob/' prefix if present)
-      const cloudKey = isCloudPath ? path.slice(1).join('/') : fullPath
-
-      // Get bucket type from query parameter
-      const bucketType = request.nextUrl.searchParams.get('bucket')
-
-      return await handleCloudProxy(cloudKey, bucketType)
+    if (!isExecutionFile) {
+      logger.info('Authenticated file access granted', { userId, path: cloudKey })
     }

-    // Use local handler for local files
-    return await handleLocalFile(fullPath)
+    if (isUsingCloudStorage() || isCloudPath) {
+      const bucketType = request.nextUrl.searchParams.get('bucket')
+      return await handleCloudProxy(cloudKey, bucketType, userId)
+    }
+
+    return await handleLocalFile(fullPath, userId)
   } catch (error) {
     logger.error('Error serving file:', error)

@@ -63,10 +64,7 @@ export async function GET(
   }
 }

-/**
- * Handle local file serving
- */
-async function handleLocalFile(filename: string): Promise<NextResponse> {
+async function handleLocalFile(filename: string, userId: string): Promise<NextResponse> {
   try {
     const filePath = findLocalFile(filename)

@@ -77,6 +75,8 @@ async function handleLocalFile(filename: string): Promise<NextResponse> {
     const fileBuffer = await readFile(filePath)
     const contentType = getContentType(filename)

+    logger.info('Local file served', { userId, filename, size: fileBuffer.length })
+
     return createFileResponse({
       buffer: fileBuffer,
       contentType,
@@ -112,12 +112,10 @@ async function downloadKBFile(cloudKey: string): Promise<Buffer> {
   throw new Error(`Unsupported storage provider for KB files: ${storageProvider}`)
 }

-/**
- * Proxy cloud file through our server
- */
 async function handleCloudProxy(
   cloudKey: string,
-  bucketType?: string | null
+  bucketType?: string | null,
+  userId?: string
 ): Promise<NextResponse> {
   try {
     // Check if this is a KB file (starts with 'kb/')
@@ -156,6 +154,13 @@ async function handleCloudProxy(
     const originalFilename = cloudKey.split('/').pop() || 'download'
     const contentType = getContentType(originalFilename)

+    logger.info('Cloud file served', {
+      userId,
+      key: cloudKey,
+      size: fileBuffer.length,
+      bucket: bucketType || 'default',
+    })
+
     return createFileResponse({
       buffer: fileBuffer,
       contentType,
@@ -1,17 +1,14 @@
-import { db } from '@sim/db'
-import { workflow as workflowTable } from '@sim/db/schema'
-import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console/logger'
-import { getUserEntityPermissions } from '@/lib/permissions/utils'
 import { generateRequestId } from '@/lib/utils'
 import { applyAutoLayout } from '@/lib/workflows/autolayout'
 import {
   loadWorkflowFromNormalizedTables,
   type NormalizedWorkflowData,
 } from '@/lib/workflows/db-helpers'
+import { getWorkflowAccessContext } from '@/lib/workflows/utils'

 export const dynamic = 'force-dynamic'

@@ -77,11 +74,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
     })

     // Fetch the workflow to check ownership/access
-    const workflowData = await db
-      .select()
-      .from(workflowTable)
-      .where(eq(workflowTable.id, workflowId))
-      .then((rows) => rows[0])
+    const accessContext = await getWorkflowAccessContext(workflowId, userId)
+    const workflowData = accessContext?.workflow

     if (!workflowData) {
       logger.warn(`[${requestId}] Workflow ${workflowId} not found for autolayout`)
@@ -89,24 +83,12 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
     }

     // Check if user has permission to update this workflow
-    let canUpdate = false
-
-    // Case 1: User owns the workflow
-    if (workflowData.userId === userId) {
-      canUpdate = true
-    }
-
-    // Case 2: Workflow belongs to a workspace and user has write or admin permission
-    if (!canUpdate && workflowData.workspaceId) {
-      const userPermission = await getUserEntityPermissions(
-        userId,
-        'workspace',
-        workflowData.workspaceId
-      )
-      if (userPermission === 'write' || userPermission === 'admin') {
-        canUpdate = true
-      }
-    }
+    const canUpdate =
+      accessContext?.isOwner ||
+      (workflowData.workspaceId
+        ? accessContext?.workspacePermission === 'write' ||
+          accessContext?.workspacePermission === 'admin'
+        : false)

     if (!canUpdate) {
       logger.warn(
@@ -47,17 +47,17 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
     // Duplicate workflow and all related data in a transaction
     const result = await db.transaction(async (tx) => {
       // First verify the source workflow exists
-      const sourceWorkflow = await tx
+      const sourceWorkflowRow = await tx
         .select()
         .from(workflow)
         .where(eq(workflow.id, sourceWorkflowId))
         .limit(1)

-      if (sourceWorkflow.length === 0) {
+      if (sourceWorkflowRow.length === 0) {
         throw new Error('Source workflow not found')
       }

-      const source = sourceWorkflow[0]
+      const source = sourceWorkflowRow[0]

       // Check if user has permission to access the source workflow
       let canAccessSource = false
@@ -23,7 +23,11 @@ import {
   workflowHasResponseBlock,
 } from '@/lib/workflows/utils'
 import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
-import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
+import {
+  createErrorResponse,
+  createSuccessResponse,
+  processApiWorkflowField,
+} from '@/app/api/workflows/utils'
 import { Executor } from '@/executor'
 import type { ExecutionResult } from '@/executor/types'
 import { Serializer } from '@/serializer'
@@ -74,16 +78,13 @@ function resolveOutputIds(
     return selectedOutputs
   }

-  // UUID regex to detect if it's already in blockId_attribute format
   const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/i

   return selectedOutputs.map((outputId) => {
-    // If it starts with a UUID, it's already in blockId_attribute format (from chat deployments)
     if (UUID_REGEX.test(outputId)) {
       return outputId
     }

-    // Otherwise, it's in blockName.path format from the user/API
     const dotIndex = outputId.indexOf('.')
     if (dotIndex === -1) {
       logger.warn(`Invalid output ID format (missing dot): ${outputId}`)
@@ -93,7 +94,6 @@ function resolveOutputIds(
     const blockName = outputId.substring(0, dotIndex)
     const path = outputId.substring(dotIndex + 1)

-    // Find the block by name (case-insensitive, ignoring spaces)
     const normalizedBlockName = blockName.toLowerCase().replace(/\s+/g, '')
     const block = Object.values(blocks).find((b: any) => {
       const normalized = (b.name || '').toLowerCase().replace(/\s+/g, '')
@@ -137,9 +137,6 @@ export async function executeWorkflow(
   const loggingSession = new LoggingSession(workflowId, executionId, 'api', requestId)

-  // Rate limiting is now handled before entering the sync queue
-
-  // Check if the actor has exceeded their usage limits
   const usageCheck = await checkServerSideUsageLimits(actorUserId)
   if (usageCheck.isExceeded) {
     logger.warn(`[${requestId}] User ${workflow.userId} has exceeded usage limits`, {
@@ -151,13 +148,11 @@ export async function executeWorkflow(
     )
   }

-  // Log input to help debug
   logger.info(
     `[${requestId}] Executing workflow with input:`,
     input ? JSON.stringify(input, null, 2) : 'No input provided'
   )

-  // Use input directly for API workflows
   const processedInput = input
   logger.info(
     `[${requestId}] Using input directly for workflow:`,
@@ -168,10 +163,7 @@ export async function executeWorkflow(
   runningExecutions.add(executionKey)
   logger.info(`[${requestId}] Starting workflow execution: ${workflowId}`)

-  // Load workflow data from deployed state for API executions
   const deployedData = await loadDeployedWorkflowState(workflowId)

-  // Use deployed data as primary source for API executions
   const { blocks, edges, loops, parallels } = deployedData
   logger.info(`[${requestId}] Using deployed state for workflow execution: ${workflowId}`)
   logger.debug(`[${requestId}] Deployed data loaded:`, {
@@ -181,10 +173,8 @@ export async function executeWorkflow(
     parallelsCount: Object.keys(parallels || {}).length,
   })

-  // Use the same execution flow as in scheduled executions
   const mergedStates = mergeSubblockState(blocks)

-  // Load personal (for the executing user) and workspace env (workspace overrides personal)
   const { personalEncrypted, workspaceEncrypted } = await getPersonalAndWorkspaceEnv(
     actorUserId,
     workflow.workspaceId || undefined
@@ -197,7 +187,6 @@ export async function executeWorkflow(
     variables,
   })

-  // Replace environment variables in the block states
   const currentBlockStates = await Object.entries(mergedStates).reduce(
     async (accPromise, [id, block]) => {
       const acc = await accPromise
@@ -206,13 +195,11 @@ export async function executeWorkflow(
         const subAcc = await subAccPromise
         let value = subBlock.value

-        // If the value is a string and contains environment variable syntax
         if (typeof value === 'string' && value.includes('{{') && value.includes('}}')) {
           const matches = value.match(/{{([^}]+)}}/g)
           if (matches) {
-            // Process all matches sequentially
             for (const match of matches) {
-              const varName = match.slice(2, -2) // Remove {{ and }}
+              const varName = match.slice(2, -2)
               const encryptedValue = variables[varName]
               if (!encryptedValue) {
                 throw new Error(`Environment variable "${varName}" was not found`)
@@ -244,7 +231,6 @@ export async function executeWorkflow(
     Promise.resolve({} as Record<string, Record<string, any>>)
   )

-  // Create a map of decrypted environment variables
   const decryptedEnvVars: Record<string, string> = {}
   for (const [key, encryptedValue] of Object.entries(variables)) {
     try {
@@ -256,22 +242,17 @@ export async function executeWorkflow(
       }
     }

-  // Process the block states to ensure response formats are properly parsed
   const processedBlockStates = Object.entries(currentBlockStates).reduce(
     (acc, [blockId, blockState]) => {
-      // Check if this block has a responseFormat that needs to be parsed
       if (blockState.responseFormat && typeof blockState.responseFormat === 'string') {
         const responseFormatValue = blockState.responseFormat.trim()

-        // Check for variable references like <start.input>
         if (responseFormatValue.startsWith('<') && responseFormatValue.includes('>')) {
           logger.debug(
             `[${requestId}] Response format contains variable reference for block ${blockId}`
           )
-          // Keep variable references as-is - they will be resolved during execution
           acc[blockId] = blockState
         } else if (responseFormatValue === '') {
-          // Empty string - remove response format
           acc[blockId] = {
             ...blockState,
             responseFormat: undefined,
@@ -279,7 +260,6 @@ export async function executeWorkflow(
         } else {
           try {
             logger.debug(`[${requestId}] Parsing responseFormat for block ${blockId}`)
-            // Attempt to parse the responseFormat if it's a string
             const parsedResponseFormat = JSON.parse(responseFormatValue)

             acc[blockId] = {
@@ -291,7 +271,6 @@ export async function executeWorkflow(
               `[${requestId}] Failed to parse responseFormat for block ${blockId}, using undefined`,
               error
             )
-            // Set to undefined instead of keeping malformed JSON - this allows execution to continue
             acc[blockId] = {
               ...blockState,
               responseFormat: undefined,
@@ -306,7 +285,6 @@ export async function executeWorkflow(
     {} as Record<string, Record<string, any>>
   )

-  // Get workflow variables - they are stored as JSON objects in the database
   const workflowVariables = (workflow.variables as Record<string, any>) || {}

   if (Object.keys(workflowVariables).length > 0) {
@@ -317,20 +295,15 @@ export async function executeWorkflow(
     logger.debug(`[${requestId}] No workflow variables found for: ${workflowId}`)
   }

-  // Serialize and execute the workflow
   logger.debug(`[${requestId}] Serializing workflow: ${workflowId}`)
   const serializedWorkflow = new Serializer().serializeWorkflow(
     mergedStates,
     edges,
     loops,
     parallels,
-    true // Enable validation during execution
+    true
   )

-  // Determine trigger start block based on execution type
-  // - 'chat': For chat deployments (looks for chat_trigger block)
-  // - 'api': For direct API execution (looks for api_trigger block)
-  // streamConfig is passed from POST handler when using streaming/chat
   const preferredTriggerType = streamConfig?.workflowTriggerType || 'api'
   const startBlock = TriggerUtils.findStartBlock(mergedStates, preferredTriggerType, false)

@@ -346,8 +319,6 @@ export async function executeWorkflow(
   const startBlockId = startBlock.blockId
   const triggerBlock = startBlock.block

-  // Check if the API trigger has any outgoing connections (except for legacy starter blocks)
-  // Legacy starter blocks have their own validation in the executor
   if (triggerBlock.type !== 'starter') {
     const outgoingConnections = serializedWorkflow.connections.filter(
       (conn) => conn.source === startBlockId
@@ -358,14 +329,12 @@ export async function executeWorkflow(
     }
   }

-  // Build context extensions
   const contextExtensions: any = {
     executionId,
     workspaceId: workflow.workspaceId,
     isDeployedContext: true,
   }

-  // Add streaming configuration if enabled
   if (streamConfig?.enabled) {
     contextExtensions.stream = true
     contextExtensions.selectedOutputs = streamConfig.selectedOutputs || []
@@ -386,10 +355,8 @@ export async function executeWorkflow(
     contextExtensions,
   })

-  // Set up logging on the executor
   loggingSession.setupExecutor(executor)

-  // Execute workflow (will always return ExecutionResult since we don't use onStream)
   const result = (await executor.execute(workflowId, startBlockId)) as ExecutionResult

   logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, {
@@ -397,14 +364,11 @@ export async function executeWorkflow(
     executionTime: result.metadata?.duration,
   })

-  // Build trace spans from execution result (works for both success and failure)
   const { traceSpans, totalDuration } = buildTraceSpans(result)

-  // Update workflow run counts if execution was successful
   if (result.success) {
     await updateWorkflowRunCounts(workflowId)

-    // Track API call in user stats
     await db
       .update(userStats)
       .set({
@@ -419,9 +383,9 @@ export async function executeWorkflow(
     totalDurationMs: totalDuration || 0,
     finalOutput: result.output || {},
     traceSpans: (traceSpans || []) as any,
+    workflowInput: processedInput,
   })

-  // For non-streaming, return the execution result
   return result
 } catch (error: any) {
   logger.error(`[${requestId}] Workflow execution failed: ${workflowId}`, error)
@@ -461,22 +425,16 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
     return createErrorResponse(validation.error.message, validation.error.status)
   }

-  // Determine trigger type based on authentication
   let triggerType: TriggerType = 'manual'
   const session = await getSession()
   if (!session?.user?.id) {
-    // Check for API key
     const apiKeyHeader = request.headers.get('X-API-Key')
     if (apiKeyHeader) {
       triggerType = 'api'
     }
   }

-  // Note: Async execution is now handled in the POST handler below
-
-  // Synchronous execution
   try {
-    // Resolve actor user id
     let actorUserId: string | null = null
     if (triggerType === 'manual') {
       actorUserId = session!.user!.id
@@ -491,7 +449,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
       void updateApiKeyLastUsed(auth.keyId).catch(() => {})
     }

-    // Check rate limits BEFORE entering execution for API requests
     const userSubscription = await getHighestPrioritySubscription(actorUserId)
     const rateLimiter = new RateLimiter()
     const rateLimitCheck = await rateLimiter.checkRateLimitWithSubscription(
@@ -514,13 +471,11 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
       actorUserId as string
     )

-    // Check if the workflow execution contains a response block output
     const hasResponseBlock = workflowHasResponseBlock(result)
     if (hasResponseBlock) {
       return createHttpResponseFromBlock(result)
     }

-    // Filter out logs and workflowConnections from the API response
     const filteredResult = createFilteredResult(result)
     return createSuccessResponse(filteredResult)
   } catch (error: any) {
@@ -536,12 +491,10 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
   } catch (error: any) {
     logger.error(`[${requestId}] Error executing workflow: ${id}`, error)

-    // Check if this is a rate limit error
     if (error instanceof RateLimitError) {
       return createErrorResponse(error.message, error.statusCode, 'RATE_LIMIT_EXCEEDED')
     }

-    // Check if this is a usage limit error
     if (error instanceof UsageLimitError) {
       return createErrorResponse(error.message, error.statusCode, 'USAGE_LIMIT_EXCEEDED')
     }
@@ -566,18 +519,15 @@ export async function POST(
   const workflowId = id

   try {
-    // Validate workflow access
     const validation = await validateWorkflowAccess(request as NextRequest, id)
     if (validation.error) {
       logger.warn(`[${requestId}] Workflow access validation failed: ${validation.error.message}`)
       return createErrorResponse(validation.error.message, validation.error.status)
     }

-    // Check execution mode from header
     const executionMode = request.headers.get('X-Execution-Mode')
     const isAsync = executionMode === 'async'

-    // Parse request body first to check for internal parameters
     const body = await request.text()
     logger.info(`[${requestId}] ${body ? 'Request body provided' : 'No request body provided'}`)

@@ -616,17 +566,78 @@ export async function POST(
       streamResponse,
       selectedOutputs,
       workflowTriggerType,
-      input,
+      input: rawInput,
     } = extractExecutionParams(request as NextRequest, parsedBody)

-    // Get authenticated user and determine trigger type
+    let processedInput = rawInput
+    logger.info(`[${requestId}] Raw input received:`, JSON.stringify(rawInput, null, 2))
+
+    try {
+      const deployedData = await loadDeployedWorkflowState(workflowId)
+      const blocks = deployedData.blocks || {}
+      logger.info(`[${requestId}] Loaded ${Object.keys(blocks).length} blocks from workflow`)
+
+      const apiTriggerBlock = Object.values(blocks).find(
+        (block: any) => block.type === 'api_trigger'
+      ) as any
+      logger.info(`[${requestId}] API trigger block found:`, !!apiTriggerBlock)
+
+      if (apiTriggerBlock?.subBlocks?.inputFormat?.value) {
+        const inputFormat = apiTriggerBlock.subBlocks.inputFormat.value as Array<{
+          name: string
+          type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
+        }>
+        logger.info(
+          `[${requestId}] Input format fields:`,
+          inputFormat.map((f) => `${f.name}:${f.type}`).join(', ')
+        )
+
+        const fileFields = inputFormat.filter((field) => field.type === 'files')
+        logger.info(`[${requestId}] Found ${fileFields.length} file-type fields`)
+
+        if (fileFields.length > 0 && typeof rawInput === 'object' && rawInput !== null) {
+          const executionContext = {
+            workspaceId: validation.workflow.workspaceId,
+            workflowId,
+          }
+
+          for (const fileField of fileFields) {
+            const fieldValue = rawInput[fileField.name]
+
+            if (fieldValue && typeof fieldValue === 'object') {
+              const uploadedFiles = await processApiWorkflowField(
+                fieldValue,
+                executionContext,
+                requestId
+              )
+
+              if (uploadedFiles.length > 0) {
+                processedInput = {
+                  ...processedInput,
+                  [fileField.name]: uploadedFiles,
+                }
+                logger.info(
+                  `[${requestId}] Successfully processed ${uploadedFiles.length} file(s) for field: ${fileField.name}`
+                )
+              }
+            }
+          }
+        }
+      }
+    } catch (error) {
+      logger.error(`[${requestId}] Failed to process file uploads:`, error)
+      const errorMessage = error instanceof Error ? error.message : 'Failed to process file uploads'
+      return createErrorResponse(errorMessage, 400)
+    }
+
+    const input = processedInput
+
     let authenticatedUserId: string
     let triggerType: TriggerType = 'manual'

-    // For internal calls (chat deployments), use the workflow owner's ID
     if (finalIsSecureMode) {
       authenticatedUserId = validation.workflow.userId
-      triggerType = 'manual' // Chat deployments use manual trigger type (no rate limit)
+      triggerType = 'manual'
     } else {
       const session = await getSession()
       const apiKeyHeader = request.headers.get('X-API-Key')
@@ -649,7 +660,6 @@ export async function POST(
       }
     }

-    // Get user subscription (checks both personal and org subscriptions)
     const userSubscription = await getHighestPrioritySubscription(authenticatedUserId)

     if (isAsync) {
@@ -659,7 +669,7 @@ export async function POST(
         authenticatedUserId,
         userSubscription,
         'api',
-        true // isAsync = true
+        true
       )

       if (!rateLimitCheck.allowed) {
@@ -683,7 +693,6 @@ export async function POST(
         )
       }

-      // Rate limit passed - always use Trigger.dev for async executions
       const handle = await tasks.trigger('workflow-execution', {
         workflowId,
         userId: authenticatedUserId,
@@ -723,7 +732,7 @@ export async function POST(
       authenticatedUserId,
       userSubscription,
       triggerType,
-      false // isAsync = false for sync calls
+      false
     )

     if (!rateLimitCheck.allowed) {
@@ -732,15 +741,12 @@ export async function POST(
       )
     }

-    // Handle streaming response - wrap execution in SSE stream
     if (streamResponse) {
-      // Load workflow blocks to resolve output IDs from blockName.attribute to blockId_attribute format
       const deployedData = await loadDeployedWorkflowState(workflowId)
       const resolvedSelectedOutputs = selectedOutputs
         ? resolveOutputIds(selectedOutputs, deployedData.blocks || {})
         : selectedOutputs

-      // Use shared streaming response creator
       const { createStreamingResponse } = await import('@/lib/workflows/streaming')
       const { SSE_HEADERS } = await import('@/lib/utils')

@@ -763,7 +769,6 @@ export async function POST(
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Non-streaming execution
|
|
||||||
const result = await executeWorkflow(
|
const result = await executeWorkflow(
|
||||||
validation.workflow,
|
validation.workflow,
|
||||||
requestId,
|
requestId,
|
||||||
@@ -772,13 +777,11 @@ export async function POST(
|
|||||||
undefined
|
undefined
|
||||||
)
|
)
|
||||||
|
|
||||||
// Non-streaming response
|
|
||||||
const hasResponseBlock = workflowHasResponseBlock(result)
|
const hasResponseBlock = workflowHasResponseBlock(result)
|
||||||
if (hasResponseBlock) {
|
if (hasResponseBlock) {
|
||||||
return createHttpResponseFromBlock(result)
|
return createHttpResponseFromBlock(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Filter out logs and workflowConnections from the API response
|
|
||||||
const filteredResult = createFilteredResult(result)
|
const filteredResult = createFilteredResult(result)
|
||||||
return createSuccessResponse(filteredResult)
|
return createSuccessResponse(filteredResult)
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
@@ -794,17 +797,14 @@ export async function POST(
|
|||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
logger.error(`[${requestId}] Error executing workflow: ${workflowId}`, error)
|
logger.error(`[${requestId}] Error executing workflow: ${workflowId}`, error)
|
||||||
|
|
||||||
// Check if this is a rate limit error
|
|
||||||
if (error instanceof RateLimitError) {
|
if (error instanceof RateLimitError) {
|
||||||
return createErrorResponse(error.message, error.statusCode, 'RATE_LIMIT_EXCEEDED')
|
return createErrorResponse(error.message, error.statusCode, 'RATE_LIMIT_EXCEEDED')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if this is a usage limit error
|
|
||||||
if (error instanceof UsageLimitError) {
|
if (error instanceof UsageLimitError) {
|
||||||
return createErrorResponse(error.message, error.statusCode, 'USAGE_LIMIT_EXCEEDED')
|
return createErrorResponse(error.message, error.statusCode, 'USAGE_LIMIT_EXCEEDED')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check if this is a rate limit error (string match for backward compatibility)
|
|
||||||
if (error.message?.includes('Rate limit exceeded')) {
|
if (error.message?.includes('Rate limit exceeded')) {
|
||||||
return createErrorResponse(error.message, 429, 'RATE_LIMIT_EXCEEDED')
|
return createErrorResponse(error.message, 429, 'RATE_LIMIT_EXCEEDED')
|
||||||
}
|
}
|
@@ -16,6 +16,9 @@ describe('Workflow By ID API Route', () => {
   error: vi.fn(),
 }

+const mockGetWorkflowById = vi.fn()
+const mockGetWorkflowAccessContext = vi.fn()
+
 beforeEach(() => {
   vi.resetModules()

@@ -30,6 +33,20 @@ describe('Workflow By ID API Route', () => {
   vi.doMock('@/lib/workflows/db-helpers', () => ({
     loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(null),
   }))

+  mockGetWorkflowById.mockReset()
+  mockGetWorkflowAccessContext.mockReset()
+
+  vi.doMock('@/lib/workflows/utils', async () => {
+    const actual =
+      await vi.importActual<typeof import('@/lib/workflows/utils')>('@/lib/workflows/utils')
+
+    return {
+      ...actual,
+      getWorkflowById: mockGetWorkflowById,
+      getWorkflowAccessContext: mockGetWorkflowAccessContext,
+    }
+  })
 })

 afterEach(() => {
@@ -60,17 +77,14 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(undefined),
-          }),
-        }),
-      }),
-    },
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(null)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: null,
+    workspaceOwnerId: null,
+    workspacePermission: null,
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })

   const req = new NextRequest('http://localhost:3000/api/workflows/nonexistent')
   const params = Promise.resolve({ id: 'nonexistent' })
@@ -105,22 +119,28 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
-    },
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: null,
+    workspacePermission: null,
+    isOwner: true,
+    isWorkspaceOwner: false,
+  })

   vi.doMock('@/lib/workflows/db-helpers', () => ({
     loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
   }))

+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: null,
+    workspacePermission: null,
+    isOwner: true,
+    isWorkspaceOwner: false,
+  })
+
   const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
   const params = Promise.resolve({ id: 'workflow-123' })

@@ -154,22 +174,28 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
-    },
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: 'workspace-456',
+    workspacePermission: 'admin',
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })

   vi.doMock('@/lib/workflows/db-helpers', () => ({
     loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
   }))

+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: 'workspace-456',
+    workspacePermission: 'read',
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })
+
   vi.doMock('@/lib/permissions/utils', () => ({
     getUserEntityPermissions: vi.fn().mockResolvedValue('read'),
     hasAdminPermission: vi.fn().mockResolvedValue(false),
@@ -200,22 +226,14 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
-    },
-  }))
-
-  vi.doMock('@/lib/permissions/utils', () => ({
-    getUserEntityPermissions: vi.fn().mockResolvedValue(null),
-    hasAdminPermission: vi.fn().mockResolvedValue(false),
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: 'workspace-456',
+    workspacePermission: null,
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })

   const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
   const params = Promise.resolve({ id: 'workflow-123' })
@@ -250,17 +268,14 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
-    },
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: null,
+    workspacePermission: null,
+    isOwner: true,
+    isWorkspaceOwner: false,
+  })

   vi.doMock('@/lib/workflows/db-helpers', () => ({
     loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
@@ -294,19 +309,22 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: null,
+    workspacePermission: null,
+    isOwner: true,
+    isWorkspaceOwner: false,
+  })
+
   vi.doMock('@sim/db', () => ({
     db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
       delete: vi.fn().mockReturnValue({
-        where: vi.fn().mockResolvedValue(undefined),
+        where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]),
       }),
     },
+    workflow: {},
   }))

   global.fetch = vi.fn().mockResolvedValue({
@@ -340,24 +358,22 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: 'workspace-456',
+    workspacePermission: 'admin',
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })
+
   vi.doMock('@sim/db', () => ({
     db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
       delete: vi.fn().mockReturnValue({
-        where: vi.fn().mockResolvedValue(undefined),
+        where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]),
       }),
     },
-  }))
-
-  vi.doMock('@/lib/permissions/utils', () => ({
-    getUserEntityPermissions: vi.fn().mockResolvedValue('admin'),
-    hasAdminPermission: vi.fn().mockResolvedValue(true),
+    workflow: {},
   }))

   global.fetch = vi.fn().mockResolvedValue({
@@ -391,22 +407,14 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
-    },
-  }))
-
-  vi.doMock('@/lib/permissions/utils', () => ({
-    getUserEntityPermissions: vi.fn().mockResolvedValue('read'),
-    hasAdminPermission: vi.fn().mockResolvedValue(false),
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: 'workspace-456',
+    workspacePermission: null,
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })

   const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
     method: 'DELETE',
@@ -432,6 +440,7 @@ describe('Workflow By ID API Route', () => {
   }

   const updateData = { name: 'Updated Workflow' }
+  const updatedWorkflow = { ...mockWorkflow, ...updateData, updatedAt: new Date() }

   vi.doMock('@/lib/auth', () => ({
     getSession: vi.fn().mockResolvedValue({
@@ -439,23 +448,26 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: null,
+    workspacePermission: null,
+    isOwner: true,
+    isWorkspaceOwner: false,
+  })
+
   vi.doMock('@sim/db', () => ({
     db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
       update: vi.fn().mockReturnValue({
         set: vi.fn().mockReturnValue({
           where: vi.fn().mockReturnValue({
-            returning: vi.fn().mockResolvedValue([{ ...mockWorkflow, ...updateData }]),
+            returning: vi.fn().mockResolvedValue([updatedWorkflow]),
          }),
        }),
      }),
    },
+    workflow: {},
   }))

   const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
@@ -481,6 +493,7 @@ describe('Workflow By ID API Route', () => {
   }

   const updateData = { name: 'Updated Workflow' }
+  const updatedWorkflow = { ...mockWorkflow, ...updateData, updatedAt: new Date() }

   vi.doMock('@/lib/auth', () => ({
     getSession: vi.fn().mockResolvedValue({
@@ -488,28 +501,26 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: 'workspace-456',
+    workspacePermission: 'write',
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })
+
   vi.doMock('@sim/db', () => ({
     db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
       update: vi.fn().mockReturnValue({
         set: vi.fn().mockReturnValue({
           where: vi.fn().mockReturnValue({
-            returning: vi.fn().mockResolvedValue([{ ...mockWorkflow, ...updateData }]),
+            returning: vi.fn().mockResolvedValue([updatedWorkflow]),
          }),
        }),
      }),
    },
-  }))
-
-  vi.doMock('@/lib/permissions/utils', () => ({
-    getUserEntityPermissions: vi.fn().mockResolvedValue('write'),
-    hasAdminPermission: vi.fn().mockResolvedValue(false),
+    workflow: {},
   }))

   const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
@@ -542,22 +553,14 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
-    },
-  }))
-
-  vi.doMock('@/lib/permissions/utils', () => ({
-    getUserEntityPermissions: vi.fn().mockResolvedValue('read'),
-    hasAdminPermission: vi.fn().mockResolvedValue(false),
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: 'workspace-456',
+    workspacePermission: 'read',
+    isOwner: false,
+    isWorkspaceOwner: false,
+  })

   const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
     method: 'PUT',
@@ -587,17 +590,14 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockResolvedValue(mockWorkflow),
-          }),
-        }),
-      }),
-    },
-  }))
+  mockGetWorkflowById.mockResolvedValueOnce(mockWorkflow)
+  mockGetWorkflowAccessContext.mockResolvedValueOnce({
+    workflow: mockWorkflow,
+    workspaceOwnerId: null,
+    workspacePermission: null,
+    isOwner: true,
+    isWorkspaceOwner: false,
+  })

   // Invalid data - empty name
   const invalidData = { name: '' }
@@ -625,17 +625,7 @@ describe('Workflow By ID API Route', () => {
     }),
   }))

-  vi.doMock('@sim/db', () => ({
-    db: {
-      select: vi.fn().mockReturnValue({
-        from: vi.fn().mockReturnValue({
-          where: vi.fn().mockReturnValue({
-            then: vi.fn().mockRejectedValue(new Error('Database connection timeout')),
-          }),
-        }),
-      }),
-    },
-  }))
+  mockGetWorkflowById.mockRejectedValueOnce(new Error('Database connection timeout'))

   const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
   const params = Promise.resolve({ id: 'workflow-123' })
@@ -8,9 +8,9 @@ import { getSession } from '@/lib/auth'
 import { verifyInternalToken } from '@/lib/auth/internal'
 import { env } from '@/lib/env'
 import { createLogger } from '@/lib/logs/console/logger'
-import { getUserEntityPermissions, hasAdminPermission } from '@/lib/permissions/utils'
 import { generateRequestId } from '@/lib/utils'
 import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
+import { getWorkflowAccessContext, getWorkflowById } from '@/lib/workflows/utils'

 const logger = createLogger('WorkflowByIdAPI')

@@ -74,12 +74,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
     userId = authenticatedUserId
   }

-  // Fetch the workflow
-  const workflowData = await db
-    .select()
-    .from(workflow)
-    .where(eq(workflow.id, workflowId))
-    .then((rows) => rows[0])
+  let accessContext = null
+  let workflowData = await getWorkflowById(workflowId)

   if (!workflowData) {
     logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
@@ -94,18 +90,21 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
     hasAccess = true
   } else {
     // Case 1: User owns the workflow
-    if (workflowData.userId === userId) {
-      hasAccess = true
-    }
+    if (workflowData) {
+      accessContext = await getWorkflowAccessContext(workflowId, userId ?? undefined)

-    // Case 2: Workflow belongs to a workspace the user has permissions for
-    if (!hasAccess && workflowData.workspaceId && userId) {
-      const userPermission = await getUserEntityPermissions(
-        userId,
-        'workspace',
-        workflowData.workspaceId
-      )
-      if (userPermission !== null) {
+      if (!accessContext) {
+        logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
+        return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
+      }
+
+      workflowData = accessContext.workflow
+
+      if (accessContext.isOwner) {
+        hasAccess = true
+      }
+
+      if (!hasAccess && workflowData.workspaceId && accessContext.workspacePermission) {
         hasAccess = true
       }
     }
@@ -179,11 +178,8 @@ export async function DELETE(

   const userId = session.user.id

-  const workflowData = await db
-    .select()
-    .from(workflow)
-    .where(eq(workflow.id, workflowId))
-    .then((rows) => rows[0])
+  const accessContext = await getWorkflowAccessContext(workflowId, userId)
+  const workflowData = accessContext?.workflow || (await getWorkflowById(workflowId))

   if (!workflowData) {
     logger.warn(`[${requestId}] Workflow ${workflowId} not found for deletion`)
@@ -200,8 +196,8 @@ export async function DELETE(

   // Case 2: Workflow belongs to a workspace and user has admin permission
   if (!canDelete && workflowData.workspaceId) {
-    const hasAdmin = await hasAdminPermission(userId, workflowData.workspaceId)
-    if (hasAdmin) {
+    const context = accessContext || (await getWorkflowAccessContext(workflowId, userId))
+    if (context?.workspacePermission === 'admin') {
       canDelete = true
     }
   }
@@ -320,11 +316,8 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
   const updates = UpdateWorkflowSchema.parse(body)

   // Fetch the workflow to check ownership/access
-  const workflowData = await db
-    .select()
-    .from(workflow)
-    .where(eq(workflow.id, workflowId))
-    .then((rows) => rows[0])
+  const accessContext = await getWorkflowAccessContext(workflowId, userId)
+  const workflowData = accessContext?.workflow || (await getWorkflowById(workflowId))

   if (!workflowData) {
     logger.warn(`[${requestId}] Workflow ${workflowId} not found for update`)
@@ -341,12 +334,8 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{

   // Case 2: Workflow belongs to a workspace and user has write or admin permission
   if (!canUpdate && workflowData.workspaceId) {
-    const userPermission = await getUserEntityPermissions(
-      userId,
-      'workspace',
-      workflowData.workspaceId
-    )
-    if (userPermission === 'write' || userPermission === 'admin') {
+    const context = accessContext || (await getWorkflowAccessContext(workflowId, userId))
+    if (context?.workspacePermission === 'write' || context?.workspacePermission === 'admin') {
       canUpdate = true
     }
   }
@@ -5,10 +5,10 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console/logger'
-import { getUserEntityPermissions } from '@/lib/permissions/utils'
 import { generateRequestId } from '@/lib/utils'
 import { extractAndPersistCustomTools } from '@/lib/workflows/custom-tools-persistence'
 import { saveWorkflowToNormalizedTables } from '@/lib/workflows/db-helpers'
+import { getWorkflowAccessContext } from '@/lib/workflows/utils'
 import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/validation'

 const logger = createLogger('WorkflowStateAPI')
@@ -124,11 +124,8 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
   const state = WorkflowStateSchema.parse(body)

   // Fetch the workflow to check ownership/access
-  const workflowData = await db
-    .select()
-    .from(workflow)
-    .where(eq(workflow.id, workflowId))
-    .then((rows) => rows[0])
+  const accessContext = await getWorkflowAccessContext(workflowId, userId)
+  const workflowData = accessContext?.workflow

   if (!workflowData) {
     logger.warn(`[${requestId}] Workflow ${workflowId} not found for state update`)
@@ -136,24 +133,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
   }

   // Check if user has permission to update this workflow
-  let canUpdate = false
-
-  // Case 1: User owns the workflow
-  if (workflowData.userId === userId) {
-    canUpdate = true
-  }
-
-  // Case 2: Workflow belongs to a workspace and user has write or admin permission
-  if (!canUpdate && workflowData.workspaceId) {
-    const userPermission = await getUserEntityPermissions(
-      userId,
-      'workspace',
-      workflowData.workspaceId
-    )
-    if (userPermission === 'write' || userPermission === 'admin') {
-      canUpdate = true
-    }
-  }
+  const canUpdate =
+    accessContext?.isOwner ||
+    (workflowData.workspaceId
+      ? accessContext?.workspacePermission === 'write' ||
+        accessContext?.workspacePermission === 'admin'
+      : false)

   if (!canUpdate) {
     logger.warn(
@@ -18,12 +18,18 @@ import {
 describe('Workflow Variables API Route', () => {
   let authMocks: ReturnType<typeof mockAuth>
   let databaseMocks: ReturnType<typeof createMockDatabase>
+  const mockGetWorkflowAccessContext = vi.fn()

   beforeEach(() => {
     vi.resetModules()
     setupCommonApiMocks()
     mockCryptoUuid('mock-request-id-12345678')
     authMocks = mockAuth(mockUser)
+    mockGetWorkflowAccessContext.mockReset()
+
+    vi.doMock('@/lib/workflows/utils', () => ({
+      getWorkflowAccessContext: mockGetWorkflowAccessContext,
+    }))
   })

   afterEach(() => {
@@ -47,9 +53,7 @@ describe('Workflow Variables API Route', () => {

   it('should return 404 when workflow does not exist', async () => {
     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-    databaseMocks = createMockDatabase({
-      select: { results: [[]] }, // No workflow found
-    })
+    mockGetWorkflowAccessContext.mockResolvedValueOnce(null)

     const req = new NextRequest('http://localhost:3000/api/workflows/nonexistent/variables')
     const params = Promise.resolve({ id: 'nonexistent' })
@@ -73,8 +77,12 @@ describe('Workflow Variables API Route', () => {
     }

     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-    databaseMocks = createMockDatabase({
-      select: { results: [[mockWorkflow]] },
+    mockGetWorkflowAccessContext.mockResolvedValueOnce({
+      workflow: mockWorkflow,
+      workspaceOwnerId: null,
+      workspacePermission: null,
+      isOwner: true,
+      isWorkspaceOwner: false,
     })

     const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables')
@@ -99,14 +107,14 @@ describe('Workflow Variables API Route', () => {
     }

     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-    databaseMocks = createMockDatabase({
-      select: { results: [[mockWorkflow]] },
+    mockGetWorkflowAccessContext.mockResolvedValueOnce({
+      workflow: mockWorkflow,
+      workspaceOwnerId: 'workspace-owner',
+      workspacePermission: 'read',
+      isOwner: false,
+      isWorkspaceOwner: false,
     })

-    vi.doMock('@/lib/permissions/utils', () => ({
-      getUserEntityPermissions: vi.fn().mockResolvedValue('read'),
-    }))
-
     const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables')
     const params = Promise.resolve({ id: 'workflow-123' })

@@ -116,14 +124,6 @@ describe('Workflow Variables API Route', () => {
     expect(response.status).toBe(200)
     const data = await response.json()
     expect(data.data).toEqual(mockWorkflow.variables)
-
-    // Verify permissions check was called
-    const { getUserEntityPermissions } = await import('@/lib/permissions/utils')
-    expect(getUserEntityPermissions).toHaveBeenCalledWith(
-      'user-123',
-      'workspace',
-      'workspace-456'
-    )
   })

   it('should deny access when user has no workspace permissions', async () => {
@@ -135,14 +135,14 @@ describe('Workflow Variables API Route', () => {
     }

     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-    databaseMocks = createMockDatabase({
-      select: { results: [[mockWorkflow]] },
+    mockGetWorkflowAccessContext.mockResolvedValueOnce({
+      workflow: mockWorkflow,
+      workspaceOwnerId: 'workspace-owner',
+      workspacePermission: null,
+      isOwner: false,
+      isWorkspaceOwner: false,
     })

-    vi.doMock('@/lib/permissions/utils', () => ({
-      getUserEntityPermissions: vi.fn().mockResolvedValue(null),
-    }))
-
     const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables')
     const params = Promise.resolve({ id: 'workflow-123' })

@@ -165,8 +165,12 @@ describe('Workflow Variables API Route', () => {
     }

     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-    databaseMocks = createMockDatabase({
-      select: { results: [[mockWorkflow]] },
+    mockGetWorkflowAccessContext.mockResolvedValueOnce({
+      workflow: mockWorkflow,
+      workspaceOwnerId: null,
+      workspacePermission: null,
+      isOwner: true,
+      isWorkspaceOwner: false,
     })

     const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables')
@@ -191,8 +195,15 @@ describe('Workflow Variables API Route', () => {
     }

     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
+    mockGetWorkflowAccessContext.mockResolvedValueOnce({
+      workflow: mockWorkflow,
+      workspaceOwnerId: null,
+      workspacePermission: null,
+      isOwner: true,
+      isWorkspaceOwner: false,
+    })
+
     databaseMocks = createMockDatabase({
-      select: { results: [[mockWorkflow]] },
       update: { results: [{}] },
     })

@@ -223,14 +234,14 @@ describe('Workflow Variables API Route', () => {
     }

     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-    databaseMocks = createMockDatabase({
-      select: { results: [[mockWorkflow]] },
+    mockGetWorkflowAccessContext.mockResolvedValueOnce({
+      workflow: mockWorkflow,
+      workspaceOwnerId: 'workspace-owner',
+      workspacePermission: null,
+      isOwner: false,
+      isWorkspaceOwner: false,
     })

-    vi.doMock('@/lib/permissions/utils', () => ({
-      getUserEntityPermissions: vi.fn().mockResolvedValue(null),
-    }))
-
     const variables = [
       { id: 'var-1', workflowId: 'workflow-123', name: 'test', type: 'string', value: 'hello' },
     ]
@@ -258,8 +269,12 @@ describe('Workflow Variables API Route', () => {
     }

     authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-    databaseMocks = createMockDatabase({
-      select: { results: [[mockWorkflow]] },
+    mockGetWorkflowAccessContext.mockResolvedValueOnce({
+      workflow: mockWorkflow,
+      workspaceOwnerId: null,
+      workspacePermission: null,
+      isOwner: true,
+      isWorkspaceOwner: false,
     })

     // Invalid data - missing required fields
@@ -283,9 +298,7 @@ describe('Workflow Variables API Route', () => {
   describe('Error handling', () => {
     it.concurrent('should handle database errors gracefully', async () => {
       authMocks.setAuthenticated({ id: 'user-123', email: 'test@example.com' })
-      databaseMocks = createMockDatabase({
-        select: { throwError: true, errorMessage: 'Database connection failed' },
-      })
+      mockGetWorkflowAccessContext.mockRejectedValueOnce(new Error('Database connection failed'))

       const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables')
       const params = Promise.resolve({ id: 'workflow-123' })
@@ -5,8 +5,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console/logger'
-import { getUserEntityPermissions } from '@/lib/permissions/utils'
 import { generateRequestId } from '@/lib/utils'
+import { getWorkflowAccessContext } from '@/lib/workflows/utils'
 import type { Variable } from '@/stores/panel/variables/types'

 const logger = createLogger('WorkflowVariablesAPI')
@@ -35,32 +35,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
   }

   // Get the workflow record
-  const workflowRecord = await db
-    .select()
-    .from(workflow)
-    .where(eq(workflow.id, workflowId))
-    .limit(1)
+  const accessContext = await getWorkflowAccessContext(workflowId, session.user.id)
+  const workflowData = accessContext?.workflow

-  if (!workflowRecord.length) {
+  if (!workflowData) {
     logger.warn(`[${requestId}] Workflow not found: ${workflowId}`)
     return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
   }

-  const workflowData = workflowRecord[0]
   const workspaceId = workflowData.workspaceId

   // Check authorization - either the user owns the workflow or has workspace permissions
-  let isAuthorized = workflowData.userId === session.user.id
-
-  // If not authorized by ownership and the workflow belongs to a workspace, check workspace permissions
-  if (!isAuthorized && workspaceId) {
-    const userPermission = await getUserEntityPermissions(
-      session.user.id,
-      'workspace',
-      workspaceId
-    )
-    isAuthorized = userPermission !== null
-  }
+  const isAuthorized =
+    accessContext?.isOwner || (workspaceId ? accessContext?.workspacePermission !== null : false)

   if (!isAuthorized) {
     logger.warn(
@@ -125,32 +111,18 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
   }

   // Get the workflow record
-  const workflowRecord = await db
-    .select()
-    .from(workflow)
-    .where(eq(workflow.id, workflowId))
-    .limit(1)
+  const accessContext = await getWorkflowAccessContext(workflowId, session.user.id)
+  const workflowData = accessContext?.workflow

-  if (!workflowRecord.length) {
+  if (!workflowData) {
     logger.warn(`[${requestId}] Workflow not found: ${workflowId}`)
     return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
   }

-  const workflowData = workflowRecord[0]
   const workspaceId = workflowData.workspaceId

   // Check authorization - either the user owns the workflow or has workspace permissions
-  let isAuthorized = workflowData.userId === session.user.id
-
-  // If not authorized by ownership and the workflow belongs to a workspace, check workspace permissions
-  if (!isAuthorized && workspaceId) {
-    const userPermission = await getUserEntityPermissions(
-      session.user.id,
-      'workspace',
-      workspaceId
-    )
-    isAuthorized = userPermission !== null
-  }
+  const isAuthorized =
+    accessContext?.isOwner || (workspaceId ? accessContext?.workspacePermission !== null : false)

   if (!isAuthorized) {
     logger.warn(
@@ -1,9 +1,14 @@
 import { NextResponse } from 'next/server'
+import { v4 as uuidv4 } from 'uuid'
 import { createLogger } from '@/lib/logs/console/logger'
 import { getUserEntityPermissions } from '@/lib/permissions/utils'
+import { uploadExecutionFile } from '@/lib/workflows/execution-file-storage'
+import type { UserFile } from '@/executor/types'

 const logger = createLogger('WorkflowUtils')

+const MAX_FILE_SIZE = 20 * 1024 * 1024 // 20MB
+
 export function createErrorResponse(error: string, status: number, code?: string) {
   return NextResponse.json(
     {
@@ -37,3 +42,99 @@ export async function verifyWorkspaceMembership(
     return null
   }
 }
+
+/**
+ * Process API workflow files - handles both base64 ('file' type) and URL pass-through ('url' type)
+ */
+export async function processApiWorkflowFiles(
+  file: { type: string; data: string; name: string; mime?: string },
+  executionContext: { workspaceId: string; workflowId: string; executionId: string },
+  requestId: string
+): Promise<UserFile | null> {
+  if (file.type === 'file' && file.data && file.name) {
+    const dataUrlPrefix = 'data:'
+    const base64Prefix = ';base64,'
+
+    if (!file.data.startsWith(dataUrlPrefix)) {
+      logger.warn(`[${requestId}] Invalid data format for file: ${file.name}`)
+      return null
+    }
+
+    const base64Index = file.data.indexOf(base64Prefix)
+    if (base64Index === -1) {
+      logger.warn(`[${requestId}] Invalid data format (no base64 marker) for file: ${file.name}`)
+      return null
+    }
+
+    const mimeType = file.data.substring(dataUrlPrefix.length, base64Index)
+    const base64Data = file.data.substring(base64Index + base64Prefix.length)
+    const buffer = Buffer.from(base64Data, 'base64')
+
+    if (buffer.length > MAX_FILE_SIZE) {
+      const fileSizeMB = (buffer.length / (1024 * 1024)).toFixed(2)
+      throw new Error(
+        `File "${file.name}" exceeds the maximum size limit of 20MB (actual size: ${fileSizeMB}MB)`
+      )
+    }
+
+    logger.debug(`[${requestId}] Uploading file: ${file.name} (${buffer.length} bytes)`)
+
+    const userFile = await uploadExecutionFile(
+      executionContext,
+      buffer,
+      file.name,
+      mimeType || file.mime || 'application/octet-stream'
+    )
+
+    logger.debug(`[${requestId}] Successfully uploaded ${file.name}`)
+    return userFile
+  }
+
+  if (file.type === 'url' && file.data) {
+    return {
+      id: uuidv4(),
+      url: file.data,
+      name: file.name,
+      size: 0,
+      type: file.mime || 'application/octet-stream',
+      key: `url/${file.name}`,
+      uploadedAt: new Date().toISOString(),
+      expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000).toISOString(),
+    }
+  }
+
+  return null
+}
+
+/**
+ * Process all files for a given field in the API workflow input
+ */
+export async function processApiWorkflowField(
+  fieldValue: any,
+  executionContext: { workspaceId: string; workflowId: string },
+  requestId: string
+): Promise<UserFile[]> {
+  if (!fieldValue || typeof fieldValue !== 'object') {
+    return []
+  }
+
+  const files = Array.isArray(fieldValue) ? fieldValue : [fieldValue]
+  const uploadedFiles: UserFile[] = []
+  const executionId = uuidv4()
+  const fullContext = { ...executionContext, executionId }
+
+  for (const file of files) {
+    try {
+      const userFile = await processApiWorkflowFiles(file, fullContext, requestId)
+
+      if (userFile) {
+        uploadedFiles.push(userFile)
+      }
+    } catch (error) {
+      logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
+      throw new Error(`Failed to upload file: ${file.name}`)
+    }
+  }
+
+  return uploadedFiles
+}
@@ -45,6 +45,18 @@ const DEFAULT_VOICE_SETTINGS = {
   voiceId: 'EXAVITQu4vr4xnSDxMaL', // Default ElevenLabs voice (Bella)
 }

+/**
+ * Converts a File object to a base64 data URL
+ */
+function fileToBase64(file: File): Promise<string> {
+  return new Promise((resolve, reject) => {
+    const reader = new FileReader()
+    reader.onload = () => resolve(reader.result as string)
+    reader.onerror = reject
+    reader.readAsDataURL(file)
+  })
+}
+
 /**
  * Creates an audio stream handler for text-to-speech conversion
  * @param streamTextToAudio - Function to stream text to audio
@@ -265,20 +277,43 @@ export default function ChatClient({ identifier }: { identifier: string }) {
   }

   // Handle sending a message
-  const handleSendMessage = async (messageParam?: string, isVoiceInput = false) => {
+  const handleSendMessage = async (
+    messageParam?: string,
+    isVoiceInput = false,
+    files?: Array<{
+      id: string
+      name: string
+      size: number
+      type: string
+      file: File
+      dataUrl?: string
+    }>
+  ) => {
     const messageToSend = messageParam ?? inputValue
-    if (!messageToSend.trim() || isLoading) return
+    if ((!messageToSend.trim() && (!files || files.length === 0)) || isLoading) return

-    logger.info('Sending message:', { messageToSend, isVoiceInput, conversationId })
+    logger.info('Sending message:', {
+      messageToSend,
+      isVoiceInput,
+      conversationId,
+      filesCount: files?.length,
+    })

     // Reset userHasScrolled when sending a new message
     setUserHasScrolled(false)

     const userMessage: ChatMessage = {
       id: crypto.randomUUID(),
-      content: messageToSend,
+      content: messageToSend || (files && files.length > 0 ? `Sent ${files.length} file(s)` : ''),
       type: 'user',
       timestamp: new Date(),
+      attachments: files?.map((file) => ({
+        id: file.id,
+        name: file.name,
+        type: file.type,
+        size: file.size,
+        dataUrl: file.dataUrl || '',
+      })),
     }

     // Add the user's message to the chat
@@ -299,7 +334,7 @@ export default function ChatClient({ identifier }: { identifier: string }) {

     try {
       // Send structured payload to maintain chat context
-      const payload = {
+      const payload: any = {
         input:
           typeof userMessage.content === 'string'
             ? userMessage.content
@@ -307,7 +342,22 @@ export default function ChatClient({ identifier }: { identifier: string }) {
         conversationId,
       }

-      logger.info('API payload:', payload)
+      // Add files if present (convert to base64 for JSON transmission)
+      if (files && files.length > 0) {
+        payload.files = await Promise.all(
+          files.map(async (file) => ({
+            name: file.name,
+            size: file.size,
+            type: file.type,
+            dataUrl: file.dataUrl || (await fileToBase64(file.file)),
+          }))
+        )
+      }
+
+      logger.info('API payload:', {
+        ...payload,
+        files: payload.files ? `${payload.files.length} files` : undefined,
+      })

       const response = await fetch(`/api/chat/${identifier}`, {
         method: 'POST',
@@ -499,8 +549,8 @@ export default function ChatClient({ identifier }: { identifier: string }) {
           <div className='relative p-3 pb-4 md:p-4 md:pb-6'>
             <div className='relative mx-auto max-w-3xl md:max-w-[748px]'>
               <ChatInput
-                onSubmit={(value, isVoiceInput) => {
-                  void handleSendMessage(value, isVoiceInput)
+                onSubmit={(value, isVoiceInput, files) => {
+                  void handleSendMessage(value, isVoiceInput, files)
                 }}
                 isStreaming={isStreamingResponse}
                 onStopStreaming={() => stopStreaming(setMessages)}
@@ -3,7 +3,7 @@
|
|||||||
import type React from 'react'
|
import type React from 'react'
|
||||||
import { useEffect, useRef, useState } from 'react'
|
import { useEffect, useRef, useState } from 'react'
|
||||||
import { motion } from 'framer-motion'
|
import { motion } from 'framer-motion'
|
||||||
import { Send, Square } from 'lucide-react'
|
import { AlertCircle, Paperclip, Send, Square, X } from 'lucide-react'
|
||||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
|
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
|
||||||
import { VoiceInput } from '@/app/chat/components/input/voice-input'
|
import { VoiceInput } from '@/app/chat/components/input/voice-input'
|
||||||
|
|
||||||
@@ -12,8 +12,17 @@ const PLACEHOLDER_DESKTOP = 'Enter a message or click the mic to speak'
 const MAX_TEXTAREA_HEIGHT = 120 // Max height in pixels (e.g., for about 3-4 lines)
 const MAX_TEXTAREA_HEIGHT_MOBILE = 100 // Smaller for mobile

+interface AttachedFile {
+  id: string
+  name: string
+  size: number
+  type: string
+  file: File
+  dataUrl?: string
+}
+
 export const ChatInput: React.FC<{
-  onSubmit?: (value: string, isVoiceInput?: boolean) => void
+  onSubmit?: (value: string, isVoiceInput?: boolean, files?: AttachedFile[]) => void
   isStreaming?: boolean
   onStopStreaming?: () => void
   onVoiceStart?: () => void
@@ -21,8 +30,11 @@ export const ChatInput: React.FC<{
 }> = ({ onSubmit, isStreaming = false, onStopStreaming, onVoiceStart, voiceOnly = false }) => {
   const wrapperRef = useRef<HTMLDivElement>(null)
   const textareaRef = useRef<HTMLTextAreaElement>(null) // Ref for the textarea
+  const fileInputRef = useRef<HTMLInputElement>(null)
   const [isActive, setIsActive] = useState(false)
   const [inputValue, setInputValue] = useState('')
+  const [attachedFiles, setAttachedFiles] = useState<AttachedFile[]>([])
+  const [uploadErrors, setUploadErrors] = useState<string[]>([])

   // Check if speech-to-text is available in the browser
   const isSttAvailable =
@@ -85,10 +97,75 @@ export const ChatInput: React.FC<{
     // Focus is now handled by the useEffect above
   }

+  // Handle file selection
+  const handleFileSelect = async (selectedFiles: FileList | null) => {
+    if (!selectedFiles) return
+
+    const newFiles: AttachedFile[] = []
+    const maxSize = 10 * 1024 * 1024 // 10MB limit
+    const maxFiles = 5
+
+    for (let i = 0; i < selectedFiles.length; i++) {
+      if (attachedFiles.length + newFiles.length >= maxFiles) break
+
+      const file = selectedFiles[i]
+
+      // Check file size
+      if (file.size > maxSize) {
+        setUploadErrors((prev) => [...prev, `${file.name} is too large (max 10MB)`])
+        continue
+      }
+
+      // Check for duplicates
+      const isDuplicate = attachedFiles.some(
+        (existingFile) => existingFile.name === file.name && existingFile.size === file.size
+      )
+      if (isDuplicate) {
+        setUploadErrors((prev) => [...prev, `${file.name} already added`])
+        continue
+      }
+
+      // Read file as data URL if it's an image
+      let dataUrl: string | undefined
+      if (file.type.startsWith('image/')) {
+        try {
+          dataUrl = await new Promise<string>((resolve, reject) => {
+            const reader = new FileReader()
+            reader.onload = () => resolve(reader.result as string)
+            reader.onerror = reject
+            reader.readAsDataURL(file)
+          })
+        } catch (error) {
+          console.error('Error reading file:', error)
+        }
+      }
+
+      newFiles.push({
+        id: crypto.randomUUID(),
+        name: file.name,
+        size: file.size,
+        type: file.type,
+        file,
+        dataUrl,
+      })
+    }
+
+    if (newFiles.length > 0) {
+      setAttachedFiles([...attachedFiles, ...newFiles])
+      setUploadErrors([]) // Clear errors when files are successfully added
+    }
+  }
+
+  const handleRemoveFile = (fileId: string) => {
+    setAttachedFiles(attachedFiles.filter((f) => f.id !== fileId))
+  }
+
   const handleSubmit = () => {
-    if (!inputValue.trim()) return
-    onSubmit?.(inputValue.trim(), false) // false = not voice input
+    if (!inputValue.trim() && attachedFiles.length === 0) return
+    onSubmit?.(inputValue.trim(), false, attachedFiles) // false = not voice input
     setInputValue('')
+    setAttachedFiles([])
+    setUploadErrors([]) // Clear errors when sending message
     if (textareaRef.current) {
       textareaRef.current.style.height = 'auto' // Reset height after submit
       textareaRef.current.style.overflowY = 'hidden' // Ensure overflow is hidden
@@ -132,6 +209,29 @@ export const ChatInput: React.FC<{
     <>
       <div className='fixed right-0 bottom-0 left-0 flex w-full items-center justify-center bg-gradient-to-t from-white to-transparent px-4 pb-4 text-black md:px-0 md:pb-4'>
         <div ref={wrapperRef} className='w-full max-w-3xl md:max-w-[748px]'>
+          {/* Error Messages */}
+          {uploadErrors.length > 0 && (
+            <div className='mb-3'>
+              <div className='rounded-lg border border-red-200 bg-red-50 p-3 dark:border-red-800/50 dark:bg-red-950/20'>
+                <div className='flex items-start gap-2'>
+                  <AlertCircle className='mt-0.5 h-4 w-4 shrink-0 text-red-600 dark:text-red-400' />
+                  <div className='flex-1'>
+                    <div className='mb-1 font-medium text-red-800 text-sm dark:text-red-300'>
+                      File upload error
+                    </div>
+                    <div className='space-y-1'>
+                      {uploadErrors.map((error, idx) => (
+                        <div key={idx} className='text-red-700 text-sm dark:text-red-400'>
+                          {error}
+                        </div>
+                      ))}
+                    </div>
+                  </div>
+                </div>
+              </div>
+            </div>
+          )}
+
           {/* Text Input Area with Controls */}
           <motion.div
             className='rounded-2xl border border-gray-200 bg-white shadow-sm md:rounded-3xl'
@@ -140,27 +240,99 @@ export const ChatInput: React.FC<{
|
|||||||
animate={{ opacity: 1, y: 0 }}
|
animate={{ opacity: 1, y: 0 }}
|
||||||
transition={{ duration: 0.2 }}
|
transition={{ duration: 0.2 }}
|
||||||
>
|
>
|
||||||
<div className='flex items-center gap-2 p-3 md:p-4'>
|
{/* File Previews */}
|
||||||
{/* Voice Input */}
|
{attachedFiles.length > 0 && (
|
||||||
{isSttAvailable && (
|
<div className='mb-2 flex flex-wrap gap-2 px-3 pt-3 md:px-4'>
|
||||||
<TooltipProvider>
|
{attachedFiles.map((file) => {
|
||||||
<Tooltip>
|
const formatFileSize = (bytes: number) => {
|
||||||
<TooltipTrigger asChild>
|
if (bytes === 0) return '0 B'
|
||||||
<div>
|
const k = 1024
|
||||||
<VoiceInput
|
const sizes = ['B', 'KB', 'MB', 'GB']
|
||||||
onVoiceStart={handleVoiceStart}
|
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||||
disabled={isStreaming}
|
return `${Math.round((bytes / k ** i) * 10) / 10} ${sizes[i]}`
|
||||||
minimal
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={file.id}
|
||||||
|
className={`group relative overflow-hidden rounded-lg border border-gray-200 bg-white dark:border-gray-700 dark:bg-gray-800 ${
|
||||||
|
file.dataUrl
|
||||||
|
? 'h-16 w-16 md:h-20 md:w-20'
|
||||||
|
: 'flex h-16 min-w-[120px] max-w-[200px] items-center gap-2 px-2 md:h-20 md:min-w-[140px] md:max-w-[220px] md:px-3'
|
||||||
|
}`}
|
||||||
|
title=''
|
||||||
|
>
|
||||||
|
{file.dataUrl ? (
|
||||||
|
<img
|
||||||
|
src={file.dataUrl}
|
||||||
|
alt={file.name}
|
||||||
|
className='h-full w-full object-cover'
|
||||||
/>
|
/>
|
||||||
</div>
|
) : (
|
||||||
</TooltipTrigger>
|
<>
|
||||||
<TooltipContent side='top'>
|
<div className='flex h-8 w-8 flex-shrink-0 items-center justify-center rounded bg-gray-100 md:h-10 md:w-10 dark:bg-gray-700'>
|
||||||
<p>Start voice conversation</p>
|
<Paperclip
|
||||||
<span className='text-gray-500 text-xs'>Click to enter voice mode</span>
|
size={16}
|
||||||
</TooltipContent>
|
className='text-gray-500 md:h-5 md:w-5 dark:text-gray-400'
|
||||||
</Tooltip>
|
/>
|
||||||
</TooltipProvider>
|
</div>
|
||||||
)}
|
<div className='min-w-0 flex-1'>
|
||||||
|
<div className='truncate font-medium text-gray-800 text-xs dark:text-gray-200'>
|
||||||
|
{file.name}
|
||||||
|
</div>
|
||||||
|
<div className='text-[10px] text-gray-500 dark:text-gray-400'>
|
||||||
|
{formatFileSize(file.size)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
<button
|
||||||
|
type='button'
|
||||||
|
onClick={() => handleRemoveFile(file.id)}
|
||||||
|
className='absolute top-1 right-1 rounded-full bg-gray-800/80 p-1 text-white opacity-0 transition-opacity hover:bg-gray-800/80 hover:text-white group-hover:opacity-100 dark:bg-black/70 dark:hover:bg-black/70 dark:hover:text-white'
|
||||||
|
>
|
||||||
|
<X size={12} />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<div className='flex items-center gap-2 p-3 md:p-4'>
|
||||||
|
{/* Paperclip Button */}
|
||||||
|
<TooltipProvider>
|
||||||
|
<Tooltip>
|
||||||
|
<TooltipTrigger asChild>
|
||||||
|
<button
|
||||||
|
type='button'
|
||||||
|
onClick={() => fileInputRef.current?.click()}
|
||||||
|
disabled={isStreaming || attachedFiles.length >= 5}
|
||||||
|
className='flex items-center justify-center rounded-full p-1.5 text-gray-600 transition-colors hover:bg-gray-100 disabled:cursor-not-allowed disabled:opacity-50 md:p-2'
|
||||||
|
>
|
||||||
|
<Paperclip size={16} className='md:h-5 md:w-5' />
|
||||||
|
</button>
|
||||||
|
</TooltipTrigger>
|
||||||
|
<TooltipContent side='top'>
|
||||||
|
<p>Attach files</p>
|
||||||
|
</TooltipContent>
|
||||||
|
</Tooltip>
|
||||||
|
</TooltipProvider>
|
||||||
|
|
||||||
|
{/* Hidden file input */}
|
||||||
|
<input
|
||||||
|
ref={fileInputRef}
|
||||||
|
type='file'
|
||||||
|
multiple
|
||||||
|
onChange={(e) => {
|
||||||
|
handleFileSelect(e.target.files)
|
||||||
|
if (fileInputRef.current) {
|
||||||
|
fileInputRef.current.value = ''
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className='hidden'
|
||||||
|
disabled={isStreaming}
|
||||||
|
/>
|
||||||
|
|
||||||
{/* Text Input Container */}
|
{/* Text Input Container */}
|
||||||
<div className='relative flex-1'>
|
<div className='relative flex-1'>
|
||||||
@@ -208,10 +380,30 @@ export const ChatInput: React.FC<{
             </div>
           </div>

+          {/* Voice Input */}
+          {isSttAvailable && (
+            <TooltipProvider>
+              <Tooltip>
+                <TooltipTrigger asChild>
+                  <div>
+                    <VoiceInput
+                      onVoiceStart={handleVoiceStart}
+                      disabled={isStreaming}
+                      minimal
+                    />
+                  </div>
+                </TooltipTrigger>
+                <TooltipContent side='top'>
+                  <p>Start voice conversation</p>
+                </TooltipContent>
+              </Tooltip>
+            </TooltipProvider>
+          )}
+
           {/* Send Button */}
           <button
             className={`flex items-center justify-center rounded-full p-1.5 text-white transition-colors md:p-2 ${
-              inputValue.trim()
+              inputValue.trim() || attachedFiles.length > 0
                 ? 'bg-black hover:bg-zinc-700'
                 : 'cursor-default bg-gray-300 hover:bg-gray-400'
             }`}
@@ -72,19 +72,17 @@ export function VoiceInput({

   if (minimal) {
     return (
-      <motion.button
+      <button
         type='button'
         onClick={handleVoiceClick}
         disabled={disabled}
-        className={`flex items-center justify-center p-1 transition-colors duration-200 ${
-          disabled ? 'cursor-not-allowed opacity-50' : 'cursor-pointer hover:text-gray-600'
+        className={`flex items-center justify-center rounded-full p-1.5 text-gray-600 transition-colors duration-200 hover:bg-gray-100 md:p-2 ${
+          disabled ? 'cursor-not-allowed opacity-50' : 'cursor-pointer'
         }`}
-        whileHover={{ scale: 1.05 }}
-        whileTap={{ scale: 0.95 }}
         title='Start voice conversation'
       >
-        <Mic size={18} className='text-gray-500' />
-      </motion.button>
+        <Mic size={16} className='md:h-5 md:w-5' />
+      </button>
     )
   }

@@ -1,10 +1,18 @@
 'use client'

 import { memo, useMemo, useState } from 'react'
-import { Check, Copy } from 'lucide-react'
+import { Check, Copy, File as FileIcon, FileText, Image as ImageIcon } from 'lucide-react'
 import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
 import MarkdownRenderer from './components/markdown-renderer'

+export interface ChatAttachment {
+  id: string
+  name: string
+  type: string
+  dataUrl: string
+  size?: number
+}
+
 export interface ChatMessage {
   id: string
   content: string | Record<string, unknown>
@@ -12,6 +20,7 @@ export interface ChatMessage {
   timestamp: Date
   isInitialMessage?: boolean
   isStreaming?: boolean
+  attachments?: ChatAttachment[]
 }

 function EnhancedMarkdownRenderer({ content }: { content: string }) {
@@ -39,15 +48,96 @@ export const ClientChatMessage = memo(
|
|||||||
return (
|
return (
|
||||||
<div className='px-4 py-5' data-message-id={message.id}>
|
<div className='px-4 py-5' data-message-id={message.id}>
|
||||||
<div className='mx-auto max-w-3xl'>
|
<div className='mx-auto max-w-3xl'>
|
||||||
|
{/* File attachments displayed above the message */}
|
||||||
|
{message.attachments && message.attachments.length > 0 && (
|
||||||
|
<div className='mb-2 flex justify-end'>
|
||||||
|
<div className='flex flex-wrap gap-2'>
|
||||||
|
{message.attachments.map((attachment) => {
|
||||||
|
const isImage = attachment.type.startsWith('image/')
|
||||||
|
const getFileIcon = (type: string) => {
|
||||||
|
if (type.includes('pdf'))
|
||||||
|
return (
|
||||||
|
<FileText className='h-5 w-5 text-gray-500 md:h-6 md:w-6 dark:text-gray-400' />
|
||||||
|
)
|
||||||
|
if (type.startsWith('image/'))
|
||||||
|
return (
|
||||||
|
<ImageIcon className='h-5 w-5 text-gray-500 md:h-6 md:w-6 dark:text-gray-400' />
|
||||||
|
)
|
||||||
|
if (type.includes('text') || type.includes('json'))
|
||||||
|
return (
|
||||||
|
<FileText className='h-5 w-5 text-gray-500 md:h-6 md:w-6 dark:text-gray-400' />
|
||||||
|
)
|
||||||
|
return (
|
||||||
|
<FileIcon className='h-5 w-5 text-gray-500 md:h-6 md:w-6 dark:text-gray-400' />
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const formatFileSize = (bytes?: number) => {
|
||||||
|
if (!bytes || bytes === 0) return ''
|
||||||
|
const k = 1024
|
||||||
|
const sizes = ['B', 'KB', 'MB', 'GB']
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||||
|
return `${Math.round((bytes / k ** i) * 10) / 10} ${sizes[i]}`
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={attachment.id}
|
||||||
|
className={`relative overflow-hidden rounded-2xl border border-gray-200 bg-gray-50 dark:border-gray-700 dark:bg-gray-800 ${
|
||||||
|
attachment.dataUrl?.trim() ? 'cursor-pointer' : ''
|
||||||
|
} ${
|
||||||
|
isImage
|
||||||
|
? 'h-16 w-16 md:h-20 md:w-20'
|
||||||
|
: 'flex h-16 min-w-[140px] max-w-[220px] items-center gap-2 px-3 md:h-20 md:min-w-[160px] md:max-w-[240px]'
|
||||||
|
}`}
|
||||||
|
onClick={(e) => {
|
||||||
|
if (attachment.dataUrl?.trim()) {
|
||||||
|
e.preventDefault()
|
||||||
|
window.open(attachment.dataUrl, '_blank')
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{isImage ? (
|
||||||
|
<img
|
||||||
|
src={attachment.dataUrl}
|
||||||
|
alt={attachment.name}
|
||||||
|
className='h-full w-full object-cover'
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<div className='flex h-10 w-10 flex-shrink-0 items-center justify-center rounded bg-gray-100 md:h-12 md:w-12 dark:bg-gray-700'>
|
||||||
|
{getFileIcon(attachment.type)}
|
||||||
|
</div>
|
||||||
|
<div className='min-w-0 flex-1'>
|
||||||
|
<div className='truncate font-medium text-gray-800 text-xs md:text-sm dark:text-gray-200'>
|
||||||
|
{attachment.name}
|
||||||
|
</div>
|
||||||
|
{attachment.size && (
|
||||||
|
<div className='text-[10px] text-gray-500 md:text-xs dark:text-gray-400'>
|
||||||
|
{formatFileSize(attachment.size)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
<div className='flex justify-end'>
|
<div className='flex justify-end'>
|
||||||
<div className='max-w-[80%] rounded-3xl bg-[#F4F4F4] px-4 py-3 dark:bg-gray-600'>
|
<div className='max-w-[80%] rounded-3xl bg-[#F4F4F4] px-4 py-3 dark:bg-gray-600'>
|
||||||
<div className='whitespace-pre-wrap break-words text-base text-gray-800 leading-relaxed dark:text-gray-100'>
|
{/* Render text content if present and not just file count message */}
|
||||||
{isJsonObject ? (
|
{message.content && !String(message.content).startsWith('Sent') && (
|
||||||
<pre>{JSON.stringify(message.content, null, 2)}</pre>
|
<div className='whitespace-pre-wrap break-words text-base text-gray-800 leading-relaxed dark:text-gray-100'>
|
||||||
) : (
|
{isJsonObject ? (
|
||||||
<span>{message.content as string}</span>
|
<pre>{JSON.stringify(message.content, null, 2)}</pre>
|
||||||
)}
|
) : (
|
||||||
</div>
|
<span>{message.content as string}</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -236,8 +236,8 @@ export function ExampleCommand({
         </div>
       )}

-      <div className='group relative rounded-md border bg-background transition-colors hover:bg-muted/50'>
-        <pre className='whitespace-pre-wrap p-3 font-mono text-xs'>{getDisplayCommand()}</pre>
+      <div className='group relative overflow-x-auto rounded-md border bg-background transition-colors hover:bg-muted/50'>
+        <pre className='whitespace-pre p-3 font-mono text-xs'>{getDisplayCommand()}</pre>
         <CopyButton text={getActualCommand()} />
       </div>
     </div>
@@ -139,6 +139,16 @@ export function DeployModal({
         case 'array':
           exampleData[field.name] = [1, 2, 3]
           break
+        case 'files':
+          exampleData[field.name] = [
+            {
+              data: 'data:application/pdf;base64,...',
+              type: 'file',
+              name: 'document.pdf',
+              mime: 'application/pdf',
+            },
+          ]
+          break
       }
     }
   })
@@ -1,10 +1,9 @@
 'use client'

 import { type KeyboardEvent, useCallback, useEffect, useMemo, useRef, useState } from 'react'
-import { ArrowDown, ArrowUp } from 'lucide-react'
+import { AlertCircle, ArrowDown, ArrowUp, File, FileText, Image, Paperclip, X } from 'lucide-react'
 import { Button } from '@/components/ui/button'
 import { Input } from '@/components/ui/input'
-import { Notice } from '@/components/ui/notice'
 import { ScrollArea } from '@/components/ui/scroll-area'
 import { createLogger } from '@/lib/logs/console/logger'
 import {
@@ -13,7 +12,6 @@ import {
   parseOutputContentSafely,
 } from '@/lib/response-format'
 import {
-  ChatFileUpload,
   ChatMessage,
   OutputSelect,
 } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/chat/components'
@@ -261,12 +259,40 @@ export function Chat({ chatMessage, setChatMessage }: ChatProps) {
     let result: any = null

     try {
-      // Add user message
+      // Read files as data URLs for display in chat (only images to avoid localStorage quota issues)
+      const attachmentsWithData = await Promise.all(
+        chatFiles.map(async (file) => {
+          let dataUrl = ''
+          // Only read images as data URLs to avoid storing large files in localStorage
+          if (file.type.startsWith('image/')) {
+            try {
+              dataUrl = await new Promise<string>((resolve, reject) => {
+                const reader = new FileReader()
+                reader.onload = () => resolve(reader.result as string)
+                reader.onerror = reject
+                reader.readAsDataURL(file.file)
+              })
+            } catch (error) {
+              logger.error('Error reading file as data URL:', error)
+            }
+          }
+          return {
+            id: file.id,
+            name: file.name,
+            type: file.type,
+            size: file.size,
+            dataUrl,
+          }
+        })
+      )
+
+      // Add user message with attachments (include all files, even non-images without dataUrl)
       addMessage({
         content:
           sentMessage || (chatFiles.length > 0 ? `Uploaded ${chatFiles.length} file(s)` : ''),
         workflowId: activeWorkflowId,
         type: 'user',
+        attachments: attachmentsWithData,
       })

       // Prepare workflow input
@@ -626,66 +652,212 @@ export function Chat({ chatMessage, setChatMessage }: ChatProps) {
|
|||||||
|
|
||||||
if (validNewFiles.length > 0) {
|
if (validNewFiles.length > 0) {
|
||||||
setChatFiles([...chatFiles, ...validNewFiles])
|
setChatFiles([...chatFiles, ...validNewFiles])
|
||||||
|
setUploadErrors([]) // Clear errors when files are successfully added
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{/* File upload section */}
|
{/* Error messages */}
|
||||||
<div className='mb-2'>
|
{uploadErrors.length > 0 && (
|
||||||
{uploadErrors.length > 0 && (
|
<div className='mb-2'>
|
||||||
<div className='mb-2'>
|
<div className='rounded-lg border border-red-200 bg-red-50 p-3 dark:border-red-800/50 dark:bg-red-950/20'>
|
||||||
<Notice variant='error' title='File upload error'>
|
<div className='flex items-start gap-2'>
|
||||||
<ul className='list-disc pl-5'>
|
<AlertCircle className='mt-0.5 h-4 w-4 shrink-0 text-red-600 dark:text-red-400' />
|
||||||
{uploadErrors.map((err, idx) => (
|
<div className='flex-1'>
|
||||||
<li key={idx}>{err}</li>
|
<div className='mb-1 font-medium text-red-800 text-sm dark:text-red-300'>
|
||||||
))}
|
File upload error
|
||||||
</ul>
|
</div>
|
||||||
</Notice>
|
<div className='space-y-1'>
|
||||||
|
{uploadErrors.map((err, idx) => (
|
||||||
|
<div key={idx} className='text-red-700 text-sm dark:text-red-400'>
|
||||||
|
{err}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Combined input container matching copilot style */}
|
||||||
|
<div
|
||||||
|
className={`rounded-[8px] border border-[#E5E5E5] bg-[#FFFFFF] p-2 shadow-xs transition-all duration-200 dark:border-[#414141] dark:bg-[var(--surface-elevated)] ${
|
||||||
|
isDragOver
|
||||||
|
? 'border-[var(--brand-primary-hover-hex)] bg-purple-50/50 dark:border-[var(--brand-primary-hover-hex)] dark:bg-purple-950/20'
|
||||||
|
: ''
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{/* File thumbnails */}
|
||||||
|
{chatFiles.length > 0 && (
|
||||||
|
<div className='mb-2 flex flex-wrap gap-1.5'>
|
||||||
|
{chatFiles.map((file) => {
|
||||||
|
const isImage = file.type.startsWith('image/')
|
||||||
|
const previewUrl = isImage ? URL.createObjectURL(file.file) : null
|
||||||
|
const getFileIcon = (type: string) => {
|
||||||
|
if (type.includes('pdf'))
|
||||||
|
return <FileText className='h-5 w-5 text-muted-foreground' />
|
||||||
|
if (type.startsWith('image/'))
|
||||||
|
return <Image className='h-5 w-5 text-muted-foreground' />
|
||||||
|
if (type.includes('text') || type.includes('json'))
|
||||||
|
return <FileText className='h-5 w-5 text-muted-foreground' />
|
||||||
|
return <File className='h-5 w-5 text-muted-foreground' />
|
||||||
|
}
|
||||||
|
const formatFileSize = (bytes: number) => {
|
||||||
|
if (bytes === 0) return '0 B'
|
||||||
|
const k = 1024
|
||||||
|
const sizes = ['B', 'KB', 'MB', 'GB']
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||||
|
return `${Math.round((bytes / k ** i) * 10) / 10} ${sizes[i]}`
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={file.id}
|
||||||
|
className={`group relative overflow-hidden rounded-md border border-border/50 bg-muted/20 ${
|
||||||
|
previewUrl
|
||||||
|
? 'h-16 w-16'
|
||||||
|
: 'flex h-16 min-w-[120px] max-w-[200px] items-center gap-2 px-2'
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{previewUrl ? (
|
||||||
|
<img
|
||||||
|
src={previewUrl}
|
||||||
|
alt={file.name}
|
||||||
|
className='h-full w-full object-cover'
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<div className='flex h-8 w-8 flex-shrink-0 items-center justify-center rounded bg-background/50'>
|
||||||
|
{getFileIcon(file.type)}
|
||||||
|
</div>
|
||||||
|
<div className='min-w-0 flex-1'>
|
||||||
|
<div className='truncate font-medium text-foreground text-xs'>
|
||||||
|
{file.name}
|
||||||
|
</div>
|
||||||
|
<div className='text-[10px] text-muted-foreground'>
|
||||||
|
{formatFileSize(file.size)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Remove button */}
|
||||||
|
<Button
|
||||||
|
variant='ghost'
|
||||||
|
size='icon'
|
||||||
|
onClick={(e) => {
|
||||||
|
e.stopPropagation()
|
||||||
|
if (previewUrl) URL.revokeObjectURL(previewUrl)
|
||||||
|
setChatFiles(chatFiles.filter((f) => f.id !== file.id))
|
||||||
|
}}
|
||||||
|
className='absolute top-0.5 right-0.5 h-5 w-5 bg-gray-800/80 p-0 text-white opacity-0 transition-opacity hover:bg-gray-800/80 hover:text-white group-hover:opacity-100 dark:bg-black/70 dark:hover:bg-black/70 dark:hover:text-white'
|
||||||
|
>
|
||||||
|
<X className='h-3 w-3' />
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
})}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
<ChatFileUpload
|
|
||||||
files={chatFiles}
|
|
||||||
onFilesChange={(files) => {
|
|
||||||
setChatFiles(files)
|
|
||||||
}}
|
|
||||||
maxFiles={5}
|
|
||||||
maxSize={10}
|
|
||||||
disabled={!activeWorkflowId || isExecuting || isUploadingFiles}
|
|
||||||
onError={(errors) => setUploadErrors(errors)}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className='flex gap-2'>
|
{/* Input row */}
|
||||||
<Input
|
<div className='flex items-center gap-1'>
|
||||||
ref={inputRef}
|
{/* Attach button */}
|
||||||
value={chatMessage}
|
<Button
|
||||||
onChange={(e) => {
|
variant='ghost'
|
||||||
setChatMessage(e.target.value)
|
size='icon'
|
||||||
setHistoryIndex(-1) // Reset history index when typing
|
onClick={() => document.getElementById('chat-file-input')?.click()}
|
||||||
}}
|
disabled={
|
||||||
onKeyDown={handleKeyPress}
|
!activeWorkflowId || isExecuting || isUploadingFiles || chatFiles.length >= 5
|
||||||
placeholder={isDragOver ? 'Drop files here...' : 'Type a message...'}
|
}
|
||||||
className={`h-9 flex-1 rounded-lg border-[#E5E5E5] bg-[#FFFFFF] text-muted-foreground shadow-xs focus-visible:ring-0 focus-visible:ring-offset-0 dark:border-[#414141] dark:bg-[var(--surface-elevated)] ${
|
className='h-6 w-6 shrink-0 text-muted-foreground hover:text-foreground'
|
||||||
isDragOver
|
title='Attach files'
|
||||||
? 'border-[var(--brand-primary-hover-hex)] bg-purple-50/50 dark:border-[var(--brand-primary-hover-hex)] dark:bg-purple-950/20'
|
>
|
||||||
: ''
|
<Paperclip className='h-3 w-3' />
|
||||||
}`}
|
</Button>
|
||||||
disabled={!activeWorkflowId || isExecuting || isUploadingFiles}
|
|
||||||
/>
|
{/* Hidden file input */}
|
||||||
<Button
|
<input
|
||||||
onClick={handleSendMessage}
|
id='chat-file-input'
|
||||||
size='icon'
|
type='file'
|
||||||
disabled={
|
multiple
|
||||||
(!chatMessage.trim() && chatFiles.length === 0) ||
|
onChange={(e) => {
|
||||||
!activeWorkflowId ||
|
const files = e.target.files
|
||||||
isExecuting ||
|
if (!files) return
|
||||||
isUploadingFiles
|
|
||||||
}
|
const newFiles: ChatFile[] = []
|
||||||
className='h-9 w-9 rounded-lg bg-[var(--brand-primary-hover-hex)] text-white shadow-[0_0_0_0_var(--brand-primary-hover-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
const errors: string[] = []
|
||||||
>
|
for (let i = 0; i < files.length; i++) {
|
||||||
<ArrowUp className='h-4 w-4' />
|
if (chatFiles.length + newFiles.length >= 5) {
|
||||||
</Button>
|
errors.push('Maximum 5 files allowed')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
const file = files[i]
|
||||||
|
if (file.size > 10 * 1024 * 1024) {
|
||||||
|
errors.push(`${file.name} is too large (max 10MB)`)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for duplicates
|
||||||
|
const isDuplicate = chatFiles.some(
|
||||||
|
(existingFile) =>
|
||||||
|
existingFile.name === file.name && existingFile.size === file.size
|
||||||
|
)
|
||||||
|
if (isDuplicate) {
|
||||||
|
errors.push(`${file.name} already added`)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
newFiles.push({
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
name: file.name,
|
||||||
|
size: file.size,
|
||||||
|
type: file.type,
|
||||||
|
file,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (errors.length > 0) setUploadErrors(errors)
|
||||||
|
if (newFiles.length > 0) {
|
||||||
|
setChatFiles([...chatFiles, ...newFiles])
|
||||||
|
setUploadErrors([]) // Clear errors when files are successfully added
|
||||||
|
}
|
||||||
|
e.target.value = ''
|
||||||
|
}}
|
||||||
|
className='hidden'
|
||||||
|
disabled={!activeWorkflowId || isExecuting || isUploadingFiles}
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* Text input */}
|
||||||
|
<Input
|
||||||
|
ref={inputRef}
|
||||||
|
value={chatMessage}
|
||||||
|
onChange={(e) => {
|
||||||
|
setChatMessage(e.target.value)
|
||||||
|
setHistoryIndex(-1)
|
||||||
|
}}
|
||||||
|
onKeyDown={handleKeyPress}
|
||||||
|
placeholder={isDragOver ? 'Drop files here...' : 'Type a message...'}
|
||||||
|
className='h-8 flex-1 border-0 bg-transparent font-sans text-foreground text-sm shadow-none placeholder:text-muted-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||||
|
disabled={!activeWorkflowId || isExecuting || isUploadingFiles}
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* Send button */}
|
||||||
|
<Button
|
||||||
|
onClick={handleSendMessage}
|
||||||
|
size='icon'
|
||||||
|
disabled={
|
||||||
|
(!chatMessage.trim() && chatFiles.length === 0) ||
|
||||||
|
!activeWorkflowId ||
|
||||||
|
isExecuting ||
|
||||||
|
isUploadingFiles
|
||||||
|
}
|
||||||
|
className='h-6 w-6 shrink-0 rounded-full bg-[var(--brand-primary-hover-hex)] text-white shadow-[0_0_0_0_var(--brand-primary-hover-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
|
||||||
|
>
|
||||||
|
<ArrowUp className='h-3 w-3' />
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,4 +1,13 @@
 import { useMemo } from 'react'
+import { File, FileText, Image as ImageIcon } from 'lucide-react'

+interface ChatAttachment {
+  id: string
+  name: string
+  type: string
+  dataUrl: string
+  size?: number
+}
+
 interface ChatMessageProps {
   message: {
@@ -7,6 +16,7 @@ interface ChatMessageProps {
     timestamp: string | Date
     type: 'user' | 'workflow'
     isStreaming?: boolean
+    attachments?: ChatAttachment[]
   }
 }

@@ -58,12 +68,81 @@ export function ChatMessage({ message }: ChatMessageProps) {
|
|||||||
if (message.type === 'user') {
|
if (message.type === 'user') {
|
||||||
return (
|
return (
|
||||||
<div className='w-full py-2'>
|
<div className='w-full py-2'>
|
||||||
|
{/* File attachments displayed above the message, completely separate from message box */}
|
||||||
|
{message.attachments && message.attachments.length > 0 && (
|
||||||
|
<div className='mb-1 flex justify-end'>
|
||||||
|
<div className='flex flex-wrap gap-1.5'>
|
||||||
|
{message.attachments.map((attachment) => {
|
||||||
|
const isImage = attachment.type.startsWith('image/')
|
||||||
|
const getFileIcon = (type: string) => {
|
||||||
|
if (type.includes('pdf'))
|
||||||
|
return <FileText className='h-5 w-5 text-muted-foreground' />
|
||||||
|
if (type.startsWith('image/'))
|
||||||
|
return <ImageIcon className='h-5 w-5 text-muted-foreground' />
|
||||||
|
if (type.includes('text') || type.includes('json'))
|
||||||
|
return <FileText className='h-5 w-5 text-muted-foreground' />
|
||||||
|
return <File className='h-5 w-5 text-muted-foreground' />
|
||||||
|
}
|
||||||
|
const formatFileSize = (bytes?: number) => {
|
||||||
|
if (!bytes || bytes === 0) return ''
|
||||||
|
const k = 1024
|
||||||
|
const sizes = ['B', 'KB', 'MB', 'GB']
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||||
|
return `${Math.round((bytes / k ** i) * 10) / 10} ${sizes[i]}`
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={attachment.id}
|
||||||
|
className={`relative overflow-hidden rounded-md border border-border/50 bg-muted/20 ${
|
||||||
|
attachment.dataUrl?.trim() ? 'cursor-pointer' : ''
|
||||||
|
} ${isImage ? 'h-16 w-16' : 'flex h-16 min-w-[120px] max-w-[200px] items-center gap-2 px-2'}`}
|
||||||
|
onClick={(e) => {
|
||||||
|
if (attachment.dataUrl?.trim()) {
|
||||||
|
e.preventDefault()
|
||||||
|
window.open(attachment.dataUrl, '_blank')
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{isImage && attachment.dataUrl ? (
|
||||||
|
<img
|
||||||
|
src={attachment.dataUrl}
|
||||||
|
alt={attachment.name}
|
||||||
|
className='h-full w-full object-cover'
|
||||||
|
/>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<div className='flex h-8 w-8 flex-shrink-0 items-center justify-center rounded bg-background/50'>
|
||||||
|
{getFileIcon(attachment.type)}
|
||||||
|
</div>
|
||||||
|
<div className='min-w-0 flex-1'>
|
||||||
|
<div className='truncate font-medium text-foreground text-xs'>
|
||||||
|
{attachment.name}
|
||||||
|
</div>
|
||||||
|
{attachment.size && (
|
||||||
|
<div className='text-[10px] text-muted-foreground'>
|
||||||
|
{formatFileSize(attachment.size)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
<div className='flex justify-end'>
|
<div className='flex justify-end'>
|
||||||
<div className='max-w-[80%]'>
|
<div className='max-w-[80%]'>
|
||||||
<div className='rounded-[10px] bg-secondary px-3 py-2'>
|
<div className='rounded-[10px] bg-secondary px-3 py-2'>
|
||||||
<div className='whitespace-pre-wrap break-words font-normal text-foreground text-sm leading-normal'>
|
{/* Render text content if present and not just file count message */}
|
||||||
<WordWrap text={formattedContent} />
|
{formattedContent && !formattedContent.startsWith('Uploaded') && (
|
||||||
</div>
|
<div className='whitespace-pre-wrap break-words font-normal text-foreground text-sm leading-normal'>
|
||||||
|
<WordWrap text={formattedContent} />
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -78,7 +157,7 @@ export function ChatMessage({ message }: ChatMessageProps) {
         <div className='whitespace-pre-wrap break-words text-foreground'>
           <WordWrap text={formattedContent} />
           {message.isStreaming && (
-            <span className='ml-1 inline-block h-4 w-2 animate-pulse bg-primary' />
+            <span className='ml-1 inline-block h-4 w-2 animate-pulse bg-gray-400 dark:bg-gray-300' />
           )}
         </div>
       </div>
@@ -3,7 +3,7 @@ import { ResponseFormat as SharedResponseFormat } from '@/app/workspace/[workspa
 export interface JSONProperty {
   id: string
   key: string
-  type: 'string' | 'number' | 'boolean' | 'object' | 'array'
+  type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file'
   value?: any
   collapsed?: boolean
 }
@@ -1,5 +1,5 @@
 import { useEffect, useRef, useState } from 'react'
-import { ChevronDown, Plus, Trash } from 'lucide-react'
+import { ChevronDown, Paperclip, Plus, Trash } from 'lucide-react'
 import { Badge } from '@/components/ui/badge'
 import { Button } from '@/components/ui/button'
 import {
@@ -27,7 +27,7 @@ import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/
 interface Field {
   id: string
   name: string
-  type?: 'string' | 'number' | 'boolean' | 'object' | 'array'
+  type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
   value?: string
   collapsed?: boolean
 }
@@ -339,37 +339,46 @@ export function FieldFormat({
                     onClick={() => updateField(field.id, 'type', 'string')}
                     className='cursor-pointer'
                   >
-                    <span className='mr-2 font-mono'>Aa</span>
+                    <span className='mr-2 w-6 text-center font-mono'>Aa</span>
                     <span>String</span>
                   </DropdownMenuItem>
                   <DropdownMenuItem
                     onClick={() => updateField(field.id, 'type', 'number')}
                     className='cursor-pointer'
                   >
-                    <span className='mr-2 font-mono'>123</span>
+                    <span className='mr-2 w-6 text-center font-mono'>123</span>
                     <span>Number</span>
                   </DropdownMenuItem>
                   <DropdownMenuItem
                     onClick={() => updateField(field.id, 'type', 'boolean')}
                     className='cursor-pointer'
                   >
-                    <span className='mr-2 font-mono'>0/1</span>
+                    <span className='mr-2 w-6 text-center font-mono'>0/1</span>
                     <span>Boolean</span>
                   </DropdownMenuItem>
                   <DropdownMenuItem
                     onClick={() => updateField(field.id, 'type', 'object')}
                     className='cursor-pointer'
                   >
-                    <span className='mr-2 font-mono'>{'{}'}</span>
+                    <span className='mr-2 w-6 text-center font-mono'>{'{}'}</span>
                     <span>Object</span>
                   </DropdownMenuItem>
                   <DropdownMenuItem
                     onClick={() => updateField(field.id, 'type', 'array')}
                     className='cursor-pointer'
                   >
-                    <span className='mr-2 font-mono'>[]</span>
+                    <span className='mr-2 w-6 text-center font-mono'>[]</span>
                     <span>Array</span>
                   </DropdownMenuItem>
+                  <DropdownMenuItem
+                    onClick={() => updateField(field.id, 'type', 'files')}
+                    className='cursor-pointer'
+                  >
+                    <div className='mr-2 flex w-6 justify-center'>
+                      <Paperclip className='h-4 w-4' />
+                    </div>
+                    <span>Files</span>
+                  </DropdownMenuItem>
                 </DropdownMenuContent>
               </DropdownMenu>
             </div>
@@ -25,7 +25,7 @@ export const ChatTriggerBlock: BlockConfig = {
   outputs: {
     input: { type: 'string', description: 'User message' },
     conversationId: { type: 'string', description: 'Conversation ID' },
-    files: { type: 'array', description: 'Uploaded files' },
+    files: { type: 'files', description: 'Uploaded files' },
   },
   triggers: {
     enabled: true,
@@ -3,7 +3,14 @@ import type { ToolResponse } from '@/tools/types'

 export type BlockIcon = (props: SVGProps<SVGSVGElement>) => JSX.Element
 export type ParamType = 'string' | 'number' | 'boolean' | 'json'
-export type PrimitiveValueType = 'string' | 'number' | 'boolean' | 'json' | 'array' | 'any'
+export type PrimitiveValueType =
+  | 'string'
+  | 'number'
+  | 'boolean'
+  | 'json'
+  | 'array'
+  | 'files'
+  | 'any'

 export type BlockCategory = 'blocks' | 'tools' | 'triggers'

@@ -155,7 +155,7 @@ const getOutputTypeForPath = (
     const chatModeTypes: Record<string, string> = {
       input: 'string',
       conversationId: 'string',
-      files: 'array',
+      files: 'files',
     }
     return chatModeTypes[outputPath] || 'any'
   }
@@ -336,10 +336,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({

   const combinedAccessiblePrefixes = useMemo(() => {
     if (!rawAccessiblePrefixes) return new Set<string>()
-    const normalized = new Set<string>(rawAccessiblePrefixes)
-    normalized.add(normalizeBlockName(blockId))
-    return normalized
-  }, [rawAccessiblePrefixes, blockId])
+    return new Set<string>(rawAccessiblePrefixes)
+  }, [rawAccessiblePrefixes])

   // Subscribe to live subblock values for the active workflow to react to input format changes
   const workflowSubBlockValues = useSubBlockStore((state) =>
@@ -997,6 +995,33 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({

      let processedTag = tag

+      // Check if this is a file property and add [0] automatically
+      const fileProperties = ['url', 'name', 'size', 'type', 'key', 'uploadedAt', 'expiresAt']
+      const parts = tag.split('.')
+      if (parts.length >= 2 && fileProperties.includes(parts[parts.length - 1])) {
+        const fieldName = parts[parts.length - 2]
+
+        if (blockGroup) {
+          const block = useWorkflowStore.getState().blocks[blockGroup.blockId]
+          const blockConfig = block ? (getBlock(block.type) ?? null) : null
+          const mergedSubBlocks = getMergedSubBlocks(blockGroup.blockId)
+
+          const fieldType = getOutputTypeForPath(
+            block,
+            blockConfig,
+            blockGroup.blockId,
+            fieldName,
+            mergedSubBlocks
+          )
+
+          if (fieldType === 'files') {
+            const blockAndField = parts.slice(0, -1).join('.')
+            const property = parts[parts.length - 1]
+            processedTag = `${blockAndField}[0].${property}`
+          }
+        }
+      }
+
      if (tag.startsWith(TAG_PREFIXES.VARIABLE)) {
        const variableName = tag.substring(TAG_PREFIXES.VARIABLE.length)
        const variableObj = workflowVariables.find(
@@ -9,7 +9,7 @@ const logger = createLogger('ResponseBlockHandler')
 interface JSONProperty {
   id: string
   name: string
-  type: 'string' | 'number' | 'boolean' | 'object' | 'array'
+  type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
   value: any
   collapsed?: boolean
 }
@@ -140,6 +140,9 @@ export class ResponseBlockHandler implements BlockHandler {
         return this.convertNumberValue(prop.value)
       case 'boolean':
         return this.convertBooleanValue(prop.value)
+      case 'files':
+        // File values should be passed through as-is (UserFile objects)
+        return prop.value
       default:
         return prop.value
     }
@@ -126,8 +126,17 @@ export class ExecutionLogger implements IExecutionLoggerService {
     }
     finalOutput: BlockOutputData
     traceSpans?: TraceSpan[]
+    workflowInput?: any
   }): Promise<WorkflowExecutionLog> {
-    const { executionId, endedAt, totalDurationMs, costSummary, finalOutput, traceSpans } = params
+    const {
+      executionId,
+      endedAt,
+      totalDurationMs,
+      costSummary,
+      finalOutput,
+      traceSpans,
+      workflowInput,
+    } = params

     logger.debug(`Completing workflow execution ${executionId}`)

@@ -145,8 +154,8 @@ export class ExecutionLogger implements IExecutionLoggerService {

     const level = hasErrors ? 'error' : 'info'

-    // Extract files from trace spans and final output
-    const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput)
+    // Extract files from trace spans, final output, and workflow input
+    const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput, workflowInput)

     const [updatedLog] = await db
       .update(workflowExecutionLogs)
@@ -456,9 +465,13 @@ export class ExecutionLogger implements IExecutionLoggerService {
   }

   /**
-   * Extract file references from execution trace spans and final output
+   * Extract file references from execution trace spans, final output, and workflow input
    */
-  private extractFilesFromExecution(traceSpans?: any[], finalOutput?: any): any[] {
+  private extractFilesFromExecution(
+    traceSpans?: any[],
+    finalOutput?: any,
+    workflowInput?: any
+  ): any[] {
     const files: any[] = []
     const seenFileIds = new Set<string>()

@@ -558,6 +571,15 @@ export class ExecutionLogger implements IExecutionLoggerService {
       extractFilesFromObject(finalOutput, 'final_output')
     }

+    // Extract files from workflow input
+    if (workflowInput) {
+      extractFilesFromObject(workflowInput, 'workflow_input')
+    }
+
+    logger.debug(`Extracted ${files.length} file(s) from execution`, {
+      fileNames: files.map((f) => f.name),
+    })
+
     return files
   }
 }
@@ -28,6 +28,7 @@ export interface SessionCompleteParams {
   totalDurationMs?: number
   finalOutput?: any
   traceSpans?: any[]
+  workflowInput?: any
 }

 export interface SessionErrorCompleteParams {
@@ -106,7 +107,7 @@ export class LoggingSession {
   }

   async complete(params: SessionCompleteParams = {}): Promise<void> {
-    const { endedAt, totalDurationMs, finalOutput, traceSpans } = params
+    const { endedAt, totalDurationMs, finalOutput, traceSpans, workflowInput } = params

     try {
       const costSummary = calculateCostSummary(traceSpans || [])
@@ -118,6 +119,7 @@ export class LoggingSession {
         costSummary,
         finalOutput: finalOutput || {},
         traceSpans: traceSpans || [],
+        workflowInput,
       })

       if (this.requestId) {
@@ -37,7 +37,7 @@ export function getBlockOutputs(
     return {
       input: { type: 'string', description: 'User message' },
       conversationId: { type: 'string', description: 'Conversation ID' },
-      files: { type: 'array', description: 'Uploaded files' },
+      files: { type: 'files', description: 'Uploaded files' },
     }
   }
   if (
@@ -136,7 +136,20 @@ export function getBlockOutputPaths(

     // If value has 'type' property, it's a leaf node (output definition)
     if (value && typeof value === 'object' && 'type' in value) {
-      paths.push(path)
+      // Special handling for 'files' type - expand to show array element properties
+      if (value.type === 'files') {
+        // Show properties without [0] for cleaner display
+        // The tag dropdown will add [0] automatically when inserting
+        paths.push(`${path}.url`)
+        paths.push(`${path}.name`)
+        paths.push(`${path}.size`)
+        paths.push(`${path}.type`)
+        paths.push(`${path}.key`)
+        paths.push(`${path}.uploadedAt`)
+        paths.push(`${path}.expiresAt`)
+      } else {
+        paths.push(path)
+      }
     }
     // If value is an object without 'type', recurse into it
     else if (value && typeof value === 'object' && !Array.isArray(value)) {
@@ -164,8 +177,33 @@ export function getBlockOutputType(
 ): string {
   const outputs = getBlockOutputs(blockType, subBlocks, triggerMode)

-  // Navigate through nested path
-  const pathParts = outputPath.split('.')
+  const arrayIndexRegex = /\[(\d+)\]/g
+  const cleanPath = outputPath.replace(arrayIndexRegex, '')
+  const pathParts = cleanPath.split('.').filter(Boolean)
+
+  const filePropertyTypes: Record<string, string> = {
+    url: 'string',
+    name: 'string',
+    size: 'number',
+    type: 'string',
+    key: 'string',
+    uploadedAt: 'string',
+    expiresAt: 'string',
+  }
+
+  const lastPart = pathParts[pathParts.length - 1]
+  if (lastPart && filePropertyTypes[lastPart]) {
+    const parentPath = pathParts.slice(0, -1).join('.')
+    let current: any = outputs
+    for (const part of pathParts.slice(0, -1)) {
+      if (!current || typeof current !== 'object') break
+      current = current[part]
+    }
+    if (current && typeof current === 'object' && 'type' in current && current.type === 'files') {
+      return filePropertyTypes[lastPart]
+    }
+  }
+
   let current: any = outputs

   for (const part of pathParts) {
@@ -1,53 +1,173 @@
 import { db } from '@sim/db'
-import { apiKey, userStats, workflow as workflowTable } from '@sim/db/schema'
-import { eq } from 'drizzle-orm'
+import {
+  apiKey,
+  permissions,
+  userStats,
+  workflow as workflowTable,
+  workspace,
+} from '@sim/db/schema'
+import type { InferSelectModel } from 'drizzle-orm'
+import { and, eq } from 'drizzle-orm'
 import { NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { getEnv } from '@/lib/env'
 import { createLogger } from '@/lib/logs/console/logger'
-import { hasWorkspaceAdminAccess } from '@/lib/permissions/utils'
+import type { PermissionType } from '@/lib/permissions/utils'
 import type { ExecutionResult } from '@/executor/types'
 import type { WorkflowState } from '@/stores/workflows/workflow/types'

 const logger = createLogger('WorkflowUtils')

+const WORKFLOW_BASE_SELECTION = {
+  id: workflowTable.id,
+  userId: workflowTable.userId,
+  workspaceId: workflowTable.workspaceId,
+  folderId: workflowTable.folderId,
+  name: workflowTable.name,
+  description: workflowTable.description,
+  color: workflowTable.color,
+  lastSynced: workflowTable.lastSynced,
+  createdAt: workflowTable.createdAt,
+  updatedAt: workflowTable.updatedAt,
+  isDeployed: workflowTable.isDeployed,
+  deployedState: workflowTable.deployedState,
+  deployedAt: workflowTable.deployedAt,
+  pinnedApiKeyId: workflowTable.pinnedApiKeyId,
+  collaborators: workflowTable.collaborators,
+  runCount: workflowTable.runCount,
+  lastRunAt: workflowTable.lastRunAt,
+  variables: workflowTable.variables,
+  isPublished: workflowTable.isPublished,
+  marketplaceData: workflowTable.marketplaceData,
+  pinnedApiKeyKey: apiKey.key,
+  pinnedApiKeyName: apiKey.name,
+  pinnedApiKeyType: apiKey.type,
+  pinnedApiKeyWorkspaceId: apiKey.workspaceId,
+}
+
+type WorkflowSelection = InferSelectModel<typeof workflowTable>
+type ApiKeySelection = InferSelectModel<typeof apiKey>
+
+type WorkflowRow = WorkflowSelection & {
+  pinnedApiKeyKey: ApiKeySelection['key'] | null
+  pinnedApiKeyName: ApiKeySelection['name'] | null
+  pinnedApiKeyType: ApiKeySelection['type'] | null
+  pinnedApiKeyWorkspaceId: ApiKeySelection['workspaceId'] | null
+}
+
+type WorkflowWithPinnedKey = WorkflowSelection & {
+  pinnedApiKey: Pick<ApiKeySelection, 'id' | 'name' | 'key' | 'type' | 'workspaceId'> | null
+}
+
+function mapWorkflowRow(row: WorkflowRow | undefined): WorkflowWithPinnedKey | undefined {
+  if (!row) {
+    return undefined
+  }
+
+  const {
+    pinnedApiKeyKey,
+    pinnedApiKeyName,
+    pinnedApiKeyType,
+    pinnedApiKeyWorkspaceId,
+    ...workflowWithoutDerived
+  } = row
+
+  const pinnedApiKey =
+    workflowWithoutDerived.pinnedApiKeyId && pinnedApiKeyKey && pinnedApiKeyName && pinnedApiKeyType
+      ? {
+          id: workflowWithoutDerived.pinnedApiKeyId,
+          name: pinnedApiKeyName,
+          key: pinnedApiKeyKey,
+          type: pinnedApiKeyType,
+          workspaceId: pinnedApiKeyWorkspaceId,
+        }
+      : null
+
+  return {
+    ...workflowWithoutDerived,
+    pinnedApiKey,
+  }
+}
+
 export async function getWorkflowById(id: string) {
-  const workflows = await db
-    .select({
-      id: workflowTable.id,
-      userId: workflowTable.userId,
-      workspaceId: workflowTable.workspaceId,
-      folderId: workflowTable.folderId,
-      name: workflowTable.name,
-      description: workflowTable.description,
-      color: workflowTable.color,
-      lastSynced: workflowTable.lastSynced,
-      createdAt: workflowTable.createdAt,
-      updatedAt: workflowTable.updatedAt,
-      isDeployed: workflowTable.isDeployed,
-      deployedState: workflowTable.deployedState,
-      deployedAt: workflowTable.deployedAt,
-      pinnedApiKeyId: workflowTable.pinnedApiKeyId,
-      collaborators: workflowTable.collaborators,
-      runCount: workflowTable.runCount,
-      lastRunAt: workflowTable.lastRunAt,
-      variables: workflowTable.variables,
-      isPublished: workflowTable.isPublished,
-      marketplaceData: workflowTable.marketplaceData,
-      pinnedApiKey: {
-        id: apiKey.id,
-        name: apiKey.name,
-        key: apiKey.key,
-        type: apiKey.type,
-        workspaceId: apiKey.workspaceId,
-      },
-    })
+  const rows = await db
+    .select(WORKFLOW_BASE_SELECTION)
     .from(workflowTable)
     .leftJoin(apiKey, eq(workflowTable.pinnedApiKeyId, apiKey.id))
     .where(eq(workflowTable.id, id))
     .limit(1)

-  return workflows[0]
+  return mapWorkflowRow(rows[0] as WorkflowRow | undefined)
+}
+
+type WorkflowRecord = ReturnType<typeof getWorkflowById> extends Promise<infer R>
+  ? NonNullable<R>
+  : never
+
+export interface WorkflowAccessContext {
+  workflow: WorkflowRecord
+  workspaceOwnerId: string | null
+  workspacePermission: PermissionType | null
+  isOwner: boolean
+  isWorkspaceOwner: boolean
+}
+
+export async function getWorkflowAccessContext(
+  workflowId: string,
+  userId?: string
+): Promise<WorkflowAccessContext | null> {
+  const rows = await db
+    .select({
+      ...WORKFLOW_BASE_SELECTION,
+      workspaceOwnerId: workspace.ownerId,
+      workspacePermission: permissions.permissionType,
+    })
+    .from(workflowTable)
+    .leftJoin(apiKey, eq(workflowTable.pinnedApiKeyId, apiKey.id))
+    .leftJoin(workspace, eq(workspace.id, workflowTable.workspaceId))
+    .leftJoin(
+      permissions,
+      and(
+        eq(permissions.entityType, 'workspace'),
+        eq(permissions.entityId, workflowTable.workspaceId),
+        userId ? eq(permissions.userId, userId) : eq(permissions.userId, '' as unknown as string)
+      )
+    )
+    .where(eq(workflowTable.id, workflowId))
+    .limit(1)
+
+  const row = rows[0] as
+    | (WorkflowRow & {
+        workspaceOwnerId: string | null
+        workspacePermission: PermissionType | null
+      })
+    | undefined
+
+  if (!row) {
+    return null
+  }
+
+  const workflow = mapWorkflowRow(row as WorkflowRow)
+
+  if (!workflow) {
+    return null
+  }
+
+  const resolvedWorkspaceOwner = row.workspaceOwnerId ?? null
+  const resolvedWorkspacePermission = row.workspacePermission ?? null
+
+  const resolvedUserId = userId ?? null
+
+  const isOwner = resolvedUserId ? workflow.userId === resolvedUserId : false
+  const isWorkspaceOwner = resolvedUserId ? resolvedWorkspaceOwner === resolvedUserId : false
+
+  return {
+    workflow,
+    workspaceOwnerId: resolvedWorkspaceOwner,
+    workspacePermission: resolvedWorkspacePermission,
+    isOwner,
+    isWorkspaceOwner,
+  }
 }

 export async function updateWorkflowRunCounts(workflowId: string, runs = 1) {
@@ -421,8 +541,8 @@ export async function validateWorkflowPermissions(
     }
   }

-  const workflow = await getWorkflowById(workflowId)
-  if (!workflow) {
+  const accessContext = await getWorkflowAccessContext(workflowId, session.user.id)
+  if (!accessContext) {
     logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
     return {
       error: { message: 'Workflow not found', status: 404 },
@@ -431,9 +551,31 @@ export async function validateWorkflowPermissions(
     }
   }

+  const { workflow, workspacePermission, isOwner } = accessContext
+
+  if (isOwner) {
+    return {
+      error: null,
+      session,
+      workflow,
+    }
+  }
+
   if (workflow.workspaceId) {
-    const hasAccess = await hasWorkspaceAdminAccess(session.user.id, workflow.workspaceId)
-    if (!hasAccess) {
+    let hasPermission = false
+
+    if (action === 'read') {
+      // Any workspace permission allows read
+      hasPermission = workspacePermission !== null
+    } else if (action === 'write') {
+      // Write or admin permission allows write
+      hasPermission = workspacePermission === 'write' || workspacePermission === 'admin'
+    } else if (action === 'admin') {
+      // Only admin permission allows admin actions
+      hasPermission = workspacePermission === 'admin'
+    }
+
+    if (!hasPermission) {
       logger.warn(
         `[${requestId}] User ${session.user.id} unauthorized to ${action} workflow ${workflowId} in workspace ${workflow.workspaceId}`
       )
@@ -444,15 +586,13 @@ export async function validateWorkflowPermissions(
       }
     }
   } else {
-    if (workflow.userId !== session.user.id) {
-      logger.warn(
-        `[${requestId}] User ${session.user.id} unauthorized to ${action} workflow ${workflowId} owned by ${workflow.userId}`
-      )
-      return {
-        error: { message: `Unauthorized: Access denied to ${action} this workflow`, status: 403 },
-        session: null,
-        workflow: null,
-      }
-    }
+    logger.warn(
+      `[${requestId}] User ${session.user.id} unauthorized to ${action} workflow ${workflowId} owned by ${workflow.userId}`
+    )
+    return {
+      error: { message: `Unauthorized: Access denied to ${action} this workflow`, status: 403 },
+      session: null,
+      workflow: null,
+    }
   }
 }

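A minimal sketch of how a caller might reuse `getWorkflowAccessContext` for the read-only case (letting collaborators with any workspace permission view deployment info, per the access rules above). The helper name, import path, and wrapper function below are illustrative, not part of the change:

```typescript
// Hypothetical helper (not from the commit); the import path is assumed.
import { getWorkflowAccessContext } from '@/lib/workflows/utils'

export async function canViewDeployment(workflowId: string, userId: string): Promise<boolean> {
  const ctx = await getWorkflowAccessContext(workflowId, userId)
  if (!ctx) return false
  // Mirrors the 'read' branch above: owners always pass, otherwise any
  // workspace permission is sufficient.
  return ctx.isOwner || ctx.workspacePermission !== null
}
```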
@@ -1,3 +1,11 @@
+export interface ChatAttachment {
+  id: string
+  name: string
+  type: string
+  dataUrl: string
+  size?: number
+}
+
 export interface ChatMessage {
   id: string
   content: string | any
@@ -6,6 +14,7 @@ export interface ChatMessage {
   timestamp: string
   blockId?: string
   isStreaming?: boolean
+  attachments?: ChatAttachment[]
 }

 export interface OutputConfig {
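For orientation, an attachment in the shape the chat panel stores alongside a message might look like the sketch below; the concrete values are illustrative, and `dataUrl` holds the file as a base64 data URL:

```typescript
// Illustrative only - uses the ChatAttachment interface defined above.
const attachment: ChatAttachment = {
  id: 'att_1',
  name: 'report.pdf',
  type: 'application/pdf',
  dataUrl: 'data:application/pdf;base64,JVBERi0xLjQ...',
  size: 48_213,
};

// A message can then carry it via the optional field added above:
// message.attachments = [attachment]
```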
@@ -57,7 +57,7 @@ result = client.execute_workflow(

 **Parameters:**
 - `workflow_id` (str): The ID of the workflow to execute
-- `input_data` (dict, optional): Input data to pass to the workflow
+- `input_data` (dict, optional): Input data to pass to the workflow. File objects are automatically converted to base64.
 - `timeout` (float): Timeout in seconds (default: 30.0)

 **Returns:** `WorkflowExecutionResult`
@@ -265,6 +265,57 @@ client = SimStudioClient(
 )
 ```
+
+### File Upload
+
+File objects are automatically detected and converted to base64 format. Include them in your input under the field name matching your workflow's API trigger input format.
+
+The SDK converts file objects to this format:
+```python
+{
+    'type': 'file',
+    'data': 'data:mime/type;base64,base64data',
+    'name': 'filename',
+    'mime': 'mime/type'
+}
+```
+
+Alternatively, you can manually provide files using the URL format:
+```python
+{
+    'type': 'url',
+    'data': 'https://example.com/file.pdf',
+    'name': 'file.pdf',
+    'mime': 'application/pdf'
+}
+```
+
+```python
+from simstudio import SimStudioClient
+import os
+
+client = SimStudioClient(api_key=os.getenv("SIM_API_KEY"))
+
+# Upload a single file - include it under the field name from your API trigger
+with open('document.pdf', 'rb') as f:
+    result = client.execute_workflow(
+        'workflow-id',
+        input_data={
+            'documents': [f],  # Must match your workflow's "files" field name
+            'instructions': 'Analyze this document'
+        }
+    )
+
+# Upload multiple files
+with open('doc1.pdf', 'rb') as f1, open('doc2.pdf', 'rb') as f2:
+    result = client.execute_workflow(
+        'workflow-id',
+        input_data={
+            'attachments': [f1, f2],  # Must match your workflow's "files" field name
+            'query': 'Compare these documents'
+        }
+    )
+```
+
 ### Batch Workflow Execution

 ```python
@@ -276,14 +327,14 @@ client = SimStudioClient(api_key=os.getenv("SIM_API_KEY"))
 def execute_workflows_batch(workflow_data_pairs):
     """Execute multiple workflows with different input data."""
     results = []

     for workflow_id, input_data in workflow_data_pairs:
         try:
             # Validate workflow before execution
             if not client.validate_workflow(workflow_id):
                 print(f"Skipping {workflow_id}: not deployed")
                 continue

             result = client.execute_workflow(workflow_id, input_data)
             results.append({
                 "workflow_id": workflow_id,
@@ -291,14 +342,14 @@ def execute_workflows_batch(workflow_data_pairs):
                 "output": result.output,
                 "error": result.error
             })

         except Exception as error:
             results.append({
                 "workflow_id": workflow_id,
                 "success": False,
                 "error": str(error)
             })

     return results

 # Example usage
packages/python-sdk/examples/file_upload.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+"""
+Example: Upload files with workflow execution
+
+This example demonstrates how to upload files when executing a workflow.
+Files are automatically detected and converted to base64 format.
+"""
+
+from simstudio import SimStudioClient
+import os
+
+
+def main():
+    # Initialize the client
+    api_key = os.getenv('SIM_API_KEY')
+    if not api_key:
+        raise ValueError('SIM_API_KEY environment variable is required')
+
+    client = SimStudioClient(api_key=api_key)
+
+    # Example 1: Upload a single file
+    # Include file under the field name from your workflow's API trigger input format
+    print("Example 1: Upload a single file")
+    with open('document.pdf', 'rb') as f:
+        result = client.execute_workflow(
+            workflow_id='your-workflow-id',
+            input_data={
+                'documents': [f],  # Field name must match your API trigger's file input field
+                'instructions': 'Analyze this document'
+            }
+        )
+
+        if result.success:
+            print(f"Success! Output: {result.output}")
+        else:
+            print(f"Failed: {result.error}")
+
+    # Example 2: Upload multiple files
+    print("\nExample 2: Upload multiple files")
+    with open('document1.pdf', 'rb') as f1, open('document2.pdf', 'rb') as f2:
+        result = client.execute_workflow(
+            workflow_id='your-workflow-id',
+            input_data={
+                'attachments': [f1, f2],  # Field name must match your API trigger's file input field
+                'query': 'Compare these documents'
+            }
+        )
+
+        if result.success:
+            print(f"Success! Output: {result.output}")
+        else:
+            print(f"Failed: {result.error}")
+
+
+if __name__ == '__main__':
+    main()
@@ -8,6 +8,7 @@ from typing import Any, Dict, Optional, Union
 from dataclasses import dataclass
 import time
 import random
+import os

 import requests

@@ -109,6 +110,53 @@ class SimStudioClient:
         })
         self._rate_limit_info: Optional[RateLimitInfo] = None

+    def _convert_files_to_base64(self, value: Any) -> Any:
+        """
+        Convert file objects in input to API format (base64).
+        Recursively processes nested dicts and lists.
+        """
+        import base64
+        import io
+
+        # Check if this is a file-like object
+        if hasattr(value, 'read') and callable(value.read):
+            # Save current position if seekable
+            initial_pos = value.tell() if hasattr(value, 'tell') else None
+
+            # Read file bytes
+            file_bytes = value.read()
+
+            # Restore position if seekable
+            if initial_pos is not None and hasattr(value, 'seek'):
+                value.seek(initial_pos)
+
+            # Encode to base64
+            base64_data = base64.b64encode(file_bytes).decode('utf-8')
+
+            # Get file metadata
+            filename = getattr(value, 'name', 'file')
+            if isinstance(filename, str):
+                filename = os.path.basename(filename)
+
+            content_type = getattr(value, 'content_type', 'application/octet-stream')
+
+            return {
+                'type': 'file',
+                'data': f'data:{content_type};base64,{base64_data}',
+                'name': filename,
+                'mime': content_type
+            }
+
+        # Recursively process lists
+        if isinstance(value, list):
+            return [self._convert_files_to_base64(item) for item in value]
+
+        # Recursively process dicts
+        if isinstance(value, dict):
+            return {k: self._convert_files_to_base64(v) for k, v in value.items()}
+
+        return value
+
     def execute_workflow(
         self,
         workflow_id: str,
@@ -122,9 +170,11 @@ class SimStudioClient:
         Execute a workflow with optional input data.
         If async_execution is True, returns immediately with a task ID.

+        File objects in input_data will be automatically detected and converted to base64.
+
         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow
+            input_data: Input data to pass to the workflow (can include file-like objects)
             timeout: Timeout in seconds (default: 30.0)
             stream: Enable streaming responses (default: None)
             selected_outputs: Block outputs to stream (e.g., ["agent1.content"])
@@ -138,19 +188,23 @@ class SimStudioClient:
         """
         url = f"{self.base_url}/api/workflows/{workflow_id}/execute"

-        # Build request body - spread input at root level, then add API control parameters
-        body = input_data.copy() if input_data is not None else {}
-        if stream is not None:
-            body['stream'] = stream
-        if selected_outputs is not None:
-            body['selectedOutputs'] = selected_outputs

         # Build headers - async execution uses X-Execution-Mode header
         headers = self._session.headers.copy()
         if async_execution:
             headers['X-Execution-Mode'] = 'async'

         try:
+            # Build JSON body - spread input at root level, then add API control parameters
+            body = input_data.copy() if input_data is not None else {}
+
+            # Convert any file objects in the input to base64 format
+            body = self._convert_files_to_base64(body)
+
+            if stream is not None:
+                body['stream'] = stream
+            if selected_outputs is not None:
+                body['selectedOutputs'] = selected_outputs
+
             response = self._session.post(
                 url,
                 json=body,
@@ -281,7 +335,7 @@ class SimStudioClient:

         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow
+            input_data: Input data to pass to the workflow (can include file-like objects)
             timeout: Timeout for the initial request in seconds
             stream: Enable streaming responses (default: None)
             selected_outputs: Block outputs to stream (e.g., ["agent1.content"])
@@ -373,7 +427,7 @@ class SimStudioClient:

         Args:
             workflow_id: The ID of the workflow to execute
-            input_data: Input data to pass to the workflow
+            input_data: Input data to pass to the workflow (can include file-like objects)
             timeout: Timeout in seconds
             stream: Enable streaming responses
             selected_outputs: Block outputs to stream
@@ -61,7 +61,7 @@ const result = await client.executeWorkflow('workflow-id', {
 **Parameters:**
 - `workflowId` (string): The ID of the workflow to execute
 - `options` (ExecutionOptions, optional):
-  - `input` (any): Input data to pass to the workflow
+  - `input` (any): Input data to pass to the workflow. File objects are automatically converted to base64.
   - `timeout` (number): Timeout in milliseconds (default: 30000)

 **Returns:** `Promise<WorkflowExecutionResult>`
@@ -261,6 +261,64 @@ const client = new SimStudioClient({
 });
 ```
+
+### File Upload
+
+File objects are automatically detected and converted to base64 format. Include them in your input under the field name matching your workflow's API trigger input format:
+
+The SDK converts File objects to this format:
+```typescript
+{
+  type: 'file',
+  data: 'data:mime/type;base64,base64data',
+  name: 'filename',
+  mime: 'mime/type'
+}
+```
+
+Alternatively, you can manually provide files using the URL format:
+```typescript
+{
+  type: 'url',
+  data: 'https://example.com/file.pdf',
+  name: 'file.pdf',
+  mime: 'application/pdf'
+}
+```
+
+```typescript
+import { SimStudioClient } from 'simstudio-ts-sdk';
+import fs from 'fs';
+
+const client = new SimStudioClient({
+  apiKey: process.env.SIM_API_KEY!
+});
+
+// Node.js: Read file and create File object
+const fileBuffer = fs.readFileSync('./document.pdf');
+const file = new File([fileBuffer], 'document.pdf', { type: 'application/pdf' });
+
+// Include files under the field name from your API trigger's input format
+const result = await client.executeWorkflow('workflow-id', {
+  input: {
+    documents: [file], // Field name must match your API trigger's file input field
+    instructions: 'Process this document'
+  }
+});
+
+// Browser: From file input
+const handleFileUpload = async (event: Event) => {
+  const input = event.target as HTMLInputElement;
+  const files = Array.from(input.files || []);
+
+  const result = await client.executeWorkflow('workflow-id', {
+    input: {
+      attachments: files, // Field name must match your API trigger's file input field
+      query: 'Analyze these files'
+    }
+  });
+};
+```
+
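Since the README notes that URL-format files can be provided manually, a file that is already hosted somewhere can be passed straight through `input` without any base64 encoding. A minimal sketch (workflow ID and field names are placeholders):

```typescript
// Illustrative sketch: passing a pre-hosted file in the URL format instead of a File object.
import { SimStudioClient } from 'simstudio-ts-sdk';

const client = new SimStudioClient({ apiKey: process.env.SIM_API_KEY! });

const result = await client.executeWorkflow('workflow-id', {
  input: {
    documents: [
      {
        type: 'url',
        data: 'https://example.com/file.pdf',
        name: 'file.pdf',
        mime: 'application/pdf',
      },
    ],
    instructions: 'Summarize this document',
  },
});
```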
 ## Getting Your API Key

 1. Log in to your [Sim](https://sim.ai) account
@@ -108,6 +108,55 @@ export class SimStudioClient {
    * Execute a workflow with optional input data
    * If async is true, returns immediately with a task ID
    */
+  /**
+   * Convert File objects in input to API format (base64)
+   * Recursively processes nested objects and arrays
+   */
+  private async convertFilesToBase64(
+    value: any,
+    visited: WeakSet<object> = new WeakSet()
+  ): Promise<any> {
+    if (value instanceof File) {
+      const arrayBuffer = await value.arrayBuffer()
+      const buffer = Buffer.from(arrayBuffer)
+      const base64 = buffer.toString('base64')
+
+      return {
+        type: 'file',
+        data: `data:${value.type || 'application/octet-stream'};base64,${base64}`,
+        name: value.name,
+        mime: value.type || 'application/octet-stream',
+      }
+    }
+
+    if (Array.isArray(value)) {
+      if (visited.has(value)) {
+        return '[Circular]'
+      }
+      visited.add(value)
+      const result = await Promise.all(
+        value.map((item) => this.convertFilesToBase64(item, visited))
+      )
+      visited.delete(value)
+      return result
+    }
+
+    if (value !== null && typeof value === 'object') {
+      if (visited.has(value)) {
+        return '[Circular]'
+      }
+      visited.add(value)
+      const converted: any = {}
+      for (const [key, val] of Object.entries(value)) {
+        converted[key] = await this.convertFilesToBase64(val, visited)
+      }
+      visited.delete(value)
+      return converted
+    }
+
+    return value
+  }
+
   async executeWorkflow(
     workflowId: string,
     options: ExecutionOptions = {}
@@ -121,15 +170,6 @@ export class SimStudioClient {
       setTimeout(() => reject(new Error('TIMEOUT')), timeout)
     })

-    // Build request body - spread input at root level, then add API control parameters
-    const body: any = input !== undefined ? { ...input } : {}
-    if (stream !== undefined) {
-      body.stream = stream
-    }
-    if (selectedOutputs !== undefined) {
-      body.selectedOutputs = selectedOutputs
-    }

     // Build headers - async execution uses X-Execution-Mode header
     const headers: Record<string, string> = {
       'Content-Type': 'application/json',
@@ -139,10 +179,23 @@ export class SimStudioClient {
       headers['X-Execution-Mode'] = 'async'
     }

+    // Build JSON body - spread input at root level, then add API control parameters
+    let jsonBody: any = input !== undefined ? { ...input } : {}
+
+    // Convert any File objects in the input to base64 format
+    jsonBody = await this.convertFilesToBase64(jsonBody)
+
+    if (stream !== undefined) {
+      jsonBody.stream = stream
+    }
+    if (selectedOutputs !== undefined) {
+      jsonBody.selectedOutputs = selectedOutputs
+    }
+
     const fetchPromise = fetch(url, {
       method: 'POST',
       headers,
-      body: JSON.stringify(body),
+      body: JSON.stringify(jsonBody),
     })

     const response = await Promise.race([fetchPromise, timeoutPromise])
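Because the conversion walks nested objects and arrays, File objects anywhere inside `input` are converted, not just top-level values. A short illustrative sketch (workflow ID and nested field names are placeholders):

```typescript
import { SimStudioClient } from 'simstudio-ts-sdk';

const client = new SimStudioClient({ apiKey: process.env.SIM_API_KEY! });

// A File nested inside an object and an array is still detected and converted;
// other values pass through unchanged.
const report = new File(['hello'], 'report.txt', { type: 'text/plain' });

const result = await client.executeWorkflow('workflow-id', {
  input: {
    request: {
      attachments: [report], // becomes { type: 'file', data: 'data:text/plain;base64,...', name: 'report.txt', mime: 'text/plain' }
      note: 'plain values are left as-is',
    },
  },
});
```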