mirror of https://github.com/simstudioai/sim.git
synced 2026-01-10 15:38:00 -05:00

Compare commits

5 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | c2f786e40b |  |
|  | f3bc1fc250 |  |
|  | 78b5ae7b3d |  |
|  | 016cd6750c |  |
|  | 3b982533d1 |  |
@@ -66,17 +66,17 @@ Define the data to pass to the child workflow:

 - **Single Variable Input**: Select a variable or block output to pass to the child workflow
 - **Variable References**: Use `<variable.name>` to reference workflow variables
-- **Block References**: Use `<blockName.response.field>` to reference outputs from previous blocks
-- **Automatic Mapping**: The selected data is automatically available as `start.response.input` in the child workflow
+- **Block References**: Use `<blockName.field>` to reference outputs from previous blocks
+- **Automatic Mapping**: The selected data is automatically available as `start.input` in the child workflow
 - **Optional**: The input field is optional - child workflows can run without input data
 - **Type Preservation**: Variable types (strings, numbers, objects, etc.) are preserved when passed to the child workflow

 ### Examples of Input References

 - `<variable.customerData>` - Pass a workflow variable
-- `<dataProcessor.response.result>` - Pass the result from a previous block
-- `<start.response.input>` - Pass the original workflow input
-- `<apiCall.response.data.user>` - Pass a specific field from an API response
+- `<dataProcessor.result>` - Pass the result from a previous block
+- `<start.input>` - Pass the original workflow input
+- `<apiCall.data.user>` - Pass a specific field from an API response

 ### Execution Context
@@ -109,7 +109,7 @@ To prevent infinite recursion and ensure system stability, the Workflow block in
     <strong>Workflow ID</strong>: The identifier of the workflow to execute
   </li>
   <li>
-    <strong>Input Variable</strong>: Variable or block reference to pass to the child workflow (e.g., `<variable.name>` or `<block.response.field>`)
+    <strong>Input Variable</strong>: Variable or block reference to pass to the child workflow (e.g., `<variable.name>` or `<block.field>`)
   </li>
 </ul>
 </Tab>
@@ -150,23 +150,23 @@ blocks:
   - type: workflow
     name: "Setup Customer Account"
     workflowId: "account-setup-workflow"
-    input: "<Validate Customer Data.response.result>"
+    input: "<Validate Customer Data.result>"

   - type: workflow
     name: "Send Welcome Email"
     workflowId: "welcome-email-workflow"
-    input: "<Setup Customer Account.response.result.accountDetails>"
+    input: "<Setup Customer Account.result.accountDetails>"
 ```

 ### Child Workflow: Customer Validation
 ```yaml
 # Reusable customer validation workflow
-# Access the input data using: start.response.input
+# Access the input data using: start.input
 blocks:
   - type: function
     name: "Validate Email"
     code: |
-      const customerData = start.response.input;
+      const customerData = start.input;
       const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
       return emailRegex.test(customerData.email);
@@ -174,7 +174,7 @@ blocks:
     name: "Check Credit Score"
     url: "https://api.creditcheck.com/score"
     method: "POST"
-    body: "<start.response.input>"
+    body: "<start.input>"
 ```

 ### Variable Reference Examples
@@ -184,13 +184,13 @@ blocks:
 input: "<variable.customerInfo>"

 # Using block outputs
-input: "<dataProcessor.response.cleanedData>"
+input: "<dataProcessor.cleanedData>"

 # Using nested object properties
-input: "<apiCall.response.data.user.profile>"
+input: "<apiCall.data.user.profile>"

 # Using array elements (if supported by the resolver)
-input: "<listProcessor.response.items[0]>"
+input: "<listProcessor.items[0]>"
 ```

 ## Access Control and Permissions
@@ -81,4 +81,4 @@ Sim Studio provides a wide range of features designed to accelerate your develop

 ##

-Ready to get started? Check out our [Getting Started](/getting-started) guide or explore our [Blocks](/docs/blocks) and [Tools](/docs/tools) in more detail.
+Ready to get started? Check out our [Getting Started](/getting-started) guide or explore our [Blocks](/blocks) and [Tools](/tools) in more detail.
@@ -19,7 +19,7 @@
     "fumadocs-mdx": "^11.5.6",
     "fumadocs-ui": "^15.0.16",
     "lucide-react": "^0.511.0",
-    "next": "^15.2.3",
+    "next": "^15.3.2",
     "next-themes": "^0.4.6",
     "react": "19.1.0",
     "react-dom": "19.1.0",
@@ -93,7 +93,7 @@ export const sampleWorkflowState = {
       webhookPath: { id: 'webhookPath', type: 'short-input', value: '' },
     },
     outputs: {
-      response: { type: { input: 'any' } },
+      input: 'any',
     },
     enabled: true,
     horizontalHandles: true,
@@ -111,7 +111,7 @@ export const sampleWorkflowState = {
         type: 'long-input',
         value: 'You are a helpful assistant',
       },
-      context: { id: 'context', type: 'short-input', value: '<start.response.input>' },
+      context: { id: 'context', type: 'short-input', value: '<start.input>' },
       model: { id: 'model', type: 'dropdown', value: 'gpt-4o' },
       apiKey: { id: 'apiKey', type: 'short-input', value: '{{OPENAI_API_KEY}}' },
     },
@@ -138,6 +138,7 @@ export const sampleWorkflowState = {
     },
   ],
   loops: {},
+  parallels: {},
   lastSaved: Date.now(),
   isDeployed: false,
 }
@@ -764,6 +765,20 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
     bucket: 'test-s3-bucket',
     region: 'us-east-1',
   },
+  S3_KB_CONFIG: {
+    bucket: 'test-s3-kb-bucket',
+    region: 'us-east-1',
+  },
+  BLOB_CONFIG: {
+    accountName: 'testaccount',
+    accountKey: 'testkey',
+    containerName: 'test-container',
+  },
+  BLOB_KB_CONFIG: {
+    accountName: 'testaccount',
+    accountKey: 'testkey',
+    containerName: 'test-kb-container',
+  },
 }))

 vi.doMock('@aws-sdk/client-s3', () => ({
@@ -806,6 +821,11 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
     accountKey: 'testkey',
     containerName: 'test-container',
   },
+  BLOB_KB_CONFIG: {
+    accountName: 'testaccount',
+    accountKey: 'testkey',
+    containerName: 'test-kb-container',
+  },
 }))

 vi.doMock('@azure/storage-blob', () => ({
@@ -14,6 +14,8 @@ const logger = createLogger('OAuthTokenAPI')
 export async function POST(request: NextRequest) {
   const requestId = crypto.randomUUID().slice(0, 8)

+  logger.info(`[${requestId}] OAuth token API POST request received`)
+
   try {
     // Parse request body
     const body = await request.json()
@@ -38,6 +40,7 @@ export async function POST(request: NextRequest) {
     const credential = await getCredential(requestId, credentialId, userId)

     if (!credential) {
+      logger.error(`[${requestId}] Credential not found: ${credentialId}`)
       return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
     }
@@ -45,7 +48,8 @@ export async function POST(request: NextRequest) {
       // Refresh the token if needed
       const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
       return NextResponse.json({ accessToken }, { status: 200 })
-    } catch (_error) {
+    } catch (error) {
+      logger.error(`[${requestId}] Failed to refresh access token:`, error)
       return NextResponse.json({ error: 'Failed to refresh access token' }, { status: 401 })
     }
   } catch (error) {
@@ -89,6 +89,7 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
   // Check if the token is expired and needs refreshing
   const now = new Date()
   const tokenExpiry = credential.accessTokenExpiresAt
+  // Only refresh if we have an expiration time AND it's expired AND we have a refresh token
   const needsRefresh = tokenExpiry && tokenExpiry < now && !!credential.refreshToken

   if (needsRefresh) {
@@ -166,7 +167,9 @@ export async function refreshAccessTokenIfNeeded(
   // Check if we need to refresh the token
   const expiresAt = credential.accessTokenExpiresAt
   const now = new Date()
-  const needsRefresh = !expiresAt || expiresAt <= now
+  // Only refresh if we have an expiration time AND it's expired
+  // If no expiration time is set (newly created credentials), assume token is valid
+  const needsRefresh = expiresAt && expiresAt <= now

   const accessToken = credential.accessToken
@@ -233,7 +236,9 @@ export async function refreshTokenIfNeeded(
   // Check if we need to refresh the token
   const expiresAt = credential.accessTokenExpiresAt
   const now = new Date()
-  const needsRefresh = !expiresAt || expiresAt <= now
+  // Only refresh if we have an expiration time AND it's expired
+  // If no expiration time is set (newly created credentials), assume token is valid
+  const needsRefresh = expiresAt && expiresAt <= now

   // If token is still valid, return it directly
   if (!needsRefresh || !credential.refreshToken) {
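The practical effect of the `needsRefresh` change in both functions: previously a credential with no recorded expiry was treated as expired and forced through a refresh; now it is assumed valid until a known expiry has actually passed. A minimal standalone TypeScript sketch of the two predicates (illustrative only, not the repository's module):

```typescript
// Standalone illustration of the needsRefresh semantics change above.
interface Credential {
  accessTokenExpiresAt: Date | null
}

const now = new Date()

// Old predicate: a missing expiry counts as expired, forcing a refresh
const needsRefreshOld = (c: Credential) =>
  !c.accessTokenExpiresAt || c.accessTokenExpiresAt <= now

// New predicate: only refresh when a known expiry has actually passed
const needsRefreshNew = (c: Credential) =>
  !!c.accessTokenExpiresAt && c.accessTokenExpiresAt <= now

const fresh: Credential = { accessTokenExpiresAt: null } // newly created credential
console.log(needsRefreshOld(fresh)) // true  -> spurious refresh attempt
console.log(needsRefreshNew(fresh)) // false -> token used as-is
```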
@@ -241,7 +241,7 @@ describe('Chat Subdomain API Route', () => {
   })

   describe('POST endpoint', () => {
-    it('should handle authentication requests without messages', async () => {
+    it('should handle authentication requests without input', async () => {
       const req = createMockRequest('POST', { password: 'test-password' })
       const params = Promise.resolve({ subdomain: 'password-protected-chat' })
@@ -257,7 +257,7 @@ describe('Chat Subdomain API Route', () => {
       expect(mockSetChatAuthCookie).toHaveBeenCalled()
     })

-    it('should return 400 for requests without message', async () => {
+    it('should return 400 for requests without input', async () => {
       const req = createMockRequest('POST', {})
       const params = Promise.resolve({ subdomain: 'test-chat' })
@@ -269,7 +269,7 @@ describe('Chat Subdomain API Route', () => {

       const data = await response.json()
       expect(data).toHaveProperty('error')
-      expect(data).toHaveProperty('message', 'No message provided')
+      expect(data).toHaveProperty('message', 'No input provided')
     })

     it('should return 401 for unauthorized access', async () => {
@@ -279,7 +279,7 @@ describe('Chat Subdomain API Route', () => {
        error: 'Authentication required',
      }))

-      const req = createMockRequest('POST', { message: 'Hello' })
+      const req = createMockRequest('POST', { input: 'Hello' })
      const params = Promise.resolve({ subdomain: 'protected-chat' })

      const { POST } = await import('./route')
@@ -342,7 +342,7 @@ describe('Chat Subdomain API Route', () => {
        }
      })

-      const req = createMockRequest('POST', { message: 'Hello' })
+      const req = createMockRequest('POST', { input: 'Hello' })
      const params = Promise.resolve({ subdomain: 'test-chat' })

      const { POST } = await import('./route')
@@ -357,7 +357,7 @@ describe('Chat Subdomain API Route', () => {
     })

     it('should return streaming response for valid chat messages', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world', conversationId: 'conv-123' })
+      const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
       const params = Promise.resolve({ subdomain: 'test-chat' })

       const { POST } = await import('./route')
@@ -374,7 +374,7 @@ describe('Chat Subdomain API Route', () => {
     })

     it('should handle streaming response body correctly', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world' })
+      const req = createMockRequest('POST', { input: 'Hello world' })
       const params = Promise.resolve({ subdomain: 'test-chat' })

       const { POST } = await import('./route')
@@ -404,7 +404,7 @@ describe('Chat Subdomain API Route', () => {
       throw new Error('Execution failed')
     })

-    const req = createMockRequest('POST', { message: 'Trigger error' })
+    const req = createMockRequest('POST', { input: 'Trigger error' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

     const { POST } = await import('./route')
@@ -444,7 +444,7 @@ describe('Chat Subdomain API Route', () => {

     it('should pass conversationId to executeWorkflowForChat when provided', async () => {
       const req = createMockRequest('POST', {
-        message: 'Hello world',
+        input: 'Hello world',
         conversationId: 'test-conversation-123',
       })
       const params = Promise.resolve({ subdomain: 'test-chat' })
@@ -461,7 +461,7 @@ describe('Chat Subdomain API Route', () => {
     })

     it('should handle missing conversationId gracefully', async () => {
-      const req = createMockRequest('POST', { message: 'Hello world' })
+      const req = createMockRequest('POST', { input: 'Hello world' })
       const params = Promise.resolve({ subdomain: 'test-chat' })

       const { POST } = await import('./route')
@@ -72,11 +72,11 @@ export async function POST(
   }

   // Use the already parsed body
-  const { message, password, email, conversationId } = parsedBody
+  const { input, password, email, conversationId } = parsedBody

-  // If this is an authentication request (has password or email but no message),
+  // If this is an authentication request (has password or email but no input),
   // set auth cookie and return success
-  if ((password || email) && !message) {
+  if ((password || email) && !input) {
     const response = addCorsHeaders(createSuccessResponse({ authenticated: true }), request)

     // Set authentication cookie
@@ -86,8 +86,8 @@ export async function POST(
   }

   // For chat messages, create regular response
-  if (!message) {
-    return addCorsHeaders(createErrorResponse('No message provided', 400), request)
+  if (!input) {
+    return addCorsHeaders(createErrorResponse('No input provided', 400), request)
   }

   // Get the workflow for this chat
@@ -105,8 +105,8 @@ export async function POST(
   }

   try {
-    // Execute workflow with structured input (message + conversationId for context)
-    const result = await executeWorkflowForChat(deployment.id, message, conversationId)
+    // Execute workflow with structured input (input + conversationId for context)
+    const result = await executeWorkflowForChat(deployment.id, input, conversationId)

     // The result is always a ReadableStream that we can pipe to the client
     const streamResponse = new NextResponse(result, {
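For orientation, a client of this route now sends `input` where it previously sent `message`. A hypothetical request sketch (the endpoint URL is a placeholder; the body shape follows the tests above):

```typescript
// Hypothetical client call after the message -> input rename.
// The endpoint URL below is a placeholder, not taken from the repository.
const res = await fetch('https://example.com/api/chat/test-chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    input: 'Hello world', // previously the `message` field
    conversationId: 'conv-123', // optional, preserves chat context
  }),
})
// A missing `input` now yields 400 with { message: 'No input provided' };
// on success the route streams the workflow output back to the client.
```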
@@ -194,6 +194,7 @@ export async function GET(
         description: deployment.description,
         customizations: deployment.customizations,
         authType: deployment.authType,
+        outputConfigs: deployment.outputConfigs,
       }),
       request
     )
@@ -219,6 +220,7 @@ export async function GET(
         description: deployment.description,
         customizations: deployment.customizations,
         authType: deployment.authType,
+        outputConfigs: deployment.outputConfigs,
       }),
       request
     )
@@ -3,8 +3,9 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { v4 as uuidv4 } from 'uuid'
 import { env } from '@/lib/env'
 import { createLogger } from '@/lib/logs/console-logger'
-import { persistExecutionLogs } from '@/lib/logs/execution-logger'
+import { EnhancedLoggingSession } from '@/lib/logs/enhanced-logging-session'
 import { buildTraceSpans } from '@/lib/logs/trace-spans'
+import { processStreamingBlockLogs } from '@/lib/tokenization'
 import { decryptSecret } from '@/lib/utils'
 import { db } from '@/db'
 import { chat, environment as envTable, userStats, workflow } from '@/db/schema'
@@ -128,10 +129,10 @@ export async function validateChatAuth(
       return { authorized: false, error: 'Password is required' }
     }

-    const { password, message } = parsedBody
+    const { password, input } = parsedBody

     // If this is a chat message, not an auth attempt
-    if (message && !password) {
+    if (input && !password) {
       return { authorized: false, error: 'auth_required_password' }
     }
@@ -170,10 +171,10 @@ export async function validateChatAuth(
       return { authorized: false, error: 'Email is required' }
     }

-    const { email, message } = parsedBody
+    const { email, input } = parsedBody

     // If this is a chat message, not an auth attempt
-    if (message && !email) {
+    if (input && !email) {
       return { authorized: false, error: 'auth_required_email' }
     }
@@ -211,17 +212,17 @@ export async function validateChatAuth(
 /**
  * Executes a workflow for a chat request and returns the formatted output.
  *
- * When workflows reference <start.response.input>, they receive a structured JSON
- * containing both the message and conversationId for maintaining chat context.
+ * When workflows reference <start.input>, they receive the input directly.
+ * The conversationId is available at <start.conversationId> for maintaining chat context.
  *
  * @param chatId - Chat deployment identifier
- * @param message - User's chat message
+ * @param input - User's chat input
  * @param conversationId - Optional ID for maintaining conversation context
  * @returns Workflow execution result formatted for the chat interface
  */
 export async function executeWorkflowForChat(
   chatId: string,
-  message: string,
+  input: string,
   conversationId?: string
 ): Promise<any> {
   const requestId = crypto.randomUUID().slice(0, 8)
@@ -252,32 +253,42 @@ export async function executeWorkflowForChat(

   const deployment = deploymentResult[0]
   const workflowId = deployment.workflowId
+  const executionId = uuidv4()
+
+  // Set up enhanced logging for chat execution
+  const loggingSession = new EnhancedLoggingSession(workflowId, executionId, 'chat', requestId)

   // Check for multi-output configuration in customizations
   const customizations = (deployment.customizations || {}) as Record<string, any>
   let outputBlockIds: string[] = []
   let outputPaths: string[] = []

+  // Extract output configs from the new schema format
+  let selectedOutputIds: string[] = []
   if (deployment.outputConfigs && Array.isArray(deployment.outputConfigs)) {
-    // Extract block IDs and paths from the new outputConfigs array format
+    // Extract output IDs in the format expected by the streaming processor
     logger.debug(
       `[${requestId}] Found ${deployment.outputConfigs.length} output configs in deployment`
     )
-    deployment.outputConfigs.forEach((config) => {
+    selectedOutputIds = deployment.outputConfigs.map((config) => {
+      const outputId = config.path
+        ? `${config.blockId}_${config.path}`
+        : `${config.blockId}.content`
+
       logger.debug(
-        `[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'none'}`
+        `[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'content'} -> outputId=${outputId}`
       )
+
+      return outputId
     })

+    // Also extract block IDs for legacy compatibility
     outputBlockIds = deployment.outputConfigs.map((config) => config.blockId)
     outputPaths = deployment.outputConfigs.map((config) => config.path || '')
   } else {
     // Use customizations as fallback
     outputBlockIds = Array.isArray(customizations.outputBlockIds)
       ? customizations.outputBlockIds
       : []
     outputPaths = Array.isArray(customizations.outputPaths) ? customizations.outputPaths : []
   }

   // Fall back to customizations if we still have no outputs
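The outputId naming scheme introduced here can be shown with a small self-contained sketch (the config values are hypothetical):

```typescript
// Hypothetical illustration of the outputId scheme used by the new mapping.
interface OutputConfig {
  blockId: string
  path?: string
}

const outputConfigs: OutputConfig[] = [
  { blockId: 'agent1', path: 'content' }, // explicit path
  { blockId: 'agent2' }, // no path -> defaults to `.content`
]

const selectedOutputIds = outputConfigs.map((config) =>
  config.path ? `${config.blockId}_${config.path}` : `${config.blockId}.content`
)

console.log(selectedOutputIds) // ['agent1_content', 'agent2.content']
```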
@@ -287,10 +298,11 @@ export async function executeWorkflowForChat(
     customizations.outputBlockIds.length > 0
   ) {
     outputBlockIds = customizations.outputBlockIds
     outputPaths = customizations.outputPaths || new Array(outputBlockIds.length).fill('')
   }

-  logger.debug(`[${requestId}] Using ${outputBlockIds.length} output blocks for extraction`)
+  logger.debug(
+    `[${requestId}] Using ${outputBlockIds.length} output blocks and ${selectedOutputIds.length} selected output IDs for extraction`
+  )

   // Find the workflow (deployedState is NOT deprecated - needed for chat execution)
   const workflowResult = await db
@@ -407,6 +419,13 @@ export async function executeWorkflowForChat(
     {} as Record<string, Record<string, any>>
   )

+  // Start enhanced logging session
+  await loggingSession.safeStart({
+    userId: deployment.userId,
+    workspaceId: '', // TODO: Get from workflow
+    variables: workflowVariables,
+  })
+
   const stream = new ReadableStream({
     async start(controller) {
       const encoder = new TextEncoder()
@@ -445,11 +464,11 @@ export async function executeWorkflowForChat(
         workflow: serializedWorkflow,
         currentBlockStates: processedBlockStates,
         envVarValues: decryptedEnvVars,
-        workflowInput: { input: message, conversationId },
+        workflowInput: { input: input, conversationId },
         workflowVariables,
         contextExtensions: {
           stream: true,
-          selectedOutputIds: outputBlockIds,
+          selectedOutputIds: selectedOutputIds.length > 0 ? selectedOutputIds : outputBlockIds,
           edges: edges.map((e: any) => ({
             source: e.source,
             target: e.target,
@@ -458,16 +477,41 @@ export async function executeWorkflowForChat(
           },
         })

-        const result = await executor.execute(workflowId)
+        // Set up enhanced logging on the executor
+        loggingSession.setupExecutor(executor)
+
+        let result
+        try {
+          result = await executor.execute(workflowId)
+        } catch (error: any) {
+          logger.error(`[${requestId}] Chat workflow execution failed:`, error)
+          await loggingSession.safeCompleteWithError({
+            endedAt: new Date().toISOString(),
+            totalDurationMs: 0,
+            error: {
+              message: error.message || 'Chat workflow execution failed',
+              stackTrace: error.stack,
+            },
+          })
+          throw error
+        }

         if (result && 'success' in result) {
-          result.logs?.forEach((log: BlockLog) => {
-            if (streamedContent.has(log.blockId)) {
-              if (log.output?.response) {
-                log.output.response.content = streamedContent.get(log.blockId)
+          // Update streamed content and apply tokenization
+          if (result.logs) {
+            result.logs.forEach((log: BlockLog) => {
+              if (streamedContent.has(log.blockId)) {
+                const content = streamedContent.get(log.blockId)
+                if (log.output) {
+                  log.output.content = content
+                }
               }
-            }
-          })
+            })
+
+            // Process all logs for streaming tokenization
+            const processedCount = processStreamingBlockLogs(result.logs, streamedContent)
+            logger.info(`[CHAT-API] Processed ${processedCount} blocks for streaming tokenization`)
+          }

           const { traceSpans, totalDuration } = buildTraceSpans(result)
           const enrichedResult = { ...result, traceSpans, totalDuration }
@@ -481,8 +525,7 @@ export async function executeWorkflowForChat(
             ;(enrichedResult.metadata as any).conversationId = conversationId
           }
-          const executionId = uuidv4()
-          await persistExecutionLogs(workflowId, executionId, enrichedResult, 'chat')
-          logger.debug(`Persisted logs for deployed chat: ${executionId}`)
+          logger.debug(`Generated execution ID for deployed chat: ${executionId}`)

           if (result.success) {
             try {
@@ -506,6 +549,17 @@ export async function executeWorkflowForChat(
           )
         }

+        // Complete enhanced logging session (for both success and failure)
+        if (result && 'success' in result) {
+          const { traceSpans } = buildTraceSpans(result)
+          await loggingSession.safeComplete({
+            endedAt: new Date().toISOString(),
+            totalDurationMs: result.metadata?.duration || 0,
+            finalOutput: result.output,
+            traceSpans,
+          })
+        }
+
         controller.close()
       },
     })
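Taken together, these hunks wrap chat execution in a single logging lifecycle: `safeStart` before execution, `setupExecutor` to attach hooks, then `safeComplete` on success or `safeCompleteWithError` on failure. A condensed, self-contained sketch of that call order (the stub class below only mirrors the method names; the real implementation lives in `@/lib/logs/enhanced-logging-session`):

```typescript
// Stubbed, hypothetical mirror of the EnhancedLoggingSession call order above.
class LoggingSessionStub {
  async safeStart(meta: Record<string, unknown>) { console.log('session start', meta) }
  setupExecutor(_executor: unknown) { console.log('executor hooks attached') }
  async safeComplete(summary: Record<string, unknown>) { console.log('session complete', summary) }
  async safeCompleteWithError(failure: Record<string, unknown>) { console.log('session failed', failure) }
}

async function runWithLogging(execute: () => Promise<{ output: unknown }>) {
  const session = new LoggingSessionStub()
  await session.safeStart({ userId: 'user-1', variables: {} }) // before execution
  session.setupExecutor(execute) // attach block-level hooks on the executor

  try {
    const result = await execute()
    await session.safeComplete({ endedAt: new Date().toISOString(), finalOutput: result.output })
    return result
  } catch (error: any) {
    await session.safeCompleteWithError({
      endedAt: new Date().toISOString(),
      error: { message: error.message, stackTrace: error.stack },
    })
    throw error // mirrored by the route: the error still propagates
  }
}
```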
@@ -239,7 +239,7 @@ Example Scenario:
 User Prompt: "Fetch user data from an API. Use the User ID passed in as 'userId' and an API Key stored as the 'SERVICE_API_KEY' environment variable."

 Generated Code:
-const userId = <block.response.content>; // Correct: Accessing input parameter without quotes
+const userId = <block.content>; // Correct: Accessing input parameter without quotes
 const apiKey = {{SERVICE_API_KEY}}; // Correct: Accessing environment variable without quotes
 const url = \`https://api.example.com/users/\${userId}\`;
@@ -273,7 +273,7 @@ Do not include import/require statements unless absolutely necessary and they ar
 Do not include markdown formatting or explanations.
 Output only the raw TypeScript code. Use modern TypeScript features where appropriate. Do not use semicolons.
 Example:
-const userId = <block.response.content> as string
+const userId = <block.content> as string
 const apiKey = {{SERVICE_API_KEY}}
 const response = await fetch(\`https://api.example.com/users/\${userId}\`, { headers: { Authorization: \`Bearer \${apiKey}\` } })
 if (!response.ok) {
@@ -39,8 +39,9 @@ describe('/api/files/presigned', () => {
     const response = await POST(request)
     const data = await response.json()

-    expect(response.status).toBe(400)
+    expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
     expect(data.error).toBe('Direct uploads are only available when cloud storage is enabled')
+    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
     expect(data.directUploadSupported).toBe(false)
   })
@@ -64,7 +65,8 @@ describe('/api/files/presigned', () => {
     const data = await response.json()

     expect(response.status).toBe(400)
-    expect(data.error).toBe('Missing fileName or contentType')
+    expect(data.error).toBe('fileName is required and cannot be empty')
+    expect(data.code).toBe('VALIDATION_ERROR')
   })

   it('should return error when contentType is missing', async () => {
@@ -87,7 +89,59 @@ describe('/api/files/presigned', () => {
     const data = await response.json()

     expect(response.status).toBe(400)
-    expect(data.error).toBe('Missing fileName or contentType')
+    expect(data.error).toBe('contentType is required and cannot be empty')
+    expect(data.code).toBe('VALIDATION_ERROR')
   })

+  it('should return error when fileSize is invalid', async () => {
+    setupFileApiMocks({
+      cloudEnabled: true,
+      storageProvider: 's3',
+    })
+
+    const { POST } = await import('./route')
+
+    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
+      method: 'POST',
+      body: JSON.stringify({
+        fileName: 'test.txt',
+        contentType: 'text/plain',
+        fileSize: 0,
+      }),
+    })
+
+    const response = await POST(request)
+    const data = await response.json()
+
+    expect(response.status).toBe(400)
+    expect(data.error).toBe('fileSize must be a positive number')
+    expect(data.code).toBe('VALIDATION_ERROR')
+  })
+
+  it('should return error when file size exceeds limit', async () => {
+    setupFileApiMocks({
+      cloudEnabled: true,
+      storageProvider: 's3',
+    })
+
+    const { POST } = await import('./route')
+
+    const largeFileSize = 150 * 1024 * 1024 // 150MB (exceeds 100MB limit)
+    const request = new NextRequest('http://localhost:3000/api/files/presigned', {
+      method: 'POST',
+      body: JSON.stringify({
+        fileName: 'large-file.txt',
+        contentType: 'text/plain',
+        fileSize: largeFileSize,
+      }),
+    })
+
+    const response = await POST(request)
+    const data = await response.json()
+
+    expect(response.status).toBe(400)
+    expect(data.error).toContain('exceeds maximum allowed size')
+    expect(data.code).toBe('VALIDATION_ERROR')
+  })
+
   it('should generate S3 presigned URL successfully', async () => {
@@ -122,6 +176,34 @@ describe('/api/files/presigned', () => {
     expect(data.directUploadSupported).toBe(true)
   })

+  it('should generate knowledge-base S3 presigned URL with kb prefix', async () => {
+    setupFileApiMocks({
+      cloudEnabled: true,
+      storageProvider: 's3',
+    })
+
+    const { POST } = await import('./route')
+
+    const request = new NextRequest(
+      'http://localhost:3000/api/files/presigned?type=knowledge-base',
+      {
+        method: 'POST',
+        body: JSON.stringify({
+          fileName: 'knowledge-doc.pdf',
+          contentType: 'application/pdf',
+          fileSize: 2048,
+        }),
+      }
+    )
+
+    const response = await POST(request)
+    const data = await response.json()
+
+    expect(response.status).toBe(200)
+    expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
+    expect(data.directUploadSupported).toBe(true)
+  })
+
   it('should generate Azure Blob presigned URL successfully', async () => {
     setupFileApiMocks({
       cloudEnabled: true,
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toBe('Unknown storage provider')
|
||||
expect(response.status).toBe(500) // Changed from 400 to 500 (StorageConfigError)
|
||||
expect(data.error).toBe('Unknown storage provider: unknown') // Updated error message
|
||||
expect(data.code).toBe('STORAGE_CONFIG_ERROR')
|
||||
expect(data.directUploadSupported).toBe(false)
|
||||
})
|
||||
|
||||
@@ -225,8 +308,10 @@ describe('/api/files/presigned', () => {
     const data = await response.json()

     expect(response.status).toBe(500)
-    expect(data.error).toBe('Error')
-    expect(data.message).toBe('S3 service unavailable')
+    expect(data.error).toBe(
+      'Failed to generate S3 presigned URL - check AWS credentials and permissions'
+    ) // Updated error message
+    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
   })

   it('should handle Azure Blob errors gracefully', async () => {
@@ -269,8 +354,8 @@ describe('/api/files/presigned', () => {
     const data = await response.json()

     expect(response.status).toBe(500)
-    expect(data.error).toBe('Error')
-    expect(data.message).toBe('Azure service unavailable')
+    expect(data.error).toBe('Failed to generate Azure Blob presigned URL') // Updated error message
+    expect(data.code).toBe('STORAGE_CONFIG_ERROR')
   })

   it('should handle malformed JSON gracefully', async () => {
@@ -289,9 +374,9 @@ describe('/api/files/presigned', () => {
     const response = await POST(request)
     const data = await response.json()

-    expect(response.status).toBe(500)
-    expect(data.error).toBe('SyntaxError')
-    expect(data.message).toContain('Unexpected token')
+    expect(response.status).toBe(400) // Changed from 500 to 400 (ValidationError)
+    expect(data.error).toBe('Invalid JSON in request body') // Updated error message
+    expect(data.code).toBe('VALIDATION_ERROR')
   })
 })
@@ -6,7 +6,7 @@ import { createLogger } from '@/lib/logs/console-logger'
 import { getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
 import { getBlobServiceClient } from '@/lib/uploads/blob/blob-client'
 import { getS3Client, sanitizeFilenameForMetadata } from '@/lib/uploads/s3/s3-client'
-import { BLOB_CONFIG, S3_CONFIG } from '@/lib/uploads/setup'
+import { BLOB_CONFIG, BLOB_KB_CONFIG, S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
 import { createErrorResponse, createOptionsResponse } from '../utils'

 const logger = createLogger('PresignedUploadAPI')
@@ -17,124 +17,148 @@ interface PresignedUrlRequest {
   fileSize: number
 }

+type UploadType = 'general' | 'knowledge-base'
+
+class PresignedUrlError extends Error {
+  constructor(
+    message: string,
+    public code: string,
+    public statusCode = 400
+  ) {
+    super(message)
+    this.name = 'PresignedUrlError'
+  }
+}
+
+class StorageConfigError extends PresignedUrlError {
+  constructor(message: string) {
+    super(message, 'STORAGE_CONFIG_ERROR', 500)
+  }
+}
+
+class ValidationError extends PresignedUrlError {
+  constructor(message: string) {
+    super(message, 'VALIDATION_ERROR', 400)
+  }
+}
+
 export async function POST(request: NextRequest) {
   try {
-    // Parse the request body
-    const data: PresignedUrlRequest = await request.json()
-    const { fileName, contentType, fileSize } = data
-
-    if (!fileName || !contentType) {
-      return NextResponse.json({ error: 'Missing fileName or contentType' }, { status: 400 })
+    let data: PresignedUrlRequest
+    try {
+      data = await request.json()
+    } catch {
+      throw new ValidationError('Invalid JSON in request body')
     }

-    // Only proceed if cloud storage is enabled
+    const { fileName, contentType, fileSize } = data
+
+    if (!fileName?.trim()) {
+      throw new ValidationError('fileName is required and cannot be empty')
+    }
+    if (!contentType?.trim()) {
+      throw new ValidationError('contentType is required and cannot be empty')
+    }
+    if (!fileSize || fileSize <= 0) {
+      throw new ValidationError('fileSize must be a positive number')
+    }
+
+    const MAX_FILE_SIZE = 100 * 1024 * 1024
+    if (fileSize > MAX_FILE_SIZE) {
+      throw new ValidationError(
+        `File size (${fileSize} bytes) exceeds maximum allowed size (${MAX_FILE_SIZE} bytes)`
+      )
+    }
+
+    const uploadTypeParam = request.nextUrl.searchParams.get('type')
+    const uploadType: UploadType =
+      uploadTypeParam === 'knowledge-base' ? 'knowledge-base' : 'general'
+
     if (!isUsingCloudStorage()) {
-      return NextResponse.json(
-        {
-          error: 'Direct uploads are only available when cloud storage is enabled',
-          directUploadSupported: false,
-        },
-        { status: 400 }
+      throw new StorageConfigError(
+        'Direct uploads are only available when cloud storage is enabled'
       )
     }

     const storageProvider = getStorageProvider()
+    logger.info(`Generating ${uploadType} presigned URL for ${fileName} using ${storageProvider}`)

     switch (storageProvider) {
       case 's3':
-        return await handleS3PresignedUrl(fileName, contentType, fileSize)
+        return await handleS3PresignedUrl(fileName, contentType, fileSize, uploadType)
       case 'blob':
-        return await handleBlobPresignedUrl(fileName, contentType, fileSize)
+        return await handleBlobPresignedUrl(fileName, contentType, fileSize, uploadType)
       default:
-        return NextResponse.json(
-          {
-            error: 'Unknown storage provider',
-            directUploadSupported: false,
-          },
-          { status: 400 }
-        )
+        throw new StorageConfigError(`Unknown storage provider: ${storageProvider}`)
     }
   } catch (error) {
     logger.error('Error generating presigned URL:', error)

+    if (error instanceof PresignedUrlError) {
+      return NextResponse.json(
+        {
+          error: error.message,
+          code: error.code,
+          directUploadSupported: false,
+        },
+        { status: error.statusCode }
+      )
+    }
+
     return createErrorResponse(
       error instanceof Error ? error : new Error('Failed to generate presigned URL')
     )
   }
 }

-async function handleS3PresignedUrl(fileName: string, contentType: string, fileSize: number) {
-  // Create a unique key for the file
-  const safeFileName = fileName.replace(/\s+/g, '-')
-  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`
-
-  // Sanitize the original filename for S3 metadata to prevent header errors
-  const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)
-
-  // Create the S3 command
-  const command = new PutObjectCommand({
-    Bucket: S3_CONFIG.bucket,
-    Key: uniqueKey,
-    ContentType: contentType,
-    Metadata: {
-      originalName: sanitizedOriginalName,
-      uploadedAt: new Date().toISOString(),
-    },
-  })
-
-  // Generate the presigned URL
-  const presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
-
-  // Create a path for API to serve the file
-  const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
-
-  logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
-
-  return NextResponse.json({
-    presignedUrl,
-    fileInfo: {
-      path: servePath,
-      key: uniqueKey,
-      name: fileName,
-      size: fileSize,
-      type: contentType,
-    },
-    directUploadSupported: true,
-  })
-}
-
-async function handleBlobPresignedUrl(fileName: string, contentType: string, fileSize: number) {
-  // Create a unique key for the file
-  const safeFileName = fileName.replace(/\s+/g, '-')
-  const uniqueKey = `${Date.now()}-${uuidv4()}-${safeFileName}`
-
-  try {
-    const blobServiceClient = getBlobServiceClient()
-    const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
-    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
-
-    // Generate SAS token for upload (write permission)
-    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
-      await import('@azure/storage-blob')
-
-    const sasOptions = {
-      containerName: BLOB_CONFIG.containerName,
-      blobName: uniqueKey,
-      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
-      startsOn: new Date(),
-      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
-    }
-
-    const sasToken = generateBlobSASQueryParameters(
-      sasOptions,
-      new StorageSharedKeyCredential(BLOB_CONFIG.accountName, BLOB_CONFIG.accountKey || '')
-    ).toString()
-
-    const presignedUrl = `${blockBlobClient.url}?${sasToken}`
-
-    // Create a path for API to serve the file
-    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
-
-    logger.info(`Generated presigned URL for ${fileName} (${uniqueKey})`)
-
+async function handleS3PresignedUrl(
+  fileName: string,
+  contentType: string,
+  fileSize: number,
+  uploadType: UploadType
+) {
+  try {
+    const config = uploadType === 'knowledge-base' ? S3_KB_CONFIG : S3_CONFIG
+
+    if (!config.bucket || !config.region) {
+      throw new StorageConfigError(`S3 configuration missing for ${uploadType} uploads`)
+    }
+
+    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
+    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
+    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`
+
+    const sanitizedOriginalName = sanitizeFilenameForMetadata(fileName)
+
+    const metadata: Record<string, string> = {
+      originalName: sanitizedOriginalName,
+      uploadedAt: new Date().toISOString(),
+    }
+
+    if (uploadType === 'knowledge-base') {
+      metadata.purpose = 'knowledge-base'
+    }
+
+    const command = new PutObjectCommand({
+      Bucket: config.bucket,
+      Key: uniqueKey,
+      ContentType: contentType,
+      Metadata: metadata,
+    })
+
+    let presignedUrl: string
+    try {
+      presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })
+    } catch (s3Error) {
+      logger.error('Failed to generate S3 presigned URL:', s3Error)
+      throw new StorageConfigError(
+        'Failed to generate S3 presigned URL - check AWS credentials and permissions'
+      )
+    }
+
+    const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
+
+    logger.info(`Generated ${uploadType} S3 presigned URL for ${fileName} (${uniqueKey})`)
+
     return NextResponse.json({
       presignedUrl,
@@ -146,22 +170,103 @@ async function handleBlobPresignedUrl(fileName: string, contentType: string, fil
         type: contentType,
       },
       directUploadSupported: true,
-      uploadHeaders: {
-        'x-ms-blob-type': 'BlockBlob',
-        'x-ms-blob-content-type': contentType,
-        'x-ms-meta-originalname': encodeURIComponent(fileName),
-        'x-ms-meta-uploadedat': new Date().toISOString(),
-      },
     })
   } catch (error) {
-    logger.error('Error generating Blob presigned URL:', error)
-    return createErrorResponse(
-      error instanceof Error ? error : new Error('Failed to generate Blob presigned URL')
-    )
+    if (error instanceof PresignedUrlError) {
+      throw error
+    }
+    logger.error('Error in S3 presigned URL generation:', error)
+    throw new StorageConfigError('Failed to generate S3 presigned URL')
+  }
+}
+
+async function handleBlobPresignedUrl(
+  fileName: string,
+  contentType: string,
+  fileSize: number,
+  uploadType: UploadType
+) {
+  try {
+    const config = uploadType === 'knowledge-base' ? BLOB_KB_CONFIG : BLOB_CONFIG
+
+    if (
+      !config.accountName ||
+      !config.containerName ||
+      (!config.accountKey && !config.connectionString)
+    ) {
+      throw new StorageConfigError(`Azure Blob configuration missing for ${uploadType} uploads`)
+    }
+
+    const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
+    const prefix = uploadType === 'knowledge-base' ? 'kb/' : ''
+    const uniqueKey = `${prefix}${Date.now()}-${uuidv4()}-${safeFileName}`
+
+    const blobServiceClient = getBlobServiceClient()
+    const containerClient = blobServiceClient.getContainerClient(config.containerName)
+    const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
+
+    const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
+      await import('@azure/storage-blob')
+
+    const sasOptions = {
+      containerName: config.containerName,
+      blobName: uniqueKey,
+      permissions: BlobSASPermissions.parse('w'), // Write permission for upload
+      startsOn: new Date(),
+      expiresOn: new Date(Date.now() + 3600 * 1000), // 1 hour expiration
+    }
+
+    let sasToken: string
+    try {
+      sasToken = generateBlobSASQueryParameters(
+        sasOptions,
+        new StorageSharedKeyCredential(config.accountName, config.accountKey || '')
+      ).toString()
+    } catch (blobError) {
+      logger.error('Failed to generate Azure Blob SAS token:', blobError)
+      throw new StorageConfigError(
+        'Failed to generate Azure Blob SAS token - check Azure credentials and permissions'
+      )
+    }
+
+    const presignedUrl = `${blockBlobClient.url}?${sasToken}`
+
+    const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
+
+    logger.info(`Generated ${uploadType} Azure Blob presigned URL for ${fileName} (${uniqueKey})`)
+
+    const uploadHeaders: Record<string, string> = {
+      'x-ms-blob-type': 'BlockBlob',
+      'x-ms-blob-content-type': contentType,
+      'x-ms-meta-originalname': encodeURIComponent(fileName),
+      'x-ms-meta-uploadedat': new Date().toISOString(),
+    }
+
+    if (uploadType === 'knowledge-base') {
+      uploadHeaders['x-ms-meta-purpose'] = 'knowledge-base'
+    }
+
+    return NextResponse.json({
+      presignedUrl,
+      fileInfo: {
+        path: servePath,
+        key: uniqueKey,
+        name: fileName,
+        size: fileSize,
+        type: contentType,
+      },
+      directUploadSupported: true,
+      uploadHeaders,
+    })
+  } catch (error) {
+    if (error instanceof PresignedUrlError) {
+      throw error
+    }
+    logger.error('Error in Azure Blob presigned URL generation:', error)
+    throw new StorageConfigError('Failed to generate Azure Blob presigned URL')
   }
 }

 // Handle preflight requests
 export async function OPTIONS() {
   return createOptionsResponse()
 }
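For context, a client of this endpoint first requests a presigned URL, then PUTs the file bytes directly to storage. A hypothetical sketch of that flow (endpoint path taken from the tests above; the header handling is inferred from the response shape):

```typescript
// Hypothetical client-side direct upload flow against the presigned endpoint.
async function directUpload(file: File): Promise<string> {
  const res = await fetch('/api/files/presigned?type=knowledge-base', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      fileName: file.name,
      contentType: file.type,
      fileSize: file.size,
    }),
  })

  const data = await res.json()
  if (!res.ok) {
    // Error responses now carry a machine-readable code:
    // VALIDATION_ERROR (400) or STORAGE_CONFIG_ERROR (500)
    throw new Error(`${data.code}: ${data.error}`)
  }

  // For Azure Blob the response includes required upload headers;
  // for S3 a plain PUT of the bytes is enough.
  await fetch(data.presignedUrl, {
    method: 'PUT',
    headers: data.uploadHeaders ?? { 'Content-Type': file.type },
    body: file,
  })

  return data.fileInfo.path // serve path used to read the file back
}
```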
@@ -1,7 +1,8 @@ import { readFile } from 'fs/promises'
 import type { NextRequest, NextResponse } from 'next/server'
 import { createLogger } from '@/lib/logs/console-logger'
-import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
+import { downloadFile, getStorageProvider, isUsingCloudStorage } from '@/lib/uploads'
+import { BLOB_KB_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/setup'
 import '@/lib/uploads/setup.server'

 import {
@@ -16,6 +17,19 @@ export const dynamic = 'force-dynamic'

 const logger = createLogger('FilesServeAPI')

+async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Buffer> {
+  return new Promise((resolve, reject) => {
+    const chunks: Buffer[] = []
+    readableStream.on('data', (data) => {
+      chunks.push(data instanceof Buffer ? data : Buffer.from(data))
+    })
+    readableStream.on('end', () => {
+      resolve(Buffer.concat(chunks))
+    })
+    readableStream.on('error', reject)
+  })
+}
+
 /**
  * Main API route handler for serving files
  */
@@ -85,12 +99,65 @@ async function handleLocalFile(filename: string): Promise<NextResponse> {
   }
 }

+async function downloadKBFile(cloudKey: string): Promise<Buffer> {
+  const storageProvider = getStorageProvider()
+
+  if (storageProvider === 'blob') {
+    logger.info(`Downloading KB file from Azure Blob Storage: ${cloudKey}`)
+    // Use KB-specific blob configuration
+    const { getBlobServiceClient } = await import('@/lib/uploads/blob/blob-client')
+    const blobServiceClient = getBlobServiceClient()
+    const containerClient = blobServiceClient.getContainerClient(BLOB_KB_CONFIG.containerName)
+    const blockBlobClient = containerClient.getBlockBlobClient(cloudKey)
+
+    const downloadBlockBlobResponse = await blockBlobClient.download()
+    if (!downloadBlockBlobResponse.readableStreamBody) {
+      throw new Error('Failed to get readable stream from blob download')
+    }
+
+    // Convert stream to buffer
+    return await streamToBuffer(downloadBlockBlobResponse.readableStreamBody)
+  }
+
+  if (storageProvider === 's3') {
+    logger.info(`Downloading KB file from S3: ${cloudKey}`)
+    // Use KB-specific S3 configuration
+    const { getS3Client } = await import('@/lib/uploads/s3/s3-client')
+    const { GetObjectCommand } = await import('@aws-sdk/client-s3')
+
+    const s3Client = getS3Client()
+    const command = new GetObjectCommand({
+      Bucket: S3_KB_CONFIG.bucket,
+      Key: cloudKey,
+    })
+
+    const response = await s3Client.send(command)
+    if (!response.Body) {
+      throw new Error('No body in S3 response')
+    }
+
+    // Convert stream to buffer using the same method as the regular S3 client
+    const stream = response.Body as any
+    return new Promise<Buffer>((resolve, reject) => {
+      const chunks: Buffer[] = []
+      stream.on('data', (chunk: Buffer) => chunks.push(chunk))
+      stream.on('end', () => resolve(Buffer.concat(chunks)))
+      stream.on('error', reject)
+    })
+  }
+
+  throw new Error(`Unsupported storage provider for KB files: ${storageProvider}`)
+}
+
 /**
  * Proxy cloud file through our server
  */
 async function handleCloudProxy(cloudKey: string): Promise<NextResponse> {
   try {
-    const fileBuffer = await downloadFile(cloudKey)
+    // Check if this is a KB file (starts with 'kb/')
+    const isKBFile = cloudKey.startsWith('kb/')
+
+    const fileBuffer = isKBFile ? await downloadKBFile(cloudKey) : await downloadFile(cloudKey)

     // Extract the original filename from the key (last part after last /)
     const originalFilename = cloudKey.split('/').pop() || 'download'
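The serve route's branch between the two download paths depends only on the object key prefix. A tiny illustration with hypothetical keys:

```typescript
// Hypothetical keys showing how handleCloudProxy routes downloads.
const generalKey = '1736512345678-9f8e7d6c-report.pdf' // -> downloadFile (general storage)
const kbKey = 'kb/1736512345678-9f8e7d6c-knowledge-doc.pdf' // -> downloadKBFile (KB storage)

const isKBFile = (key: string) => key.startsWith('kb/')
console.log(isKBFile(generalKey)) // false
console.log(isKBFile(kbKey)) // true
```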
@@ -40,6 +40,7 @@ describe('Individual Folder API Route', () => {
   }

   const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth(TEST_USER)
+  const mockGetUserEntityPermissions = vi.fn()

   function createFolderDbMock(options: FolderDbMockOptions = {}) {
     const {
@@ -109,6 +110,12 @@ describe('Individual Folder API Route', () => {
     vi.resetModules()
     vi.clearAllMocks()
     setupCommonApiMocks()
+
+    mockGetUserEntityPermissions.mockResolvedValue('admin')
+
+    vi.doMock('@/lib/permissions/utils', () => ({
+      getUserEntityPermissions: mockGetUserEntityPermissions,
+    }))
   })

   afterEach(() => {
@@ -181,6 +188,72 @@ describe('Individual Folder API Route', () => {
     expect(data).toHaveProperty('error', 'Unauthorized')
   })

+  it('should return 403 when user has only read permissions', async () => {
+    mockAuthenticatedUser()
+    mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions
+
+    const dbMock = createFolderDbMock()
+    vi.doMock('@/db', () => dbMock)
+
+    const req = createMockRequest('PUT', {
+      name: 'Updated Folder',
+    })
+    const params = Promise.resolve({ id: 'folder-1' })
+
+    const { PUT } = await import('./route')
+
+    const response = await PUT(req, { params })
+
+    expect(response.status).toBe(403)
+
+    const data = await response.json()
+    expect(data).toHaveProperty('error', 'Write access required to update folders')
+  })
+
+  it('should allow folder update for write permissions', async () => {
+    mockAuthenticatedUser()
+    mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions
+
+    const dbMock = createFolderDbMock()
+    vi.doMock('@/db', () => dbMock)
+
+    const req = createMockRequest('PUT', {
+      name: 'Updated Folder',
+    })
+    const params = Promise.resolve({ id: 'folder-1' })
+
+    const { PUT } = await import('./route')
+
+    const response = await PUT(req, { params })
+
+    expect(response.status).toBe(200)
+
+    const data = await response.json()
+    expect(data).toHaveProperty('folder')
+  })
+
+  it('should allow folder update for admin permissions', async () => {
+    mockAuthenticatedUser()
+    mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions
+
+    const dbMock = createFolderDbMock()
+    vi.doMock('@/db', () => dbMock)
+
+    const req = createMockRequest('PUT', {
+      name: 'Updated Folder',
+    })
+    const params = Promise.resolve({ id: 'folder-1' })
+
+    const { PUT } = await import('./route')
+
+    const response = await PUT(req, { params })
+
+    expect(response.status).toBe(200)
+
+    const data = await response.json()
+    expect(data).toHaveProperty('folder')
+  })
+
   it('should return 400 when trying to set folder as its own parent', async () => {
     mockAuthenticatedUser()
@@ -387,6 +460,68 @@ describe('Individual Folder API Route', () => {
     expect(data).toHaveProperty('error', 'Unauthorized')
   })

+  it('should return 403 when user has only read permissions for delete', async () => {
+    mockAuthenticatedUser()
+    mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions
+
+    const dbMock = createFolderDbMock()
+    vi.doMock('@/db', () => dbMock)
+
+    const req = createMockRequest('DELETE')
+    const params = Promise.resolve({ id: 'folder-1' })
+
+    const { DELETE } = await import('./route')
+
+    const response = await DELETE(req, { params })
+
+    expect(response.status).toBe(403)
+
+    const data = await response.json()
+    expect(data).toHaveProperty('error', 'Admin access required to delete folders')
+  })
+
+  it('should return 403 when user has only write permissions for delete', async () => {
+    mockAuthenticatedUser()
+    mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions (not enough for delete)
+
+    const dbMock = createFolderDbMock()
+    vi.doMock('@/db', () => dbMock)
+
+    const req = createMockRequest('DELETE')
+    const params = Promise.resolve({ id: 'folder-1' })
+
+    const { DELETE } = await import('./route')
+
+    const response = await DELETE(req, { params })
+
+    expect(response.status).toBe(403)
+
+    const data = await response.json()
+    expect(data).toHaveProperty('error', 'Admin access required to delete folders')
+  })
+
+  it('should allow folder deletion for admin permissions', async () => {
+    mockAuthenticatedUser()
+    mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions
+
+    const dbMock = createFolderDbMock({
+      folderLookupResult: mockFolder,
+    })
+    vi.doMock('@/db', () => dbMock)
+
+    const req = createMockRequest('DELETE')
+    const params = Promise.resolve({ id: 'folder-1' })
+
+    const { DELETE } = await import('./route')
+
+    const response = await DELETE(req, { params })
+
+    expect(response.status).toBe(200)
+
+    const data = await response.json()
+    expect(data).toHaveProperty('success', true)
+  })
+
   it('should handle database errors during deletion', async () => {
     mockAuthenticatedUser()
@@ -2,6 +2,7 @@ import { and, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console-logger'
+import { getUserEntityPermissions } from '@/lib/permissions/utils'
 import { db } from '@/db'
 import { workflow, workflowFolder } from '@/db/schema'
@@ -19,17 +20,31 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
     const body = await request.json()
     const { name, color, isExpanded, parentId } = body

-    // Verify the folder exists and belongs to the user
+    // Verify the folder exists
     const existingFolder = await db
       .select()
       .from(workflowFolder)
-      .where(and(eq(workflowFolder.id, id), eq(workflowFolder.userId, session.user.id)))
+      .where(eq(workflowFolder.id, id))
       .then((rows) => rows[0])

     if (!existingFolder) {
       return NextResponse.json({ error: 'Folder not found' }, { status: 404 })
     }

+    // Check if user has write permissions for the workspace
+    const workspacePermission = await getUserEntityPermissions(
+      session.user.id,
+      'workspace',
+      existingFolder.workspaceId
+    )
+
+    if (!workspacePermission || workspacePermission === 'read') {
+      return NextResponse.json(
+        { error: 'Write access required to update folders' },
+        { status: 403 }
+      )
+    }
+
     // Prevent setting a folder as its own parent or creating circular references
     if (parentId && parentId === id) {
       return NextResponse.json({ error: 'Folder cannot be its own parent' }, { status: 400 })
@@ -81,19 +96,33 @@ export async function DELETE(
|
||||
|
||||
const { id } = await params
|
||||
|
||||
// Verify the folder exists and belongs to the user
|
||||
// Verify the folder exists
|
||||
const existingFolder = await db
|
||||
.select()
|
||||
.from(workflowFolder)
|
||||
.where(and(eq(workflowFolder.id, id), eq(workflowFolder.userId, session.user.id)))
|
||||
.where(eq(workflowFolder.id, id))
|
||||
.then((rows) => rows[0])
|
||||
|
||||
if (!existingFolder) {
|
||||
return NextResponse.json({ error: 'Folder not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Check if user has admin permissions for the workspace (admin-only for deletions)
|
||||
const workspacePermission = await getUserEntityPermissions(
|
||||
session.user.id,
|
||||
'workspace',
|
||||
existingFolder.workspaceId
|
||||
)
|
||||
|
||||
if (workspacePermission !== 'admin') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Admin access required to delete folders' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Recursively delete folder and all its contents
|
||||
const deletionStats = await deleteFolderRecursively(id, session.user.id)
|
||||
const deletionStats = await deleteFolderRecursively(id, existingFolder.workspaceId)
|
||||
|
||||
logger.info('Deleted folder and all contents:', {
|
||||
id,
|
||||
@@ -113,41 +142,40 @@ export async function DELETE(
|
||||
// Helper function to recursively delete a folder and all its contents
|
||||
async function deleteFolderRecursively(
|
||||
folderId: string,
|
||||
userId: string
|
||||
workspaceId: string
|
||||
): Promise<{ folders: number; workflows: number }> {
|
||||
const stats = { folders: 0, workflows: 0 }
|
||||
|
||||
// Get all child folders first
|
||||
// Get all child folders first (workspace-scoped, not user-scoped)
|
||||
const childFolders = await db
|
||||
.select({ id: workflowFolder.id })
|
||||
.from(workflowFolder)
|
||||
.where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.userId, userId)))
|
||||
.where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.workspaceId, workspaceId)))
|
||||
|
||||
// Recursively delete child folders
|
||||
for (const childFolder of childFolders) {
|
||||
const childStats = await deleteFolderRecursively(childFolder.id, userId)
|
||||
const childStats = await deleteFolderRecursively(childFolder.id, workspaceId)
|
||||
stats.folders += childStats.folders
|
||||
stats.workflows += childStats.workflows
|
||||
}
|
||||
|
||||
// Delete all workflows in this folder
|
||||
// Delete all workflows in this folder (workspace-scoped, not user-scoped)
|
||||
// The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows
|
||||
const workflowsInFolder = await db
|
||||
.select({ id: workflow.id })
|
||||
.from(workflow)
|
||||
.where(and(eq(workflow.folderId, folderId), eq(workflow.userId, userId)))
|
||||
.where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId)))
|
||||
|
||||
if (workflowsInFolder.length > 0) {
|
||||
await db
|
||||
.delete(workflow)
|
||||
.where(and(eq(workflow.folderId, folderId), eq(workflow.userId, userId)))
|
||||
.where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId)))
|
||||
|
||||
stats.workflows += workflowsInFolder.length
|
||||
}
|
||||
|
||||
// Delete this folder
|
||||
await db
|
||||
.delete(workflowFolder)
|
||||
.where(and(eq(workflowFolder.id, folderId), eq(workflowFolder.userId, userId)))
|
||||
await db.delete(workflowFolder).where(eq(workflowFolder.id, folderId))
|
||||
|
||||
stats.folders += 1
|
||||
|
||||
|
||||
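The helper above deletes depth-first: child folders go before their parent, so no dangling `parentId` references are left behind, and the counts bubble up through the recursion. A minimal, self-contained sketch of the same traversal over an in-memory tree (the `Folder` shape here is illustrative, not the Drizzle schema):

```typescript
// Illustrative only: the same children-first deletion order as
// deleteFolderRecursively, over an in-memory tree instead of the database.
interface Folder {
  id: string
  children: Folder[]
  workflowCount: number
}

function deleteTree(folder: Folder): { folders: number; workflows: number } {
  const stats = { folders: 0, workflows: 0 }

  // Recurse into children first so parents are always deleted last
  for (const child of folder.children) {
    const childStats = deleteTree(child)
    stats.folders += childStats.folders
    stats.workflows += childStats.workflows
  }

  // Then "delete" this folder's workflows and the folder itself
  stats.workflows += folder.workflowCount
  stats.folders += 1
  return stats
}

// deleteTree({ id: 'root', workflowCount: 2, children: [{ id: 'a', workflowCount: 1, children: [] }] })
// -> { folders: 2, workflows: 3 }
```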
@@ -52,6 +52,7 @@ describe('Folders API Route', () => {
  const mockValues = vi.fn()
  const mockReturning = vi.fn()
  const mockTransaction = vi.fn()
  const mockGetUserEntityPermissions = vi.fn()

  beforeEach(() => {
    vi.resetModules()

@@ -72,6 +73,8 @@ describe('Folders API Route', () => {
    mockValues.mockReturnValue({ returning: mockReturning })
    mockReturning.mockReturnValue([mockFolders[0]])

    mockGetUserEntityPermissions.mockResolvedValue('admin')

    vi.doMock('@/db', () => ({
      db: {
        select: mockSelect,

@@ -79,6 +82,10 @@ describe('Folders API Route', () => {
        transaction: mockTransaction,
      },
    }))

    vi.doMock('@/lib/permissions/utils', () => ({
      getUserEntityPermissions: mockGetUserEntityPermissions,
    }))
  })

  afterEach(() => {

@@ -143,6 +150,42 @@ describe('Folders API Route', () => {
      expect(data).toHaveProperty('error', 'Workspace ID is required')
    })

    it('should return 403 when user has no workspace permissions', async () => {
      mockAuthenticatedUser()
      mockGetUserEntityPermissions.mockResolvedValue(null) // No permissions

      const mockRequest = createMockRequest('GET')
      Object.defineProperty(mockRequest, 'url', {
        value: 'http://localhost:3000/api/folders?workspaceId=workspace-123',
      })

      const { GET } = await import('./route')
      const response = await GET(mockRequest)

      expect(response.status).toBe(403)

      const data = await response.json()
      expect(data).toHaveProperty('error', 'Access denied to this workspace')
    })

    it('should return 403 when user has only read permissions', async () => {
      mockAuthenticatedUser()
      mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions

      const mockRequest = createMockRequest('GET')
      Object.defineProperty(mockRequest, 'url', {
        value: 'http://localhost:3000/api/folders?workspaceId=workspace-123',
      })

      const { GET } = await import('./route')
      const response = await GET(mockRequest)

      expect(response.status).toBe(200) // Should work for read permissions

      const data = await response.json()
      expect(data).toHaveProperty('folders')
    })

    it('should handle database errors gracefully', async () => {
      mockAuthenticatedUser()

@@ -295,6 +338,100 @@ describe('Folders API Route', () => {
      expect(data).toHaveProperty('error', 'Unauthorized')
    })

    it('should return 403 when user has only read permissions', async () => {
      mockAuthenticatedUser()
      mockGetUserEntityPermissions.mockResolvedValue('read') // Read-only permissions

      const req = createMockRequest('POST', {
        name: 'Test Folder',
        workspaceId: 'workspace-123',
      })

      const { POST } = await import('./route')
      const response = await POST(req)

      expect(response.status).toBe(403)

      const data = await response.json()
      expect(data).toHaveProperty('error', 'Write or Admin access required to create folders')
    })

    it('should allow folder creation for write permissions', async () => {
      mockAuthenticatedUser()
      mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions

      mockTransaction.mockImplementationOnce(async (callback: any) => {
        const tx = {
          select: vi.fn().mockReturnValue({
            from: vi.fn().mockReturnValue({
              where: vi.fn().mockReturnValue({
                orderBy: vi.fn().mockReturnValue({
                  limit: vi.fn().mockReturnValue([]), // No existing folders
                }),
              }),
            }),
          }),
          insert: vi.fn().mockReturnValue({
            values: vi.fn().mockReturnValue({
              returning: vi.fn().mockReturnValue([mockFolders[0]]),
            }),
          }),
        }
        return await callback(tx)
      })

      const req = createMockRequest('POST', {
        name: 'Test Folder',
        workspaceId: 'workspace-123',
      })

      const { POST } = await import('./route')
      const response = await POST(req)

      expect(response.status).toBe(200)

      const data = await response.json()
      expect(data).toHaveProperty('folder')
    })

    it('should allow folder creation for admin permissions', async () => {
      mockAuthenticatedUser()
      mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions

      mockTransaction.mockImplementationOnce(async (callback: any) => {
        const tx = {
          select: vi.fn().mockReturnValue({
            from: vi.fn().mockReturnValue({
              where: vi.fn().mockReturnValue({
                orderBy: vi.fn().mockReturnValue({
                  limit: vi.fn().mockReturnValue([]), // No existing folders
                }),
              }),
            }),
          }),
          insert: vi.fn().mockReturnValue({
            values: vi.fn().mockReturnValue({
              returning: vi.fn().mockReturnValue([mockFolders[0]]),
            }),
          }),
        }
        return await callback(tx)
      })

      const req = createMockRequest('POST', {
        name: 'Test Folder',
        workspaceId: 'workspace-123',
      })

      const { POST } = await import('./route')
      const response = await POST(req)

      expect(response.status).toBe(200)

      const data = await response.json()
      expect(data).toHaveProperty('folder')
    })

    it('should return 400 when required fields are missing', async () => {
      const testCases = [
        { name: '', workspaceId: 'workspace-123' }, // Missing name
@@ -2,6 +2,7 @@ import { and, asc, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { workflowFolder } from '@/db/schema'

@@ -22,13 +23,23 @@ export async function GET(request: NextRequest) {
      return NextResponse.json({ error: 'Workspace ID is required' }, { status: 400 })
    }

    // Fetch all folders for the workspace, ordered by sortOrder and createdAt
    // Check if user has workspace permissions
    const workspacePermission = await getUserEntityPermissions(
      session.user.id,
      'workspace',
      workspaceId
    )

    if (!workspacePermission) {
      return NextResponse.json({ error: 'Access denied to this workspace' }, { status: 403 })
    }

    // If user has workspace permissions, fetch ALL folders in the workspace
    // This allows shared workspace members to see folders created by other users
    const folders = await db
      .select()
      .from(workflowFolder)
      .where(
        and(eq(workflowFolder.workspaceId, workspaceId), eq(workflowFolder.userId, session.user.id))
      )
      .where(eq(workflowFolder.workspaceId, workspaceId))
      .orderBy(asc(workflowFolder.sortOrder), asc(workflowFolder.createdAt))

    return NextResponse.json({ folders })

@@ -53,19 +64,33 @@ export async function POST(request: NextRequest) {
      return NextResponse.json({ error: 'Name and workspace ID are required' }, { status: 400 })
    }

    // Check if user has workspace permissions (at least 'write' access to create folders)
    const workspacePermission = await getUserEntityPermissions(
      session.user.id,
      'workspace',
      workspaceId
    )

    if (!workspacePermission || workspacePermission === 'read') {
      return NextResponse.json(
        { error: 'Write or Admin access required to create folders' },
        { status: 403 }
      )
    }

    // Generate a new ID
    const id = crypto.randomUUID()

    // Use transaction to ensure sortOrder consistency
    const newFolder = await db.transaction(async (tx) => {
      // Get the next sort order for the parent (or root level)
      // Consider all folders in the workspace, not just those created by current user
      const existingFolders = await tx
        .select({ sortOrder: workflowFolder.sortOrder })
        .from(workflowFolder)
        .where(
          and(
            eq(workflowFolder.workspaceId, workspaceId),
            eq(workflowFolder.userId, session.user.id),
            parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
          )
        )
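Read together with the [id] route above, the folder endpoints now form a simple workspace-permission matrix: any membership can list, write or admin can create and update, and only admin can delete. A small sketch of that mapping (the `Permission` type and helper are illustrative, not code from the diff):

```typescript
// Illustrative permission gate matching the checks in the folder routes.
type Permission = 'read' | 'write' | 'admin' | null

const FOLDER_RULES = {
  list: (p: Permission) => p !== null, // any workspace member
  create: (p: Permission) => p === 'write' || p === 'admin',
  update: (p: Permission) => p === 'write' || p === 'admin',
  delete: (p: Permission) => p === 'admin', // admin-only
} as const

function statusFor(action: keyof typeof FOLDER_RULES, p: Permission): number {
  return FOLDER_RULES[action](p) ? 200 : 403
}

// statusFor('delete', 'write') -> 403, statusFor('list', 'read') -> 200
```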
apps/sim/app/api/logs/[executionId]/frozen-canvas/route.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { workflowExecutionLogs, workflowExecutionSnapshots } from '@/db/schema'

const logger = createLogger('FrozenCanvasAPI')

export async function GET(
  _request: NextRequest,
  { params }: { params: Promise<{ executionId: string }> }
) {
  try {
    const { executionId } = await params

    logger.debug(`Fetching frozen canvas data for execution: ${executionId}`)

    // Get the workflow execution log to find the snapshot
    const [workflowLog] = await db
      .select()
      .from(workflowExecutionLogs)
      .where(eq(workflowExecutionLogs.executionId, executionId))
      .limit(1)

    if (!workflowLog) {
      return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 })
    }

    // Get the workflow state snapshot
    const [snapshot] = await db
      .select()
      .from(workflowExecutionSnapshots)
      .where(eq(workflowExecutionSnapshots.id, workflowLog.stateSnapshotId))
      .limit(1)

    if (!snapshot) {
      return NextResponse.json({ error: 'Workflow state snapshot not found' }, { status: 404 })
    }

    const response = {
      executionId,
      workflowId: workflowLog.workflowId,
      workflowState: snapshot.stateData,
      executionMetadata: {
        trigger: workflowLog.trigger,
        startedAt: workflowLog.startedAt.toISOString(),
        endedAt: workflowLog.endedAt?.toISOString(),
        totalDurationMs: workflowLog.totalDurationMs,
        blockStats: {
          total: workflowLog.blockCount,
          success: workflowLog.successCount,
          error: workflowLog.errorCount,
          skipped: workflowLog.skippedCount,
        },
        cost: {
          total: workflowLog.totalCost ? Number.parseFloat(workflowLog.totalCost) : null,
          input: workflowLog.totalInputCost ? Number.parseFloat(workflowLog.totalInputCost) : null,
          output: workflowLog.totalOutputCost
            ? Number.parseFloat(workflowLog.totalOutputCost)
            : null,
        },
        totalTokens: workflowLog.totalTokens,
      },
    }

    logger.debug(`Successfully fetched frozen canvas data for execution: ${executionId}`)
    logger.debug(
      `Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
    )

    return NextResponse.json(response)
  } catch (error) {
    logger.error('Error fetching frozen canvas data:', error)
    return NextResponse.json({ error: 'Failed to fetch frozen canvas data' }, { status: 500 })
  }
}
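Given the file's location in the app router, this handler should be reachable at `GET /api/logs/{executionId}/frozen-canvas`. A sketch of how a client might consume it (the endpoint shape is inferred from the route file path and the response object above):

```typescript
// Hypothetical client-side fetch for the frozen canvas endpoint.
async function fetchFrozenCanvas(executionId: string) {
  const res = await fetch(`/api/logs/${executionId}/frozen-canvas`)
  if (!res.ok) {
    // 404: execution or snapshot missing; 500: server-side failure
    throw new Error(`Frozen canvas fetch failed: ${res.status}`)
  }
  const data = await res.json()
  // workflowState is the snapshot used to re-render the canvas as it was
  console.log(data.executionMetadata.blockStats, Object.keys(data.workflowState.blocks ?? {}))
  return data
}
```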
@@ -3,9 +3,10 @@ import { and, eq, inArray, lt, sql } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { snapshotService } from '@/lib/logs/snapshot-service'
import { getS3Client } from '@/lib/uploads/s3/s3-client'
import { db } from '@/db'
import { subscription, user, workflow, workflowLogs } from '@/db/schema'
import { subscription, user, workflow, workflowExecutionLogs } from '@/db/schema'

export const dynamic = 'force-dynamic'

@@ -66,99 +67,143 @@ export async function GET(request: Request) {
    const workflowIds = workflowsQuery.map((w) => w.id)

    const results = {
      total: 0,
      archived: 0,
      archiveFailed: 0,
      deleted: 0,
      deleteFailed: 0,
      enhancedLogs: {
        total: 0,
        archived: 0,
        archiveFailed: 0,
        deleted: 0,
        deleteFailed: 0,
      },
      snapshots: {
        cleaned: 0,
        cleanupFailed: 0,
      },
    }

    const startTime = Date.now()
    const MAX_BATCHES = 10

    // Process enhanced logging cleanup
    let batchesProcessed = 0
    let hasMoreLogs = true

    logger.info(`Starting enhanced logs cleanup for ${workflowIds.length} workflows`)

    while (hasMoreLogs && batchesProcessed < MAX_BATCHES) {
      const oldLogs = await db
      // Query enhanced execution logs that need cleanup
      const oldEnhancedLogs = await db
        .select({
          id: workflowLogs.id,
          workflowId: workflowLogs.workflowId,
          executionId: workflowLogs.executionId,
          level: workflowLogs.level,
          message: workflowLogs.message,
          duration: workflowLogs.duration,
          trigger: workflowLogs.trigger,
          createdAt: workflowLogs.createdAt,
          metadata: workflowLogs.metadata,
          id: workflowExecutionLogs.id,
          workflowId: workflowExecutionLogs.workflowId,
          executionId: workflowExecutionLogs.executionId,
          stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
          level: workflowExecutionLogs.level,
          message: workflowExecutionLogs.message,
          trigger: workflowExecutionLogs.trigger,
          startedAt: workflowExecutionLogs.startedAt,
          endedAt: workflowExecutionLogs.endedAt,
          totalDurationMs: workflowExecutionLogs.totalDurationMs,
          blockCount: workflowExecutionLogs.blockCount,
          successCount: workflowExecutionLogs.successCount,
          errorCount: workflowExecutionLogs.errorCount,
          skippedCount: workflowExecutionLogs.skippedCount,
          totalCost: workflowExecutionLogs.totalCost,
          totalInputCost: workflowExecutionLogs.totalInputCost,
          totalOutputCost: workflowExecutionLogs.totalOutputCost,
          totalTokens: workflowExecutionLogs.totalTokens,
          metadata: workflowExecutionLogs.metadata,
          createdAt: workflowExecutionLogs.createdAt,
        })
        .from(workflowLogs)
        .from(workflowExecutionLogs)
        .where(
          and(
            inArray(workflowLogs.workflowId, workflowIds),
            lt(workflowLogs.createdAt, retentionDate)
            inArray(workflowExecutionLogs.workflowId, workflowIds),
            lt(workflowExecutionLogs.createdAt, retentionDate)
          )
        )
        .limit(BATCH_SIZE)

      results.total += oldLogs.length
      results.enhancedLogs.total += oldEnhancedLogs.length

      for (const log of oldLogs) {
      for (const log of oldEnhancedLogs) {
        const today = new Date().toISOString().split('T')[0]

        const logKey = `archived-logs/${today}/${log.id}.json`
        const logData = JSON.stringify(log)
        // Archive enhanced log with more detailed structure
        const enhancedLogKey = `archived-enhanced-logs/${today}/${log.id}.json`
        const enhancedLogData = JSON.stringify({
          ...log,
          archivedAt: new Date().toISOString(),
          logType: 'enhanced',
        })

        try {
          await getS3Client().send(
            new PutObjectCommand({
              Bucket: S3_CONFIG.bucket,
              Key: logKey,
              Body: logData,
              Key: enhancedLogKey,
              Body: enhancedLogData,
              ContentType: 'application/json',
              Metadata: {
                logId: String(log.id),
                workflowId: String(log.workflowId),
                executionId: String(log.executionId),
                logType: 'enhanced',
                archivedAt: new Date().toISOString(),
              },
            })
          )

          results.archived++
          results.enhancedLogs.archived++

          try {
            // Delete enhanced log (will cascade to workflowExecutionBlocks due to foreign key)
            const deleteResult = await db
              .delete(workflowLogs)
              .where(eq(workflowLogs.id, log.id))
              .returning({ id: workflowLogs.id })
              .delete(workflowExecutionLogs)
              .where(eq(workflowExecutionLogs.id, log.id))
              .returning({ id: workflowExecutionLogs.id })

            if (deleteResult.length > 0) {
              results.deleted++
              results.enhancedLogs.deleted++
            } else {
              results.deleteFailed++
              logger.warn(`Failed to delete log ${log.id} after archiving: No rows deleted`)
              results.enhancedLogs.deleteFailed++
              logger.warn(
                `Failed to delete enhanced log ${log.id} after archiving: No rows deleted`
              )
            }
          } catch (deleteError) {
            results.deleteFailed++
            logger.error(`Error deleting log ${log.id} after archiving:`, { deleteError })
            results.enhancedLogs.deleteFailed++
            logger.error(`Error deleting enhanced log ${log.id} after archiving:`, { deleteError })
          }
        } catch (archiveError) {
          results.archiveFailed++
          logger.error(`Failed to archive log ${log.id}:`, { archiveError })
          results.enhancedLogs.archiveFailed++
          logger.error(`Failed to archive enhanced log ${log.id}:`, { archiveError })
        }
      }

      batchesProcessed++
      hasMoreLogs = oldLogs.length === BATCH_SIZE
      hasMoreLogs = oldEnhancedLogs.length === BATCH_SIZE

      logger.info(`Processed batch ${batchesProcessed}: ${oldLogs.length} logs`)
      logger.info(
        `Processed enhanced logs batch ${batchesProcessed}: ${oldEnhancedLogs.length} logs`
      )
    }

    // Cleanup orphaned snapshots
    try {
      const snapshotRetentionDays = Number(env.FREE_PLAN_LOG_RETENTION_DAYS || '7') + 1 // Keep snapshots 1 day longer
      const cleanedSnapshots = await snapshotService.cleanupOrphanedSnapshots(snapshotRetentionDays)
      results.snapshots.cleaned = cleanedSnapshots
      logger.info(`Cleaned up ${cleanedSnapshots} orphaned snapshots`)
    } catch (snapshotError) {
      results.snapshots.cleanupFailed = 1
      logger.error('Error cleaning up orphaned snapshots:', { snapshotError })
    }

    const timeElapsed = (Date.now() - startTime) / 1000
    const reachedLimit = batchesProcessed >= MAX_BATCHES && hasMoreLogs

    return NextResponse.json({
      message: `Processed ${batchesProcessed} batches (${results.total} logs) in ${timeElapsed.toFixed(2)}s${reachedLimit ? ' (batch limit reached)' : ''}`,
      message: `Processed ${batchesProcessed} enhanced log batches (${results.enhancedLogs.total} logs) in ${timeElapsed.toFixed(2)}s${reachedLimit ? ' (batch limit reached)' : ''}`,
      results,
      complete: !hasMoreLogs,
      batchLimitReached: reachedLimit,
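The cleanup loop follows an archive-then-delete pattern with bounded batches: a row is only deleted after its S3 archive succeeds, and `hasMoreLogs` stays true exactly when a batch came back full. A stripped-down sketch of that control flow (the `fetchBatch`, `archive`, and `remove` callbacks are placeholders, not the real Drizzle/S3 calls):

```typescript
// Generic bounded archive-then-delete loop, mirroring the cron route's shape.
async function cleanupInBatches(
  fetchBatch: (size: number) => Promise<string[]>, // returns ids older than retention
  archive: (id: string) => Promise<void>,
  remove: (id: string) => Promise<void>,
  batchSize = 100,
  maxBatches = 10
) {
  let batches = 0
  let hasMore = true
  while (hasMore && batches < maxBatches) {
    const ids = await fetchBatch(batchSize)
    for (const id of ids) {
      try {
        await archive(id) // never delete before the archive succeeds
        await remove(id)
      } catch {
        // archive failed: keep the row and retry on a later run
      }
    }
    batches++
    hasMore = ids.length === batchSize // a short batch means the backlog is drained
  }
  return { batches, complete: !hasMore }
}
```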
apps/sim/app/api/logs/enhanced/route.ts (new file, 499 lines)
@@ -0,0 +1,499 @@
import { and, desc, eq, gte, inArray, lte, or, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { workflow, workflowExecutionBlocks, workflowExecutionLogs } from '@/db/schema'

const logger = createLogger('EnhancedLogsAPI')

// Helper function to extract block executions from trace spans
function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): any[] {
  const blockExecutions: any[] = []

  function processSpan(span: any) {
    if (span.blockId) {
      blockExecutions.push({
        id: span.id,
        blockId: span.blockId,
        blockName: span.name || '',
        blockType: span.type,
        startedAt: span.startTime,
        endedAt: span.endTime,
        durationMs: span.duration || 0,
        status: span.status || 'success',
        errorMessage: span.output?.error || undefined,
        inputData: span.input || {},
        outputData: span.output || {},
        cost: span.cost || undefined,
        metadata: {},
      })
    }

    // Process children recursively
    if (span.children && Array.isArray(span.children)) {
      span.children.forEach(processSpan)
    }
  }

  traceSpans.forEach(processSpan)
  return blockExecutions
}

export const dynamic = 'force-dynamic'
export const revalidate = 0

const QueryParamsSchema = z.object({
  includeWorkflow: z.coerce.boolean().optional().default(false),
  includeBlocks: z.coerce.boolean().optional().default(false),
  limit: z.coerce.number().optional().default(100),
  offset: z.coerce.number().optional().default(0),
  level: z.string().optional(),
  workflowIds: z.string().optional(), // Comma-separated list of workflow IDs
  folderIds: z.string().optional(), // Comma-separated list of folder IDs
  triggers: z.string().optional(), // Comma-separated list of trigger types
  startDate: z.string().optional(),
  endDate: z.string().optional(),
  search: z.string().optional(),
})

export async function GET(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized enhanced logs access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id

    try {
      const { searchParams } = new URL(request.url)
      const params = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

      // Get user's workflows
      const userWorkflows = await db
        .select({ id: workflow.id, folderId: workflow.folderId })
        .from(workflow)
        .where(eq(workflow.userId, userId))

      const userWorkflowIds = userWorkflows.map((w) => w.id)

      if (userWorkflowIds.length === 0) {
        return NextResponse.json({ data: [], total: 0 }, { status: 200 })
      }

      // Build conditions for enhanced logs
      let conditions: SQL | undefined = inArray(workflowExecutionLogs.workflowId, userWorkflowIds)

      // Filter by level
      if (params.level && params.level !== 'all') {
        conditions = and(conditions, eq(workflowExecutionLogs.level, params.level))
      }

      // Filter by specific workflow IDs
      if (params.workflowIds) {
        const workflowIds = params.workflowIds.split(',').filter(Boolean)
        const filteredWorkflowIds = workflowIds.filter((id) => userWorkflowIds.includes(id))
        if (filteredWorkflowIds.length > 0) {
          conditions = and(
            conditions,
            inArray(workflowExecutionLogs.workflowId, filteredWorkflowIds)
          )
        }
      }

      // Filter by folder IDs
      if (params.folderIds) {
        const folderIds = params.folderIds.split(',').filter(Boolean)
        const workflowsInFolders = userWorkflows
          .filter((w) => w.folderId && folderIds.includes(w.folderId))
          .map((w) => w.id)

        if (workflowsInFolders.length > 0) {
          conditions = and(
            conditions,
            inArray(workflowExecutionLogs.workflowId, workflowsInFolders)
          )
        }
      }

      // Filter by triggers
      if (params.triggers) {
        const triggers = params.triggers.split(',').filter(Boolean)
        if (triggers.length > 0 && !triggers.includes('all')) {
          conditions = and(conditions, inArray(workflowExecutionLogs.trigger, triggers))
        }
      }

      // Filter by date range
      if (params.startDate) {
        conditions = and(
          conditions,
          gte(workflowExecutionLogs.startedAt, new Date(params.startDate))
        )
      }
      if (params.endDate) {
        conditions = and(conditions, lte(workflowExecutionLogs.startedAt, new Date(params.endDate)))
      }

      // Filter by search query
      if (params.search) {
        const searchTerm = `%${params.search}%`
        conditions = and(
          conditions,
          or(
            sql`${workflowExecutionLogs.message} ILIKE ${searchTerm}`,
            sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`
          )
        )
      }

      // Execute the query
      const logs = await db
        .select()
        .from(workflowExecutionLogs)
        .where(conditions)
        .orderBy(desc(workflowExecutionLogs.startedAt))
        .limit(params.limit)
        .offset(params.offset)

      // Get total count for pagination
      const countResult = await db
        .select({ count: sql<number>`count(*)` })
        .from(workflowExecutionLogs)
        .where(conditions)

      const count = countResult[0]?.count || 0

      // Get block executions for all workflow executions
      const executionIds = logs.map((log) => log.executionId)
      let blockExecutionsByExecution: Record<string, any[]> = {}

      if (executionIds.length > 0) {
        const blockLogs = await db
          .select()
          .from(workflowExecutionBlocks)
          .where(inArray(workflowExecutionBlocks.executionId, executionIds))
          .orderBy(workflowExecutionBlocks.startedAt)

        // Group block logs by execution ID
        blockExecutionsByExecution = blockLogs.reduce(
          (acc, blockLog) => {
            if (!acc[blockLog.executionId]) {
              acc[blockLog.executionId] = []
            }
            acc[blockLog.executionId].push({
              id: blockLog.id,
              blockId: blockLog.blockId,
              blockName: blockLog.blockName || '',
              blockType: blockLog.blockType,
              startedAt: blockLog.startedAt.toISOString(),
              endedAt: blockLog.endedAt?.toISOString() || blockLog.startedAt.toISOString(),
              durationMs: blockLog.durationMs || 0,
              status: blockLog.status,
              errorMessage: blockLog.errorMessage || undefined,
              errorStackTrace: blockLog.errorStackTrace || undefined,
              inputData: blockLog.inputData,
              outputData: blockLog.outputData,
              cost: blockLog.costTotal
                ? {
                    input: Number(blockLog.costInput) || 0,
                    output: Number(blockLog.costOutput) || 0,
                    total: Number(blockLog.costTotal) || 0,
                    tokens: {
                      prompt: blockLog.tokensPrompt || 0,
                      completion: blockLog.tokensCompletion || 0,
                      total: blockLog.tokensTotal || 0,
                    },
                    model: blockLog.modelUsed || '',
                  }
                : undefined,
              metadata: blockLog.metadata || {},
            })
            return acc
          },
          {} as Record<string, any[]>
        )
      }

      // Create clean trace spans from block executions
      const createTraceSpans = (blockExecutions: any[]) => {
        return blockExecutions.map((block, index) => {
          // For error blocks, include error information in the output
          let output = block.outputData
          if (block.status === 'error' && block.errorMessage) {
            output = {
              ...output,
              error: block.errorMessage,
              stackTrace: block.errorStackTrace,
            }
          }

          return {
            id: block.id,
            name: `Block ${block.blockName || block.blockType} (${block.blockType})`,
            type: block.blockType,
            duration: block.durationMs,
            startTime: block.startedAt,
            endTime: block.endedAt,
            status: block.status === 'success' ? 'success' : 'error',
            blockId: block.blockId,
            input: block.inputData,
            output,
            tokens: block.cost?.tokens?.total || 0,
            relativeStartMs: index * 100,
            children: [],
            toolCalls: [],
          }
        })
      }

      // Extract cost information from block executions
      const extractCostSummary = (blockExecutions: any[]) => {
        let totalCost = 0
        let totalInputCost = 0
        let totalOutputCost = 0
        let totalTokens = 0
        let totalPromptTokens = 0
        let totalCompletionTokens = 0
        const models = new Map()

        blockExecutions.forEach((block) => {
          if (block.cost) {
            totalCost += Number(block.cost.total) || 0
            totalInputCost += Number(block.cost.input) || 0
            totalOutputCost += Number(block.cost.output) || 0
            totalTokens += block.cost.tokens?.total || 0
            totalPromptTokens += block.cost.tokens?.prompt || 0
            totalCompletionTokens += block.cost.tokens?.completion || 0

            // Track per-model costs
            if (block.cost.model) {
              if (!models.has(block.cost.model)) {
                models.set(block.cost.model, {
                  input: 0,
                  output: 0,
                  total: 0,
                  tokens: { prompt: 0, completion: 0, total: 0 },
                })
              }
              const modelCost = models.get(block.cost.model)
              modelCost.input += Number(block.cost.input) || 0
              modelCost.output += Number(block.cost.output) || 0
              modelCost.total += Number(block.cost.total) || 0
              modelCost.tokens.prompt += block.cost.tokens?.prompt || 0
              modelCost.tokens.completion += block.cost.tokens?.completion || 0
              modelCost.tokens.total += block.cost.tokens?.total || 0
            }
          }
        })

        return {
          total: totalCost,
          input: totalInputCost,
          output: totalOutputCost,
          tokens: {
            total: totalTokens,
            prompt: totalPromptTokens,
            completion: totalCompletionTokens,
          },
          models: Object.fromEntries(models), // Convert Map to object for JSON serialization
        }
      }

      // Transform to clean enhanced log format
      const enhancedLogs = logs.map((log) => {
        const blockExecutions = blockExecutionsByExecution[log.executionId] || []

        // Use stored trace spans from metadata if available, otherwise create from block executions
        const storedTraceSpans = (log.metadata as any)?.traceSpans
        const traceSpans =
          storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
            ? storedTraceSpans
            : createTraceSpans(blockExecutions)

        // Use extracted cost summary if available, otherwise use stored values
        const costSummary =
          blockExecutions.length > 0
            ? extractCostSummary(blockExecutions)
            : {
                input: Number(log.totalInputCost) || 0,
                output: Number(log.totalOutputCost) || 0,
                total: Number(log.totalCost) || 0,
                tokens: {
                  total: log.totalTokens || 0,
                  prompt: (log.metadata as any)?.tokenBreakdown?.prompt || 0,
                  completion: (log.metadata as any)?.tokenBreakdown?.completion || 0,
                },
                models: (log.metadata as any)?.models || {},
              }

        return {
          id: log.id,
          workflowId: log.workflowId,
          executionId: log.executionId,
          level: log.level,
          message: log.message,
          duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
          trigger: log.trigger,
          createdAt: log.startedAt.toISOString(),
          metadata: {
            totalDuration: log.totalDurationMs,
            cost: costSummary,
            blockStats: {
              total: log.blockCount,
              success: log.successCount,
              error: log.errorCount,
              skipped: log.skippedCount,
            },
            traceSpans,
            blockExecutions,
            enhanced: true,
          },
        }
      })

      if (params.includeWorkflow) {
        const workflowIds = [...new Set(logs.map((log) => log.workflowId))]
        const workflowConditions = inArray(workflow.id, workflowIds)

        const workflowData = await db.select().from(workflow).where(workflowConditions)
        const workflowMap = new Map(workflowData.map((w) => [w.id, w]))

        const logsWithWorkflow = enhancedLogs.map((log) => ({
          ...log,
          workflow: workflowMap.get(log.workflowId) || null,
        }))

        return NextResponse.json(
          {
            data: logsWithWorkflow,
            total: Number(count),
            page: Math.floor(params.offset / params.limit) + 1,
            pageSize: params.limit,
            totalPages: Math.ceil(Number(count) / params.limit),
          },
          { status: 200 }
        )
      }

      // Include block execution data if requested
      if (params.includeBlocks) {
        const executionIds = logs.map((log) => log.executionId)

        if (executionIds.length > 0) {
          const blockLogs = await db
            .select()
            .from(workflowExecutionBlocks)
            .where(inArray(workflowExecutionBlocks.executionId, executionIds))
            .orderBy(workflowExecutionBlocks.startedAt)

          // Group block logs by execution ID
          const blockLogsByExecution = blockLogs.reduce(
            (acc, blockLog) => {
              if (!acc[blockLog.executionId]) {
                acc[blockLog.executionId] = []
              }
              acc[blockLog.executionId].push({
                id: blockLog.id,
                blockId: blockLog.blockId,
                blockName: blockLog.blockName || '',
                blockType: blockLog.blockType,
                startedAt: blockLog.startedAt.toISOString(),
                endedAt: blockLog.endedAt?.toISOString() || blockLog.startedAt.toISOString(),
                durationMs: blockLog.durationMs || 0,
                status: blockLog.status,
                errorMessage: blockLog.errorMessage || undefined,
                inputData: blockLog.inputData,
                outputData: blockLog.outputData,
                cost: blockLog.costTotal
                  ? {
                      input: Number(blockLog.costInput) || 0,
                      output: Number(blockLog.costOutput) || 0,
                      total: Number(blockLog.costTotal) || 0,
                      tokens: {
                        prompt: blockLog.tokensPrompt || 0,
                        completion: blockLog.tokensCompletion || 0,
                        total: blockLog.tokensTotal || 0,
                      },
                      model: blockLog.modelUsed || '',
                    }
                  : undefined,
              })
              return acc
            },
            {} as Record<string, any[]>
          )

          // For executions with no block logs in the database,
          // extract block executions from stored trace spans in metadata
          logs.forEach((log) => {
            if (
              !blockLogsByExecution[log.executionId] ||
              blockLogsByExecution[log.executionId].length === 0
            ) {
              const storedTraceSpans = (log.metadata as any)?.traceSpans
              if (storedTraceSpans && Array.isArray(storedTraceSpans)) {
                blockLogsByExecution[log.executionId] =
                  extractBlockExecutionsFromTraceSpans(storedTraceSpans)
              }
            }
          })

          // Add block logs to metadata
          const logsWithBlocks = enhancedLogs.map((log) => ({
            ...log,
            metadata: {
              ...log.metadata,
              blockExecutions: blockLogsByExecution[log.executionId] || [],
            },
          }))

          return NextResponse.json(
            {
              data: logsWithBlocks,
              total: Number(count),
              page: Math.floor(params.offset / params.limit) + 1,
              pageSize: params.limit,
              totalPages: Math.ceil(Number(count) / params.limit),
            },
            { status: 200 }
          )
        }
      }

      // Return basic logs
      return NextResponse.json(
        {
          data: enhancedLogs,
          total: Number(count),
          page: Math.floor(params.offset / params.limit) + 1,
          pageSize: params.limit,
          totalPages: Math.ceil(Number(count) / params.limit),
        },
        { status: 200 }
      )
    } catch (validationError) {
      if (validationError instanceof z.ZodError) {
        logger.warn(`[${requestId}] Invalid enhanced logs request parameters`, {
          errors: validationError.errors,
        })
        return NextResponse.json(
          {
            error: 'Invalid request parameters',
            details: validationError.errors,
          },
          { status: 400 }
        )
      }
      throw validationError
    }
  } catch (error: any) {
    logger.error(`[${requestId}] Enhanced logs fetch error`, error)
    return NextResponse.json({ error: error.message }, { status: 500 })
  }
}
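The querystring contract is fully described by `QueryParamsSchema`: everything arrives as a string and is coerced, and list filters are passed comma-separated. A sketch of a client call against this route (the `/api/logs/enhanced` path comes from the file header above; the filter values are made up):

```typescript
// Hypothetical query against the enhanced logs endpoint.
const qs = new URLSearchParams({
  level: 'error', // single level, or omit / 'all' for everything
  triggers: 'schedule,webhook', // comma-separated trigger types
  includeBlocks: 'true', // coerced to boolean by the zod schema
  limit: '25',
  offset: '0',
  search: 'timeout', // ILIKE match on message or executionId
})

const res = await fetch(`/api/logs/enhanced?${qs}`)
const { data, total, page, totalPages } = await res.json()
console.log(`page ${page}/${totalPages}, ${data.length} of ${total} logs`)
```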
@@ -1,4 +1,4 @@
import { and, eq, isNull } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'

@@ -40,7 +40,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
    const memories = await db
      .select()
      .from(memory)
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId), isNull(memory.deletedAt)))
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
      .orderBy(memory.createdAt)
      .limit(1)

@@ -112,7 +112,7 @@ export async function DELETE(
    const existingMemory = await db
      .select({ id: memory.id })
      .from(memory)
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId), isNull(memory.deletedAt)))
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
      .limit(1)

    if (existingMemory.length === 0) {

@@ -128,14 +128,8 @@ export async function DELETE(
      )
    }

    // Soft delete by setting deletedAt timestamp
    await db
      .update(memory)
      .set({
        deletedAt: new Date(),
        updatedAt: new Date(),
      })
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
    // Hard delete the memory
    await db.delete(memory).where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))

    logger.info(`[${requestId}] Memory deleted successfully: ${id} for workflow: ${workflowId}`)
    return NextResponse.json(

@@ -202,7 +196,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
    const existingMemories = await db
      .select()
      .from(memory)
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId), isNull(memory.deletedAt)))
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
      .limit(1)

    if (existingMemories.length === 0) {

@@ -250,13 +244,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
    }

    // Update the memory with new data
    await db
      .update(memory)
      .set({
        data,
        updatedAt: new Date(),
      })
      .where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))
    await db.delete(memory).where(and(eq(memory.key, id), eq(memory.workflowId, workflowId)))

    // Fetch the updated memory
    const updatedMemories = await db
@@ -137,24 +137,22 @@ export async function POST(request: NextRequest) {
    const safeExecutionData = {
      success: executionData.success,
      output: {
        response: {
          // Sanitize content to remove non-ASCII characters that would cause ByteString errors
          content: executionData.output?.response?.content
            ? String(executionData.output.response.content).replace(/[\u0080-\uFFFF]/g, '')
            : '',
          model: executionData.output?.response?.model,
          tokens: executionData.output?.response?.tokens || {
            prompt: 0,
            completion: 0,
            total: 0,
          },
          // Sanitize any potential Unicode characters in tool calls
          toolCalls: executionData.output?.response?.toolCalls
            ? sanitizeToolCalls(executionData.output.response.toolCalls)
            : undefined,
          providerTiming: executionData.output?.response?.providerTiming,
          cost: executionData.output?.response?.cost,
        // Sanitize content to remove non-ASCII characters that would cause ByteString errors
        content: executionData.output?.content
          ? String(executionData.output.content).replace(/[\u0080-\uFFFF]/g, '')
          : '',
        model: executionData.output?.model,
        tokens: executionData.output?.tokens || {
          prompt: 0,
          completion: 0,
          total: 0,
        },
        // Sanitize any potential Unicode characters in tool calls
        toolCalls: executionData.output?.toolCalls
          ? sanitizeToolCalls(executionData.output.toolCalls)
          : undefined,
        providerTiming: executionData.output?.providerTiming,
        cost: executionData.output?.cost,
      },
      error: executionData.error,
      logs: [], // Strip logs from header to avoid encoding issues
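The repeated `replace(/[\u0080-\uFFFF]/g, '')` is there because this summary is serialized into an HTTP header, and header values must be ByteStrings (code points up to 0xFF); stripping everything above U+007F keeps the payload safely inside that range, at the cost of dropping accents and symbols. A standalone illustration (not the route code):

```typescript
// Why the sanitization is needed: setting a header to a string containing
// code points above U+00FF throws in fetch/undici ("invalid header value").
const summary = 'résumé → done ✓'

// new Headers({ 'X-Execution-Data': summary }) // would throw for '→' and '✓'

// The route's fix: drop every code point above U+007F before serializing.
const safe = summary.replace(/[\u0080-\uFFFF]/g, '')
console.log(safe) // "rsum  done ", lossy but header-safe
```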
@@ -46,11 +46,19 @@ const formatResponse = (responseData: any, status = 200) => {
 */
const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
  const errorMessage = error instanceof Error ? error.message : String(error)
  const errorStack = error instanceof Error ? error.stack : undefined

  logger.error('Creating error response', {
    errorMessage,
    status,
    stack: process.env.NODE_ENV === 'development' ? errorStack : undefined,
  })

  return formatResponse(
    {
      success: false,
      error: errorMessage,
      stack: process.env.NODE_ENV === 'development' ? errorStack : undefined,
      ...additionalData,
    },
    status

@@ -67,6 +75,7 @@ export async function GET(request: Request) {
  const requestId = crypto.randomUUID().slice(0, 8)

  if (!targetUrl) {
    logger.error(`[${requestId}] Missing 'url' parameter`)
    return createErrorResponse("Missing 'url' parameter", 400)
  }

@@ -126,6 +135,10 @@ export async function GET(request: Request) {
        : response.statusText || `HTTP error ${response.status}`
      : undefined

  if (!response.ok) {
    logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`)
  }

  // Return the proxied response
  return formatResponse({
    success: response.ok,

@@ -139,6 +152,7 @@ export async function GET(request: Request) {
    logger.error(`[${requestId}] Proxy GET request failed`, {
      url: targetUrl,
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
    })

    return createErrorResponse(error)

@@ -151,22 +165,40 @@ export async function POST(request: Request) {
  const startTimeISO = startTime.toISOString()

  try {
    const { toolId, params } = await request.json()
    // Parse request body
    let requestBody
    try {
      requestBody = await request.json()
    } catch (parseError) {
      logger.error(`[${requestId}] Failed to parse request body`, {
        error: parseError instanceof Error ? parseError.message : String(parseError),
      })
      throw new Error('Invalid JSON in request body')
    }

    logger.debug(`[${requestId}] Proxy request for tool`, {
      toolId,
      hasParams: !!params && Object.keys(params).length > 0,
    })
    const { toolId, params } = requestBody

    if (!toolId) {
      logger.error(`[${requestId}] Missing toolId in request`)
      throw new Error('Missing toolId in request')
    }

    logger.info(`[${requestId}] Processing tool: ${toolId}`)

    // Get tool
    const tool = getTool(toolId)

    if (!tool) {
      logger.error(`[${requestId}] Tool not found: ${toolId}`)
      throw new Error(`Tool not found: ${toolId}`)
    }

    // Validate the tool and its parameters
    try {
      validateToolRequest(toolId, tool, params)
    } catch (error) {
      logger.warn(`[${requestId}] Tool validation failed`, {
        toolId,
        error: error instanceof Error ? error.message : String(error),
    } catch (validationError) {
      logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, {
        error: validationError instanceof Error ? validationError.message : String(validationError),
      })

      // Add timing information even to error responses

@@ -174,23 +206,18 @@ export async function POST(request: Request) {
      const endTimeISO = endTime.toISOString()
      const duration = endTime.getTime() - startTime.getTime()

      return createErrorResponse(error, 400, {
      return createErrorResponse(validationError, 400, {
        startTime: startTimeISO,
        endTime: endTimeISO,
        duration,
      })
    }
    if (!tool) {
      logger.error(`[${requestId}] Tool not found`, { toolId })
      throw new Error(`Tool not found: ${toolId}`)
    }

    // Use executeTool with skipProxy=true to prevent recursive proxy calls, and skipPostProcess=true to prevent duplicate post-processing
    // Execute tool
    const result = await executeTool(toolId, params, true, true)

    if (!result.success) {
      logger.warn(`[${requestId}] Tool execution failed`, {
        toolId,
      logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, {
        error: result.error || 'Unknown error',
      })

@@ -217,9 +244,13 @@ export async function POST(request: Request) {
      }
      // Fallback
      throw new Error('Tool returned an error')
    } catch (e) {
      if (e instanceof Error) {
        throw e
    } catch (transformError) {
      logger.error(`[${requestId}] Error transformation failed for ${toolId}`, {
        error:
          transformError instanceof Error ? transformError.message : String(transformError),
      })
      if (transformError instanceof Error) {
        throw transformError
      }
      throw new Error('Tool returned an error')
    }

@@ -246,12 +277,7 @@ export async function POST(request: Request) {
      },
    }

    logger.info(`[${requestId}] Tool executed successfully`, {
      toolId,
      duration,
      startTime: startTimeISO,
      endTime: endTimeISO,
    })
    logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`)

    // Return the response with CORS headers
    return formatResponse(responseWithTimingData)

@@ -259,6 +285,7 @@ export async function POST(request: Request) {
    logger.error(`[${requestId}] Proxy request failed`, {
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
      name: error instanceof Error ? error.name : undefined,
    })

    // Add timing information even to error responses
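The POST handler's request contract is a JSON body of `{ toolId, params }`; malformed JSON, a missing `toolId`, an unknown tool, and parameter-validation failures each produce a structured error with timing attached. A sketch of calling it (the `/api/proxy` path is an assumption based on the route's role; adjust to the actual mount point):

```typescript
// Hypothetical call into the tool proxy endpoint.
async function runToolViaProxy(toolId: string, params: Record<string, unknown>) {
  const res = await fetch('/api/proxy', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolId, params }), // the contract the handler destructures
  })
  const body = await res.json()
  if (!body.success) {
    // Error responses still carry startTime / endTime / duration
    throw new Error(`${toolId} failed after ${body.duration}ms: ${body.error}`)
  }
  return body
}
```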
@@ -5,7 +5,6 @@
 */
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,
  mockExecutionDependencies,
  mockScheduleExecuteDb,
  sampleWorkflowState,

@@ -23,7 +22,7 @@ describe('Scheduled Workflow Execution API Route', () => {
    blocks: sampleWorkflowState.blocks,
    edges: sampleWorkflowState.edges || [],
    loops: sampleWorkflowState.loops || {},
    parallels: sampleWorkflowState.parallels || {},
    parallels: {},
    isFromNormalizedTables: true,
  }),
}))

@@ -122,9 +121,8 @@ describe('Scheduled Workflow Execution API Route', () => {
      })),
    }))

    const req = createMockRequest('GET')
    const { GET } = await import('./route')
    const response = await GET(req)
    const response = await GET()
    expect(response).toBeDefined()

    const data = await response.json()

@@ -136,7 +134,6 @@ describe('Scheduled Workflow Execution API Route', () => {
    const persistExecutionErrorMock = vi.fn().mockResolvedValue(undefined)

    vi.doMock('@/lib/logs/execution-logger', () => ({
      persistExecutionLogs: vi.fn().mockResolvedValue(undefined),
      persistExecutionError: persistExecutionErrorMock,
    }))

@@ -146,9 +143,8 @@ describe('Scheduled Workflow Execution API Route', () => {
      })),
    }))

    const req = createMockRequest('GET')
    const { GET } = await import('./route')
    const response = await GET(req)
    const response = await GET()

    expect(response).toBeDefined()

@@ -176,9 +172,8 @@ describe('Scheduled Workflow Execution API Route', () => {
      return { db: mockDb }
    })

    const req = createMockRequest('GET')
    const { GET } = await import('./route')
    const response = await GET(req)
    const response = await GET()
    expect(response.status).toBe(200)
    const data = await response.json()
    expect(data).toHaveProperty('executedCount', 0)

@@ -205,9 +200,8 @@ describe('Scheduled Workflow Execution API Route', () => {
      return { db: mockDb }
    })

    const req = createMockRequest('GET')
    const { GET } = await import('./route')
    const response = await GET(req)
    const response = await GET()
    expect(response.status).toBe(500)
    const data = await response.json()

@@ -238,9 +232,8 @@ describe('Scheduled Workflow Execution API Route', () => {
      ],
    })

    const req = createMockRequest('GET')
    const { GET } = await import('./route')
    const response = await GET(req)
    const response = await GET()

    expect(response.status).toBe(200)
  })

@@ -269,9 +262,8 @@ describe('Scheduled Workflow Execution API Route', () => {
      ],
    })

    const req = createMockRequest('GET')
    const { GET } = await import('./route')
    const response = await GET(req)
    const response = await GET()

    expect(response.status).toBe(200)
    const data = await response.json()
@@ -1,10 +1,10 @@
import { Cron } from 'croner'
import { and, eq, lte, not, sql } from 'drizzle-orm'
-import { type NextRequest, NextResponse } from 'next/server'
+import { NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console-logger'
-import { persistExecutionError, persistExecutionLogs } from '@/lib/logs/execution-logger'
+import { EnhancedLoggingSession } from '@/lib/logs/enhanced-logging-session'
import { buildTraceSpans } from '@/lib/logs/trace-spans'
import {
  type BlockState,
@@ -17,7 +17,7 @@ import { decryptSecret } from '@/lib/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { db } from '@/db'
-import { environment, userStats, workflow, workflowSchedule } from '@/db/schema'
+import { environment as environmentTable, userStats, workflow, workflowSchedule } from '@/db/schema'
import { Executor } from '@/executor'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
@@ -58,7 +58,7 @@ const EnvVarsSchema = z.record(z.string())

const runningExecutions = new Set<string>()

-export async function GET(req: NextRequest) {
+export async function GET() {
  logger.info(`Scheduled execution triggered at ${new Date().toISOString()}`)
  const requestId = crypto.randomUUID().slice(0, 8)
  const now = new Date()
@@ -85,6 +85,7 @@ export async function GET(req: NextRequest) {

  for (const schedule of dueSchedules) {
    const executionId = uuidv4()
+   let loggingSession: EnhancedLoggingSession | null = null

    try {
      if (runningExecutions.has(schedule.workflowId)) {
@@ -118,15 +119,7 @@ export async function GET(req: NextRequest) {
        }
      )

-     await persistExecutionError(
-       schedule.workflowId,
-       executionId,
-       new Error(
-         usageCheck.message ||
-           'Usage limit exceeded. Please upgrade your plan to continue running scheduled workflows.'
-       ),
-       'schedule'
-     )
+     // Error logging handled by enhanced logging session

      const retryDelay = 24 * 60 * 60 * 1000 // 24 hour delay for exceeded limits
      const nextRetryAt = new Date(now.getTime() + retryDelay)
@@ -176,8 +169,8 @@ export async function GET(req: NextRequest) {
      // Retrieve environment variables for this user (if any).
      const [userEnv] = await db
        .select()
-       .from(environment)
-       .where(eq(environment.userId, workflowRecord.userId))
+       .from(environmentTable)
+       .where(eq(environmentTable.userId, workflowRecord.userId))
        .limit(1)

      if (!userEnv) {
@@ -306,6 +299,30 @@ export async function GET(req: NextRequest) {
        logger.debug(`[${requestId}] No workflow variables found for: ${schedule.workflowId}`)
      }

+     // Start enhanced logging
+     loggingSession = new EnhancedLoggingSession(
+       schedule.workflowId,
+       executionId,
+       'schedule',
+       requestId
+     )
+
+     // Load the actual workflow state from normalized tables
+     const enhancedNormalizedData = await loadWorkflowFromNormalizedTables(schedule.workflowId)
+
+     if (!enhancedNormalizedData) {
+       throw new Error(
+         `Workflow ${schedule.workflowId} has no normalized data available. Ensure the workflow is properly saved to normalized tables.`
+       )
+     }
+
+     // Start enhanced logging with environment variables
+     await loggingSession.safeStart({
+       userId: workflowRecord.userId,
+       workspaceId: workflowRecord.workspaceId || '',
+       variables: variables || {},
+     })
+
      const executor = new Executor(
        serializedWorkflow,
        processedBlockStates,
@@ -313,6 +330,10 @@ export async function GET(req: NextRequest) {
        input,
        workflowVariables
      )

+     // Set up enhanced logging on the executor
+     loggingSession.setupExecutor(executor)
+
      const result = await executor.execute(schedule.workflowId)

      const executionResult =
@@ -343,13 +364,16 @@ export async function GET(req: NextRequest) {

      const { traceSpans, totalDuration } = buildTraceSpans(executionResult)

      const enrichedResult = {
        ...executionResult,
        traceSpans,
        totalDuration,
      }
+     // Individual block executions are logged automatically by the enhanced logging session

-     await persistExecutionLogs(schedule.workflowId, executionId, enrichedResult, 'schedule')
+     // Complete enhanced logging
+     await loggingSession.safeComplete({
+       endedAt: new Date().toISOString(),
+       totalDurationMs: totalDuration || 0,
+       finalOutput: executionResult.output || {},
+       traceSpans: (traceSpans || []) as any,
+     })

      if (executionResult.success) {
        logger.info(`[${requestId}] Workflow ${schedule.workflowId} executed successfully`)
@@ -413,7 +437,18 @@ export async function GET(req: NextRequest) {
        error
      )

-     await persistExecutionError(schedule.workflowId, executionId, error, 'schedule')
+     // Error logging handled by enhanced logging session
+
+     if (loggingSession) {
+       await loggingSession.safeCompleteWithError({
+         endedAt: new Date().toISOString(),
+         totalDurationMs: 0,
+         error: {
+           message: error.message || 'Scheduled workflow execution failed',
+           stackTrace: error.stack,
+         },
+       })
+     }

      let nextRunAt: Date
      try {
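Taken together, the hunks above replace the one-shot `persistExecution*` helpers with a session object that brackets the whole run. A minimal sketch of that lifecycle as it appears at the call sites in this diff — the method shapes are inferred from those call sites, not from the `EnhancedLoggingSession` source itself:

```typescript
// Sketch only: shapes inferred from the call sites in the diff above.
import { EnhancedLoggingSession } from '@/lib/logs/enhanced-logging-session'

async function runWithLogging(workflowId: string, executionId: string, requestId: string) {
  // One session per execution, keyed by trigger type ('schedule' here).
  const session = new EnhancedLoggingSession(workflowId, executionId, 'schedule', requestId)

  await session.safeStart({ userId: 'user-1', workspaceId: 'ws-1', variables: {} })
  try {
    // session.setupExecutor(executor) would hook block-level logging here.
    const output = { ok: true } // stand-in for executor.execute(workflowId)
    await session.safeComplete({
      endedAt: new Date().toISOString(),
      totalDurationMs: 0,
      finalOutput: output,
      traceSpans: [],
    })
  } catch (error: any) {
    // The safe* methods replace the old persistExecutionError call.
    await session.safeCompleteWithError({
      endedAt: new Date().toISOString(),
      totalDurationMs: 0,
      error: { message: error.message, stackTrace: error.stack },
    })
  }
}
```

The `safe*` naming suggests these calls swallow their own failures, which is why the route no longer wraps logging in separate try/catch blocks.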
@@ -14,6 +14,7 @@ const SettingsSchema = z.object({
  debugMode: z.boolean().optional(),
  autoConnect: z.boolean().optional(),
  autoFillEnvVars: z.boolean().optional(),
+ autoPan: z.boolean().optional(),
  telemetryEnabled: z.boolean().optional(),
  telemetryNotifiedUser: z.boolean().optional(),
  emailPreferences: z
@@ -32,6 +33,7 @@ const defaultSettings = {
  debugMode: false,
  autoConnect: true,
  autoFillEnvVars: true,
+ autoPan: true,
  telemetryEnabled: true,
  telemetryNotifiedUser: false,
  emailPreferences: {},
@@ -65,6 +67,7 @@ export async function GET() {
  debugMode: userSettings.debugMode,
  autoConnect: userSettings.autoConnect,
  autoFillEnvVars: userSettings.autoFillEnvVars,
+ autoPan: userSettings.autoPan,
  telemetryEnabled: userSettings.telemetryEnabled,
  telemetryNotifiedUser: userSettings.telemetryNotifiedUser,
  emailPreferences: userSettings.emailPreferences ?? {},
@@ -32,7 +32,6 @@ const executeMock = vi.fn().mockResolvedValue({
    endTime: new Date().toISOString(),
  },
})
const persistExecutionLogsMock = vi.fn().mockResolvedValue(undefined)
const persistExecutionErrorMock = vi.fn().mockResolvedValue(undefined)

// Mock the DB schema objects
@@ -80,7 +79,6 @@ vi.mock('@/executor', () => ({
}))

vi.mock('@/lib/logs/execution-logger', () => ({
  persistExecutionLogs: persistExecutionLogsMock,
  persistExecutionError: persistExecutionErrorMock,
}))
@@ -31,6 +31,27 @@ describe('Workflow Deployment API Route', () => {
    }),
  }))

+ // Mock serializer
+ vi.doMock('@/serializer', () => ({
+   serializeWorkflow: vi.fn().mockReturnValue({
+     version: '1.0',
+     blocks: [
+       {
+         id: 'block-1',
+         metadata: { id: 'starter', name: 'Start' },
+         position: { x: 100, y: 100 },
+         config: { tool: 'starter', params: {} },
+         inputs: {},
+         outputs: {},
+         enabled: true,
+       },
+     ],
+     connections: [],
+     loops: {},
+     parallels: {},
+   }),
+ }))
+
  vi.doMock('@/lib/workflows/db-helpers', () => ({
    loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue({
      blocks: {
@@ -75,6 +96,80 @@ describe('Workflow Deployment API Route', () => {
      })
    }),
  }))

+ // Mock the database schema module
+ vi.doMock('@/db/schema', () => ({
+   workflow: {},
+   apiKey: {},
+   workflowBlocks: {},
+   workflowEdges: {},
+   workflowSubflows: {},
+ }))
+
+ // Mock drizzle-orm operators
+ vi.doMock('drizzle-orm', () => ({
+   eq: vi.fn((field, value) => ({ field, value, type: 'eq' })),
+   and: vi.fn((...conditions) => ({ conditions, type: 'and' })),
+ }))
+
+ // Mock the database module with proper chainable query builder
+ let selectCallCount = 0
+ vi.doMock('@/db', () => ({
+   db: {
+     select: vi.fn().mockImplementation(() => {
+       selectCallCount++
+       return {
+         from: vi.fn().mockImplementation(() => ({
+           where: vi.fn().mockImplementation(() => ({
+             limit: vi.fn().mockImplementation(() => {
+               // First call: workflow lookup (should return workflow)
+               if (selectCallCount === 1) {
+                 return Promise.resolve([{ userId: 'user-id', id: 'workflow-id' }])
+               }
+               // Second call: blocks lookup
+               if (selectCallCount === 2) {
+                 return Promise.resolve([
+                   {
+                     id: 'block-1',
+                     type: 'starter',
+                     name: 'Start',
+                     positionX: '100',
+                     positionY: '100',
+                     enabled: true,
+                     subBlocks: {},
+                     data: {},
+                   },
+                 ])
+               }
+               // Third call: edges lookup
+               if (selectCallCount === 3) {
+                 return Promise.resolve([])
+               }
+               // Fourth call: subflows lookup
+               if (selectCallCount === 4) {
+                 return Promise.resolve([])
+               }
+               // Fifth call: API key lookup (should return empty for new key test)
+               if (selectCallCount === 5) {
+                 return Promise.resolve([])
+               }
+               // Default: empty array
+               return Promise.resolve([])
+             }),
+           })),
+         })),
+       }
+     }),
+     insert: vi.fn().mockImplementation(() => ({
+       values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
+     })),
+     update: vi.fn().mockImplementation(() => ({
+       set: vi.fn().mockImplementation(() => ({
+         where: vi.fn().mockResolvedValue([]),
+       })),
+     })),
+   },
+ }))
})

afterEach(() => {
@@ -126,16 +221,7 @@ describe('Workflow Deployment API Route', () => {
  * This should generate a new API key
  */
 it('should create new API key when deploying workflow for user with no API key', async () => {
-   const mockInsert = vi.fn().mockReturnValue({
-     values: vi.fn().mockReturnValue(undefined),
-   })
-
-   const mockUpdate = vi.fn().mockReturnValue({
-     set: vi.fn().mockReturnValue({
-       where: vi.fn().mockResolvedValue([{ id: 'workflow-id' }]),
-     }),
-   })
-
    // Override the global mock for this specific test
    vi.doMock('@/db', () => ({
      db: {
        select: vi
@@ -143,11 +229,7 @@ describe('Workflow Deployment API Route', () => {
          .mockReturnValueOnce({
            from: vi.fn().mockReturnValue({
              where: vi.fn().mockReturnValue({
-               limit: vi.fn().mockResolvedValue([
-                 {
-                   userId: 'user-id',
-                 },
-               ]),
+               limit: vi.fn().mockResolvedValue([{ userId: 'user-id', id: 'workflow-id' }]),
              }),
            }),
          })
@@ -184,8 +266,14 @@ describe('Workflow Deployment API Route', () => {
            }),
          }),
        }),
-       insert: mockInsert,
-       update: mockUpdate,
+       insert: vi.fn().mockImplementation(() => ({
+         values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
+       })),
+       update: vi.fn().mockImplementation(() => ({
+         set: vi.fn().mockImplementation(() => ({
+           where: vi.fn().mockResolvedValue([]),
+         })),
+       })),
      },
    }))

@@ -204,9 +292,6 @@ describe('Workflow Deployment API Route', () => {
    expect(data).toHaveProperty('apiKey', 'sim_testkeygenerated12345')
    expect(data).toHaveProperty('isDeployed', true)
    expect(data).toHaveProperty('deployedAt')
-
-   expect(mockInsert).toHaveBeenCalled()
-   expect(mockUpdate).toHaveBeenCalled()
  })

  /**
@@ -214,14 +299,7 @@ describe('Workflow Deployment API Route', () => {
  * This should use the existing API key
  */
 it('should use existing API key when deploying workflow', async () => {
-   const mockInsert = vi.fn()
-
-   const mockUpdate = vi.fn().mockReturnValue({
-     set: vi.fn().mockReturnValue({
-       where: vi.fn().mockResolvedValue([{ id: 'workflow-id' }]),
-     }),
-   })
-
    // Override the global mock for this specific test
    vi.doMock('@/db', () => ({
      db: {
        select: vi
@@ -229,11 +307,7 @@ describe('Workflow Deployment API Route', () => {
          .mockReturnValueOnce({
            from: vi.fn().mockReturnValue({
              where: vi.fn().mockReturnValue({
-               limit: vi.fn().mockResolvedValue([
-                 {
-                   userId: 'user-id',
-                 },
-               ]),
+               limit: vi.fn().mockResolvedValue([{ userId: 'user-id', id: 'workflow-id' }]),
              }),
            }),
          })
@@ -266,16 +340,18 @@ describe('Workflow Deployment API Route', () => {
          .mockReturnValueOnce({
            from: vi.fn().mockReturnValue({
              where: vi.fn().mockReturnValue({
-               limit: vi.fn().mockResolvedValue([
-                 {
-                   key: 'sim_existingtestapikey12345',
-                 },
-               ]), // Existing API key
+               limit: vi.fn().mockResolvedValue([{ key: 'sim_existingtestapikey12345' }]), // Existing API key
              }),
            }),
          }),
-       insert: mockInsert,
-       update: mockUpdate,
+       insert: vi.fn().mockImplementation(() => ({
+         values: vi.fn().mockResolvedValue([{ id: 'mock-api-key-id' }]),
+       })),
+       update: vi.fn().mockImplementation(() => ({
+         set: vi.fn().mockImplementation(() => ({
+           where: vi.fn().mockResolvedValue([]),
+         })),
+       })),
      },
    }))

@@ -293,9 +369,6 @@ describe('Workflow Deployment API Route', () => {

    expect(data).toHaveProperty('apiKey', 'sim_existingtestapikey12345')
    expect(data).toHaveProperty('isDeployed', true)
-
-   expect(mockInsert).not.toHaveBeenCalled()
-   expect(mockUpdate).toHaveBeenCalled()
  })

  /**
@@ -88,6 +88,7 @@ describe('Workflow Execution API Route', () => {
  vi.doMock('@/executor', () => ({
    Executor: vi.fn().mockImplementation(() => ({
      execute: executeMock,
+     setEnhancedLogger: vi.fn(),
    })),
  }))

@@ -104,6 +105,14 @@ describe('Workflow Execution API Route', () => {
    persistExecutionError: vi.fn().mockResolvedValue(undefined),
  }))

+ vi.doMock('@/lib/logs/enhanced-execution-logger', () => ({
+   enhancedExecutionLogger: {
+     startWorkflowExecution: vi.fn().mockResolvedValue(undefined),
+     logBlockExecution: vi.fn().mockResolvedValue(undefined),
+     completeWorkflowExecution: vi.fn().mockResolvedValue(undefined),
+   },
+ }))
+
  vi.doMock('@/lib/logs/trace-spans', () => ({
    buildTraceSpans: vi.fn().mockReturnValue({
      traceSpans: [],
@@ -246,10 +255,7 @@ describe('Workflow Execution API Route', () => {
      expect.anything(), // serializedWorkflow
      expect.anything(), // processedBlockStates
      expect.anything(), // decryptedEnvVars
-     expect.objectContaining({
-       // processedInput
-       input: requestBody,
-     }),
+     requestBody, // processedInput (direct input, not wrapped)
      expect.anything() // workflowVariables
    )
  })
@@ -285,10 +291,7 @@ describe('Workflow Execution API Route', () => {
      expect.anything(), // serializedWorkflow
      expect.anything(), // processedBlockStates
      expect.anything(), // decryptedEnvVars
-     expect.objectContaining({
-       // processedInput
-       input: structuredInput,
-     }),
+     structuredInput, // processedInput (direct input, not wrapped)
      expect.anything() // workflowVariables
    )
  })
@@ -401,6 +404,7 @@ describe('Workflow Execution API Route', () => {
    vi.doMock('@/executor', () => ({
      Executor: vi.fn().mockImplementation(() => ({
        execute: vi.fn().mockRejectedValue(new Error('Execution failed')),
+       setEnhancedLogger: vi.fn(),
      })),
    }))

@@ -424,10 +428,10 @@ describe('Workflow Execution API Route', () => {
    expect(data).toHaveProperty('error')
    expect(data.error).toContain('Execution failed')

-   // Verify error logger was called
-   const persistExecutionError = (await import('@/lib/logs/execution-logger'))
-     .persistExecutionError
-   expect(persistExecutionError).toHaveBeenCalled()
+   // Verify enhanced logger was called for error completion
+   const enhancedExecutionLogger = (await import('@/lib/logs/enhanced-execution-logger'))
+     .enhancedExecutionLogger
+   expect(enhancedExecutionLogger.completeWorkflowExecution).toHaveBeenCalled()
  })

  /**
@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console-logger'
-import { persistExecutionError, persistExecutionLogs } from '@/lib/logs/execution-logger'
+import { EnhancedLoggingSession } from '@/lib/logs/enhanced-logging-session'
import { buildTraceSpans } from '@/lib/logs/trace-spans'
import { checkServerSideUsageLimits } from '@/lib/usage-monitor'
import { decryptSecret } from '@/lib/utils'
@@ -14,11 +14,10 @@ import {
  workflowHasResponseBlock,
} from '@/lib/workflows/utils'
import { db } from '@/db'
-import { environment, userStats } from '@/db/schema'
+import { environment as environmentTable, userStats } from '@/db/schema'
import { Executor } from '@/executor'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
-import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { validateWorkflowAccess } from '../../middleware'
import { createErrorResponse, createSuccessResponse } from '../../utils'

@@ -59,6 +58,8 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
    throw new Error('Execution is already running')
  }

+ const loggingSession = new EnhancedLoggingSession(workflowId, executionId, 'api', requestId)
+
  // Check if the user has exceeded their usage limits
  const usageCheck = await checkServerSideUsageLimits(workflow.userId)
  if (usageCheck.isExceeded) {
@@ -77,19 +78,12 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
    input ? JSON.stringify(input, null, 2) : 'No input provided'
  )

- // Validate and structure input for maximum compatibility
- let processedInput = input
- if (input && typeof input === 'object') {
-   // Ensure input is properly structured for the starter block
-   if (input.input === undefined) {
-     // If input is not already nested, structure it properly
-     processedInput = { input: input }
-     logger.info(
-       `[${requestId}] Restructured input for workflow:`,
-       JSON.stringify(processedInput, null, 2)
-     )
-   }
- }
+ // Use input directly for API workflows
+ const processedInput = input
+ logger.info(
+   `[${requestId}] Using input directly for workflow:`,
+   JSON.stringify(processedInput, null, 2)
+ )

  try {
    runningExecutions.add(executionKey)
@@ -99,39 +93,30 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
    logger.debug(`[${requestId}] Loading workflow ${workflowId} from normalized tables`)
    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

-   let blocks: Record<string, any>
-   let edges: any[]
-   let loops: Record<string, any>
-   let parallels: Record<string, any>
-
-   if (normalizedData) {
-     // Use normalized data as primary source
-     ;({ blocks, edges, loops, parallels } = normalizedData)
-     logger.info(`[${requestId}] Using normalized tables for workflow execution: ${workflowId}`)
-   } else {
-     // Fallback to deployed state if available (for legacy workflows)
-     logger.warn(
-       `[${requestId}] No normalized data found, falling back to deployed state for workflow: ${workflowId}`
+   if (!normalizedData) {
+     throw new Error(
+       `Workflow ${workflowId} has no normalized data available. Ensure the workflow is properly saved to normalized tables.`
      )

-     if (!workflow.deployedState) {
-       throw new Error(
-         `Workflow ${workflowId} has no deployed state and no normalized data available`
-       )
-     }
-
-     const deployedState = workflow.deployedState as WorkflowState
-     ;({ blocks, edges, loops, parallels } = deployedState)
    }

+   // Use normalized data as primary source
+   const { blocks, edges, loops, parallels } = normalizedData
+   logger.info(`[${requestId}] Using normalized tables for workflow execution: ${workflowId}`)
    logger.debug(`[${requestId}] Normalized data loaded:`, {
      blocksCount: Object.keys(blocks || {}).length,
      edgesCount: (edges || []).length,
      loopsCount: Object.keys(loops || {}).length,
      parallelsCount: Object.keys(parallels || {}).length,
    })

    // Use the same execution flow as in scheduled executions
    const mergedStates = mergeSubblockState(blocks)

    // Fetch the user's environment variables (if any)
    const [userEnv] = await db
      .select()
-     .from(environment)
-     .where(eq(environment.userId, workflow.userId))
+     .from(environmentTable)
+     .where(eq(environmentTable.userId, workflow.userId))
      .limit(1)

    if (!userEnv) {
@@ -140,9 +125,14 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
      )
    }

    // Parse and validate environment variables.
    const variables = EnvVarsSchema.parse(userEnv?.variables ?? {})

+   await loggingSession.safeStart({
+     userId: workflow.userId,
+     workspaceId: workflow.workspaceId,
+     variables,
+   })
+
    // Replace environment variables in the block states
    const currentBlockStates = await Object.entries(mergedStates).reduce(
      async (accPromise, [id, block]) => {
@@ -207,18 +197,42 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
      (acc, [blockId, blockState]) => {
        // Check if this block has a responseFormat that needs to be parsed
        if (blockState.responseFormat && typeof blockState.responseFormat === 'string') {
-         try {
-           logger.debug(`[${requestId}] Parsing responseFormat for block ${blockId}`)
-           // Attempt to parse the responseFormat if it's a string
-           const parsedResponseFormat = JSON.parse(blockState.responseFormat)
+         const responseFormatValue = blockState.responseFormat.trim()
+
+         // Check for variable references like <start.input>
+         if (responseFormatValue.startsWith('<') && responseFormatValue.includes('>')) {
+           logger.debug(
+             `[${requestId}] Response format contains variable reference for block ${blockId}`
+           )
+           // Keep variable references as-is - they will be resolved during execution
+           acc[blockId] = blockState
+         } else if (responseFormatValue === '') {
+           // Empty string - remove response format
            acc[blockId] = {
              ...blockState,
-             responseFormat: parsedResponseFormat,
+             responseFormat: undefined,
            }
+         } else {
+           try {
+             logger.debug(`[${requestId}] Parsing responseFormat for block ${blockId}`)
+             // Attempt to parse the responseFormat if it's a string
+             const parsedResponseFormat = JSON.parse(responseFormatValue)
+
+             acc[blockId] = {
+               ...blockState,
+               responseFormat: parsedResponseFormat,
+             }
+           } catch (error) {
+             logger.warn(
+               `[${requestId}] Failed to parse responseFormat for block ${blockId}, using undefined`,
+               error
+             )
+             // Set to undefined instead of keeping malformed JSON - this allows execution to continue
+             acc[blockId] = {
+               ...blockState,
+               responseFormat: undefined,
+             }
+           }
+         }
-       } catch (error) {
-         logger.warn(`[${requestId}] Failed to parse responseFormat for block ${blockId}`, error)
-         acc[blockId] = blockState
-       }
        } else {
          acc[blockId] = blockState
@@ -267,6 +281,9 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
      workflowVariables
    )

+   // Set up enhanced logging on the executor
+   loggingSession.setupExecutor(executor)
+
    const result = await executor.execute(workflowId)

    // Check if we got a StreamingExecution result (with stream + execution properties)
@@ -278,6 +295,9 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
      executionTime: executionResult.metadata?.duration,
    })

+   // Build trace spans from execution result (works for both success and failure)
+   const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
+
    // Update workflow run counts if execution was successful
    if (executionResult.success) {
      await updateWorkflowRunCounts(workflowId)
@@ -292,24 +312,26 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any) {
        .where(eq(userStats.userId, workflow.userId))
    }

-   // Build trace spans from execution logs
-   const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
-
    // Add trace spans to the execution result
    const enrichedResult = {
      ...executionResult,
      traceSpans,
      totalDuration,
    }

-   // Log each execution step and the final result
-   await persistExecutionLogs(workflowId, executionId, enrichedResult, 'api')
+   await loggingSession.safeComplete({
+     endedAt: new Date().toISOString(),
+     totalDurationMs: totalDuration || 0,
+     finalOutput: executionResult.output || {},
+     traceSpans: (traceSpans || []) as any,
+   })

    return executionResult
  } catch (error: any) {
    logger.error(`[${requestId}] Workflow execution failed: ${workflowId}`, error)
-   // Log the error
-   await persistExecutionError(workflowId, executionId, error, 'api')
+
+   await loggingSession.safeCompleteWithError({
+     endedAt: new Date().toISOString(),
+     totalDurationMs: 0,
+     error: {
+       message: error.message || 'Workflow execution failed',
+       stackTrace: error.stack,
+     },
+   })

    throw error
  } finally {
    runningExecutions.delete(executionKey)
@@ -381,13 +403,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    logger.info(`[${requestId}] No request body provided`)
  }

- // Don't double-nest the input if it's already structured
+ // Pass the raw body directly as input for API workflows
  const hasContent = Object.keys(body).length > 0
- const input = hasContent ? { input: body } : {}
+ const input = hasContent ? body : {}

  logger.info(`[${requestId}] Input passed to workflow:`, JSON.stringify(input, null, 2))

- // Execute workflow with the structured input
+ // Execute workflow with the raw input
  const result = await executeWorkflow(validation.workflow, requestId, input)

  // Check if the workflow execution contains a response block output
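The new `responseFormat` handling above splits a string value into three cases: a variable reference such as `<start.input>` is kept verbatim for resolution at execution time, an empty string clears the format, and anything else is JSON-parsed with a fallback to `undefined` so a malformed format can no longer block execution. The same decision tree restated in isolation (the helper name is hypothetical; the branch logic mirrors the diff):

```typescript
// Hypothetical standalone helper mirroring the branch logic in the hunk above.
function normalizeResponseFormat(raw: string): string | object | undefined {
  const value = raw.trim()
  if (value.startsWith('<') && value.includes('>')) {
    return value // variable reference, resolved later during execution
  }
  if (value === '') {
    return undefined // empty string removes the response format
  }
  try {
    return JSON.parse(value) // plain JSON schema
  } catch {
    return undefined // malformed JSON must not abort the run
  }
}
```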
@@ -1,7 +1,7 @@
import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console-logger'
-import { persistExecutionLogs, persistLog } from '@/lib/logs/execution-logger'
+import { EnhancedLoggingSession } from '@/lib/logs/enhanced-logging-session'
import { buildTraceSpans } from '@/lib/logs/trace-spans'
import { validateWorkflowAccess } from '../../middleware'
import { createErrorResponse, createSuccessResponse } from '../../utils'

@@ -33,9 +33,25 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
  // Check if this execution is from chat using only the explicit source flag
  const isChatExecution = result.metadata?.source === 'chat'

- // Use persistExecutionLogs which handles tool call extraction
- // Use 'chat' trigger type for chat executions, otherwise 'manual'
- await persistExecutionLogs(id, executionId, result, isChatExecution ? 'chat' : 'manual')
+ // Also log to enhanced system
+ const triggerType = isChatExecution ? 'chat' : 'manual'
+ const loggingSession = new EnhancedLoggingSession(id, executionId, triggerType, requestId)
+
+ await loggingSession.safeStart({
+   userId: '', // TODO: Get from session
+   workspaceId: '', // TODO: Get from workflow
+   variables: {},
+ })
+
+ // Build trace spans from execution logs
+ const { traceSpans } = buildTraceSpans(result)
+
+ await loggingSession.safeComplete({
+   endedAt: new Date().toISOString(),
+   totalDurationMs: result.metadata?.duration || 0,
+   finalOutput: result.output || {},
+   traceSpans,
+ })

  return createSuccessResponse({
    message: 'Execution logs persisted successfully',
@@ -52,21 +68,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    executionId,
  })

- // Persist each log using the original method
- for (const log of logs) {
-   await persistLog({
-     id: uuidv4(),
-     workflowId: id,
-     executionId,
-     level: log.level,
-     message: log.message,
-     duration: log.duration,
-     trigger: log.trigger || 'manual',
-     createdAt: new Date(log.createdAt || new Date()),
-     metadata: log.metadata,
-   })
- }

  return createSuccessResponse({ message: 'Logs persisted successfully' })
} catch (error: any) {
  logger.error(`[${requestId}] Error persisting logs for workflow: ${id}`, error)
apps/sim/app/api/workflows/[id]/revert-to-deployed/route.ts (new file, 121 lines)
@@ -0,0 +1,121 @@
import crypto from 'crypto'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
import { workflow } from '@/db/schema'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { validateWorkflowAccess } from '../../middleware'
import { createErrorResponse, createSuccessResponse } from '../../utils'

const logger = createLogger('RevertToDeployedAPI')

export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'

/**
 * POST /api/workflows/[id]/revert-to-deployed
 * Revert workflow to its deployed state by saving deployed state to normalized tables
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = crypto.randomUUID().slice(0, 8)
  const { id } = await params

  try {
    logger.debug(`[${requestId}] Reverting workflow to deployed state: ${id}`)
    const validation = await validateWorkflowAccess(request, id, false)

    if (validation.error) {
      logger.warn(`[${requestId}] Workflow revert failed: ${validation.error.message}`)
      return createErrorResponse(validation.error.message, validation.error.status)
    }

    const workflowData = validation.workflow

    // Check if workflow is deployed and has deployed state
    if (!workflowData.isDeployed || !workflowData.deployedState) {
      logger.warn(`[${requestId}] Cannot revert: workflow is not deployed or has no deployed state`)
      return createErrorResponse('Workflow is not deployed or has no deployed state', 400)
    }

    // Validate deployed state structure
    const deployedState = workflowData.deployedState as WorkflowState
    if (!deployedState.blocks || !deployedState.edges) {
      logger.error(`[${requestId}] Invalid deployed state structure`, { deployedState })
      return createErrorResponse('Invalid deployed state structure', 500)
    }

    logger.debug(`[${requestId}] Saving deployed state to normalized tables`, {
      blocksCount: Object.keys(deployedState.blocks).length,
      edgesCount: deployedState.edges.length,
      loopsCount: Object.keys(deployedState.loops || {}).length,
      parallelsCount: Object.keys(deployedState.parallels || {}).length,
    })

    // Save deployed state to normalized tables
    const saveResult = await saveWorkflowToNormalizedTables(id, {
      blocks: deployedState.blocks,
      edges: deployedState.edges,
      loops: deployedState.loops || {},
      parallels: deployedState.parallels || {},
      lastSaved: Date.now(),
      isDeployed: workflowData.isDeployed,
      deployedAt: workflowData.deployedAt,
      deploymentStatuses: deployedState.deploymentStatuses || {},
      hasActiveSchedule: deployedState.hasActiveSchedule || false,
      hasActiveWebhook: deployedState.hasActiveWebhook || false,
    })

    if (!saveResult.success) {
      logger.error(`[${requestId}] Failed to save deployed state to normalized tables`, {
        error: saveResult.error,
      })
      return createErrorResponse(
        saveResult.error || 'Failed to save deployed state to normalized tables',
        500
      )
    }

    // Update workflow's last_synced timestamp to indicate changes
    await db
      .update(workflow)
      .set({
        lastSynced: new Date(),
        updatedAt: new Date(),
      })
      .where(eq(workflow.id, id))

    // Notify socket server about the revert operation for real-time sync
    try {
      const socketServerUrl = process.env.SOCKET_SERVER_URL || 'http://localhost:3002'
      await fetch(`${socketServerUrl}/api/workflow-reverted`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          workflowId: id,
          timestamp: Date.now(),
        }),
      })
      logger.debug(`[${requestId}] Notified socket server about workflow revert: ${id}`)
    } catch (socketError) {
      // Don't fail the request if socket notification fails
      logger.warn(`[${requestId}] Failed to notify socket server about revert:`, socketError)
    }

    logger.info(`[${requestId}] Successfully reverted workflow to deployed state: ${id}`)

    return createSuccessResponse({
      message: 'Workflow successfully reverted to deployed state',
      lastSaved: Date.now(),
    })
  } catch (error: any) {
    logger.error(`[${requestId}] Error reverting workflow to deployed state: ${id}`, {
      error: error.message,
      stack: error.stack,
    })
    return createErrorResponse(error.message || 'Failed to revert workflow to deployed state', 500)
  }
}
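A sketch of how a client might invoke this new endpoint. The URL shape follows the route path above; authentication is whatever `validateWorkflowAccess` accepts (session cookie or API key), which is not shown here, and the response fields come from the `createSuccessResponse` call in the route:

```typescript
// Sketch: calling the revert-to-deployed endpoint added above.
async function revertToDeployed(workflowId: string): Promise<void> {
  const res = await fetch(`/api/workflows/${workflowId}/revert-to-deployed`, {
    method: 'POST',
  })
  if (!res.ok) {
    const { error } = await res.json()
    throw new Error(error ?? `Revert failed with status ${res.status}`)
  }
  // On success the route returns { message, lastSaved }.
  const { lastSaved } = await res.json()
  console.log(`Reverted; lastSaved=${lastSaved}`)
}
```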
@@ -274,14 +274,6 @@ describe('Workflow By ID API Route', () => {
    }),
  }))

- const mockTransaction = vi.fn().mockImplementation(async (callback) => {
-   await callback({
-     delete: vi.fn().mockReturnValue({
-       where: vi.fn().mockResolvedValue(undefined),
-     }),
-   })
- })
-
  vi.doMock('@/db', () => ({
    db: {
      select: vi.fn().mockReturnValue({
@@ -291,7 +283,9 @@ describe('Workflow By ID API Route', () => {
          }),
        }),
      }),
-     transaction: mockTransaction,
+     delete: vi.fn().mockReturnValue({
+       where: vi.fn().mockResolvedValue(undefined),
+     }),
    },
  }))

@@ -326,14 +320,6 @@ describe('Workflow By ID API Route', () => {
    }),
  }))

- const mockTransaction = vi.fn().mockImplementation(async (callback) => {
-   await callback({
-     delete: vi.fn().mockReturnValue({
-       where: vi.fn().mockResolvedValue(undefined),
-     }),
-   })
- })
-
  vi.doMock('@/db', () => ({
    db: {
      select: vi.fn().mockReturnValue({
@@ -343,7 +329,9 @@ describe('Workflow By ID API Route', () => {
          }),
        }),
      }),
-     transaction: mockTransaction,
+     delete: vi.fn().mockReturnValue({
+       where: vi.fn().mockResolvedValue(undefined),
+     }),
    },
  }))
@@ -2,11 +2,13 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
+import { verifyInternalToken } from '@/lib/auth/internal'
+import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { getUserEntityPermissions, hasAdminPermission } from '@/lib/permissions/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
-import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
+import { workflow } from '@/db/schema'

const logger = createLogger('WorkflowByIdAPI')

@@ -28,14 +30,29 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
  const { id: workflowId } = await params

  try {
-   // Get the session
-   const session = await getSession()
-   if (!session?.user?.id) {
-     logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
-     return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+   // Check for internal JWT token for server-side calls
+   const authHeader = request.headers.get('authorization')
+   let isInternalCall = false
+
+   if (authHeader?.startsWith('Bearer ')) {
+     const token = authHeader.split(' ')[1]
+     isInternalCall = await verifyInternalToken(token)
    }

-   const userId = session.user.id
+   let userId: string | null = null
+
+   if (isInternalCall) {
+     // For internal calls, we'll skip user-specific access checks
+     logger.info(`[${requestId}] Internal API call for workflow ${workflowId}`)
+   } else {
+     // Get the session for regular user calls
+     const session = await getSession()
+     if (!session?.user?.id) {
+       logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
+       return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+     }
+     userId = session.user.id
+   }

    // Fetch the workflow
    const workflowData = await db
@@ -52,26 +69,31 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
    // Check if user has access to this workflow
    let hasAccess = false

-   // Case 1: User owns the workflow
-   if (workflowData.userId === userId) {
+   if (isInternalCall) {
+     // Internal calls have full access
      hasAccess = true
-   }

-   // Case 2: Workflow belongs to a workspace the user has permissions for
-   if (!hasAccess && workflowData.workspaceId) {
-     const userPermission = await getUserEntityPermissions(
-       userId,
-       'workspace',
-       workflowData.workspaceId
-     )
-     if (userPermission !== null) {
+   } else {
+     // Case 1: User owns the workflow
+     if (workflowData.userId === userId) {
        hasAccess = true
      }
-   }

-   if (!hasAccess) {
-     logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
-     return NextResponse.json({ error: 'Access denied' }, { status: 403 })
+     // Case 2: Workflow belongs to a workspace the user has permissions for
+     if (!hasAccess && workflowData.workspaceId && userId) {
+       const userPermission = await getUserEntityPermissions(
+         userId,
+         'workspace',
+         workflowData.workspaceId
+       )
+       if (userPermission !== null) {
+         hasAccess = true
+       }
+     }
+
+     if (!hasAccess) {
+       logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
+       return NextResponse.json({ error: 'Access denied' }, { status: 403 })
+     }
    }

    // Try to load from normalized tables first
@@ -185,16 +207,7 @@ export async function DELETE(
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }

-   // Delete workflow and all related data in a transaction
-   await db.transaction(async (tx) => {
-     // Delete from normalized tables first (foreign key constraints)
-     await tx.delete(workflowSubflows).where(eq(workflowSubflows.workflowId, workflowId))
-     await tx.delete(workflowEdges).where(eq(workflowEdges.workflowId, workflowId))
-     await tx.delete(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId))
-
-     // Delete the main workflow record
-     await tx.delete(workflow).where(eq(workflow.id, workflowId))
-   })
+   await db.delete(workflow).where(eq(workflow.id, workflowId))

    const elapsed = Date.now() - startTime
    logger.info(`[${requestId}] Successfully deleted workflow ${workflowId} in ${elapsed}ms`)
@@ -203,7 +216,7 @@ export async function DELETE(
    // This prevents "Block not found" errors when collaborative updates try to process
    // after the workflow has been deleted
    try {
-     const socketUrl = process.env.SOCKET_SERVER_URL || 'http://localhost:3002'
+     const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
      const socketResponse = await fetch(`${socketUrl}/api/workflow-deleted`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
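With this change the GET handler accepts an internal JWT before falling back to session auth. A sketch of a server-side caller under that assumption; only `verifyInternalToken` appears in the diff, so the token-minting helper below is hypothetical:

```typescript
// Sketch: server-side fetch using the internal-token path added above.
// generateInternalToken is hypothetical; the diff only shows the verify side.
declare function generateInternalToken(): Promise<string>

async function fetchWorkflowInternally(workflowId: string) {
  const token = await generateInternalToken()
  const res = await fetch(`/api/workflows/${workflowId}`, {
    headers: { authorization: `Bearer ${token}` },
  })
  if (!res.ok) throw new Error(`Workflow fetch failed: ${res.status}`)
  return res.json()
}
```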
@@ -2,13 +2,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'
-import {
- workflow,
- workflowBlocks,
- workflowEdges,
- workflowSubflows,
- workspaceMember,
-} from '@/db/schema'
+import { workflow, workspaceMember } from '@/db/schema'

const logger = createLogger('WorkspaceByIdAPI')

@@ -26,9 +20,9 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{

  const workspaceId = id

- // Check if user has read access to this workspace
+ // Check if user has any access to this workspace
  const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
- if (userPermission !== 'read') {
+ if (!userPermission) {
    return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
  }

@@ -126,20 +120,10 @@ export async function DELETE(

  // Delete workspace and all related data in a transaction
  await db.transaction(async (tx) => {
-   // Get all workflows in this workspace
-   const workspaceWorkflows = await tx
-     .select({ id: workflow.id })
-     .from(workflow)
-     .where(eq(workflow.workspaceId, workspaceId))
-
-   // Delete all workflow-related data for each workflow
-   for (const wf of workspaceWorkflows) {
-     await tx.delete(workflowSubflows).where(eq(workflowSubflows.workflowId, wf.id))
-     await tx.delete(workflowEdges).where(eq(workflowEdges.workflowId, wf.id))
-     await tx.delete(workflowBlocks).where(eq(workflowBlocks.workflowId, wf.id))
-   }
-
-   // Delete all workflows in the workspace
+   // Delete all workflows in the workspace - database cascade will handle all workflow-related data
+   // The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows,
+   // workflow_logs, workflow_execution_snapshots, workflow_execution_logs, workflow_execution_trace_spans,
+   // workflow_schedule, webhook, marketplace, chat, and memory records
    await tx.delete(workflow).where(eq(workflow.workspaceId, workspaceId))

    // Delete workspace members
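The rewritten delete path leans on `ON DELETE CASCADE` instead of hand-ordered child-table deletes. For that to hold, the child tables must declare cascading foreign keys; a drizzle-orm sketch of what such a declaration looks like (illustrative column set, not the project's actual schema):

```typescript
// Illustrative drizzle-orm schema: child rows are removed automatically
// by the database when the parent workflow row is deleted.
import { pgTable, text } from 'drizzle-orm/pg-core'

export const workflow = pgTable('workflow', {
  id: text('id').primaryKey(),
})

export const workflowBlocks = pgTable('workflow_blocks', {
  id: text('id').primaryKey(),
  workflowId: text('workflow_id')
    .notNull()
    .references(() => workflow.id, { onDelete: 'cascade' }),
})
```

Pushing the ordering problem into the database also removes the need for the application-level transaction to enumerate every dependent table, which is what made the deleted loop above fragile.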
@@ -60,7 +60,7 @@ export async function GET(req: NextRequest) {

  return NextResponse.json({ invitations })
} catch (error) {
- console.error('Error fetching workspace invitations:', error)
+ logger.error('Error fetching workspace invitations:', error)
  return NextResponse.json({ error: 'Failed to fetch invitations' }, { status: 500 })
}
}
@@ -204,7 +204,7 @@ export async function POST(req: NextRequest) {

  return NextResponse.json({ success: true, invitation: invitationData })
} catch (error) {
- console.error('Error creating workspace invitation:', error)
+ logger.error('Error creating workspace invitation:', error)
  return NextResponse.json({ error: 'Failed to create invitation' }, { status: 500 })
}
}
@@ -252,9 +252,9 @@ async function sendInvitationEmail({
    html: emailHtml,
  })

- console.log(`Invitation email sent to ${to}`)
+ logger.info(`Invitation email sent to ${to}`)
} catch (error) {
- console.error('Error sending invitation email:', error)
+ logger.error('Error sending invitation email:', error)
  // Continue even if email fails - the invitation is still created
}
}
@@ -104,7 +104,7 @@ async function createWorkspace(userId: string, name: string) {
    updatedAt: now,
  })

- // Create "Workflow 1" for the workspace with start block
+ // Create initial workflow for the workspace with start block
  const starterId = crypto.randomUUID()
  const initialState = {
    blocks: {
@@ -170,7 +170,7 @@ async function createWorkspace(userId: string, name: string) {
    userId,
    workspaceId,
    folderId: null,
-   name: 'Workflow 1',
+   name: 'default-agent',
    description: 'Your first workflow - start building here!',
    state: initialState,
    color: '#3972F6',
@@ -33,6 +33,7 @@ interface ChatConfig {
    headerText?: string
  }
  authType?: 'public' | 'password' | 'email'
+ outputConfigs?: Array<{ blockId: string; path?: string }>
}

interface AudioStreamingOptions {
@@ -297,7 +298,7 @@ export default function ChatClient({ subdomain }: { subdomain: string }) {
  try {
    // Send structured payload to maintain chat context
    const payload = {
-     message:
+     input:
        typeof userMessage.content === 'string'
          ? userMessage.content
          : JSON.stringify(userMessage.content),
@@ -373,8 +374,16 @@ export default function ChatClient({ subdomain }: { subdomain: string }) {
      const json = JSON.parse(line.substring(6))
      const { blockId, chunk: contentChunk, event: eventType } = json

-     if (eventType === 'final') {
+     if (eventType === 'final' && json.data) {
        setIsLoading(false)

+       // Process final execution result for field extraction
+       const result = json.data
+       const nonStreamingLogs =
+         result.logs?.filter((log: any) => !messageIdMap.has(log.blockId)) || []
+
+       // Chat field extraction will be handled by the backend using deployment outputConfigs
+
        return
      }
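The `message` → `input` rename above aligns the chat payload with the field the execution route now reads directly, with no `{ input: ... }` wrapping on the server side. An example request body under that assumption; the endpoint path and any fields beyond `input` are illustrative, since the diff shows only the rename:

```typescript
// Illustrative only: the URL is an assumption; the diff above shows just
// the payload field rename from `message` to `input`.
const payload = {
  input: 'Hello from the chat client',
}

await fetch('/api/chat/my-subdomain', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(payload),
})
```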
@@ -1,7 +1,7 @@
'use client'

import { useEffect, useState } from 'react'
-import { AlertCircle, Loader2, X } from 'lucide-react'
+import { AlertCircle, ChevronDown, ChevronUp, Loader2, X } from 'lucide-react'
import {
  AlertDialog,
  AlertDialogAction,
@@ -16,6 +16,7 @@ import { Button } from '@/components/ui/button'
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
import { Label } from '@/components/ui/label'
import { Textarea } from '@/components/ui/textarea'
+import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console-logger'
import type { ChunkData, DocumentData } from '@/stores/knowledge/store'

@@ -28,6 +29,12 @@ interface EditChunkModalProps {
  isOpen: boolean
  onClose: () => void
  onChunkUpdate?: (updatedChunk: ChunkData) => void
+ // New props for navigation
+ allChunks?: ChunkData[]
+ currentPage?: number
+ totalPages?: number
+ onNavigateToChunk?: (chunk: ChunkData) => void
+ onNavigateToPage?: (page: number, selectChunk: 'first' | 'last') => Promise<void>
}

export function EditChunkModal({
@@ -37,11 +44,18 @@ export function EditChunkModal({
  isOpen,
  onClose,
  onChunkUpdate,
+ allChunks = [],
+ currentPage = 1,
+ totalPages = 1,
+ onNavigateToChunk,
+ onNavigateToPage,
}: EditChunkModalProps) {
  const [editedContent, setEditedContent] = useState(chunk?.content || '')
  const [isSaving, setIsSaving] = useState(false)
+ const [isNavigating, setIsNavigating] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
+ const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)

  // Check if there are unsaved changes
  const hasUnsavedChanges = editedContent !== (chunk?.content || '')
@@ -53,6 +67,13 @@ export function EditChunkModal({
    }
  }, [chunk?.id, chunk?.content])

+ // Find current chunk index in the current page
+ const currentChunkIndex = chunk ? allChunks.findIndex((c) => c.id === chunk.id) : -1
+
+ // Calculate navigation availability
+ const canNavigatePrev = currentChunkIndex > 0 || currentPage > 1
+ const canNavigateNext = currentChunkIndex < allChunks.length - 1 || currentPage < totalPages
+
  const handleSaveContent = async () => {
    if (!chunk || !document) return

@@ -82,7 +103,6 @@ export function EditChunkModal({

    if (result.success && onChunkUpdate) {
      onChunkUpdate(result.data)
-     onClose()
    }
  } catch (err) {
    logger.error('Error updating chunk:', err)
@@ -92,8 +112,51 @@ export function EditChunkModal({
    }
  }

+ const navigateToChunk = async (direction: 'prev' | 'next') => {
+   if (!chunk || isNavigating) return
+
+   try {
+     setIsNavigating(true)
+
+     if (direction === 'prev') {
+       if (currentChunkIndex > 0) {
+         // Navigate to previous chunk in current page
+         const prevChunk = allChunks[currentChunkIndex - 1]
+         onNavigateToChunk?.(prevChunk)
+       } else if (currentPage > 1) {
+         // Load previous page and navigate to last chunk
+         await onNavigateToPage?.(currentPage - 1, 'last')
+       }
+     } else {
+       if (currentChunkIndex < allChunks.length - 1) {
+         // Navigate to next chunk in current page
+         const nextChunk = allChunks[currentChunkIndex + 1]
+         onNavigateToChunk?.(nextChunk)
+       } else if (currentPage < totalPages) {
+         // Load next page and navigate to first chunk
+         await onNavigateToPage?.(currentPage + 1, 'first')
+       }
+     }
+   } catch (err) {
+     logger.error(`Error navigating ${direction}:`, err)
+     setError(`Failed to navigate to ${direction === 'prev' ? 'previous' : 'next'} chunk`)
+   } finally {
+     setIsNavigating(false)
+   }
+ }
+
+ const handleNavigate = (direction: 'prev' | 'next') => {
+   if (hasUnsavedChanges) {
+     setPendingNavigation(() => () => navigateToChunk(direction))
+     setShowUnsavedChangesAlert(true)
+   } else {
+     void navigateToChunk(direction)
+   }
+ }
+
  const handleCloseAttempt = () => {
    if (hasUnsavedChanges && !isSaving) {
+     setPendingNavigation(null)
      setShowUnsavedChangesAlert(true)
    } else {
      onClose()
@@ -102,7 +165,12 @@ export function EditChunkModal({

  const handleConfirmDiscard = () => {
    setShowUnsavedChangesAlert(false)
-   onClose()
+   if (pendingNavigation) {
+     void pendingNavigation()
+     setPendingNavigation(null)
+   } else {
+     onClose()
+   }
  }

  const isFormValid = editedContent.trim().length > 0 && editedContent.trim().length <= 10000
@@ -118,7 +186,59 @@ export function EditChunkModal({
      >
        <DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
          <div className='flex items-center justify-between'>
-           <DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
+           <div className='flex items-center gap-3'>
+             <DialogTitle className='font-medium text-lg'>Edit Chunk</DialogTitle>
+
+             {/* Navigation Controls */}
+             <div className='flex items-center gap-1'>
+               <Tooltip>
+                 <TooltipTrigger
+                   asChild
+                   onFocus={(e) => e.preventDefault()}
+                   onBlur={(e) => e.preventDefault()}
+                 >
+                   <Button
+                     variant='ghost'
+                     size='sm'
+                     onClick={() => handleNavigate('prev')}
+                     disabled={!canNavigatePrev || isNavigating || isSaving}
+                     className='h-8 w-8 p-0'
+                   >
+                     <ChevronUp className='h-4 w-4' />
+                   </Button>
+                 </TooltipTrigger>
+                 <TooltipContent side='bottom'>
+                   Previous chunk{' '}
+                   {currentPage > 1 && currentChunkIndex === 0 ? '(previous page)' : ''}
+                 </TooltipContent>
+               </Tooltip>
+
+               <Tooltip>
+                 <TooltipTrigger
+                   asChild
+                   onFocus={(e) => e.preventDefault()}
+                   onBlur={(e) => e.preventDefault()}
+                 >
+                   <Button
+                     variant='ghost'
+                     size='sm'
+                     onClick={() => handleNavigate('next')}
+                     disabled={!canNavigateNext || isNavigating || isSaving}
+                     className='h-8 w-8 p-0'
+                   >
+                     <ChevronDown className='h-4 w-4' />
+                   </Button>
+                 </TooltipTrigger>
+                 <TooltipContent side='bottom'>
+                   Next chunk{' '}
+                   {currentPage < totalPages && currentChunkIndex === allChunks.length - 1
+                     ? '(next page)'
+                     : ''}
+                 </TooltipContent>
+               </Tooltip>
+             </div>
+           </div>

            <Button
              variant='ghost'
              size='icon'
@@ -142,7 +262,7 @@ export function EditChunkModal({
              {document?.filename || 'Unknown Document'}
            </p>
            <p className='text-muted-foreground text-xs'>
-             Editing chunk #{chunk.chunkIndex}
+             Editing chunk #{chunk.chunkIndex} • Page {currentPage} of {totalPages}
            </p>
          </div>
        </div>
@@ -167,7 +287,7 @@ export function EditChunkModal({
          onChange={(e) => setEditedContent(e.target.value)}
          placeholder='Enter chunk content...'
          className='flex-1 resize-none'
-         disabled={isSaving}
+         disabled={isSaving || isNavigating}
        />
      </div>
    </div>
@@ -176,12 +296,16 @@ export function EditChunkModal({
    {/* Footer */}
    <div className='mt-auto border-t px-6 pt-4 pb-6'>
      <div className='flex justify-between'>
-       <Button variant='outline' onClick={handleCloseAttempt} disabled={isSaving}>
+       <Button
+         variant='outline'
+         onClick={handleCloseAttempt}
+         disabled={isSaving || isNavigating}
+       >
          Cancel
        </Button>
        <Button
          onClick={handleSaveContent}
-         disabled={!isFormValid || isSaving || !hasUnsavedChanges}
+         disabled={!isFormValid || isSaving || !hasUnsavedChanges || isNavigating}
          className='bg-[#701FFC] font-[480] text-primary-foreground shadow-[0_0_0_0_#701FFC] transition-all duration-200 hover:bg-[#6518E6] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
        >
          {isSaving ? (
@@ -205,12 +329,19 @@ export function EditChunkModal({
      <AlertDialogHeader>
        <AlertDialogTitle>Unsaved Changes</AlertDialogTitle>
        <AlertDialogDescription>
-         You have unsaved changes to this chunk content. Are you sure you want to discard your
-         changes and close the editor?
+         You have unsaved changes to this chunk content.
+         {pendingNavigation
+           ? ' Do you want to discard your changes and navigate to the next chunk?'
+           : ' Are you sure you want to discard your changes and close the editor?'}
        </AlertDialogDescription>
      </AlertDialogHeader>
      <AlertDialogFooter>
-       <AlertDialogCancel onClick={() => setShowUnsavedChangesAlert(false)}>
+       <AlertDialogCancel
+         onClick={() => {
+           setShowUnsavedChangesAlert(false)
+           setPendingNavigation(null)
+         }}
+       >
          Keep Editing
        </AlertDialogCancel>
        <AlertDialogAction
@@ -767,6 +767,30 @@ export function Document({
          updateChunk(updatedChunk.id, updatedChunk)
          setSelectedChunk(updatedChunk)
        }}
+       allChunks={chunks}
+       currentPage={currentPage}
+       totalPages={totalPages}
+       onNavigateToChunk={(chunk: ChunkData) => {
+         setSelectedChunk(chunk)
+       }}
+       onNavigateToPage={async (page: number, selectChunk: 'first' | 'last') => {
+         await goToPage(page)
+
+         const checkAndSelectChunk = () => {
+           if (!isLoadingChunks && chunks.length > 0) {
+             if (selectChunk === 'first') {
+               setSelectedChunk(chunks[0])
+             } else {
+               setSelectedChunk(chunks[chunks.length - 1])
+             }
+           } else {
+             // Retry after a short delay if chunks aren't loaded yet
+             setTimeout(checkAndSelectChunk, 100)
+           }
+         }
+
+         setTimeout(checkAndSelectChunk, 0)
+       }}
      />

      {/* Create Chunk Modal */}
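The `onNavigateToPage` callback above polls until the next page's chunks have loaded before selecting one. The same retry idea as a reusable helper, as a minimal sketch (the `waitFor` name, interval, and cancellation handling are illustrative, not part of this diff):

```typescript
// Sketch: poll a condition until it holds, then fire a callback once.
// Mirrors the checkAndSelectChunk retry loop above (0ms first check, 100ms retries).
function waitFor(condition: () => boolean, onReady: () => void, intervalMs = 100): () => void {
  let timer: ReturnType<typeof setTimeout>

  const tick = () => {
    if (condition()) {
      onReady()
    } else {
      timer = setTimeout(tick, intervalMs) // chunks not loaded yet; retry shortly
    }
  }

  timer = setTimeout(tick, 0)
  return () => clearTimeout(timer) // caller can cancel, e.g. on unmount
}

// Usage, analogous to the callback above:
// const cancel = waitFor(
//   () => !isLoadingChunks && chunks.length > 0,
//   () => setSelectedChunk(chunks[0])
// )
```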
@@ -36,16 +36,11 @@ import { useKnowledgeBase, useKnowledgeBaseDocuments } from '@/hooks/use-knowled
import { type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
import { useSidebarStore } from '@/stores/sidebar/store'
import { KnowledgeHeader } from '../components/knowledge-header/knowledge-header'
+import { useKnowledgeUpload } from '../hooks/use-knowledge-upload'
import { KnowledgeBaseLoading } from './components/knowledge-base-loading/knowledge-base-loading'

const logger = createLogger('KnowledgeBase')

-interface ProcessedDocumentResponse {
-  documentId: string
-  filename: string
-  status: string
-}
-
interface KnowledgeBaseProps {
  id: string
  knowledgeBaseName?: string
@@ -145,17 +140,32 @@ export function KnowledgeBase({
  const [showDeleteDialog, setShowDeleteDialog] = useState(false)
  const [isDeleting, setIsDeleting] = useState(false)
  const [isBulkOperating, setIsBulkOperating] = useState(false)
-  const [isUploading, setIsUploading] = useState(false)
-  const [uploadError, setUploadError] = useState<{
-    message: string
-    timestamp: number
-  } | null>(null)
-  const [uploadProgress, setUploadProgress] = useState<{
-    stage: 'idle' | 'uploading' | 'processing' | 'completing'
-    filesCompleted: number
-    totalFiles: number
-    currentFile?: string
-  }>({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
+
+  const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
+    onUploadComplete: async (uploadedFiles) => {
+      const pendingDocuments: DocumentData[] = uploadedFiles.map((file, index) => ({
+        id: `temp-${Date.now()}-${index}`,
+        knowledgeBaseId: id,
+        filename: file.filename,
+        fileUrl: file.fileUrl,
+        fileSize: file.fileSize,
+        mimeType: file.mimeType,
+        chunkCount: 0,
+        tokenCount: 0,
+        characterCount: 0,
+        processingStatus: 'pending' as const,
+        processingStartedAt: null,
+        processingCompletedAt: null,
+        processingError: null,
+        enabled: true,
+        uploadedAt: new Date().toISOString(),
+      }))
+
+      useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
+
+      await refreshDocuments()
+    },
+  })
  const router = useRouter()
  const fileInputRef = useRef<HTMLInputElement>(null)
@@ -240,11 +250,11 @@ export function KnowledgeBase({
  useEffect(() => {
    if (uploadError) {
      const timer = setTimeout(() => {
-       setUploadError(null)
+       clearError()
      }, 8000)
      return () => clearTimeout(timer)
    }
- }, [uploadError])
+ }, [uploadError, clearError])

  // Filter documents based on search query
  const filteredDocuments = documents.filter((doc) =>
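The reworked effect keeps the 8-second auto-dismiss but delegates the reset to the hook's `clearError`, and adds it to the dependency array so a stale closure is never invoked. The pattern in isolation, as a hedged sketch (the hook name is illustrative; `dismiss` should be referentially stable, e.g. memoized, to avoid re-arming the timer every render):

```typescript
import { useEffect } from 'react'

// Sketch: auto-dismiss a transient error after a delay, with cleanup on change/unmount.
function useAutoDismiss(error: unknown, dismiss: () => void, delayMs = 8000) {
  useEffect(() => {
    if (!error) return
    const timer = setTimeout(dismiss, delayMs)
    return () => clearTimeout(timer) // cancel if the error changes or the component unmounts
  }, [error, dismiss, delayMs])
}
```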
@@ -448,153 +458,18 @@ export function KnowledgeBase({
    const files = e.target.files
    if (!files || files.length === 0) return

-   interface UploadedFile {
-     filename: string
-     fileUrl: string
-     fileSize: number
-     mimeType: string
-   }
-
    try {
-     setIsUploading(true)
-     setUploadError(null)
-     setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })
-
-     // Upload all files and start processing
-     const uploadedFiles: UploadedFile[] = []
-     const fileArray = Array.from(files)
-
-     for (const [index, file] of fileArray.entries()) {
-       setUploadProgress((prev) => ({ ...prev, currentFile: file.name, filesCompleted: index }))
-       const formData = new FormData()
-       formData.append('file', file)
-
-       const uploadResponse = await fetch('/api/files/upload', {
-         method: 'POST',
-         body: formData,
-       })
-
-       if (!uploadResponse.ok) {
-         const errorData = await uploadResponse.json()
-         throw new Error(`Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`)
-       }
-
-       const uploadResult = await uploadResponse.json()
-
-       // Validate upload result structure
-       if (!uploadResult.path) {
-         throw new Error(`Invalid upload response for ${file.name}: missing file path`)
-       }
-
-       uploadedFiles.push({
-         filename: file.name,
-         fileUrl: uploadResult.path.startsWith('http')
-           ? uploadResult.path
-           : `${window.location.origin}${uploadResult.path}`,
-         fileSize: file.size,
-         mimeType: file.type,
-       })
-     }
-
-     setUploadProgress((prev) => ({
-       ...prev,
-       stage: 'processing',
-       filesCompleted: fileArray.length,
-     }))
-
-     // Start async document processing
-     const processResponse = await fetch(`/api/knowledge/${id}/documents`, {
-       method: 'POST',
-       headers: {
-         'Content-Type': 'application/json',
-       },
-       body: JSON.stringify({
-         documents: uploadedFiles,
-         processingOptions: {
-           chunkSize: knowledgeBase?.chunkingConfig?.maxSize || 1024,
-           minCharactersPerChunk: knowledgeBase?.chunkingConfig?.minSize || 100,
-           chunkOverlap: knowledgeBase?.chunkingConfig?.overlap || 200,
-           recipe: 'default',
-           lang: 'en',
-         },
-         bulk: true,
-       }),
+     const chunkingConfig = knowledgeBase?.chunkingConfig
+     await uploadFiles(Array.from(files), id, {
+       chunkSize: chunkingConfig?.maxSize || 1024,
+       minCharactersPerChunk: chunkingConfig?.minSize || 100,
+       chunkOverlap: chunkingConfig?.overlap || 200,
+       recipe: 'default',
      })
-
-     if (!processResponse.ok) {
-       const errorData = await processResponse.json()
-       throw new Error(
-         `Failed to start document processing: ${errorData.error || 'Unknown error'}`
-       )
-     }
-
-     const processResult = await processResponse.json()
-
-     // Validate process result structure
-     if (!processResult.success) {
-       throw new Error(`Document processing failed: ${processResult.error || 'Unknown error'}`)
-     }
-
-     if (!processResult.data || !processResult.data.documentsCreated) {
-       throw new Error('Invalid processing response: missing document data')
-     }
-
-     // Create pending document objects and add them to the store immediately
-     const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
-       (doc: ProcessedDocumentResponse, index: number) => {
-         if (!doc.documentId || !doc.filename) {
-           logger.error(`Invalid document data received:`, doc)
-           throw new Error(
-             `Invalid document data for ${uploadedFiles[index]?.filename || 'unknown file'}`
-           )
-         }
-
-         return {
-           id: doc.documentId,
-           knowledgeBaseId: id,
-           filename: doc.filename,
-           fileUrl: uploadedFiles[index].fileUrl,
-           fileSize: uploadedFiles[index].fileSize,
-           mimeType: uploadedFiles[index].mimeType,
-           chunkCount: 0,
-           tokenCount: 0,
-           characterCount: 0,
-           processingStatus: 'pending' as const,
-           processingStartedAt: null,
-           processingCompletedAt: null,
-           processingError: null,
-           enabled: true,
-           uploadedAt: new Date().toISOString(),
-         }
-       }
-     )
-
-     // Add pending documents to store for immediate UI update
-     useKnowledgeStore.getState().addPendingDocuments(id, pendingDocuments)
-
-     logger.info(`Successfully started processing ${uploadedFiles.length} documents`)
-
-     setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))
-
-     // Trigger a refresh to ensure documents are properly loaded
-     await refreshDocuments()
-
-     setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
-   } catch (err) {
-     logger.error('Error uploading documents:', err)
-
-     const errorMessage =
-       err instanceof Error ? err.message : 'Unknown error occurred during upload'
-     setUploadError({
-       message: errorMessage,
-       timestamp: Date.now(),
-     })
-
-     // Show user-friendly error message in console for debugging
-     console.error('Document upload failed:', errorMessage)
+   } catch (error) {
+     logger.error('Error uploading files:', error)
+     // Error handling is managed by the upload hook
    } finally {
-     setIsUploading(false)
-     setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
      // Reset the file input
      if (fileInputRef.current) {
        fileInputRef.current.value = ''
@@ -995,7 +870,7 @@ export function KnowledgeBase({
        </tr>
      ))
    ) : (
-     filteredDocuments.map((doc, index) => {
+     filteredDocuments.map((doc) => {
        const isSelected = selectedDocuments.has(doc.id)
        const statusDisplay = getStatusDisplay(doc)
        // const processingTime = getProcessingTime(doc)
@@ -1254,7 +1129,7 @@ export function KnowledgeBase({
      </p>
    </div>
    <button
-     onClick={() => setUploadError(null)}
+     onClick={() => clearError()}
      className='flex-shrink-0 rounded-sm opacity-70 hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring'
    >
      <X className='h-4 w-4' />
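With this change, the handler above (and the CreateModal below) reduce to a single `uploadFiles` call; any omitted processing option falls back to the hook's defaults. A sketch of the effective option resolution, mirroring the `||` defaults in the hook's processing payload further down:

```typescript
// Sketch: how omitted options resolve, per the hook's defaulting (lang is fixed).
interface ProcessingOptions {
  chunkSize?: number
  minCharactersPerChunk?: number
  chunkOverlap?: number
  recipe?: string
}

function resolveProcessingOptions(opts: ProcessingOptions = {}) {
  return {
    chunkSize: opts.chunkSize || 1024,
    minCharactersPerChunk: opts.minCharactersPerChunk || 100,
    chunkOverlap: opts.chunkOverlap || 200,
    recipe: opts.recipe || 'default',
    lang: 'en', // hardcoded in the payload; not exposed as an option
  }
}

// resolveProcessingOptions({ chunkSize: 512 })
// -> { chunkSize: 512, minCharactersPerChunk: 100, chunkOverlap: 200, recipe: 'default', lang: 'en' }
```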
@@ -13,8 +13,8 @@ import { Label } from '@/components/ui/label'
import { Textarea } from '@/components/ui/textarea'
import { createLogger } from '@/lib/logs/console-logger'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
-import type { DocumentData, KnowledgeBaseData } from '@/stores/knowledge/store'
-import { useKnowledgeStore } from '@/stores/knowledge/store'
+import type { KnowledgeBaseData } from '@/stores/knowledge/store'
+import { useKnowledgeUpload } from '../../hooks/use-knowledge-upload'

const logger = createLogger('CreateModal')

@@ -29,12 +29,6 @@ const ACCEPTED_FILE_TYPES = [
  'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
]

-interface ProcessedDocumentResponse {
-  documentId: string
-  filename: string
-  status: string
-}
-
interface FileWithPreview extends File {
  preview: string
}
@@ -89,6 +83,12 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
  const scrollContainerRef = useRef<HTMLDivElement>(null)
  const dropZoneRef = useRef<HTMLDivElement>(null)

+ const { uploadFiles } = useKnowledgeUpload({
+   onUploadComplete: (uploadedFiles) => {
+     logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
+   },
+ })
+
  // Cleanup file preview URLs when component unmounts to prevent memory leaks
  useEffect(() => {
    return () => {
@@ -235,19 +235,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
    return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
  }

- // Helper function to create uploadedFiles array from file uploads
- const createUploadedFile = (
-   filename: string,
-   fileUrl: string,
-   fileSize: number,
-   mimeType: string
- ) => ({
-   filename,
-   fileUrl: fileUrl.startsWith('http') ? fileUrl : `${window.location.origin}${fileUrl}`,
-   fileSize,
-   mimeType,
- })
-
  const onSubmit = async (data: FormValues) => {
    setIsSubmitting(true)
    setSubmitStatus(null)
@@ -285,138 +272,14 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea

    const newKnowledgeBase = result.data

    // If files are uploaded, upload them and start processing
    if (files.length > 0) {
-     // First, upload all files to get their URLs
-     interface UploadedFile {
-       filename: string
-       fileUrl: string
-       fileSize: number
-       mimeType: string
-     }
-
-     const uploadedFiles: UploadedFile[] = []
-
-     for (const file of files) {
-       try {
-         const presignedResponse = await fetch('/api/files/presigned', {
-           method: 'POST',
-           headers: {
-             'Content-Type': 'application/json',
-           },
-           body: JSON.stringify({
-             fileName: file.name,
-             contentType: file.type,
-             fileSize: file.size,
-           }),
-         })
-
-         const presignedData = await presignedResponse.json()
-
-         if (presignedResponse.ok && presignedData.directUploadSupported) {
-           const uploadHeaders: Record<string, string> = {
-             'Content-Type': file.type,
-           }
-
-           // Add Azure-specific headers if provided
-           if (presignedData.uploadHeaders) {
-             Object.assign(uploadHeaders, presignedData.uploadHeaders)
-           }
-
-           const uploadResponse = await fetch(presignedData.presignedUrl, {
-             method: 'PUT',
-             headers: uploadHeaders, // Use the merged headers
-             body: file,
-           })
-
-           if (!uploadResponse.ok) {
-             throw new Error(
-               `Direct upload failed: ${uploadResponse.status} ${uploadResponse.statusText}`
-             )
-           }
-
-           uploadedFiles.push(
-             createUploadedFile(file.name, presignedData.fileInfo.path, file.size, file.type)
-           )
-         } else {
-           const formData = new FormData()
-           formData.append('file', file)
-
-           const uploadResponse = await fetch('/api/files/upload', {
-             method: 'POST',
-             body: formData,
-           })
-
-           if (!uploadResponse.ok) {
-             const errorData = await uploadResponse.json()
-             throw new Error(
-               `Failed to upload ${file.name}: ${errorData.error || 'Unknown error'}`
-             )
-           }
-
-           const uploadResult = await uploadResponse.json()
-           uploadedFiles.push(
-             createUploadedFile(file.name, uploadResult.path, file.size, file.type)
-           )
-         }
-       } catch (error) {
-         throw new Error(
-           `Failed to upload ${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`
-         )
-       }
-     }
-
-     // Start async document processing
-     const processResponse = await fetch(`/api/knowledge/${newKnowledgeBase.id}/documents`, {
-       method: 'POST',
-       headers: {
-         'Content-Type': 'application/json',
-       },
-       body: JSON.stringify({
-         documents: uploadedFiles,
-         processingOptions: {
-           chunkSize: data.maxChunkSize,
-           minCharactersPerChunk: data.minChunkSize,
-           chunkOverlap: data.overlapSize,
-           recipe: 'default',
-           lang: 'en',
-         },
-         bulk: true,
-       }),
+     const uploadedFiles = await uploadFiles(files, newKnowledgeBase.id, {
+       chunkSize: data.maxChunkSize,
+       minCharactersPerChunk: data.minChunkSize,
+       chunkOverlap: data.overlapSize,
+       recipe: 'default',
      })

-     if (!processResponse.ok) {
-       throw new Error('Failed to start document processing')
-     }
-
-     const processResult = await processResponse.json()
-
-     // Create pending document objects and add them to the store immediately
-     if (processResult.success && processResult.data.documentsCreated) {
-       const pendingDocuments: DocumentData[] = processResult.data.documentsCreated.map(
-         (doc: ProcessedDocumentResponse, index: number) => ({
-           id: doc.documentId,
-           knowledgeBaseId: newKnowledgeBase.id,
-           filename: doc.filename,
-           fileUrl: uploadedFiles[index].fileUrl,
-           fileSize: uploadedFiles[index].fileSize,
-           mimeType: uploadedFiles[index].mimeType,
-           chunkCount: 0,
-           tokenCount: 0,
-           characterCount: 0,
-           processingStatus: 'pending' as const,
-           processingStartedAt: null,
-           processingCompletedAt: null,
-           processingError: null,
-           enabled: true,
-           uploadedAt: new Date().toISOString(),
-         })
-       )
-
-       // Add pending documents to store for immediate UI update
-       useKnowledgeStore.getState().addPendingDocuments(newKnowledgeBase.id, pendingDocuments)
-     }
-
      // Update the knowledge base object with the correct document count
      newKnowledgeBase.docCount = uploadedFiles.length
@@ -0,0 +1,352 @@
import { useState } from 'react'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('KnowledgeUpload')

export interface UploadedFile {
  filename: string
  fileUrl: string
  fileSize: number
  mimeType: string
}

export interface UploadProgress {
  stage: 'idle' | 'uploading' | 'processing' | 'completing'
  filesCompleted: number
  totalFiles: number
  currentFile?: string
}

export interface UploadError {
  message: string
  timestamp: number
  code?: string
  details?: any
}

export interface ProcessingOptions {
  chunkSize?: number
  minCharactersPerChunk?: number
  chunkOverlap?: number
  recipe?: string
}

export interface UseKnowledgeUploadOptions {
  onUploadComplete?: (uploadedFiles: UploadedFile[]) => void
  onError?: (error: UploadError) => void
}

class KnowledgeUploadError extends Error {
  constructor(
    message: string,
    public code: string,
    public details?: any
  ) {
    super(message)
    this.name = 'KnowledgeUploadError'
  }
}

class PresignedUrlError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PRESIGNED_URL_ERROR', details)
  }
}

class DirectUploadError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'DIRECT_UPLOAD_ERROR', details)
  }
}

class ProcessingError extends KnowledgeUploadError {
  constructor(message: string, details?: any) {
    super(message, 'PROCESSING_ERROR', details)
  }
}
export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
  const [isUploading, setIsUploading] = useState(false)
  const [uploadProgress, setUploadProgress] = useState<UploadProgress>({
    stage: 'idle',
    filesCompleted: 0,
    totalFiles: 0,
  })
  const [uploadError, setUploadError] = useState<UploadError | null>(null)

  const createUploadedFile = (
    filename: string,
    fileUrl: string,
    fileSize: number,
    mimeType: string
  ): UploadedFile => ({
    filename,
    fileUrl,
    fileSize,
    mimeType,
  })

  const createErrorFromException = (error: unknown, defaultMessage: string): UploadError => {
    if (error instanceof KnowledgeUploadError) {
      return {
        message: error.message,
        code: error.code,
        details: error.details,
        timestamp: Date.now(),
      }
    }

    if (error instanceof Error) {
      return {
        message: error.message,
        timestamp: Date.now(),
      }
    }

    return {
      message: defaultMessage,
      timestamp: Date.now(),
    }
  }
  const uploadFiles = async (
    files: File[],
    knowledgeBaseId: string,
    processingOptions: ProcessingOptions = {}
  ): Promise<UploadedFile[]> => {
    if (files.length === 0) {
      throw new KnowledgeUploadError('No files provided for upload', 'NO_FILES')
    }

    if (!knowledgeBaseId?.trim()) {
      throw new KnowledgeUploadError('Knowledge base ID is required', 'INVALID_KB_ID')
    }

    try {
      setIsUploading(true)
      setUploadError(null)
      setUploadProgress({ stage: 'uploading', filesCompleted: 0, totalFiles: files.length })

      const uploadedFiles: UploadedFile[] = []

      // Upload all files using presigned URLs
      for (const [index, file] of files.entries()) {
        setUploadProgress((prev) => ({
          ...prev,
          currentFile: file.name,
          filesCompleted: index,
        }))

        try {
          // Get presigned URL
          const presignedResponse = await fetch('/api/files/presigned?type=knowledge-base', {
            method: 'POST',
            headers: {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({
              fileName: file.name,
              contentType: file.type,
              fileSize: file.size,
            }),
          })

          if (!presignedResponse.ok) {
            let errorDetails: any = null
            try {
              errorDetails = await presignedResponse.json()
            } catch {
              // Ignore JSON parsing errors
            }

            throw new PresignedUrlError(
              `Failed to get presigned URL for ${file.name}: ${presignedResponse.status} ${presignedResponse.statusText}`,
              errorDetails
            )
          }

          const presignedData = await presignedResponse.json()

          if (presignedData.directUploadSupported) {
            // Use presigned URL for direct upload
            const uploadHeaders: Record<string, string> = {
              'Content-Type': file.type,
            }

            // Add Azure-specific headers if provided
            if (presignedData.uploadHeaders) {
              Object.assign(uploadHeaders, presignedData.uploadHeaders)
            }

            const uploadResponse = await fetch(presignedData.presignedUrl, {
              method: 'PUT',
              headers: uploadHeaders,
              body: file,
            })

            if (!uploadResponse.ok) {
              throw new DirectUploadError(
                `Direct upload failed for ${file.name}: ${uploadResponse.status} ${uploadResponse.statusText}`,
                { uploadResponse: uploadResponse.statusText }
              )
            }

            // Convert relative path to full URL for schema validation
            const fullFileUrl = presignedData.fileInfo.path.startsWith('http')
              ? presignedData.fileInfo.path
              : `${window.location.origin}${presignedData.fileInfo.path}`

            uploadedFiles.push(createUploadedFile(file.name, fullFileUrl, file.size, file.type))
          } else {
            // Fallback to traditional upload through API route
            const formData = new FormData()
            formData.append('file', file)

            const uploadResponse = await fetch('/api/files/upload', {
              method: 'POST',
              body: formData,
            })

            if (!uploadResponse.ok) {
              let errorData: any = null
              try {
                errorData = await uploadResponse.json()
              } catch {
                // Ignore JSON parsing errors
              }

              throw new DirectUploadError(
                `Failed to upload ${file.name}: ${errorData?.error || 'Unknown error'}`,
                errorData
              )
            }

            const uploadResult = await uploadResponse.json()

            // Validate upload result structure
            if (!uploadResult.path) {
              throw new DirectUploadError(
                `Invalid upload response for ${file.name}: missing file path`,
                uploadResult
              )
            }

            uploadedFiles.push(
              createUploadedFile(
                file.name,
                uploadResult.path.startsWith('http')
                  ? uploadResult.path
                  : `${window.location.origin}${uploadResult.path}`,
                file.size,
                file.type
              )
            )
          }
        } catch (fileError) {
          logger.error(`Error uploading file ${file.name}:`, fileError)
          throw fileError // Re-throw to be caught by outer try-catch
        }
      }
      setUploadProgress((prev) => ({ ...prev, stage: 'processing' }))

      // Start async document processing
      const processPayload = {
        documents: uploadedFiles,
        processingOptions: {
          chunkSize: processingOptions.chunkSize || 1024,
          minCharactersPerChunk: processingOptions.minCharactersPerChunk || 100,
          chunkOverlap: processingOptions.chunkOverlap || 200,
          recipe: processingOptions.recipe || 'default',
          lang: 'en',
        },
        bulk: true,
      }

      const processResponse = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(processPayload),
      })

      if (!processResponse.ok) {
        let errorData: any = null
        try {
          errorData = await processResponse.json()
        } catch {
          // Ignore JSON parsing errors
        }

        logger.error('Document processing failed:', {
          status: processResponse.status,
          error: errorData,
          uploadedFiles: uploadedFiles.map((f) => ({
            filename: f.filename,
            fileUrl: f.fileUrl,
            fileSize: f.fileSize,
            mimeType: f.mimeType,
          })),
        })

        throw new ProcessingError(
          `Failed to start document processing: ${errorData?.error || errorData?.message || 'Unknown error'}`,
          errorData
        )
      }

      const processResult = await processResponse.json()

      // Validate process result structure
      if (!processResult.success) {
        throw new ProcessingError(
          `Document processing failed: ${processResult.error || 'Unknown error'}`,
          processResult
        )
      }

      if (!processResult.data || !processResult.data.documentsCreated) {
        throw new ProcessingError(
          'Invalid processing response: missing document data',
          processResult
        )
      }

      setUploadProgress((prev) => ({ ...prev, stage: 'completing' }))

      logger.info(`Successfully started processing ${uploadedFiles.length} documents`)

      // Call success callback
      options.onUploadComplete?.(uploadedFiles)

      return uploadedFiles
    } catch (err) {
      logger.error('Error uploading documents:', err)

      const error = createErrorFromException(err, 'Unknown error occurred during upload')
      setUploadError(error)
      options.onError?.(error)

      // Show user-friendly error message in console for debugging
      console.error('Document upload failed:', error.message)

      throw err
    } finally {
      setIsUploading(false)
      setUploadProgress({ stage: 'idle', filesCompleted: 0, totalFiles: 0 })
    }
  }

  const clearError = () => {
    setUploadError(null)
  }

  return {
    isUploading,
    uploadProgress,
    uploadError,
    uploadFiles,
    clearError,
  }
}
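Taken together, the hook exposes a small surface: `uploadFiles` plus read-only `isUploading`, `uploadProgress`, and `uploadError` state. A minimal consumer sketch (component and handler names are illustrative; the error classes are module-private in the diff, so callers branch on the `code` field of `uploadError` rather than on `instanceof`):

```typescript
import { useKnowledgeUpload } from '../hooks/use-knowledge-upload'

// Sketch: minimal consumer of the upload hook.
function useDocumentUploader(knowledgeBaseId: string) {
  const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
    onUploadComplete: (files) => console.log(`started processing ${files.length} documents`),
  })

  const handleFiles = async (files: File[]) => {
    try {
      await uploadFiles(files, knowledgeBaseId, { chunkSize: 1024, chunkOverlap: 200 })
    } catch {
      // uploadError is already populated by the hook; inspect uploadError?.code
      // ('PRESIGNED_URL_ERROR' | 'DIRECT_UPLOAD_ERROR' | 'PROCESSING_ERROR' | ...)
      // to choose a user-facing message, then offer clearError() as the dismiss action.
    }
  }

  return { isUploading, uploadProgress, uploadError, handleFiles, clearError }
}
```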
@@ -36,7 +36,7 @@ export function ControlBar() {
  const fetchLogs = async () => {
    try {
      const queryParams = buildQueryParams(1, 50) // Get first 50 logs for refresh
-     const response = await fetch(`/api/logs?${queryParams}`)
+     const response = await fetch(`/api/logs/enhanced?${queryParams}`)

      if (!response.ok) {
        throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -0,0 +1,99 @@
'use client'

import { useState } from 'react'
import { Eye, Maximize2, Minimize2, X } from 'lucide-react'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
import { cn } from '@/lib/utils'
import { FrozenCanvas } from './frozen-canvas'

interface FrozenCanvasModalProps {
  executionId: string
  workflowName?: string
  trigger?: string
  traceSpans?: any[] // TraceSpans data from log metadata
  isOpen: boolean
  onClose: () => void
}

export function FrozenCanvasModal({
  executionId,
  workflowName,
  trigger,
  traceSpans,
  isOpen,
  onClose,
}: FrozenCanvasModalProps) {
  const [isFullscreen, setIsFullscreen] = useState(false)

  const toggleFullscreen = () => {
    setIsFullscreen(!isFullscreen)
  }

  return (
    <Dialog open={isOpen} onOpenChange={onClose}>
      <DialogContent
        className={cn(
          'flex flex-col gap-0 p-0',
          isFullscreen
            ? 'h-[100vh] max-h-[100vh] w-[100vw] max-w-[100vw] rounded-none'
            : 'h-[90vh] max-h-[90vh] overflow-hidden sm:max-w-[1100px]'
        )}
        hideCloseButton={true}
      >
        {/* Header */}
        <DialogHeader className='flex flex-row items-center justify-between border-b bg-background p-4'>
          <div className='flex items-center gap-3'>
            <Eye className='h-5 w-5 text-blue-500 dark:text-blue-400' />
            <div>
              <DialogTitle className='font-semibold text-foreground text-lg'>
                Logged Workflow State
              </DialogTitle>
              <div className='mt-1 flex items-center gap-2'>
                {workflowName && (
                  <span className='text-muted-foreground text-sm'>{workflowName}</span>
                )}
                {trigger && (
                  <Badge variant='secondary' className='text-xs'>
                    {trigger}
                  </Badge>
                )}
                <span className='font-mono text-muted-foreground text-xs'>
                  {executionId.slice(0, 8)}...
                </span>
              </div>
            </div>
          </div>

          <div className='flex items-center gap-2'>
            <Button variant='ghost' size='sm' onClick={toggleFullscreen} className='h-8 w-8 p-0'>
              {isFullscreen ? <Minimize2 className='h-4 w-4' /> : <Maximize2 className='h-4 w-4' />}
            </Button>
            <Button variant='ghost' size='sm' onClick={onClose} className='h-8 w-8 p-0'>
              <X className='h-4 w-4' />
            </Button>
          </div>
        </DialogHeader>

        {/* Canvas Container */}
        <div className='min-h-0 flex-1'>
          <FrozenCanvas
            executionId={executionId}
            traceSpans={traceSpans}
            height='100%'
            width='100%'
          />
        </div>

        {/* Footer with instructions */}
        <div className='border-t bg-background px-6 py-3'>
          <div className='text-muted-foreground text-sm'>
            💡 Click on blocks to see their input and output data at execution time. This canvas
            shows the exact state of the workflow when this execution was captured.
          </div>
        </div>
      </DialogContent>
    </Dialog>
  )
}
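A hedged usage sketch for the modal: the parent owns the open flag and passes the log's execution metadata through (prop names as defined above; the surrounding row component is illustrative):

```tsx
import { useState } from 'react'
import { FrozenCanvasModal } from './frozen-canvas-modal'

// Sketch: parent component controlling the modal's open state.
export function LogRow({ executionId, traceSpans }: { executionId: string; traceSpans?: any[] }) {
  const [open, setOpen] = useState(false)

  return (
    <>
      <button onClick={() => setOpen(true)}>View Frozen Canvas</button>
      <FrozenCanvasModal
        executionId={executionId}
        traceSpans={traceSpans}
        isOpen={open}
        onClose={() => setOpen(false)}
      />
    </>
  )
}
```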
@@ -0,0 +1,467 @@
'use client'

import { useEffect, useState } from 'react'
import {
  AlertCircle,
  ChevronLeft,
  ChevronRight,
  Clock,
  DollarSign,
  Hash,
  Loader2,
  X,
  Zap,
} from 'lucide-react'
import { Badge } from '@/components/ui/badge'
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card'
import { createLogger } from '@/lib/logs/console-logger'
import { cn, redactApiKeys } from '@/lib/utils'
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/workflow-preview/workflow-preview'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

const logger = createLogger('FrozenCanvas')

function formatExecutionData(executionData: any) {
  const {
    inputData,
    outputData,
    cost,
    tokens,
    durationMs,
    status,
    blockName,
    blockType,
    errorMessage,
    errorStackTrace,
  } = executionData

  return {
    blockName: blockName || 'Unknown Block',
    blockType: blockType || 'unknown',
    status,
    duration: durationMs ? `${durationMs}ms` : 'N/A',
    input: redactApiKeys(inputData || {}),
    output: redactApiKeys(outputData || {}),
    errorMessage,
    errorStackTrace,
    cost: cost
      ? {
          input: cost.input || 0,
          output: cost.output || 0,
          total: cost.total || 0,
        }
      : null,
    tokens: tokens
      ? {
          prompt: tokens.prompt || 0,
          completion: tokens.completion || 0,
          total: tokens.total || 0,
        }
      : null,
  }
}
function getCurrentIterationData(blockExecutionData: any) {
  if (blockExecutionData.iterations && Array.isArray(blockExecutionData.iterations)) {
    const currentIndex = blockExecutionData.currentIteration ?? 0
    return {
      executionData: blockExecutionData.iterations[currentIndex],
      currentIteration: currentIndex,
      totalIterations: blockExecutionData.totalIterations ?? blockExecutionData.iterations.length,
      hasMultipleIterations: blockExecutionData.iterations.length > 1,
    }
  }

  return {
    executionData: blockExecutionData,
    currentIteration: 0,
    totalIterations: 1,
    hasMultipleIterations: false,
  }
}
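In other words, a block that recorded multiple iterations resolves to the span at `currentIteration`, while a plain execution record falls through unchanged. A short worked example under those assumptions (shapes are illustrative):

```typescript
// Worked example for getCurrentIterationData.
const looped = {
  iterations: [{ status: 'success' }, { status: 'error' }],
  currentIteration: 1,
}
// getCurrentIterationData(looped) ->
//   { executionData: { status: 'error' }, currentIteration: 1,
//     totalIterations: 2, hasMultipleIterations: true }

const single = { status: 'success', durationMs: 42 }
// getCurrentIterationData(single) ->
//   { executionData: single, currentIteration: 0,
//     totalIterations: 1, hasMultipleIterations: false }
```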
function PinnedLogs({ executionData, onClose }: { executionData: any; onClose: () => void }) {
  const [currentIterationIndex, setCurrentIterationIndex] = useState(0)

  const iterationInfo = getCurrentIterationData({
    ...executionData,
    currentIteration: currentIterationIndex,
  })

  const formatted = formatExecutionData(iterationInfo.executionData)

  const totalIterations = executionData.iterations?.length || 1

  const goToPreviousIteration = () => {
    if (currentIterationIndex > 0) {
      setCurrentIterationIndex(currentIterationIndex - 1)
    }
  }

  const goToNextIteration = () => {
    if (currentIterationIndex < totalIterations - 1) {
      setCurrentIterationIndex(currentIterationIndex + 1)
    }
  }

  useEffect(() => {
    setCurrentIterationIndex(0)
  }, [executionData])

  return (
    <Card className='fixed top-4 right-4 z-[100] max-h-[calc(100vh-8rem)] w-96 overflow-y-auto border-border bg-background shadow-lg'>
      <CardHeader className='pb-3'>
        <div className='flex items-center justify-between'>
          <CardTitle className='flex items-center gap-2 text-foreground text-lg'>
            <Zap className='h-5 w-5' />
            {formatted.blockName}
          </CardTitle>
          <button onClick={onClose} className='rounded-sm p-1 text-foreground hover:bg-muted'>
            <X className='h-4 w-4' />
          </button>
        </div>
        <div className='flex items-center justify-between'>
          <div className='flex items-center gap-2'>
            <Badge variant={formatted.status === 'success' ? 'default' : 'destructive'}>
              {formatted.blockType}
            </Badge>
            <Badge variant='outline'>{formatted.status}</Badge>
          </div>

          {/* Iteration Navigation */}
          {iterationInfo.hasMultipleIterations && (
            <div className='flex items-center gap-1'>
              <button
                onClick={goToPreviousIteration}
                disabled={currentIterationIndex === 0}
                className='rounded p-1 text-muted-foreground hover:bg-muted hover:text-foreground disabled:cursor-not-allowed disabled:opacity-50'
              >
                <ChevronLeft className='h-4 w-4' />
              </button>
              <span className='px-2 text-muted-foreground text-xs'>
                {currentIterationIndex + 1} / {iterationInfo.totalIterations}
              </span>
              <button
                onClick={goToNextIteration}
                disabled={currentIterationIndex === totalIterations - 1}
                className='rounded p-1 text-muted-foreground hover:bg-muted hover:text-foreground disabled:cursor-not-allowed disabled:opacity-50'
              >
                <ChevronRight className='h-4 w-4' />
              </button>
            </div>
          )}
        </div>
      </CardHeader>

      <CardContent className='space-y-4'>
        <div className='grid grid-cols-2 gap-4'>
          <div className='flex items-center gap-2'>
            <Clock className='h-4 w-4 text-muted-foreground' />
            <span className='text-foreground text-sm'>{formatted.duration}</span>
          </div>

          {formatted.cost && (
            <div className='flex items-center gap-2'>
              <DollarSign className='h-4 w-4 text-muted-foreground' />
              <span className='text-foreground text-sm'>${formatted.cost.total.toFixed(5)}</span>
            </div>
          )}

          {formatted.tokens && (
            <div className='flex items-center gap-2'>
              <Hash className='h-4 w-4 text-muted-foreground' />
              <span className='text-foreground text-sm'>{formatted.tokens.total} tokens</span>
            </div>
          )}
        </div>

        <div>
          <h4 className='mb-2 font-medium text-foreground text-sm'>Input</h4>
          <div className='max-h-32 overflow-y-auto rounded bg-muted p-3 font-mono text-xs'>
            <pre className='text-foreground'>{JSON.stringify(formatted.input, null, 2)}</pre>
          </div>
        </div>

        <div>
          <h4 className='mb-2 font-medium text-foreground text-sm'>Output</h4>
          <div className='max-h-32 overflow-y-auto rounded bg-muted p-3 font-mono text-xs'>
            <pre className='text-foreground'>{JSON.stringify(formatted.output, null, 2)}</pre>
          </div>
        </div>

        {formatted.cost && (
          <div>
            <h4 className='mb-2 font-medium text-foreground text-sm'>Cost Breakdown</h4>
            <div className='space-y-1 text-sm'>
              <div className='flex justify-between text-foreground'>
                <span>Input:</span>
                <span>${formatted.cost.input.toFixed(5)}</span>
              </div>
              <div className='flex justify-between text-foreground'>
                <span>Output:</span>
                <span>${formatted.cost.output.toFixed(5)}</span>
              </div>
              <div className='flex justify-between border-border border-t pt-1 font-medium text-foreground'>
                <span>Total:</span>
                <span>${formatted.cost.total.toFixed(5)}</span>
              </div>
            </div>
          </div>
        )}

        {formatted.tokens && (
          <div>
            <h4 className='mb-2 font-medium text-foreground text-sm'>Token Usage</h4>
            <div className='space-y-1 text-sm'>
              <div className='flex justify-between text-foreground'>
                <span>Prompt:</span>
                <span>{formatted.tokens.prompt}</span>
              </div>
              <div className='flex justify-between text-foreground'>
                <span>Completion:</span>
                <span>{formatted.tokens.completion}</span>
              </div>
              <div className='flex justify-between border-border border-t pt-1 font-medium text-foreground'>
                <span>Total:</span>
                <span>{formatted.tokens.total}</span>
              </div>
            </div>
          </div>
        )}
      </CardContent>
    </Card>
  )
}
interface FrozenCanvasData {
  executionId: string
  workflowId: string
  workflowState: WorkflowState
  executionMetadata: {
    trigger: string
    startedAt: string
    endedAt?: string
    totalDurationMs?: number
    blockStats: {
      total: number
      success: number
      error: number
      skipped: number
    }
    cost: {
      total: number | null
      input: number | null
      output: number | null
    }
    totalTokens: number | null
  }
}

interface FrozenCanvasProps {
  executionId: string
  traceSpans?: any[]
  className?: string
  height?: string | number
  width?: string | number
}
export function FrozenCanvas({
  executionId,
  traceSpans,
  className,
  height = '100%',
  width = '100%',
}: FrozenCanvasProps) {
  const [data, setData] = useState<FrozenCanvasData | null>(null)
  const [blockExecutions, setBlockExecutions] = useState<Record<string, any>>({})
  const [loading, setLoading] = useState(true)
  const [error, setError] = useState<string | null>(null)

  const [pinnedBlockId, setPinnedBlockId] = useState<string | null>(null)

  // Process traceSpans to create blockExecutions map
  useEffect(() => {
    if (traceSpans && Array.isArray(traceSpans)) {
      const blockExecutionMap: Record<string, any> = {}

      const workflowSpan = traceSpans[0]
      if (workflowSpan?.children && Array.isArray(workflowSpan.children)) {
        const traceSpansByBlockId = workflowSpan.children.reduce((acc: any, span: any) => {
          if (span.blockId) {
            if (!acc[span.blockId]) {
              acc[span.blockId] = []
            }
            acc[span.blockId].push(span)
          }
          return acc
        }, {})

        for (const [blockId, spans] of Object.entries(traceSpansByBlockId)) {
          const spanArray = spans as any[]

          const iterations = spanArray.map((span: any) => {
            // Extract error information from span output if status is error
            let errorMessage = null
            let errorStackTrace = null

            if (span.status === 'error' && span.output) {
              // Error information can be in different formats in the output
              if (typeof span.output === 'string') {
                errorMessage = span.output
              } else if (span.output.error) {
                errorMessage = span.output.error
                errorStackTrace = span.output.stackTrace || span.output.stack
              } else if (span.output.message) {
                errorMessage = span.output.message
                errorStackTrace = span.output.stackTrace || span.output.stack
              } else {
                // Fallback: stringify the entire output for error cases
                errorMessage = JSON.stringify(span.output)
              }
            }

            return {
              id: span.id,
              blockId: span.blockId,
              blockName: span.name,
              blockType: span.type,
              status: span.status,
              startedAt: span.startTime,
              endedAt: span.endTime,
              durationMs: span.duration,
              inputData: span.input,
              outputData: span.output,
              errorMessage,
              errorStackTrace,
              cost: span.cost || {
                input: null,
                output: null,
                total: null,
              },
              tokens: span.tokens || {
                prompt: null,
                completion: null,
                total: null,
              },
              modelUsed: span.model || null,
              metadata: {},
            }
          })

          blockExecutionMap[blockId] = {
            iterations,
            currentIteration: 0,
            totalIterations: iterations.length,
          }
        }
      }

      setBlockExecutions(blockExecutionMap)
    }
  }, [traceSpans])
  useEffect(() => {
    const fetchData = async () => {
      try {
        setLoading(true)
        setError(null)

        const response = await fetch(`/api/logs/${executionId}/frozen-canvas`)
        if (!response.ok) {
          throw new Error(`Failed to fetch frozen canvas data: ${response.statusText}`)
        }

        const result = await response.json()
        setData(result)
        logger.debug(`Loaded frozen canvas data for execution: ${executionId}`)
      } catch (err) {
        const errorMessage = err instanceof Error ? err.message : 'Unknown error'
        logger.error('Failed to fetch frozen canvas data:', err)
        setError(errorMessage)
      } finally {
        setLoading(false)
      }
    }

    fetchData()
  }, [executionId])

  // No need to create a temporary workflow - just use the workflowState directly

  if (loading) {
    return (
      <div className={cn('flex items-center justify-center', className)} style={{ height, width }}>
        <div className='flex items-center gap-2 text-muted-foreground'>
          <Loader2 className='h-5 w-5 animate-spin' />
          <span>Loading frozen canvas...</span>
        </div>
      </div>
    )
  }

  if (error) {
    return (
      <div className={cn('flex items-center justify-center', className)} style={{ height, width }}>
        <div className='flex items-center gap-2 text-destructive'>
          <AlertCircle className='h-5 w-5' />
          <span>Failed to load frozen canvas: {error}</span>
        </div>
      </div>
    )
  }

  if (!data) {
    return (
      <div className={cn('flex items-center justify-center', className)} style={{ height, width }}>
        <div className='text-muted-foreground'>No data available</div>
      </div>
    )
  }

  // Check if this is a migrated log without real workflow state
  const isMigratedLog = (data.workflowState as any)?._migrated === true
  if (isMigratedLog) {
    return (
      <div
        className={cn('flex flex-col items-center justify-center gap-4 p-8', className)}
        style={{ height, width }}
      >
        <div className='flex items-center gap-3 text-amber-600 dark:text-amber-400'>
          <AlertCircle className='h-6 w-6' />
          <span className='font-medium text-lg'>Logged State Not Found</span>
        </div>
        <div className='max-w-md text-center text-muted-foreground text-sm'>
          This log was migrated from the old logging system. The workflow state at execution time is
          not available.
        </div>
        <div className='text-muted-foreground text-xs'>
          Note: {(data.workflowState as any)?._note}
        </div>
      </div>
    )
  }

  return (
    <>
      <div style={{ height, width }} className={cn('frozen-canvas-mode h-full w-full', className)}>
        <WorkflowPreview
          workflowState={data.workflowState}
          showSubBlocks={true}
          isPannable={true}
          onNodeClick={(blockId) => {
            if (blockExecutions[blockId]) {
              setPinnedBlockId(blockId)
            }
          }}
        />
      </div>

      {pinnedBlockId && blockExecutions[pinnedBlockId] && (
        <PinnedLogs
          executionData={blockExecutions[pinnedBlockId]}
          onClose={() => setPinnedBlockId(null)}
        />
      )}
    </>
  )
}
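The `traceSpans` effect above is essentially a group-by on `blockId` followed by per-span normalization. The grouping step in isolation, as a hedged sketch (span shape reduced to the one field the grouping needs):

```typescript
// Sketch: group a workflow span's children by blockId -- the first step of the
// blockExecutions map built in the effect above. Spans without a blockId are skipped.
interface TraceSpanLike {
  blockId?: string
  [key: string]: unknown
}

function groupSpansByBlock(spans: TraceSpanLike[]): Record<string, TraceSpanLike[]> {
  return spans.reduce<Record<string, TraceSpanLike[]>>((acc, span) => {
    if (span.blockId) {
      ;(acc[span.blockId] ??= []).push(span)
    }
    return acc
  }, {})
}
```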
@@ -0,0 +1,2 @@
export { FrozenCanvas } from './frozen-canvas'
export { FrozenCanvasModal } from './frozen-canvas-modal'
@@ -1,7 +1,7 @@
'use client'

import { useEffect, useMemo, useRef, useState } from 'react'
-import { ChevronDown, ChevronUp, X } from 'lucide-react'
+import { ChevronDown, ChevronUp, Eye, X } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { CopyButton } from '@/components/ui/copy-button'
import { ScrollArea } from '@/components/ui/scroll-area'
@@ -10,6 +10,7 @@ import { redactApiKeys } from '@/lib/utils'
import type { WorkflowLog } from '@/app/workspace/[workspaceId]/logs/stores/types'
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils/format-date'
import { formatCost } from '@/providers/utils'
+import { FrozenCanvasModal } from '../frozen-canvas/frozen-canvas-modal'
import { ToolCallsDisplay } from '../tool-calls/tool-calls-display'
import { TraceSpansDisplay } from '../trace-spans/trace-spans-display'
import LogMarkdownRenderer from './components/markdown-renderer'
@@ -153,7 +154,7 @@ const BlockContentDisplay = ({
  <>
    <CopyButton text={redactedOutput} className='z-10 h-7 w-7' />
    {isJson ? (
-     <pre className='w-full overflow-visible whitespace-pre-wrap break-all text-sm'>
+     <pre className='w-full overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-all text-sm'>
        {redactedOutput}
      </pre>
    ) : (
@@ -166,7 +167,7 @@ const BlockContentDisplay = ({
      text={JSON.stringify(redactedBlockInput, null, 2)}
      className='z-10 h-7 w-7'
    />
-   <pre className='w-full overflow-visible whitespace-pre-wrap break-all text-sm'>
+   <pre className='w-full overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-all text-sm'>
      {JSON.stringify(redactedBlockInput, null, 2)}
    </pre>
  </>
@@ -193,6 +194,8 @@ export function Sidebar({
  const [isDragging, setIsDragging] = useState(false)
  const [_currentLogId, setCurrentLogId] = useState<string | null>(null)
  const [isTraceExpanded, setIsTraceExpanded] = useState(false)
+ const [isModelsExpanded, setIsModelsExpanded] = useState(false)
+ const [isFrozenCanvasOpen, setIsFrozenCanvasOpen] = useState(false)
  const scrollAreaRef = useRef<HTMLDivElement>(null)

  // Update currentLogId when log changes
@@ -238,22 +241,26 @@ export function Sidebar({
  // Determine if this is a workflow execution log
  const isWorkflowExecutionLog = useMemo(() => {
    if (!log) return false
-   // Check if message contains "workflow executed" or similar phrases
+   // Check if message contains workflow execution phrases (success or failure)
    return (
      log.message.toLowerCase().includes('workflow executed') ||
      log.message.toLowerCase().includes('execution completed') ||
-     (log.trigger === 'manual' && log.duration)
+     log.message.toLowerCase().includes('workflow execution failed') ||
+     log.message.toLowerCase().includes('execution failed') ||
+     (log.trigger === 'manual' && log.duration) ||
+     // Also check if we have enhanced logging metadata with trace spans
+     (log.metadata?.enhanced && log.metadata?.traceSpans)
    )
  }, [log])

  // Helper to determine if we have trace spans to display
  const _hasTraceSpans = useMemo(() => {
    return !!(log?.metadata?.traceSpans && log.metadata.traceSpans.length > 0)
  }, [log])

  // Helper to determine if we have cost information to display
  const hasCostInfo = useMemo(() => {
-   return !!(log?.metadata?.cost && (log.metadata.cost.input || log.metadata.cost.output))
+   return !!(
+     log?.metadata?.cost &&
+     ((log.metadata.cost.input && log.metadata.cost.input > 0) ||
+       (log.metadata.cost.output && log.metadata.cost.output > 0) ||
+       (log.metadata.cost.total && log.metadata.cost.total > 0))
+   )
  }, [log])

  const isWorkflowWithCost = useMemo(() => {
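The reworked `hasCostInfo` treats a log as having cost data only when some figure is strictly positive, so zero-cost executions no longer render a cost section. The predicate in isolation (cost shape inferred from usage; a hedged sketch, not an export from the diff):

```typescript
// Sketch: show the cost section only when at least one component is > 0.
interface CostMeta {
  input?: number | null
  output?: number | null
  total?: number | null
}

function hasPositiveCost(cost?: CostMeta | null): boolean {
  if (!cost) return false
  return (cost.input ?? 0) > 0 || (cost.output ?? 0) > 0 || (cost.total ?? 0) > 0
}
```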
@@ -487,6 +494,103 @@ export function Sidebar({
              </div>
            )}

+           {/* Enhanced Stats - only show for enhanced logs */}
+           {log.metadata?.enhanced && log.metadata?.blockStats && (
+             <div>
+               <h3 className='mb-1 font-medium text-muted-foreground text-xs'>
+                 Block Execution Stats
+               </h3>
+               <div className='space-y-1 text-sm'>
+                 <div className='flex justify-between'>
+                   <span>Total Blocks:</span>
+                   <span className='font-medium'>{log.metadata.blockStats.total}</span>
+                 </div>
+                 <div className='flex justify-between'>
+                   <span>Successful:</span>
+                   <span className='font-medium text-green-600'>
+                     {log.metadata.blockStats.success}
+                   </span>
+                 </div>
+                 {log.metadata.blockStats.error > 0 && (
+                   <div className='flex justify-between'>
+                     <span>Failed:</span>
+                     <span className='font-medium text-red-600'>
+                       {log.metadata.blockStats.error}
+                     </span>
+                   </div>
+                 )}
+                 {log.metadata.blockStats.skipped > 0 && (
+                   <div className='flex justify-between'>
+                     <span>Skipped:</span>
+                     <span className='font-medium text-yellow-600'>
+                       {log.metadata.blockStats.skipped}
+                     </span>
+                   </div>
+                 )}
+               </div>
+             </div>
+           )}
+
+           {/* Enhanced Cost - only show for enhanced logs with actual cost data */}
+           {log.metadata?.enhanced && hasCostInfo && (
+             <div>
+               <h3 className='mb-1 font-medium text-muted-foreground text-xs'>Cost Breakdown</h3>
+               <div className='space-y-1 text-sm'>
+                 {(log.metadata?.cost?.total ?? 0) > 0 && (
+                   <div className='flex justify-between'>
+                     <span>Total Cost:</span>
+                     <span className='font-medium'>
+                       ${log.metadata?.cost?.total?.toFixed(4)}
+                     </span>
+                   </div>
+                 )}
+                 {(log.metadata?.cost?.input ?? 0) > 0 && (
+                   <div className='flex justify-between'>
+                     <span>Input Cost:</span>
+                     <span className='text-muted-foreground'>
+                       ${log.metadata?.cost?.input?.toFixed(4)}
+                     </span>
+                   </div>
+                 )}
+                 {(log.metadata?.cost?.output ?? 0) > 0 && (
+                   <div className='flex justify-between'>
+                     <span>Output Cost:</span>
+                     <span className='text-muted-foreground'>
+                       ${log.metadata?.cost?.output?.toFixed(4)}
+                     </span>
+                   </div>
+                 )}
+                 {(log.metadata?.cost?.tokens?.total ?? 0) > 0 && (
+                   <div className='flex justify-between'>
+                     <span>Total Tokens:</span>
+                     <span className='text-muted-foreground'>
+                       {log.metadata?.cost?.tokens?.total?.toLocaleString()}
+                     </span>
+                   </div>
+                 )}
+               </div>
+             </div>
+           )}
+
+           {/* Frozen Canvas Button - only show for workflow execution logs with execution ID */}
+           {isWorkflowExecutionLog && log.executionId && (
+             <div>
+               <h3 className='mb-1 font-medium text-muted-foreground text-xs'>Workflow State</h3>
+               <Button
+                 variant='outline'
+                 size='sm'
+                 onClick={() => setIsFrozenCanvasOpen(true)}
+                 className='w-full justify-start gap-2'
+               >
+                 <Eye className='h-4 w-4' />
+                 View Frozen Canvas
+               </Button>
+               <p className='mt-1 text-muted-foreground text-xs'>
+                 See the exact workflow state and block inputs/outputs at execution time
+               </p>
+             </div>
+           )}
+
            {/* Message Content */}
            <div className='w-full pb-2'>
              <h3 className='mb-1 font-medium text-muted-foreground text-xs'>Message</h3>
@@ -517,42 +621,94 @@ export function Sidebar({
            )}

            {/* Cost Information (moved to bottom) */}
-           {hasCostInfo && log.metadata?.cost && (
+           {hasCostInfo && (
              <div>
-               <h3 className='mb-1 font-medium text-muted-foreground text-xs'>
-                 {isWorkflowWithCost ? 'Total Model Cost' : 'Model Cost'}
-               </h3>
+               <h3 className='mb-1 font-medium text-muted-foreground text-xs'>Models</h3>
                <div className='overflow-hidden rounded-md border'>
                  <div className='space-y-2 p-3'>
-                   {log.metadata.cost.model && (
-                     <div className='flex items-center justify-between'>
-                       <span className='text-muted-foreground text-sm'>Model:</span>
-                       <span className='text-sm'>{log.metadata.cost.model}</span>
-                     </div>
-                   )}
                    <div className='flex items-center justify-between'>
                      <span className='text-muted-foreground text-sm'>Input:</span>
-                     <span className='text-sm'>{formatCost(log.metadata.cost.input || 0)}</span>
+                     <span className='text-sm'>
+                       {formatCost(log.metadata?.cost?.input || 0)}
+                     </span>
                    </div>
                    <div className='flex items-center justify-between'>
                      <span className='text-muted-foreground text-sm'>Output:</span>
-                     <span className='text-sm'>{formatCost(log.metadata.cost.output || 0)}</span>
+                     <span className='text-sm'>
+                       {formatCost(log.metadata?.cost?.output || 0)}
+                     </span>
                    </div>
                    <div className='mt-1 flex items-center justify-between border-t pt-2'>
                      <span className='text-muted-foreground text-sm'>Total:</span>
                      <span className='text-foreground text-sm'>
-                       {formatCost(log.metadata.cost.total || 0)}
+                       {formatCost(log.metadata?.cost?.total || 0)}
                      </span>
                    </div>
                    <div className='flex items-center justify-between'>
                      <span className='text-muted-foreground text-xs'>Tokens:</span>
                      <span className='text-muted-foreground text-xs'>
-                       {log.metadata.cost.tokens?.prompt || 0} in /{' '}
-                       {log.metadata.cost.tokens?.completion || 0} out
+                       {log.metadata?.cost?.tokens?.prompt || 0} in /{' '}
+                       {log.metadata?.cost?.tokens?.completion || 0} out
                      </span>
                    </div>
                  </div>
+
+                 {/* Models Breakdown */}
+                 {log.metadata?.cost?.models &&
+                   Object.keys(log.metadata?.cost?.models).length > 0 && (
+                     <div className='border-t'>
+                       <button
+                         onClick={() => setIsModelsExpanded(!isModelsExpanded)}
+                         className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
+                       >
+                         <span className='font-medium text-muted-foreground text-xs'>
+                           Model Breakdown (
+                           {Object.keys(log.metadata?.cost?.models || {}).length})
+                         </span>
+                         {isModelsExpanded ? (
+                           <ChevronUp className='h-3 w-3 text-muted-foreground' />
+                         ) : (
+                           <ChevronDown className='h-3 w-3 text-muted-foreground' />
+                         )}
+                       </button>
+
+                       {isModelsExpanded && (
+                         <div className='space-y-3 border-t bg-muted/30 p-3'>
+                           {Object.entries(log.metadata?.cost?.models || {}).map(
+                             ([model, cost]: [string, any]) => (
+                               <div key={model} className='space-y-1'>
+                                 <div className='font-medium font-mono text-xs'>{model}</div>
+                                 <div className='space-y-1 text-xs'>
+                                   <div className='flex justify-between'>
+                                     <span className='text-muted-foreground'>Input:</span>
+                                     <span>{formatCost(cost.input || 0)}</span>
+                                   </div>
+                                   <div className='flex justify-between'>
+                                     <span className='text-muted-foreground'>Output:</span>
+                                     <span>{formatCost(cost.output || 0)}</span>
+                                   </div>
+                                   <div className='flex justify-between border-t pt-1'>
+                                     <span className='text-muted-foreground'>Total:</span>
+                                     <span className='font-medium'>
+                                       {formatCost(cost.total || 0)}
+                                     </span>
+                                   </div>
+                                   <div className='flex justify-between'>
+                                     <span className='text-muted-foreground'>Tokens:</span>
+                                     <span>
+                                       {cost.tokens?.prompt || 0} in /{' '}
+                                       {cost.tokens?.completion || 0} out
+                                     </span>
+                                   </div>
+                                 </div>
+                               </div>
+                             )
+                           )}
+                         </div>
+                       )}
+                     </div>
+                   )}

            {isWorkflowWithCost && (
              <div className='border-t bg-muted p-3 text-muted-foreground text-xs'>
                <p>
@@ -568,6 +724,18 @@ export function Sidebar({
          </ScrollArea>
        </>
      )}

+     {/* Frozen Canvas Modal */}
+     {log?.executionId && (
+       <FrozenCanvasModal
+         executionId={log.executionId}
+         workflowName={log.workflow?.name}
+         trigger={log.trigger || undefined}
+         traceSpans={log.metadata?.traceSpans}
+         isOpen={isFrozenCanvasOpen}
+         onClose={() => setIsFrozenCanvasOpen(false)}
+       />
+     )}
    </div>
  )
}
@@ -111,7 +111,7 @@ function ToolCallItem({ toolCall, index }: ToolCallItemProps) {
{toolCall.input && (
<div>
<div className='mb-1 text-muted-foreground'>Input</div>
<pre className='group relative max-h-32 overflow-auto rounded bg-background p-2'>
<pre className='group relative max-h-32 overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-all rounded bg-background p-2'>
<CopyButton text={JSON.stringify(toolCall.input, null, 2)} />
<code>{JSON.stringify(toolCall.input, null, 2)}</code>
</pre>
@@ -122,7 +122,7 @@ function ToolCallItem({ toolCall, index }: ToolCallItemProps) {
{toolCall.status === 'success' && toolCall.output && (
<div>
<div className='mb-1 text-muted-foreground'>Output</div>
<pre className='group relative max-h-32 overflow-auto rounded bg-background p-2'>
<pre className='group relative max-h-32 overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-all rounded bg-background p-2'>
<CopyButton text={JSON.stringify(toolCall.output, null, 2)} />
<code>{JSON.stringify(toolCall.output, null, 2)}</code>
</pre>
@@ -132,7 +132,7 @@ function ToolCallItem({ toolCall, index }: ToolCallItemProps) {
{toolCall.status === 'error' && toolCall.error && (
<div>
<div className='mb-1 text-destructive'>Error</div>
<pre className='group relative max-h-32 overflow-auto rounded bg-destructive/10 p-2 text-destructive'>
<pre className='group relative max-h-32 overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-all rounded bg-destructive/10 p-2 text-destructive'>
<CopyButton text={toolCall.error} />
<code>{toolCall.error}</code>
</pre>
@@ -27,6 +27,174 @@ interface TraceSpansDisplayProps {
onExpansionChange?: (expanded: boolean) => void
}

// Transform raw block data into clean, user-friendly format
function transformBlockData(data: any, blockType: string, isInput: boolean) {
if (!data) return null

// For input data, filter out sensitive information
if (isInput) {
const cleanInput = { ...data }

// Remove sensitive fields
if (cleanInput.apiKey) {
cleanInput.apiKey = '***'
}
if (cleanInput.azureApiKey) {
cleanInput.azureApiKey = '***'
}

// Remove null/undefined values for cleaner display
Object.keys(cleanInput).forEach((key) => {
if (cleanInput[key] === null || cleanInput[key] === undefined) {
delete cleanInput[key]
}
})

return cleanInput
}

// For output data, extract meaningful information based on block type
if (data.response) {
const response = data.response

switch (blockType) {
case 'agent':
return {
content: response.content,
model: data.model,
tokens: data.tokens,
toolCalls: response.toolCalls,
...(data.cost && { cost: data.cost }),
}

case 'function':
return {
result: response.result,
stdout: response.stdout,
...(response.executionTime && { executionTime: `${response.executionTime}ms` }),
}

case 'api':
return {
data: response.data,
status: response.status,
headers: response.headers,
}

default:
// For other block types, show the response content
return response
}
}

return data
}
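To make the transform concrete, here is what it produces for a hypothetical agent-block payload (the sample values are invented for illustration; only the reshaping behavior comes from the function above):

```typescript
// Sample agent output (hypothetical values)
const raw = {
  response: { content: 'Hello!', toolCalls: [] },
  model: 'gpt-4o',
  tokens: { prompt: 12, completion: 4 },
}

// transformBlockData(raw, 'agent', false) yields:
// { content: 'Hello!', model: 'gpt-4o', tokens: { prompt: 12, completion: 4 }, toolCalls: [] }
// (a cost field is appended only when raw.cost is present)

// For inputs, secrets are masked and null/undefined keys are dropped:
// transformBlockData({ apiKey: 'sk-123', prompt: 'Hi', temperature: null }, 'agent', true)
// -> { apiKey: '***', prompt: 'Hi' }
```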
// Component to display block input/output data in a clean, readable format
function BlockDataDisplay({
data,
blockType,
isInput = false,
isError = false,
}: {
data: any
blockType?: string
isInput?: boolean
isError?: boolean
}) {
if (!data) return null

// Handle different data types
const renderValue = (value: any, key?: string): React.ReactNode => {
if (value === null) return <span className='text-muted-foreground italic'>null</span>
if (value === undefined) return <span className='text-muted-foreground italic'>undefined</span>

if (typeof value === 'string') {
return <span className='break-all text-green-700 dark:text-green-400'>"{value}"</span>
}

if (typeof value === 'number') {
return <span className='text-blue-700 dark:text-blue-400'>{value}</span>
}

if (typeof value === 'boolean') {
return <span className='text-purple-700 dark:text-purple-400'>{value.toString()}</span>
}

if (Array.isArray(value)) {
if (value.length === 0) return <span className='text-muted-foreground'>[]</span>
return (
<div className='space-y-1'>
<span className='text-muted-foreground'>[</span>
<div className='ml-4 space-y-1'>
{value.map((item, index) => (
<div key={index} className='flex min-w-0 gap-2'>
<span className='flex-shrink-0 text-muted-foreground text-xs'>{index}:</span>
<div className='min-w-0 flex-1 overflow-hidden'>{renderValue(item)}</div>
</div>
))}
</div>
<span className='text-muted-foreground'>]</span>
</div>
)
}

if (typeof value === 'object') {
const entries = Object.entries(value)
if (entries.length === 0) return <span className='text-muted-foreground'>{'{}'}</span>

return (
<div className='space-y-1'>
{entries.map(([objKey, objValue]) => (
<div key={objKey} className='flex min-w-0 gap-2'>
<span className='flex-shrink-0 font-medium text-orange-700 dark:text-orange-400'>
{objKey}:
</span>
<div className='min-w-0 flex-1 overflow-hidden'>{renderValue(objValue, objKey)}</div>
</div>
))}
</div>
)
}

return <span>{String(value)}</span>
}

// Transform the data for better display
const transformedData = transformBlockData(data, blockType || 'unknown', isInput)

// Special handling for error output
if (isError && data.error) {
return (
<div className='space-y-2 text-xs'>
<div className='rounded border border-red-200 bg-red-50 p-2 dark:border-red-800 dark:bg-red-950/20'>
<div className='mb-1 font-medium text-red-800 dark:text-red-400'>Error</div>
<div className='text-red-700 dark:text-red-300'>{data.error}</div>
</div>
{/* Show other output data if available */}
{transformedData &&
Object.keys(transformedData).filter((key) => key !== 'error' && key !== 'success')
.length > 0 && (
<div className='space-y-1'>
{Object.entries(transformedData)
.filter(([key]) => key !== 'error' && key !== 'success')
.map(([key, value]) => (
<div key={key} className='flex gap-2'>
<span className='font-medium text-orange-700 dark:text-orange-400'>{key}:</span>
{renderValue(value, key)}
</div>
))}
</div>
)}
</div>
)
}

return (
<div className='space-y-1 overflow-hidden text-xs'>{renderValue(transformedData || data)}</div>
)
}

export function TraceSpansDisplay({
traceSpans,
totalDuration = 0,
@@ -35,6 +203,30 @@ export function TraceSpansDisplay({
// Keep track of expanded spans
const [expandedSpans, setExpandedSpans] = useState<Set<string>>(new Set())

// Function to collect all span IDs recursively (for expand all functionality)
const collectAllSpanIds = (spans: TraceSpan[]): string[] => {
const ids: string[] = []

const collectIds = (span: TraceSpan) => {
const spanId = span.id || `span-${span.name}-${span.startTime}`
ids.push(spanId)

// Process children
if (span.children && span.children.length > 0) {
span.children.forEach(collectIds)
}
}

spans.forEach(collectIds)
return ids
}

const allSpanIds = useMemo(() => {
if (!traceSpans || traceSpans.length === 0) return []
return collectAllSpanIds(traceSpans)
}, [traceSpans])

// Early return after all hooks
if (!traceSpans || traceSpans.length === 0) {
return <div className='text-muted-foreground text-sm'>No trace data available</div>
}
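Hoisting `collectAllSpanIds` and the `useMemo` above the early return matters: React hooks must run unconditionally on every render, so the guard belongs inside the memo rather than before it. A minimal sketch of the two orderings (`<Empty />` stands in for the actual fallback):

```typescript
// Broken: the hook is skipped when there are no spans, so the hook
// order changes between renders and React throws.
// if (!traceSpans || traceSpans.length === 0) return <Empty />
// const allSpanIds = useMemo(() => collectAllSpanIds(traceSpans), [traceSpans])

// Correct (as in the diff): hook first with the guard inside,
// early return only after all hooks have run.
const allSpanIds = useMemo(() => {
  if (!traceSpans || traceSpans.length === 0) return []
  return collectAllSpanIds(traceSpans)
}, [traceSpans])
if (!traceSpans || traceSpans.length === 0) return <Empty />
```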
@@ -61,26 +253,6 @@ export function TraceSpansDisplay({
// This ensures parallel spans are represented correctly in the timeline
const actualTotalDuration = workflowEndTime - workflowStartTime

// Function to collect all span IDs recursively (for expand all functionality)
const collectAllSpanIds = (spans: TraceSpan[]): string[] => {
const ids: string[] = []

const collectIds = (span: TraceSpan) => {
const spanId = span.id || `span-${span.name}-${span.startTime}`
ids.push(spanId)

// Process children
if (span.children && span.children.length > 0) {
span.children.forEach(collectIds)
}
}

spans.forEach(collectIds)
return ids
}

const allSpanIds = useMemo(() => collectAllSpanIds(traceSpans), [traceSpans])

// Handle span toggling
const handleSpanToggle = (spanId: string, expanded: boolean, hasSubItems: boolean) => {
const newExpandedSpans = new Set(expandedSpans)
@@ -140,11 +312,14 @@ export function TraceSpansDisplay({
)}
</button>
</div>
<div className='overflow-hidden rounded-md border shadow-sm'>
<div className='w-full overflow-hidden rounded-md border shadow-sm'>
{traceSpans.map((span, index) => {
const hasSubItems =
const hasSubItems = Boolean(
(span.children && span.children.length > 0) ||
(span.toolCalls && span.toolCalls.length > 0)
(span.toolCalls && span.toolCalls.length > 0) ||
span.input ||
span.output
)
return (
<TraceSpanItem
key={index}
@@ -430,6 +605,43 @@ function TraceSpanItem({
</div>
</div>

{/* Children and tool calls */}
{expanded && (
<div>
{/* Block Input/Output Data */}
{(span.input || span.output) && (
<div className='mt-2 ml-8 space-y-3 overflow-hidden'>
{/* Input Data */}
{span.input && (
<div>
<h4 className='mb-2 font-medium text-muted-foreground text-xs'>Input</h4>
<div className='overflow-hidden rounded-md bg-secondary/30 p-3'>
<BlockDataDisplay data={span.input} blockType={span.type} isInput={true} />
</div>
</div>
)}

{/* Output Data */}
{span.output && (
<div>
<h4 className='mb-2 font-medium text-muted-foreground text-xs'>
{span.status === 'error' ? 'Error Details' : 'Output'}
</h4>
<div className='overflow-hidden rounded-md bg-secondary/30 p-3'>
<BlockDataDisplay
data={span.output}
blockType={span.type}
isInput={false}
isError={span.status === 'error'}
/>
</div>
</div>
)}
</div>
)}
</div>
)}

{/* Children and tool calls */}
{expanded && (
<div>
@@ -437,9 +649,12 @@ function TraceSpanItem({
{hasChildren && (
<div>
{span.children?.map((childSpan, index) => {
const childHasSubItems =
const childHasSubItems = Boolean(
(childSpan.children && childSpan.children.length > 0) ||
(childSpan.toolCalls && childSpan.toolCalls.length > 0)
(childSpan.toolCalls && childSpan.toolCalls.length > 0) ||
childSpan.input ||
childSpan.output
)

return (
<TraceSpanItem

@@ -1,6 +1,6 @@
'use client'

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { AlertCircle, Info, Loader2 } from 'lucide-react'
import { createLogger } from '@/lib/logs/console-logger'
import { useSidebarStore } from '@/stores/sidebar/store'
@@ -14,34 +14,6 @@ import { formatDate } from './utils/format-date'
const logger = createLogger('Logs')
const LOGS_PER_PAGE = 50

const getLevelBadgeStyles = (level: string) => {
switch (level.toLowerCase()) {
case 'error':
return 'bg-destructive/20 text-destructive error-badge'
case 'warn':
return 'bg-warning/20 text-warning'
default:
return 'bg-secondary text-secondary-foreground'
}
}

const getTriggerBadgeStyles = (trigger: string) => {
switch (trigger.toLowerCase()) {
case 'manual':
return 'bg-secondary text-secondary-foreground'
case 'api':
return 'bg-blue-100 dark:bg-blue-950/40 text-blue-700 dark:text-blue-400'
case 'webhook':
return 'bg-orange-100 dark:bg-orange-950/40 text-orange-700 dark:text-orange-400'
case 'schedule':
return 'bg-green-100 dark:bg-green-950/40 text-green-700 dark:text-green-400'
case 'chat':
return 'bg-purple-100 dark:bg-purple-950/40 text-purple-700 dark:text-purple-400'
default:
return 'bg-gray-100 dark:bg-gray-800 text-gray-700 dark:text-gray-400'
}
}

const selectedRowAnimation = `
@keyframes borderPulse {
0% { border-left-color: hsl(var(--primary) / 0.3) }
@@ -87,28 +59,6 @@ export default function Logs() {
const isSidebarCollapsed =
mode === 'expanded' ? !isExpanded : mode === 'collapsed' || mode === 'hover'

const executionGroups = useMemo(() => {
const groups: Record<string, WorkflowLog[]> = {}

// Group logs by executionId
logs.forEach((log) => {
if (log.executionId) {
if (!groups[log.executionId]) {
groups[log.executionId] = []
}
groups[log.executionId].push(log)
}
})

Object.keys(groups).forEach((executionId) => {
groups[executionId].sort(
(a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
)
})

return groups
}, [logs])

const handleLogClick = (log: WorkflowLog) => {
setSelectedLog(log)
const index = logs.findIndex((l) => l.id === log.id)
@@ -134,6 +84,8 @@ export default function Logs() {

const handleCloseSidebar = () => {
setIsSidebarOpen(false)
setSelectedLog(null)
setSelectedLogIndex(-1)
}

useEffect(() => {
@@ -155,7 +107,7 @@ export default function Logs() {
}

const queryParams = buildQueryParams(pageNum, LOGS_PER_PAGE)
const response = await fetch(`/api/logs?${queryParams}`)
const response = await fetch(`/api/logs/enhanced?${queryParams}`)

if (!response.ok) {
throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -203,7 +155,7 @@ export default function Logs() {
try {
setLoading(true)
const queryParams = buildQueryParams(1, LOGS_PER_PAGE)
const response = await fetch(`/api/logs?${queryParams}`)
const response = await fetch(`/api/logs/enhanced?${queryParams}`)

if (!response.ok) {
throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -353,46 +305,19 @@ export default function Logs() {
<div className='flex flex-1 flex-col overflow-hidden'>
{/* Table container */}
<div className='flex flex-1 flex-col overflow-hidden'>
{/* Table header - fixed */}
<div className='sticky top-0 z-10 border-b bg-background'>
<table className='w-full table-fixed'>
<colgroup>
<col className={`${isSidebarCollapsed ? 'w-[16%]' : 'w-[19%]'}`} />
<col className='w-[8%] md:w-[7%]' />
<col className='w-[12%] md:w-[10%]' />
<col className='hidden w-[8%] lg:table-column' />
<col className='hidden w-[8%] lg:table-column' />
<col
className={`${isSidebarCollapsed ? 'w-auto md:w-[53%] lg:w-auto' : 'w-auto md:w-[50%] lg:w-auto'}`}
/>
<col className='w-[8%] md:w-[10%]' />
</colgroup>
<thead>
<tr>
<th className='px-4 pt-2 pb-3 text-left font-medium'>
<span className='text-muted-foreground text-xs leading-none'>Time</span>
</th>
<th className='px-4 pt-2 pb-3 text-left font-medium'>
<span className='text-muted-foreground text-xs leading-none'>Status</span>
</th>
<th className='px-4 pt-2 pb-3 text-left font-medium'>
<span className='text-muted-foreground text-xs leading-none'>Workflow</span>
</th>
<th className='hidden px-4 pt-2 pb-3 text-left font-medium lg:table-cell'>
<span className='text-muted-foreground text-xs leading-none'>id</span>
</th>
<th className='hidden px-4 pt-2 pb-3 text-left font-medium lg:table-cell'>
<span className='text-muted-foreground text-xs leading-none'>Trigger</span>
</th>
<th className='px-4 pt-2 pb-3 text-left font-medium'>
<span className='text-muted-foreground text-xs leading-none'>Message</span>
</th>
<th className='px-4 pt-2 pb-3 text-left font-medium'>
<span className='text-muted-foreground text-xs leading-none'>Duration</span>
</th>
</tr>
</thead>
</table>
{/* Table with fixed layout */}
<div className='w-full min-w-[800px]'>
{/* Header */}
<div className='border-border/50 border-b'>
<div className='grid grid-cols-[160px_100px_1fr_120px_100px_100px] gap-4 px-4 py-3 font-medium text-muted-foreground text-xs'>
<div>Time</div>
<div>Status</div>
<div>Workflow</div>
<div className='hidden lg:block'>Trigger</div>
<div className='hidden xl:block'>Cost</div>
<div>Duration</div>
</div>
</div>
</div>

{/* Table body - scrollable */}
@@ -419,163 +344,106 @@ export default function Logs() {
</div>
</div>
) : (
<table className='w-full table-fixed'>
<colgroup>
<col className={`${isSidebarCollapsed ? 'w-[16%]' : 'w-[19%]'}`} />
<col className='w-[8%] md:w-[7%]' />
<col className='w-[12%] md:w-[10%]' />
<col className='hidden w-[8%] lg:table-column' />
<col className='hidden w-[8%] lg:table-column' />
<col
className={`${isSidebarCollapsed ? 'w-auto md:w-[53%] lg:w-auto' : 'w-auto md:w-[50%] lg:w-auto'}`}
/>
<col className='w-[8%] md:w-[10%]' />
</colgroup>
<tbody>
{logs.map((log) => {
const formattedDate = formatDate(log.createdAt)
const isSelected = selectedLog?.id === log.id
const _isWorkflowExecutionLog =
log.executionId && executionGroups[log.executionId].length === 1
<div className='space-y-1 p-4'>
{logs.map((log) => {
const formattedDate = formatDate(log.createdAt)
const isSelected = selectedLog?.id === log.id

return (
<tr
key={log.id}
ref={isSelected ? selectedRowRef : null}
className={`cursor-pointer border-b transition-colors ${
isSelected
? 'selected-row border-l-2 bg-accent/40 hover:bg-accent/50'
: 'hover:bg-accent/30'
}`}
onClick={() => handleLogClick(log)}
>
{/* Time column */}
<td className='px-4 py-3'>
<div className='flex flex-col justify-center'>
<div className='flex items-center font-medium text-xs'>
<span>{formattedDate.formatted}</span>
<span className='mx-1.5 hidden text-muted-foreground xl:inline'>
•
</span>
<span className='hidden text-muted-foreground xl:inline'>
{new Date(log.createdAt).toLocaleDateString('en-US', {
month: 'short',
day: 'numeric',
year: 'numeric',
})}
</span>
</div>
<div className='mt-0.5 text-muted-foreground text-xs'>
<span>{formattedDate.relative}</span>
</div>
return (
<div
key={log.id}
ref={isSelected ? selectedRowRef : null}
className={`cursor-pointer rounded-lg border transition-all duration-200 ${
isSelected
? 'border-primary bg-accent/40 shadow-sm'
: 'border-border hover:border-border/80 hover:bg-accent/20'
}`}
onClick={() => handleLogClick(log)}
>
<div className='grid grid-cols-[160px_100px_1fr_120px_100px_100px] gap-4 p-4'>
{/* Time */}
<div>
<div className='font-medium text-sm'>{formattedDate.formatted}</div>
<div className='text-muted-foreground text-xs'>
{formattedDate.relative}
</div>
</td>
</div>

{/* Level column */}
<td className='px-4 py-3'>
{/* Status */}
<div>
<div
className={`inline-flex items-center justify-center rounded-md px-2 py-1 text-xs ${getLevelBadgeStyles(log.level)}`}
className={`inline-flex items-center justify-center rounded-md px-2 py-1 text-xs ${
log.level === 'error'
? 'bg-red-100 text-red-800'
: 'bg-green-100 text-green-800'
}`}
>
<span className='font-medium'>{log.level}</span>
<span className='font-medium'>
{log.level === 'error' ? 'Failed' : 'Success'}
</span>
</div>
</td>
</div>

{/* Workflow column */}
<td className='px-4 py-3'>
{log.workflow && (
<div
className='inline-flex max-w-full items-center truncate rounded-md px-2 py-1 text-xs'
style={{
backgroundColor: `${log.workflow.color}20`,
color: log.workflow.color,
}}
title={log.workflow.name}
>
<span className='truncate font-medium'>{log.workflow.name}</span>
</div>
)}
</td>

{/* ID column - hidden on small screens */}
<td className='hidden px-4 py-3 lg:table-cell'>
<div className='font-mono text-muted-foreground text-xs'>
{log.executionId ? `#${log.executionId.substring(0, 4)}` : '—'}
{/* Workflow */}
<div className='min-w-0'>
<div className='truncate font-medium text-sm'>
{log.workflow?.name || 'Unknown Workflow'}
</div>
</td>

{/* Trigger column - hidden on medium screens and below */}
<td className='hidden px-4 py-3 lg:table-cell'>
{log.trigger && (
<div
className={`inline-flex items-center rounded-md px-2 py-1 text-xs ${getTriggerBadgeStyles(log.trigger)}`}
>
<span className='font-medium'>{log.trigger}</span>
</div>
)}
</td>

{/* Message column */}
<td className='px-4 py-3'>
<div className='truncate text-sm' title={log.message}>
<div className='truncate text-muted-foreground text-xs'>
{log.message}
</div>
</td>
</div>

{/* Duration column */}
<td className='px-4 py-3'>
{/* Trigger */}
<div className='hidden lg:block'>
<div className='text-muted-foreground text-xs'>
{log.trigger || '—'}
</div>
</div>

{/* Cost */}
<div className='hidden xl:block'>
<div className='text-xs'>
{log.metadata?.enhanced && log.metadata?.cost?.total ? (
<span className='text-muted-foreground'>
${log.metadata.cost.total.toFixed(4)}
</span>
) : (
<span className='text-muted-foreground'>—</span>
)}
</div>
</div>

{/* Duration */}
<div>
<div className='text-muted-foreground text-xs'>
{log.duration || '—'}
</div>
</td>
</tr>
)
})}

{/* Infinite scroll loader */}
{hasMore && (
<tr>
<td colSpan={7}>
<div
ref={loaderRef}
className='flex items-center justify-center py-2'
style={{ height: '50px' }}
>
{isFetchingMore && (
<div className='flex items-center gap-2 text-muted-foreground opacity-70'>
<Loader2 className='h-4 w-4 animate-spin' />
<span className='text-xs'>Loading more logs...</span>
</div>
)}
</div>
</td>
</tr>
)}

{/* Footer status indicator - useful for development */}
<tr className='border-t'>
<td colSpan={7}>
<div className='flex items-center justify-between px-4 py-2 text-muted-foreground text-xs'>
<span>Showing {logs.length} logs</span>
<div className='flex items-center gap-4'>
{isFetchingMore ? (
<div className='flex items-center gap-2' />
) : hasMore ? (
<button
type='button'
onClick={loadMoreLogs}
className='text-primary text-xs hover:underline'
>
Load more logs
</button>
) : (
<span>End of logs</span>
)}
</div>
</div>
</td>
</tr>
</tbody>
</table>
</div>
)
})}

{/* Infinite scroll loader */}
{hasMore && (
<div className='flex items-center justify-center py-4'>
<div
ref={loaderRef}
className='flex items-center gap-2 text-muted-foreground'
>
{isFetchingMore ? (
<>
<Loader2 className='h-4 w-4 animate-spin' />
<span className='text-sm'>Loading more...</span>
</>
) : (
<span className='text-sm'>Scroll to load more</span>
)}
</div>
</div>
)}
</div>
)}
</div>
</div>
@@ -22,7 +22,19 @@ export interface ToolCallMetadata {
}

export interface CostMetadata {
model?: string
models?: Record<
string,
{
input: number
output: number
total: number
tokens?: {
prompt?: number
completion?: number
total?: number
}
}
>
input?: number
output?: number
total?: number
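An illustrative value for the extended interface, with one per-model entry (all numbers invented):

```typescript
const cost: CostMetadata = {
  models: {
    'gpt-4o': {
      input: 0.0021,
      output: 0.0043,
      total: 0.0064,
      tokens: { prompt: 850, completion: 210, total: 1060 },
    },
  },
  // The aggregate fields sum the per-model entries
  input: 0.0021,
  output: 0.0043,
  total: 0.0064,
}
```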
@@ -53,6 +65,7 @@ export interface TraceSpan {
relativeStartMs?: number // Time in ms from the start of the parent span
blockId?: string // Added to track the original block ID for relationship mapping
input?: Record<string, any> // Added to store input data for this span
output?: Record<string, any> // Added to store output data for this span
}

export interface WorkflowLog {
@@ -70,6 +83,29 @@ export interface WorkflowLog {
totalDuration?: number
cost?: CostMetadata
blockInput?: Record<string, any>
enhanced?: boolean
blockStats?: {
total: number
success: number
error: number
skipped: number
}
blockExecutions?: Array<{
id: string
blockId: string
blockName: string
blockType: string
startedAt: string
endedAt: string
durationMs: number
status: 'success' | 'error' | 'skipped'
errorMessage?: string
errorStackTrace?: string
inputData: any
outputData: any
cost?: CostMetadata
metadata: any
}>
}
}

@@ -30,6 +30,7 @@ import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { Skeleton } from '@/components/ui/skeleton'
import { Textarea } from '@/components/ui/textarea'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { getBaseDomain } from '@/lib/urls/utils'
import { cn } from '@/lib/utils'
@@ -54,7 +55,7 @@ interface ChatDeployProps {
type AuthType = 'public' | 'password' | 'email'

const getDomainSuffix = (() => {
const suffix = process.env.NODE_ENV === 'development' ? `.${getBaseDomain()}` : '.simstudio.ai'
const suffix = env.NODE_ENV === 'development' ? `.${getBaseDomain()}` : '.simstudio.ai'
return () => suffix
})()
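The suffix now reads `NODE_ENV` through the shared `@/lib/env` module rather than `process.env` directly. That module's implementation is not part of this diff; a common shape for such a wrapper, sketched here as an assumption, validates the environment once at startup:

```typescript
// Hypothetical sketch of '@/lib/env' - the real module may differ
import { z } from 'zod'

const schema = z.object({
  NODE_ENV: z.enum(['development', 'test', 'production']).default('development'),
})

// Parsed once, so misconfiguration fails fast instead of at each call site
export const env = schema.parse(process.env)
```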
@@ -1,53 +1,57 @@
'use client'

import { useEffect, useState } from 'react'
import { AlertTriangle, RefreshCw } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'

interface ConnectionStatusProps {
isConnected: boolean
}

export function ConnectionStatus({ isConnected }: ConnectionStatusProps) {
const [showOfflineNotice, setShowOfflineNotice] = useState(false)
const userPermissions = useUserPermissionsContext()

useEffect(() => {
let timeoutId: NodeJS.Timeout
const handleRefresh = () => {
window.location.reload()
}

if (!isConnected) {
// Show offline notice after 6 seconds of being disconnected
timeoutId = setTimeout(() => {
setShowOfflineNotice(true)
}, 6000) // 6 seconds
} else {
// Hide notice immediately when reconnected
setShowOfflineNotice(false)
}

return () => {
if (timeoutId) {
clearTimeout(timeoutId)
}
}
}, [isConnected])

// Don't render anything if connected or if we haven't been disconnected long enough
if (!showOfflineNotice) {
// Don't render anything if not in offline mode
if (!userPermissions.isOfflineMode) {
return null
}

return (
<div className='flex items-center gap-1.5'>
<div className='flex items-center gap-1.5 text-red-600'>
<div className='flex items-center gap-2 rounded-md border border-red-200 bg-red-50 px-3 py-2'>
<div className='flex items-center gap-2 text-red-700'>
<div className='relative flex items-center justify-center'>
<div className='absolute h-3 w-3 animate-ping rounded-full bg-red-500/20' />
<div className='relative h-2 w-2 rounded-full bg-red-500' />
{!isConnected && (
<div className='absolute h-4 w-4 animate-ping rounded-full bg-red-500/20' />
)}
<AlertTriangle className='relative h-4 w-4' />
</div>
<div className='flex flex-col'>
<span className='font-medium text-xs leading-tight'>Connection lost</span>
<span className='text-xs leading-tight opacity-90'>
Changes not saved - please refresh
<span className='font-medium text-xs leading-tight'>
{isConnected ? 'Reconnected' : 'Connection lost - please refresh'}
</span>
<span className='text-red-600 text-xs leading-tight'>
{isConnected ? 'Refresh to continue editing' : 'Read-only mode active'}
</span>
</div>
</div>
<Tooltip>
<TooltipTrigger asChild>
<Button
onClick={handleRefresh}
variant='ghost'
size='sm'
className='h-7 w-7 p-0 text-red-700 hover:bg-red-100 hover:text-red-800'
>
<RefreshCw className='h-4 w-4' />
</Button>
</TooltipTrigger>
<TooltipContent className='z-[9999]'>Refresh page to continue editing</TooltipContent>
</Tooltip>
</div>
)
}
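The rendering gate moved from a local six-second timer to the shared `isOfflineMode` flag, so every consumer of the permissions context agrees on when the banner shows. Callers are unchanged; a hypothetical parent just forwards the socket state:

```typescript
// Hypothetical caller - component name and props assumed for illustration
function PresenceHeader({ isSocketConnected }: { isSocketConnected: boolean }) {
  // Renders nothing unless userPermissions.isOfflineMode is true
  return <ConnectionStatus isConnected={isSocketConnected} />
}
```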
@@ -44,16 +44,6 @@ export function UserAvatarStack({
}
}, [users, maxVisible])

// Show connection status component regardless of user count
// This will handle the offline notice when disconnected for 15 seconds
const connectionStatusElement = <ConnectionStatus isConnected={isConnected} />

// Only show presence when there are multiple users (>1)
// But always show connection status
if (users.length <= 1) {
return connectionStatusElement
}

// Determine spacing based on size
const spacingClass = {
sm: '-space-x-1',
@@ -62,46 +52,55 @@ export function UserAvatarStack({
}[size]

return (
<div className={`flex items-center ${spacingClass} ${className}`}>
{/* Connection status - always present */}
{connectionStatusElement}
<div className={`flex items-center gap-3 ${className}`}>
{/* Connection status - always check, shows when offline */}
<ConnectionStatus isConnected={isConnected} />

{/* Render visible user avatars */}
{visibleUsers.map((user, index) => (
<UserAvatar
key={user.connectionId}
connectionId={user.connectionId}
name={user.name}
color={user.color}
size={size}
index={index}
tooltipContent={
user.name ? (
<div className='text-center'>
<div className='font-medium'>{user.name}</div>
{user.info && <div className='mt-1 text-muted-foreground text-xs'>{user.info}</div>}
</div>
) : null
}
/>
))}
{/* Only show avatar stack when there are multiple users (>1) */}
{users.length > 1 && (
<div className={`flex items-center ${spacingClass}`}>
{/* Render visible user avatars */}
{visibleUsers.map((user, index) => (
<UserAvatar
key={user.connectionId}
connectionId={user.connectionId}
name={user.name}
color={user.color}
size={size}
index={index}
tooltipContent={
user.name ? (
<div className='text-center'>
<div className='font-medium'>{user.name}</div>
{user.info && (
<div className='mt-1 text-muted-foreground text-xs'>{user.info}</div>
)}
</div>
) : null
}
/>
))}

{/* Render overflow indicator if there are more users */}
{overflowCount > 0 && (
<UserAvatar
connectionId='overflow-indicator' // Use a unique string identifier
name={`+${overflowCount}`}
size={size}
index={visibleUsers.length}
tooltipContent={
<div className='text-center'>
<div className='font-medium'>
{overflowCount} more user{overflowCount > 1 ? 's' : ''}
</div>
<div className='mt-1 text-muted-foreground text-xs'>{users.length} total online</div>
</div>
}
/>
{/* Render overflow indicator if there are more users */}
{overflowCount > 0 && (
<UserAvatar
connectionId='overflow-indicator' // Use a unique string identifier
name={`+${overflowCount}`}
size={size}
index={visibleUsers.length}
tooltipContent={
<div className='text-center'>
<div className='font-medium'>
{overflowCount} more user{overflowCount > 1 ? 's' : ''}
</div>
<div className='mt-1 text-muted-foreground text-xs'>
{users.length} total online
</div>
</div>
}
/>
)}
</div>
)}
</div>
)

@@ -458,7 +458,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
* Handle deleting the current workflow
*/
const handleDeleteWorkflow = () => {
if (!activeWorkflowId || !userPermissions.canEdit) return
if (!activeWorkflowId || !userPermissions.canAdmin) return

const sidebarWorkflows = getSidebarOrderedWorkflows()
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === activeWorkflowId)
@@ -670,7 +670,11 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
</h2>
</TooltipTrigger>
{!canEdit && (
<TooltipContent>Edit permissions required to rename workflows</TooltipContent>
<TooltipContent>
{userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Edit permissions required to rename workflows'}
</TooltipContent>
)}
</Tooltip>
)}
@@ -691,12 +695,12 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
* Render delete workflow button with confirmation dialog
*/
const renderDeleteButton = () => {
const canEdit = userPermissions.canEdit
const canAdmin = userPermissions.canAdmin
const hasMultipleWorkflows = Object.keys(workflows).length > 1
const isDisabled = !canEdit || !hasMultipleWorkflows
const isDisabled = !canAdmin || !hasMultipleWorkflows

const getTooltipText = () => {
if (!canEdit) return 'Admin permission required to delete workflows'
if (!canAdmin) return 'Admin permission required to delete workflows'
if (!hasMultipleWorkflows) return 'Cannot delete the last workflow'
return 'Delete Workflow'
}
@@ -934,7 +938,11 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
)}
</TooltipTrigger>
<TooltipContent>
{canEdit ? 'Duplicate Workflow' : 'Admin permission required to duplicate workflows'}
{canEdit
? 'Duplicate Workflow'
: userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Admin permission required to duplicate workflows'}
</TooltipContent>
</Tooltip>
)
@@ -975,7 +983,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
</TooltipTrigger>
<TooltipContent command='Shift+L'>
{!userPermissions.canEdit
? 'Admin permission required to use auto-layout'
? userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Admin permission required to use auto-layout'
: 'Auto Layout'}
</TooltipContent>
</Tooltip>

@@ -5,6 +5,12 @@ import { ArrowUp } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { ScrollArea } from '@/components/ui/scroll-area'
import { createLogger } from '@/lib/logs/console-logger'
import {
extractBlockIdFromOutputId,
extractPathFromOutputId,
parseOutputContentSafely,
} from '@/lib/response-format'
import type { BlockLog, ExecutionResult } from '@/executor/types'
import { useExecutionStore } from '@/stores/execution/store'
import { useChatStore } from '@/stores/panel/chat/store'
@@ -14,6 +20,8 @@ import { useWorkflowExecution } from '../../../../hooks/use-workflow-execution'
import { ChatMessage } from './components/chat-message/chat-message'
import { OutputSelect } from './components/output-select/output-select'

const logger = createLogger('ChatPanel')

interface ChatProps {
panelWidth: number
chatMessage: string
@@ -60,8 +68,8 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
const selected = selectedWorkflowOutputs[activeWorkflowId]

if (!selected || selected.length === 0) {
const defaultSelection = outputEntries.length > 0 ? [outputEntries[0].id] : []
return defaultSelection
// Return empty array when nothing is explicitly selected
return []
}

// Ensure we have no duplicates in the selection
@@ -74,7 +82,7 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
}

return selected
}, [selectedWorkflowOutputs, activeWorkflowId, outputEntries, setSelectedWorkflowOutput])
}, [selectedWorkflowOutputs, activeWorkflowId, setSelectedWorkflowOutput])

// Auto-scroll to bottom when new messages are added
useEffect(() => {
@@ -140,18 +148,23 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
result.logs?.filter((log) => !messageIdMap.has(log.blockId)) || []

if (nonStreamingLogs.length > 0) {
const outputsToRender = selectedOutputs.filter((outputId) =>
nonStreamingLogs.some((log) => log.blockId === outputId.split('.')[0])
)
const outputsToRender = selectedOutputs.filter((outputId) => {
const blockIdForOutput = extractBlockIdFromOutputId(outputId)
return nonStreamingLogs.some((log) => log.blockId === blockIdForOutput)
})

for (const outputId of outputsToRender) {
const blockIdForOutput = outputId.split('.')[0]
const path = outputId.substring(blockIdForOutput.length + 1)
const blockIdForOutput = extractBlockIdFromOutputId(outputId)
const path = extractPathFromOutputId(outputId, blockIdForOutput)
const log = nonStreamingLogs.find((l) => l.blockId === blockIdForOutput)

if (log) {
let outputValue: any = log.output

if (path) {
// Parse JSON content safely
outputValue = parseOutputContentSafely(outputValue)

const pathParts = path.split('.')
for (const part of pathParts) {
if (
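The old code split output IDs on `'.'`, which misparses them: IDs are built as `` `${block.id}_${fullPath}` `` (see the OutputSelect changes below) and the path portion itself contains dots. The new helpers centralize the parsing; their bodies live in `@/lib/response-format` and are not shown in this diff, so the following is only a plausible sketch:

```typescript
// Assumed sketch - the actual implementations are in '@/lib/response-format'
function extractBlockIdFromOutputId(outputId: string): string {
  // Everything before the first underscore is the block ID
  return outputId.split('_')[0]
}

function extractPathFromOutputId(outputId: string, blockId: string): string {
  // The remainder after `${blockId}_` is the dot-separated path, if any
  return outputId.length > blockId.length ? outputId.slice(blockId.length + 1) : ''
}
```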
@@ -203,42 +216,41 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
}
}
} catch (e) {
console.error('Error parsing stream data:', e)
logger.error('Error parsing stream data:', e)
}
}
}
}
}

processStream().catch((e) => console.error('Error processing stream:', e))
processStream().catch((e) => logger.error('Error processing stream:', e))
} else if (result && 'success' in result && result.success && 'logs' in result) {
const finalOutputs: any[] = []

if (selectedOutputs && selectedOutputs.length > 0) {
if (selectedOutputs?.length > 0) {
for (const outputId of selectedOutputs) {
// Find the log that corresponds to the start of the outputId
const log = result.logs?.find(
(l: BlockLog) => l.blockId === outputId || outputId.startsWith(`${l.blockId}_`)
)
const blockIdForOutput = extractBlockIdFromOutputId(outputId)
const path = extractPathFromOutputId(outputId, blockIdForOutput)
const log = result.logs?.find((l: BlockLog) => l.blockId === blockIdForOutput)

if (log) {
let output = log.output
// Check if there is a path to traverse
if (outputId.length > log.blockId.length) {
const path = outputId.substring(log.blockId.length + 1)
if (path) {
const pathParts = path.split('.')
let current = output
for (const part of pathParts) {
if (current && typeof current === 'object' && part in current) {
current = current[part]
} else {
current = undefined
break
}

if (path) {
// Parse JSON content safely
output = parseOutputContentSafely(output)

const pathParts = path.split('.')
let current = output
for (const part of pathParts) {
if (current && typeof current === 'object' && part in current) {
current = current[part]
} else {
current = undefined
break
}
output = current
}
output = current
}
if (output !== undefined) {
finalOutputs.push(output)
@@ -247,10 +259,8 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
}
}

// If no specific outputs could be resolved, fall back to the final workflow output
if (finalOutputs.length === 0 && result.output) {
finalOutputs.push(result.output)
}
// Only show outputs if something was explicitly selected
// If no outputs are selected, don't show anything

// Add a new message for each resolved output
finalOutputs.forEach((output) => {
@@ -258,19 +268,8 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
if (typeof output === 'string') {
content = output
} else if (output && typeof output === 'object') {
// Handle cases where output is { response: ... }
const outputObj = output as Record<string, any>
const response = outputObj.response
if (response) {
if (typeof response.content === 'string') {
content = response.content
} else {
// Pretty print for better readability
content = `\`\`\`json\n${JSON.stringify(response, null, 2)}\n\`\`\``
}
} else {
content = `\`\`\`json\n${JSON.stringify(output, null, 2)}\n\`\`\``
}
// For structured responses, pretty print the JSON
content = `\`\`\`json\n${JSON.stringify(output, null, 2)}\n\`\`\``
}

if (content) {

@@ -1,8 +1,10 @@
import { useEffect, useMemo, useRef, useState } from 'react'
import { Check, ChevronDown } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format'
import { cn } from '@/lib/utils'
import { getBlock } from '@/blocks'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

interface OutputSelectProps {
@@ -48,18 +50,69 @@ export function OutputSelect({
? block.name.replace(/\s+/g, '').toLowerCase()
: `block-${block.id}`

// Check for custom response format first
const responseFormatValue = useSubBlockStore.getState().getValue(block.id, 'responseFormat')
const responseFormat = parseResponseFormatSafely(responseFormatValue, block.id)

let outputsToProcess: Record<string, any> = {}

if (responseFormat) {
// Use custom schema properties if response format is specified
const schemaFields = extractFieldsFromSchema(responseFormat)
if (schemaFields.length > 0) {
// Convert schema fields to output structure
schemaFields.forEach((field) => {
outputsToProcess[field.name] = { type: field.type }
})
} else {
// Fallback to default outputs if schema extraction failed
outputsToProcess = block.outputs || {}
}
} else {
// Use default block outputs
outputsToProcess = block.outputs || {}
}

// Add response outputs
if (block.outputs && typeof block.outputs === 'object') {
if (Object.keys(outputsToProcess).length > 0) {
const addOutput = (path: string, outputObj: any, prefix = '') => {
const fullPath = prefix ? `${prefix}.${path}` : path

if (typeof outputObj === 'object' && outputObj !== null) {
// For objects, recursively add each property
// If not an object or is null, treat as leaf node
if (typeof outputObj !== 'object' || outputObj === null) {
const output = {
id: `${block.id}_${fullPath}`,
label: `${blockName}.${fullPath}`,
blockId: block.id,
blockName: block.name || `Block ${block.id}`,
blockType: block.type,
path: fullPath,
}
outputs.push(output)
return
}

// If has 'type' property, treat as schema definition (leaf node)
if ('type' in outputObj && typeof outputObj.type === 'string') {
const output = {
id: `${block.id}_${fullPath}`,
label: `${blockName}.${fullPath}`,
blockId: block.id,
blockName: block.name || `Block ${block.id}`,
blockType: block.type,
path: fullPath,
}
outputs.push(output)
return
}

// For objects without type, recursively add each property
if (!Array.isArray(outputObj)) {
Object.entries(outputObj).forEach(([key, value]) => {
addOutput(key, value, fullPath)
})
} else {
// Add leaf node as output option
// For arrays, treat as leaf node
outputs.push({
id: `${block.id}_${fullPath}`,
label: `${blockName}.${fullPath}`,
@@ -71,10 +124,10 @@ export function OutputSelect({
}
}

// Start with the response object
if (block.outputs.response) {
addOutput('response', block.outputs.response)
}
// Process all output properties directly (flattened structure)
Object.entries(outputsToProcess).forEach(([key, value]) => {
addOutput(key, value)
})
}
})
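The effect of the flattened traversal: for a block named "My Block" with `id: 'abc123'` (ID invented for illustration) and outputs `{ result: { type: 'string' }, data: { user: { type: 'object' } } }`, the entries produced are:

```typescript
const expected = [
  { id: 'abc123_result', label: 'myblock.result', path: 'result' },
  { id: 'abc123_data.user', label: 'myblock.data.user', path: 'data.user' },
]
```

Schema leaves (objects carrying a string `type`) and arrays terminate the recursion; plain objects recurse one key deeper.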
|
||||
|
||||
|
||||
@@ -125,35 +125,33 @@ export function ConsoleEntry({ entry, consoleWidth }: ConsoleEntryProps) {
|
||||
<div className='flex items-start gap-2'>
|
||||
<Terminal className='mt-1 h-4 w-4 text-muted-foreground' />
|
||||
<div className='overflow-wrap-anywhere relative flex-1 whitespace-normal break-normal font-mono text-sm'>
|
||||
{typeof entry.output === 'object' &&
|
||||
entry.output !== null &&
|
||||
hasNestedStructure(entry.output) && (
|
||||
<div className='absolute top-0 right-0 z-10'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='h-6 px-2 text-muted-foreground hover:text-foreground'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setExpandAllJson(!expandAllJson)
|
||||
}}
|
||||
>
|
||||
<span className='flex items-center'>
|
||||
{expandAllJson ? (
|
||||
<>
|
||||
<ChevronUp className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Collapse</span>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<ChevronDown className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Expand</span>
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
{entry.output != null && (
|
||||
<div className='absolute top-0 right-0 z-10'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='h-6 px-2 text-muted-foreground hover:text-foreground'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setExpandAllJson(!expandAllJson)
|
||||
}}
|
||||
>
|
||||
<span className='flex items-center'>
|
||||
{expandAllJson ? (
|
||||
<>
|
||||
<ChevronUp className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Collapse</span>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<ChevronDown className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Expand</span>
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
<JSONView data={entry.output} initiallyExpanded={expandAllJson} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
|
||||
export type ToolbarBlockProps = {
|
||||
@@ -9,6 +10,8 @@ export type ToolbarBlockProps = {
|
||||
}
|
||||
|
||||
export function ToolbarBlock({ config, disabled = false }: ToolbarBlockProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const handleDragStart = (e: React.DragEvent) => {
|
||||
if (disabled) {
|
||||
e.preventDefault()
|
||||
@@ -66,7 +69,11 @@ export function ToolbarBlock({ config, disabled = false }: ToolbarBlockProps) {
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
|
||||
<TooltipContent>Edit permissions required to add blocks</TooltipContent>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to add blocks'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
|
||||
import { LoopTool } from '../../../loop-node/loop-config'
|
||||
|
||||
type LoopToolbarItemProps = {
|
||||
@@ -9,6 +10,8 @@ type LoopToolbarItemProps = {
|
||||
|
||||
// Custom component for the Loop Tool
|
||||
export default function LoopToolbarItem({ disabled = false }: LoopToolbarItemProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const handleDragStart = (e: React.DragEvent) => {
|
||||
if (disabled) {
|
||||
e.preventDefault()
|
||||
@@ -74,7 +77,11 @@ export default function LoopToolbarItem({ disabled = false }: LoopToolbarItemPro
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
|
||||
<TooltipContent>Edit permissions required to add blocks</TooltipContent>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to add blocks'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
|
||||
import { ParallelTool } from '../../../parallel-node/parallel-config'
|
||||
|
||||
type ParallelToolbarItemProps = {
|
||||
@@ -9,6 +10,7 @@ type ParallelToolbarItemProps = {
|
||||
|
||||
// Custom component for the Parallel Tool
|
||||
export default function ParallelToolbarItem({ disabled = false }: ParallelToolbarItemProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const handleDragStart = (e: React.DragEvent) => {
|
||||
if (disabled) {
|
||||
e.preventDefault()
|
||||
@@ -75,7 +77,11 @@ export default function ParallelToolbarItem({ disabled = false }: ParallelToolba
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
|
||||
<TooltipContent>Edit permissions required to add blocks</TooltipContent>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to add blocks'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -145,11 +145,13 @@ export const Toolbar = React.memo(() => {
   {blocks.map((block) => (
     <ToolbarBlock key={block.type} config={block} disabled={!userPermissions.canEdit} />
   ))}
-  {activeTab === 'blocks' && !searchQuery && (
-    <>
-      <LoopToolbarItem disabled={!userPermissions.canEdit} />
-      <ParallelToolbarItem disabled={!userPermissions.canEdit} />
-    </>
-  )}
+  {((activeTab === 'blocks' && !searchQuery) ||
+    (searchQuery && 'loop'.includes(searchQuery.toLowerCase()))) && (
+    <LoopToolbarItem disabled={!userPermissions.canEdit} />
+  )}
+  {((activeTab === 'blocks' && !searchQuery) ||
+    (searchQuery && 'parallel'.includes(searchQuery.toLowerCase()))) && (
+    <ParallelToolbarItem disabled={!userPermissions.canEdit} />
+  )}
   </div>
 </div>
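The new toolbar conditions keep the Loop and Parallel items visible while searching: each item now shows when the query is a substring of its name. A minimal sketch of that visibility rule, extracted as a helper — `itemMatchesSearch` is a hypothetical name, not part of the codebase:

```typescript
const itemMatchesSearch = (itemName: string, searchQuery: string): boolean => {
  // With no query the item defers to the active-tab check; with a query it is
  // shown when the query is a substring of the item name ('loo' matches 'loop',
  // but 'loops' does not, since the check is name.includes(query)).
  if (!searchQuery) return true
  return itemName.toLowerCase().includes(searchQuery.toLowerCase())
}

// itemMatchesSearch('loop', 'oo')    -> true
// itemMatchesSearch('loop', 'loops') -> false
```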
@@ -2,6 +2,7 @@ import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Copy, Trash2 } from 'lucide-react'
 import { Button } from '@/components/ui/button'
 import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
 import { cn } from '@/lib/utils'
+import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
 import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
 import { useWorkflowStore } from '@/stores/workflows/workflow/store'

@@ -22,9 +23,17 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarProps) {
   const horizontalHandles = useWorkflowStore(
     (state) => state.blocks[blockId]?.horizontalHandles ?? false
   )
+  const userPermissions = useUserPermissionsContext()

   const isStarterBlock = blockType === 'starter'

+  const getTooltipMessage = (defaultMessage: string) => {
+    if (disabled) {
+      return userPermissions.isOfflineMode ? 'Connection lost - please refresh' : 'Read-only mode'
+    }
+    return defaultMessage
+  }
+
   return (
     <div
       className={cn(
@@ -68,7 +77,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarProps) {
         </Button>
       </TooltipTrigger>
       <TooltipContent side='right'>
-        {disabled ? 'Read-only mode' : isEnabled ? 'Disable Block' : 'Enable Block'}
+        {getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
       </TooltipContent>
     </Tooltip>

@@ -89,9 +98,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarProps) {
           <Copy className='h-4 w-4' />
         </Button>
       </TooltipTrigger>
-      <TooltipContent side='right'>
-        {disabled ? 'Read-only mode' : 'Duplicate Block'}
-      </TooltipContent>
+      <TooltipContent side='right'>{getTooltipMessage('Duplicate Block')}</TooltipContent>
     </Tooltip>
   )}

@@ -116,7 +123,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarProps) {
         </Button>
       </TooltipTrigger>
       <TooltipContent side='right'>
-        {disabled ? 'Read-only mode' : horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports'}
+        {getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
       </TooltipContent>
     </Tooltip>

@@ -140,9 +147,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarProps) {
           <Trash2 className='h-4 w-4' />
         </Button>
       </TooltipTrigger>
-      <TooltipContent side='right'>
-        {disabled ? 'Read-only mode' : 'Delete Block'}
-      </TooltipContent>
+      <TooltipContent side='right'>{getTooltipMessage('Delete Block')}</TooltipContent>
     </Tooltip>
   )}
 </div>
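Four near-identical ternaries collapse into the single `getTooltipMessage` helper, with the offline message taking precedence over the read-only one. A condensed sketch of the decision table, assuming the permissions context exposes an `isOfflineMode` boolean as shown above (the real helper closes over `disabled` and the context rather than taking parameters):

```typescript
type Perms = { isOfflineMode: boolean }

const tooltipFor = (disabled: boolean, perms: Perms, defaultMessage: string): string => {
  if (!disabled) return defaultMessage // enabled: show the action name
  return perms.isOfflineMode
    ? 'Connection lost - please refresh' // offline wins over read-only
    : 'Read-only mode'
}
```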
@@ -1,13 +1,15 @@
+import { RepeatIcon, SplitIcon } from 'lucide-react'
 import { Card } from '@/components/ui/card'
 import { cn } from '@/lib/utils'
 import {
   type ConnectedBlock,
   useBlockConnections,
 } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-block-connections'
-import { useSubBlockStore } from '@/stores/workflows/subblock/store'
+import { getBlock } from '@/blocks'

 interface ConnectionBlocksProps {
   blockId: string
   horizontalHandles: boolean
   setIsConnecting: (isConnecting: boolean) => void
+  isDisabled?: boolean
 }
@@ -20,6 +22,7 @@ interface ResponseField {

 export function ConnectionBlocks({
   blockId,
   horizontalHandles,
   setIsConnecting,
+  isDisabled = false,
 }: ConnectionBlocksProps) {
@@ -39,6 +42,10 @@ export function ConnectionBlocks({
     e.stopPropagation() // Prevent parent drag handlers from firing
     setIsConnecting(true)

+    // If no specific field is provided, use all available output types
+    const outputType = field ? field.name : connection.outputType
+
     e.dataTransfer.setData(
       'application/json',
       JSON.stringify({
@@ -46,9 +53,13 @@ export function ConnectionBlocks({
         connectionData: {
           id: connection.id,
           name: connection.name,
-          outputType: field ? field.name : connection.outputType,
+          outputType: outputType,
           sourceBlockId: connection.id,
           fieldType: field?.type,
+          // Include all available output types for reference
+          allOutputTypes: Array.isArray(connection.outputType)
+            ? connection.outputType
+            : [connection.outputType],
         },
       })
     )
@@ -59,147 +70,71 @@ export function ConnectionBlocks({
     setIsConnecting(false)
   }

-  // Helper function to extract fields from JSON Schema
-  const extractFieldsFromSchema = (connection: ConnectedBlock): ResponseField[] => {
-    // Handle legacy format with fields array
-    if (connection.responseFormat?.fields) {
-      return connection.responseFormat.fields
-    }
-
-    // Handle new JSON Schema format
-    const schema = connection.responseFormat?.schema || connection.responseFormat
-    // Safely check if schema and properties exist
-    if (
-      !schema ||
-      typeof schema !== 'object' ||
-      !('properties' in schema) ||
-      typeof schema.properties !== 'object'
-    ) {
-      return []
-    }
-    return Object.entries(schema.properties).map(([name, prop]: [string, any]) => ({
-      name,
-      type: Array.isArray(prop) ? 'array' : prop.type || 'string',
-      description: prop.description,
-    }))
-  }
-
-  // Extract fields from starter block input format
-  const extractFieldsFromStarterInput = (connection: ConnectedBlock): ResponseField[] => {
-    // Only process for starter blocks
-    if (connection.type !== 'starter') return []
-
-    try {
-      // Get input format from subblock store
-      const inputFormat = useSubBlockStore.getState().getValue(connection.id, 'inputFormat')
-
-      // Make sure we have a valid input format
-      if (!inputFormat || !Array.isArray(inputFormat) || inputFormat.length === 0) {
-        return [{ name: 'input', type: 'any' }]
-      }
-
-      // Check if any fields have been configured with names
-      const hasConfiguredFields = inputFormat.some(
-        (field: any) => field.name && field.name.trim() !== ''
-      )
-
-      // If no fields have been configured, return the default input field
-      if (!hasConfiguredFields) {
-        return [{ name: 'input', type: 'any' }]
-      }
-
-      // Map input fields to response fields
-      return inputFormat.map((field: any) => ({
-        name: `input.${field.name}`,
-        type: field.type || 'string',
-        description: field.description,
-      }))
-    } catch (e) {
-      console.error('Error extracting fields from starter input format:', e)
-      return [{ name: 'input', type: 'any' }]
-    }
-  }
-
-  // Deduplicate connections by ID
-  const connectionMap = incomingConnections.reduce(
-    (acc, connection) => {
-      acc[connection.id] = connection
-      return acc
-    },
-    {} as Record<string, ConnectedBlock>
-  )
-
-  // Sort connections by name
-  const sortedConnections = Object.values(connectionMap).sort((a, b) =>
-    a.name.localeCompare(b.name)
-  )
+  // Use connections in distance order (already sorted and deduplicated by the hook)
+  const sortedConnections = incomingConnections

   // Helper function to render a connection card
-  const renderConnectionCard = (connection: ConnectedBlock, field?: ResponseField) => {
-    const displayName = connection.name.replace(/\s+/g, '').toLowerCase()
+  const renderConnectionCard = (connection: ConnectedBlock) => {
+    // Get block configuration for icon and color
+    const blockConfig = getBlock(connection.type)
+    const displayName = connection.name // Use the actual block name instead of transforming it
+
+    // Handle special blocks that aren't in the registry (loop and parallel)
+    let Icon = blockConfig?.icon
+    let bgColor = blockConfig?.bgColor || '#6B7280' // Fallback to gray
+
+    if (!blockConfig) {
+      if (connection.type === 'loop') {
+        Icon = RepeatIcon as typeof Icon
+        bgColor = '#2FB3FF' // Blue color for loop blocks
+      } else if (connection.type === 'parallel') {
+        Icon = SplitIcon as typeof Icon
+        bgColor = '#FEE12B' // Yellow color for parallel blocks
+      }
+    }

     return (
       <Card
-        key={`${field ? field.name : connection.id}`}
+        key={`${connection.id}-${connection.name}`}
         draggable={!isDisabled}
-        onDragStart={(e) => handleDragStart(e, connection, field)}
+        onDragStart={(e) => handleDragStart(e, connection)}
         onDragEnd={handleDragEnd}
         className={cn(
-          'group flex w-max items-center rounded-lg border bg-card p-2 shadow-sm transition-colors',
+          'group flex w-max items-center gap-2 rounded-lg border bg-card p-2 shadow-sm transition-colors',
           !isDisabled
             ? 'cursor-grab hover:bg-accent/50 active:cursor-grabbing'
             : 'cursor-not-allowed opacity-60'
         )}
       >
+        {/* Block icon with color */}
+        {Icon && (
+          <div
+            className='flex h-5 w-5 flex-shrink-0 items-center justify-center rounded'
+            style={{ backgroundColor: bgColor }}
+          >
+            <Icon className='h-3 w-3 text-white' />
+          </div>
+        )}
         <div className='text-sm'>
           <span className='font-medium leading-none'>{displayName}</span>
-          <span className='text-muted-foreground'>
-            {field
-              ? `.${field.name}`
-              : typeof connection.outputType === 'string'
-                ? `.${connection.outputType}`
-                : ''}
-          </span>
         </div>
       </Card>
     )
   }

-  return (
-    <div className='absolute top-0 right-full flex max-h-[400px] flex-col items-end space-y-2 overflow-y-auto pr-5'>
-      {sortedConnections.map((connection, index) => {
-        // Special handling for starter blocks with input format
-        if (connection.type === 'starter') {
-          const starterFields = extractFieldsFromStarterInput(connection)
-
-          if (starterFields.length > 0) {
-            return (
-              <div key={connection.id} className='space-y-2'>
-                {starterFields.map((field) => renderConnectionCard(connection, field))}
-              </div>
-            )
-          }
-        }
-
-        // Regular connection handling
-        return (
-          <div key={`${connection.id}-${index}`} className='space-y-2'>
-            {Array.isArray(connection.outputType)
-              ? // Handle array of field names
-                connection.outputType.map((fieldName) => {
-                  // Try to find field in response format
-                  const fields = extractFieldsFromSchema(connection)
-                  const field = fields.find((f) => f.name === fieldName) || {
-                    name: fieldName,
-                    type: 'string',
-                  }
-
-                  return renderConnectionCard(connection, field)
-                })
-              : renderConnectionCard(connection)}
-          </div>
-        )
-      })}
-    </div>
-  )
+  // Generate all connection cards - one per block, not per output field
+  const connectionCards: React.ReactNode[] = []
+
+  sortedConnections.forEach((connection) => {
+    connectionCards.push(renderConnectionCard(connection))
+  })
+
+  // Position and layout based on handle orientation - reverse of ports
+  // When ports are horizontal: connection blocks on top, aligned to left, closest blocks on bottom row
+  // When ports are vertical (default): connection blocks on left, stack vertically, aligned to right
+  const containerClasses = horizontalHandles
+    ? 'absolute bottom-full left-0 flex max-w-[600px] flex-wrap-reverse gap-2 pb-3'
+    : 'absolute top-0 right-full flex max-h-[400px] max-w-[200px] flex-col items-end gap-2 overflow-y-auto pr-3'

+  return <div className={containerClasses}>{connectionCards}</div>
 }
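The component previously deduplicated by id with a `reduce` into a record and then re-sorted alphabetically; the rewrite instead trusts `useBlockConnections` to return connections already deduplicated and in distance order. A sketch of the order-preserving dedup that contract implies the hook must now guarantee — a hypothetical helper, with `ConnectedBlock` narrowed to the fields used here:

```typescript
type ConnectedBlock = { id: string; name: string }

// Order-preserving dedup: the first (closest) occurrence of each id wins.
// The old Record-based reduce kept the *last* occurrence and then discarded
// the original ordering entirely by re-sorting on name.
function dedupeInOrder(connections: ConnectedBlock[]): ConnectedBlock[] {
  const seen = new Set<string>()
  return connections.filter((connection) => {
    if (seen.has(connection.id)) return false
    seen.add(connection.id)
    return true
  })
}
```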
@@ -73,8 +73,6 @@ export function Code({
     }
   }, [generationType])

-  // State management
-  const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
   const [code, setCode] = useState<string>('')
   const [_lineCount, setLineCount] = useState(1)
   const [showTags, setShowTags] = useState(false)
@@ -98,34 +96,13 @@ export function Code({
   const toggleCollapsed = () => {
     setCollapsedValue(blockId, collapsedStateKey, !isCollapsed)
   }
-  // Use preview value when in preview mode, otherwise use store value or prop value
-  const value = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue

+  // Create refs to hold the handlers
+  const handleStreamStartRef = useRef<() => void>(() => {})
+  const handleGeneratedContentRef = useRef<(generatedCode: string) => void>(() => {})
+  const handleStreamChunkRef = useRef<(chunk: string) => void>(() => {})

   // AI Code Generation Hook
-  const handleStreamStart = () => {
-    setCode('')
-    // Optionally clear the store value too, though handleStreamChunk will update it
-    // setStoreValue('')
-  }
-
-  const handleGeneratedContent = (generatedCode: string) => {
-    setCode(generatedCode)
-    if (!isPreview && !disabled) {
-      setStoreValue(generatedCode)
-    }
-  }
-
-  // Handle streaming chunks directly into the editor
-  const handleStreamChunk = (chunk: string) => {
-    setCode((currentCode) => {
-      const newCode = currentCode + chunk
-      if (!isPreview && !disabled) {
-        setStoreValue(newCode)
-      }
-      return newCode
-    })
-  }
-
   const {
     isLoading: isAiLoading,
     isStreaming: isAiStreaming,
@@ -140,11 +117,48 @@ export function Code({
   } = useCodeGeneration({
     generationType: generationType,
     initialContext: code,
-    onGeneratedContent: handleGeneratedContent,
-    onStreamChunk: handleStreamChunk,
-    onStreamStart: handleStreamStart,
+    onGeneratedContent: (content: string) => handleGeneratedContentRef.current?.(content),
+    onStreamChunk: (chunk: string) => handleStreamChunkRef.current?.(chunk),
+    onStreamStart: () => handleStreamStartRef.current?.(),
   })

+  // State management - useSubBlockValue with explicit streaming control
+  const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId, false, {
+    debounceMs: 150,
+    isStreaming: isAiStreaming, // Use AI streaming state directly
+    onStreamingEnd: () => {
+      logger.debug('AI streaming ended, value persisted', { blockId, subBlockId })
+    },
+  })
+
+  // Use preview value when in preview mode, otherwise use store value or prop value
+  const value = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue
+
+  // Define the handlers now that we have access to setStoreValue
+  handleStreamStartRef.current = () => {
+    setCode('')
+    // Streaming state is now controlled by isAiStreaming
+  }
+
+  handleGeneratedContentRef.current = (generatedCode: string) => {
+    setCode(generatedCode)
+    if (!isPreview && !disabled) {
+      setStoreValue(generatedCode)
+      // Final value will be persisted when isAiStreaming becomes false
+    }
+  }
+
+  handleStreamChunkRef.current = (chunk: string) => {
+    setCode((currentCode) => {
+      const newCode = currentCode + chunk
+      if (!isPreview && !disabled) {
+        // Update the value - it won't be persisted until streaming ends
+        setStoreValue(newCode)
+      }
+      return newCode
+    })
+  }
+
   // Effects
   useEffect(() => {
     const valueString = value?.toString() ?? ''
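The ref indirection above breaks a declaration-order cycle: `useCodeGeneration` needs the stream handlers, the handlers need `setStoreValue`, and the new `useSubBlockValue` call needs `isAiStreaming`, which the hook returns. Stable wrapper callbacks are passed in first, and the real handler bodies are assigned to the refs afterwards. A generic sketch of the idea — `useLateBoundHandler` is a hypothetical helper, not part of the codebase:

```typescript
import { useRef } from 'react'

// Returns a stable `invoke` callback plus a ref; callers pass `invoke` where a
// handler is required and assign `ref.current` later, once the values the
// handler closes over actually exist.
function useLateBoundHandler<Args extends unknown[]>() {
  const ref = useRef<((...args: Args) => void) | null>(null)
  const invoke = useRef((...args: Args) => {
    ref.current?.(...args)
  }).current
  return { invoke, ref }
}

// const chunk = useLateBoundHandler<[string]>()
// useCodeGeneration({ onStreamChunk: chunk.invoke, ... })
// chunk.ref.current = (text) => setCode((c) => c + text)
```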
@@ -19,7 +19,6 @@ import {
   type OAuthProvider,
   parseProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'

 const logger = createLogger('OAuthRequiredModal')

@@ -157,42 +156,11 @@ export function OAuthRequiredModal({
     (scope) => !scope.includes('userinfo.email') && !scope.includes('userinfo.profile')
   )

-  const handleRedirectToSettings = () => {
-    try {
-      // Determine the appropriate serviceId and providerId
-      const providerId = getProviderIdFromServiceId(effectiveServiceId)
-
-      // Store information about the required connection
-      saveToStorage<string>('pending_service_id', effectiveServiceId)
-      saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-      saveToStorage<string>('pending_oauth_return_url', window.location.href)
-      saveToStorage<string>('pending_oauth_provider_id', providerId)
-      saveToStorage<boolean>('from_oauth_modal', true)
-
-      // Close the modal
-      onClose()
-
-      // Open the settings modal with the credentials tab
-      const event = new CustomEvent('open-settings', {
-        detail: { tab: 'credentials' },
-      })
-      window.dispatchEvent(event)
-    } catch (error) {
-      logger.error('Error redirecting to settings:', { error })
-    }
-  }
-
   const handleConnectDirectly = async () => {
     try {
       // Determine the appropriate serviceId and providerId
       const providerId = getProviderIdFromServiceId(effectiveServiceId)

-      // Store information about the required connection
-      saveToStorage<string>('pending_service_id', effectiveServiceId)
-      saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-      saveToStorage<string>('pending_oauth_return_url', window.location.href)
-      saveToStorage<string>('pending_oauth_provider_id', providerId)
-
       // Close the modal
       onClose()

@@ -258,14 +226,6 @@ export function OAuthRequiredModal({
         <Button type='button' onClick={handleConnectDirectly} className='sm:order-3'>
           Connect Now
         </Button>
-        <Button
-          type='button'
-          variant='secondary'
-          onClick={handleRedirectToSettings}
-          className='sm:order-2'
-        >
-          Go to Settings
-        </Button>
       </DialogFooter>
     </DialogContent>
   </Dialog>
@@ -21,31 +21,24 @@ import {
   type OAuthProvider,
   parseProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'
 import type { SubBlockConfig } from '@/blocks/types'
 import { useSubBlockValue } from '../../hooks/use-sub-block-value'
 import { OAuthRequiredModal } from './components/oauth-required-modal'

 const logger = createLogger('CredentialSelector')

 interface CredentialSelectorProps {
-  value: string
-  onChange: (value: string) => void
-  provider: OAuthProvider
-  requiredScopes?: string[]
-  label?: string
   blockId: string
   subBlock: SubBlockConfig
   disabled?: boolean
-  serviceId?: string
   isPreview?: boolean
   previewValue?: any | null
 }

 export function CredentialSelector({
-  value,
-  onChange,
-  provider,
-  requiredScopes = [],
-  label = 'Select credential',
   blockId,
   subBlock,
   disabled = false,
-  serviceId,
   isPreview = false,
   previewValue,
 }: CredentialSelectorProps) {
@@ -55,14 +48,22 @@ export function CredentialSelector({
   const [showOAuthModal, setShowOAuthModal] = useState(false)
   const [selectedId, setSelectedId] = useState('')

+  // Use collaborative state management via useSubBlockValue hook
+  const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
+
+  // Extract values from subBlock config
+  const provider = subBlock.provider as OAuthProvider
+  const requiredScopes = subBlock.requiredScopes || []
+  const label = subBlock.placeholder || 'Select credential'
+  const serviceId = subBlock.serviceId
+
+  // Get the effective value (preview or store value)
+  const effectiveValue = isPreview && previewValue !== undefined ? previewValue : storeValue
+
+  // Initialize selectedId with the effective value
   useEffect(() => {
-    if (isPreview && previewValue !== undefined) {
-      setSelectedId(previewValue || '')
-    } else {
-      setSelectedId(value)
-    }
-  }, [value, isPreview, previewValue])
+    setSelectedId(effectiveValue || '')
+  }, [effectiveValue])

   // Derive service and provider IDs using useMemo
   const effectiveServiceId = useMemo(() => {
@@ -85,7 +86,9 @@ export function CredentialSelector({
     // If we have a value but it's not in the credentials, reset it
     if (selectedId && !data.credentials.some((cred: Credential) => cred.id === selectedId)) {
       setSelectedId('')
-      onChange('')
+      if (!isPreview) {
+        setStoreValue('')
+      }
     }

     // Auto-select logic:
@@ -99,11 +102,15 @@ export function CredentialSelector({
       const defaultCred = data.credentials.find((cred: Credential) => cred.isDefault)
       if (defaultCred) {
         setSelectedId(defaultCred.id)
-        onChange(defaultCred.id)
+        if (!isPreview) {
+          setStoreValue(defaultCred.id)
+        }
       } else if (data.credentials.length === 1) {
         // If only one credential, select it
         setSelectedId(data.credentials[0].id)
-        onChange(data.credentials[0].id)
+        if (!isPreview) {
+          setStoreValue(data.credentials[0].id)
+        }
       }
     }
   }
@@ -112,7 +119,7 @@ export function CredentialSelector({
   } finally {
     setIsLoading(false)
   }
-  }, [effectiveProviderId, onChange, selectedId])
+  }, [effectiveProviderId, selectedId, isPreview, setStoreValue])

   // Fetch credentials on initial mount
   useEffect(() => {
@@ -121,11 +128,7 @@ export function CredentialSelector({
     // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [])

-  // Update local state when external value changes
-  useEffect(() => {
-    const currentValue = isPreview ? previewValue : value
-    setSelectedId(currentValue || '')
-  }, [value, isPreview, previewValue])
+  // This effect is no longer needed since we're using effectiveValue directly

   // Listen for visibility changes to update credentials when user returns from settings
   useEffect(() => {
@@ -158,19 +161,13 @@ export function CredentialSelector({
   const handleSelect = (credentialId: string) => {
     setSelectedId(credentialId)
     if (!isPreview) {
-      onChange(credentialId)
+      setStoreValue(credentialId)
     }
     setOpen(false)
   }

   // Handle adding a new credential
   const handleAddCredential = () => {
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', effectiveProviderId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
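The selector's public surface shrinks from a controlled `value`/`onChange` pair plus threaded OAuth config to just the block coordinates; everything else is derived from the `SubBlockConfig` and persisted through the collaborative store. A before/after sketch of the prop shapes, with the types assumed from the imports shown in the diff and the bodies elided:

```typescript
import type { OAuthProvider } from '@/lib/oauth'
import type { SubBlockConfig } from '@/blocks/types'

// Before: fully controlled by the parent, which had to thread OAuth config.
type OldCredentialSelectorProps = {
  value: string
  onChange: (value: string) => void
  provider: OAuthProvider
  requiredScopes?: string[]
  label?: string
  serviceId?: string
}

// After: the component reads provider/scopes/label/serviceId off the subBlock
// and writes selections straight to the collaborative store.
type NewCredentialSelectorProps = {
  blockId: string
  subBlock: SubBlockConfig
  disabled?: boolean
  isPreview?: boolean
  previewValue?: any | null
}
```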
@@ -1,6 +1,6 @@
 'use client'

-import { useCallback, useEffect, useMemo, useState } from 'react'
+import { useCallback, useEffect, useState } from 'react'
 import { Check, ChevronDown, FileText } from 'lucide-react'
 import { Button } from '@/components/ui/button'
 import {
@@ -13,7 +13,6 @@ import {
 } from '@/components/ui/command'
 import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
 import type { SubBlockConfig } from '@/blocks/types'
-import { useSubBlockStore } from '@/stores/workflows/subblock/store'
 import { useSubBlockValue } from '../../hooks/use-sub-block-value'

 interface DocumentData {
@@ -51,19 +50,16 @@ export function DocumentSelector({
   isPreview = false,
   previewValue,
 }: DocumentSelectorProps) {
-  const { getValue } = useSubBlockStore()
-
   const [documents, setDocuments] = useState<DocumentData[]>([])
   const [error, setError] = useState<string | null>(null)
   const [open, setOpen] = useState(false)
   const [selectedDocument, setSelectedDocument] = useState<DocumentData | null>(null)
-  const [initialFetchDone, setInitialFetchDone] = useState(false)

   // Use the proper hook to get the current value and setter
   const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)

-  // Get the knowledge base ID from the same block's knowledgeBaseId subblock - memoize to prevent re-renders
-  const knowledgeBaseId = useMemo(() => getValue(blockId, 'knowledgeBaseId'), [getValue, blockId])
+  // Get the knowledge base ID from the same block's knowledgeBaseId subblock
+  const [knowledgeBaseId] = useSubBlockValue(blockId, 'knowledgeBaseId')

   // Use preview value when in preview mode, otherwise use store value
   const value = isPreview ? previewValue : storeValue
@@ -73,7 +69,6 @@ export function DocumentSelector({
     if (!knowledgeBaseId) {
       setDocuments([])
       setError('No knowledge base selected')
-      setInitialFetchDone(true)
       return
     }

@@ -94,7 +89,6 @@ export function DocumentSelector({
     const fetchedDocuments = result.data || []
     setDocuments(fetchedDocuments)
-    setInitialFetchDone(true)
   } catch (err) {
     if ((err as Error).name === 'AbortError') return
     setError((err as Error).message)
@@ -138,16 +132,15 @@ export function DocumentSelector({
   useEffect(() => {
     setDocuments([])
     setSelectedDocument(null)
-    setInitialFetchDone(false)
     setError(null)
   }, [knowledgeBaseId])

-  // Fetch documents when knowledge base is available and we haven't fetched yet
+  // Fetch documents when knowledge base is available
   useEffect(() => {
-    if (knowledgeBaseId && !initialFetchDone && !isPreview) {
+    if (knowledgeBaseId && !isPreview) {
       fetchDocuments()
     }
-  }, [knowledgeBaseId, initialFetchDone, isPreview, fetchDocuments])
+  }, [knowledgeBaseId, isPreview, fetchDocuments])

   const formatDocumentName = (document: DocumentData) => {
     return document.filename
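Dropping `initialFetchDone` simplifies the flow: the reset effect clears local state whenever `knowledgeBaseId` changes, and the fetch effect simply re-runs for the new id. The `AbortError` early return is what keeps a superseded request from clobbering newer state. A condensed sketch of that pattern, assuming an abortable fetch as in the component; the endpoint path here is a placeholder, not the real route:

```typescript
import { useEffect, useState } from 'react'

function useDocumentsFor(knowledgeBaseId: string | null) {
  const [documents, setDocuments] = useState<unknown[]>([])
  const [error, setError] = useState<string | null>(null)

  useEffect(() => {
    if (!knowledgeBaseId) {
      setDocuments([])
      setError('No knowledge base selected')
      return
    }
    const controller = new AbortController()
    fetch(`/api/knowledge-bases/${knowledgeBaseId}/documents`, { signal: controller.signal })
      .then((res) => res.json())
      .then((result) => setDocuments(result.data || []))
      .catch((err) => {
        if ((err as Error).name === 'AbortError') return // superseded request
        setError((err as Error).message)
      })
    // Abort the in-flight request when the knowledge base changes again
    return () => controller.abort()
  }, [knowledgeBaseId])

  return { documents, error }
}
```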
@@ -19,7 +19,6 @@ import {
   getServiceIdFromScopes,
   type OAuthProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'
 import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal'

 export interface ConfluenceFileInfo {
@@ -355,15 +354,6 @@ export function ConfluenceFileSelector({

   // Handle adding a new credential
   const handleAddCredential = () => {
-    const effectiveServiceId = getServiceId()
-    const providerId = getProviderId()
-
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', providerId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
@@ -24,7 +24,6 @@ import {
   type OAuthProvider,
   parseProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'
 import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal'

 const logger = createLogger('GoogleDrivePicker')

@@ -79,6 +78,7 @@ export function GoogleDrivePicker({
   const [isLoading, setIsLoading] = useState(false)
   const [isLoadingSelectedFile, setIsLoadingSelectedFile] = useState(false)
   const [showOAuthModal, setShowOAuthModal] = useState(false)
+  const [credentialsLoaded, setCredentialsLoaded] = useState(false)
   const initialFetchRef = useRef(false)
   const [openPicker, _authResponse] = useDrivePicker()

@@ -97,6 +97,7 @@ export function GoogleDrivePicker({
   // Fetch available credentials for this provider
   const fetchCredentials = useCallback(async () => {
     setIsLoading(true)
+    setCredentialsLoaded(false)
     try {
       const providerId = getProviderId()
       const response = await fetch(`/api/auth/oauth/credentials?provider=${providerId}`)
@@ -128,6 +129,7 @@ export function GoogleDrivePicker({
       logger.error('Error fetching credentials:', { error })
     } finally {
       setIsLoading(false)
+      setCredentialsLoaded(true)
     }
   }, [provider, getProviderId, selectedCredentialId])

@@ -154,9 +156,16 @@ export function GoogleDrivePicker({
         return data.file
       }
     } else {
-      logger.error('Error fetching file by ID:', {
-        error: await response.text(),
-      })
+      const errorText = await response.text()
+      logger.error('Error fetching file by ID:', { error: errorText })
+
+      // If file not found or access denied, clear the selection
+      if (response.status === 404 || response.status === 403) {
+        logger.info('File not accessible, clearing selection')
+        setSelectedFileId('')
+        onChange('')
+        onFileInfoChange?.(null)
+      }
     }
     return null
   } catch (error) {
@@ -166,7 +175,7 @@ export function GoogleDrivePicker({
       setIsLoadingSelectedFile(false)
     }
   },
-  [selectedCredentialId, onFileInfoChange]
+  [selectedCredentialId, onChange, onFileInfoChange]
 )

 // Fetch credentials on initial mount
@@ -177,20 +186,61 @@ export function GoogleDrivePicker({
   }
 }, [fetchCredentials])

-// Fetch the selected file metadata once credentials are loaded or changed
-useEffect(() => {
-  // If we have a file ID selected and credentials are ready but we still don't have the file info, fetch it
-  if (value && selectedCredentialId && !selectedFile) {
-    fetchFileById(value)
-  }
-}, [value, selectedCredentialId, selectedFile, fetchFileById])
-
 // Keep internal selectedFileId in sync with the value prop
 useEffect(() => {
   if (value !== selectedFileId) {
+    const previousFileId = selectedFileId
     setSelectedFileId(value)
+    // Only clear selected file info if we had a different file before (not initial load)
+    if (previousFileId && previousFileId !== value && selectedFile) {
+      setSelectedFile(null)
+    }
   }
-}, [value])
+}, [value, selectedFileId, selectedFile])

+// Track previous credential ID to detect changes
+const prevCredentialIdRef = useRef<string>('')
+
+// Clear selected file when credentials are removed or changed
+useEffect(() => {
+  const prevCredentialId = prevCredentialIdRef.current
+  prevCredentialIdRef.current = selectedCredentialId
+
+  if (!selectedCredentialId) {
+    // No credentials - clear everything
+    if (selectedFile) {
+      setSelectedFile(null)
+      setSelectedFileId('')
+      onChange('')
+    }
+  } else if (prevCredentialId && prevCredentialId !== selectedCredentialId) {
+    // Credentials changed (not initial load) - clear file info to force refetch
+    if (selectedFile) {
+      setSelectedFile(null)
+    }
+  }
+}, [selectedCredentialId, selectedFile, onChange])
+
+// Fetch the selected file metadata once credentials are loaded or changed
+useEffect(() => {
+  // Only fetch if we have both a file ID and credentials, credentials are loaded, but no file info yet
+  if (
+    value &&
+    selectedCredentialId &&
+    credentialsLoaded &&
+    !selectedFile &&
+    !isLoadingSelectedFile
+  ) {
+    fetchFileById(value)
+  }
+}, [
+  value,
+  selectedCredentialId,
+  credentialsLoaded,
+  selectedFile,
+  isLoadingSelectedFile,
+  fetchFileById,
+])

 // Fetch the access token for the selected credential
 const fetchAccessToken = async (): Promise<string | null> => {
@@ -286,15 +336,6 @@ export function GoogleDrivePicker({

   // Handle adding a new credential
   const handleAddCredential = () => {
-    const effectiveServiceId = getServiceId()
-    const providerId = getProviderId()
-
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', providerId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
@@ -399,7 +440,7 @@ export function GoogleDrivePicker({
           {getFileIcon(selectedFile, 'sm')}
           <span className='truncate font-normal'>{selectedFile.name}</span>
         </div>
-      ) : selectedFileId && (isLoadingSelectedFile || !selectedCredentialId) ? (
+      ) : selectedFileId && isLoadingSelectedFile && selectedCredentialId ? (
         <div className='flex items-center gap-2'>
           <RefreshCw className='h-4 w-4 animate-spin' />
           <span className='text-muted-foreground'>Loading document...</span>
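The `prevCredentialIdRef` effect distinguishes an initial mount from a real credential switch: only when a previous id exists and differs is the cached file info cleared to force a refetch. A generic form of the "previous value" idiom, for reference — `usePrevious` is a common React pattern, not an export of this codebase; note the diff's version reads and reassigns the ref inside one effect so the comparison and the update cannot drift apart:

```typescript
import { useEffect, useRef } from 'react'

function usePrevious<T>(value: T): T | undefined {
  const ref = useRef<T | undefined>(undefined)
  useEffect(() => {
    ref.current = value // recorded after render, so reads see the prior value
  }, [value])
  return ref.current
}

// usePrevious(selectedCredentialId) is undefined on the first render (initial
// load) and only differs from the current id after a real credential switch.
```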
@@ -20,7 +20,6 @@ import {
   getServiceIdFromScopes,
   type OAuthProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'
 import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal'

 const logger = new Logger('jira_issue_selector')

@@ -420,15 +419,6 @@ export function JiraIssueSelector({

   // Handle adding a new credential
   const handleAddCredential = () => {
-    const effectiveServiceId = getServiceId()
-    const providerId = getProviderId()
-
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', providerId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
@@ -23,7 +23,6 @@ import {
   type OAuthProvider,
   parseProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'
 import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal'

 const logger = createLogger('MicrosoftFileSelector')

@@ -75,6 +74,7 @@ export function MicrosoftFileSelector({
   const [availableFiles, setAvailableFiles] = useState<MicrosoftFileInfo[]>([])
   const [searchQuery, setSearchQuery] = useState<string>('')
   const [showOAuthModal, setShowOAuthModal] = useState(false)
+  const [credentialsLoaded, setCredentialsLoaded] = useState(false)
   const initialFetchRef = useRef(false)

   // Determine the appropriate service ID based on provider and scopes
@@ -92,6 +92,7 @@ export function MicrosoftFileSelector({
   // Fetch available credentials for this provider
   const fetchCredentials = useCallback(async () => {
     setIsLoading(true)
+    setCredentialsLoaded(false)
     try {
       const providerId = getProviderId()
       const response = await fetch(`/api/auth/oauth/credentials?provider=${providerId}`)
@@ -123,6 +124,7 @@ export function MicrosoftFileSelector({
       logger.error('Error fetching credentials:', { error })
     } finally {
       setIsLoading(false)
+      setCredentialsLoaded(true)
     }
   }, [provider, getProviderId, selectedCredentialId])

@@ -183,9 +185,16 @@ export function MicrosoftFileSelector({
         return data.file
       }
     } else {
-      logger.error('Error fetching file by ID:', {
-        error: await response.text(),
-      })
+      const errorText = await response.text()
+      logger.error('Error fetching file by ID:', { error: errorText })
+
+      // If file not found or access denied, clear the selection
+      if (response.status === 404 || response.status === 403) {
+        logger.info('File not accessible, clearing selection')
+        setSelectedFileId('')
+        onChange('')
+        onFileInfoChange?.(null)
+      }
     }
     return null
   } catch (error) {
@@ -224,20 +233,61 @@ export function MicrosoftFileSelector({
     }
   }, [searchQuery, selectedCredentialId, fetchAvailableFiles])

-  // Fetch the selected file metadata once credentials are loaded or changed
-  useEffect(() => {
-    // If we have a file ID selected and credentials are ready but we still don't have the file info, fetch it
-    if (value && selectedCredentialId && !selectedFile) {
-      fetchFileById(value)
-    }
-  }, [value, selectedCredentialId, selectedFile, fetchFileById])
-
   // Keep internal selectedFileId in sync with the value prop
   useEffect(() => {
     if (value !== selectedFileId) {
+      const previousFileId = selectedFileId
       setSelectedFileId(value)
+      // Only clear selected file info if we had a different file before (not initial load)
+      if (previousFileId && previousFileId !== value && selectedFile) {
+        setSelectedFile(null)
+      }
     }
-  }, [value])
+  }, [value, selectedFileId, selectedFile])

+  // Track previous credential ID to detect changes
+  const prevCredentialIdRef = useRef<string>('')
+
+  // Clear selected file when credentials are removed or changed
+  useEffect(() => {
+    const prevCredentialId = prevCredentialIdRef.current
+    prevCredentialIdRef.current = selectedCredentialId
+
+    if (!selectedCredentialId) {
+      // No credentials - clear everything
+      if (selectedFile) {
+        setSelectedFile(null)
+        setSelectedFileId('')
+        onChange('')
+      }
+    } else if (prevCredentialId && prevCredentialId !== selectedCredentialId) {
+      // Credentials changed (not initial load) - clear file info to force refetch
+      if (selectedFile) {
+        setSelectedFile(null)
+      }
+    }
+  }, [selectedCredentialId, selectedFile, onChange])
+
+  // Fetch the selected file metadata once credentials are loaded or changed
+  useEffect(() => {
+    // Only fetch if we have both a file ID and credentials, credentials are loaded, but no file info yet
+    if (
+      value &&
+      selectedCredentialId &&
+      credentialsLoaded &&
+      !selectedFile &&
+      !isLoadingSelectedFile
+    ) {
+      fetchFileById(value)
+    }
+  }, [
+    value,
+    selectedCredentialId,
+    credentialsLoaded,
+    selectedFile,
+    isLoadingSelectedFile,
+    fetchFileById,
+  ])

   // Handle selecting a file from the available files
   const handleFileSelect = (file: MicrosoftFileInfo) => {
@@ -251,15 +301,6 @@ export function MicrosoftFileSelector({

   // Handle adding a new credential
   const handleAddCredential = () => {
-    const effectiveServiceId = getServiceId()
-    const providerId = getProviderId()
-
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', providerId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
@@ -381,7 +422,7 @@ export function MicrosoftFileSelector({
           {getFileIcon(selectedFile, 'sm')}
           <span className='truncate font-normal'>{selectedFile.name}</span>
         </div>
-      ) : selectedFileId && (isLoadingSelectedFile || !selectedCredentialId) ? (
+      ) : selectedFileId && isLoadingSelectedFile && selectedCredentialId ? (
        <div className='flex items-center gap-2'>
          <RefreshCw className='h-4 w-4 animate-spin' />
          <span className='text-muted-foreground'>Loading document...</span>
@@ -20,7 +20,6 @@ import {
   getServiceIdFromScopes,
   type OAuthProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'
 import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal'

 const logger = new Logger('TeamsMessageSelector')

@@ -399,15 +398,6 @@ export function TeamsMessageSelector({

   // Handle adding a new credential
   const handleAddCredential = () => {
-    const effectiveServiceId = getServiceId()
-    const providerId = getProviderId()
-
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', providerId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
@@ -16,7 +16,6 @@ import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
 import { createLogger } from '@/lib/logs/console-logger'
 import { type Credential, getProviderIdFromServiceId, getServiceIdFromScopes } from '@/lib/oauth'
 import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal'
-import { saveToStorage } from '@/stores/workflows/persistence'

 const logger = createLogger('FolderSelector')

@@ -274,15 +273,6 @@ export function FolderSelector({

   // Handle adding a new credential
   const handleAddCredential = () => {
-    const effectiveServiceId = getServiceId()
-    const providerId = getProviderId()
-
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', providerId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
@@ -20,7 +20,6 @@ import {
   getServiceIdFromScopes,
   type OAuthProvider,
 } from '@/lib/oauth'
-import { saveToStorage } from '@/stores/workflows/persistence'
 import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal'

 const logger = new Logger('jira_project_selector')

@@ -371,15 +370,6 @@ export function JiraProjectSelector({

   // Handle adding a new credential
   const handleAddCredential = () => {
-    const effectiveServiceId = getServiceId()
-    const providerId = getProviderId()
-
-    // Store information about the required connection
-    saveToStorage<string>('pending_service_id', effectiveServiceId)
-    saveToStorage<string[]>('pending_oauth_scopes', requiredScopes)
-    saveToStorage<string>('pending_oauth_return_url', window.location.href)
-    saveToStorage<string>('pending_oauth_provider_id', providerId)
-
     // Show the OAuth modal
     setShowOAuthModal(true)
     setOpen(false)
@@ -50,7 +50,11 @@ export function ResponseFormat({
   isPreview = false,
   previewValue,
 }: ResponseFormatProps) {
-  const [storeValue, setStoreValue] = useSubBlockValue<JSONProperty[]>(blockId, subBlockId)
+  // useSubBlockValue now includes debouncing by default
+  const [storeValue, setStoreValue] = useSubBlockValue<JSONProperty[]>(blockId, subBlockId, false, {
+    debounceMs: 200, // Slightly longer debounce for complex structures
+  })

   const [showPreview, setShowPreview] = useState(false)

   const value = isPreview ? previewValue : storeValue
@@ -290,7 +294,13 @@ export function ResponseFormat({
   {showPreview && (
     <div className='rounded border bg-muted/30 p-2'>
       <pre className='max-h-32 overflow-auto text-xs'>
-        {JSON.stringify(generateJSON(properties), null, 2)}
+        {(() => {
+          try {
+            return JSON.stringify(generateJSON(properties), null, 2)
+          } catch (error) {
+            return `Error generating preview: ${error instanceof Error ? error.message : 'Unknown error'}`
+          }
+        })()}
       </pre>
     </div>
   )}
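`debounceMs: 200` means rapid edits to the schema builder produce one store write per pause rather than one per keystroke. All the option implies is a trailing-edge debounce; a minimal sketch, with `persist` standing in for the real store write:

```typescript
function debounce<T>(persist: (value: T) => void, ms: number): (value: T) => void {
  let timer: ReturnType<typeof setTimeout> | undefined
  return (value: T) => {
    if (timer) clearTimeout(timer)
    timer = setTimeout(() => persist(value), ms) // only the last write in the window lands
  }
}

// const setStoreValue = debounce(writeToStore, 200)
```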
@@ -26,10 +26,10 @@ interface ScheduleConfigProps {

 export function ScheduleConfig({
   blockId,
-  subBlockId,
+  subBlockId: _subBlockId,
   isConnecting,
   isPreview = false,
-  previewValue,
+  previewValue: _previewValue,
   disabled = false,
 }: ScheduleConfigProps) {
   const [error, setError] = useState<string | null>(null)
@@ -56,13 +56,7 @@ export function ScheduleConfig({

   // Get the startWorkflow value to determine if scheduling is enabled
   // and expose the setter so we can update it
-  const [startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')
-  const isScheduleEnabled = startWorkflow === 'schedule'
-
-  const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
-
-  // Use preview value when in preview mode, otherwise use store value
-  const value = isPreview ? previewValue : storeValue
+  const [_startWorkflow, setStartWorkflow] = useSubBlockValue(blockId, 'startWorkflow')

   // Function to check if schedule exists in the database
   const checkSchedule = async () => {
@@ -110,10 +104,17 @@ export function ScheduleConfig({

   // Check for schedule on mount and when relevant dependencies change
   useEffect(() => {
-    // Always check for schedules regardless of the UI setting
-    // This ensures we detect schedules even when the UI is set to manual
-    checkSchedule()
-  }, [workflowId, scheduleType, isModalOpen, refreshCounter])
+    // Only check for schedules when workflowId changes or modal opens
+    // Avoid checking on every scheduleType change to prevent excessive API calls
+    if (workflowId && (isModalOpen || refreshCounter > 0)) {
+      checkSchedule()
+    }
+
+    // Cleanup function to reset loading state
+    return () => {
+      setIsLoading(false)
+    }
+  }, [workflowId, isModalOpen, refreshCounter])

   // Format the schedule information for display
   const getScheduleInfo = () => {
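The effect used to call `checkSchedule()` unconditionally on every `scheduleType` change; now the request only fires when its result can actually be observed. How the new gate behaves, worked through with hypothetical values:

```typescript
const shouldCheckSchedule = (workflowId: string, isModalOpen: boolean, refreshCounter: number) =>
  Boolean(workflowId) && (isModalOpen || refreshCounter > 0)

// shouldCheckSchedule('wf1', false, 0) -> false  (plain mount: no request)
// shouldCheckSchedule('wf1', true, 0)  -> true   (modal opened)
// shouldCheckSchedule('wf1', false, 2) -> true   (a refresh was requested)
// scheduleType edits no longer trigger the effect at all - it left the deps.
```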
@@ -0,0 +1,213 @@
+import { useCallback, useEffect, useState } from 'react'
+import { Check, ChevronDown, ExternalLink, Plus, RefreshCw } from 'lucide-react'
+import { Button } from '@/components/ui/button'
+import {
+  Command,
+  CommandEmpty,
+  CommandGroup,
+  CommandItem,
+  CommandList,
+} from '@/components/ui/command'
+import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
+import { createLogger } from '@/lib/logs/console-logger'
+import {
+  type Credential,
+  OAUTH_PROVIDERS,
+  type OAuthProvider,
+  type OAuthService,
+  parseProvider,
+} from '@/lib/oauth'
+import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal'
+
+const logger = createLogger('ToolCredentialSelector')
+
+// Helper functions for provider icons and names
+const getProviderIcon = (providerName: OAuthProvider) => {
+  const { baseProvider } = parseProvider(providerName)
+  const baseProviderConfig = OAUTH_PROVIDERS[baseProvider]
+
+  if (!baseProviderConfig) {
+    return <ExternalLink className='h-4 w-4' />
+  }
+  // Always use the base provider icon for a more consistent UI
+  return baseProviderConfig.icon({ className: 'h-4 w-4' })
+}
+
+const getProviderName = (providerName: OAuthProvider) => {
+  const { baseProvider } = parseProvider(providerName)
+  const baseProviderConfig = OAUTH_PROVIDERS[baseProvider]
+
+  if (baseProviderConfig) {
+    return baseProviderConfig.name
+  }
+
+  // Fallback: capitalize the provider name
+  return providerName
+    .split('-')
+    .map((part) => part.charAt(0).toUpperCase() + part.slice(1))
+    .join(' ')
+}
+
+interface ToolCredentialSelectorProps {
+  value: string
+  onChange: (value: string) => void
+  provider: OAuthProvider
+  requiredScopes?: string[]
+  label?: string
+  serviceId?: OAuthService
+  disabled?: boolean
+}
+
+export function ToolCredentialSelector({
+  value,
+  onChange,
+  provider,
+  requiredScopes = [],
+  label = 'Select account',
+  serviceId,
+  disabled = false,
+}: ToolCredentialSelectorProps) {
+  const [open, setOpen] = useState(false)
+  const [credentials, setCredentials] = useState<Credential[]>([])
+  const [isLoading, setIsLoading] = useState(false)
+  const [showOAuthModal, setShowOAuthModal] = useState(false)
+  const [selectedId, setSelectedId] = useState('')
+
+  // Update selected ID when value changes
+  useEffect(() => {
+    setSelectedId(value)
+  }, [value])
+
+  const fetchCredentials = useCallback(async () => {
+    setIsLoading(true)
+    try {
+      const response = await fetch(`/api/auth/oauth/credentials?provider=${provider}`)
+      if (response.ok) {
+        const data = await response.json()
+        setCredentials(data.credentials || [])
+
+        // If we have a selected value but it's not in the credentials list, clear it
+        if (value && !data.credentials?.some((cred: Credential) => cred.id === value)) {
+          onChange('')
+        }
+      } else {
+        logger.error('Error fetching credentials:', { error: await response.text() })
+        setCredentials([])
+      }
+    } catch (error) {
+      logger.error('Error fetching credentials:', { error })
+      setCredentials([])
+    } finally {
+      setIsLoading(false)
+    }
+  }, [provider, value, onChange])
+
+  // Fetch credentials on mount and when provider changes
+  useEffect(() => {
+    fetchCredentials()
+  }, [fetchCredentials])
+
+  const handleSelect = (credentialId: string) => {
+    setSelectedId(credentialId)
+    onChange(credentialId)
+    setOpen(false)
+  }
+
+  const handleOAuthClose = () => {
+    setShowOAuthModal(false)
+    // Refetch credentials to include any new ones
+    fetchCredentials()
+  }
+
+  const selectedCredential = credentials.find((cred) => cred.id === selectedId)
+
+  return (
+    <>
+      <Popover open={open} onOpenChange={setOpen}>
+        <PopoverTrigger asChild>
+          <Button
+            variant='outline'
+            role='combobox'
+            aria-expanded={open}
+            className='w-full justify-between'
+            disabled={disabled}
+          >
+            {selectedCredential ? (
+              <div className='flex items-center gap-2 overflow-hidden'>
+                {getProviderIcon(provider)}
+                <span className='truncate font-normal'>{selectedCredential.name}</span>
+              </div>
+            ) : (
+              <div className='flex items-center gap-2'>
+                {getProviderIcon(provider)}
+                <span className='text-muted-foreground'>{label}</span>
+              </div>
+            )}
+            <ChevronDown className='ml-2 h-4 w-4 shrink-0 opacity-50' />
+          </Button>
+        </PopoverTrigger>
+        <PopoverContent className='w-[300px] p-0' align='start'>
+          <Command>
+            <CommandList>
+              <CommandEmpty>
+                {isLoading ? (
+                  <div className='flex items-center justify-center p-4'>
+                    <RefreshCw className='h-4 w-4 animate-spin' />
+                    <span className='ml-2'>Loading...</span>
+                  </div>
+                ) : credentials.length === 0 ? (
+                  <div className='p-4 text-center'>
+                    <p className='font-medium text-sm'>No accounts connected.</p>
+                    <p className='text-muted-foreground text-xs'>
+                      Connect a {getProviderName(provider)} account to continue.
+                    </p>
+                  </div>
+                ) : (
+                  <div className='p-4 text-center'>
+                    <p className='font-medium text-sm'>No accounts found.</p>
+                  </div>
+                )}
+              </CommandEmpty>
+
+              {credentials.length > 0 && (
+                <CommandGroup>
+                  {credentials.map((credential) => (
+                    <CommandItem
+                      key={credential.id}
+                      value={credential.id}
+                      onSelect={() => handleSelect(credential.id)}
+                    >
+                      <div className='flex items-center gap-2'>
+                        {getProviderIcon(credential.provider)}
+                        <span className='font-normal'>{credential.name}</span>
+                      </div>
+                      {credential.id === selectedId && <Check className='ml-auto h-4 w-4' />}
+                    </CommandItem>
+                  ))}
+                </CommandGroup>
+              )}
+
+              <CommandGroup>
+                <CommandItem onSelect={() => setShowOAuthModal(true)}>
+                  <div className='flex items-center gap-2'>
+                    <Plus className='h-4 w-4' />
+                    <span className='font-normal'>Connect {getProviderName(provider)} account</span>
+                  </div>
+                </CommandItem>
+              </CommandGroup>
+            </CommandList>
+          </Command>
+        </PopoverContent>
+      </Popover>
+
+      <OAuthRequiredModal
+        isOpen={showOAuthModal}
+        onClose={handleOAuthClose}
+        provider={provider}
+        toolName={label}
+        requiredScopes={requiredScopes}
+        serviceId={serviceId}
+      />
+    </>
+  )
+}
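Unlike `CredentialSelector`, which now writes to the collaborative store itself, this new selector stays controlled via `value`/`onChange` — tool credentials live inside the tool's params array rather than in a sub-block of their own. A hypothetical usage sketch; `updateToolParam` is an assumption standing in for the real params write:

```typescript
import { ToolCredentialSelector } from './components/tool-credential-selector'
import type { OAuthProvider } from '@/lib/oauth'

function ExampleToolCredentialRow({
  toolIndex,
  provider,
}: {
  toolIndex: number
  provider: OAuthProvider
}) {
  // Stand-in for the real store write that persists tool params
  const updateToolParam = (index: number, key: string, value: string) => {
    console.log('persist', index, key, value)
  }

  return (
    <ToolCredentialSelector
      value=''
      onChange={(credentialId) => updateToolParam(toolIndex, 'credential', credentialId)}
      provider={provider}
      requiredScopes={[]}
      label='Select account'
    />
  )
}
```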
@@ -22,10 +22,10 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { getTool } from '@/tools/utils'
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
import { ChannelSelectorInput } from '../channel-selector/channel-selector-input'
import { CredentialSelector } from '../credential-selector/credential-selector'
import { ShortInput } from '../short-input'
import { type CustomTool, CustomToolModal } from './components/custom-tool-modal/custom-tool-modal'
import { ToolCommand } from './components/tool-command/tool-command'
import { ToolCredentialSelector } from './components/tool-credential-selector'

interface ToolInputProps {
  blockId: string
@@ -347,6 +347,8 @@ export function ToolInput({
  const [customToolModalOpen, setCustomToolModalOpen] = useState(false)
  const [editingToolIndex, setEditingToolIndex] = useState<number | null>(null)
  const [searchQuery, setSearchQuery] = useState('')
  const [draggedIndex, setDraggedIndex] = useState<number | null>(null)
  const [dragOverIndex, setDragOverIndex] = useState<number | null>(null)
  const isWide = useWorkflowStore((state) => state.blocks[blockId]?.isWide)
  const customTools = useCustomToolsStore((state) => state.getAllTools())
  const subBlockStore = useSubBlockStore()
@@ -668,6 +670,46 @@ export function ToolInput({
    )
  }

  const handleDragStart = (e: React.DragEvent, index: number) => {
    if (isPreview || disabled) return
    setDraggedIndex(index)
    e.dataTransfer.effectAllowed = 'move'
    e.dataTransfer.setData('text/html', '')
  }

  const handleDragOver = (e: React.DragEvent, index: number) => {
    if (isPreview || disabled || draggedIndex === null) return
    e.preventDefault()
    e.dataTransfer.dropEffect = 'move'
    setDragOverIndex(index)
  }

  const handleDragEnd = () => {
    setDraggedIndex(null)
    setDragOverIndex(null)
  }

  const handleDrop = (e: React.DragEvent, dropIndex: number) => {
    if (isPreview || disabled || draggedIndex === null || draggedIndex === dropIndex) return
    e.preventDefault()

    const newTools = [...selectedTools]
    const draggedTool = newTools[draggedIndex]

    newTools.splice(draggedIndex, 1)

    if (dropIndex === selectedTools.length) {
      newTools.push(draggedTool)
    } else {
      const adjustedDropIndex = draggedIndex < dropIndex ? dropIndex - 1 : dropIndex
      newTools.splice(adjustedDropIndex, 0, draggedTool)
    }

    setStoreValue(newTools)
    setDraggedIndex(null)
    setDragOverIndex(null)
  }
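The splice arithmetic in `handleDrop` is the one subtle step: once the dragged item is removed, every index after it shifts left by one. A standalone sketch of the same logic (the `reorder` helper and sample values are illustrative, not part of this diff):

```typescript
// Standalone sketch of the drop logic above: remove the dragged item first,
// then shift the drop index left by one when the item came from before it.
function reorder<T>(items: T[], draggedIndex: number, dropIndex: number): T[] {
  const next = [...items]
  const [dragged] = next.splice(draggedIndex, 1)
  if (dropIndex === items.length) {
    next.push(dragged) // dropped on the trailing drop zone
  } else {
    const adjusted = draggedIndex < dropIndex ? dropIndex - 1 : dropIndex
    next.splice(adjusted, 0, dragged)
  }
  return next
}

console.log(reorder(['a', 'b', 'c'], 0, 2)) // ['b', 'a', 'c'] — 'a' lands after 'b'
console.log(reorder(['a', 'b', 'c'], 0, 3)) // ['b', 'c', 'a'] — trailing drop zone
```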

  const IconComponent = ({ icon: Icon, className }: { icon: any; className?: string }) => {
    if (!Icon) return null
    return <Icon className={className} />
@@ -827,9 +869,34 @@ export function ToolInput({
    return (
      <div
        key={`${tool.type}-${toolIndex}`}
        className={cn('group flex flex-col', isWide ? 'w-[calc(50%-0.25rem)]' : 'w-full')}
        className={cn(
          'group relative flex flex-col transition-all duration-200 ease-in-out',
          isWide ? 'w-[calc(50%-0.25rem)]' : 'w-full',
          draggedIndex === toolIndex ? 'scale-95 opacity-40' : '',
          dragOverIndex === toolIndex && draggedIndex !== toolIndex && draggedIndex !== null
            ? 'translate-y-1 transform'
            : '',
          selectedTools.length > 1 && !isPreview && !disabled
            ? 'cursor-grab active:cursor-grabbing'
            : ''
        )}
        draggable={!isPreview && !disabled}
        onDragStart={(e) => handleDragStart(e, toolIndex)}
        onDragOver={(e) => handleDragOver(e, toolIndex)}
        onDragEnd={handleDragEnd}
        onDrop={(e) => handleDrop(e, toolIndex)}
      >
        <div className='flex flex-col overflow-visible rounded-md border bg-card'>
        {/* Subtle drop indicator - use border highlight instead of separate line */}
        <div
          className={cn(
            'flex flex-col overflow-visible rounded-md border bg-card',
            dragOverIndex === toolIndex &&
              draggedIndex !== toolIndex &&
              draggedIndex !== null
              ? 'border-t-2 border-t-muted-foreground/40'
              : ''
          )}
        >
          <div
            className={cn(
              'flex items-center justify-between bg-accent/50 p-2',
@@ -993,13 +1060,14 @@ export function ToolInput({
                      <div className='font-medium text-muted-foreground text-xs'>
                        Account
                      </div>
                      <CredentialSelector
                      <ToolCredentialSelector
                        value={tool.params.credential || ''}
                        onChange={(value) => handleCredentialChange(toolIndex, value)}
                        provider={oauthConfig.provider as OAuthProvider}
                        requiredScopes={oauthConfig.additionalScopes || []}
                        label={`Select ${oauthConfig.provider} account`}
                        serviceId={oauthConfig.provider}
                        disabled={disabled}
                      />
                    </div>
                  )
@@ -1091,6 +1159,20 @@ export function ToolInput({
          )
        })}

        {/* Drop zone for the end of the list */}
        {selectedTools.length > 0 && draggedIndex !== null && (
          <div
            className={cn(
              'h-2 w-full rounded transition-all duration-200 ease-in-out',
              dragOverIndex === selectedTools.length
                ? 'border-b-2 border-b-muted-foreground/40'
                : ''
            )}
            onDragOver={(e) => handleDragOver(e, selectedTools.length)}
            onDrop={(e) => handleDrop(e, selectedTools.length)}
          />
        )}

        <Popover open={open} onOpenChange={setOpen}>
          <PopoverTrigger asChild>
            <Button

@@ -16,7 +16,7 @@ import { createLogger } from '@/lib/logs/console-logger'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
import { CredentialSelector } from '../credential-selector/credential-selector'
import { ToolCredentialSelector } from '../tool-input/components/tool-credential-selector'
import { WebhookModal } from './components/webhook-modal'

const logger = createLogger('WebhookConfig')
@@ -564,7 +564,7 @@ export function WebhookConfig({
      {error && <div className='mb-2 text-red-500 text-sm dark:text-red-400'>{error}</div>}

      <div className='mb-3'>
        <CredentialSelector
        <ToolCredentialSelector
          value={gmailCredentialId}
          onChange={handleCredentialChange}
          provider='google-email'

@@ -1,11 +1,15 @@
import { useCallback, useEffect, useRef } from 'react'
import { isEqual } from 'lodash'
import { createLogger } from '@/lib/logs/console-logger'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { getProviderFromModel } from '@/providers/utils'
import { useGeneralStore } from '@/stores/settings/general/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('SubBlockValue')

// Helper function to dispatch collaborative subblock updates
const dispatchSubblockUpdate = (blockId: string, subBlockId: string, value: any) => {
  const event = new CustomEvent('update-subblock-value', {
@@ -154,20 +158,31 @@ function storeApiKeyValue(
  }
}

interface UseSubBlockValueOptions {
  debounceMs?: number
  isStreaming?: boolean // Explicit streaming state
  onStreamingEnd?: () => void
}

/**
 * Custom hook to get and set values for a sub-block in a workflow.
 * Handles complex object values properly by using deep equality comparison.
 * Includes automatic debouncing and explicit streaming mode for AI generation.
 *
 * @param blockId The ID of the block containing the sub-block
 * @param subBlockId The ID of the sub-block
 * @param triggerWorkflowUpdate Whether to trigger a workflow update when the value changes
 * @returns A tuple containing the current value and a setter function
 * @param options Configuration for debouncing and streaming behavior
 * @returns A tuple containing the current value and setter function
 */
export function useSubBlockValue<T = any>(
  blockId: string,
  subBlockId: string,
  triggerWorkflowUpdate = false
  triggerWorkflowUpdate = false,
  options?: UseSubBlockValueOptions
): readonly [T | null, (value: T) => void] {
  const { debounceMs = 150, isStreaming = false, onStreamingEnd } = options || {}

  const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()

  const blockType = useWorkflowStore(
@@ -187,6 +202,12 @@ export function useSubBlockValue<T = any>(
  // Previous model reference for detecting model changes
  const prevModelRef = useRef<string | null>(null)

  // Debouncing refs
  const debounceTimerRef = useRef<NodeJS.Timeout | null>(null)
  const lastEmittedValueRef = useRef<T | null>(null)
  const streamingValueRef = useRef<T | null>(null)
  const wasStreamingRef = useRef<boolean>(false)

  // Get value from subblock store - always call this hook unconditionally
  const storeValue = useSubBlockStore(
    useCallback((state) => state.getValue(blockId, subBlockId), [blockId, subBlockId])
@@ -211,6 +232,36 @@ export function useSubBlockValue<T = any>(
  // Compute the modelValue based on block type
  const modelValue = isProviderBasedBlock ? (modelSubBlockValue as string) : null

  // Cleanup timer on unmount
  useEffect(() => {
    return () => {
      if (debounceTimerRef.current) {
        clearTimeout(debounceTimerRef.current)
      }
    }
  }, [])

  // Emit the value to socket/DB
  const emitValue = useCallback(
    (value: T) => {
      collaborativeSetSubblockValue(blockId, subBlockId, value)
      lastEmittedValueRef.current = value
    },
    [blockId, subBlockId, collaborativeSetSubblockValue]
  )

  // Handle streaming mode changes
  useEffect(() => {
    // If we just exited streaming mode, emit the final value
    if (wasStreamingRef.current && !isStreaming && streamingValueRef.current !== null) {
      logger.debug('Streaming ended, persisting final value', { blockId, subBlockId })
      emitValue(streamingValueRef.current)
      streamingValueRef.current = null
      onStreamingEnd?.()
    }
    wasStreamingRef.current = isStreaming
  }, [isStreaming, blockId, subBlockId, emitValue, onStreamingEnd])

  // Hook to set a value in the subblock store
  const setValue = useCallback(
    (newValue: T) => {
@@ -218,6 +269,22 @@ export function useSubBlockValue<T = any>(
      if (!isEqual(valueRef.current, newValue)) {
        valueRef.current = newValue

        // Always update local store immediately for UI responsiveness
        useSubBlockStore.setState((state) => ({
          workflowValues: {
            ...state.workflowValues,
            [useWorkflowRegistry.getState().activeWorkflowId || '']: {
              ...state.workflowValues[useWorkflowRegistry.getState().activeWorkflowId || ''],
              [blockId]: {
                ...state.workflowValues[useWorkflowRegistry.getState().activeWorkflowId || '']?.[
                  blockId
                ],
                [subBlockId]: newValue,
              },
            },
          },
        }))

        // Ensure we're passing the actual value, not a reference that might change
        const valueCopy =
          newValue === null
@@ -231,8 +298,27 @@ export function useSubBlockValue<T = any>(
          storeApiKeyValue(blockId, blockType, modelValue, newValue, storeValue)
        }

        // Use collaborative function which handles both local store update and socket emission
        collaborativeSetSubblockValue(blockId, subBlockId, valueCopy)
        // Clear any existing debounce timer
        if (debounceTimerRef.current) {
          clearTimeout(debounceTimerRef.current)
          debounceTimerRef.current = null
        }

        // If streaming, just store the value without emitting
        if (isStreaming) {
          streamingValueRef.current = valueCopy
        } else {
          // Detect large changes for extended debounce
          const isLargeChange = detectLargeChange(lastEmittedValueRef.current, valueCopy)
          const effectiveDebounceMs = isLargeChange ? debounceMs * 2 : debounceMs

          // Debounce the socket emission
          debounceTimerRef.current = setTimeout(() => {
            if (valueRef.current !== null && valueRef.current !== lastEmittedValueRef.current) {
              emitValue(valueCopy)
            }
          }, effectiveDebounceMs)
        }

        if (triggerWorkflowUpdate) {
          useWorkflowStore.getState().triggerUpdate()
@@ -247,7 +333,9 @@ export function useSubBlockValue<T = any>(
      storeValue,
      triggerWorkflowUpdate,
      modelValue,
      collaborativeSetSubblockValue,
      isStreaming,
      debounceMs,
      emitValue,
    ]
  )

@@ -320,5 +408,29 @@ export function useSubBlockValue<T = any>(
    }
  }, [storeValue, initialValue])

  // Return the latest store value (falling back to the initial value) alongside the setter
  return [storeValue !== undefined ? storeValue : initialValue, setValue] as const
}
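For context, this is roughly how a caller would opt into the new debounce/streaming options — the component and field names here are hypothetical, not taken from the diff:

```tsx
// Hypothetical consumer: a prompt editor receiving AI-generated text.
// While `isStreaming` is true the hook only buffers locally; the final
// value is emitted to collaborators once streaming ends.
function PromptField({ blockId, isStreaming }: { blockId: string; isStreaming: boolean }) {
  const [prompt, setPrompt] = useSubBlockValue<string>(blockId, 'systemPrompt', false, {
    debounceMs: 300, // batch keystrokes into one socket emission
    isStreaming, // buffer updates while the model is generating
    onStreamingEnd: () => console.log('final prompt persisted'),
  })

  return <textarea value={prompt ?? ''} onChange={(e) => setPrompt(e.target.value)} />
}
```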

// Helper function to detect large changes
function detectLargeChange(oldValue: any, newValue: any): boolean {
  // Handle null/undefined
  if (oldValue == null && newValue == null) return false
  if (oldValue == null || newValue == null) return true

  // For strings, check if it's a large paste or deletion
  if (typeof oldValue === 'string' && typeof newValue === 'string') {
    const sizeDiff = Math.abs(newValue.length - oldValue.length)
    // Consider it a large change if more than 50 characters changed at once
    return sizeDiff > 50
  }

  // For arrays, check length difference
  if (Array.isArray(oldValue) && Array.isArray(newValue)) {
    const sizeDiff = Math.abs(newValue.length - oldValue.length)
    return sizeDiff > 5
  }

  // For other types, always treat as small change
  return false
}
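A few concrete cases, read straight off the thresholds above (illustrative calls):

```typescript
// A "large" change doubles the effective debounce window (150ms -> 300ms by default).
detectLargeChange('hello', 'hello world')             // false (string delta 6 <= 50)
detectLargeChange('', 'x'.repeat(80))                 // true  (large paste)
detectLargeChange(['a'], ['a', 'b'])                  // false (array delta 1 <= 5)
detectLargeChange([], ['a', 'b', 'c', 'd', 'e', 'f']) // true  (array delta 6 > 5)
```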

@@ -297,27 +297,11 @@ export function SubBlock({
      case 'oauth-input':
        return (
          <CredentialSelector
            value={
              isPreview ? previewValue || '' : typeof config.value === 'string' ? config.value : ''
            }
            onChange={(value) => {
              // Only allow changes in non-preview mode and when not disabled
              if (!isPreview && !disabled) {
                const event = new CustomEvent('update-subblock-value', {
                  detail: {
                    blockId,
                    subBlockId: config.id,
                    value,
                  },
                })
                window.dispatchEvent(event)
              }
            }}
            provider={config.provider as any}
            requiredScopes={config.requiredScopes || []}
            label={config.placeholder || 'Select a credential'}
            serviceId={config.serviceId}
            blockId={blockId}
            subBlock={config}
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
          />
        )
      case 'file-selector':

@@ -1,5 +1,6 @@
import { useEffect, useRef, useState } from 'react'
import { BookOpen, Code, Info, RectangleHorizontal, RectangleVertical } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
@@ -83,6 +84,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
  const isActiveBlock = useExecutionStore((state) => state.activeBlockIds.has(id))
  const isActive = dataIsActive || isActiveBlock

  // Get the current workflow ID from URL params instead of global state
  // This prevents race conditions when switching workflows rapidly
  const params = useParams()
  const currentWorkflowId = params.workflowId as string

  const reactivateSchedule = async (scheduleId: string) => {
    try {
      const response = await fetch(`/api/schedules/${scheduleId}`, {
@@ -94,7 +100,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
      })

      if (response.ok) {
        fetchScheduleInfo()
        // Use the current workflow ID from params instead of global state
        if (currentWorkflowId) {
          fetchScheduleInfo(currentWorkflowId)
        }
      } else {
        console.error('Failed to reactivate schedule')
      }
@@ -103,11 +112,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
    }
  }

  const fetchScheduleInfo = async () => {
  const fetchScheduleInfo = async (workflowId: string) => {
    if (!workflowId) return

    try {
      setIsLoadingScheduleInfo(true)
      const workflowId = useWorkflowRegistry.getState().activeWorkflowId
      if (!workflowId) return

      const response = await fetch(`/api/schedules?workflowId=${workflowId}&mode=schedule`, {
        cache: 'no-store',
@@ -176,12 +185,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
  }

  useEffect(() => {
    if (type === 'starter') {
      fetchScheduleInfo()
    if (type === 'starter' && currentWorkflowId) {
      fetchScheduleInfo(currentWorkflowId)
    } else {
      setScheduleInfo(null)
      setIsLoadingScheduleInfo(false) // Reset loading state when not a starter block
    }
  }, [type])

    // Cleanup function to reset loading state when component unmounts or workflow changes
    return () => {
      setIsLoadingScheduleInfo(false)
    }
  }, [type, currentWorkflowId])
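The param-based ID closes a race: a fetch started just before navigation can no longer read the next workflow's ID out of the global registry. A minimal sketch of the pattern (the hook name is illustrative):

```typescript
// Deriving the ID from the URL ties each fetch to the page the user is
// actually on; a global store can still hold the previous workflow's ID
// for a tick while the route transition is in flight.
import { useParams } from 'next/navigation'

function useRouteWorkflowId(): string | undefined {
  const params = useParams()
  return typeof params?.workflowId === 'string' ? params.workflowId : undefined
}
```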

  // Get webhook information for the tooltip
  useEffect(() => {
@@ -436,6 +451,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
            blockId={id}
            setIsConnecting={setIsConnecting}
            isDisabled={!userPermissions.canEdit}
            horizontalHandles={horizontalHandles}
          />

      {/* Input Handle - Don't show for starter blocks */}
@@ -638,7 +654,9 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
                  </TooltipTrigger>
                  <TooltipContent side='top'>
                    {!userPermissions.canEdit
                      ? 'Read-only mode'
                      ? userPermissions.isOfflineMode
                        ? 'Connection lost - please refresh'
                        : 'Read-only mode'
                      : blockAdvancedMode
                        ? 'Switch to Basic Mode'
                        : 'Switch to Advanced Mode'}
@@ -683,7 +701,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
                    {Object.entries(config.outputs).map(([key, value]) => (
                      <div key={key} className='mb-1'>
                        <span className='text-muted-foreground'>{key}</span>{' '}
                        {typeof value.type === 'object' ? (
                        {typeof value === 'object' ? (
                          <div className='mt-1 pl-3'>
                            {Object.entries(value.type).map(([typeKey, typeValue]) => (
                              <div key={typeKey} className='flex items-start'>
@@ -697,7 +715,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
                            ))}
                          </div>
                        ) : (
                          <span className='text-green-500'>{value.type as string}</span>
                          <span className='text-green-500'>{value as string}</span>
                        )}
                      </div>
                    ))}
@@ -734,7 +752,9 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
                  </TooltipTrigger>
                  <TooltipContent side='top'>
                    {!userPermissions.canEdit
                      ? 'Read-only mode'
                      ? userPermissions.isOfflineMode
                        ? 'Connection lost - please refresh'
                        : 'Read-only mode'
                      : isWide
                        ? 'Narrow Block'
                        : 'Expand Block'}

@@ -1,4 +1,5 @@
import { shallow } from 'zustand/shallow'
import { BlockPathCalculator } from '@/lib/block-path-calculator'
import { createLogger } from '@/lib/logs/console-logger'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -28,6 +29,35 @@ export interface ConnectedBlock {
  }
}

function parseResponseFormatSafely(responseFormatValue: any, blockId: string): any {
  if (!responseFormatValue) {
    return undefined
  }

  if (typeof responseFormatValue === 'object' && responseFormatValue !== null) {
    return responseFormatValue
  }

  if (typeof responseFormatValue === 'string') {
    const trimmedValue = responseFormatValue.trim()

    if (trimmedValue.startsWith('<') && trimmedValue.includes('>')) {
      return trimmedValue
    }

    if (trimmedValue === '') {
      return undefined
    }

    try {
      return JSON.parse(trimmedValue)
    } catch (error) {
      return undefined
    }
  }
  return undefined
}
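The parser's fall-through order is easiest to see from a few inputs (illustrative calls; the `'b1'` block ID is hypothetical):

```typescript
// Objects pass through, block references stay raw strings,
// valid JSON is parsed, and everything else degrades to undefined.
parseResponseFormatSafely({ type: 'object' }, 'b1') // { type: 'object' }
parseResponseFormatSafely('<agent.output>', 'b1')   // '<agent.output>' (kept as a reference)
parseResponseFormatSafely('{"a": 1}', 'b1')         // { a: 1 }
parseResponseFormatSafely('not json', 'b1')         // undefined (JSON.parse throws)
parseResponseFormatSafely('', 'b1')                 // undefined (falsy guard)
```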

// Helper function to extract fields from JSON Schema
function extractFieldsFromSchema(schema: any): Field[] {
  if (!schema || typeof schema !== 'object') {
@@ -53,63 +83,6 @@ function extractFieldsFromSchema(schema: any): Field[] {
  }))
}

/**
 * Finds all blocks along paths leading to the target block
 * This is a reverse traversal from the target node to find all ancestors
 * along connected paths
 * @param edges - List of all edges in the graph
 * @param targetNodeId - ID of the target block we're finding connections for
 * @returns Array of unique ancestor node IDs
 */
function findAllPathNodes(edges: any[], targetNodeId: string): string[] {
  // We'll use a reverse topological sort approach by tracking "distance" from target
  const nodeDistances = new Map<string, number>()
  const visited = new Set<string>()
  const queue: [string, number][] = [[targetNodeId, 0]] // [nodeId, distance]
  const pathNodes = new Set<string>()

  // Build a reverse adjacency list for faster traversal
  const reverseAdjList: Record<string, string[]> = {}
  for (const edge of edges) {
    if (!reverseAdjList[edge.target]) {
      reverseAdjList[edge.target] = []
    }
    reverseAdjList[edge.target].push(edge.source)
  }

  // BFS to find all ancestors and their shortest distance from target
  while (queue.length > 0) {
    const [currentNodeId, distance] = queue.shift()!

    if (visited.has(currentNodeId)) {
      // If we've seen this node before, update its distance if this path is shorter
      const currentDistance = nodeDistances.get(currentNodeId) || Number.POSITIVE_INFINITY
      if (distance < currentDistance) {
        nodeDistances.set(currentNodeId, distance)
      }
      continue
    }

    visited.add(currentNodeId)
    nodeDistances.set(currentNodeId, distance)

    // Don't add the target node itself to the results
    if (currentNodeId !== targetNodeId) {
      pathNodes.add(currentNodeId)
    }

    // Get all incoming edges from the reverse adjacency list
    const incomingNodeIds = reverseAdjList[currentNodeId] || []

    // Add all source nodes to the queue with incremented distance
    for (const sourceId of incomingNodeIds) {
      queue.push([sourceId, distance + 1])
    }
  }

  return Array.from(pathNodes)
}
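This hunk deletes the local BFS in favor of the shared `BlockPathCalculator.findAllPathNodes`, which is expected to keep the same contract — roughly (toy graph; the result order assumes the same BFS insertion order as the deleted code):

```typescript
// start -> a -> target, plus an unrelated branch b -> c.
const edges = [
  { source: 'start', target: 'a' },
  { source: 'a', target: 'target' },
  { source: 'b', target: 'c' },
]

// Walking target's incoming edges backwards reaches only its ancestors;
// the unrelated b -> c branch is never visited.
BlockPathCalculator.findAllPathNodes(edges, 'target') // ['a', 'start']
```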

export function useBlockConnections(blockId: string) {
  const { edges, blocks } = useWorkflowStore(
    (state) => ({
@@ -120,7 +93,7 @@ export function useBlockConnections(blockId: string) {
  )

  // Find all blocks along paths leading to this block
  const allPathNodeIds = findAllPathNodes(edges, blockId)
  const allPathNodeIds = BlockPathCalculator.findAllPathNodes(edges, blockId)

  // Map each path node to a ConnectedBlock structure
  const allPathConnections = allPathNodeIds
@@ -131,17 +104,8 @@ export function useBlockConnections(blockId: string) {
      // Get the response format from the subblock store
      const responseFormatValue = useSubBlockStore.getState().getValue(sourceId, 'responseFormat')

      let responseFormat

      try {
        responseFormat =
          typeof responseFormatValue === 'string' && responseFormatValue
            ? JSON.parse(responseFormatValue)
            : responseFormatValue // Handle case where it's already an object
      } catch (e) {
        logger.error('Failed to parse response format:', { e })
        responseFormat = undefined
      }
      // Safely parse response format with proper error handling
      const responseFormat = parseResponseFormatSafely(responseFormatValue, sourceId)

      // Get the default output type from the block's outputs
      const defaultOutputs: Field[] = Object.entries(sourceBlock.outputs || {}).map(([key]) => ({
@@ -174,17 +138,8 @@ export function useBlockConnections(blockId: string) {
        .getState()
        .getValue(edge.source, 'responseFormat')

      let responseFormat

      try {
        responseFormat =
          typeof responseFormatValue === 'string' && responseFormatValue
            ? JSON.parse(responseFormatValue)
            : responseFormatValue // Handle case where it's already an object
      } catch (e) {
        logger.error('Failed to parse response format:', { e })
        responseFormat = undefined
      }
      // Safely parse response format with proper error handling
      const responseFormat = parseResponseFormatSafely(responseFormatValue, edge.source)

      // Get the default output type from the block's outputs
      const defaultOutputs: Field[] = Object.entries(sourceBlock.outputs || {}).map(([key]) => ({

@@ -2,6 +2,7 @@ import { useCallback, useState } from 'react'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console-logger'
import { buildTraceSpans } from '@/lib/logs/trace-spans'
import { processStreamingBlockLogs } from '@/lib/tokenization'
import type { BlockOutput } from '@/blocks/types'
import { Executor } from '@/executor'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
@@ -82,9 +83,9 @@ export function useWorkflowExecution() {
      }

      // If this was a streaming response and we have the final content, update it
      if (streamContent && result.output?.response && typeof streamContent === 'string') {
      if (streamContent && result.output && typeof streamContent === 'string') {
        // Update the content with the final streaming content
        enrichedResult.output.response.content = streamContent
        enrichedResult.output.content = streamContent

        // Also update any block logs to include the content where appropriate
        if (enrichedResult.logs) {
@@ -97,10 +98,9 @@ export function useWorkflowExecution() {
            if (
              isStreamingBlock &&
              (log.blockType === 'agent' || log.blockType === 'router') &&
              log.output?.response
            ) {
              log.output.response.content = streamContent
            }
              log.output
            )
              log.output.content = streamContent
            }
          }
        }
@@ -122,7 +122,7 @@ export function useWorkflowExecution() {

      return executionId
    } catch (error) {
      logger.error('Error persisting logs:', { error })
      logger.error('Error persisting logs:', error)
      return executionId
    }
  }
@@ -212,22 +212,32 @@ export function useWorkflowExecution() {
                result.metadata = { duration: 0, startTime: new Date().toISOString() }
              }
              ;(result.metadata as any).source = 'chat'
              result.logs?.forEach((log: BlockLog) => {
                if (streamedContent.has(log.blockId)) {
                  const content = streamedContent.get(log.blockId) || ''
                  if (log.output?.response) {
                    log.output.response.content = content
              // Update streamed content and apply tokenization
              if (result.logs) {
                result.logs.forEach((log: BlockLog) => {
                  if (streamedContent.has(log.blockId)) {
                    const content = streamedContent.get(log.blockId) || ''
                    // For console display, show the actual structured block output instead of formatted streaming content
                    // This ensures console logs match the block state structure
                    // Use replaceOutput to completely replace the output instead of merging
                    useConsoleStore.getState().updateConsole(log.blockId, {
                      replaceOutput: log.output,
                      success: true,
                    })
                  }
                  useConsoleStore.getState().updateConsole(log.blockId, content)
                }
                })
              })

                // Process all logs for streaming tokenization
                const processedCount = processStreamingBlockLogs(result.logs, streamedContent)
                logger.info(`Processed ${processedCount} blocks for streaming tokenization`)
              }

              controller.enqueue(
                encoder.encode(`data: ${JSON.stringify({ event: 'final', data: result })}\n\n`)
              )
              persistLogs(executionId, result).catch((err) => {
                logger.error('Error persisting logs:', { error: err })
              })
              persistLogs(executionId, result).catch((err) =>
                logger.error('Error persisting logs:', err)
              )
            }
          } catch (error: any) {
            controller.error(error)
@@ -437,7 +447,7 @@ export function useWorkflowExecution() {

      const errorResult: ExecutionResult = {
        success: false,
        output: { response: {} },
        output: {},
        error: errorMessage,
        logs: [],
      }
@@ -560,7 +570,7 @@ export function useWorkflowExecution() {
      // Create error result
      const errorResult = {
        success: false,
        output: { response: {} },
        output: {},
        error: errorMessage,
        logs: debugContext.blockLogs,
      }
@@ -647,7 +657,7 @@ export function useWorkflowExecution() {

      let currentResult: ExecutionResult = {
        success: true,
        output: { response: {} },
        output: {},
        logs: debugContext.blockLogs,
      }

@@ -743,7 +753,7 @@ export function useWorkflowExecution() {
      // Create error result
      const errorResult = {
        success: false,
        output: { response: {} },
        output: {},
        error: errorMessage,
        logs: debugContext.blockLogs,
      }
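These hunks all drop the `response` wrapper from execution results, matching the flat `<blockName.field>` reference style. A before/after sketch of the shape (sample values illustrative):

```typescript
// Before: block output nested under `response`
const before = { success: true, output: { response: { content: 'hi' } }, logs: [] }

// After: flat — fields read directly off `output`
const after = { success: true, output: { content: 'hi' }, logs: [] }
```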

@@ -1,6 +1,7 @@
'use client'

import React, { createContext, useContext, useMemo } from 'react'
import type React from 'react'
import { createContext, useContext, useEffect, useMemo, useState } from 'react'
import { useParams } from 'next/navigation'
import { createLogger } from '@/lib/logs/console-logger'
import { useUserPermissions, type WorkspaceUserPermissions } from '@/hooks/use-user-permissions'
@@ -8,6 +9,7 @@ import {
  useWorkspacePermissions,
  type WorkspacePermissions,
} from '@/hooks/use-workspace-permissions'
import { usePresence } from '../../[workflowId]/hooks/use-presence'

const logger = createLogger('WorkspacePermissionsProvider')

@@ -18,88 +20,140 @@ interface WorkspacePermissionsContextType {
  permissionsError: string | null
  updatePermissions: (newPermissions: WorkspacePermissions) => void

  // Computed user permissions
  userPermissions: WorkspaceUserPermissions
  // Computed user permissions (connection-aware)
  userPermissions: WorkspaceUserPermissions & { isOfflineMode?: boolean }

  // Connection state management
  setOfflineMode: (isOffline: boolean) => void
}

const WorkspacePermissionsContext = createContext<WorkspacePermissionsContextType | null>(null)
const WorkspacePermissionsContext = createContext<WorkspacePermissionsContextType>({
  workspacePermissions: null,
  permissionsLoading: false,
  permissionsError: null,
  updatePermissions: () => {},
  userPermissions: {
    canRead: false,
    canEdit: false,
    canAdmin: false,
    userPermissions: 'read',
    isLoading: false,
    error: null,
  },
  setOfflineMode: () => {},
})

interface WorkspacePermissionsProviderProps {
  children: React.ReactNode
}

const WorkspacePermissionsProvider = React.memo<WorkspacePermissionsProviderProps>(
  ({ children }) => {
    const params = useParams()
    const workspaceId = params.workspaceId as string
/**
 * Provider that manages workspace permissions and user access
 * Also provides connection-aware permissions that enforce read-only mode when offline
 */
export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsProviderProps) {
  const params = useParams()
  const workspaceId = params?.workspaceId as string

    if (!workspaceId) {
      logger.warn('Workspace ID is undefined from params:', params)
  // Manage offline mode state locally
  const [isOfflineMode, setIsOfflineMode] = useState(false)
  const [hasBeenConnected, setHasBeenConnected] = useState(false)

  // Fetch workspace permissions and loading state
  const {
    permissions: workspacePermissions,
    loading: permissionsLoading,
    error: permissionsError,
    updatePermissions,
  } = useWorkspacePermissions(workspaceId)

  // Get base user permissions from workspace permissions
  const baseUserPermissions = useUserPermissions(
    workspacePermissions,
    permissionsLoading,
    permissionsError
  )

  // Get connection status and update offline mode accordingly
  const { isConnected } = usePresence()

  useEffect(() => {
    if (isConnected) {
      // Mark that we've been connected at least once
      setHasBeenConnected(true)
      // On initial connection, allow going online
      if (!hasBeenConnected) {
        setIsOfflineMode(false)
      }
      // If we were previously connected and this is a reconnection, stay offline (user must refresh)
    } else if (hasBeenConnected) {
      // Only enter offline mode if we were previously connected and now disconnected
      setIsOfflineMode(true)
    }
    // If not connected and never been connected, stay in initial state (not offline mode)
  }, [isConnected, hasBeenConnected])

  // Create connection-aware permissions that override user permissions when offline
  const userPermissions = useMemo((): WorkspaceUserPermissions & { isOfflineMode?: boolean } => {
    if (isOfflineMode) {
      // In offline mode, force read-only permissions regardless of actual user permissions
      return {
        ...baseUserPermissions,
        canEdit: false,
        canAdmin: false,
        // Keep canRead true so users can still view content
        canRead: baseUserPermissions.canRead,
        isOfflineMode: true,
      }
    }

    const {
      permissions: workspacePermissions,
      loading: permissionsLoading,
      error: permissionsError,
      updatePermissions,
    } = useWorkspacePermissions(workspaceId)
    // When online, use normal permissions
    return {
      ...baseUserPermissions,
      isOfflineMode: false,
    }
  }, [baseUserPermissions, isOfflineMode])
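Together the effect and memo form a one-way latch: the first successful connection goes online, and any later disconnect forces read-only until the user refreshes. The rule as a pure function (names illustrative):

```typescript
// One-way latch: once an established session drops, stay offline until a full refresh.
function nextOfflineState(
  isConnected: boolean,
  hasBeenConnected: boolean,
  isOffline: boolean
): boolean {
  if (isConnected && !hasBeenConnected) return false // first successful connect
  if (!isConnected && hasBeenConnected) return true  // lost an established session
  return isOffline // reconnections and the pre-connect state keep the latch as-is
}
```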

    const userPermissions = useUserPermissions(
  const contextValue = useMemo(
    () => ({
      workspacePermissions,
      permissionsLoading,
      permissionsError
    )
      permissionsError,
      updatePermissions,
      userPermissions,
      setOfflineMode: setIsOfflineMode,
    }),
    [workspacePermissions, permissionsLoading, permissionsError, updatePermissions, userPermissions]
  )

    const contextValue = useMemo(
      () => ({
        workspacePermissions,
        permissionsLoading,
        permissionsError,
        updatePermissions,
        userPermissions,
      }),
      [
        workspacePermissions,
        permissionsLoading,
        permissionsError,
        updatePermissions,
        userPermissions,
      ]
    )

    return (
      <WorkspacePermissionsContext.Provider value={contextValue}>
        {children}
      </WorkspacePermissionsContext.Provider>
    )
  }
)

WorkspacePermissionsProvider.displayName = 'WorkspacePermissionsProvider'

export { WorkspacePermissionsProvider }
  return (
    <WorkspacePermissionsContext.Provider value={contextValue}>
      {children}
    </WorkspacePermissionsContext.Provider>
  )
}

/**
 * Hook to access workspace permissions context
 * This replaces individual useWorkspacePermissions calls to avoid duplicate API requests
 * Hook to access workspace permissions and data from context
 * This provides both raw workspace permissions and computed user permissions
 */
export function useWorkspacePermissionsContext(): WorkspacePermissionsContextType {
  const context = useContext(WorkspacePermissionsContext)

  if (!context) {
    throw new Error(
      'useWorkspacePermissionsContext must be used within a WorkspacePermissionsProvider'
    )
  }

  return context
}

/**
 * Hook to access user permissions from context
 * This replaces individual useUserPermissions calls
 * This replaces individual useUserPermissions calls and includes connection-aware permissions
 */
export function useUserPermissionsContext(): WorkspaceUserPermissions {
export function useUserPermissionsContext(): WorkspaceUserPermissions & {
  isOfflineMode?: boolean
} {
  const { userPermissions } = useWorkspacePermissionsContext()
  return userPermissions
}

@@ -15,9 +15,14 @@ import { useFolderStore } from '@/stores/folders/store'
interface CreateMenuProps {
  onCreateWorkflow: (folderId?: string) => void
  isCollapsed?: boolean
  isCreatingWorkflow?: boolean
}

export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
export function CreateMenu({
  onCreateWorkflow,
  isCollapsed,
  isCreatingWorkflow = false,
}: CreateMenuProps) {
  const [showFolderDialog, setShowFolderDialog] = useState(false)
  const [folderName, setFolderName] = useState('')
  const [isCreating, setIsCreating] = useState(false)
@@ -73,6 +78,7 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
          onClick={handleCreateWorkflow}
          onMouseEnter={() => setIsHoverOpen(true)}
          onMouseLeave={() => setIsHoverOpen(false)}
          disabled={isCreatingWorkflow}
        >
          <Plus
            className={cn(
@@ -101,11 +107,17 @@ export function CreateMenu({ onCreateWorkflow, isCollapsed }: CreateMenuProps) {
          onCloseAutoFocus={(e) => e.preventDefault()}
        >
          <button
            className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'
            className={cn(
              'flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors',
              isCreatingWorkflow
                ? 'cursor-not-allowed opacity-50'
                : 'hover:bg-accent hover:text-accent-foreground'
            )}
            onClick={handleCreateWorkflow}
            disabled={isCreatingWorkflow}
          >
            <File className='h-4 w-4' />
            New Workflow
            {isCreatingWorkflow ? 'Creating...' : 'New Workflow'}
          </button>
          <button
            className='flex w-full cursor-default select-none items-center gap-2 rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground'

@@ -14,7 +14,9 @@ import {
} from '@/components/ui/dropdown-menu'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console-logger'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
import { useFolderStore } from '@/stores/folders/store'

const logger = createLogger('FolderContextMenu')
@@ -43,6 +45,9 @@ export function FolderContextMenu({
  const params = useParams()
  const workspaceId = params.workspaceId as string

  // Get user permissions for the workspace
  const userPermissions = useUserPermissionsContext()

  const { createFolder, updateFolder, deleteFolder } = useFolderStore()

  const handleCreateWorkflow = () => {
@@ -58,12 +63,17 @@ export function FolderContextMenu({
    setShowRenameDialog(true)
  }

  const handleDelete = () => {
  const handleDelete = async () => {
    if (onDelete) {
      onDelete(folderId)
    } else {
      // Default delete behavior
      deleteFolder(folderId, workspaceId)
      // Default delete behavior with proper error handling
      try {
        await deleteFolder(folderId, workspaceId)
        logger.info(`Successfully deleted folder from context menu: ${folderName}`)
      } catch (error) {
        logger.error('Failed to delete folder from context menu:', { error, folderId, folderName })
      }
    }
  }

@@ -129,23 +139,46 @@ export function FolderContextMenu({
          </Button>
        </DropdownMenuTrigger>
        <DropdownMenuContent align='end' onClick={(e) => e.stopPropagation()}>
          <DropdownMenuItem onClick={handleCreateWorkflow}>
            <File className='mr-2 h-4 w-4' />
            New Workflow
          </DropdownMenuItem>
          <DropdownMenuItem onClick={handleCreateSubfolder}>
            <Folder className='mr-2 h-4 w-4' />
            New Subfolder
          </DropdownMenuItem>
          <DropdownMenuSeparator />
          <DropdownMenuItem onClick={handleRename}>
            <Pencil className='mr-2 h-4 w-4' />
            Rename
          </DropdownMenuItem>
          <DropdownMenuItem onClick={handleDelete} className='text-destructive'>
            <Trash2 className='mr-2 h-4 w-4' />
            Delete
          </DropdownMenuItem>
          {userPermissions.canEdit && (
            <>
              <DropdownMenuItem onClick={handleCreateWorkflow}>
                <File className='mr-2 h-4 w-4' />
                New Workflow
              </DropdownMenuItem>
              <DropdownMenuItem onClick={handleCreateSubfolder}>
                <Folder className='mr-2 h-4 w-4' />
                New Subfolder
              </DropdownMenuItem>
              <DropdownMenuSeparator />
              <DropdownMenuItem onClick={handleRename}>
                <Pencil className='mr-2 h-4 w-4' />
                Rename
              </DropdownMenuItem>
            </>
          )}
          {userPermissions.canAdmin ? (
            <DropdownMenuItem onClick={handleDelete} className='text-destructive'>
              <Trash2 className='mr-2 h-4 w-4' />
              Delete
            </DropdownMenuItem>
          ) : (
            <Tooltip>
              <TooltipTrigger asChild>
                <div>
                  <DropdownMenuItem
                    className='cursor-not-allowed text-muted-foreground opacity-50'
                    onClick={(e) => e.preventDefault()}
                  >
                    <Trash2 className='mr-2 h-4 w-4' />
                    Delete
                  </DropdownMenuItem>
                </div>
              </TooltipTrigger>
              <TooltipContent>
                <p>Admin access required to delete folders</p>
              </TooltipContent>
            </Tooltip>
          )}
        </DropdownMenuContent>
      </DropdownMenu>

@@ -19,6 +19,7 @@ const TOOLTIPS = {
  debugMode: 'Enable visual debugging information during execution.',
  autoConnect: 'Automatically connect nodes.',
  autoFillEnvVars: 'Automatically fill API keys.',
  autoPan: 'Automatically pan to active blocks during workflow execution.',
}

export function General() {
@@ -30,11 +31,13 @@ export function General() {
  const isAutoConnectEnabled = useGeneralStore((state) => state.isAutoConnectEnabled)
  const isDebugModeEnabled = useGeneralStore((state) => state.isDebugModeEnabled)
  const isAutoFillEnvVarsEnabled = useGeneralStore((state) => state.isAutoFillEnvVarsEnabled)
  const isAutoPanEnabled = useGeneralStore((state) => state.isAutoPanEnabled)

  const setTheme = useGeneralStore((state) => state.setTheme)
  const toggleAutoConnect = useGeneralStore((state) => state.toggleAutoConnect)
  const toggleDebugMode = useGeneralStore((state) => state.toggleDebugMode)
  const toggleAutoFillEnvVars = useGeneralStore((state) => state.toggleAutoFillEnvVars)
  const toggleAutoPan = useGeneralStore((state) => state.toggleAutoPan)
  const loadSettings = useGeneralStore((state) => state.loadSettings)

  useEffect(() => {
@@ -66,6 +69,12 @@ export function General() {
    }
  }

  const handleAutoPanChange = (checked: boolean) => {
    if (checked !== isAutoPanEnabled) {
      toggleAutoPan()
    }
  }

  const handleRetry = () => {
    setRetryCount((prev) => prev + 1)
  }
@@ -200,6 +209,35 @@ export function General() {
              disabled={isLoading}
            />
          </div>
          <div className='flex items-center justify-between py-1'>
            <div className='flex items-center gap-2'>
              <Label htmlFor='auto-pan' className='font-medium'>
                Auto-pan during execution
              </Label>
              <Tooltip>
                <TooltipTrigger asChild>
                  <Button
                    variant='ghost'
                    size='sm'
                    className='h-7 p-1 text-gray-500'
                    aria-label='Learn more about auto-pan feature'
                    disabled={isLoading}
                  >
                    <Info className='h-5 w-5' />
                  </Button>
                </TooltipTrigger>
                <TooltipContent side='top' className='max-w-[300px] p-3'>
                  <p className='text-sm'>{TOOLTIPS.autoPan}</p>
                </TooltipContent>
              </Tooltip>
            </div>
            <Switch
              id='auto-pan'
              checked={isAutoPanEnabled}
              onCheckedChange={handleAutoPanChange}
              disabled={isLoading}
            />
          </div>
        </>
      )}
    </div>

@@ -7,6 +7,7 @@ import { useParams, usePathname, useRouter } from 'next/navigation'
import { Skeleton } from '@/components/ui/skeleton'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { useSession } from '@/lib/auth-client'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import {
  getKeyboardShortcutText,
@@ -27,7 +28,7 @@ import { WorkspaceHeader } from './components/workspace-header/workspace-header'

const logger = createLogger('Sidebar')

const IS_DEV = process.env.NODE_ENV === 'development'
const IS_DEV = env.NODE_ENV === 'development'

export function Sidebar() {
  useGlobalShortcuts()
@@ -41,6 +42,9 @@ export function Sidebar() {
  const { isPending: sessionLoading } = useSession()
  const userPermissions = useUserPermissionsContext()
  const isLoading = workflowsLoading || sessionLoading

  // Add state to prevent multiple simultaneous workflow creations
  const [isCreatingWorkflow, setIsCreatingWorkflow] = useState(false)
  const router = useRouter()
  const params = useParams()
  const workspaceId = params.workspaceId as string
@@ -108,7 +112,14 @@ export function Sidebar() {

  // Create workflow handler
  const handleCreateWorkflow = async (folderId?: string) => {
    // Prevent multiple simultaneous workflow creations
    if (isCreatingWorkflow) {
      logger.info('Workflow creation already in progress, ignoring request')
      return
    }

    try {
      setIsCreatingWorkflow(true)
      const id = await createWorkflow({
        workspaceId: workspaceId || undefined,
        folderId: folderId || undefined,
@@ -116,6 +127,8 @@ export function Sidebar() {
      router.push(`/workspace/${workspaceId}/w/${id}`)
    } catch (error) {
      logger.error('Error creating workflow:', error)
    } finally {
      setIsCreatingWorkflow(false)
    }
  }
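The guard is a plain in-flight flag around an async call; the same pattern in isolation (names illustrative):

```typescript
// Ignore re-entrant calls while an async create is still pending;
// the flag is always cleared in `finally`, even on failure.
let inFlight = false

async function createOnce(create: () => Promise<string>): Promise<string | undefined> {
  if (inFlight) return undefined // a second click arrived while the first is pending
  inFlight = true
  try {
    return await create()
  } finally {
    inFlight = false
  }
}
```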

@@ -173,7 +186,11 @@ export function Sidebar() {
            {isLoading ? <Skeleton className='h-4 w-16' /> : 'Workflows'}
          </h2>
          {!isCollapsed && !isLoading && (
            <CreateMenu onCreateWorkflow={handleCreateWorkflow} isCollapsed={false} />
            <CreateMenu
              onCreateWorkflow={handleCreateWorkflow}
              isCollapsed={false}
              isCreatingWorkflow={isCreatingWorkflow}
            />
          )}
        </div>
        <FolderTree

@@ -33,6 +33,7 @@ interface WorkflowPreviewProps {
  isPannable?: boolean
  defaultPosition?: { x: number; y: number }
  defaultZoom?: number
  onNodeClick?: (blockId: string, mousePosition: { x: number; y: number }) => void
}

// Define node types - the components now handle preview mode internally
@@ -55,7 +56,24 @@ export function WorkflowPreview({
  isPannable = false,
  defaultPosition,
  defaultZoom,
  onNodeClick,
}: WorkflowPreviewProps) {
  // Handle migrated logs that don't have complete workflow state
  if (!workflowState || !workflowState.blocks || !workflowState.edges) {
    return (
      <div
        style={{ height, width }}
        className='flex items-center justify-center rounded-lg border border-gray-200 bg-gray-50 dark:border-gray-700 dark:bg-gray-900'
      >
        <div className='text-center text-gray-500 dark:text-gray-400'>
          <div className='mb-2 font-medium text-lg'>⚠️ Logged State Not Found</div>
          <div className='text-sm'>
            This log was migrated from the old system and doesn't contain workflow state data.
          </div>
        </div>
      </div>
    )
  }
  const blocksStructure = useMemo(
    () => ({
      count: Object.keys(workflowState.blocks || {}).length,
@@ -82,8 +100,8 @@ export function WorkflowPreview({

  const edgesStructure = useMemo(
    () => ({
      count: workflowState.edges.length,
      ids: workflowState.edges.map((e) => e.id).join(','),
      count: workflowState.edges?.length || 0,
      ids: workflowState.edges?.map((e) => e.id).join(',') || '',
    }),
    [workflowState.edges]
  )
@@ -113,7 +131,7 @@ export function WorkflowPreview({
  const nodes: Node[] = useMemo(() => {
    const nodeArray: Node[] = []

    Object.entries(workflowState.blocks).forEach(([blockId, block]) => {
    Object.entries(workflowState.blocks || {}).forEach(([blockId, block]) => {
      if (!block || !block.type) {
        logger.warn(`Skipping invalid block: ${blockId}`)
        return
@@ -184,7 +202,7 @@ export function WorkflowPreview({
      })

      if (block.type === 'loop') {
        const childBlocks = Object.entries(workflowState.blocks).filter(
        const childBlocks = Object.entries(workflowState.blocks || {}).filter(
          ([_, childBlock]) => childBlock.data?.parentId === blockId
        )

@@ -221,7 +239,7 @@ export function WorkflowPreview({
  }, [blocksStructure, loopsStructure, parallelsStructure, showSubBlocks, workflowState.blocks])

  const edges: Edge[] = useMemo(() => {
    return workflowState.edges.map((edge) => ({
    return (workflowState.edges || []).map((edge) => ({
      id: edge.id,
      source: edge.source,
      target: edge.target,
@@ -256,6 +274,14 @@ export function WorkflowPreview({
        elementsSelectable={false}
        nodesDraggable={false}
        nodesConnectable={false}
        onNodeClick={
          onNodeClick
            ? (event, node) => {
                logger.debug('Node clicked:', { nodeId: node.id, event })
                onNodeClick(node.id, { x: event.clientX, y: event.clientY })
              }
            : undefined
        }
      >
        <Background />
      </ReactFlow>

@@ -332,25 +332,9 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
    tools: { type: 'json', required: false },
  },
  outputs: {
    response: {
      type: {
        content: 'string',
        model: 'string',
        tokens: 'any',
        toolCalls: 'any',
      },
      dependsOn: {
        subBlockId: 'responseFormat',
        condition: {
          whenEmpty: {
            content: 'string',
            model: 'string',
            tokens: 'any',
            toolCalls: 'any',
          },
          whenFilled: 'json',
        },
      },
    },
    content: 'string',
    model: 'string',
    tokens: 'any',
    toolCalls: 'any',
  },
}
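Like the block-config hunks that follow, this flattens the block's declared outputs: the `response` wrapper (and, for the agent block, its `dependsOn` conditional typing) goes away, so downstream references read `<agent.content>` rather than `<agent.response.content>`. A sketch of the flattened shape on a hypothetical minimal block (not one from this diff):

```typescript
// Hypothetical minimal block config using the flattened outputs shape.
const EchoBlock = {
  type: 'echo',
  inputs: {
    text: { type: 'string', required: true },
  },
  outputs: {
    // Flat keys: downstream blocks reference <echo.text>, not <echo.response.text>.
    text: 'string',
  },
}
```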

@@ -179,12 +179,8 @@ export const AirtableBlock: BlockConfig<AirtableResponse> = {
  },
  // Output structure depends on the operation, covered by AirtableResponse union type
  outputs: {
    response: {
      type: {
        records: 'json', // Optional: for list, create, updateMultiple
        record: 'json', // Optional: for get, update single
        metadata: 'json', // Required: present in all responses
      },
    },
    records: 'json', // Optional: for list, create, updateMultiple
    record: 'json', // Optional: for get, update single
    metadata: 'json', // Required: present in all responses
  },
}

@@ -62,12 +62,8 @@ export const ApiBlock: BlockConfig<RequestResponse> = {
    params: { type: 'json', required: false },
  },
  outputs: {
    response: {
      type: {
        data: 'any',
        status: 'number',
        headers: 'json',
      },
    },
    data: 'any',
    status: 'number',
    headers: 'json',
  },
}

@@ -112,13 +112,9 @@ export const AutoblocksBlock: BlockConfig<AutoblocksResponse> = {
    environment: { type: 'string', required: true },
  },
  outputs: {
    response: {
      type: {
        promptId: 'string',
        version: 'string',
        renderedPrompt: 'string',
        templates: 'json',
      },
    },
    promptId: 'string',
    version: 'string',
    renderedPrompt: 'string',
    templates: 'json',
  },
}

@@ -76,13 +76,9 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
    save_browser_data: { type: 'boolean', required: false },
  },
  outputs: {
    response: {
      type: {
        id: 'string',
        success: 'boolean',
        output: 'any',
        steps: 'json',
      },
    },
    id: 'string',
    success: 'boolean',
    output: 'any',
    steps: 'json',
  },
}

@@ -50,10 +50,6 @@ Plain Text: Best for populating a table in free-form style.
    data: { type: 'json', required: true },
  },
  outputs: {
    response: {
      type: {
        data: 'any',
      },
    },
    data: 'any',
  },
}

@@ -37,13 +37,9 @@ export const ConditionBlock: BlockConfig<ConditionBlockOutput> = {
  },
  inputs: {},
  outputs: {
    response: {
      type: {
        content: 'string',
        conditionResult: 'boolean',
        selectedPath: 'json',
        selectedConditionId: 'string',
      },
    },
    content: 'string',
    conditionResult: 'boolean',
    selectedPath: 'json',
    selectedConditionId: 'string',
  },
}

@@ -109,14 +109,10 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
    content: { type: 'string', required: false },
  },
  outputs: {
    response: {
      type: {
        ts: 'string',
        pageId: 'string',
        content: 'string',
        title: 'string',
        success: 'boolean',
      },
    },
    ts: 'string',
    pageId: 'string',
    content: 'string',
    title: 'string',
    success: 'boolean',
  },
}

Some files were not shown because too many files have changed in this diff.