Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 23:48:09 -05:00)

Compare commits: v0.3.41...improvemen (65 commits)
| SHA1 |
|---|
| 2e4945508c |
| 1feef4ce4b |
| 2404f8af14 |
| ba72e35d43 |
| ab52458191 |
| ef5e2b699c |
| ba45404423 |
| 4ce2fc760a |
| 71e06f2b31 |
| ce04d56d79 |
| 626e9a37da |
| 100ae1d23e |
| 792df1a9f0 |
| d313a0f171 |
| 29270d15ff |
| 879711d786 |
| f98138a550 |
| 888609a93c |
| a516325733 |
| 396c9db204 |
| a1acbc9616 |
| 5a74ab28e2 |
| cf5532c852 |
| 3e6d454de3 |
| 4c4b3351e6 |
| 0c1ee239fe |
| 9c065a1c2a |
| dc92a79f33 |
| efb0d22d3f |
| 9af445fa25 |
| e09088bafc |
| 994c81ba3c |
| 4bba1eb8f6 |
| de06e8c35c |
| 61534b05dd |
| 694538e1ee |
| 6df565e4c8 |
| 422df2be0f |
| 692b385ece |
| 728f5812ac |
| 1d51706057 |
| c166c60d9b |
| 120b7ffd5c |
| ecc2a55f9e |
| f10b7c0493 |
| 5835df3496 |
| c4924776b6 |
| 9444661d98 |
| 9137b2eab3 |
| fa0ef07981 |
| 463ba208f4 |
| 2cedac5ffb |
| 9c3f559a91 |
| 0d7ab06bd1 |
| 7bda4468b8 |
| bdb9b866ab |
| 460d515df2 |
| 7b49515798 |
| 049f188d2e |
| 6c8a7f0594 |
| 3727b5d395 |
| 75a3f4cce7 |
| 63616a1239 |
| 7cb6dfc211 |
| b94d942204 |
```diff
@@ -543,6 +543,8 @@ export async function executeWorkflowForChat(
     userId: deployment.userId,
+    workspaceId: '', // TODO: Get from workflow
     variables: workflowVariables,
     initialInput: { input, conversationId },
+    executionType: 'chat',
   })

   const stream = new ReadableStream({
```
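The same start payload recurs in the schedule and API hunks later in this compare. A minimal sketch of the shape these `safeStart(...)` calls converge on, assuming the field names from the diffs — the interface itself is illustrative, not a type from the repository:

```typescript
// Field names are taken from the safeStart(...) calls in this compare; the
// interface name and exact optionality are assumptions for illustration.
interface LoggingSessionStartPayload {
  userId: string
  workspaceId: string // chat passes '' until it can be derived from the workflow (see TODO above)
  variables: Record<string, unknown>
  initialInput: unknown
  startBlockId?: string // only the schedule trigger sets this
  executionType: 'chat' | 'schedule' | 'api'
}
```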
```diff
@@ -12,9 +12,9 @@ import {
 import { getCopilotModel } from '@/lib/copilot/config'
 import type { CopilotProviderConfig } from '@/lib/copilot/types'
 import { env } from '@/lib/env'
-import { generateChatTitle } from '@/lib/generate-chat-title'
 import { createLogger } from '@/lib/logs/console/logger'
 import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
+import { generateChatTitle } from '@/lib/sim-agent/utils'
 import { createFileContent, isSupportedFileType } from '@/lib/uploads/file-utils'
 import { S3_COPILOT_CONFIG } from '@/lib/uploads/setup'
 import { downloadFile, getStorageProvider } from '@/lib/uploads/storage-client'
```
```diff
@@ -76,11 +76,9 @@ export async function POST(request: NextRequest) {

   logger.info('File parse request received:', { filePath, fileType })

-  // Handle multiple files
   if (Array.isArray(filePath)) {
     const results = []
     for (const path of filePath) {
-      // Skip empty or invalid paths
       if (!path || (typeof path === 'string' && path.trim() === '')) {
         results.push({
           success: false,
@@ -91,12 +89,10 @@ export async function POST(request: NextRequest) {
       }

       const result = await parseFileSingle(path, fileType)
-      // Add processing time to metadata
       if (result.metadata) {
         result.metadata.processingTime = Date.now() - startTime
       }

-      // Transform each result to match expected frontend format
       if (result.success) {
         results.push({
           success: true,
@@ -105,7 +101,7 @@ export async function POST(request: NextRequest) {
             name: result.filePath.split('/').pop() || 'unknown',
             fileType: result.metadata?.fileType || 'application/octet-stream',
             size: result.metadata?.size || 0,
-            binary: false, // We only return text content
+            binary: false,
           },
           filePath: result.filePath,
         })
@@ -120,15 +116,12 @@ export async function POST(request: NextRequest) {
       })
     }

-    // Handle single file
     const result = await parseFileSingle(filePath, fileType)

-    // Add processing time to metadata
     if (result.metadata) {
       result.metadata.processingTime = Date.now() - startTime
     }

-    // Transform single file result to match expected frontend format
     if (result.success) {
       return NextResponse.json({
         success: true,
@@ -142,8 +135,6 @@ export async function POST(request: NextRequest) {
       })
     }

-    // Only return 500 for actual server errors, not file processing failures
-    // File processing failures (like file not found, parsing errors) should return 200 with success:false
     return NextResponse.json(result)
   } catch (error) {
     logger.error('Error in file parse API:', error)
@@ -164,7 +155,6 @@ export async function POST(request: NextRequest) {
 async function parseFileSingle(filePath: string, fileType?: string): Promise<ParseResult> {
   logger.info('Parsing file:', filePath)

-  // Validate that filePath is not empty
   if (!filePath || filePath.trim() === '') {
     return {
       success: false,
@@ -173,7 +163,6 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
     }
   }

-  // Validate path for security before any processing
   const pathValidation = validateFilePath(filePath)
   if (!pathValidation.isValid) {
     return {
@@ -183,49 +172,40 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
     }
   }

-  // Check if this is an external URL
   if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
     return handleExternalUrl(filePath, fileType)
   }

-  // Check if this is a cloud storage path (S3 or Blob)
   const isS3Path = filePath.includes('/api/files/serve/s3/')
   const isBlobPath = filePath.includes('/api/files/serve/blob/')

-  // Use cloud handler if it's a cloud path or we're in cloud mode
   if (isS3Path || isBlobPath || isUsingCloudStorage()) {
     return handleCloudFile(filePath, fileType)
   }

-  // Use local handler for local files
   return handleLocalFile(filePath, fileType)
 }

 /**
- * Validate file path for security
+ * Validate file path for security - prevents null byte injection and path traversal attacks
  */
 function validateFilePath(filePath: string): { isValid: boolean; error?: string } {
-  // Check for null bytes
   if (filePath.includes('\0')) {
     return { isValid: false, error: 'Invalid path: null byte detected' }
   }

-  // Check for path traversal attempts
   if (filePath.includes('..')) {
     return { isValid: false, error: 'Access denied: path traversal detected' }
   }

-  // Check for tilde characters (home directory access)
   if (filePath.includes('~')) {
     return { isValid: false, error: 'Invalid path: tilde character not allowed' }
   }

-  // Check for absolute paths outside allowed directories
   if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
     return { isValid: false, error: 'Path outside allowed directory' }
   }

-  // Check for Windows absolute paths
   if (/^[A-Za-z]:\\/.test(filePath)) {
     return { isValid: false, error: 'Path outside allowed directory' }
   }
```
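For quick reference, the validation rules above condense to the following standalone sketch; the function body mirrors the diff, while the probe inputs are hypothetical:

```typescript
function validateFilePath(filePath: string): { isValid: boolean; error?: string } {
  if (filePath.includes('\0')) return { isValid: false, error: 'Invalid path: null byte detected' }
  if (filePath.includes('..')) return { isValid: false, error: 'Access denied: path traversal detected' }
  if (filePath.includes('~')) return { isValid: false, error: 'Invalid path: tilde character not allowed' }
  if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
    return { isValid: false, error: 'Path outside allowed directory' }
  }
  if (/^[A-Za-z]:\\/.test(filePath)) return { isValid: false, error: 'Path outside allowed directory' }
  return { isValid: true }
}

// Hypothetical probes:
validateFilePath('/api/files/serve/s3/doc.pdf') // { isValid: true }
validateFilePath('../../etc/passwd')            // rejected: path traversal
validateFilePath('C:\\Windows\\system32')       // rejected: Windows absolute path
```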
```diff
@@ -260,12 +240,10 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR

     logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)

-    // Extract filename from URL
     const urlPath = new URL(url).pathname
     const filename = urlPath.split('/').pop() || 'download'
     const extension = path.extname(filename).toLowerCase().substring(1)

-    // Process the file based on its content type
     if (extension === 'pdf') {
       return await handlePdfBuffer(buffer, filename, fileType, url)
     }
```
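The filename derivation above uses only the URL's pathname, so query strings never leak into the extension check. In isolation, with a hypothetical URL:

```typescript
import path from 'path'

const url = 'https://example.com/files/report.PDF?sig=abc' // hypothetical
const urlPath = new URL(url).pathname                      // '/files/report.PDF'
const filename = urlPath.split('/').pop() || 'download'    // 'report.PDF'
const extension = path.extname(filename).toLowerCase().substring(1) // 'pdf'
```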
```diff
@@ -276,7 +254,6 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
       return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
     }

-    // For binary or unknown files
     return handleGenericBuffer(buffer, filename, extension, fileType)
   } catch (error) {
     logger.error(`Error handling external URL ${url}:`, error)
@@ -289,35 +266,29 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
 }

 /**
- * Handle file stored in cloud storage (S3 or Azure Blob)
+ * Handle file stored in cloud storage
  */
 async function handleCloudFile(filePath: string, fileType?: string): Promise<ParseResult> {
   try {
-    // Extract the cloud key from the path
     let cloudKey: string
     if (filePath.includes('/api/files/serve/s3/')) {
       cloudKey = decodeURIComponent(filePath.split('/api/files/serve/s3/')[1])
     } else if (filePath.includes('/api/files/serve/blob/')) {
       cloudKey = decodeURIComponent(filePath.split('/api/files/serve/blob/')[1])
     } else if (filePath.startsWith('/api/files/serve/')) {
       // Backwards-compatibility: path like "/api/files/serve/<key>"
       cloudKey = decodeURIComponent(filePath.substring('/api/files/serve/'.length))
     } else {
       // Assume raw key provided
       cloudKey = filePath
     }

     logger.info('Extracted cloud key:', cloudKey)

-    // Download the file from cloud storage - this can throw for access errors
     const fileBuffer = await downloadFile(cloudKey)
     logger.info(`Downloaded file from cloud storage: ${cloudKey}, size: ${fileBuffer.length} bytes`)

-    // Extract the filename from the cloud key
     const filename = cloudKey.split('/').pop() || cloudKey
     const extension = path.extname(filename).toLowerCase().substring(1)

-    // Process the file based on its content type
     if (extension === 'pdf') {
       return await handlePdfBuffer(fileBuffer, filename, fileType, filePath)
     }
@@ -325,22 +296,19 @@ async function handleCloudFile(filePath: string, fileType?: string): Promise<Par
       return await handleCsvBuffer(fileBuffer, filename, fileType, filePath)
     }
     if (isSupportedFileType(extension)) {
-      // For other supported types that we have parsers for
       return await handleGenericTextBuffer(fileBuffer, filename, extension, fileType, filePath)
     }
-    // For binary or unknown files
     return handleGenericBuffer(fileBuffer, filename, extension, fileType)
   } catch (error) {
     logger.error(`Error handling cloud file ${filePath}:`, error)

-    // Check if this is a download/access error that should trigger a 500 response
+    // For download/access errors, throw to trigger 500 response
     const errorMessage = (error as Error).message
     if (errorMessage.includes('Access denied') || errorMessage.includes('Forbidden')) {
-      // For access errors, throw to trigger 500 response
       throw new Error(`Error accessing file from cloud storage: ${errorMessage}`)
     }

-    // For other errors (parsing, processing), return success:false
+    // For other errors (parsing, processing), return success:false and an error message
     return {
       success: false,
       error: `Error accessing file from cloud storage: ${errorMessage}`,
```
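The key-extraction branch above, condensed into one helper; the serve-path prefixes come from the diff, while the helper name is mine:

```typescript
function extractCloudKey(filePath: string): string {
  if (filePath.includes('/api/files/serve/s3/')) {
    return decodeURIComponent(filePath.split('/api/files/serve/s3/')[1])
  }
  if (filePath.includes('/api/files/serve/blob/')) {
    return decodeURIComponent(filePath.split('/api/files/serve/blob/')[1])
  }
  if (filePath.startsWith('/api/files/serve/')) {
    return decodeURIComponent(filePath.substring('/api/files/serve/'.length))
  }
  return filePath // assume a raw storage key was passed through
}

// e.g. extractCloudKey('/api/files/serve/s3/kb%2Fdoc.pdf') === 'kb/doc.pdf'
```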
```diff
@@ -354,28 +322,23 @@ async function handleCloudFile(filePath: string, fileType?: string): Promise<Par
  */
 async function handleLocalFile(filePath: string, fileType?: string): Promise<ParseResult> {
   try {
-    // Extract filename from path
     const filename = filePath.split('/').pop() || filePath
     const fullPath = path.join(UPLOAD_DIR_SERVER, filename)

     logger.info('Processing local file:', fullPath)

-    // Check if file exists
     try {
       await fsPromises.access(fullPath)
     } catch {
       throw new Error(`File not found: ${filename}`)
     }

-    // Parse the file directly
     const result = await parseFile(fullPath)

-    // Get file stats for metadata
     const stats = await fsPromises.stat(fullPath)
     const fileBuffer = await readFile(fullPath)
     const hash = createHash('md5').update(fileBuffer).digest('hex')

-    // Extract file extension for type detection
     const extension = path.extname(filename).toLowerCase().substring(1)

     return {
@@ -386,7 +349,7 @@ async function handleLocalFile(filePath: string, fileType?: string): Promise<Par
         fileType: fileType || getMimeType(extension),
         size: stats.size,
         hash,
-        processingTime: 0, // Will be set by caller
+        processingTime: 0,
       },
     }
   } catch (error) {
```
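Every metadata block in this file fingerprints content the same way; the hashing step in isolation, using Node's built-in crypto:

```typescript
import { createHash } from 'crypto'

function md5Hex(buffer: Buffer): string {
  return createHash('md5').update(buffer).digest('hex')
}

md5Hex(Buffer.from('hello')) // '5d41402abc4b2a76b9719d911017c592'
```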
```diff
@@ -425,15 +388,14 @@ async function handlePdfBuffer(
       fileType: fileType || 'application/pdf',
       size: fileBuffer.length,
       hash: createHash('md5').update(fileBuffer).digest('hex'),
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 } catch (error) {
   logger.error('Failed to parse PDF in memory:', error)

-  // Create fallback message for PDF parsing failure
   const content = createPdfFailureMessage(
-    0, // We can't determine page count without parsing
+    0,
     fileBuffer.length,
     originalPath || filename,
     (error as Error).message
@@ -447,7 +409,7 @@ async function handlePdfBuffer(
       fileType: fileType || 'application/pdf',
       size: fileBuffer.length,
       hash: createHash('md5').update(fileBuffer).digest('hex'),
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 }
@@ -465,7 +427,6 @@ async function handleCsvBuffer(
   try {
     logger.info(`Parsing CSV in memory: ${filename}`)

-    // Use the parseBuffer function from our library
     const { parseBuffer } = await import('@/lib/file-parsers')
     const result = await parseBuffer(fileBuffer, 'csv')

@@ -477,7 +438,7 @@ async function handleCsvBuffer(
       fileType: fileType || 'text/csv',
       size: fileBuffer.length,
       hash: createHash('md5').update(fileBuffer).digest('hex'),
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 } catch (error) {
@@ -490,7 +451,7 @@ async function handleCsvBuffer(
       fileType: 'text/csv',
       size: 0,
       hash: '',
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 }
@@ -509,7 +470,6 @@ async function handleGenericTextBuffer(
   try {
     logger.info(`Parsing text file in memory: ${filename}`)

-    // Try to use a specialized parser if available
     try {
       const { parseBuffer, isSupportedFileType } = await import('@/lib/file-parsers')

@@ -524,7 +484,7 @@ async function handleGenericTextBuffer(
       fileType: fileType || getMimeType(extension),
       size: fileBuffer.length,
       hash: createHash('md5').update(fileBuffer).digest('hex'),
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 }
@@ -532,7 +492,6 @@ async function handleGenericTextBuffer(
     logger.warn('Specialized parser failed, falling back to generic parsing:', parserError)
   }

-  // Fallback to generic text parsing
   const content = fileBuffer.toString('utf-8')

   return {
@@ -543,7 +502,7 @@ async function handleGenericTextBuffer(
       fileType: fileType || getMimeType(extension),
       size: fileBuffer.length,
       hash: createHash('md5').update(fileBuffer).digest('hex'),
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 } catch (error) {
@@ -556,7 +515,7 @@ async function handleGenericTextBuffer(
       fileType: 'text/plain',
       size: 0,
       hash: '',
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 }
@@ -584,7 +543,7 @@ function handleGenericBuffer(
       fileType: fileType || getMimeType(extension),
       size: fileBuffer.length,
       hash: createHash('md5').update(fileBuffer).digest('hex'),
-      processingTime: 0, // Will be set by caller
+      processingTime: 0,
     },
   }
 }
@@ -594,8 +553,6 @@ function handleGenericBuffer(
  */
 async function parseBufferAsPdf(buffer: Buffer) {
   try {
-    // Import parsers dynamically to avoid initialization issues in tests
-    // First try to use the main PDF parser
     try {
       const { PdfParser } = await import('@/lib/file-parsers/pdf-parser')
       const parser = new PdfParser()
@@ -606,7 +563,6 @@ async function parseBufferAsPdf(buffer: Buffer) {
     }
     throw new Error('PDF parser does not support buffer parsing')
   } catch (error) {
-    // Fallback to raw PDF parser
     logger.warn('Main PDF parser failed, using raw parser for buffer:', error)
     const { RawPdfParser } = await import('@/lib/file-parsers/raw-pdf-parser')
     const rawParser = new RawPdfParser()
@@ -655,7 +611,7 @@ Please use a PDF viewer for best results.`
 }

 /**
- * Create error message for PDF parsing failure
+ * Create error message for PDF parsing failure and make it more readable
  */
 function createPdfFailureMessage(
   pageCount: number,
```
```diff
@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
-import { SUPPORTED_FIELD_TYPES } from '@/lib/constants/knowledge'
+import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/consts'
 import {
   cleanupUnusedTagDefinitions,
   createOrUpdateTagDefinitionsBulk,
```
```diff
@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
-import { SUPPORTED_FIELD_TYPES } from '@/lib/constants/knowledge'
+import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/consts'
 import { createTagDefinition, getTagDefinitions } from '@/lib/knowledge/tags/service'
 import { createLogger } from '@/lib/logs/console/logger'
 import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'
```
```diff
@@ -1,6 +1,6 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
-import { TAG_SLOTS } from '@/lib/constants/knowledge'
+import { TAG_SLOTS } from '@/lib/knowledge/consts'
 import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service'
 import { createLogger } from '@/lib/logs/console/logger'
 import { estimateTokenCount } from '@/lib/tokenization/estimators'
```
```diff
@@ -41,6 +41,7 @@ export async function GET(
       executionId,
       workflowId: workflowLog.workflowId,
       workflowState: snapshot.stateData,
+      executionData: workflowLog.executionData || {},
       executionMetadata: {
         trigger: workflowLog.trigger,
         startedAt: workflowLog.startedAt.toISOString(),
```
```diff
@@ -399,6 +399,9 @@ export async function GET() {
       userId: workflowRecord.userId,
+      workspaceId: workflowRecord.workspaceId || '',
       variables: variables || {},
       initialInput: input,
+      startBlockId: schedule.blockId || undefined,
+      executionType: 'schedule',
     })

     const executor = new Executor({
@@ -467,10 +470,19 @@ export async function GET() {

       // Create a minimal log entry for early failures
       try {
+        const input = {
+          workflowId: schedule.workflowId,
+          _context: {
+            workflowId: schedule.workflowId,
+          },
+        }
         await loggingSession.safeStart({
           userId: workflowRecord.userId,
+          workspaceId: workflowRecord.workspaceId || '',
           variables: {},
           initialInput: input,
+          startBlockId: schedule.blockId || undefined,
+          executionType: 'schedule',
         })

         await loggingSession.safeCompleteWithError({
@@ -586,10 +598,17 @@ export async function GET() {
         requestId
       )

+      const input = {
+        workflowId: schedule.workflowId,
+        _context: {
+          workflowId: schedule.workflowId,
+        },
+      }
       await failureLoggingSession.safeStart({
         userId: workflowRecord.userId,
+        workspaceId: workflowRecord.workspaceId || '',
         variables: {},
         initialInput: input,
       })

       await failureLoggingSession.safeCompleteWithError({
```
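Both failure paths above now build the same minimal input before calling `safeStart`. Its shape as a standalone sketch, with a hypothetical workflow id; the type alias is mine:

```typescript
type EarlyFailureInput = {
  workflowId: string
  _context: { workflowId: string }
}

const input: EarlyFailureInput = {
  workflowId: 'wf_123', // hypothetical id for illustration
  _context: { workflowId: 'wf_123' },
}
```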
```diff
@@ -12,7 +12,7 @@ const DeleteSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   table: z.string().min(1, 'Table name is required'),
   where: z.string().min(1, 'WHERE clause is required'),
 })
```
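The `default('preferred')` change means a request that omits `ssl` now parses to `'preferred'` rather than `'required'`. A self-contained check of that behavior, assuming zod (which these schemas already use):

```typescript
import { z } from 'zod'

const ssl = z.enum(['disabled', 'required', 'preferred']).default('preferred')

console.log(ssl.parse(undefined))  // 'preferred' - the new fallback when the field is omitted
console.log(ssl.parse('required')) // explicit values still pass through unchanged
```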
```diff
@@ -12,7 +12,7 @@ const ExecuteSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   query: z.string().min(1, 'Query is required'),
 })
@@ -12,7 +12,7 @@ const InsertSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   table: z.string().min(1, 'Table name is required'),
   data: z.union([
     z
@@ -12,7 +12,7 @@ const QuerySchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   query: z.string().min(1, 'Query is required'),
 })
@@ -12,7 +12,7 @@ const UpdateSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   table: z.string().min(1, 'Table name is required'),
   data: z.union([
     z
@@ -6,7 +6,7 @@ export interface MySQLConnectionConfig {
   database: string
   username: string
   password: string
-  ssl?: string
+  ssl?: 'disabled' | 'required' | 'preferred'
 }

 export async function createMySQLConnection(config: MySQLConnectionConfig) {
@@ -18,7 +18,9 @@ export async function createMySQLConnection(config: MySQLConnectionConfig) {
     password: config.password,
   }

-  if (config.ssl === 'required') {
+  if (config.ssl === 'disabled') {
+    // Don't set ssl property at all to disable SSL
+  } else if (config.ssl === 'required') {
     connectionConfig.ssl = { rejectUnauthorized: true }
   } else if (config.ssl === 'preferred') {
     connectionConfig.ssl = { rejectUnauthorized: false }
```
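A minimal sketch of the three-way mapping the hunk above introduces, written against mysql2/promise. The `ssl` option and `rejectUnauthorized` flag are real mysql2 options; the connection details and the helper name are illustrative only:

```typescript
import mysql from 'mysql2/promise'

type SslMode = 'disabled' | 'required' | 'preferred'

// 'disabled' omits the ssl property entirely; 'required' verifies the server
// certificate; 'preferred' uses TLS but tolerates self-signed certificates.
function sslOptionFor(mode: SslMode): { rejectUnauthorized: boolean } | undefined {
  if (mode === 'disabled') return undefined
  if (mode === 'required') return { rejectUnauthorized: true }
  return { rejectUnauthorized: false }
}

const connection = await mysql.createConnection({
  host: 'localhost', // hypothetical connection details
  user: 'sim',
  password: 'secret',
  database: 'sim',
  ssl: sslOptionFor('preferred'),
})
```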
```diff
@@ -12,7 +12,7 @@ const DeleteSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   table: z.string().min(1, 'Table name is required'),
   where: z.string().min(1, 'WHERE clause is required'),
 })
@@ -16,7 +16,7 @@ const ExecuteSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   query: z.string().min(1, 'Query is required'),
 })
@@ -12,7 +12,7 @@ const InsertSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   table: z.string().min(1, 'Table name is required'),
   data: z.union([
     z
@@ -12,7 +12,7 @@ const QuerySchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   query: z.string().min(1, 'Query is required'),
 })
@@ -12,7 +12,7 @@ const UpdateSchema = z.object({
   database: z.string().min(1, 'Database name is required'),
   username: z.string().min(1, 'Username is required'),
   password: z.string().min(1, 'Password is required'),
-  ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
+  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
   table: z.string().min(1, 'Table name is required'),
   data: z.union([
     z
```
```diff
@@ -146,6 +146,8 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any): P
     userId: workflow.userId,
+    workspaceId: workflow.workspaceId,
     variables,
     initialInput: processedInput || {},
+    executionType: 'api',
   })

   // Replace environment variables in the block states
```
```diff
@@ -4,6 +4,7 @@ import { LoggingSession } from '@/lib/logs/execution/logging-session'
 import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
 import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
 import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
+import { loadWorkflowStateForExecution } from '@/lib/logs/execution/logging-factory'

 const logger = createLogger('WorkflowLogAPI')

@@ -30,6 +31,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
     success: result.success,
   })

+  // Log current normalized state before starting logging session (what snapshot will save)
+  try {
+    const normalizedState = await loadWorkflowStateForExecution(id)
+    logger.info(`[${requestId}] 🔍 Normalized workflow state at persistence time:`, {
+      blocks: Object.entries(normalizedState.blocks || {}).map(([bid, b]: [string, any]) => ({
+        id: bid,
+        type: (b as any).type,
+        triggerMode: (b as any).triggerMode,
+        enabled: (b as any).enabled,
+      })),
+      edgesCount: (normalizedState.edges || []).length,
+    })
+  } catch (e) {
+    logger.warn(`[${requestId}] Failed to load normalized state for logging snapshot context`)
+  }
+
   // Check if this execution is from chat using only the explicit source flag
   const isChatExecution = result.metadata?.source === 'chat'
```
```diff
@@ -11,7 +11,7 @@ import { createLogger } from '@/lib/logs/console/logger'
 import { getAssetUrl } from '@/lib/utils'
 import '@/app/globals.css'

-import { SessionProvider } from '@/lib/session-context'
+import { SessionProvider } from '@/lib/session/session-context'
 import { ThemeProvider } from '@/app/theme-provider'
 import { ZoomPrevention } from '@/app/zoom-prevention'
```
```diff
@@ -21,6 +21,10 @@ const ACCEPTED_FILE_TYPES = [
   'text/csv',
   'application/vnd.ms-excel',
   'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
+  'text/markdown',
+  'application/vnd.ms-powerpoint',
+  'application/vnd.openxmlformats-officedocument.presentationml.presentation',
+  'text/html',
 ]

 interface FileWithPreview extends File {
@@ -74,7 +78,7 @@ export function UploadModal({
     return `File "${file.name}" is too large. Maximum size is 100MB.`
   }
   if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
-    return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, or XLSX files.`
+    return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, or HTML files.`
   }
   return null
 }
@@ -203,7 +207,8 @@ export function UploadModal({
           {isDragging ? 'Drop files here!' : 'Drop files here or click to browse'}
         </p>
         <p className='text-muted-foreground text-xs'>
-          Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
+          Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max 100MB
+          each)
         </p>
       </div>
     </div>
```
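For reference, the four MIME types added to `ACCEPTED_FILE_TYPES` above, keyed by the extensions the updated copy advertises — a mapping sketch, not code from the repository:

```typescript
const newlyAcceptedTypes: Record<string, string> = {
  md: 'text/markdown',
  ppt: 'application/vnd.ms-powerpoint',
  pptx: 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
  html: 'text/html',
}
```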
```diff
@@ -29,6 +29,10 @@ const ACCEPTED_FILE_TYPES = [
   'text/csv',
   'application/vnd.ms-excel',
   'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
+  'text/markdown',
+  'application/vnd.ms-powerpoint',
+  'application/vnd.openxmlformats-officedocument.presentationml.presentation',
+  'text/html',
 ]

 interface FileWithPreview extends File {
@@ -168,7 +172,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
   // Check file type
   if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
     setFileError(
-      `File ${file.name} has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, or XLSX.`
+      `File ${file.name} has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, or HTML.`
     )
     hasError = true
     continue
@@ -511,7 +515,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
           : 'Drop files here or click to browse'}
       </p>
       <p className='text-muted-foreground text-xs'>
-        Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
+        Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max
+        100MB each)
       </p>
     </div>
   </div>
@@ -552,7 +557,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
           : 'Drop more files or click to browse'}
       </p>
       <p className='text-muted-foreground text-xs'>
-        PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
+        PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max 100MB
+        each)
       </p>
     </div>
   </div>
```
```diff
@@ -25,7 +25,7 @@ import {
   TooltipProvider,
   TooltipTrigger,
 } from '@/components/ui'
-import { MAX_TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
+import { MAX_TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
 import { createLogger } from '@/lib/logs/console/logger'
 import { useKnowledgeBaseTagDefinitions } from '@/hooks/use-knowledge-base-tag-definitions'
 import { useNextAvailableSlot } from '@/hooks/use-next-available-slot'
```
```diff
@@ -6,7 +6,7 @@ import { Button } from '@/components/ui/button'
 import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
 import { Input } from '@/components/ui/input'
 import { Label } from '@/components/ui/label'
-import { TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
+import { TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
 import { useKnowledgeBaseTagDefinitions } from '@/hooks/use-knowledge-base-tag-definitions'

 export type TagData = {
```
```diff
@@ -16,6 +16,7 @@ import { TraceSpansDisplay } from '@/app/workspace/[workspaceId]/logs/components
 import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils/format-date'
 import { formatCost } from '@/providers/utils'
 import type { WorkflowLog } from '@/stores/logs/filters/types'
+import { useParams, useRouter } from 'next/navigation'

 interface LogSidebarProps {
   log: WorkflowLog | null
@@ -199,6 +200,8 @@ export function Sidebar({
   const [isModelsExpanded, setIsModelsExpanded] = useState(false)
   const [isFrozenCanvasOpen, setIsFrozenCanvasOpen] = useState(false)
   const scrollAreaRef = useRef<HTMLDivElement>(null)
+  const router = useRouter()
+  const params = useParams() as { workspaceId?: string }

   // Update currentLogId when log changes
   useEffect(() => {
@@ -529,15 +532,31 @@
           <h3 className='mb-1 font-medium text-muted-foreground text-xs'>
             Workflow State
           </h3>
-          <Button
-            variant='outline'
-            size='sm'
-            onClick={() => setIsFrozenCanvasOpen(true)}
-            className='w-full justify-start gap-2'
-          >
-            <Eye className='h-4 w-4' />
-            View Snapshot
-          </Button>
+          <div className='flex w-full gap-2'>
+            <Button
+              variant='outline'
+              size='sm'
+              onClick={() => setIsFrozenCanvasOpen(true)}
+              className='flex-1 justify-start gap-2'
+            >
+              <Eye className='h-4 w-4' />
+              View Snapshot
+            </Button>
+            <Button
+              variant='secondary'
+              size='sm'
+              onClick={() => {
+                try {
+                  const href = `/workspace/${encodeURIComponent(String(params?.workspaceId || ''))}/w/${encodeURIComponent(String(log.workflowId || ''))}`
+                  router.push(href)
+                } catch {}
+              }}
+              className='flex-1 justify-start gap-2'
+            >
+              <Eye className='h-4 w-4' />
+              Open Live Debug
+            </Button>
+          </div>
           <p className='mt-1 text-muted-foreground text-xs'>
             See the exact workflow state and block inputs/outputs at execution time
           </p>
```
```diff
@@ -8,8 +8,6 @@ import {
   Layers,
   Play,
   RefreshCw,
-  SkipForward,
-  StepForward,
   Store,
   Trash2,
   WifiOff,
@@ -44,6 +42,8 @@ import {
   getKeyboardShortcutText,
   useKeyboardShortcuts,
 } from '@/app/workspace/[workspaceId]/w/hooks/use-keyboard-shortcuts'
 import { useExecutionStore } from '@/stores/execution/store'
+import { useDebugCanvasStore } from '@/stores/execution/debug-canvas/store'
 import { useFolderStore } from '@/stores/folders/store'
+import { usePanelStore } from '@/stores/panel/store'
 import { useGeneralStore } from '@/stores/settings/general/store'
@@ -111,6 +111,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
   const [isExpanded, setIsExpanded] = useState(false)
   const [isTemplateModalOpen, setIsTemplateModalOpen] = useState(false)
   const [isAutoLayouting, setIsAutoLayouting] = useState(false)
+  // Remove chat modal state
+  // const [isChatPromptOpen, setIsChatPromptOpen] = useState(false)
+  // const [chatPrompt, setChatPrompt] = useState('')

   // Delete workflow state - grouped for better organization
   const [deleteState, setDeleteState] = useState({
@@ -146,6 +149,13 @@
     }
   }, [setActiveTab, isOpen, togglePanel])

+  const openDebugPanel = useCallback(() => {
+    setActiveTab('debug')
+    if (!isOpen) {
+      togglePanel()
+    }
+  }, [setActiveTab, isOpen, togglePanel])
+
   // Shared condition for keyboard shortcut and button disabled state
   const isWorkflowBlocked = isExecuting || hasValidationErrors

@@ -808,6 +818,7 @@

     if (isDebugging) {
       // Stop debugging
+      try { useDebugCanvasStore.getState().clear() } catch {}
       handleCancelDebug()
     } else {
       // Check if there are executable blocks before starting debug mode
@@ -819,15 +830,31 @@
         return // Do nothing if no executable blocks
       }

-      // Start debugging
+      // Determine starter id for focus
+      const starter = Object.values(blocks).find((b) => b.type === 'starter') as any
+      const starterId = starter?.id as string | undefined
+
+      // Enable debug UI but do NOT start execution
       if (!isDebugModeEnabled) {
         toggleDebugMode()
       }
       if (usageExceeded) {
         openSubscriptionSettings()
       } else {
-        openConsolePanel()
-        handleRunWorkflow(undefined, true) // Start in debug mode
+        // Activate debug session state so the panel is active
+        const execStore = useExecutionStore.getState()
+        execStore.setIsExecuting(false)
+        execStore.setIsDebugging(true)
+        // Set the Start block as pending - it will execute on first Step
+        execStore.setPendingBlocks(starterId ? [starterId] : [])
+
+        // Show Debug tab and mark starter as the current block to execute
+        openDebugPanel()
+        if (starterId) {
+          execStore.setActiveBlocks(new Set([starterId]))
+        }
+        // Ensure debug canvas starts in a clean state
+        try { useDebugCanvasStore.getState().clear() } catch {}
       }
     }
   }, [
@@ -838,8 +865,7 @@
     blocks,
     handleCancelDebug,
     toggleDebugMode,
-    handleRunWorkflow,
-    openConsolePanel,
+    openDebugPanel,
   ])

   /**
@@ -859,40 +885,7 @@

     return (
       <div className='flex items-center gap-1'>
-        <Tooltip>
-          <TooltipTrigger asChild>
-            <Button
-              onClick={() => {
-                openConsolePanel()
-                handleStepDebug()
-              }}
-              className={debugButtonClass}
-              disabled={isControlDisabled}
-            >
-              <StepForward className='h-5 w-5' />
-              <span className='sr-only'>Step Forward</span>
-            </Button>
-          </TooltipTrigger>
-          <TooltipContent>Step Forward</TooltipContent>
-        </Tooltip>
-
-        <Tooltip>
-          <TooltipTrigger asChild>
-            <Button
-              onClick={() => {
-                openConsolePanel()
-                handleResumeDebug()
-              }}
-              className={debugButtonClass}
-              disabled={isControlDisabled}
-            >
-              <SkipForward className='h-5 w-5' />
-              <span className='sr-only'>Resume Until End</span>
-            </Button>
-          </TooltipTrigger>
-          <TooltipContent>Resume Until End</TooltipContent>
-        </Tooltip>
-
+        {/* Keep only cancel (X) here; step/resume moved to panel */}
         <Tooltip>
           <TooltipTrigger asChild>
             <Button
@@ -1214,7 +1207,7 @@
       {isExpanded && renderPublishButton()}
       {renderDeleteButton()}
       {renderDuplicateButton()}
-      {!isDebugging && renderDebugModeToggle()}
+      {renderDebugModeToggle()}
       {renderDeployButton()}
       {isDebugging ? renderDebugControlsBar() : renderRunButton()}

@@ -1226,6 +1219,8 @@
           workflowId={activeWorkflowId}
         />
       )}
+
+      {/* Removed chat prompt dialog; chat input now lives in DebugPanel */}
     </div>
   )
 }
```
File diff suppressed because it is too large.
```diff
@@ -10,6 +10,7 @@ import {
 import { ScrollArea } from '@/components/ui/scroll-area'
 import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
 import { useCopilotStore } from '@/stores/copilot/store'
+import { useExecutionStore } from '@/stores/execution/store'
 import { useChatStore } from '@/stores/panel/chat/store'
 import { useConsoleStore } from '@/stores/panel/console/store'
 import { usePanelStore } from '@/stores/panel/store'
@@ -17,6 +18,7 @@ import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 import { Chat } from './components/chat/chat'
 import { Console } from './components/console/console'
 import { Copilot } from './components/copilot/copilot'
+import { DebugPanel } from './components/debug/debug'
 import { Variables } from './components/variables/variables'

 export function Panel() {
@@ -44,6 +46,9 @@ export function Panel() {
   const exportChatCSV = useChatStore((state) => state.exportChatCSV)
   const { activeWorkflowId } = useWorkflowRegistry()

+  // Get debug state
+  const isDebugging = useExecutionStore((state) => state.isDebugging)
+
   // Copilot store for chat management
   const {
     chats,
@@ -216,7 +221,11 @@
   )

   // Handle tab clicks - no loading, just switch tabs
-  const handleTabClick = async (tab: 'chat' | 'console' | 'variables' | 'copilot') => {
+  const handleTabClick = async (tab: 'chat' | 'console' | 'variables' | 'copilot' | 'debug') => {
+    // Don't allow clicking debug tab if not debugging
+    if (tab === 'debug' && !isDebugging) {
+      return
+    }
     setActiveTab(tab)
     if (!isOpen) {
       togglePanel()
@@ -284,10 +293,30 @@
     }
   }, [activeWorkflowId, copilotWorkflowId, ensureCopilotDataLoaded])

+  // When debug mode ends, switch to a different tab if debug was active
+  useEffect(() => {
+    if (!isDebugging && activeTab === 'debug') {
+      setActiveTab('console')
+    }
+  }, [isDebugging, activeTab, setActiveTab])
+
+  // When debug mode starts, automatically open the debug panel
+  useEffect(() => {
+    if (isDebugging) {
+      setActiveTab('debug')
+      if (!isOpen) {
+        togglePanel()
+      }
+    }
+  }, [isDebugging, setActiveTab, isOpen, togglePanel])
+
   return (
     <>
       {/* Tab Selector - Always visible */}
-      <div className='fixed top-[76px] right-4 z-20 flex h-9 w-[308px] items-center gap-1 rounded-[14px] border bg-card px-[2.5px] py-1 shadow-xs'>
+      <div
+        className='fixed top-[76px] right-4 z-20 flex h-9 items-center gap-1 rounded-[14px] border bg-card px-[2.5px] py-1 shadow-xs'
+        style={{ width: isDebugging ? '380px' : '308px' }}
+      >
         <button
           onClick={() => handleTabClick('chat')}
           className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
@@ -320,6 +349,16 @@
         >
           Variables
         </button>
+        {isDebugging && (
+          <button
+            onClick={() => handleTabClick('debug')}
+            className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
+              isOpen && activeTab === 'debug' ? 'panel-tab-active' : 'panel-tab-inactive'
+            }`}
+          >
+            Debug
+          </button>
+        )}
       </div>

       {/* Panel Content - Only visible when isOpen is true */}
@@ -512,6 +551,9 @@
           <div style={{ display: activeTab === 'variables' ? 'block' : 'none', height: '100%' }}>
             <Variables />
           </div>
+          <div style={{ display: activeTab === 'debug' ? 'block' : 'none', height: '100%' }}>
+            <DebugPanel />
+          </div>
         </div>
       </div>
     )}
```
```diff
@@ -6,7 +6,7 @@ import { Button } from '@/components/ui/button'
 import { formatDisplayText } from '@/components/ui/formatted-text'
 import { Input } from '@/components/ui/input'
 import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
-import { MAX_TAG_SLOTS } from '@/lib/constants/knowledge'
+import { MAX_TAG_SLOTS } from '@/lib/knowledge/consts'
 import { cn } from '@/lib/utils'
 import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
 import type { SubBlockConfig } from '@/blocks/types'
```
```diff
@@ -344,7 +344,13 @@ export function TriggerConfig({

   // Check if the trigger is connected
   // Both webhook and credential-based triggers now have webhook database entries
-  const isTriggerConnected = Boolean(triggerId && actualTriggerId)
+  // In preview, consider it configured if the snapshot contains any trigger fields
+  const isConfiguredInPreview = isPreview && Boolean(
+    (propValue?.triggerPath && propValue.triggerPath.length > 0) ||
+    (propValue?.triggerConfig && Object.keys(propValue.triggerConfig).length > 0) ||
+    propValue?.triggerId
+  )
+  const isTriggerConnected = isConfiguredInPreview || Boolean(triggerId && actualTriggerId)

   // Debug logging to help with troubleshooting
   useEffect(() => {
```
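The preview heuristic above, extracted into a standalone predicate. Field names come from the diff; the props type is an assumption for illustration:

```typescript
type TriggerSnapshot = {
  triggerPath?: string
  triggerConfig?: Record<string, unknown>
  triggerId?: string
}

// A snapshot counts as configured when any trigger field survived serialization.
function isConfiguredInPreview(isPreview: boolean, propValue?: TriggerSnapshot): boolean {
  return (
    isPreview &&
    Boolean(
      (propValue?.triggerPath && propValue.triggerPath.length > 0) ||
        (propValue?.triggerConfig && Object.keys(propValue.triggerConfig).length > 0) ||
        propValue?.triggerId
    )
  )
}
```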
```diff
@@ -13,6 +13,8 @@ import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/provide
 import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types'
 import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
+import { useExecutionStore } from '@/stores/execution/store'
+import { usePanelStore } from '@/stores/panel/store'
 import { useGeneralStore } from '@/stores/settings/general/store'
 import { useWorkflowDiffStore } from '@/stores/workflow-diff'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 import { useSubBlockStore } from '@/stores/workflows/subblock/store'
@@ -435,8 +437,12 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
     stateToUse = mergedState?.subBlocks || {}
   }

-  const isAdvancedMode = useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false
-  const isTriggerMode = useWorkflowStore.getState().blocks[blockId]?.triggerMode ?? false
+  const isAdvancedMode = data.isPreview
+    ? ((data.blockState as any)?.advancedMode ?? false)
+    : useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false
+  const isTriggerMode = data.isPreview
+    ? ((data.blockState as any)?.triggerMode ?? false)
+    : useWorkflowStore.getState().blocks[blockId]?.triggerMode ?? false
   const effectiveAdvanced = currentWorkflow.isDiffMode ? displayAdvancedMode : isAdvancedMode
   const effectiveTrigger = currentWorkflow.isDiffMode ? displayTriggerMode : isTriggerMode
```
```diff
@@ -580,6 +586,72 @@
     type === 'schedule' && !isLoadingScheduleInfo && scheduleInfo !== null
   const userPermissions = useUserPermissionsContext()

+  // Debug mode and active selection
+  const isDebugModeEnabled = useGeneralStore((s) => s.isDebugModeEnabled)
+  const activeBlockIds = useExecutionStore((s) => s.activeBlockIds)
+  const panelFocusedBlockId = useExecutionStore((s) => s.panelFocusedBlockId)
+  const setPanelFocusedBlockId = useExecutionStore((s) => s.setPanelFocusedBlockId)
+  const executingBlockIds = useExecutionStore((s) => s.executingBlockIds)
+  const setActiveBlocks = useExecutionStore((s) => s.setActiveBlocks)
+  const setActiveTab = usePanelStore((s) => s.setActiveTab)
+  const breakpointId = useExecutionStore((s) => s.breakpointId)
+  const debugContext = useExecutionStore((s) => s.debugContext)
+  const startPositionIds = useExecutionStore((s) => s.startPositionIds)
+
+  const handleDebugOpen = (e: React.MouseEvent) => {
+    if (!isDebugModeEnabled) return
+    e.stopPropagation()
+    setActiveBlocks(new Set([id]))
+    setActiveTab('debug')
+    // Always select this block for the debug panel focus
+    setPanelFocusedBlockId(id)
+  }
+
+  // In debug mode, use executingBlockIds to detect actual executing blocks (not selection);
+  // outside debug, fall back to activeBlockIds driven by the executor
+  const isExecutingNow = isDebugModeEnabled ? executingBlockIds.has(id) : activeBlockIds.has(id)
+  const isCurrentBlock = isDebugModeEnabled && isPending
+  const isPanelFocused = isDebugModeEnabled && panelFocusedBlockId === id
+
+  // Check if block has errored during debug execution
+  const hasError =
+    isDebugModeEnabled && debugContext
+      ? (() => {
+          // Check direct block state for error
+          const directState = debugContext.blockStates?.get(id)
+          if (
+            directState?.output &&
+            typeof directState.output === 'object' &&
+            'error' in directState.output
+          ) {
+            return true
+          }
+          // Check virtual executions for errors (for blocks inside parallels)
+          for (const [key, state] of debugContext.blockStates?.entries() || []) {
+            // Check if this is a virtual ID for our block
+            if (typeof key === 'string' && key.startsWith(`${id}_parallel_`)) {
+              if (state?.output && typeof state.output === 'object' && 'error' in state.output) {
+                return true
+              }
+            }
+          }
+          // Also check block logs for this block
+          const hasErrorLog = debugContext.blockLogs?.some((log: any) => {
+            if (log.blockId === id && !log.success) return true
+            // Check if log is for a virtual version of this block
+            if (
+              typeof log.blockId === 'string' &&
+              log.blockId.startsWith(`${id}_parallel_`) &&
+              !log.success
+            ) {
+              return true
+            }
+            return false
+          })
+          return hasErrorLog || false
+        })()
+      : false
+
   return (
     <div className='group relative'>
       <Card
@@ -589,20 +661,54 @@
           'transition-block-bg transition-ring',
           displayIsWide ? 'w-[480px]' : 'w-[320px]',
           !isEnabled && 'shadow-sm',
-          isActive && 'animate-pulse-ring ring-2 ring-blue-500',
-          isPending && 'ring-2 ring-amber-500',
-          // Diff highlighting
-          diffStatus === 'new' && 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
-          diffStatus === 'edited' && 'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10',
+          // Error state - highest priority (only border, no background)
+          hasError && 'ring-2 ring-red-500',
+          // Panel-focused block highlight (unless errored)
+          !hasError && isPanelFocused && 'bg-blue-50/60 dark:bg-blue-900/5',
+          // Executing blocks match staging: pulsing blue ring
+          !hasError && isExecutingNow && 'animate-pulse-ring ring-2 ring-blue-500',
+          // Pending blocks show blue border when not executing
+          !hasError && !isExecutingNow && isCurrentBlock && 'ring-2 ring-blue-500',
+          // Diff highlighting (only if not in debug error state)
+          !hasError &&
+            diffStatus === 'new' &&
+            'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
+          !hasError &&
+            diffStatus === 'edited' &&
+            'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10',
           // Deleted block highlighting (in original workflow)
           isDeletedBlock && 'bg-red-50/50 ring-2 ring-red-500 dark:bg-red-900/10',
           'z-[20]'
         )}
+        onClick={handleDebugOpen}
       >
-        {/* Show debug indicator for pending blocks */}
-        {isPending && (
-          <div className='-top-6 -translate-x-1/2 absolute left-1/2 z-10 transform rounded-t-md bg-amber-500 px-2 py-0.5 text-white text-xs'>
-            Next Step
+        {/* Show error indicator for errored blocks */}
+        {hasError && (
+          <div className='-top-6 -translate-x-1/2 absolute left-1/2 z-10 transform rounded-t-md bg-red-500 px-2 py-0.5 text-white text-xs'>
+            Error
           </div>
         )}

+        {/* Show debug indicator for current blocks in debug mode (pending or executing) - but not if errored */}
+        {!hasError && isDebugModeEnabled && (isPending || executingBlockIds.has(id)) && (
+          <div className='-top-6 -translate-x-1/2 absolute left-1/2 z-10 transform rounded-t-md bg-blue-500 px-2 py-0.5 text-white text-xs'>
+            Current
+          </div>
+        )}
+
+        {/* Bottom indicators: breakpoint and start position side by side */}
+        {isDebugModeEnabled && (breakpointId === id || startPositionIds.has(id)) && (
+          <div className='-bottom-6 -translate-x-1/2 absolute left-1/2 z-10 flex transform items-end gap-2'>
+            {breakpointId === id && (
+              <div className='rounded-b-md bg-orange-500 px-2 py-0.5 text-white text-xs'>
+                Breakpoint
+              </div>
+            )}
+            {startPositionIds.has(id) && (
+              <div className='rounded-b-md bg-purple-600 px-2 py-0.5 text-white text-xs'>
+                Start Position
+              </div>
+            )}
+          </div>
+        )}
```
@@ -4,36 +4,38 @@ import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import type { DeploymentStatus } from '@/stores/workflows/registry/types'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState, Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'
import { useDebugCanvasStore } from '@/stores/execution/debug-canvas/store'

/**
 * Interface for the current workflow abstraction
 */
export interface CurrentWorkflow {
  // Current workflow state properties
  blocks: Record<string, BlockState>
  edges: Edge[]
  loops: Record<string, Loop>
  parallels: Record<string, Parallel>
  lastSaved?: number
  isDeployed?: boolean
  deployedAt?: Date
  deploymentStatuses?: Record<string, DeploymentStatus>
  needsRedeployment?: boolean
  hasActiveWebhook?: boolean

  // Mode information
  isDiffMode: boolean
  isNormalMode: boolean
  isDebugCanvasMode?: boolean

  // Full workflow state (for cases that need the complete object)
  workflowState: WorkflowState

  // Helper methods
  getBlockById: (blockId: string) => BlockState | undefined
  getBlockCount: () => number
  getEdgeCount: () => number
  hasBlocks: () => boolean
  hasEdges: () => boolean
}

/**
@@ -41,48 +43,91 @@ export interface CurrentWorkflow {
 * Automatically handles diff vs normal mode without exposing the complexity to consumers.
 */
export function useCurrentWorkflow(): CurrentWorkflow {
  // Get normal workflow state
  const normalWorkflow = useWorkflowStore((state) => state.getWorkflowState())

  // Get diff state - now including isDiffReady
  const { isShowingDiff, isDiffReady, diffWorkflow } = useWorkflowDiffStore()

  // Get debug canvas override
  const debugCanvas = useDebugCanvasStore((s) => ({ isActive: s.isActive, workflowState: s.workflowState }))

  // Create the abstracted interface
  const currentWorkflow = useMemo((): CurrentWorkflow => {
    // Prefer debug canvas if active
    const hasDebugCanvas = !!debugCanvas.isActive && !!debugCanvas.workflowState
    if (hasDebugCanvas) {
      console.log('[useCurrentWorkflow] Using debug canvas state', {
        isActive: debugCanvas.isActive,
        hasWorkflowState: !!debugCanvas.workflowState,
        blockCount: debugCanvas.workflowState ? Object.keys(debugCanvas.workflowState.blocks || {}).length : 0,
        edgeCount: debugCanvas.workflowState ? (debugCanvas.workflowState.edges || []).length : 0
      })
      const activeWorkflow = debugCanvas.workflowState as WorkflowState
      return {
        blocks: activeWorkflow.blocks,
        edges: activeWorkflow.edges,
        loops: activeWorkflow.loops || {},
        parallels: activeWorkflow.parallels || {},
        lastSaved: activeWorkflow.lastSaved,
        isDeployed: activeWorkflow.isDeployed,
        deployedAt: activeWorkflow.deployedAt,
        deploymentStatuses: activeWorkflow.deploymentStatuses,
        needsRedeployment: activeWorkflow.needsRedeployment,
        hasActiveWebhook: activeWorkflow.hasActiveWebhook,
        isDiffMode: false,
        isNormalMode: false,
        isDebugCanvasMode: true,
        workflowState: activeWorkflow,
        getBlockById: (blockId: string) => activeWorkflow.blocks[blockId],
        getBlockCount: () => Object.keys(activeWorkflow.blocks).length,
        getEdgeCount: () => activeWorkflow.edges.length,
        hasBlocks: () => Object.keys(activeWorkflow.blocks).length > 0,
        hasEdges: () => activeWorkflow.edges.length > 0,
      }
    }

    // Determine which workflow to use - only use diff if it's ready
    const hasDiffBlocks = !!diffWorkflow && Object.keys((diffWorkflow as any).blocks || {}).length > 0
    const shouldUseDiff = isShowingDiff && isDiffReady && hasDiffBlocks
    const activeWorkflow = shouldUseDiff ? diffWorkflow : normalWorkflow

    console.log('[useCurrentWorkflow] Not using debug canvas', {
      debugCanvasIsActive: debugCanvas.isActive,
      debugCanvasHasState: !!debugCanvas.workflowState,
      usingDiff: shouldUseDiff,
      normalBlockCount: Object.keys(normalWorkflow.blocks || {}).length
    })

    return {
      // Current workflow state
      blocks: activeWorkflow.blocks,
      edges: activeWorkflow.edges,
      loops: activeWorkflow.loops || {},
      parallels: activeWorkflow.parallels || {},
      lastSaved: activeWorkflow.lastSaved,
      isDeployed: activeWorkflow.isDeployed,
      deployedAt: activeWorkflow.deployedAt,
      deploymentStatuses: activeWorkflow.deploymentStatuses,
      needsRedeployment: activeWorkflow.needsRedeployment,
      hasActiveWebhook: activeWorkflow.hasActiveWebhook,

      // Mode information - update to reflect ready state
      isDiffMode: shouldUseDiff,
      isNormalMode: !shouldUseDiff,
      isDebugCanvasMode: false,

      // Full workflow state (for cases that need the complete object)
      workflowState: activeWorkflow,

      // Helper methods
      getBlockById: (blockId: string) => activeWorkflow.blocks[blockId],
      getBlockCount: () => Object.keys(activeWorkflow.blocks).length,
      getEdgeCount: () => activeWorkflow.edges.length,
      hasBlocks: () => Object.keys(activeWorkflow.blocks).length > 0,
      hasEdges: () => activeWorkflow.edges.length > 0,
    }
  }, [normalWorkflow, isShowingDiff, isDiffReady, diffWorkflow, debugCanvas.isActive, debugCanvas.workflowState])

  return currentWorkflow
}
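For context, a hypothetical consumer of the hook above (WorkflowSummary is an invented name, not part of this change) — the point of the abstraction is that callers never branch on debug/diff/normal sourcing themselves:

function WorkflowSummary() {
  const workflow = useCurrentWorkflow()
  // Identical API regardless of which store the hook resolved
  const mode = workflow.isDebugCanvasMode ? 'debug canvas' : workflow.isDiffMode ? 'diff' : 'normal'
  return <span>{`${workflow.getBlockCount()} blocks, ${workflow.getEdgeCount()} edges (${mode})`}</span>
}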
@@ -9,6 +9,7 @@ import { Executor } from '@/executor'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
import { Serializer } from '@/serializer'
import type { SerializedWorkflow } from '@/serializer/types'
import { useDebugSnapshotStore } from '@/stores/execution/debug-snapshots/store'
import { useExecutionStore } from '@/stores/execution/store'
import { useConsoleStore } from '@/stores/panel/console/store'
import { useVariablesStore } from '@/stores/panel/variables/store'
@@ -62,6 +63,8 @@ export function useWorkflowExecution() {
    setExecutor,
    setDebugContext,
    setActiveBlocks,
    setExecutingBlockIds,
    startPositionIds,
  } = useExecutionStore()
  const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)

@@ -70,7 +73,7 @@ export function useWorkflowExecution() {
   */
  const validateDebugState = useCallback((): DebugValidationResult => {
    if (!executor || !debugContext || pendingBlocks.length === 0) {
      const missing = []
      const missing = [] as string[]
      if (!executor) missing.push('executor')
      if (!debugContext) missing.push('debugContext')
      if (pendingBlocks.length === 0) missing.push('pendingBlocks')
@@ -93,6 +96,7 @@ export function useWorkflowExecution() {
    setExecutor(null)
    setPendingBlocks([])
    setActiveBlocks(new Set())
    setExecutingBlockIds(new Set())

    // Reset debug mode setting if it was enabled
    if (isDebugModeEnabled) {
@@ -105,6 +109,7 @@ export function useWorkflowExecution() {
    setExecutor,
    setPendingBlocks,
    setActiveBlocks,
    setExecutingBlockIds,
    isDebugModeEnabled,
  ])

@@ -120,7 +125,7 @@ export function useWorkflowExecution() {
  }, [])

  /**
   * Handles debug session completion
   * Handles debug session completion - keep debug session open for inspection
   */
  const handleDebugSessionComplete = useCallback(
    async (result: ExecutionResult) => {
@@ -130,10 +135,14 @@ export function useWorkflowExecution() {
      // Persist logs
      await persistLogs(uuidv4(), result)

      // Reset debug state
      resetDebugState()
      // Keep debug mode open for inspection: stop executing, clear pending
      setIsExecuting(false)
      setPendingBlocks([])
      setExecutingBlockIds(new Set())
      // Keep debugContext and executor so the panel can inspect state
      // Do not reset isDebugging
    },
    [activeWorkflowId, resetDebugState]
    [activeWorkflowId, setIsExecuting, setPendingBlocks, setExecutingBlockIds]
  )

  /**
@@ -148,16 +157,30 @@ export function useWorkflowExecution() {
      // Update debug context and pending blocks
      if (result.metadata?.context) {
        setDebugContext(result.metadata.context)
        // Capture snapshot for revert/backstep
        try {
          useDebugSnapshotStore.getState().captureFromContext(result.metadata.context as any)
          useDebugSnapshotStore
            .getState()
            .pushFromContext(result.metadata.context as any, result.metadata?.pendingBlocks || [])
        } catch {}
      }
      if (result.metadata?.pendingBlocks) {
        setPendingBlocks(result.metadata.pendingBlocks)
        // Filter triggers from next pending
        const filtered = (result.metadata.pendingBlocks as string[]).filter((id) => {
          const block = currentWorkflow.blocks[id]
          if (!block) return false
          const cfg = getBlock(block.type)
          return cfg?.category !== 'triggers'
        })
        setPendingBlocks(filtered)
      }
    },
    [setDebugContext, setPendingBlocks]
    [setDebugContext, setPendingBlocks, currentWorkflow.blocks]
  )

  /**
   * Handles debug execution errors
   * Handles debug execution errors - keep debug open for inspection
   */
  const handleDebugExecutionError = useCallback(
    async (error: any, operation: string) => {
@@ -176,10 +199,13 @@ export function useWorkflowExecution() {
      // Persist logs
      await persistLogs(uuidv4(), errorResult)

      // Reset debug state
      resetDebugState()
      // Keep debug session open for inspection
      setIsExecuting(false)
      setPendingBlocks([])
      setExecutingBlockIds(new Set())
      // Keep isDebugging, debugContext, and executor intact
    },
    [debugContext, activeWorkflowId, resetDebugState]
    [debugContext, activeWorkflowId, setIsExecuting, setPendingBlocks, setExecutingBlockIds]
  )

  const persistLogs = async (
@@ -268,8 +294,8 @@ export function useWorkflowExecution() {
    const isChatExecution =
      workflowInput && typeof workflowInput === 'object' && 'input' in workflowInput

    // For chat executions, we'll use a streaming approach
    if (isChatExecution) {
    // For chat executions, use streaming only when NOT debugging
    if (isChatExecution && !enableDebug) {
      const stream = new ReadableStream({
        async start(controller) {
          const encoder = new TextEncoder()
@@ -390,7 +416,7 @@ export function useWorkflowExecution() {
          }

          try {
            const result = await executeWorkflow(workflowInput, onStream, executionId)
            const result = await executeWorkflow(workflowInput, onStream, executionId, false)

            // Check if execution was cancelled
            if (
@@ -448,7 +474,6 @@ export function useWorkflowExecution() {
          } catch (error: any) {
            controller.error(error)
          } finally {
            controller.close()
            setIsExecuting(false)
            setIsDebugging(false)
            setActiveBlocks(new Set())
@@ -458,12 +483,23 @@ export function useWorkflowExecution() {
      return { success: true, stream }
    }

    // For manual (non-chat) execution
    // For manual (non-streaming) execution including debug and non-chat
    const executionId = uuidv4()
    try {
      const result = await executeWorkflow(workflowInput, undefined, executionId)
      const result = await executeWorkflow(workflowInput, undefined, executionId, enableDebug)
      if (result && 'metadata' in result && result.metadata?.isDebugSession) {
        setDebugContext(result.metadata.context || null)
        try {
          if (result.metadata?.context) {
            useDebugSnapshotStore.getState().captureFromContext(result.metadata.context as any)
            useDebugSnapshotStore
              .getState()
              .pushFromContext(
                result.metadata.context as any,
                result.metadata?.pendingBlocks || []
              )
          }
        } catch {}
        if (result.metadata.pendingBlocks) {
          setPendingBlocks(result.metadata.pendingBlocks)
        }
@@ -508,13 +544,15 @@ export function useWorkflowExecution() {
      setExecutor,
      setPendingBlocks,
      setActiveBlocks,
      startPositionIds,
    ]
  )

  const executeWorkflow = async (
    workflowInput?: any,
    onStream?: (se: StreamingExecution) => Promise<void>,
    executionId?: string
    executionId?: string,
    debugRequested?: boolean
  ): Promise<ExecutionResult | StreamingExecution> => {
    // Use currentWorkflow but check if we're in diff mode
    const {
@@ -602,7 +640,7 @@ export function useWorkflowExecution() {
    const envVars = getAllVariables()
    const envVarValues = Object.entries(envVars).reduce(
      (acc, [key, variable]) => {
        acc[key] = variable.value
        acc[key] = (variable as any).value
        return acc
      },
      {} as Record<string, string>
@@ -672,7 +710,9 @@ export function useWorkflowExecution() {
    setExecutor(newExecutor)

    // Execute workflow
    return newExecutor.execute(activeWorkflowId || '')
    const execResult = await newExecutor.execute(activeWorkflowId || '')

    return execResult
  }

  const handleExecutionError = (error: any) => {
@@ -748,14 +788,33 @@ export function useWorkflowExecution() {
    // Validate debug state
    const validation = validateDebugState()
    if (!validation.isValid) {
      resetDebugState()
      // Keep session open for inspection; simply stop executing
      setIsExecuting(false)
      return
    }

    // Compute executable set without triggers
    const nonTriggerPending = pendingBlocks.filter((id) => {
      const block = currentWorkflow.blocks[id]
      if (!block) return false
      const cfg = getBlock(block.type)
      return cfg?.category !== 'triggers'
    })

    if (nonTriggerPending.length === 0) {
      // Nothing executable
      setIsExecuting(false)
      return
    }

    try {
      logger.info('Executing debug step with blocks:', pendingBlocks)
      const result = await executor!.continueExecution(pendingBlocks, debugContext!)
      logger.info('Executing debug step with blocks:', nonTriggerPending)
      // Mark current pending blocks as executing for UI pulse
      setExecutingBlockIds(new Set(nonTriggerPending))
      const result = await executor!.continueExecution(nonTriggerPending, debugContext!)
      logger.info('Debug step execution result:', result)
      // Clear executing state after step returns
      setExecutingBlockIds(new Set())

      if (isDebugSessionComplete(result)) {
        await handleDebugSessionComplete(result)
@@ -763,6 +822,7 @@ export function useWorkflowExecution() {
        handleDebugSessionContinuation(result)
      }
    } catch (error: any) {
      setExecutingBlockIds(new Set())
      await handleDebugExecutionError(error, 'step')
    }
  }, [
@@ -771,11 +831,13 @@ export function useWorkflowExecution() {
    pendingBlocks,
    activeWorkflowId,
    validateDebugState,
    resetDebugState,
    setIsExecuting,
    setExecutingBlockIds,
    isDebugSessionComplete,
    handleDebugSessionComplete,
    handleDebugSessionContinuation,
    handleDebugExecutionError,
    currentWorkflow.blocks,
  ])

  /**
@@ -791,7 +853,8 @@ export function useWorkflowExecution() {
    // Validate debug state
    const validation = validateDebugState()
    if (!validation.isValid) {
      resetDebugState()
      // Keep session open for inspection; simply stop executing
      setIsExecuting(false)
      return
    }

@@ -808,6 +871,14 @@ export function useWorkflowExecution() {
    let currentContext = { ...debugContext! }
    let currentPendingBlocks = [...pendingBlocks]

    // Filter initial pending
    currentPendingBlocks = currentPendingBlocks.filter((id) => {
      const block = currentWorkflow.blocks[id]
      if (!block) return false
      const cfg = getBlock(block.type)
      return cfg?.category !== 'triggers'
    })

    logger.info('Starting resume execution with blocks:', currentPendingBlocks)

    // Continue execution until there are no more pending blocks
@@ -819,7 +890,9 @@ export function useWorkflowExecution() {
        `Resume iteration ${iterationCount + 1}, executing ${currentPendingBlocks.length} blocks`
      )

      setExecutingBlockIds(new Set(currentPendingBlocks))
      currentResult = await executor!.continueExecution(currentPendingBlocks, currentContext)
      setExecutingBlockIds(new Set())

      logger.info('Resume iteration result:', {
        success: currentResult.success,
@@ -835,9 +908,14 @@ export function useWorkflowExecution() {
        break
      }

      // Update pending blocks for next iteration
      // Update pending blocks for next iteration, filtered
      if (currentResult.metadata?.pendingBlocks) {
        currentPendingBlocks = currentResult.metadata.pendingBlocks
        currentPendingBlocks = (currentResult.metadata.pendingBlocks as string[]).filter((id) => {
          const block = currentWorkflow.blocks[id]
          if (!block) return false
          const cfg = getBlock(block.type)
          return cfg?.category !== 'triggers'
        })
      } else {
        logger.info('No pending blocks in result, ending resume')
        break
@@ -864,6 +942,7 @@ export function useWorkflowExecution() {
      // Handle completion
      await handleDebugSessionComplete(currentResult)
    } catch (error: any) {
      setExecutingBlockIds(new Set())
      await handleDebugExecutionError(error, 'resume')
    }
  }, [
@@ -872,9 +951,11 @@ export function useWorkflowExecution() {
    pendingBlocks,
    activeWorkflowId,
    validateDebugState,
    resetDebugState,
    setIsExecuting,
    setExecutingBlockIds,
    handleDebugSessionComplete,
    handleDebugExecutionError,
    currentWorkflow.blocks,
  ])

  /**
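The trigger-exclusion predicate above now appears in several places (step, resume, and both continuation paths). A small helper along these lines could collapse them — a sketch only; excludeTriggerBlocks is not a name used in this change:

const excludeTriggerBlocks = (ids: string[], blocks: Record<string, BlockState>): string[] =>
  ids.filter((id) => {
    const block = blocks[id]
    if (!block) return false
    // Same check as inline above: drop blocks whose config category is 'triggers'
    return getBlock(block.type)?.category !== 'triggers'
  })

// e.g. setPendingBlocks(excludeTriggerBlocks(result.metadata.pendingBlocks, currentWorkflow.blocks))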
@@ -131,6 +131,17 @@ export async function executeWorkflowWithLogging(
  // Merge subblock states from the appropriate store
  const mergedStates = mergeSubblockState(validBlocks)

  // Log the current workflow state before filtering
  logger.info('🔍 Current workflow state before filtering:', {
    totalBlocks: Object.keys(mergedStates).length,
    blocks: Object.entries(mergedStates).map(([id, block]) => ({
      id,
      type: block.type,
      triggerMode: block.triggerMode,
      category: block.type ? getBlock(block.type)?.category : undefined,
    })),
  })

  // Filter out trigger blocks for manual execution
  const filteredStates = Object.entries(mergedStates).reduce(
    (acc, [id, block]) => {
@@ -142,16 +153,29 @@ export async function executeWorkflowWithLogging(

      const blockConfig = getBlock(block.type)
      const isTriggerBlock = blockConfig?.category === 'triggers'
      const isInTriggerMode = block.triggerMode === true

      // Skip trigger blocks during manual execution
      if (!isTriggerBlock) {
      // Skip trigger blocks AND blocks in trigger mode during manual execution
      if (!isTriggerBlock && !isInTriggerMode) {
        acc[id] = block
      } else {
        logger.info(`🚫 Filtering out block ${id} - trigger category: ${isTriggerBlock}, trigger mode: ${isInTriggerMode}`)
      }
      return acc
    },
    {} as typeof mergedStates
  )

  // Log the filtered state that will be used for execution (not snapshots)
  logger.info('📦 Filtered workflow state for execution:', {
    totalBlocks: Object.keys(filteredStates).length,
    blocks: Object.entries(filteredStates).map(([id, block]) => ({
      id,
      type: block.type,
      triggerMode: block.triggerMode,
    })),
  })

  const currentBlockStates = Object.entries(filteredStates).reduce(
    (acc, [id, block]) => {
      acc[id] = Object.entries(block.subBlocks).reduce(

@@ -1635,6 +1635,8 @@ const WorkflowContent = React.memo(() => {
    )
  }

  const isReadOnly = currentWorkflow.isDebugCanvasMode === true ? true : !effectivePermissions.canEdit

  return (
    <div className='flex h-screen w-full flex-col overflow-hidden'>
      <div className='relative h-full w-full flex-1 transition-all duration-200'>
@@ -1650,11 +1652,11 @@ const WorkflowContent = React.memo(() => {
          edges={edgesWithSelection}
          onNodesChange={onNodesChange}
          onEdgesChange={onEdgesChange}
          onConnect={effectivePermissions.canEdit ? onConnect : undefined}
          onConnect={isReadOnly ? undefined : onConnect}
          nodeTypes={nodeTypes}
          edgeTypes={edgeTypes}
          onDrop={effectivePermissions.canEdit ? onDrop : undefined}
          onDragOver={effectivePermissions.canEdit ? onDragOver : undefined}
          onDrop={isReadOnly ? undefined : onDrop}
          onDragOver={isReadOnly ? undefined : onDragOver}
          fitView
          minZoom={0.1}
          maxZoom={1.3}
@@ -1674,22 +1676,22 @@ const WorkflowContent = React.memo(() => {
          onEdgeClick={onEdgeClick}
          elementsSelectable={true}
          selectNodesOnDrag={false}
          nodesConnectable={effectivePermissions.canEdit}
          nodesDraggable={effectivePermissions.canEdit}
          nodesConnectable={!isReadOnly}
          nodesDraggable={!isReadOnly}
          draggable={false}
          noWheelClassName='allow-scroll'
          edgesFocusable={true}
          edgesUpdatable={effectivePermissions.canEdit}
          edgesUpdatable={!isReadOnly}
          className='workflow-container h-full'
          onNodeDrag={effectivePermissions.canEdit ? onNodeDrag : undefined}
          onNodeDragStop={effectivePermissions.canEdit ? onNodeDragStop : undefined}
          onNodeDragStart={effectivePermissions.canEdit ? onNodeDragStart : undefined}
          onNodeDrag={isReadOnly ? undefined : onNodeDrag}
          onNodeDragStop={isReadOnly ? undefined : onNodeDragStop}
          onNodeDragStart={isReadOnly ? undefined : onNodeDragStart}
          snapToGrid={false}
          snapGrid={[20, 20]}
          elevateEdgesOnSelect={true}
          elevateNodesOnSelect={true}
          autoPanOnConnect={effectivePermissions.canEdit}
          autoPanOnNodeDrag={effectivePermissions.canEdit}
          autoPanOnConnect={!isReadOnly}
          autoPanOnNodeDrag={!isReadOnly}
        >
          <Background
            color='hsl(var(--workflow-dots))'

@@ -26,7 +26,7 @@ import {
  AlertDialogTitle,
} from '@/components/ui/alert-dialog'
import { ScrollArea } from '@/components/ui/scroll-area'
import { MAX_TAG_SLOTS } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'

@@ -17,7 +17,7 @@ import {
  SelectValue,
} from '@/components/ui'
import { ScrollArea } from '@/components/ui/scroll-area'
import { MAX_TAG_SLOTS, TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS, TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import type { DocumentTag } from '@/app/workspace/[workspaceId]/knowledge/components/document-tag-entry/document-tag-entry'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'

@@ -26,15 +26,15 @@ export type DocumentProcessingPayload = {

export const processDocument = task({
  id: 'knowledge-process-document',
  maxDuration: env.KB_CONFIG_MAX_DURATION,
  maxDuration: env.KB_CONFIG_MAX_DURATION || 300,
  retry: {
    maxAttempts: env.KB_CONFIG_MAX_ATTEMPTS,
    factor: env.KB_CONFIG_RETRY_FACTOR,
    minTimeoutInMs: env.KB_CONFIG_MIN_TIMEOUT,
    maxTimeoutInMs: env.KB_CONFIG_MAX_TIMEOUT,
    maxAttempts: env.KB_CONFIG_MAX_ATTEMPTS || 3,
    factor: env.KB_CONFIG_RETRY_FACTOR || 2,
    minTimeoutInMs: env.KB_CONFIG_MIN_TIMEOUT || 1000,
    maxTimeoutInMs: env.KB_CONFIG_MAX_TIMEOUT || 10000,
  },
  queue: {
    concurrencyLimit: env.KB_CONFIG_CONCURRENCY_LIMIT,
    concurrencyLimit: env.KB_CONFIG_CONCURRENCY_LIMIT || 20,
    name: 'document-processing-queue',
  },
  run: async (payload: DocumentProcessingPayload) => {

@@ -99,6 +99,10 @@ export async function executeWebhookJob(payload: WebhookExecutionPayload) {
      userId: payload.userId,
      workspaceId: '', // TODO: Get from workflow if needed
      variables: decryptedEnvVars,
      initialInput: payload.body || {},
      triggerData: { provider: payload.provider, blockId: payload.blockId },
      startBlockId: payload.blockId,
      executionType: 'webhook',
    })

    // Merge subblock states (matching workflow-execution pattern)

@@ -961,9 +961,13 @@ export class Executor {
      const connectedToStartBlock = this.actualWorkflow.connections
        .filter((conn) => conn.source === initBlock.id)
        .map((conn) => conn.target)

      // Skip trigger-category targets when seeding from starter (manual/debug runs)
      connectedToStartBlock.forEach((blockId) => {
        context.activeExecutionPath.add(blockId)
        const targetBlock = this.actualWorkflow.blocks.find((b) => b.id === blockId)
        const isTriggerCategory = (targetBlock as any)?.metadata?.category === 'triggers'
        if (!isTriggerCategory) {
          context.activeExecutionPath.add(blockId)
        }
      })
    }

@@ -203,14 +203,18 @@ export class PathTracker {
      if (!context.activeExecutionPath.has(conn.target)) {
        const targetBlock = this.getBlock(conn.target)
        const targetBlockType = targetBlock?.metadata?.id
        const isTriggerCategory = (targetBlock as any)?.metadata?.category === 'triggers'

        // Use routing strategy to determine if this connection should be activated
        if (!Routing.shouldSkipConnection(conn.sourceHandle, targetBlockType || '')) {
          context.activeExecutionPath.add(conn.target)

          // Recursively activate downstream paths if the target block should activate downstream
          if (Routing.shouldActivateDownstream(targetBlockType || '')) {
            this.activateDownstreamPathsSelectively(conn.target, context)
          }
          // Do not activate or traverse trigger blocks during downstream activation from manual paths
          if (!isTriggerCategory) {
            context.activeExecutionPath.add(conn.target)

            // Recursively activate downstream paths if the target block should activate downstream
            if (Routing.shouldActivateDownstream(targetBlockType || '')) {
              this.activateDownstreamPathsSelectively(conn.target, context)
            }
          }
        }
      }
@@ -233,7 +237,11 @@
      )

      for (const conn of targetConnections) {
        context.activeExecutionPath.add(conn.target)
        const targetBlock = this.getBlock(conn.target)
        const isTriggerCategory = (targetBlock as any)?.metadata?.category === 'triggers'
        if (!isTriggerCategory) {
          context.activeExecutionPath.add(conn.target)
        }
        logger.debug(`Condition ${block.id} activated path to: ${conn.target}`)

        // Check if the selected target should activate downstream paths
@@ -282,13 +290,16 @@
      if (this.shouldActivateConnection(conn, hasError, isPartOfLoop, blockLoops, context)) {
        const targetBlock = this.getBlock(conn.target)
        const targetBlockType = targetBlock?.metadata?.id
        const isTriggerCategory = (targetBlock as any)?.metadata?.category === 'triggers'

        // Use routing strategy to determine if this connection should be activated
        if (Routing.shouldSkipConnection(conn.sourceHandle, targetBlockType || '')) {
          continue
        }

        context.activeExecutionPath.add(conn.target)
        if (!isTriggerCategory) {
          context.activeExecutionPath.add(conn.target)
        }
      }
    }
  }

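All three PathTracker hunks add the same metadata check before activating a target. A sketch of how it could be centralized (isTriggerTarget is a hypothetical name, not in this diff):

private isTriggerTarget(targetId: string): boolean {
  const targetBlock = this.getBlock(targetId)
  return (targetBlock as any)?.metadata?.category === 'triggers'
}

// each activation site then reduces to:
// if (!this.isTriggerTarget(conn.target)) context.activeExecutionPath.add(conn.target)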
@@ -782,6 +782,16 @@ export function useCollaborativeWorkflow() {

    const newTriggerMode = !currentBlock.triggerMode

    // If enabling trigger mode, proactively remove incoming edges for consistency across clients
    if (newTriggerMode) {
      const incomingEdges = Object.values(workflowStore.edges).filter((e) => e.target === id)
      for (const edge of incomingEdges) {
        executeQueuedOperation('remove', 'edge', { id: edge.id }, () =>
          workflowStore.removeEdge(edge.id)
        )
      }
    }

    executeQueuedOperation(
      'update-trigger-mode',
      'block',

@@ -1,7 +1,7 @@
'use client'

import { useCallback, useEffect, useState } from 'react'
import type { TagSlot } from '@/lib/constants/knowledge'
import type { TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('useKnowledgeBaseTagDefinitions')

@@ -1,7 +1,7 @@
'use client'

import { useCallback, useEffect, useState } from 'react'
import type { TagSlot } from '@/lib/constants/knowledge'
import type { TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('useTagDefinitions')

@@ -10,7 +10,7 @@ import { createAuthClient } from 'better-auth/react'
import type { auth } from '@/lib/auth'
import { env, getEnv } from '@/lib/env'
import { isProd } from '@/lib/environment'
import { SessionContext, type SessionHookResult } from '@/lib/session-context'
import { SessionContext, type SessionHookResult } from '@/lib/session/session-context'

export function getBaseURL() {
  let baseURL

@@ -1,53 +0,0 @@
/**
 * Knowledge base and document constants
 */

// Tag slot configuration by field type
// Each field type maps to specific database columns
export const TAG_SLOT_CONFIG = {
  text: {
    slots: ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] as const,
    maxSlots: 7,
  },
  // Future field types would be added here with their own database columns
  // date: {
  //   slots: ['tag8', 'tag9'] as const,
  //   maxSlots: 2,
  // },
  // number: {
  //   slots: ['tag10', 'tag11'] as const,
  //   maxSlots: 2,
  // },
} as const

// Currently supported field types
export const SUPPORTED_FIELD_TYPES = Object.keys(TAG_SLOT_CONFIG) as Array<
  keyof typeof TAG_SLOT_CONFIG
>

// All tag slots (for backward compatibility)
export const TAG_SLOTS = TAG_SLOT_CONFIG.text.slots

// Maximum number of tag slots for text type (for backward compatibility)
export const MAX_TAG_SLOTS = TAG_SLOT_CONFIG.text.maxSlots

// Type for tag slot names
export type TagSlot = (typeof TAG_SLOTS)[number]

// Helper function to get available slots for a field type
export function getSlotsForFieldType(fieldType: string): readonly string[] {
  const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
  if (!config) {
    return [] // Return empty array for unsupported field types - system will naturally handle this
  }
  return config.slots
}

// Helper function to get max slots for a field type
export function getMaxSlotsForFieldType(fieldType: string): number {
  const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
  if (!config) {
    return 0 // Return 0 for unsupported field types
  }
  return config.maxSlots
}
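The file is deleted because the same constants now live at '@/lib/knowledge/consts', matching the import updates elsewhere in this diff; callers change only the path. A quick sketch of the moved helpers in use (return values follow from TAG_SLOT_CONFIG above):

import { getMaxSlotsForFieldType, getSlotsForFieldType, MAX_TAG_SLOTS } from '@/lib/knowledge/consts'

getSlotsForFieldType('text')    // ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7']
getMaxSlotsForFieldType('text') // 7, same value as MAX_TAG_SLOTS
getSlotsForFieldType('date')    // [] until a 'date' entry is added to TAG_SLOT_CONFIG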
@@ -1,139 +1,108 @@
import { createReadStream, existsSync } from 'fs'
import { Readable } from 'stream'
import csvParser from 'csv-parser'
import { existsSync, readFileSync } from 'fs'
import * as Papa from 'papaparse'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('CsvParser')

const PARSE_OPTIONS = {
  header: true,
  skipEmptyLines: true,
  transformHeader: (header: string) => sanitizeTextForUTF8(String(header)),
  transform: (value: string) => sanitizeTextForUTF8(String(value || '')),
}

export class CsvParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    return new Promise((resolve, reject) => {
      try {
        // Validate input
        if (!filePath) {
          return reject(new Error('No file path provided'))
        }

        // Check if file exists
        if (!existsSync(filePath)) {
          return reject(new Error(`File not found: ${filePath}`))
        }

        const results: Record<string, any>[] = []
        const headers: string[] = []

        createReadStream(filePath)
          .on('error', (error: Error) => {
            logger.error('CSV stream error:', error)
            reject(new Error(`Failed to read CSV file: ${error.message}`))
          })
          .pipe(csvParser())
          .on('headers', (headerList: string[]) => {
            headers.push(...headerList)
          })
          .on('data', (data: Record<string, any>) => {
            results.push(data)
          })
          .on('end', () => {
            // Convert CSV data to a formatted string representation
            let content = ''

            // Add headers
            if (headers.length > 0) {
              const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
              content += `${cleanHeaders.join(', ')}\n`
            }

            // Add rows
            results.forEach((row) => {
              const cleanValues = Object.values(row).map((v) =>
                sanitizeTextForUTF8(String(v || ''))
              )
              content += `${cleanValues.join(', ')}\n`
            })

            resolve({
              content: sanitizeTextForUTF8(content),
              metadata: {
                rowCount: results.length,
                headers: headers,
                rawData: results,
              },
            })
          })
          .on('error', (error: Error) => {
            logger.error('CSV parsing error:', error)
            reject(new Error(`Failed to parse CSV file: ${error.message}`))
          })
      } catch (error) {
        logger.error('CSV general error:', error)
        reject(new Error(`Failed to process CSV file: ${(error as Error).message}`))
      }
    })
  }
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
      if (!filePath) {
        throw new Error('No file path provided')
      }

      if (!existsSync(filePath)) {
        throw new Error(`File not found: ${filePath}`)
      }

      const fileContent = readFileSync(filePath, 'utf8')

      const parseResult = Papa.parse(fileContent, PARSE_OPTIONS)

      if (parseResult.errors && parseResult.errors.length > 0) {
        const errorMessages = parseResult.errors.map((err) => err.message).join(', ')
        logger.error('CSV parsing errors:', parseResult.errors)
        throw new Error(`Failed to parse CSV file: ${errorMessages}`)
      }

      const results = parseResult.data as Record<string, any>[]
      const headers = parseResult.meta.fields || []

      let content = ''

      if (headers.length > 0) {
        const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
        content += `${cleanHeaders.join(', ')}\n`
      }

      results.forEach((row) => {
        const cleanValues = Object.values(row).map((v) => sanitizeTextForUTF8(String(v || '')))
        content += `${cleanValues.join(', ')}\n`
      })

      return {
        content: sanitizeTextForUTF8(content),
        metadata: {
          rowCount: results.length,
          headers: headers,
          rawData: results,
        },
      }
    } catch (error) {
      logger.error('CSV general error:', error)
      throw new Error(`Failed to process CSV file: ${(error as Error).message}`)
    }
  }

  async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
    return new Promise((resolve, reject) => {
      try {
        logger.info('Parsing buffer, size:', buffer.length)

        const results: Record<string, any>[] = []
        const headers: string[] = []

        // Create a readable stream from the buffer
        const bufferStream = new Readable()
        bufferStream.push(buffer)
        bufferStream.push(null) // Signal the end of the stream

        bufferStream
          .on('error', (error: Error) => {
            logger.error('CSV buffer stream error:', error)
            reject(new Error(`Failed to read CSV buffer: ${error.message}`))
          })
          .pipe(csvParser())
          .on('headers', (headerList: string[]) => {
            headers.push(...headerList)
          })
          .on('data', (data: Record<string, any>) => {
            results.push(data)
          })
          .on('end', () => {
            // Convert CSV data to a formatted string representation
            let content = ''

            // Add headers
            if (headers.length > 0) {
              const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
              content += `${cleanHeaders.join(', ')}\n`
            }

            // Add rows
            results.forEach((row) => {
              const cleanValues = Object.values(row).map((v) =>
                sanitizeTextForUTF8(String(v || ''))
              )
              content += `${cleanValues.join(', ')}\n`
            })

            resolve({
              content: sanitizeTextForUTF8(content),
              metadata: {
                rowCount: results.length,
                headers: headers,
                rawData: results,
              },
            })
          })
          .on('error', (error: Error) => {
            logger.error('CSV parsing error:', error)
            reject(new Error(`Failed to parse CSV buffer: ${error.message}`))
          })
      } catch (error) {
        logger.error('CSV buffer parsing error:', error)
        reject(new Error(`Failed to process CSV buffer: ${(error as Error).message}`))
      }
    })
  }
  async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
    try {
      logger.info('Parsing buffer, size:', buffer.length)

      const fileContent = buffer.toString('utf8')

      const parseResult = Papa.parse(fileContent, PARSE_OPTIONS)

      if (parseResult.errors && parseResult.errors.length > 0) {
        const errorMessages = parseResult.errors.map((err) => err.message).join(', ')
        logger.error('CSV parsing errors:', parseResult.errors)
        throw new Error(`Failed to parse CSV buffer: ${errorMessages}`)
      }

      const results = parseResult.data as Record<string, any>[]
      const headers = parseResult.meta.fields || []

      let content = ''

      if (headers.length > 0) {
        const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
        content += `${cleanHeaders.join(', ')}\n`
      }

      results.forEach((row) => {
        const cleanValues = Object.values(row).map((v) => sanitizeTextForUTF8(String(v || '')))
        content += `${cleanValues.join(', ')}\n`
      })

      return {
        content: sanitizeTextForUTF8(content),
        metadata: {
          rowCount: results.length,
          headers: headers,
          rawData: results,
        },
      }
    } catch (error) {
      logger.error('CSV buffer parsing error:', error)
      throw new Error(`Failed to process CSV buffer: ${(error as Error).message}`)
    }
  }
}

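For reference, a minimal standalone sketch of the Papa.parse behavior the rewrite relies on with header: true (the sample CSV is invented; PARSE_OPTIONS above additionally sanitizes each header and value):

import * as Papa from 'papaparse'

const csv = 'name,age\nAda,36\nGrace,45\n'
const result = Papa.parse<Record<string, string>>(csv, { header: true, skipEmptyLines: true })

result.meta.fields // ['name', 'age']
result.data        // [{ name: 'Ada', age: '36' }, { name: 'Grace', age: '45' }]
result.errors      // [] for well-formed input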
@@ -9,19 +9,16 @@ const logger = createLogger('DocParser')
export class DocParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
      // Validate input
      if (!filePath) {
        throw new Error('No file path provided')
      }

      // Check if file exists
      if (!existsSync(filePath)) {
        throw new Error(`File not found: ${filePath}`)
      }

      logger.info(`Parsing DOC file: ${filePath}`)

      // Read the file
      const buffer = await readFile(filePath)
      return this.parseBuffer(buffer)
    } catch (error) {
@@ -38,45 +35,37 @@ export class DocParser implements FileParser {
        throw new Error('Empty buffer provided')
      }

      // Try to dynamically import the word extractor
      let WordExtractor
      let parseOfficeAsync
      try {
        WordExtractor = (await import('word-extractor')).default
        const officeParser = await import('officeparser')
        parseOfficeAsync = officeParser.parseOfficeAsync
      } catch (importError) {
        logger.warn('word-extractor not available, using fallback extraction')
        logger.warn('officeparser not available, using fallback extraction')
        return this.fallbackExtraction(buffer)
      }

      try {
        const extractor = new WordExtractor()
        const extracted = await extractor.extract(buffer)
        const result = await parseOfficeAsync(buffer)

        const content = sanitizeTextForUTF8(extracted.getBody())
        const headers = extracted.getHeaders()
        const footers = extracted.getFooters()

        // Combine body with headers/footers if they exist
        let fullContent = content
        if (headers?.trim()) {
          fullContent = `${sanitizeTextForUTF8(headers)}\n\n${fullContent}`
        }
        if (footers?.trim()) {
          fullContent = `${fullContent}\n\n${sanitizeTextForUTF8(footers)}`
        if (!result) {
          throw new Error('officeparser returned no result')
        }

        logger.info('DOC parsing completed successfully')
        const resultString = typeof result === 'string' ? result : String(result)

        const content = sanitizeTextForUTF8(resultString.trim())

        logger.info('DOC parsing completed successfully with officeparser')

        return {
          content: fullContent.trim(),
          content: content,
          metadata: {
            hasHeaders: !!headers?.trim(),
            hasFooters: !!footers?.trim(),
            characterCount: fullContent.length,
            extractionMethod: 'word-extractor',
            characterCount: content.length,
            extractionMethod: 'officeparser',
          },
        }
      } catch (extractError) {
        logger.warn('word-extractor failed, using fallback:', extractError)
        logger.warn('officeparser failed, using fallback:', extractError)
        return this.fallbackExtraction(buffer)
      }
    } catch (error) {
@@ -85,25 +74,16 @@ export class DocParser implements FileParser {
    }
  }

  /**
   * Fallback extraction method for when word-extractor is not available
   * This is a very basic extraction that looks for readable text in the binary
   */
  private fallbackExtraction(buffer: Buffer): FileParseResult {
    logger.info('Using fallback text extraction for DOC file')

    // Convert buffer to string and try to extract readable text
    // This is very basic and won't work well for complex DOC files
    const text = buffer.toString('utf8', 0, Math.min(buffer.length, 100000)) // Limit to first 100KB
    const text = buffer.toString('utf8', 0, Math.min(buffer.length, 100000))

    // Extract sequences of printable ASCII characters
    const readableText = text
      .match(/[\x20-\x7E\s]{4,}/g) // Find sequences of 4+ printable characters
      .match(/[\x20-\x7E\s]{4,}/g)
      ?.filter(
        (chunk) =>
          chunk.trim().length > 10 && // Minimum length
          /[a-zA-Z]/.test(chunk) && // Must contain letters
          !/^[\x00-\x1F]*$/.test(chunk) // Not just control characters
          chunk.trim().length > 10 && /[a-zA-Z]/.test(chunk) && !/^[\x00-\x1F]*$/.test(chunk)
      )
      .join(' ')
      .replace(/\s+/g, ' ')
@@ -118,8 +98,7 @@ export class DocParser implements FileParser {
      metadata: {
        extractionMethod: 'fallback',
        characterCount: content.length,
        warning:
          'Basic text extraction used. For better results, install word-extractor package or convert to DOCX format.',
        warning: 'Basic text extraction used. For better results, convert to DOCX format.',
      },
    }
  }

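A minimal sketch of the officeparser call path the new DocParser depends on, assuming parseOfficeAsync accepts a Buffer as it is used above:

import { readFile } from 'fs/promises'
import { parseOfficeAsync } from 'officeparser'

async function extractDocText(path: string): Promise<string> {
  const buffer = await readFile(path)
  // Resolves to the extracted text; DocParser additionally trims and sanitizes it
  const result = await parseOfficeAsync(buffer)
  return typeof result === 'string' ? result : String(result)
}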
@@ -14,15 +14,12 @@ interface MammothResult {

export class DocxParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
      // Validate input
      if (!filePath) {
        throw new Error('No file path provided')
      }

      // Read the file
      const buffer = await readFile(filePath)

      // Use parseBuffer for consistent implementation
      return this.parseBuffer(buffer)
    } catch (error) {
      logger.error('DOCX file error:', error)
@@ -34,10 +31,8 @@ export class DocxParser implements FileParser {
    try {
      logger.info('Parsing buffer, size:', buffer.length)

      // Extract text with mammoth
      const result = await mammoth.extractRawText({ buffer })

      // Extract HTML for metadata (optional - won't fail if this fails)
      let htmlResult: MammothResult = { value: '', messages: [] }
      try {
        htmlResult = await mammoth.convertToHtml({ buffer })

apps/sim/lib/file-parsers/html-parser.ts (new file, 283 lines)
@@ -0,0 +1,283 @@
import { readFile } from 'fs/promises'
import * as cheerio from 'cheerio'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('HtmlParser')

export class HtmlParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
      if (!filePath) {
        throw new Error('No file path provided')
      }

      const buffer = await readFile(filePath)
      return this.parseBuffer(buffer)
    } catch (error) {
      logger.error('HTML file error:', error)
      throw new Error(`Failed to parse HTML file: ${(error as Error).message}`)
    }
  }

  async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
    try {
      logger.info('Parsing HTML buffer, size:', buffer.length)

      const htmlContent = buffer.toString('utf-8')
      const $ = cheerio.load(htmlContent)

      // Extract meta information before removing tags
      const title = $('title').text().trim()
      const metaDescription = $('meta[name="description"]').attr('content') || ''

      $('script, style, noscript, meta, link, iframe, object, embed, svg').remove()

      $.root()
        .contents()
        .filter(function () {
          return this.type === 'comment'
        })
        .remove()

      const content = this.extractStructuredText($)

      const sanitizedContent = sanitizeTextForUTF8(content)

      const characterCount = sanitizedContent.length
      const wordCount = sanitizedContent.split(/\s+/).filter((word) => word.length > 0).length
      const estimatedTokenCount = Math.ceil(characterCount / 4)

      const headings = this.extractHeadings($)

      const links = this.extractLinks($)

      return {
        content: sanitizedContent,
        metadata: {
          title,
          metaDescription,
          characterCount,
          wordCount,
          tokenCount: estimatedTokenCount,
          headings,
          links: links.slice(0, 50),
          hasImages: $('img').length > 0,
          imageCount: $('img').length,
          hasTable: $('table').length > 0,
          tableCount: $('table').length,
          hasList: $('ul, ol').length > 0,
          listCount: $('ul, ol').length,
        },
      }
    } catch (error) {
      logger.error('HTML buffer parsing error:', error)
      throw new Error(`Failed to parse HTML buffer: ${(error as Error).message}`)
    }
  }

  /**
   * Extract structured text content preserving document hierarchy
   */
  private extractStructuredText($: cheerio.CheerioAPI): string {
    const contentParts: string[] = []

    const rootElement = $('body').length > 0 ? $('body') : $.root()

    this.processElement($, rootElement, contentParts, 0)

    return contentParts.join('\n').trim()
  }

  /**
   * Recursively process elements to extract text with structure
   */
  private processElement(
    $: cheerio.CheerioAPI,
    element: cheerio.Cheerio<any>,
    contentParts: string[],
    depth: number
  ): void {
    element.contents().each((_, node) => {
      if (node.type === 'text') {
        const text = $(node).text().trim()
        if (text) {
          contentParts.push(text)
        }
      } else if (node.type === 'tag') {
        const $node = $(node)
        const tagName = node.tagName?.toLowerCase()

        switch (tagName) {
          case 'h1':
          case 'h2':
          case 'h3':
          case 'h4':
          case 'h5':
          case 'h6': {
            const headingText = $node.text().trim()
            if (headingText) {
              contentParts.push(`\n${headingText}\n`)
            }
            break
          }

          case 'p': {
            const paragraphText = $node.text().trim()
            if (paragraphText) {
              contentParts.push(`${paragraphText}\n`)
            }
            break
          }

          case 'br':
            contentParts.push('\n')
            break

          case 'hr':
            contentParts.push('\n---\n')
            break

          case 'li': {
            const listItemText = $node.text().trim()
            if (listItemText) {
              const indent = ' '.repeat(Math.min(depth, 3))
              contentParts.push(`${indent}• ${listItemText}`)
            }
            break
          }

          case 'ul':
          case 'ol':
            contentParts.push('\n')
            this.processElement($, $node, contentParts, depth + 1)
            contentParts.push('\n')
            break

          case 'table':
            this.processTable($, $node, contentParts)
            break

          case 'blockquote': {
            const quoteText = $node.text().trim()
            if (quoteText) {
              contentParts.push(`\n> ${quoteText}\n`)
            }
            break
          }

          case 'pre':
          case 'code': {
            const codeText = $node.text().trim()
            if (codeText) {
              contentParts.push(`\n\`\`\`\n${codeText}\n\`\`\`\n`)
            }
            break
          }

          case 'div':
          case 'section':
          case 'article':
          case 'main':
          case 'aside':
          case 'nav':
          case 'header':
          case 'footer':
            this.processElement($, $node, contentParts, depth)
            break

          case 'a': {
            const linkText = $node.text().trim()
            const href = $node.attr('href')
            if (linkText) {
              if (href?.startsWith('http')) {
                contentParts.push(`${linkText} (${href})`)
              } else {
                contentParts.push(linkText)
              }
            }
            break
          }

          case 'img': {
            const alt = $node.attr('alt')
            if (alt) {
              contentParts.push(`[Image: ${alt}]`)
            }
            break
          }

          default:
            this.processElement($, $node, contentParts, depth)
        }
      }
    })
  }

  /**
   * Process table elements to extract structured data
   */
  private processTable(
    $: cheerio.CheerioAPI,
    table: cheerio.Cheerio<any>,
    contentParts: string[]
  ): void {
    contentParts.push('\n[Table]')

    table.find('tr').each((_, row) => {
      const $row = $(row)
      const cells: string[] = []

      $row.find('td, th').each((_, cell) => {
        const cellText = $(cell).text().trim()
        cells.push(cellText || '')
      })

      if (cells.length > 0) {
        contentParts.push(`| ${cells.join(' | ')} |`)
      }
    })

    contentParts.push('[/Table]\n')
  }

  /**
   * Extract heading structure for metadata
   */
  private extractHeadings($: cheerio.CheerioAPI): Array<{ level: number; text: string }> {
    const headings: Array<{ level: number; text: string }> = []

    $('h1, h2, h3, h4, h5, h6').each((_, element) => {
      const $element = $(element)
      const tagName = element.tagName?.toLowerCase()
      const level = Number.parseInt(tagName?.charAt(1) || '1', 10)
      const text = $element.text().trim()

      if (text) {
        headings.push({ level, text })
      }
    })

    return headings
  }

  /**
   * Extract links from the document
   */
  private extractLinks($: cheerio.CheerioAPI): Array<{ text: string; href: string }> {
    const links: Array<{ text: string; href: string }> = []

    $('a[href]').each((_, element) => {
      const $element = $(element)
      const href = $element.attr('href')
      const text = $element.text().trim()

      if (href && text && href.startsWith('http')) {
        links.push({ text, href })
      }
    })

    return links
  }
}
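A small usage sketch of the new parser (the HTML sample is invented for illustration):

async function demo() {
  const parser = new HtmlParser()
  const html = '<html><head><title>Hi</title></head><body><h1>Docs</h1><p>Hello world</p></body></html>'
  const { content, metadata } = await parser.parseBuffer(Buffer.from(html))
  // content: the 'Docs' heading line followed by the paragraph text
  // metadata.title === 'Hi'; metadata.headings includes { level: 1, text: 'Docs' }
}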
@@ -51,6 +51,23 @@ const mockMdParseFile = vi.fn().mockResolvedValue({
  },
})

+const mockPptxParseFile = vi.fn().mockResolvedValue({
+  content: 'Parsed PPTX content',
+  metadata: {
+    slideCount: 5,
+    extractionMethod: 'officeparser',
+  },
+})
+
+const mockHtmlParseFile = vi.fn().mockResolvedValue({
+  content: 'Parsed HTML content',
+  metadata: {
+    title: 'Test HTML Document',
+    headingCount: 3,
+    linkCount: 2,
+  },
+})
+
const createMockModule = () => {
  const mockParsers: Record<string, FileParser> = {
    pdf: { parseFile: mockPdfParseFile },
@@ -58,6 +75,10 @@ const createMockModule = () => {
    docx: { parseFile: mockDocxParseFile },
    txt: { parseFile: mockTxtParseFile },
    md: { parseFile: mockMdParseFile },
+    pptx: { parseFile: mockPptxParseFile },
+    ppt: { parseFile: mockPptxParseFile },
+    html: { parseFile: mockHtmlParseFile },
+    htm: { parseFile: mockHtmlParseFile },
  }

  return {
@@ -143,6 +164,18 @@ describe('File Parsers', () => {
      })),
    }))

+    vi.doMock('@/lib/file-parsers/pptx-parser', () => ({
+      PptxParser: vi.fn().mockImplementation(() => ({
+        parseFile: mockPptxParseFile,
+      })),
+    }))
+
+    vi.doMock('@/lib/file-parsers/html-parser', () => ({
+      HtmlParser: vi.fn().mockImplementation(() => ({
+        parseFile: mockHtmlParseFile,
+      })),
+    }))
+
    global.console = {
      ...console,
      log: vi.fn(),
@@ -261,6 +294,82 @@ describe('File Parsers', () => {

      const { parseFile } = await import('@/lib/file-parsers/index')
      const result = await parseFile('/test/files/document.md')

      expect(result).toEqual(expectedResult)
    })

+    it('should parse PPTX files successfully', async () => {
+      const expectedResult = {
+        content: 'Parsed PPTX content',
+        metadata: {
+          slideCount: 5,
+          extractionMethod: 'officeparser',
+        },
+      }
+
+      mockPptxParseFile.mockResolvedValueOnce(expectedResult)
+      mockExistsSync.mockReturnValue(true)
+
+      const { parseFile } = await import('@/lib/file-parsers/index')
+      const result = await parseFile('/test/files/presentation.pptx')
+
+      expect(result).toEqual(expectedResult)
+    })
+
+    it('should parse PPT files successfully', async () => {
+      const expectedResult = {
+        content: 'Parsed PPTX content',
+        metadata: {
+          slideCount: 5,
+          extractionMethod: 'officeparser',
+        },
+      }
+
+      mockPptxParseFile.mockResolvedValueOnce(expectedResult)
+      mockExistsSync.mockReturnValue(true)
+
+      const { parseFile } = await import('@/lib/file-parsers/index')
+      const result = await parseFile('/test/files/presentation.ppt')
+
+      expect(result).toEqual(expectedResult)
+    })
+
+    it('should parse HTML files successfully', async () => {
+      const expectedResult = {
+        content: 'Parsed HTML content',
+        metadata: {
+          title: 'Test HTML Document',
+          headingCount: 3,
+          linkCount: 2,
+        },
+      }
+
+      mockHtmlParseFile.mockResolvedValueOnce(expectedResult)
+      mockExistsSync.mockReturnValue(true)
+
+      const { parseFile } = await import('@/lib/file-parsers/index')
+      const result = await parseFile('/test/files/document.html')
+
+      expect(result).toEqual(expectedResult)
+    })
+
+    it('should parse HTM files successfully', async () => {
+      const expectedResult = {
+        content: 'Parsed HTML content',
+        metadata: {
+          title: 'Test HTML Document',
+          headingCount: 3,
+          linkCount: 2,
+        },
+      }
+
+      mockHtmlParseFile.mockResolvedValueOnce(expectedResult)
+      mockExistsSync.mockReturnValue(true)
+
+      const { parseFile } = await import('@/lib/file-parsers/index')
+      const result = await parseFile('/test/files/document.htm')
+
+      expect(result).toEqual(expectedResult)
+    })
+
    it('should throw error for unsupported file types', async () => {
@@ -292,6 +401,10 @@ describe('File Parsers', () => {
      expect(isSupportedFileType('docx')).toBe(true)
      expect(isSupportedFileType('txt')).toBe(true)
      expect(isSupportedFileType('md')).toBe(true)
+      expect(isSupportedFileType('pptx')).toBe(true)
+      expect(isSupportedFileType('ppt')).toBe(true)
+      expect(isSupportedFileType('html')).toBe(true)
+      expect(isSupportedFileType('htm')).toBe(true)
    })

    it('should return false for unsupported file types', async () => {
@@ -308,6 +421,8 @@ describe('File Parsers', () => {
      expect(isSupportedFileType('CSV')).toBe(true)
      expect(isSupportedFileType('TXT')).toBe(true)
      expect(isSupportedFileType('MD')).toBe(true)
+      expect(isSupportedFileType('PPTX')).toBe(true)
+      expect(isSupportedFileType('HTML')).toBe(true)
    })

    it('should handle errors gracefully', async () => {
@@ -7,7 +7,6 @@ import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('FileParser')

-// Lazy-loaded parsers to avoid initialization issues
let parserInstances: Record<string, FileParser> | null = null

/**
@@ -18,25 +17,20 @@ function getParserInstances(): Record<string, FileParser> {
    parserInstances = {}

    try {
-      // Import parsers only when needed - with try/catch for each one
      try {
        logger.info('Attempting to load PDF parser...')
        try {
-          // First try to use the pdf-parse library
-          // Import the PdfParser using ES module import to avoid test file access
          const { PdfParser } = require('@/lib/file-parsers/pdf-parser')
          parserInstances.pdf = new PdfParser()
          logger.info('PDF parser loaded successfully')
-        } catch (pdfParseError) {
-          // If that fails, fallback to our raw PDF parser
-          logger.error('Failed to load primary PDF parser:', pdfParseError)
+        } catch (pdfLibError) {
+          logger.error('Failed to load primary PDF parser:', pdfLibError)
          logger.info('Falling back to raw PDF parser')
          parserInstances.pdf = new RawPdfParser()
          logger.info('Raw PDF parser loaded successfully')
        }
      } catch (error) {
        logger.error('Failed to load any PDF parser:', error)
-        // Create a simple fallback that just returns the file size and a message
        parserInstances.pdf = {
          async parseFile(filePath: string): Promise<FileParseResult> {
            const buffer = await readFile(filePath)
@@ -100,10 +94,26 @@ function getParserInstances(): Record<string, FileParser> {
      try {
        const { XlsxParser } = require('@/lib/file-parsers/xlsx-parser')
        parserInstances.xlsx = new XlsxParser()
-        parserInstances.xls = new XlsxParser() // Both xls and xlsx use the same parser
+        parserInstances.xls = new XlsxParser()
      } catch (error) {
        logger.error('Failed to load XLSX parser:', error)
      }

+      try {
+        const { PptxParser } = require('@/lib/file-parsers/pptx-parser')
+        parserInstances.pptx = new PptxParser()
+        parserInstances.ppt = new PptxParser()
+      } catch (error) {
+        logger.error('Failed to load PPTX parser:', error)
+      }
+
+      try {
+        const { HtmlParser } = require('@/lib/file-parsers/html-parser')
+        parserInstances.html = new HtmlParser()
+        parserInstances.htm = new HtmlParser()
+      } catch (error) {
+        logger.error('Failed to load HTML parser:', error)
+      }
    } catch (error) {
      logger.error('Error loading file parsers:', error)
    }
@@ -119,12 +129,10 @@ function getParserInstances(): Record<string, FileParser> {
 */
export async function parseFile(filePath: string): Promise<FileParseResult> {
  try {
-    // Validate input
    if (!filePath) {
      throw new Error('No file path provided')
    }

-    // Check if file exists
    if (!existsSync(filePath)) {
      throw new Error(`File not found: ${filePath}`)
    }
@@ -158,7 +166,6 @@ export async function parseFile(filePath: string): Promise<FileParseResult> {
 */
export async function parseBuffer(buffer: Buffer, extension: string): Promise<FileParseResult> {
  try {
-    // Validate input
    if (!buffer || buffer.length === 0) {
      throw new Error('Empty buffer provided')
    }
@@ -182,7 +189,6 @@ export async function parseBuffer(buffer: Buffer, extension: string): Promise<Fi
    logger.info('Using parser for extension:', normalizedExtension)
    const parser = parsers[normalizedExtension]

-    // Check if parser supports buffer parsing
    if (parser.parseBuffer) {
      return await parser.parseBuffer(buffer)
    }
@@ -207,5 +213,4 @@ export function isSupportedFileType(extension: string): extension is SupportedFi
  }
}

-// Type exports
export type { FileParseResult, FileParser, SupportedFileType }
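A short usage sketch of parseFile from this registry; the path is illustrative:

import { parseFile } from '@/lib/file-parsers/index'

async function summarize(path: string) {
  // Parser instances are constructed lazily on first call; a parser that
  // fails to load is logged and skipped rather than crashing module load
  const { content, metadata } = await parseFile(path)
  return { preview: content.slice(0, 200), metadata }
}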
@@ -1,5 +1,6 @@
import { readFile } from 'fs/promises'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
+import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('MdParser')
@@ -7,15 +8,12 @@ const logger = createLogger('MdParser')
export class MdParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
-      // Validate input
      if (!filePath) {
        throw new Error('No file path provided')
      }

-      // Read the file
      const buffer = await readFile(filePath)

-      // Use parseBuffer for consistent implementation
      return this.parseBuffer(buffer)
    } catch (error) {
      logger.error('MD file error:', error)
@@ -27,14 +25,14 @@ export class MdParser implements FileParser {
    try {
      logger.info('Parsing buffer, size:', buffer.length)

-      // Extract content
      const result = buffer.toString('utf-8')
+      const content = sanitizeTextForUTF8(result)

      return {
-        content: result,
+        content,
        metadata: {
-          characterCount: result.length,
-          tokenCount: result.length / 4,
+          characterCount: content.length,
+          tokenCount: Math.floor(content.length / 4),
        },
      }
    } catch (error) {
@@ -1,22 +1,21 @@
import { readFile } from 'fs/promises'
-// @ts-ignore
-import * as pdfParseLib from 'pdf-parse/lib/pdf-parse.js'
+import { PDFDocument } from 'pdf-lib'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { createLogger } from '@/lib/logs/console/logger'
+import { RawPdfParser } from './raw-pdf-parser'

const logger = createLogger('PdfParser')
+const rawPdfParser = new RawPdfParser()

export class PdfParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
      logger.info('Starting to parse file:', filePath)

-      // Make sure we're only parsing the provided file path
      if (!filePath) {
        throw new Error('No file path provided')
      }

-      // Read the file
      logger.info('Reading file...')
      const dataBuffer = await readFile(filePath)
      logger.info('File read successfully, size:', dataBuffer.length)
@@ -32,93 +31,66 @@ export class PdfParser implements FileParser {
    try {
      logger.info('Starting to parse buffer, size:', dataBuffer.length)

-      // Try to parse with pdf-parse library first
      try {
-        logger.info('Attempting to parse with pdf-parse library...')
+        logger.info('Attempting to parse with pdf-lib library...')

-        // Parse PDF with direct function call to avoid test file access
-        logger.info('Starting PDF parsing...')
-        const data = await pdfParseLib.default(dataBuffer)
-        logger.info('PDF parsed successfully with pdf-parse, pages:', data.numpages)
+        const pdfDoc = await PDFDocument.load(dataBuffer)
+        const pages = pdfDoc.getPages()
+        const pageCount = pages.length
+
+        logger.info('PDF parsed successfully with pdf-lib, pages:', pageCount)
+
+        const metadata: Record<string, any> = {
+          pageCount,
+        }
+
+        try {
+          const title = pdfDoc.getTitle()
+          const author = pdfDoc.getAuthor()
+          const subject = pdfDoc.getSubject()
+          const creator = pdfDoc.getCreator()
+          const producer = pdfDoc.getProducer()
+          const creationDate = pdfDoc.getCreationDate()
+          const modificationDate = pdfDoc.getModificationDate()
+
+          if (title) metadata.title = title
+          if (author) metadata.author = author
+          if (subject) metadata.subject = subject
+          if (creator) metadata.creator = creator
+          if (producer) metadata.producer = producer
+          if (creationDate) metadata.creationDate = creationDate.toISOString()
+          if (modificationDate) metadata.modificationDate = modificationDate.toISOString()
+        } catch (metadataError) {
+          logger.warn('Could not extract PDF metadata:', metadataError)
+        }
+
+        logger.info(
+          'pdf-lib loaded successfully, but text extraction requires fallback to raw parser'
+        )
+        const rawResult = await rawPdfParser.parseBuffer(dataBuffer)

        return {
-          content: data.text,
+          content: rawResult.content,
          metadata: {
-            pageCount: data.numpages,
-            info: data.info,
-            version: data.version,
+            ...rawResult.metadata,
+            ...metadata,
+            source: 'pdf-lib + raw-parser',
          },
        }
-      } catch (pdfParseError: unknown) {
-        logger.error('PDF-parse library failed:', pdfParseError)
+      } catch (pdfLibError: unknown) {
+        logger.error('PDF-lib library failed:', pdfLibError)

-        // Fallback to manual text extraction
-        logger.info('Falling back to manual text extraction...')
-
-        // Extract basic PDF info from raw content
-        const rawContent = dataBuffer.toString('utf-8', 0, Math.min(10000, dataBuffer.length))
-
-        let version = 'Unknown'
-        let pageCount = 0
-
-        // Try to extract PDF version
-        const versionMatch = rawContent.match(/%PDF-(\d+\.\d+)/)
-        if (versionMatch?.[1]) {
-          version = versionMatch[1]
-        }
-
-        // Try to get page count
-        const pageMatches = rawContent.match(/\/Type\s*\/Page\b/g)
-        if (pageMatches) {
-          pageCount = pageMatches.length
-        }
-
-        // Try to extract text by looking for text-related operators in the PDF
-        let extractedText = ''
-
-        // Look for text in the PDF content using common patterns
-        const textMatches = rawContent.match(/BT[\s\S]*?ET/g)
-        if (textMatches && textMatches.length > 0) {
-          extractedText = textMatches
-            .map((textBlock) => {
-              // Extract text objects (Tj, TJ) from the text block
-              const textObjects = textBlock.match(/\([^)]*\)\s*Tj|\[[^\]]*\]\s*TJ/g)
-              if (textObjects) {
-                return textObjects
-                  .map((obj) => {
-                    // Clean up text objects
-                    return (
-                      obj
-                        .replace(
-                          /\(([^)]*)\)\s*Tj|\[([^\]]*)\]\s*TJ/g,
-                          (match, p1, p2) => p1 || p2 || ''
-                        )
-                        // Clean up PDF escape sequences
-                        .replace(/\\(\d{3}|[()\\])/g, '')
-                        .replace(/\\\\/g, '\\')
-                        .replace(/\\\(/g, '(')
-                        .replace(/\\\)/g, ')')
-                    )
-                  })
-                  .join(' ')
-              }
-              return ''
-            })
-            .join('\n')
-        }
-
-        // If we couldn't extract text or the text is too short, return a fallback message
-        if (!extractedText || extractedText.length < 50) {
-          extractedText = `This PDF contains ${pageCount} page(s) but text extraction was not successful.`
-        }
+        logger.info('Falling back to raw PDF parser...')
+        const rawResult = await rawPdfParser.parseBuffer(dataBuffer)

        return {
-          content: extractedText,
+          ...rawResult,
          metadata: {
-            pageCount,
-            version,
+            ...rawResult.metadata,
            fallback: true,
-            error: (pdfParseError as Error).message || 'Unknown error',
+            source: 'raw-parser-only',
+            error: (pdfLibError as Error).message || 'Unknown error',
          },
        }
      }
apps/sim/lib/file-parsers/pptx-parser.ts (new file, 106 lines)
@@ -0,0 +1,106 @@
import { existsSync } from 'fs'
import { readFile } from 'fs/promises'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('PptxParser')

export class PptxParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
      if (!filePath) {
        throw new Error('No file path provided')
      }

      if (!existsSync(filePath)) {
        throw new Error(`File not found: ${filePath}`)
      }

      logger.info(`Parsing PowerPoint file: ${filePath}`)

      const buffer = await readFile(filePath)
      return this.parseBuffer(buffer)
    } catch (error) {
      logger.error('PowerPoint file parsing error:', error)
      throw new Error(`Failed to parse PowerPoint file: ${(error as Error).message}`)
    }
  }

  async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
    try {
      logger.info('Parsing PowerPoint buffer, size:', buffer.length)

      if (!buffer || buffer.length === 0) {
        throw new Error('Empty buffer provided')
      }

      let parseOfficeAsync
      try {
        const officeParser = await import('officeparser')
        parseOfficeAsync = officeParser.parseOfficeAsync
      } catch (importError) {
        logger.warn('officeparser not available, using fallback extraction')
        return this.fallbackExtraction(buffer)
      }

      try {
        const result = await parseOfficeAsync(buffer)

        if (!result || typeof result !== 'string') {
          throw new Error('officeparser returned invalid result')
        }

        const content = sanitizeTextForUTF8(result.trim())

        logger.info('PowerPoint parsing completed successfully with officeparser')

        return {
          content: content,
          metadata: {
            characterCount: content.length,
            extractionMethod: 'officeparser',
          },
        }
      } catch (extractError) {
        logger.warn('officeparser failed, using fallback:', extractError)
        return this.fallbackExtraction(buffer)
      }
    } catch (error) {
      logger.error('PowerPoint buffer parsing error:', error)
      throw new Error(`Failed to parse PowerPoint buffer: ${(error as Error).message}`)
    }
  }

  private fallbackExtraction(buffer: Buffer): FileParseResult {
    logger.info('Using fallback text extraction for PowerPoint file')

    const text = buffer.toString('utf8', 0, Math.min(buffer.length, 200000))

    const readableText = text
      .match(/[\x20-\x7E\s]{4,}/g)
      ?.filter(
        (chunk) =>
          chunk.trim().length > 10 &&
          /[a-zA-Z]/.test(chunk) &&
          !/^[\x00-\x1F]*$/.test(chunk) &&
          !/^[^\w\s]*$/.test(chunk)
      )
      .join(' ')
      .replace(/\s+/g, ' ')
      .trim()

    const content = readableText
      ? sanitizeTextForUTF8(readableText)
      : 'Unable to extract text from PowerPoint file. Please ensure the file contains readable text content.'

    return {
      content,
      metadata: {
        extractionMethod: 'fallback',
        characterCount: content.length,
        warning: 'Basic text extraction used',
      },
    }
  }
}
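A minimal usage sketch for the new parser; the path is illustrative:

import { PptxParser } from '@/lib/file-parsers/pptx-parser'

async function extractSlideText(path: string): Promise<string> {
  const parser = new PptxParser()
  const result = await parser.parseFile(path)
  // extractionMethod is 'officeparser' on the happy path and 'fallback'
  // when the library is missing or fails on the given file
  if (result.metadata?.extractionMethod === 'fallback') {
    console.warn(result.metadata.warning)
  }
  return result.content
}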
@@ -6,14 +6,9 @@ import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('RawPdfParser')

-// Promisify zlib functions
const inflateAsync = promisify(zlib.inflate)
const unzipAsync = promisify(zlib.unzip)

/**
 * A simple PDF parser that extracts readable text from a PDF file.
- * This is used as a fallback when the pdf-parse library fails.
 */
export class RawPdfParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
@@ -23,7 +18,6 @@ export class RawPdfParser implements FileParser {
        throw new Error('No file path provided')
      }

-      // Read the file
      logger.info('Reading file...')
      const dataBuffer = await readFile(filePath)
      logger.info('File read successfully, size:', dataBuffer.length)
@@ -46,31 +40,22 @@ export class RawPdfParser implements FileParser {
    try {
      logger.info('Starting to parse buffer, size:', dataBuffer.length)

-      // Instead of trying to parse the binary PDF data directly,
-      // we'll extract only the text sections that are readable
-
-      // First convert to string but only for pattern matching, not for display
      const rawContent = dataBuffer.toString('utf-8')

-      // Extract basic PDF info
      let version = 'Unknown'
      let pageCount = 0

-      // Try to extract PDF version
      const versionMatch = rawContent.match(/%PDF-(\d+\.\d+)/)
      if (versionMatch?.[1]) {
        version = versionMatch[1]
      }

-      // Count pages using multiple methods for redundancy
-      // Method 1: Count "/Type /Page" occurrences (most reliable)
      const typePageMatches = rawContent.match(/\/Type\s*\/Page\b/gi)
      if (typePageMatches) {
        pageCount = typePageMatches.length
        logger.info('Found page count using /Type /Page:', pageCount)
      }

-      // Method 2: Look for "/Page" dictionary references
      if (pageCount === 0) {
        const pageMatches = rawContent.match(/\/Page\s*\//gi)
        if (pageMatches) {
@@ -79,19 +64,15 @@ export class RawPdfParser implements FileParser {
        }
      }

-      // Method 3: Look for "/Pages" object references
      if (pageCount === 0) {
        const pagesObjMatches = rawContent.match(/\/Pages\s+\d+\s+\d+\s+R/gi)
        if (pagesObjMatches && pagesObjMatches.length > 0) {
-          // Extract the object reference
          const pagesObjRef = pagesObjMatches[0].match(/\/Pages\s+(\d+)\s+\d+\s+R/i)
          if (pagesObjRef?.[1]) {
            const objNum = pagesObjRef[1]
-            // Find the referenced object
            const objRegex = new RegExp(`${objNum}\\s+0\\s+obj[\\s\\S]*?endobj`, 'i')
            const objMatch = rawContent.match(objRegex)
            if (objMatch) {
-              // Look for /Count within the Pages object
              const countMatch = objMatch[0].match(/\/Count\s+(\d+)/i)
              if (countMatch?.[1]) {
                pageCount = Number.parseInt(countMatch[1], 10)
@@ -102,50 +83,40 @@ export class RawPdfParser implements FileParser {
        }
      }

-      // Method 4: Count trailer references to get an approximate count
      if (pageCount === 0) {
        const trailerMatches = rawContent.match(/trailer/gi)
        if (trailerMatches) {
-          // This is just a rough estimate, not accurate
          pageCount = Math.max(1, Math.ceil(trailerMatches.length / 2))
          logger.info('Estimated page count using trailer references:', pageCount)
        }
      }

-      // Default to at least 1 page if we couldn't find any
      if (pageCount === 0) {
        pageCount = 1
        logger.info('Defaulting to 1 page as no count was found')
      }

-      // Extract text content using text markers commonly found in PDFs
      let extractedText = ''

-      // Method 1: Extract text between BT (Begin Text) and ET (End Text) markers
      const textMatches = rawContent.match(/BT[\s\S]*?ET/g)
      if (textMatches && textMatches.length > 0) {
        logger.info('Found', textMatches.length, 'text blocks')

        extractedText = textMatches
          .map((textBlock) => {
-            // Extract text objects (Tj, TJ) from the text block
            const textObjects = textBlock.match(/(\([^)]*\)|\[[^\]]*\])\s*(Tj|TJ)/g)
            if (textObjects && textObjects.length > 0) {
              return textObjects
                .map((obj) => {
-                  // Clean up text objects
                  let text = ''
                  if (obj.includes('Tj')) {
-                    // Handle Tj operator (simple string)
                    const match = obj.match(/\(([^)]*)\)\s*Tj/)
                    if (match?.[1]) {
                      text = match[1]
                    }
                  } else if (obj.includes('TJ')) {
-                    // Handle TJ operator (array of strings and positioning)
                    const match = obj.match(/\[(.*)\]\s*TJ/)
                    if (match?.[1]) {
-                      // Extract only the string parts from the array
                      const parts = match[1].match(/\([^)]*\)/g)
                      if (parts) {
                        text = parts.map((p) => p.slice(1, -1)).join(' ')
@@ -153,7 +124,6 @@ export class RawPdfParser implements FileParser {
                    }
                  }

-                  // Clean up PDF escape sequences
                  return text
                    .replace(/\\(\d{3})/g, (_, octal) =>
                      String.fromCharCode(Number.parseInt(octal, 8))
@@ -170,50 +140,42 @@ export class RawPdfParser implements FileParser {
          .trim()
      }

-      // Try to extract metadata from XML
      let metadataText = ''
      const xmlMatch = rawContent.match(/<x:xmpmeta[\s\S]*?<\/x:xmpmeta>/)
      if (xmlMatch) {
        const xmlContent = xmlMatch[0]
        logger.info('Found XML metadata')

-        // Extract document title
        const titleMatch = xmlContent.match(/<dc:title>[\s\S]*?<rdf:li[^>]*>(.*?)<\/rdf:li>/i)
        if (titleMatch?.[1]) {
          const title = titleMatch[1].replace(/<[^>]+>/g, '').trim()
          metadataText += `Document Title: ${title}\n\n`
        }

-        // Extract creator/author
        const creatorMatch = xmlContent.match(/<dc:creator>[\s\S]*?<rdf:li[^>]*>(.*?)<\/rdf:li>/i)
        if (creatorMatch?.[1]) {
          const creator = creatorMatch[1].replace(/<[^>]+>/g, '').trim()
          metadataText += `Author: ${creator}\n`
        }

-        // Extract creation date
        const dateMatch = xmlContent.match(/<xmp:CreateDate>(.*?)<\/xmp:CreateDate>/i)
        if (dateMatch?.[1]) {
          metadataText += `Created: ${dateMatch[1].trim()}\n`
        }

-        // Extract producer
        const producerMatch = xmlContent.match(/<pdf:Producer>(.*?)<\/pdf:Producer>/i)
        if (producerMatch?.[1]) {
          metadataText += `Producer: ${producerMatch[1].trim()}\n`
        }
      }

-      // Try to extract actual text content from content streams
      if (!extractedText || extractedText.length < 100 || extractedText.includes('/Type /Page')) {
        logger.info('Trying advanced text extraction from content streams')

-        // Find content stream references
        const contentRefs = rawContent.match(/\/Contents\s+\[?\s*(\d+)\s+\d+\s+R\s*\]?/g)
        if (contentRefs && contentRefs.length > 0) {
          logger.info('Found', contentRefs.length, 'content stream references')

-          // Extract object numbers from content references
          const objNumbers = contentRefs
            .map((ref) => {
              const match = ref.match(/\/Contents\s+\[?\s*(\d+)\s+\d+\s+R\s*\]?/)
@@ -223,7 +185,6 @@ export class RawPdfParser implements FileParser {

          logger.info('Content stream object numbers:', objNumbers)

-          // Try to find those objects in the content
          if (objNumbers.length > 0) {
            let textFromStreams = ''

@@ -232,12 +193,10 @@ export class RawPdfParser implements FileParser {
              const objMatch = rawContent.match(objRegex)

              if (objMatch) {
-                // Look for stream content within the object
                const streamMatch = objMatch[0].match(/stream\r?\n([\s\S]*?)\r?\nendstream/)
                if (streamMatch?.[1]) {
                  const streamContent = streamMatch[1]

-                  // Look for text operations in the stream (Tj, TJ, etc.)
                  const textFragments = streamContent.match(/\([^)]+\)\s*Tj|\[[^\]]*\]\s*TJ/g)
                  if (textFragments && textFragments.length > 0) {
                    const extractedFragments = textFragments
@@ -290,35 +249,27 @@ export class RawPdfParser implements FileParser {
          }
        }

-      // Try to decompress PDF streams
-      // This is especially helpful for PDFs with compressed content
      if (!extractedText || extractedText.length < 100) {
        logger.info('Trying to decompress PDF streams')

-        // Find compressed streams (FlateDecode)
        const compressedStreams = rawContent.match(
          /\/Filter\s*\/FlateDecode[\s\S]*?stream[\s\S]*?endstream/g
        )
        if (compressedStreams && compressedStreams.length > 0) {
          logger.info('Found', compressedStreams.length, 'compressed streams')

-          // Process each stream
          const decompressedContents = await Promise.all(
            compressedStreams.map(async (stream) => {
              try {
-                // Extract stream content between stream and endstream
                const streamMatch = stream.match(/stream\r?\n([\s\S]*?)\r?\nendstream/)
                if (!streamMatch || !streamMatch[1]) return ''

                const compressedData = Buffer.from(streamMatch[1], 'binary')

-                // Try different decompression methods
                try {
-                  // Try inflate (most common)
                  const decompressed = await inflateAsync(compressedData)
                  const content = decompressed.toString('utf-8')

-                  // Check if it contains readable text
                  const readable = content.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()
                  if (
                    readable.length > 50 &&
@@ -329,12 +280,10 @@ export class RawPdfParser implements FileParser {
                    return readable
                  }
                } catch (_inflateErr) {
-                  // Try unzip as fallback
                  try {
                    const decompressed = await unzipAsync(compressedData)
                    const content = decompressed.toString('utf-8')

-                    // Check if it contains readable text
                    const readable = content.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()
                    if (
                      readable.length > 50 &&
@@ -345,12 +294,10 @@ export class RawPdfParser implements FileParser {
                      return readable
                    }
                  } catch (_unzipErr) {
-                    // Both methods failed, continue to next stream
                    return ''
                  }
                }
              } catch (_error) {
-                // Error processing this stream, skip it
                return ''
              }

@@ -358,7 +305,6 @@ export class RawPdfParser implements FileParser {
            })
          )

-          // Filter out empty results and combine
          const decompressedText = decompressedContents
            .filter((text) => text && text.length > 0)
            .join('\n\n')
@@ -370,26 +316,19 @@ export class RawPdfParser implements FileParser {
        }
      }

-      // Method 2: Look for text stream data
      if (!extractedText || extractedText.length < 50) {
        logger.info('Trying alternative text extraction method with streams')

-        // Find text streams
        const streamMatches = rawContent.match(/stream[\s\S]*?endstream/g)
        if (streamMatches && streamMatches.length > 0) {
          logger.info('Found', streamMatches.length, 'streams')

-          // Process each stream to look for text content
          const textContent = streamMatches
            .map((stream) => {
-              // Remove 'stream' and 'endstream' markers
              const content = stream.replace(/^stream\r?\n|\r?\nendstream$/g, '')

-              // Look for readable ASCII text (more strict heuristic)
-              // Only keep ASCII printable characters
              const readable = content.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()

-              // Only keep content that looks like real text (has spaces, periods, etc.)
              if (
                readable.length > 20 &&
                readable.includes(' ') &&
@@ -400,7 +339,7 @@ export class RawPdfParser implements FileParser {
              }
              return ''
            })
-            .filter((text) => text.length > 0 && text.split(' ').length > 5) // Must have at least 5 words
+            .filter((text) => text.length > 0 && text.split(' ').length > 5)
            .join('\n\n')

          if (textContent.length > 0) {
@@ -409,22 +348,17 @@ export class RawPdfParser implements FileParser {
          }
        }

-      // Method 3: Look for object streams
      if (!extractedText || extractedText.length < 50) {
        logger.info('Trying object streams for text')

-        // Find object stream content
        const objMatches = rawContent.match(/\d+\s+\d+\s+obj[\s\S]*?endobj/g)
        if (objMatches && objMatches.length > 0) {
          logger.info('Found', objMatches.length, 'objects')

-          // Process objects looking for text content
          const textContent = objMatches
            .map((obj) => {
-              // Find readable text in the object - only keep ASCII printable characters
              const readable = obj.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()

-              // Only include if it looks like actual text (strict heuristic)
              if (
                readable.length > 50 &&
                readable.includes(' ') &&
@@ -445,8 +379,6 @@ export class RawPdfParser implements FileParser {
          }
        }

-      // If what we extracted is just PDF structure information rather than readable text,
-      // provide a clearer message
      if (
        extractedText &&
        (extractedText.includes('endobj') ||
@@ -459,53 +391,41 @@ export class RawPdfParser implements FileParser {
        )
        extractedText = metadataText
      } else if (metadataText && !extractedText.includes('Document Title:')) {
-        // Prepend metadata to extracted text if available
        extractedText = metadataText + (extractedText ? `\n\n${extractedText}` : '')
      }

-      // Validate that the extracted text looks meaningful
-      // Count how many recognizable words/characters it contains
      const validCharCount = (extractedText || '').replace(/[^\x20-\x7E\r\n]/g, '').length
      const totalCharCount = (extractedText || '').length
      const validRatio = validCharCount / (totalCharCount || 1)

-      // Check for common PDF artifacts that indicate binary corruption
      const hasBinaryArtifacts =
        extractedText &&
        (extractedText.includes('\\u') ||
          extractedText.includes('\\x') ||
          extractedText.includes('\0') ||
          /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\xFF]{10,}/g.test(extractedText) ||
-          validRatio < 0.7) // Less than 70% valid characters
+          validRatio < 0.7)

-      // Check if the content looks like gibberish
      const looksLikeGibberish =
        extractedText &&
-        // Too many special characters
        (extractedText.replace(/[a-zA-Z0-9\s.,:'"()[\]{}]/g, '').length / extractedText.length >
          0.3 ||
-          // Not enough spaces (real text has spaces between words)
          extractedText.split(' ').length < extractedText.length / 20)

-      // If no text was extracted, or if it's binary/gibberish,
-      // provide a helpful message instead
      if (!extractedText || extractedText.length < 50 || hasBinaryArtifacts || looksLikeGibberish) {
        logger.info('Could not extract meaningful text, providing fallback message')
        logger.info('Valid character ratio:', validRatio)
        logger.info('Has binary artifacts:', hasBinaryArtifacts)
        logger.info('Looks like gibberish:', looksLikeGibberish)

-        // Start with metadata if available
        if (metadataText) {
          extractedText = `${metadataText}\n`
        } else {
          extractedText = ''
        }

-        // Add basic PDF info
        extractedText += `This is a PDF document with ${pageCount} page(s) and version ${version}.\n\n`

-        // Try to find a title in the PDF structure that we might have missed
        const titleInStructure =
          rawContent.match(/title\s*:\s*([^\n]+)/i) ||
          rawContent.match(/Microsoft Word -\s*([^\n]+)/i)
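The FlateDecode handling above, isolated for reference; this is a sketch of the same inflate-then-unzip strategy using only Node's standard library, not an exported helper from this file:

import { promisify } from 'util'
import * as zlib from 'zlib'

const inflate = promisify(zlib.inflate)
const unzip = promisify(zlib.unzip)

// FlateDecode streams are zlib-compressed; try inflate first, fall back to
// unzip, and treat failure as "no readable text" rather than an error.
async function tryDecompress(data: Buffer): Promise<string> {
  try {
    return (await inflate(data)).toString('utf-8')
  } catch {
    try {
      return (await unzip(data)).toString('utf-8')
    } catch {
      return ''
    }
  }
}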
@@ -8,15 +8,12 @@ const logger = createLogger('TxtParser')
export class TxtParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
-      // Validate input
      if (!filePath) {
        throw new Error('No file path provided')
      }

-      // Read the file
      const buffer = await readFile(filePath)

-      // Use parseBuffer for consistent implementation
      return this.parseBuffer(buffer)
    } catch (error) {
      logger.error('TXT file error:', error)
@@ -28,7 +25,6 @@ export class TxtParser implements FileParser {
    try {
      logger.info('Parsing buffer, size:', buffer.length)

-      // Extract content and sanitize for UTF-8 storage
      const rawContent = buffer.toString('utf-8')
      const result = sanitizeTextForUTF8(rawContent)
@@ -8,4 +8,16 @@ export interface FileParser {
  parseBuffer?(buffer: Buffer): Promise<FileParseResult>
}

-export type SupportedFileType = 'pdf' | 'csv' | 'doc' | 'docx' | 'txt' | 'md' | 'xlsx' | 'xls'
+export type SupportedFileType =
+  | 'pdf'
+  | 'csv'
+  | 'doc'
+  | 'docx'
+  | 'txt'
+  | 'md'
+  | 'xlsx'
+  | 'xls'
+  | 'html'
+  | 'htm'
+  | 'pptx'
+  | 'ppt'
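Because isSupportedFileType (seen earlier in this changeset) is a type guard over this widened union, buffer-based callers can narrow a plain string before dispatching; a small sketch, with the extension value illustrative:

import { isSupportedFileType, parseBuffer } from '@/lib/file-parsers/index'
import type { SupportedFileType } from '@/lib/file-parsers/types'

async function parseUpload(buffer: Buffer, rawExt: string) {
  if (!isSupportedFileType(rawExt)) {
    throw new Error(`Unsupported file type: ${rawExt}`)
  }
  const ext: SupportedFileType = rawExt // narrowed: 'pdf' | ... | 'ppt'
  return parseBuffer(buffer, ext)
}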
@@ -9,19 +9,16 @@ const logger = createLogger('XlsxParser')
export class XlsxParser implements FileParser {
  async parseFile(filePath: string): Promise<FileParseResult> {
    try {
-      // Validate input
      if (!filePath) {
        throw new Error('No file path provided')
      }

-      // Check if file exists
      if (!existsSync(filePath)) {
        throw new Error(`File not found: ${filePath}`)
      }

      logger.info(`Parsing XLSX file: ${filePath}`)

-      // Read the workbook
      const workbook = XLSX.readFile(filePath)
      return this.processWorkbook(workbook)
    } catch (error) {
@@ -38,7 +35,6 @@ export class XlsxParser implements FileParser {
        throw new Error('Empty buffer provided')
      }

-      // Read the workbook from buffer
      const workbook = XLSX.read(buffer, { type: 'buffer' })
      return this.processWorkbook(workbook)
    } catch (error) {
@@ -53,25 +49,20 @@ export class XlsxParser implements FileParser {
    let content = ''
    let totalRows = 0

-    // Process each worksheet
    for (const sheetName of sheetNames) {
      const worksheet = workbook.Sheets[sheetName]

-      // Convert to array of objects
      const sheetData = XLSX.utils.sheet_to_json(worksheet, { header: 1 })
      sheets[sheetName] = sheetData
      totalRows += sheetData.length

-      // Add sheet content to the overall content string (clean sheet name)
      const cleanSheetName = sanitizeTextForUTF8(sheetName)
      content += `Sheet: ${cleanSheetName}\n`
      content += `=${'='.repeat(cleanSheetName.length + 6)}\n\n`

      if (sheetData.length > 0) {
-        // Process each row
        sheetData.forEach((row: unknown, rowIndex: number) => {
          if (Array.isArray(row) && row.length > 0) {
-            // Convert row to string, handling undefined/null values and cleaning non-UTF8 characters
            const rowString = row
              .map((cell) => {
                if (cell === null || cell === undefined) {
@@ -93,7 +84,6 @@ export class XlsxParser implements FileParser {

    logger.info(`XLSX parsing completed: ${sheetNames.length} sheets, ${totalRows} total rows`)

-    // Final cleanup of the entire content to ensure UTF-8 compatibility
    const cleanContent = sanitizeTextForUTF8(content).trim()

    return {
apps/sim/lib/knowledge/consts.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
export const TAG_SLOT_CONFIG = {
  text: {
    slots: ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] as const,
    maxSlots: 7,
  },
} as const

export const SUPPORTED_FIELD_TYPES = Object.keys(TAG_SLOT_CONFIG) as Array<
  keyof typeof TAG_SLOT_CONFIG
>

export const TAG_SLOTS = TAG_SLOT_CONFIG.text.slots

export const MAX_TAG_SLOTS = TAG_SLOT_CONFIG.text.maxSlots

export type TagSlot = (typeof TAG_SLOTS)[number]

export function getSlotsForFieldType(fieldType: string): readonly string[] {
  const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
  if (!config) {
    return []
  }
  return config.slots
}
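A small sketch of how the new constants compose; 'text' is the only field type defined so far:

import { getSlotsForFieldType, MAX_TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'

// ['tag1', ..., 'tag7'] for 'text'; [] for unknown field types
const slots = getSlotsForFieldType('text')

function hasFreeSlot(used: readonly TagSlot[]): boolean {
  return used.length < MAX_TAG_SLOTS
}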
@@ -1,9 +1,9 @@
import crypto, { randomUUID } from 'crypto'
import { tasks } from '@trigger.dev/sdk'
import { and, asc, desc, eq, inArray, isNull, sql } from 'drizzle-orm'
-import { getSlotsForFieldType, type TAG_SLOT_CONFIG } from '@/lib/constants/knowledge'
import { generateEmbeddings } from '@/lib/embeddings/utils'
import { env } from '@/lib/env'
+import { getSlotsForFieldType, type TAG_SLOT_CONFIG } from '@/lib/knowledge/consts'
import { processDocument } from '@/lib/knowledge/documents/document-processor'
import { getNextAvailableSlot } from '@/lib/knowledge/tags/service'
import { createLogger } from '@/lib/logs/console/logger'
@@ -17,8 +17,8 @@ import type { DocumentSortField, SortOrder } from './types'
const logger = createLogger('DocumentService')

const TIMEOUTS = {
-  OVERALL_PROCESSING: env.KB_CONFIG_MAX_DURATION * 1000,
-  EMBEDDINGS_API: env.KB_CONFIG_MAX_TIMEOUT * 18,
+  OVERALL_PROCESSING: (env.KB_CONFIG_MAX_DURATION || 300) * 1000,
+  EMBEDDINGS_API: (env.KB_CONFIG_MAX_TIMEOUT || 10000) * 18,
} as const

/**
@@ -38,17 +38,17 @@ function withTimeout<T>(
}

const PROCESSING_CONFIG = {
-  maxConcurrentDocuments: Math.max(1, Math.floor(env.KB_CONFIG_CONCURRENCY_LIMIT / 5)) || 4,
-  batchSize: Math.max(1, Math.floor(env.KB_CONFIG_BATCH_SIZE / 2)) || 10,
-  delayBetweenBatches: env.KB_CONFIG_DELAY_BETWEEN_BATCHES * 2,
-  delayBetweenDocuments: env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS * 2,
+  maxConcurrentDocuments: Math.max(1, Math.floor((env.KB_CONFIG_CONCURRENCY_LIMIT || 20) / 5)) || 4,
+  batchSize: Math.max(1, Math.floor((env.KB_CONFIG_BATCH_SIZE || 20) / 2)) || 10,
+  delayBetweenBatches: (env.KB_CONFIG_DELAY_BETWEEN_BATCHES || 100) * 2,
+  delayBetweenDocuments: (env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS || 50) * 2,
}

const REDIS_PROCESSING_CONFIG = {
-  maxConcurrentDocuments: env.KB_CONFIG_CONCURRENCY_LIMIT,
-  batchSize: env.KB_CONFIG_BATCH_SIZE,
-  delayBetweenBatches: env.KB_CONFIG_DELAY_BETWEEN_BATCHES,
-  delayBetweenDocuments: env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS,
+  maxConcurrentDocuments: env.KB_CONFIG_CONCURRENCY_LIMIT || 20,
+  batchSize: env.KB_CONFIG_BATCH_SIZE || 20,
+  delayBetweenBatches: env.KB_CONFIG_DELAY_BETWEEN_BATCHES || 100,
+  delayBetweenDocuments: env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS || 50,
}

let documentQueue: DocumentProcessingQueue | null = null
@@ -59,8 +59,8 @@ export function getDocumentQueue(): DocumentProcessingQueue {
    const config = redisClient ? REDIS_PROCESSING_CONFIG : PROCESSING_CONFIG
    documentQueue = new DocumentProcessingQueue({
      maxConcurrent: config.maxConcurrentDocuments,
-      retryDelay: env.KB_CONFIG_MIN_TIMEOUT,
-      maxRetries: env.KB_CONFIG_MAX_ATTEMPTS,
+      retryDelay: env.KB_CONFIG_MIN_TIMEOUT || 1000,
+      maxRetries: env.KB_CONFIG_MAX_ATTEMPTS || 3,
    })
  }
  return documentQueue
@@ -4,7 +4,7 @@ import {
  getSlotsForFieldType,
  SUPPORTED_FIELD_TYPES,
  type TAG_SLOT_CONFIG,
-} from '@/lib/constants/knowledge'
+} from '@/lib/knowledge/consts'
import type { BulkTagDefinitionsData, DocumentTagDefinition } from '@/lib/knowledge/tags/types'
import type {
  CreateTagDefinitionData,
@@ -36,11 +36,14 @@ export class ExecutionLogger implements IExecutionLoggerService {
    trigger: ExecutionTrigger
    environment: ExecutionEnvironment
    workflowState: WorkflowState
+    initialInput?: Record<string, unknown>
+    startedFromBlockId?: string
+    executionType?: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
  }): Promise<{
    workflowLog: WorkflowExecutionLog
    snapshot: WorkflowExecutionSnapshot
  }> {
-    const { workflowId, executionId, trigger, environment, workflowState } = params
+    const { workflowId, executionId, trigger, environment, workflowState, initialInput, startedFromBlockId, executionType } = params

    logger.debug(`Starting workflow execution ${executionId} for workflow ${workflowId}`)

@@ -66,6 +69,9 @@ export class ExecutionLogger implements IExecutionLoggerService {
        executionData: {
          environment,
          trigger,
+          initialInput: initialInput || {},
+          startedFromBlockId: startedFromBlockId || undefined,
+          executionType: executionType || trigger.type,
        },
      })
      .returning()
@@ -137,6 +143,39 @@ export class ExecutionLogger implements IExecutionLoggerService {
    // Extract files from trace spans and final output
    const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput)

+    // Read the existing executionData so we can merge new fields without losing initialInput/environment/trigger
+    const [existing] = await db
+      .select({ executionData: workflowExecutionLogs.executionData })
+      .from(workflowExecutionLogs)
+      .where(eq(workflowExecutionLogs.executionId, executionId))
+      .limit(1)
+
+    const existingExecutionData = (existing?.executionData as any) || {}
+
+    // Build simple block execution summaries from trace spans (flat list)
+    const blockExecutions: any[] = []
+    const collectBlocks = (spans?: any[]) => {
+      if (!Array.isArray(spans)) return
+      spans.forEach((span) => {
+        if (span?.blockId) {
+          blockExecutions.push({
+            id: span.id,
+            blockId: span.blockId,
+            blockName: span.name,
+            blockType: span.type,
+            startedAt: span.startTime,
+            endedAt: span.endTime,
+            durationMs: span.duration,
+            status: span.status || 'success',
+            inputData: span.input || {},
+            outputData: span.output || {},
+          })
+        }
+        if (span?.children && Array.isArray(span.children)) collectBlocks(span.children)
+      })
+    }
+    collectBlocks(traceSpans)
+
    const [updatedLog] = await db
      .update(workflowExecutionLogs)
      .set({
@@ -145,8 +184,10 @@ export class ExecutionLogger implements IExecutionLoggerService {
        totalDurationMs,
        files: executionFiles.length > 0 ? executionFiles : null,
        executionData: {
+          ...existingExecutionData,
          traceSpans,
          finalOutput,
+          blockExecutions: blockExecutions,
          tokenBreakdown: {
            prompt: costSummary.totalPromptTokens,
            completion: costSummary.totalCompletionTokens,
@@ -21,6 +21,9 @@ export interface SessionStartParams {
  workspaceId?: string
  variables?: Record<string, string>
  triggerData?: Record<string, unknown>
+  initialInput?: Record<string, unknown>
+  startBlockId?: string
+  executionType?: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
}

export interface SessionCompleteParams {
@@ -61,7 +64,7 @@ export class LoggingSession {
  }

  async start(params: SessionStartParams = {}): Promise<void> {
-    const { userId, workspaceId, variables, triggerData } = params
+    const { userId, workspaceId, variables, triggerData, initialInput, startBlockId, executionType } = params

    try {
      this.trigger = createTriggerObject(this.triggerType, triggerData)
@@ -80,6 +83,9 @@ export class LoggingSession {
        trigger: this.trigger,
        environment: this.environment,
        workflowState: this.workflowState,
+        initialInput,
+        startedFromBlockId: startBlockId,
+        executionType: executionType || this.triggerType,
      })

      if (this.requestId) {
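A hedged sketch of a call site passing the extended params; the session variable and the concrete values are illustrative, only the SessionStartParams fields come from this diff:

// Hypothetical chat-triggered run
await loggingSession.start({
  userId,                              // illustrative
  workspaceId,                         // illustrative
  initialInput: { message: 'hello' },  // persisted as executionData.initialInput
  startBlockId: 'block-abc',           // recorded as startedFromBlockId (illustrative id)
  executionType: 'chat',               // defaults to the session's triggerType when omitted
})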
@@ -11,6 +11,7 @@ import type {
} from '@/lib/logs/types'
import { db } from '@/db'
import { workflowExecutionSnapshots } from '@/db/schema'
+import { filterEdgesForTriggers } from '@/lib/workflows/trigger-rules'

const logger = createLogger('SnapshotService')

@@ -27,8 +28,27 @@ export class SnapshotService implements ISnapshotService {
    workflowId: string,
    state: WorkflowState
  ): Promise<SnapshotCreationResult> {
+    // Ensure consistency: apply the same trigger-edge filtering used by the editor/execution
+    const filteredState = filterEdgesForTriggers(state)
+
    // Hash the position-less state for deduplication (functional equivalence)
-    const stateHash = this.computeStateHash(state)
+    const stateHash = this.computeStateHash(filteredState)

+    // Log a concise preview of the state being considered for snapshot
+    try {
+      logger.info('📸 Preparing workflow snapshot', {
+        workflowId,
+        stateHash,
+        blocks: Object.entries(filteredState.blocks || {}).map(([id, b]: [string, any]) => ({
+          id,
+          type: (b as any)?.type,
+          name: (b as any)?.name,
+          triggerMode: (b as any)?.triggerMode === true,
+          enabled: (b as any)?.enabled !== false,
+        })),
+        edgesCount: (filteredState.edges || []).length,
+      })
+    } catch {}

    const existingSnapshot = await this.getSnapshotByHash(workflowId, stateHash)
    if (existingSnapshot) {
@@ -45,7 +65,7 @@ export class SnapshotService implements ISnapshotService {
      id: uuidv4(),
      workflowId,
      stateHash,
-      stateData: state, // Full state with positions, subblock values, etc.
+      stateData: filteredState, // Full state with positions, subblock values, etc., after consistent filtering
    }

    const [newSnapshot] = await db
@@ -53,8 +73,24 @@ export class SnapshotService implements ISnapshotService {
      .values(snapshotData)
      .returning()

+    logger.info('✅ Saved workflow snapshot', {
+      workflowId,
+      snapshotId: newSnapshot.id,
+      stateHash,
+      blocksCount: Object.keys(filteredState.blocks || {}).length,
+      edgesCount: (filteredState.edges || []).length,
+    })
+
+    // Emit the exact state saved (debug level to avoid log noise); redact sensitive values if needed
+    try {
+      // Lazy import to avoid cycles
+      const utils = await import('@/lib/utils')
+      const redactedState = utils.redactApiKeys(newSnapshot.stateData as any)
+      logger.debug('🧩 Snapshot state data (exact):', redactedState)
+    } catch {}
+
    logger.debug(`Created new snapshot for workflow ${workflowId} with hash ${stateHash}`)
-    logger.debug(`Stored full state with ${Object.keys(state.blocks || {}).length} blocks`)
+    logger.debug(`Stored full state with ${Object.keys(filteredState.blocks || {}).length} blocks`)
    return {
      snapshot: {
        ...newSnapshot,
@@ -109,6 +109,13 @@ export interface WorkflowExecutionLog {
      error: string
      stackTrace?: string
    }
+    // Newly added: persist the original triggering input (starter/chat/api/webhook)
+    initialInput?: Record<string, unknown>
+    // Newly added: where execution began and type metadata
+    startedFromBlockId?: string
+    executionType?: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
+    // Optional precomputed block execution summaries
+    blockExecutions?: any[]
  }
  // Top-level cost information
  cost?: {
@@ -4,7 +4,6 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'

const logger = createLogger('SimAgentClient')

-// Base URL for the sim-agent service
const SIM_AGENT_BASE_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

export interface SimAgentRequest {
@@ -45,7 +44,6 @@ class SimAgentClient {
    try {
      const url = `${this.baseUrl}${endpoint}`

-      // Use provided API key or try to get it from environment
      const requestHeaders: Record<string, string> = {
        'Content-Type': 'application/json',
        ...headers,
apps/sim/lib/workflows/trigger-rules.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import { getBlock } from '@/blocks'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

/**
 * Decide whether incoming edges should be blocked for a target block.
 * - Block if the block is a pure trigger category (webhook, etc.)
 * - Block if the block is currently in triggerMode
 * - Block if the block is the starter block
 */
export function shouldBlockIncomingEdgesForTarget(
  blockType: string,
  triggerMode: boolean | undefined
): boolean {
  // Starter blocks should never have incoming edges
  if (blockType === 'starter') return true

  // Runtime toggle
  if (triggerMode === true) return true

  // Pure trigger categories
  try {
    const config = getBlock(blockType)
    if (config?.category === 'triggers') return true
  } catch {}

  return false
}

/**
 * Return a copy of state with edges to trigger-like targets removed.
 */
export function filterEdgesForTriggers(state: WorkflowState): WorkflowState {
  const blocks = state.blocks || {}
  const edges = state.edges || []

  const filteredEdges = edges.filter((edge) => {
    const target = blocks[edge.target]
    if (!target) return false // Drop dangling edges defensively
    return !shouldBlockIncomingEdgesForTarget(target.type, target.triggerMode)
  })

  return {
    ...state,
    edges: filteredEdges,
  }
}
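A small sketch of the filter in action; the block types and the pared-down state shape are illustrative, reduced to the fields these helpers actually read:

import { filterEdgesForTriggers } from '@/lib/workflows/trigger-rules'

const state: any = {
  blocks: {
    start: { type: 'starter' },
    hook: { type: 'agent', triggerMode: true },
    step: { type: 'agent' },
  },
  edges: [
    { source: 'step', target: 'start' }, // dropped: starter target
    { source: 'step', target: 'hook' },  // dropped: triggerMode target
    { source: 'start', target: 'step' }, // kept
  ],
}

const filtered = filterEdgesForTriggers(state) // only start -> step survives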
@@ -75,12 +75,12 @@
    "ai": "^4.3.2",
    "better-auth": "^1.2.9",
    "browser-image-compression": "^2.0.2",
+    "cheerio": "1.1.2",
    "class-variance-authority": "^0.7.1",
    "clsx": "^2.1.1",
    "cmdk": "^1.0.0",
    "croner": "^9.0.0",
    "csv-parse": "^5.6.0",
    "csv-parser": "^3.2.0",
    "dat.gui": "0.7.9",
    "date-fns": "4.1.0",
    "drizzle-orm": "^0.41.0",
@@ -89,6 +89,7 @@
    "geist": "1.4.2",
    "groq-sdk": "^0.15.0",
    "html-to-text": "^9.0.5",
+    "iconv-lite": "0.7.0",
    "input-otp": "^1.4.2",
    "ioredis": "^5.6.0",
    "jose": "6.0.11",
@@ -98,11 +99,13 @@
    "lucide-react": "^0.479.0",
    "mammoth": "^1.9.0",
    "mysql2": "3.14.3",
-    "next": "^15.3.2",
+    "next": "^15.4.1",
    "next-runtime-env": "3.3.0",
    "next-themes": "^0.4.6",
+    "officeparser": "^5.2.0",
    "openai": "^4.91.1",
-    "pdf-parse": "^1.1.1",
+    "papaparse": "5.5.3",
+    "pdf-lib": "^1.17.1",
    "postgres": "^3.4.5",
    "prismjs": "^1.30.0",
    "react": "19.1.0",
@@ -117,13 +120,14 @@
    "rehype-highlight": "7.0.2",
    "remark-gfm": "4.0.1",
    "resend": "^4.1.2",
    "rtf-parser": "1.3.3",
    "rtf-stream-parser": "3.8.0",
    "socket.io": "^4.8.1",
    "stripe": "^17.7.0",
    "tailwind-merge": "^2.6.0",
    "tailwindcss-animate": "^1.0.7",
    "three": "0.177.0",
    "uuid": "^11.1.0",
    "word-extractor": "1.0.4",
    "xlsx": "0.18.5",
    "zod": "^3.24.2"
  },
@@ -134,10 +138,12 @@
    "@testing-library/user-event": "^14.6.1",
    "@trigger.dev/build": "4.0.1",
    "@types/html-to-text": "^9.0.4",
    "@types/iconv-lite": "0.0.1",
    "@types/js-yaml": "4.0.9",
    "@types/jsdom": "21.1.7",
    "@types/lodash": "^4.17.16",
    "@types/node": "24.2.1",
    "@types/papaparse": "5.3.16",
    "@types/prismjs": "^1.26.5",
    "@types/react": "^19",
    "@types/react-dom": "^19",
@@ -160,5 +166,9 @@
    "canvas",
    "better-sqlite3",
    "sharp"
-  ]
+  ],
+  "overrides": {
+    "next": "^15.4.1",
+    "@next/env": "^15.4.1"
+  }
}
@@ -6,6 +6,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import * as schema from '@/db/schema'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
import { shouldBlockIncomingEdgesForTarget } from '@/lib/workflows/trigger-rules'

const logger = createLogger('SocketDatabase')

@@ -597,7 +598,21 @@ async function handleBlockOperationTx(
        throw new Error(`Block ${payload.id} not found in workflow ${workflowId}`)
      }

      logger.debug(`Updated block trigger mode: ${payload.id} -> ${payload.triggerMode}`)
      // When enabling trigger mode, remove all incoming edges to this block at the database level
      if (payload.triggerMode === true) {
        const removed = await tx
          .delete(workflowEdges)
          .where(
            and(eq(workflowEdges.workflowId, workflowId), eq(workflowEdges.targetBlockId, payload.id))
          )
          .returning({ id: workflowEdges.id })

        logger.debug(
          `Updated block trigger mode: ${payload.id} -> ${payload.triggerMode}. Removed ${removed.length} incoming edges for trigger mode.`
        )
      } else {
        logger.debug(`Updated block trigger mode: ${payload.id} -> ${payload.triggerMode}`)
      }
      break
    }

@@ -743,6 +758,24 @@ async function handleEdgeOperationTx(
        throw new Error('Missing required fields for add edge operation')
      }

      // Guard: do not allow incoming edges to trigger-like targets
      const [targetBlock] = await tx
        .select({ id: workflowBlocks.id, type: workflowBlocks.type, triggerMode: workflowBlocks.triggerMode })
        .from(workflowBlocks)
        .where(and(eq(workflowBlocks.workflowId, workflowId), eq(workflowBlocks.id, payload.target)))
        .limit(1)

      if (!targetBlock) {
        throw new Error(`Target block ${payload.target} not found in workflow ${workflowId}`)
      }

      if (shouldBlockIncomingEdgesForTarget(targetBlock.type as string, targetBlock.triggerMode as boolean)) {
        logger.debug(
          `Rejected edge add ${payload.id}: incoming edges not allowed to ${payload.target} (type=${targetBlock.type}, triggerMode=${Boolean(targetBlock.triggerMode)})`
        )
        return
      }

      await tx.insert(workflowEdges).values({
        id: payload.id,
        workflowId,
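For context, shouldBlockIncomingEdgesForTarget is imported from @/lib/workflows/trigger-rules, but its body is not part of this diff. A minimal sketch of what such a predicate could look like, assuming the rule is "dedicated trigger blocks, and blocks with trigger mode enabled, accept no incoming edges" (the block type names below are illustrative, not taken from the repo):

// Hypothetical sketch of @/lib/workflows/trigger-rules
const TRIGGER_BLOCK_TYPES = new Set(['webhook', 'schedule']) // assumed trigger-like block types

export function shouldBlockIncomingEdgesForTarget(
  blockType: string,
  triggerMode: boolean
): boolean {
  // A target refuses incoming edges when it is a trigger block by type,
  // or when a regular block has been switched into trigger mode.
  return TRIGGER_BLOCK_TYPES.has(blockType) || triggerMode === true
}

Under that reading, the socket-level guard above and the trigger-mode edge cleanup in handleBlockOperationTx stay consistent: once a block acts as a trigger, the database never holds an edge pointing into it.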
apps/sim/stores/execution/debug-canvas/store.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import { create } from 'zustand'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

interface DebugCanvasState {
  isActive: boolean
  workflowState: WorkflowState | null
}

interface DebugCanvasActions {
  activate: (workflowState: WorkflowState) => void
  deactivate: () => void
  setWorkflowState: (workflowState: WorkflowState | null) => void
  clear: () => void
}

export const useDebugCanvasStore = create<DebugCanvasState & DebugCanvasActions>()((set) => ({
  isActive: false,
  workflowState: null,

  activate: (workflowState) => set({ isActive: true, workflowState }),
  deactivate: () => set({ isActive: false, workflowState: null }),
  setWorkflowState: (workflowState) => set({ workflowState }),
  clear: () => set({ isActive: false, workflowState: null }),
}))
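A short usage sketch for the store above (the component is hypothetical; only the store API comes from this diff):

import { useDebugCanvasStore } from '@/stores/execution/debug-canvas/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

// Toggle a frozen debug view of the workflow on and off.
function DebugCanvasToggle({ snapshot }: { snapshot: WorkflowState }) {
  const { isActive, activate, deactivate } = useDebugCanvasStore()
  return (
    <button onClick={() => (isActive ? deactivate() : activate(snapshot))}>
      {isActive ? 'Exit debug canvas' : 'Show debug canvas'}
    </button>
  )
}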
apps/sim/stores/execution/debug-snapshots/store.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
import { create } from 'zustand'
import type { ExecutionContext } from '@/executor/types'

interface BlockSnapshot {
  output: any
  executed: boolean
  executionTime?: number
}

interface SnapshotEntry {
  blockSnapshots: Map<string, BlockSnapshot>
  envVarValues?: Record<string, string>
  workflowVariables?: Record<string, any>
  pendingBlocks: string[]
  createdAt: number
}

interface DebugSnapshotState {
  blockSnapshots: Map<string, BlockSnapshot>
  envVarValues?: Record<string, string>
  workflowVariables?: Record<string, any>
  history: SnapshotEntry[]
}

interface DebugSnapshotActions {
  captureFromContext: (ctx: ExecutionContext) => void
  pushFromContext: (ctx: ExecutionContext, pendingBlocks: string[]) => void
  stepBack: () => SnapshotEntry | null
  clear: () => void
}

function buildBlockSnapshots(ctx: ExecutionContext): Map<string, BlockSnapshot> {
  const next = new Map<string, BlockSnapshot>()
  try {
    ctx.blockStates.forEach((state, key) => {
      next.set(String(key), {
        output: state?.output ?? {},
        executed: !!state?.executed,
        executionTime: state?.executionTime,
      })
    })
  } catch {}
  return next
}

export const useDebugSnapshotStore = create<DebugSnapshotState & DebugSnapshotActions>()(
  (set, get) => ({
    blockSnapshots: new Map<string, BlockSnapshot>(),
    envVarValues: undefined,
    workflowVariables: undefined,
    history: [],

    captureFromContext: (ctx: ExecutionContext) => {
      const next = buildBlockSnapshots(ctx)
      set({
        blockSnapshots: next,
        envVarValues: ctx.environmentVariables || undefined,
        workflowVariables: ctx.workflowVariables || undefined,
      })
    },

    pushFromContext: (ctx: ExecutionContext, pendingBlocks: string[]) => {
      const entry: SnapshotEntry = {
        blockSnapshots: buildBlockSnapshots(ctx),
        envVarValues: ctx.environmentVariables || undefined,
        workflowVariables: ctx.workflowVariables || undefined,
        pendingBlocks: [...pendingBlocks],
        createdAt: Date.now(),
      }
      set((state) => ({ history: [...state.history, entry] }))
    },

    stepBack: () => {
      const { history } = get()
      if (history.length <= 1) return null
      const nextHistory = history.slice(0, -1)
      const prev = nextHistory[nextHistory.length - 1]
      set({
        history: nextHistory,
        blockSnapshots: prev.blockSnapshots,
        envVarValues: prev.envVarValues,
        workflowVariables: prev.workflowVariables,
      })
      return prev
    },

    clear: () =>
      set({
        blockSnapshots: new Map(),
        envVarValues: undefined,
        workflowVariables: undefined,
        history: [],
      }),
  })
)
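Two details of the snapshot store are worth calling out: buildBlockSnapshots copies block state defensively (and swallows iteration errors), and stepBack never pops the last remaining entry, so the initial snapshot always survives a rewind. A hedged usage sketch of the intended flow (the pause/step handlers are illustrative, not code from this change):

import type { ExecutionContext } from '@/executor/types'
import { useDebugSnapshotStore } from '@/stores/execution/debug-snapshots/store'

// Record a snapshot whenever the debugger pauses with a set of pending blocks.
function onDebuggerPaused(ctx: ExecutionContext, pendingBlocks: string[]) {
  useDebugSnapshotStore.getState().pushFromContext(ctx, pendingBlocks)
}

// Rewind to the previous snapshot; null means we are already at the first one.
function onStepBack() {
  const prev = useDebugSnapshotStore.getState().stepBack()
  if (prev) {
    console.log(`Rewound to snapshot taken at ${new Date(prev.createdAt).toISOString()}`)
    console.log('Pending blocks at that point:', prev.pendingBlocks)
  }
}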
@@ -61,5 +61,20 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
  setExecutor: (executor) => set({ executor }),
  setDebugContext: (debugContext) => set({ debugContext }),
  setAutoPanDisabled: (disabled) => set({ autoPanDisabled: disabled }),
  setPanelFocusedBlockId: (id) => set({ panelFocusedBlockId: id }),
  setExecutingBlockIds: (ids) => set({ executingBlockIds: new Set(ids) }),
  setBreakpointId: (id) => set({ breakpointId: id }),

  setStartPositions: (ids) => set({ startPositionIds: new Set(Array.from(ids).slice(0, 1)) }),
  toggleStartPosition: (id) => {
    set((state) => {
      const isActive = state.startPositionIds.has(id)
      // Enforce single selection
      const next = isActive ? new Set<string>() : new Set<string>([id])
      return { startPositionIds: next }
    })
  },
  clearStartPositions: () => set({ startPositionIds: new Set() }),

  reset: () => set(initialState),
}))
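Note the invariant: setStartPositions clamps its input to at most one id, and toggleStartPosition replaces rather than extends the set, so the store can never hold more than a single start position. A small illustrative sequence (the block ids are made up, and the import path is an assumption):

import { useExecutionStore } from '@/stores/execution' // assumed store entry point

const { toggleStartPosition } = useExecutionStore.getState()
toggleStartPosition('block-a') // startPositionIds -> { 'block-a' }
toggleStartPosition('block-b') // startPositionIds -> { 'block-b' } (selection moves, never grows)
toggleStartPosition('block-b') // startPositionIds -> {} (toggling the active id clears it)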
@@ -9,6 +9,10 @@ export interface ExecutionState {
  executor: Executor | null
  debugContext: ExecutionContext | null
  autoPanDisabled: boolean
  panelFocusedBlockId?: string | null
  executingBlockIds: Set<string>
  breakpointId: string | null
  startPositionIds: Set<string>
}

export interface ExecutionActions {
@@ -19,6 +23,12 @@ export interface ExecutionActions {
  setExecutor: (executor: Executor | null) => void
  setDebugContext: (context: ExecutionContext | null) => void
  setAutoPanDisabled: (disabled: boolean) => void
  setPanelFocusedBlockId: (id: string | null) => void
  setExecutingBlockIds: (ids: Set<string>) => void
  setBreakpointId: (id: string | null) => void
  setStartPositions: (ids: Set<string>) => void
  toggleStartPosition: (id: string) => void
  clearStartPositions: () => void
  reset: () => void
}

@@ -30,6 +40,10 @@ export const initialState: ExecutionState = {
  executor: null,
  debugContext: null,
  autoPanDisabled: false,
  panelFocusedBlockId: null,
  executingBlockIds: new Set(),
  breakpointId: null,
  startPositionIds: new Set(),
}

// Types for panning functionality

@@ -1,4 +1,4 @@
export type PanelTab = 'console' | 'variables' | 'chat' | 'copilot'
export type PanelTab = 'console' | 'variables' | 'chat' | 'copilot' | 'debug'

export interface PanelStore {
  isOpen: boolean

218
bun.lock
218
bun.lock
@@ -14,9 +14,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.0.0-beta.5",
|
||||
"@next/env": "^15.3.2",
|
||||
"@types/word-extractor": "1.0.6",
|
||||
"dotenv-cli": "^8.0.0",
|
||||
"@next/env": "^15.4.1",
|
||||
"husky": "9.1.7",
|
||||
"lint-staged": "16.0.0",
|
||||
"turbo": "2.5.6",
|
||||
@@ -105,12 +103,12 @@
|
||||
"ai": "^4.3.2",
|
||||
"better-auth": "^1.2.9",
|
||||
"browser-image-compression": "^2.0.2",
|
||||
"cheerio": "1.1.2",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.0.0",
|
||||
"croner": "^9.0.0",
|
||||
"csv-parse": "^5.6.0",
|
||||
"csv-parser": "^3.2.0",
|
||||
"dat.gui": "0.7.9",
|
||||
"date-fns": "4.1.0",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
@@ -119,6 +117,7 @@
|
||||
"geist": "1.4.2",
|
||||
"groq-sdk": "^0.15.0",
|
||||
"html-to-text": "^9.0.5",
|
||||
"iconv-lite": "0.7.0",
|
||||
"input-otp": "^1.4.2",
|
||||
"ioredis": "^5.6.0",
|
||||
"jose": "6.0.11",
|
||||
@@ -128,11 +127,13 @@
|
||||
"lucide-react": "^0.479.0",
|
||||
"mammoth": "^1.9.0",
|
||||
"mysql2": "3.14.3",
|
||||
"next": "^15.3.2",
|
||||
"next": "^15.4.1",
|
||||
"next-runtime-env": "3.3.0",
|
||||
"next-themes": "^0.4.6",
|
||||
"officeparser": "^5.2.0",
|
||||
"openai": "^4.91.1",
|
||||
"pdf-parse": "^1.1.1",
|
||||
"papaparse": "5.5.3",
|
||||
"pdf-lib": "^1.17.1",
|
||||
"postgres": "^3.4.5",
|
||||
"prismjs": "^1.30.0",
|
||||
"react": "19.1.0",
|
||||
@@ -147,13 +148,14 @@
|
||||
"rehype-highlight": "7.0.2",
|
||||
"remark-gfm": "4.0.1",
|
||||
"resend": "^4.1.2",
|
||||
"rtf-parser": "1.3.3",
|
||||
"rtf-stream-parser": "3.8.0",
|
||||
"socket.io": "^4.8.1",
|
||||
"stripe": "^17.7.0",
|
||||
"tailwind-merge": "^2.6.0",
|
||||
"tailwindcss-animate": "^1.0.7",
|
||||
"three": "0.177.0",
|
||||
"uuid": "^11.1.0",
|
||||
"word-extractor": "1.0.4",
|
||||
"xlsx": "0.18.5",
|
||||
"zod": "^3.24.2",
|
||||
},
|
||||
@@ -164,10 +166,12 @@
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
"@trigger.dev/build": "4.0.1",
|
||||
"@types/html-to-text": "^9.0.4",
|
||||
"@types/iconv-lite": "0.0.1",
|
||||
"@types/js-yaml": "4.0.9",
|
||||
"@types/jsdom": "21.1.7",
|
||||
"@types/lodash": "^4.17.16",
|
||||
"@types/node": "24.2.1",
|
||||
"@types/papaparse": "5.3.16",
|
||||
"@types/prismjs": "^1.26.5",
|
||||
"@types/react": "^19",
|
||||
"@types/react-dom": "^19",
|
||||
@@ -224,6 +228,8 @@
|
||||
"sharp",
|
||||
],
|
||||
"overrides": {
|
||||
"@next/env": "^15.4.1",
|
||||
"next": "^15.4.1",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
},
|
||||
@@ -654,25 +660,45 @@
|
||||
|
||||
"@mdx-js/mdx": ["@mdx-js/mdx@3.1.1", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdx": "^2.0.0", "acorn": "^8.0.0", "collapse-white-space": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "estree-util-scope": "^1.0.0", "estree-walker": "^3.0.0", "hast-util-to-jsx-runtime": "^2.0.0", "markdown-extensions": "^2.0.0", "recma-build-jsx": "^1.0.0", "recma-jsx": "^1.0.0", "recma-stringify": "^1.0.0", "rehype-recma": "^1.0.0", "remark-mdx": "^3.0.0", "remark-parse": "^11.0.0", "remark-rehype": "^11.0.0", "source-map": "^0.7.0", "unified": "^11.0.0", "unist-util-position-from-estree": "^2.0.0", "unist-util-stringify-position": "^4.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, "sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ=="],
|
||||
|
||||
"@next/env": ["@next/env@15.5.2", "", {}, "sha512-Qe06ew4zt12LeO6N7j8/nULSOe3fMXE4dM6xgpBQNvdzyK1sv5y4oAP3bq4LamrvGCZtmRYnW8URFCeX5nFgGg=="],
|
||||
"@napi-rs/canvas": ["@napi-rs/canvas@0.1.78", "", { "optionalDependencies": { "@napi-rs/canvas-android-arm64": "0.1.78", "@napi-rs/canvas-darwin-arm64": "0.1.78", "@napi-rs/canvas-darwin-x64": "0.1.78", "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.78", "@napi-rs/canvas-linux-arm64-gnu": "0.1.78", "@napi-rs/canvas-linux-arm64-musl": "0.1.78", "@napi-rs/canvas-linux-riscv64-gnu": "0.1.78", "@napi-rs/canvas-linux-x64-gnu": "0.1.78", "@napi-rs/canvas-linux-x64-musl": "0.1.78", "@napi-rs/canvas-win32-x64-msvc": "0.1.78" } }, "sha512-YaBHJvT+T1DoP16puvWM6w46Lq3VhwKIJ8th5m1iEJyGh7mibk5dT7flBvMQ1EH1LYmMzXJ+OUhu+8wQ9I6u7g=="],
|
||||
|
||||
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-8bGt577BXGSd4iqFygmzIfTYizHb0LGWqH+qgIF/2EDxS5JsSdERJKA8WgwDyNBZgTIIA4D8qUtoQHmxIIquoQ=="],
|
||||
"@napi-rs/canvas-android-arm64": ["@napi-rs/canvas-android-arm64@0.1.78", "", { "os": "android", "cpu": "arm64" }, "sha512-N1ikxztjrRmh8xxlG5kYm1RuNr8ZW1EINEDQsLhhuy7t0pWI/e7SH91uFVLZKCMDyjel1tyWV93b5fdCAi7ggw=="],
|
||||
|
||||
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-2DjnmR6JHK4X+dgTXt5/sOCu/7yPtqpYt8s8hLkHFK3MGkka2snTv3yRMdHvuRtJVkPwCGsvBSwmoQCHatauFQ=="],
|
||||
"@napi-rs/canvas-darwin-arm64": ["@napi-rs/canvas-darwin-arm64@0.1.78", "", { "os": "darwin", "cpu": "arm64" }, "sha512-FA3aCU3G5yGc74BSmnLJTObnZRV+HW+JBTrsU+0WVVaNyVKlb5nMvYAQuieQlRVemsAA2ek2c6nYtHh6u6bwFw=="],
|
||||
|
||||
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-3j7SWDBS2Wov/L9q0mFJtEvQ5miIqfO4l7d2m9Mo06ddsgUK8gWfHGgbjdFlCp2Ek7MmMQZSxpGFqcC8zGh2AA=="],
|
||||
"@napi-rs/canvas-darwin-x64": ["@napi-rs/canvas-darwin-x64@0.1.78", "", { "os": "darwin", "cpu": "x64" }, "sha512-xVij69o9t/frixCDEoyWoVDKgE3ksLGdmE2nvBWVGmoLu94MWUlv2y4Qzf5oozBmydG5Dcm4pRHFBM7YWa1i6g=="],
|
||||
|
||||
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-s6N8k8dF9YGc5T01UPQ08yxsK6fUow5gG1/axWc1HVVBYQBgOjca4oUZF7s4p+kwhkB1bDSGR8QznWrFZ/Rt5g=="],
|
||||
"@napi-rs/canvas-linux-arm-gnueabihf": ["@napi-rs/canvas-linux-arm-gnueabihf@0.1.78", "", { "os": "linux", "cpu": "arm" }, "sha512-aSEXrLcIpBtXpOSnLhTg4jPsjJEnK7Je9KqUdAWjc7T8O4iYlxWxrXFIF8rV8J79h5jNdScgZpAUWYnEcutR3g=="],
|
||||
|
||||
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-o1RV/KOODQh6dM6ZRJGZbc+MOAHww33Vbs5JC9Mp1gDk8cpEO+cYC/l7rweiEalkSm5/1WGa4zY7xrNwObN4+Q=="],
|
||||
"@napi-rs/canvas-linux-arm64-gnu": ["@napi-rs/canvas-linux-arm64-gnu@0.1.78", "", { "os": "linux", "cpu": "arm64" }, "sha512-dlEPRX1hLGKaY3UtGa1dtkA1uGgFITn2mDnfI6YsLlYyLJQNqHx87D1YTACI4zFCUuLr/EzQDzuX+vnp9YveVg=="],
|
||||
|
||||
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-/VUnh7w8RElYZ0IV83nUcP/J4KJ6LLYliiBIri3p3aW2giF+PAVgZb6mk8jbQSB3WlTai8gEmCAr7kptFa1H6g=="],
|
||||
"@napi-rs/canvas-linux-arm64-musl": ["@napi-rs/canvas-linux-arm64-musl@0.1.78", "", { "os": "linux", "cpu": "arm64" }, "sha512-TsCfjOPZtm5Q/NO1EZHR5pwDPSPjPEttvnv44GL32Zn1uvudssjTLbvaG1jHq81Qxm16GTXEiYLmx4jOLZQYlg=="],
|
||||
|
||||
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-sMPyTvRcNKXseNQ/7qRfVRLa0VhR0esmQ29DD6pqvG71+JdVnESJaHPA8t7bc67KD5spP3+DOCNLhqlEI2ZgQg=="],
|
||||
"@napi-rs/canvas-linux-riscv64-gnu": ["@napi-rs/canvas-linux-riscv64-gnu@0.1.78", "", { "os": "linux", "cpu": "none" }, "sha512-+cpTTb0GDshEow/5Fy8TpNyzaPsYb3clQIjgWRmzRcuteLU+CHEU/vpYvAcSo7JxHYPJd8fjSr+qqh+nI5AtmA=="],
|
||||
|
||||
"@next/swc-win32-ia32-msvc": ["@next/swc-win32-ia32-msvc@14.2.32", "", { "os": "win32", "cpu": "ia32" }, "sha512-jHUeDPVHrgFltqoAqDB6g6OStNnFxnc7Aks3p0KE0FbwAvRg6qWKYF5mSTdCTxA3axoSAUwxYdILzXJfUwlHhA=="],
|
||||
"@napi-rs/canvas-linux-x64-gnu": ["@napi-rs/canvas-linux-x64-gnu@0.1.78", "", { "os": "linux", "cpu": "x64" }, "sha512-wxRcvKfvYBgtrO0Uy8OmwvjlnTcHpY45LLwkwVNIWHPqHAsyoTyG/JBSfJ0p5tWRzMOPDCDqdhpIO4LOgXjeyg=="],
|
||||
|
||||
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.2", "", { "os": "win32", "cpu": "x64" }, "sha512-W5VvyZHnxG/2ukhZF/9Ikdra5fdNftxI6ybeVKYvBPDtyx7x4jPPSNduUkfH5fo3zG0JQ0bPxgy41af2JX5D4Q=="],
|
||||
"@napi-rs/canvas-linux-x64-musl": ["@napi-rs/canvas-linux-x64-musl@0.1.78", "", { "os": "linux", "cpu": "x64" }, "sha512-vQFOGwC9QDP0kXlhb2LU1QRw/humXgcbVp8mXlyBqzc/a0eijlLF9wzyarHC1EywpymtS63TAj8PHZnhTYN6hg=="],
|
||||
|
||||
"@napi-rs/canvas-win32-x64-msvc": ["@napi-rs/canvas-win32-x64-msvc@0.1.78", "", { "os": "win32", "cpu": "x64" }, "sha512-/eKlTZBtGUgpRKalzOzRr6h7KVSuziESWXgBcBnXggZmimwIJWPJlEcbrx5Tcwj8rPuZiANXQOG9pPgy9Q4LTQ=="],
|
||||
|
||||
"@next/env": ["@next/env@15.4.1", "", {}, "sha512-DXQwFGAE2VH+f2TJsKepRXpODPU+scf5fDbKOME8MMyeyswe4XwgRdiiIYmBfkXU+2ssliLYznajTrOQdnLR5A=="],
|
||||
|
||||
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.4.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-L+81yMsiHq82VRXS2RVq6OgDwjvA4kDksGU8hfiDHEXP+ncKIUhUsadAVB+MRIp2FErs/5hpXR0u2eluWPAhig=="],
|
||||
|
||||
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.4.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-jfz1RXu6SzL14lFl05/MNkcN35lTLMJWPbqt7Xaj35+ZWAX342aePIJrN6xBdGeKl6jPXJm0Yqo3Xvh3Gpo3Uw=="],
|
||||
|
||||
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-k0tOFn3dsnkaGfs6iQz8Ms6f1CyQe4GacXF979sL8PNQxjYS1swx9VsOyUQYaPoGV8nAZ7OX8cYaeiXGq9ahPQ=="],
|
||||
|
||||
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-4ogGQ/3qDzbbK3IwV88ltihHFbQVq6Qr+uEapzXHXBH1KsVBZOB50sn6BWHPcFjwSoMX2Tj9eH/fZvQnSIgc3g=="],
|
||||
|
||||
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-Jj0Rfw3wIgp+eahMz/tOGwlcYYEFjlBPKU7NqoOkTX0LY45i5W0WcDpgiDWSLrN8KFQq/LW7fZq46gxGCiOYlQ=="],
|
||||
|
||||
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-9WlEZfnw1vFqkWsTMzZDgNL7AUI1aiBHi0S2m8jvycPyCq/fbZjtE/nDkhJRYbSjXbtRHYLDBlmP95kpjEmJbw=="],
|
||||
|
||||
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.4.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-WodRbZ9g6CQLRZsG3gtrA9w7Qfa9BwDzhFVdlI6sV0OCPq9JrOrJSp9/ioLsezbV8w9RCJ8v55uzJuJ5RgWLZg=="],
|
||||
|
||||
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.4.1", "", { "os": "win32", "cpu": "x64" }, "sha512-y+wTBxelk2xiNofmDOVU7O5WxTHcvOoL3srOM0kxTzKDjQ57kPU0tpnPJ/BWrRnsOwXEv0+3QSbGR7hY4n9LkQ=="],
|
||||
|
||||
"@noble/ciphers": ["@noble/ciphers@0.6.0", "", {}, "sha512-mIbq/R9QXk5/cTfESb1OKtyFnk7oc1Om/8onA1158K9/OZUQFDEVy55jVTato+xmp3XX6F6Qh0zz0Nc1AxAlRQ=="],
|
||||
|
||||
@@ -804,6 +830,10 @@
|
||||
|
||||
"@orama/orama": ["@orama/orama@3.1.12", "", {}, "sha512-U7PY8FwXHuJ6bNBpbsqe0KLzb91IcJuORDggqHHkFy1waokY5SpWLN9tzB3AOW776awp6s1bjwts9I9Davy3lw=="],
|
||||
|
||||
"@pdf-lib/standard-fonts": ["@pdf-lib/standard-fonts@1.0.0", "", { "dependencies": { "pako": "^1.0.6" } }, "sha512-hU30BK9IUN/su0Mn9VdlVKsWBS6GyhVfqjwl1FjZN4TxP6cCw0jP2w7V3Hf5uX7M0AZJ16vey9yE0ny7Sa59ZA=="],
|
||||
|
||||
"@pdf-lib/upng": ["@pdf-lib/upng@1.0.1", "", { "dependencies": { "pako": "^1.0.10" } }, "sha512-dQK2FUMQtowVP00mtIksrlZhdFXQZPC+taih1q4CvPZ5vqdxR/LKBaFg0oAfzd1GlHZXXSPdQfzQnt+ViGvEIQ=="],
|
||||
|
||||
"@peculiar/asn1-android": ["@peculiar/asn1-android@2.4.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.4.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-YFueREq97CLslZZBI8dKzis7jMfEHSLxM+nr0Zdx1POiXFLjqqwoY5s0F1UimdBiEw/iKlHey2m56MRDv7Jtyg=="],
|
||||
|
||||
"@peculiar/asn1-ecc": ["@peculiar/asn1-ecc@2.4.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.4.0", "@peculiar/asn1-x509": "^2.4.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-fJiYUBCJBDkjh347zZe5H81BdJ0+OGIg0X9z06v8xXUoql3MFeENUX0JsjCaVaU9A0L85PefLPGYkIoGpTnXLQ=="],
|
||||
@@ -1224,8 +1254,6 @@
|
||||
|
||||
"@standard-schema/utils": ["@standard-schema/utils@0.3.0", "", {}, "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g=="],
|
||||
|
||||
"@swc/counter": ["@swc/counter@0.1.3", "", {}, "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ=="],
|
||||
|
||||
"@swc/helpers": ["@swc/helpers@0.5.15", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g=="],
|
||||
|
||||
"@t3-oss/env-core": ["@t3-oss/env-core@0.13.4", "", { "peerDependencies": { "arktype": "^2.1.0", "typescript": ">=5.0.0", "valibot": "^1.0.0-beta.7 || ^1.0.0", "zod": "^3.24.0 || ^4.0.0-beta.0" }, "optionalPeers": ["typescript", "valibot", "zod"] }, "sha512-zVOiYO0+CF7EnBScz8s0O5JnJLPTU0lrUi8qhKXfIxIJXvI/jcppSiXXsEJwfB4A6XZawY/Wg/EQGKANi/aPmQ=="],
|
||||
@@ -1274,6 +1302,8 @@
|
||||
|
||||
"@testing-library/user-event": ["@testing-library/user-event@14.6.1", "", { "peerDependencies": { "@testing-library/dom": ">=7.21.4" } }, "sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw=="],
|
||||
|
||||
"@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="],
|
||||
|
||||
"@trigger.dev/build": ["@trigger.dev/build@4.0.1", "", { "dependencies": { "@trigger.dev/core": "4.0.1", "pkg-types": "^1.1.3", "tinyglobby": "^0.2.2", "tsconfck": "3.1.3" } }, "sha512-PGOnCPjVSKkj72xmJb6mdRbzDSP3Ti/C5/tfaBFdSZ7qcoVctSzDfS5iwEGsSoSWSIv+MVy12c4v7Ji/r7MO1A=="],
|
||||
|
||||
"@trigger.dev/core": ["@trigger.dev/core@4.0.1", "", { "dependencies": { "@bugsnag/cuid": "^3.1.1", "@electric-sql/client": "1.0.0-beta.1", "@google-cloud/precise-date": "^4.0.0", "@jsonhero/path": "^1.0.21", "@opentelemetry/api": "1.9.0", "@opentelemetry/api-logs": "0.203.0", "@opentelemetry/core": "2.0.1", "@opentelemetry/exporter-logs-otlp-http": "0.203.0", "@opentelemetry/exporter-trace-otlp-http": "0.203.0", "@opentelemetry/instrumentation": "0.203.0", "@opentelemetry/resources": "2.0.1", "@opentelemetry/sdk-logs": "0.203.0", "@opentelemetry/sdk-trace-base": "2.0.1", "@opentelemetry/sdk-trace-node": "2.0.1", "@opentelemetry/semantic-conventions": "1.36.0", "dequal": "^2.0.3", "eventsource": "^3.0.5", "eventsource-parser": "^3.0.0", "execa": "^8.0.1", "humanize-duration": "^3.27.3", "jose": "^5.4.0", "nanoid": "3.3.8", "prom-client": "^15.1.0", "socket.io": "4.7.4", "socket.io-client": "4.7.5", "std-env": "^3.8.1", "superjson": "^2.2.1", "tinyexec": "^0.3.2", "uncrypto": "^0.1.3", "zod": "3.25.76", "zod-error": "1.5.0", "zod-validation-error": "^1.5.0" } }, "sha512-NTffiVPy/zFopujdptGGoy3lj3/CKV16JA8CobCfsEpDfu+K+wEys+9p8PFY8j5I0UI86aqlFpJu9/VRqUQ/yQ=="],
|
||||
@@ -1382,6 +1412,8 @@
|
||||
|
||||
"@types/html-to-text": ["@types/html-to-text@9.0.4", "", {}, "sha512-pUY3cKH/Nm2yYrEmDlPR1mR7yszjGx4DrwPjQ702C4/D5CwHuZTgZdIdwPkRbcuhs7BAh2L5rg3CL5cbRiGTCQ=="],
|
||||
|
||||
"@types/iconv-lite": ["@types/iconv-lite@0.0.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-SsRBQxGw7/2/NxYJfBdiUx5a7Ms/voaUhOO9u2y9FTeTNBO1PXohzE4i3JfD8q2Te42HLTn5pyZtDf8j1bPKgQ=="],
|
||||
|
||||
"@types/inquirer": ["@types/inquirer@8.2.12", "", { "dependencies": { "@types/through": "*", "rxjs": "^7.2.0" } }, "sha512-YxURZF2ZsSjU5TAe06tW0M3sL4UI9AMPA6dd8I72uOtppzNafcY38xkYgCZ/vsVOAyNdzHmvtTpLWilOrbP0dQ=="],
|
||||
|
||||
"@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="],
|
||||
@@ -1406,6 +1438,8 @@
|
||||
|
||||
"@types/normalize-path": ["@types/normalize-path@3.0.2", "", {}, "sha512-DO++toKYPaFn0Z8hQ7Tx+3iT9t77IJo/nDiqTXilgEP+kPNIYdpS9kh3fXuc53ugqwp9pxC1PVjCpV1tQDyqMA=="],
|
||||
|
||||
"@types/papaparse": ["@types/papaparse@5.3.16", "", { "dependencies": { "@types/node": "*" } }, "sha512-T3VuKMC2H0lgsjI9buTB3uuKj3EMD2eap1MOuEQuBQ44EnDx/IkGhU6EwiTf9zG3za4SKlmwKAImdDKdNnCsXg=="],
|
||||
|
||||
"@types/pg": ["@types/pg@8.6.1", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w=="],
|
||||
|
||||
"@types/pg-pool": ["@types/pg-pool@2.0.6", "", { "dependencies": { "@types/pg": "*" } }, "sha512-TaAUE5rq2VQYxab5Ts7WZhKNmuN78Q6PiFonTDdpbx8a1H0M1vhy3rhiMjl+e2iHmogyMw7jZF4FrE6eJUy5HQ=="],
|
||||
@@ -1436,8 +1470,6 @@
|
||||
|
||||
"@types/webxr": ["@types/webxr@0.5.23", "", {}, "sha512-GPe4AsfOSpqWd3xA/0gwoKod13ChcfV67trvxaW2krUbgb9gxQjnCx8zGshzMl8LSHZlNH5gQ8LNScsDuc7nGQ=="],
|
||||
|
||||
"@types/word-extractor": ["@types/word-extractor@1.0.6", "", { "dependencies": { "@types/node": "*" } }, "sha512-NDrvZXGJi7cTKXGr8GTP08HiqiueggR1wfHZvBj1sfL8e52qecBSlvl1rBWrvOY0LLkk1DISkKVlFqMTfipLbQ=="],
|
||||
|
||||
"@types/xlsx": ["@types/xlsx@0.0.36", "", { "dependencies": { "xlsx": "*" } }, "sha512-mvfrKiKKMErQzLMF8ElYEH21qxWCZtN59pHhWGmWCWFJStYdMWjkDSAy6mGowFxHXaXZWe5/TW7pBUiWclIVOw=="],
|
||||
|
||||
"@typespec/ts-http-runtime": ["@typespec/ts-http-runtime@0.3.0", "", { "dependencies": { "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.0", "tslib": "^2.6.2" } }, "sha512-sOx1PKSuFwnIl7z4RN0Ls7N9AQawmR9r66eI5rFCzLDIs8HTIYrIpH9QjYWoX0lkgGrkLxXhi4QnK7MizPRrIg=="],
|
||||
@@ -1614,8 +1646,6 @@
|
||||
|
||||
"bufrw": ["bufrw@1.4.0", "", { "dependencies": { "ansi-color": "^0.2.1", "error": "^7.0.0", "hexer": "^1.5.0", "xtend": "^4.0.0" } }, "sha512-sWm8iPbqvL9+5SiYxXH73UOkyEbGQg7kyHQmReF89WJHQJw2eV4P/yZ0E+b71cczJ4pPobVhXxgQcmfSTgGHxQ=="],
|
||||
|
||||
"busboy": ["busboy@1.6.0", "", { "dependencies": { "streamsearch": "^1.1.0" } }, "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA=="],
|
||||
|
||||
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
|
||||
|
||||
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
|
||||
@@ -1648,6 +1678,10 @@
|
||||
|
||||
"check-error": ["check-error@2.1.1", "", {}, "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw=="],
|
||||
|
||||
"cheerio": ["cheerio@1.1.2", "", { "dependencies": { "cheerio-select": "^2.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.3", "domutils": "^3.2.2", "encoding-sniffer": "^0.2.1", "htmlparser2": "^10.0.0", "parse5": "^7.3.0", "parse5-htmlparser2-tree-adapter": "^7.1.0", "parse5-parser-stream": "^7.1.2", "undici": "^7.12.0", "whatwg-mimetype": "^4.0.0" } }, "sha512-IkxPpb5rS/d1IiLbHMgfPuS0FgiWTtFIm/Nj+2woXDLTZ7fOT2eqzgYbdMlLweqlHbsZjxEChoVK+7iph7jyQg=="],
|
||||
|
||||
"cheerio-select": ["cheerio-select@2.1.0", "", { "dependencies": { "boolbase": "^1.0.0", "css-select": "^5.1.0", "css-what": "^6.1.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1" } }, "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g=="],
|
||||
|
||||
"chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],
|
||||
|
||||
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
|
||||
@@ -1708,6 +1742,8 @@
|
||||
|
||||
"compute-scroll-into-view": ["compute-scroll-into-view@3.1.1", "", {}, "sha512-VRhuHOLoKYOy4UbilLbUzbYg93XLjv2PncJC50EuTWPA3gaja1UjBsUP/D/9/juV3vQFr6XBEzn9KCAHdUvOHw=="],
|
||||
|
||||
"concat-stream": ["concat-stream@2.0.0", "", { "dependencies": { "buffer-from": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.0.2", "typedarray": "^0.0.6" } }, "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A=="],
|
||||
|
||||
"concurrently": ["concurrently@9.2.1", "", { "dependencies": { "chalk": "4.1.2", "rxjs": "7.8.2", "shell-quote": "1.8.3", "supports-color": "8.1.1", "tree-kill": "1.2.2", "yargs": "17.7.2" }, "bin": { "concurrently": "dist/bin/concurrently.js", "conc": "dist/bin/concurrently.js" } }, "sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng=="],
|
||||
|
||||
"confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="],
|
||||
@@ -1758,8 +1794,6 @@
|
||||
|
||||
"csv-parse": ["csv-parse@5.6.0", "", {}, "sha512-l3nz3euub2QMg5ouu5U09Ew9Wf6/wQ8I++ch1loQ0ljmzhmfZYrH9fflS22i/PQEvsPvxCwxgz5q7UB8K1JO4Q=="],
|
||||
|
||||
"csv-parser": ["csv-parser@3.2.0", "", { "bin": { "csv-parser": "bin/csv-parser" } }, "sha512-fgKbp+AJbn1h2dcAHKIdKNSSjfp43BZZykXsCjzALjKy80VXQNHPFJ6T9Afwdzoj24aMkq8GwDS7KGcDPpejrA=="],
|
||||
|
||||
"d3-array": ["d3-array@3.2.4", "", { "dependencies": { "internmap": "1 - 2" } }, "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg=="],
|
||||
|
||||
"d3-color": ["d3-color@3.1.0", "", {}, "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA=="],
|
||||
@@ -1878,6 +1912,8 @@
|
||||
|
||||
"emoji-regex": ["emoji-regex@10.5.0", "", {}, "sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg=="],
|
||||
|
||||
"encoding-sniffer": ["encoding-sniffer@0.2.1", "", { "dependencies": { "iconv-lite": "^0.6.3", "whatwg-encoding": "^3.1.1" } }, "sha512-5gvq20T6vfpekVtqrYQsSCFZ1wEg5+wW0/QaZMWkFr6BqD3NfKs0rLCx4rrVlSWJeZb5NBJgVLswK/w2MWU+Gw=="],
|
||||
|
||||
"end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="],
|
||||
|
||||
"engine.io": ["engine.io@6.6.4", "", { "dependencies": { "@types/cors": "^2.8.12", "@types/node": ">=10.0.0", "accepts": "~1.3.4", "base64id": "2.0.0", "cookie": "~0.7.2", "cors": "~2.8.5", "debug": "~4.3.1", "engine.io-parser": "~5.2.1", "ws": "~8.17.1" } }, "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g=="],
|
||||
@@ -1978,8 +2014,6 @@
|
||||
|
||||
"fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
|
||||
|
||||
"fd-slicer": ["fd-slicer@1.1.0", "", { "dependencies": { "pend": "~1.2.0" } }, "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g=="],
|
||||
|
||||
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
|
||||
|
||||
"fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="],
|
||||
@@ -1990,6 +2024,8 @@
|
||||
|
||||
"figures": ["figures@3.2.0", "", { "dependencies": { "escape-string-regexp": "^1.0.5" } }, "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg=="],
|
||||
|
||||
"file-type": ["file-type@16.5.4", "", { "dependencies": { "readable-web-to-node-stream": "^3.0.0", "strtok3": "^6.2.4", "token-types": "^4.1.1" } }, "sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw=="],
|
||||
|
||||
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
|
||||
|
||||
"find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="],
|
||||
@@ -2134,7 +2170,7 @@
|
||||
|
||||
"husky": ["husky@9.1.7", "", { "bin": { "husky": "bin.js" } }, "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA=="],
|
||||
|
||||
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
|
||||
"iconv-lite": ["iconv-lite@0.7.0", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ=="],
|
||||
|
||||
"ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
|
||||
|
||||
@@ -2510,7 +2546,7 @@
|
||||
|
||||
"neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="],
|
||||
|
||||
"next": ["next@15.5.2", "", { "dependencies": { "@next/env": "15.5.2", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.2", "@next/swc-darwin-x64": "15.5.2", "@next/swc-linux-arm64-gnu": "15.5.2", "@next/swc-linux-arm64-musl": "15.5.2", "@next/swc-linux-x64-gnu": "15.5.2", "@next/swc-linux-x64-musl": "15.5.2", "@next/swc-win32-arm64-msvc": "15.5.2", "@next/swc-win32-x64-msvc": "15.5.2", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-H8Otr7abj1glFhbGnvUt3gz++0AF1+QoCXEBmd/6aKbfdFwrn0LpA836Ed5+00va/7HQSDD+mOoVhn3tNy3e/Q=="],
|
||||
"next": ["next@15.4.1", "", { "dependencies": { "@next/env": "15.4.1", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.4.1", "@next/swc-darwin-x64": "15.4.1", "@next/swc-linux-arm64-gnu": "15.4.1", "@next/swc-linux-arm64-musl": "15.4.1", "@next/swc-linux-x64-gnu": "15.4.1", "@next/swc-linux-x64-musl": "15.4.1", "@next/swc-win32-arm64-msvc": "15.4.1", "@next/swc-win32-x64-msvc": "15.4.1", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-eNKB1q8C7o9zXF8+jgJs2CzSLIU3T6bQtX6DcTnCq1sIR1CJ0GlSyRs1BubQi3/JgCnr9Vr+rS5mOMI38FFyQw=="],
|
||||
|
||||
"next-runtime-env": ["next-runtime-env@3.3.0", "", { "dependencies": { "next": "^14", "react": "^18" } }, "sha512-JgKVnog9mNbjbjH9csVpMnz2tB2cT5sLF+7O47i6Ze/s/GoiKdV7dHhJHk1gwXpo6h5qPj5PTzryldtSjvrHuQ=="],
|
||||
|
||||
@@ -2548,6 +2584,8 @@
|
||||
|
||||
"object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="],
|
||||
|
||||
"officeparser": ["officeparser@5.2.0", "", { "dependencies": { "@xmldom/xmldom": "^0.8.10", "concat-stream": "^2.0.0", "file-type": "^16.5.4", "node-ensure": "^0.0.0", "pdfjs-dist": "^5.3.31", "yauzl": "^3.1.3" }, "bin": { "officeparser": "officeParser.js" } }, "sha512-EGdHj4RgP5FtyTHsqgDz2ZXkV2q2o2Ktwk4ogHpVcRT1+udwb3pRLfmlNO9ZMDZtDhJz5qNIUAs/+ItrUWoHiQ=="],
|
||||
|
||||
"ollama-ai-provider": ["ollama-ai-provider@1.2.0", "", { "dependencies": { "@ai-sdk/provider": "^1.0.0", "@ai-sdk/provider-utils": "^2.0.0", "partial-json": "0.1.7" }, "peerDependencies": { "zod": "^3.0.0" }, "optionalPeers": ["zod"] }, "sha512-jTNFruwe3O/ruJeppI/quoOUxG7NA6blG3ZyQj3lei4+NnJo7bi3eIRWqlVpRlu/mbzbFXeJSBuYQWF6pzGKww=="],
|
||||
|
||||
"on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="],
|
||||
@@ -2576,12 +2614,18 @@
|
||||
|
||||
"pako": ["pako@1.0.11", "", {}, "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw=="],
|
||||
|
||||
"papaparse": ["papaparse@5.5.3", "", {}, "sha512-5QvjGxYVjxO59MGU2lHVYpRWBBtKHnlIAcSe1uNFCkkptUh63NFRj0FJQm7nR67puEruUci/ZkjmEFrjCAyP4A=="],
|
||||
|
||||
"parse-css-color": ["parse-css-color@0.2.1", "", { "dependencies": { "color-name": "^1.1.4", "hex-rgb": "^4.1.0" } }, "sha512-bwS/GGIFV3b6KS4uwpzCFj4w297Yl3uqnSgIPsoQkx7GMLROXfMnWvxfNkL0oh8HVhZA4hvJoEoEIqonfJ3BWg=="],
|
||||
|
||||
"parse-entities": ["parse-entities@4.0.2", "", { "dependencies": { "@types/unist": "^2.0.0", "character-entities-legacy": "^3.0.0", "character-reference-invalid": "^2.0.0", "decode-named-character-reference": "^1.0.0", "is-alphanumerical": "^2.0.0", "is-decimal": "^2.0.0", "is-hexadecimal": "^2.0.0" } }, "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw=="],
|
||||
|
||||
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
|
||||
|
||||
"parse5-htmlparser2-tree-adapter": ["parse5-htmlparser2-tree-adapter@7.1.0", "", { "dependencies": { "domhandler": "^5.0.3", "parse5": "^7.0.0" } }, "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g=="],
|
||||
|
||||
"parse5-parser-stream": ["parse5-parser-stream@7.1.2", "", { "dependencies": { "parse5": "^7.0.0" } }, "sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow=="],
|
||||
|
||||
"parseley": ["parseley@0.12.1", "", { "dependencies": { "leac": "^0.6.0", "peberminta": "^0.9.0" } }, "sha512-e6qHKe3a9HWr0oMRVDTRhKce+bRO8VGQR3NyVwcjwrbhMmFCX9KszEV35+rn4AdilFAq9VPxP/Fe1wC9Qjd2lw=="],
|
||||
|
||||
"partial-json": ["partial-json@0.1.7", "", {}, "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA=="],
|
||||
@@ -2600,10 +2644,14 @@
|
||||
|
||||
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
|
||||
|
||||
"pdf-parse": ["pdf-parse@1.1.1", "", { "dependencies": { "debug": "^3.1.0", "node-ensure": "^0.0.0" } }, "sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A=="],
|
||||
"pdf-lib": ["pdf-lib@1.17.1", "", { "dependencies": { "@pdf-lib/standard-fonts": "^1.0.0", "@pdf-lib/upng": "^1.0.1", "pako": "^1.0.11", "tslib": "^1.11.1" } }, "sha512-V/mpyJAoTsN4cnP31vc0wfNA1+p20evqqnap0KLoRUN0Yk/p3wN52DOEsL4oBFcLdb76hlpKPtzJIgo67j/XLw=="],
|
||||
|
||||
"pdfjs-dist": ["pdfjs-dist@5.4.54", "", { "optionalDependencies": { "@napi-rs/canvas": "^0.1.74" } }, "sha512-TBAiTfQw89gU/Z4LW98Vahzd2/LoCFprVGvGbTgFt+QCB1F+woyOPmNNVgLa6djX9Z9GGTnj7qE1UzpOVJiINw=="],
|
||||
|
||||
"peberminta": ["peberminta@0.9.0", "", {}, "sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ=="],
|
||||
|
||||
"peek-readable": ["peek-readable@4.1.0", "", {}, "sha512-ZI3LnwUv5nOGbQzD9c2iDG6toheuXSZP5esSHBjopsXH4dg19soufvpUGA3uohi5anFtGb2lhAVdHzH6R/Evvg=="],
|
||||
|
||||
"pend": ["pend@1.2.0", "", {}, "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="],
|
||||
|
||||
"pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="],
|
||||
@@ -2672,7 +2720,7 @@
|
||||
|
||||
"prismjs": ["prismjs@1.30.0", "", {}, "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw=="],
|
||||
|
||||
"process": ["process@0.10.1", "", {}, "sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA=="],
|
||||
"process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="],
|
||||
|
||||
"process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="],
|
||||
|
||||
@@ -2748,6 +2796,8 @@
|
||||
|
||||
"readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
|
||||
|
||||
"readable-web-to-node-stream": ["readable-web-to-node-stream@3.0.4", "", { "dependencies": { "readable-stream": "^4.7.0" } }, "sha512-9nX56alTf5bwXQ3ZDipHJhusu9NTQJ/CVPtb/XHAJCXihZeitfJvIRS4GqQ/mfIoOE3IelHMrpayVrosdHBuLw=="],
|
||||
|
||||
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
|
||||
|
||||
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
|
||||
@@ -2816,6 +2866,10 @@
|
||||
|
||||
"rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="],
|
||||
|
||||
"rtf-parser": ["rtf-parser@1.3.3", "", { "dependencies": { "iconv-lite": "^0.4.15", "readable-stream": "^2.2.2" } }, "sha512-sz2eb4tcCFtwVfs5Ei/l3JnSQGqpDv+drFuNz/zwn2tA24cL2WTuk2VMo2bA4IcRgkn38juAOri2hB9nv85u2Q=="],
|
||||
|
||||
"rtf-stream-parser": ["rtf-stream-parser@3.8.0", "", {}, "sha512-Hj+FWJ8IhywyxTy0/J1ZbPQLt1+2S8uagOwrU5u1WLBs2hSo0bOc4ZA06sWZHOypi22M60WAgkzUoTclWiqI2Q=="],
|
||||
|
||||
"run-async": ["run-async@2.4.1", "", {}, "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ=="],
|
||||
|
||||
"run-exclusive": ["run-exclusive@2.2.19", "", { "dependencies": { "minimal-polyfills": "^2.2.3" } }, "sha512-K3mdoAi7tjJ/qT7Flj90L7QyPozwUaAG+CVhkdDje4HLKXUYC3N/Jzkau3flHVDLQVhiHBtcimVodMjN9egYbA=="],
|
||||
@@ -2932,8 +2986,6 @@
|
||||
|
||||
"stdin-discarder": ["stdin-discarder@0.2.2", "", {}, "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ=="],
|
||||
|
||||
"streamsearch": ["streamsearch@1.1.0", "", {}, "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg=="],
|
||||
|
||||
"string-argv": ["string-argv@0.3.2", "", {}, "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q=="],
|
||||
|
||||
"string-template": ["string-template@0.2.1", "", {}, "sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw=="],
|
||||
@@ -2966,6 +3018,8 @@
|
||||
|
||||
"strnum": ["strnum@2.1.1", "", {}, "sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw=="],
|
||||
|
||||
"strtok3": ["strtok3@6.3.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^4.1.0" } }, "sha512-fZtbhtvI9I48xDSywd/somNqgUHl2L2cstmXCCif0itOf96jeW18MBSyrLuNicYQVkvpOxkZtkzujiTJ9LW5Jw=="],
|
||||
|
||||
"style-to-js": ["style-to-js@1.1.17", "", { "dependencies": { "style-to-object": "1.0.9" } }, "sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA=="],
|
||||
|
||||
"style-to-object": ["style-to-object@1.0.9", "", { "dependencies": { "inline-style-parser": "0.2.4" } }, "sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw=="],
|
||||
@@ -3038,6 +3092,8 @@
|
||||
|
||||
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
|
||||
|
||||
"token-types": ["token-types@4.2.1", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-6udB24Q737UD/SDsKAHI9FCRP7Bqc9D/MQUV02ORQg5iskjtLJlZJNdN4kKtcdtwCeWIwIHDGaUsTsCCAa8sFQ=="],
|
||||
|
||||
"tough-cookie": ["tough-cookie@5.1.2", "", { "dependencies": { "tldts": "^6.1.32" } }, "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A=="],
|
||||
|
||||
"tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="],
|
||||
@@ -3074,6 +3130,8 @@
|
||||
|
||||
"type-fest": ["type-fest@0.7.1", "", {}, "sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg=="],
|
||||
|
||||
"typedarray": ["typedarray@0.0.6", "", {}, "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA=="],
|
||||
|
||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
||||
|
||||
"ufo": ["ufo@1.6.1", "", {}, "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA=="],
|
||||
@@ -3084,6 +3142,8 @@
|
||||
|
||||
"underscore": ["underscore@1.13.7", "", {}, "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g=="],
|
||||
|
||||
"undici": ["undici@7.15.0", "", {}, "sha512-7oZJCPvvMvTd0OlqWsIxTuItTpJBpU1tcbVl24FMn3xt3+VSunwUasmfPJRE57oNO1KsZ4PgA1xTdAX4hq8NyQ=="],
|
||||
|
||||
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
|
||||
|
||||
"unfetch": ["unfetch@4.2.0", "", {}, "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA=="],
|
||||
@@ -3170,8 +3230,6 @@
|
||||
|
||||
"word": ["word@0.3.0", "", {}, "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA=="],
|
||||
|
||||
"word-extractor": ["word-extractor@1.0.4", "", { "dependencies": { "saxes": "^5.0.1", "yauzl": "^2.10.0" } }, "sha512-PyAGZQ2gjnVA5kcZAOAxoYciCMaAvu0dbVlw/zxHphhy+3be8cDeYKHJPO8iedIM3Sx0arA/ugKTJyXhZNgo6g=="],
|
||||
|
||||
"wrap-ansi": ["wrap-ansi@6.2.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA=="],
|
||||
|
||||
"wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
|
||||
@@ -3204,7 +3262,7 @@
|
||||
|
||||
"yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
|
||||
|
||||
"yauzl": ["yauzl@2.10.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "fd-slicer": "~1.1.0" } }, "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g=="],
|
||||
"yauzl": ["yauzl@3.2.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "pend": "~1.2.0" } }, "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w=="],
|
||||
|
||||
"yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
|
||||
|
||||
@@ -3272,6 +3330,8 @@
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { "@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="],
|
||||
|
||||
"@inquirer/external-editor/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
|
||||
|
||||
"@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="],
|
||||
|
||||
"@isaacs/cliui/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
|
||||
@@ -3502,8 +3562,6 @@
|
||||
|
||||
"@react-email/preview-server/framer-motion": ["framer-motion@12.7.5", "", { "dependencies": { "motion-dom": "^12.7.5", "motion-utils": "^12.7.5", "tslib": "^2.4.0" }, "peerDependencies": { "@emotion/is-prop-valid": "*", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0" }, "optionalPeers": ["@emotion/is-prop-valid", "react", "react-dom"] }, "sha512-iD+vBOLn8E8bwBAFUQ1DYXjivm+cGGPgQUQ4Doleq7YP/zHdozUVwAMBJwOOfCTbtM8uOooMi77noD261Kxiyw=="],
|
||||
|
||||
"@react-email/preview-server/next": ["next@15.4.1", "", { "dependencies": { "@next/env": "15.4.1", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.4.1", "@next/swc-darwin-x64": "15.4.1", "@next/swc-linux-arm64-gnu": "15.4.1", "@next/swc-linux-arm64-musl": "15.4.1", "@next/swc-linux-x64-gnu": "15.4.1", "@next/swc-linux-x64-musl": "15.4.1", "@next/swc-win32-arm64-msvc": "15.4.1", "@next/swc-win32-x64-msvc": "15.4.1", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-eNKB1q8C7o9zXF8+jgJs2CzSLIU3T6bQtX6DcTnCq1sIR1CJ0GlSyRs1BubQi3/JgCnr9Vr+rS5mOMI38FFyQw=="],
|
||||
|
||||
"@react-email/preview-server/sharp": ["sharp@0.34.1", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.7.1" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.1", "@img/sharp-darwin-x64": "0.34.1", "@img/sharp-libvips-darwin-arm64": "1.1.0", "@img/sharp-libvips-darwin-x64": "1.1.0", "@img/sharp-libvips-linux-arm": "1.1.0", "@img/sharp-libvips-linux-arm64": "1.1.0", "@img/sharp-libvips-linux-ppc64": "1.1.0", "@img/sharp-libvips-linux-s390x": "1.1.0", "@img/sharp-libvips-linux-x64": "1.1.0", "@img/sharp-libvips-linuxmusl-arm64": "1.1.0", "@img/sharp-libvips-linuxmusl-x64": "1.1.0", "@img/sharp-linux-arm": "0.34.1", "@img/sharp-linux-arm64": "0.34.1", "@img/sharp-linux-s390x": "0.34.1", "@img/sharp-linux-x64": "0.34.1", "@img/sharp-linuxmusl-arm64": "0.34.1", "@img/sharp-linuxmusl-x64": "0.34.1", "@img/sharp-wasm32": "0.34.1", "@img/sharp-win32-ia32": "0.34.1", "@img/sharp-win32-x64": "0.34.1" } }, "sha512-1j0w61+eVxu7DawFJtnfYcvSv6qPFvfTaqzTQ2BLknVhHTwGS8sc63ZBF4rzkWMBVKybo4S5OBtDdZahh2A1xg=="],
|
||||
|
||||
"@react-email/preview-server/tailwind-merge": ["tailwind-merge@3.2.0", "", {}, "sha512-FQT/OVqCD+7edmmJpsgCsY820RTD5AkBryuG5IUqR5YQZSdj5xlH5nLgH7YPths7WsLPSpSBNneJdM8aS8aeFA=="],
|
||||
@@ -3612,12 +3670,16 @@
|
||||
|
||||
"@types/cors/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/iconv-lite/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/jsdom/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/mysql/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/node-fetch/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/papaparse/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/pg/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/tedious/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
@@ -3626,8 +3688,6 @@
|
||||
|
||||
"@types/webpack/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@types/word-extractor/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"@vitejs/plugin-react/@babel/core": ["@babel/core@7.28.3", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.28.3", "@babel/helpers": "^7.28.3", "@babel/parser": "^7.28.3", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.3", "@babel/types": "^7.28.2", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ=="],
|
||||
|
||||
"accepts/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
|
||||
@@ -3648,12 +3708,18 @@
|
||||
|
||||
"chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
|
||||
|
||||
"cheerio/htmlparser2": ["htmlparser2@10.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.2.1", "entities": "^6.0.0" } }, "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g=="],
|
||||
|
||||
"cli-truncate/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="],
|
||||
|
||||
"cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
|
||||
|
||||
"concat-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
"encoding-sniffer/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"engine.io/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"engine.io/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="],
@@ -3688,6 +3754,8 @@
"groq-sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
"hexer/process": ["process@0.10.1", "", {}, "sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA=="],
"hoist-non-react-statics/react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],
"htmlparser2/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
@@ -3730,12 +3798,12 @@
"micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"mysql2/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"named-placeholders/lru-cache": ["lru-cache@7.18.3", "", {}, "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA=="],
"next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
"next-runtime-env/next": ["next@14.2.32", "", { "dependencies": { "@next/env": "14.2.32", "@swc/helpers": "0.5.5", "busboy": "1.6.0", "caniuse-lite": "^1.0.30001579", "graceful-fs": "^4.2.11", "postcss": "8.4.31", "styled-jsx": "5.1.1" }, "optionalDependencies": { "@next/swc-darwin-arm64": "14.2.32", "@next/swc-darwin-x64": "14.2.32", "@next/swc-linux-arm64-gnu": "14.2.32", "@next/swc-linux-arm64-musl": "14.2.32", "@next/swc-linux-x64-gnu": "14.2.32", "@next/swc-linux-x64-musl": "14.2.32", "@next/swc-win32-arm64-msvc": "14.2.32", "@next/swc-win32-ia32-msvc": "14.2.32", "@next/swc-win32-x64-msvc": "14.2.32" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.41.2", "react": "^18.2.0", "react-dom": "^18.2.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-fg5g0GZ7/nFc09X8wLe6pNSU8cLWbLRG3TZzPJ1BJvi2s9m7eF991se67wliM9kR5yLHRkyGKU49MMx58s3LJg=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"nypm/pkg-types": ["pkg-types@2.3.0", "", { "dependencies": { "confbox": "^0.2.2", "exsolve": "^1.0.7", "pathe": "^2.0.3" } }, "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig=="],
@@ -3748,7 +3816,7 @@
"parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],
"pdf-parse/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="],
"pdf-lib/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="],
"playwright/fsevents": ["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="],
@@ -3770,12 +3838,16 @@
"react-email/ora": ["ora@8.2.0", "", { "dependencies": { "chalk": "^5.3.0", "cli-cursor": "^5.0.0", "cli-spinners": "^2.9.2", "is-interactive": "^2.0.0", "is-unicode-supported": "^2.0.0", "log-symbols": "^6.0.0", "stdin-discarder": "^0.2.2", "string-width": "^7.2.0", "strip-ansi": "^7.1.0" } }, "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw=="],
"readable-web-to-node-stream/readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="],
"resend/@react-email/render": ["@react-email/render@1.1.2", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3", "react-promise-suspense": "^0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-RnRehYN3v9gVlNMehHPHhyp2RQo7+pSkHDtXPvg3s0GbzM9SQMW4Qrf8GRNvtpLC4gsI+Wt0VatNRUFqjvevbw=="],
"restore-cursor/onetime": ["onetime@5.1.2", "", { "dependencies": { "mimic-fn": "^2.1.0" } }, "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg=="],
"restore-cursor/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="],
"rtf-parser/iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
"sim/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"sim/lucide-react": ["lucide-react@0.479.0", "", { "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-aBhNnveRhorBOK7uA4gDjgaf+YlHMdMhQ/3cupk6exM10hWlEU+2QtWYOfhXhjAsmdb6LeKR+NZnow4UxRRiTQ=="],
@@ -3828,7 +3900,7 @@
"webpack/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"word-extractor/saxes": ["saxes@5.0.1", "", { "dependencies": { "xmlchars": "^2.2.0" } }, "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw=="],
"whatwg-encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"@anthropic-ai/sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
@@ -4080,28 +4152,6 @@
"@react-email/preview-server/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.0", "", { "os": "win32", "cpu": "x64" }, "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ=="],
"@react-email/preview-server/next/@next/env": ["@next/env@15.4.1", "", {}, "sha512-DXQwFGAE2VH+f2TJsKepRXpODPU+scf5fDbKOME8MMyeyswe4XwgRdiiIYmBfkXU+2ssliLYznajTrOQdnLR5A=="],
"@react-email/preview-server/next/@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.4.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-L+81yMsiHq82VRXS2RVq6OgDwjvA4kDksGU8hfiDHEXP+ncKIUhUsadAVB+MRIp2FErs/5hpXR0u2eluWPAhig=="],
"@react-email/preview-server/next/@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.4.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-jfz1RXu6SzL14lFl05/MNkcN35lTLMJWPbqt7Xaj35+ZWAX342aePIJrN6xBdGeKl6jPXJm0Yqo3Xvh3Gpo3Uw=="],
"@react-email/preview-server/next/@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-k0tOFn3dsnkaGfs6iQz8Ms6f1CyQe4GacXF979sL8PNQxjYS1swx9VsOyUQYaPoGV8nAZ7OX8cYaeiXGq9ahPQ=="],
"@react-email/preview-server/next/@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-4ogGQ/3qDzbbK3IwV88ltihHFbQVq6Qr+uEapzXHXBH1KsVBZOB50sn6BWHPcFjwSoMX2Tj9eH/fZvQnSIgc3g=="],
"@react-email/preview-server/next/@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-Jj0Rfw3wIgp+eahMz/tOGwlcYYEFjlBPKU7NqoOkTX0LY45i5W0WcDpgiDWSLrN8KFQq/LW7fZq46gxGCiOYlQ=="],
"@react-email/preview-server/next/@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-9WlEZfnw1vFqkWsTMzZDgNL7AUI1aiBHi0S2m8jvycPyCq/fbZjtE/nDkhJRYbSjXbtRHYLDBlmP95kpjEmJbw=="],
"@react-email/preview-server/next/@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.4.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-WodRbZ9g6CQLRZsG3gtrA9w7Qfa9BwDzhFVdlI6sV0OCPq9JrOrJSp9/ioLsezbV8w9RCJ8v55uzJuJ5RgWLZg=="],
"@react-email/preview-server/next/@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.4.1", "", { "os": "win32", "cpu": "x64" }, "sha512-y+wTBxelk2xiNofmDOVU7O5WxTHcvOoL3srOM0kxTzKDjQ57kPU0tpnPJ/BWrRnsOwXEv0+3QSbGR7hY4n9LkQ=="],
"@react-email/preview-server/next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
"@react-email/preview-server/next/sharp": ["sharp@0.34.3", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.4", "semver": "^7.7.2" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.3", "@img/sharp-darwin-x64": "0.34.3", "@img/sharp-libvips-darwin-arm64": "1.2.0", "@img/sharp-libvips-darwin-x64": "1.2.0", "@img/sharp-libvips-linux-arm": "1.2.0", "@img/sharp-libvips-linux-arm64": "1.2.0", "@img/sharp-libvips-linux-ppc64": "1.2.0", "@img/sharp-libvips-linux-s390x": "1.2.0", "@img/sharp-libvips-linux-x64": "1.2.0", "@img/sharp-libvips-linuxmusl-arm64": "1.2.0", "@img/sharp-libvips-linuxmusl-x64": "1.2.0", "@img/sharp-linux-arm": "0.34.3", "@img/sharp-linux-arm64": "0.34.3", "@img/sharp-linux-ppc64": "0.34.3", "@img/sharp-linux-s390x": "0.34.3", "@img/sharp-linux-x64": "0.34.3", "@img/sharp-linuxmusl-arm64": "0.34.3", "@img/sharp-linuxmusl-x64": "0.34.3", "@img/sharp-wasm32": "0.34.3", "@img/sharp-win32-arm64": "0.34.3", "@img/sharp-win32-ia32": "0.34.3", "@img/sharp-win32-x64": "0.34.3" } }, "sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg=="],
"@react-email/preview-server/sharp/@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.1.0" }, "os": "darwin", "cpu": "arm64" }, "sha512-pn44xgBtgpEbZsu+lWf2KNb6OAf70X68k+yk69Ic2Xz11zHR/w24/U49XT7AeRwJ0Px+mhALhU5LPci1Aymk7A=="],
"@react-email/preview-server/sharp/@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.1.0" }, "os": "darwin", "cpu": "x64" }, "sha512-VfuYgG2r8BpYiOUN+BfYeFo69nP/MIwAtSJ7/Zpxc5QF3KS22z8Pvg3FkrSFJBPNQ7mmcUcYQFBmEQp7eu1F8Q=="],
@@ -4202,12 +4252,16 @@
"@types/cors/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/iconv-lite/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/jsdom/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/mysql/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/node-fetch/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/papaparse/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/pg/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/tedious/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
@@ -4216,8 +4270,6 @@
"@types/webpack/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/word-extractor/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@vitejs/plugin-react/@babel/core/@babel/parser": ["@babel/parser@7.28.3", "", { "dependencies": { "@babel/types": "^7.28.2" }, "bin": "./bin/babel-parser.js" }, "sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA=="],
"@vitejs/plugin-react/@babel/core/@babel/traverse": ["@babel/traverse@7.28.3", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.3", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.3", "@babel/template": "^7.27.2", "@babel/types": "^7.28.2", "debug": "^4.3.1" } }, "sha512-7w4kZYHneL3A6NP2nxzHvT3HCZ7puDZZjFMqDpBPECub79sTtSO5CGXDkKrTQq8ksAwfD/XI2MRFX23njdDaIQ=="],
@@ -4268,30 +4320,6 @@
"log-update/wrap-ansi/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="],
"next-runtime-env/next/@next/env": ["@next/env@14.2.32", "", {}, "sha512-n9mQdigI6iZ/DF6pCTwMKeWgF2e8lg7qgt5M7HXMLtyhZYMnf/u905M18sSpPmHL9MKp9JHo56C6jrD2EvWxng=="],
"next-runtime-env/next/@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@14.2.32", "", { "os": "darwin", "cpu": "arm64" }, "sha512-osHXveM70zC+ilfuFa/2W6a1XQxJTvEhzEycnjUaVE8kpUS09lDpiDDX2YLdyFCzoUbvbo5r0X1Kp4MllIOShw=="],
"next-runtime-env/next/@next/swc-darwin-x64": ["@next/swc-darwin-x64@14.2.32", "", { "os": "darwin", "cpu": "x64" }, "sha512-P9NpCAJuOiaHHpqtrCNncjqtSBi1f6QUdHK/+dNabBIXB2RUFWL19TY1Hkhu74OvyNQEYEzzMJCMQk5agjw1Qg=="],
"next-runtime-env/next/@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@14.2.32", "", { "os": "linux", "cpu": "arm64" }, "sha512-v7JaO0oXXt6d+cFjrrKqYnR2ubrD+JYP7nQVRZgeo5uNE5hkCpWnHmXm9vy3g6foMO8SPwL0P3MPw1c+BjbAzA=="],
"next-runtime-env/next/@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@14.2.32", "", { "os": "linux", "cpu": "arm64" }, "sha512-tA6sIKShXtSJBTH88i0DRd6I9n3ZTirmwpwAqH5zdJoQF7/wlJXR8DkPmKwYl5mFWhEKr5IIa3LfpMW9RRwKmQ=="],
"next-runtime-env/next/@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@14.2.32", "", { "os": "linux", "cpu": "x64" }, "sha512-7S1GY4TdnlGVIdeXXKQdDkfDysoIVFMD0lJuVVMeb3eoVjrknQ0JNN7wFlhCvea0hEk0Sd4D1hedVChDKfV2jw=="],
"next-runtime-env/next/@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@14.2.32", "", { "os": "linux", "cpu": "x64" }, "sha512-OHHC81P4tirVa6Awk6eCQ6RBfWl8HpFsZtfEkMpJ5GjPsJ3nhPe6wKAJUZ/piC8sszUkAgv3fLflgzPStIwfWg=="],
"next-runtime-env/next/@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@14.2.32", "", { "os": "win32", "cpu": "arm64" }, "sha512-rORQjXsAFeX6TLYJrCG5yoIDj+NKq31Rqwn8Wpn/bkPNy5rTHvOXkW8mLFonItS7QC6M+1JIIcLe+vOCTOYpvg=="],
"next-runtime-env/next/@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@14.2.32", "", { "os": "win32", "cpu": "x64" }, "sha512-2N0lSoU4GjfLSO50wvKpMQgKd4HdI2UHEhQPPPnlgfBJlOgJxkjpkYBqzk08f1gItBB6xF/n+ykso2hgxuydsA=="],
"next-runtime-env/next/@swc/helpers": ["@swc/helpers@0.5.5", "", { "dependencies": { "@swc/counter": "^0.1.3", "tslib": "^2.4.0" } }, "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A=="],
"next-runtime-env/next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
"next-runtime-env/next/styled-jsx": ["styled-jsx@5.1.1", "", { "dependencies": { "client-only": "0.0.1" }, "peerDependencies": { "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" } }, "sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw=="],
"nypm/pkg-types/confbox": ["confbox@0.2.2", "", {}, "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ=="],
"openai/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
@@ -4314,6 +4342,10 @@
"react-email/ora/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
"readable-web-to-node-stream/readable-stream/buffer": ["buffer@6.0.3", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA=="],
"readable-web-to-node-stream/readable-stream/string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
"resend/@react-email/render/prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
"restore-cursor/onetime/mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="],
@@ -4460,6 +4492,8 @@
"react-email/ora/strip-ansi/ansi-regex": ["ansi-regex@6.2.0", "", {}, "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg=="],
"readable-web-to-node-stream/readable-stream/string_decoder/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
"sim/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"sim/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
@@ -27,7 +27,9 @@
},
"overrides": {
"react": "19.1.0",
"react-dom": "19.1.0"
"react-dom": "19.1.0",
"next": "^15.4.1",
"@next/env": "^15.4.1"
},
"dependencies": {
"@linear/sdk": "40.0.0",
@@ -40,9 +42,7 @@
},
"devDependencies": {
"@biomejs/biome": "2.0.0-beta.5",
"@next/env": "^15.3.2",
"@types/word-extractor": "1.0.6",
"dotenv-cli": "^8.0.0",
"@next/env": "^15.4.1",
"husky": "9.1.7",
"lint-staged": "16.0.0",
"turbo": "2.5.6"