Compare commits


9 Commits

Author SHA1 Message Date
Waleed
ee17cf461a v0.3.43: added additional parsers, mysql block improvements, billing fixes, permission fixes 2025-08-31 01:01:24 -07:00
Waleed
43cb124d97 fix(parsers): fix md, pptx, html kb uploads (#1209)
* fix md, pptx, html

* consolidate consts
2025-08-31 00:52:42 -07:00
Waleed
76889fde26 fix(permissions): remove permissions granted by org membership (#1206)
* fix(permissions): remove cross-functional permissions granted by org membership

* code hygiene
2025-08-30 18:14:01 -07:00
Vikhyath Mondreti
7780d9b32b fix(enterprise-billing): simplification to be fixed-cost (#1196)
* fix(enterprise-billing): simplify

* conceptual improvement

* add seats to enterprise sub meta

* correct type

* fix UI

* send emails to new enterprise users

* fix fallback

* fix merge conflict issue

---------

Co-authored-by: waleedlatif1 <walif6@gmail.com>
2025-08-30 17:26:17 -07:00
Waleed
4a703a02cb improvement(tools): update mysql to respect ssl pref (#1205) 2025-08-30 13:48:39 -07:00
Waleed
a969d09782 feat(parsers): added pptx, md, & html parsers (#1202)
* feat(parsers): added pptx, md, & html parsers

* ack PR comments

* file renaming, reorganization
2025-08-30 02:11:01 -07:00
Waleed
0bc778130f v0.3.42: kb config defaults, downgrade nextjs 2025-08-29 21:51:00 -07:00
Waleed
df3d532495 fix(deps): downgrade nextjs (#1200) 2025-08-29 21:44:51 -07:00
Waleed
f4f8fc051e improvement(kb): add fallbacks for kb configs (#1199) 2025-08-29 21:09:09 -07:00
97 changed files with 2115 additions and 1483 deletions

View File

@@ -2,7 +2,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getSimplifiedBillingSummary } from '@/lib/billing/core/billing'
import { getOrganizationBillingData } from '@/lib/billing/core/organization-billing'
import { getOrganizationBillingData } from '@/lib/billing/core/organization'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { member, userStats } from '@/db/schema'

View File

@@ -12,9 +12,9 @@ import {
import { getCopilotModel } from '@/lib/copilot/config'
import type { CopilotProviderConfig } from '@/lib/copilot/types'
import { env } from '@/lib/env'
import { generateChatTitle } from '@/lib/generate-chat-title'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateChatTitle } from '@/lib/sim-agent/utils'
import { createFileContent, isSupportedFileType } from '@/lib/uploads/file-utils'
import { S3_COPILOT_CONFIG } from '@/lib/uploads/setup'
import { downloadFile, getStorageProvider } from '@/lib/uploads/storage-client'

View File

@@ -76,11 +76,9 @@ export async function POST(request: NextRequest) {
logger.info('File parse request received:', { filePath, fileType })
// Handle multiple files
if (Array.isArray(filePath)) {
const results = []
for (const path of filePath) {
// Skip empty or invalid paths
if (!path || (typeof path === 'string' && path.trim() === '')) {
results.push({
success: false,
@@ -91,12 +89,10 @@ export async function POST(request: NextRequest) {
}
const result = await parseFileSingle(path, fileType)
// Add processing time to metadata
if (result.metadata) {
result.metadata.processingTime = Date.now() - startTime
}
// Transform each result to match expected frontend format
if (result.success) {
results.push({
success: true,
@@ -105,7 +101,7 @@ export async function POST(request: NextRequest) {
name: result.filePath.split('/').pop() || 'unknown',
fileType: result.metadata?.fileType || 'application/octet-stream',
size: result.metadata?.size || 0,
binary: false, // We only return text content
binary: false,
},
filePath: result.filePath,
})
@@ -120,15 +116,12 @@ export async function POST(request: NextRequest) {
})
}
// Handle single file
const result = await parseFileSingle(filePath, fileType)
// Add processing time to metadata
if (result.metadata) {
result.metadata.processingTime = Date.now() - startTime
}
// Transform single file result to match expected frontend format
if (result.success) {
return NextResponse.json({
success: true,
@@ -142,8 +135,6 @@ export async function POST(request: NextRequest) {
})
}
// Only return 500 for actual server errors, not file processing failures
// File processing failures (like file not found, parsing errors) should return 200 with success:false
return NextResponse.json(result)
} catch (error) {
logger.error('Error in file parse API:', error)
@@ -164,7 +155,6 @@ export async function POST(request: NextRequest) {
async function parseFileSingle(filePath: string, fileType?: string): Promise<ParseResult> {
logger.info('Parsing file:', filePath)
// Validate that filePath is not empty
if (!filePath || filePath.trim() === '') {
return {
success: false,
@@ -173,7 +163,6 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
}
}
// Validate path for security before any processing
const pathValidation = validateFilePath(filePath)
if (!pathValidation.isValid) {
return {
@@ -183,49 +172,40 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
}
}
// Check if this is an external URL
if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
return handleExternalUrl(filePath, fileType)
}
// Check if this is a cloud storage path (S3 or Blob)
const isS3Path = filePath.includes('/api/files/serve/s3/')
const isBlobPath = filePath.includes('/api/files/serve/blob/')
// Use cloud handler if it's a cloud path or we're in cloud mode
if (isS3Path || isBlobPath || isUsingCloudStorage()) {
return handleCloudFile(filePath, fileType)
}
// Use local handler for local files
return handleLocalFile(filePath, fileType)
}
/**
* Validate file path for security
* Validate file path for security - prevents null byte injection and path traversal attacks
*/
function validateFilePath(filePath: string): { isValid: boolean; error?: string } {
// Check for null bytes
if (filePath.includes('\0')) {
return { isValid: false, error: 'Invalid path: null byte detected' }
}
// Check for path traversal attempts
if (filePath.includes('..')) {
return { isValid: false, error: 'Access denied: path traversal detected' }
}
// Check for tilde characters (home directory access)
if (filePath.includes('~')) {
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
}
// Check for absolute paths outside allowed directories
if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
return { isValid: false, error: 'Path outside allowed directory' }
}
// Check for Windows absolute paths
if (/^[A-Za-z]:\\/.test(filePath)) {
return { isValid: false, error: 'Path outside allowed directory' }
}
@@ -260,12 +240,10 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)
// Extract filename from URL
const urlPath = new URL(url).pathname
const filename = urlPath.split('/').pop() || 'download'
const extension = path.extname(filename).toLowerCase().substring(1)
// Process the file based on its content type
if (extension === 'pdf') {
return await handlePdfBuffer(buffer, filename, fileType, url)
}
@@ -276,7 +254,6 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
}
// For binary or unknown files
return handleGenericBuffer(buffer, filename, extension, fileType)
} catch (error) {
logger.error(`Error handling external URL ${url}:`, error)
@@ -289,35 +266,29 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
}
/**
* Handle file stored in cloud storage (S3 or Azure Blob)
* Handle file stored in cloud storage
*/
async function handleCloudFile(filePath: string, fileType?: string): Promise<ParseResult> {
try {
// Extract the cloud key from the path
let cloudKey: string
if (filePath.includes('/api/files/serve/s3/')) {
cloudKey = decodeURIComponent(filePath.split('/api/files/serve/s3/')[1])
} else if (filePath.includes('/api/files/serve/blob/')) {
cloudKey = decodeURIComponent(filePath.split('/api/files/serve/blob/')[1])
} else if (filePath.startsWith('/api/files/serve/')) {
// Backwards-compatibility: path like "/api/files/serve/<key>"
cloudKey = decodeURIComponent(filePath.substring('/api/files/serve/'.length))
} else {
// Assume raw key provided
cloudKey = filePath
}
logger.info('Extracted cloud key:', cloudKey)
// Download the file from cloud storage - this can throw for access errors
const fileBuffer = await downloadFile(cloudKey)
logger.info(`Downloaded file from cloud storage: ${cloudKey}, size: ${fileBuffer.length} bytes`)
// Extract the filename from the cloud key
const filename = cloudKey.split('/').pop() || cloudKey
const extension = path.extname(filename).toLowerCase().substring(1)
// Process the file based on its content type
if (extension === 'pdf') {
return await handlePdfBuffer(fileBuffer, filename, fileType, filePath)
}
@@ -325,22 +296,19 @@ async function handleCloudFile(filePath: string, fileType?: string): Promise<Par
return await handleCsvBuffer(fileBuffer, filename, fileType, filePath)
}
if (isSupportedFileType(extension)) {
// For other supported types that we have parsers for
return await handleGenericTextBuffer(fileBuffer, filename, extension, fileType, filePath)
}
// For binary or unknown files
return handleGenericBuffer(fileBuffer, filename, extension, fileType)
} catch (error) {
logger.error(`Error handling cloud file ${filePath}:`, error)
// Check if this is a download/access error that should trigger a 500 response
// For download/access errors, throw to trigger 500 response
const errorMessage = (error as Error).message
if (errorMessage.includes('Access denied') || errorMessage.includes('Forbidden')) {
// For access errors, throw to trigger 500 response
throw new Error(`Error accessing file from cloud storage: ${errorMessage}`)
}
// For other errors (parsing, processing), return success:false
// For other errors (parsing, processing), return success:false and an error message
return {
success: false,
error: `Error accessing file from cloud storage: ${errorMessage}`,
@@ -354,28 +322,23 @@ async function handleCloudFile(filePath: string, fileType?: string): Promise<Par
*/
async function handleLocalFile(filePath: string, fileType?: string): Promise<ParseResult> {
try {
// Extract filename from path
const filename = filePath.split('/').pop() || filePath
const fullPath = path.join(UPLOAD_DIR_SERVER, filename)
logger.info('Processing local file:', fullPath)
// Check if file exists
try {
await fsPromises.access(fullPath)
} catch {
throw new Error(`File not found: ${filename}`)
}
// Parse the file directly
const result = await parseFile(fullPath)
// Get file stats for metadata
const stats = await fsPromises.stat(fullPath)
const fileBuffer = await readFile(fullPath)
const hash = createHash('md5').update(fileBuffer).digest('hex')
// Extract file extension for type detection
const extension = path.extname(filename).toLowerCase().substring(1)
return {
@@ -386,7 +349,7 @@ async function handleLocalFile(filePath: string, fileType?: string): Promise<Par
fileType: fileType || getMimeType(extension),
size: stats.size,
hash,
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
@@ -425,15 +388,14 @@ async function handlePdfBuffer(
fileType: fileType || 'application/pdf',
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
logger.error('Failed to parse PDF in memory:', error)
// Create fallback message for PDF parsing failure
const content = createPdfFailureMessage(
0, // We can't determine page count without parsing
0,
fileBuffer.length,
originalPath || filename,
(error as Error).message
@@ -447,7 +409,7 @@ async function handlePdfBuffer(
fileType: fileType || 'application/pdf',
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -465,7 +427,6 @@ async function handleCsvBuffer(
try {
logger.info(`Parsing CSV in memory: ${filename}`)
// Use the parseBuffer function from our library
const { parseBuffer } = await import('@/lib/file-parsers')
const result = await parseBuffer(fileBuffer, 'csv')
@@ -477,7 +438,7 @@ async function handleCsvBuffer(
fileType: fileType || 'text/csv',
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
@@ -490,7 +451,7 @@ async function handleCsvBuffer(
fileType: 'text/csv',
size: 0,
hash: '',
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -509,7 +470,6 @@ async function handleGenericTextBuffer(
try {
logger.info(`Parsing text file in memory: ${filename}`)
// Try to use a specialized parser if available
try {
const { parseBuffer, isSupportedFileType } = await import('@/lib/file-parsers')
@@ -524,7 +484,7 @@ async function handleGenericTextBuffer(
fileType: fileType || getMimeType(extension),
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -532,7 +492,6 @@ async function handleGenericTextBuffer(
logger.warn('Specialized parser failed, falling back to generic parsing:', parserError)
}
// Fallback to generic text parsing
const content = fileBuffer.toString('utf-8')
return {
@@ -543,7 +502,7 @@ async function handleGenericTextBuffer(
fileType: fileType || getMimeType(extension),
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
@@ -556,7 +515,7 @@ async function handleGenericTextBuffer(
fileType: 'text/plain',
size: 0,
hash: '',
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -584,7 +543,7 @@ function handleGenericBuffer(
fileType: fileType || getMimeType(extension),
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -594,8 +553,6 @@ function handleGenericBuffer(
*/
async function parseBufferAsPdf(buffer: Buffer) {
try {
// Import parsers dynamically to avoid initialization issues in tests
// First try to use the main PDF parser
try {
const { PdfParser } = await import('@/lib/file-parsers/pdf-parser')
const parser = new PdfParser()
@@ -606,7 +563,6 @@ async function parseBufferAsPdf(buffer: Buffer) {
}
throw new Error('PDF parser does not support buffer parsing')
} catch (error) {
// Fallback to raw PDF parser
logger.warn('Main PDF parser failed, using raw parser for buffer:', error)
const { RawPdfParser } = await import('@/lib/file-parsers/raw-pdf-parser')
const rawParser = new RawPdfParser()
@@ -655,7 +611,7 @@ Please use a PDF viewer for best results.`
}
/**
* Create error message for PDF parsing failure
* Create error message for PDF parsing failure and make it more readable
*/
function createPdfFailureMessage(
pageCount: number,
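Taken together, the parse-route hunks above pin down an error contract: file-level parse failures return HTTP 200 with success:false, while storage access errors (Access denied, Forbidden) are rethrown and surface as 500. A minimal client-side sketch of that contract; the endpoint path and the exact response fields are assumptions pieced together from the handlers shown:

```ts
// Hypothetical caller for the parse route above. The '/api/files/parse'
// path and the response fields are assumptions inferred from this diff.
interface ParseApiResponse {
  success: boolean
  output?: { content: string; name: string; fileType: string; size: number; binary: boolean }
  error?: string
}

async function parseUploadedFile(filePath: string, fileType?: string): Promise<ParseApiResponse> {
  const res = await fetch('/api/files/parse', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ filePath, fileType }),
  })
  if (!res.ok) {
    // 500: storage access or other server error (e.g. a forbidden cloud download)
    throw new Error(`Parse API failed with status ${res.status}`)
  }
  const result: ParseApiResponse = await res.json()
  if (!result.success) {
    // 200 with success:false: the file itself could not be parsed
    console.warn('File could not be parsed:', result.error)
  }
  return result
}
```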

View File

@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { SUPPORTED_FIELD_TYPES } from '@/lib/constants/knowledge'
import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/consts'
import {
cleanupUnusedTagDefinitions,
createOrUpdateTagDefinitionsBulk,
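Several files in this diff repoint imports from '@/lib/constants/knowledge' to '@/lib/knowledge/consts' (the "consolidate consts" commit). A sketch of what the consolidated module plausibly exports, based only on the identifiers imported across these hunks; the concrete values are illustrative guesses, not taken from the source:

```ts
// Hypothetical shape of @/lib/knowledge/consts, inferred from the names
// imported in this diff: SUPPORTED_FIELD_TYPES, TAG_SLOTS, MAX_TAG_SLOTS,
// and the TagSlot type. Values below are placeholders.
export const SUPPORTED_FIELD_TYPES = ['text'] as const // actual list not shown in the diff

export const TAG_SLOTS = ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] as const // slot names assumed

export type TagSlot = (typeof TAG_SLOTS)[number]

export const MAX_TAG_SLOTS = TAG_SLOTS.length
```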

View File

@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { SUPPORTED_FIELD_TYPES } from '@/lib/constants/knowledge'
import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/consts'
import { createTagDefinition, getTagDefinitions } from '@/lib/knowledge/tags/service'
import { createLogger } from '@/lib/logs/console/logger'
import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'

View File

@@ -1,6 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { TAG_SLOTS } from '@/lib/constants/knowledge'
import { TAG_SLOTS } from '@/lib/knowledge/consts'
import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service'
import { createLogger } from '@/lib/logs/console/logger'
import { estimateTokenCount } from '@/lib/tokenization/estimators'

View File

@@ -12,7 +12,7 @@ const DeleteSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
where: z.string().min(1, 'WHERE clause is required'),
})

View File

@@ -12,7 +12,7 @@ const ExecuteSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})

View File

@@ -12,7 +12,7 @@ const InsertSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z

View File

@@ -12,7 +12,7 @@ const QuerySchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})

View File

@@ -12,7 +12,7 @@ const UpdateSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z

View File

@@ -6,7 +6,7 @@ export interface MySQLConnectionConfig {
database: string
username: string
password: string
ssl?: string
ssl?: 'disabled' | 'required' | 'preferred'
}
export async function createMySQLConnection(config: MySQLConnectionConfig) {
@@ -18,7 +18,9 @@ export async function createMySQLConnection(config: MySQLConnectionConfig) {
password: config.password,
}
if (config.ssl === 'required') {
if (config.ssl === 'disabled') {
// Don't set ssl property at all to disable SSL
} else if (config.ssl === 'required') {
connectionConfig.ssl = { rejectUnauthorized: true }
} else if (config.ssl === 'preferred') {
connectionConfig.ssl = { rejectUnauthorized: false }
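The connection hunk above is the core of the SSL preference fix: 'disabled' now omits the ssl option entirely instead of falling through, 'required' verifies certificates, and 'preferred' (the new schema default) accepts self-signed ones. A standalone sketch of that mapping using the mysql2/promise API; the credentials and surrounding options are illustrative:

```ts
import { createConnection, type ConnectionOptions } from 'mysql2/promise'

type SslPref = 'disabled' | 'required' | 'preferred'

// Mirrors the branching in the diff above: omit `ssl` to disable,
// verify certificates for 'required', allow self-signed for 'preferred'.
function sslOption(pref: SslPref): ConnectionOptions['ssl'] | undefined {
  if (pref === 'required') return { rejectUnauthorized: true }
  if (pref === 'preferred') return { rejectUnauthorized: false }
  return undefined // 'disabled': no ssl property at all
}

async function connect(host: string, pref: SslPref) {
  const ssl = sslOption(pref)
  return createConnection({
    host,
    port: 3306,
    user: 'app', // illustrative credentials
    password: 'secret',
    database: 'app',
    ...(ssl ? { ssl } : {}),
  })
}
```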

View File

@@ -12,7 +12,7 @@ const DeleteSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
where: z.string().min(1, 'WHERE clause is required'),
})

View File

@@ -16,7 +16,7 @@ const ExecuteSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})

View File

@@ -12,7 +12,7 @@ const InsertSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z

View File

@@ -12,7 +12,7 @@ const QuerySchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})

View File

@@ -12,7 +12,7 @@ const UpdateSchema = z.object({
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z

View File

@@ -1,9 +1,11 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getUserUsageLimitInfo, updateUserUsageLimit } from '@/lib/billing'
import { getOrganizationBillingData } from '@/lib/billing/core/organization-billing'
import {
getOrganizationBillingData,
isOrganizationOwnerOrAdmin,
} from '@/lib/billing/core/organization'
import { createLogger } from '@/lib/logs/console/logger'
import { isOrganizationOwnerOrAdmin } from '@/lib/permissions/utils'
const logger = createLogger('UnifiedUsageLimitsAPI')
@@ -25,7 +27,6 @@ export async function GET(request: NextRequest) {
const userId = searchParams.get('userId') || session.user.id
const organizationId = searchParams.get('organizationId')
// Validate context
if (!['user', 'organization'].includes(context)) {
return NextResponse.json(
{ error: 'Invalid context. Must be "user" or "organization"' },
@@ -33,7 +34,6 @@ export async function GET(request: NextRequest) {
)
}
// For user context, ensure they can only view their own info
if (context === 'user' && userId !== session.user.id) {
return NextResponse.json(
{ error: "Cannot view other users' usage information" },
@@ -41,7 +41,6 @@ export async function GET(request: NextRequest) {
)
}
// Get usage limit info
if (context === 'organization') {
if (!organizationId) {
return NextResponse.json(
@@ -107,10 +106,8 @@ export async function PUT(request: NextRequest) {
}
if (context === 'user') {
// Update user's own usage limit
await updateUserUsageLimit(userId, limit)
} else if (context === 'organization') {
// context === 'organization'
if (!organizationId) {
return NextResponse.json(
{ error: 'Organization ID is required when context=organization' },
@@ -123,10 +120,7 @@ export async function PUT(request: NextRequest) {
return NextResponse.json({ error: 'Permission denied' }, { status: 403 })
}
// Use the dedicated function to update org usage limit
const { updateOrganizationUsageLimit } = await import(
'@/lib/billing/core/organization-billing'
)
const { updateOrganizationUsageLimit } = await import('@/lib/billing/core/organization')
const result = await updateOrganizationUsageLimit(organizationId, limit)
if (!result.success) {
@@ -137,7 +131,6 @@ export async function PUT(request: NextRequest) {
return NextResponse.json({ success: true, context, userId, organizationId, data: updated })
}
// Return updated limit info
const updatedInfo = await getUserUsageLimitInfo(userId)
return NextResponse.json({
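For reference, the request shapes this unified handler accepts, as implied by the hunks above. The route path is an assumption (only the handler body is shown); query and body field names come from the diff:

```ts
// Illustrative requests against the unified usage-limits endpoint above.
// The '/api/usage-limits' path is assumed; values are examples.
async function usageLimitExamples() {
  // Read your own limit info
  await fetch('/api/usage-limits?context=user')

  // Read an organization's info (context=organization requires organizationId)
  await fetch('/api/usage-limits?context=organization&organizationId=org_123')

  // Raise an organization-wide limit (owner/admin only, per the handler)
  await fetch('/api/usage-limits', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ context: 'organization', organizationId: 'org_123', limit: 500 }),
  })
}
```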

View File

@@ -2,16 +2,19 @@ import crypto from 'crypto'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUsersWithPermissions, hasWorkspaceAdminAccess } from '@/lib/permissions/utils'
import { db } from '@/db'
import { permissions, type permissionTypeEnum } from '@/db/schema'
const logger = createLogger('WorkspacesPermissionsAPI')
type PermissionType = (typeof permissionTypeEnum.enumValues)[number]
interface UpdatePermissionsRequest {
updates: Array<{
userId: string
permissions: PermissionType // Single permission type instead of object with booleans
permissions: PermissionType
}>
}
@@ -33,7 +36,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
// Verify the current user has access to this workspace
const userPermission = await db
.select()
.from(permissions)
@@ -57,7 +59,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
total: result.length,
})
} catch (error) {
console.error('Error fetching workspace permissions:', error)
logger.error('Error fetching workspace permissions:', error)
return NextResponse.json({ error: 'Failed to fetch workspace permissions' }, { status: 500 })
}
}
@@ -81,7 +83,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
// Verify the current user has admin access to this workspace (either direct or through organization)
const hasAdminAccess = await hasWorkspaceAdminAccess(session.user.id, workspaceId)
if (!hasAdminAccess) {
@@ -91,10 +92,8 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
)
}
// Parse and validate request body
const body: UpdatePermissionsRequest = await request.json()
// Prevent users from modifying their own admin permissions
const selfUpdate = body.updates.find((update) => update.userId === session.user.id)
if (selfUpdate && selfUpdate.permissions !== 'admin') {
return NextResponse.json(
@@ -103,10 +102,8 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
)
}
// Process updates in a transaction
await db.transaction(async (tx) => {
for (const update of body.updates) {
// Delete existing permissions for this user and workspace
await tx
.delete(permissions)
.where(
@@ -117,7 +114,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
)
)
// Insert the single new permission
await tx.insert(permissions).values({
id: crypto.randomUUID(),
userId: update.userId,
@@ -138,7 +134,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
total: updatedUsers.length,
})
} catch (error) {
console.error('Error updating workspace permissions:', error)
logger.error('Error updating workspace permissions:', error)
return NextResponse.json({ error: 'Failed to update workspace permissions' }, { status: 500 })
}
}
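A sample PATCH payload for the handler above, matching the UpdatePermissionsRequest interface from this diff: each user now carries exactly one permission string rather than an object of booleans. The route path and the 'read' value are assumptions; 'admin' appears in the handler's self-update guard:

```ts
// Hypothetical client call for the workspace permissions route above.
async function updatePermissions(workspaceId: string) {
  const body = {
    updates: [
      { userId: 'user_abc', permissions: 'admin' },
      { userId: 'user_def', permissions: 'read' }, // permission value assumed
    ],
  }
  await fetch(`/api/workspaces/${workspaceId}/permissions`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })
}
```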

View File

@@ -11,7 +11,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import '@/app/globals.css'
import { SessionProvider } from '@/lib/session-context'
import { SessionProvider } from '@/lib/session/session-context'
import { ThemeProvider } from '@/app/theme-provider'
import { ZoomPrevention } from '@/app/zoom-prevention'

View File

@@ -7,22 +7,12 @@ import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/u
import { Label } from '@/components/ui/label'
import { Progress } from '@/components/ui/progress'
import { createLogger } from '@/lib/logs/console/logger'
import { ACCEPT_ATTRIBUTE, ACCEPTED_FILE_TYPES, MAX_FILE_SIZE } from '@/lib/uploads/validation'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
const logger = createLogger('UploadModal')
const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB
const ACCEPTED_FILE_TYPES = [
'application/pdf',
'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'text/plain',
'text/csv',
'application/vnd.ms-excel',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
]
interface FileWithPreview extends File {
preview: string
}
@@ -74,7 +64,7 @@ export function UploadModal({
return `File "${file.name}" is too large. Maximum size is 100MB.`
}
if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, or XLSX files.`
return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, or HTML files.`
}
return null
}
@@ -168,7 +158,7 @@ export function UploadModal({
return (
<Dialog open={open} onOpenChange={handleClose}>
<DialogContent className='flex max-h-[95vh] max-w-2xl flex-col overflow-hidden'>
<DialogContent className='flex max-h-[95vh] flex-col overflow-hidden sm:max-w-[600px]'>
<DialogHeader>
<DialogTitle>Upload Documents</DialogTitle>
</DialogHeader>
@@ -193,7 +183,7 @@ export function UploadModal({
<input
ref={fileInputRef}
type='file'
accept={ACCEPTED_FILE_TYPES.join(',')}
accept={ACCEPT_ATTRIBUTE}
onChange={handleFileChange}
className='hidden'
multiple
@@ -203,7 +193,8 @@ export function UploadModal({
{isDragging ? 'Drop files here!' : 'Drop files here or click to browse'}
</p>
<p className='text-muted-foreground text-xs'>
Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max 100MB
each)
</p>
</div>
</div>
@@ -223,7 +214,7 @@ export function UploadModal({
<input
ref={fileInputRef}
type='file'
accept={ACCEPTED_FILE_TYPES.join(',')}
accept={ACCEPT_ATTRIBUTE}
onChange={handleFileChange}
className='hidden'
multiple
@@ -233,7 +224,7 @@ export function UploadModal({
</p>
</div>
<div className='max-h-60 space-y-2 overflow-auto'>
<div className='max-h-80 space-y-2 overflow-auto'>
{files.map((file, index) => {
const fileStatus = uploadProgress.fileStatuses?.[index]
const isCurrentlyUploading = fileStatus?.status === 'uploading'
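Both knowledge modals now pull their file constraints from a shared '@/lib/uploads/validation' module instead of duplicating the constants deleted here. A sketch of that module under the assumption it centralizes the old list and extends it with the newly supported formats; the added MIME types are inferred from the UI copy (MD, PPT, PPTX, HTML), not shown in the diff:

```ts
// Hypothetical shape of @/lib/uploads/validation, inferred from the
// identifiers these modals import: MAX_FILE_SIZE, ACCEPTED_FILE_TYPES,
// ACCEPT_ATTRIBUTE.
export const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB

export const ACCEPTED_FILE_TYPES = [
  'application/pdf',
  'application/msword',
  'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
  'text/plain',
  'text/csv',
  'application/vnd.ms-excel',
  'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
  'text/markdown', // new formats assumed from the UI copy
  'application/vnd.ms-powerpoint',
  'application/vnd.openxmlformats-officedocument.presentationml.presentation',
  'text/html',
]

// Single string suitable for an <input accept={...}> attribute.
export const ACCEPT_ATTRIBUTE = ACCEPTED_FILE_TYPES.join(',')
```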

View File

@@ -14,23 +14,13 @@ import { Label } from '@/components/ui/label'
import { Progress } from '@/components/ui/progress'
import { Textarea } from '@/components/ui/textarea'
import { createLogger } from '@/lib/logs/console/logger'
import { ACCEPT_ATTRIBUTE, ACCEPTED_FILE_TYPES, MAX_FILE_SIZE } from '@/lib/uploads/validation'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
import type { KnowledgeBaseData } from '@/stores/knowledge/store'
const logger = createLogger('CreateModal')
const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB
const ACCEPTED_FILE_TYPES = [
'application/pdf',
'application/msword',
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'text/plain',
'text/csv',
'application/vnd.ms-excel',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
]
interface FileWithPreview extends File {
preview: string
}
@@ -168,7 +158,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
// Check file type
if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
setFileError(
`File ${file.name} has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, or XLSX.`
`File ${file.name} has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, or HTML.`
)
hasError = true
continue
@@ -494,7 +484,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
<input
ref={fileInputRef}
type='file'
accept={ACCEPTED_FILE_TYPES.join(',')}
accept={ACCEPT_ATTRIBUTE}
onChange={handleFileChange}
className='hidden'
multiple
@@ -511,7 +501,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
: 'Drop files here or click to browse'}
</p>
<p className='text-muted-foreground text-xs'>
Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max
100MB each)
</p>
</div>
</div>
@@ -535,7 +526,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
<input
ref={fileInputRef}
type='file'
accept={ACCEPTED_FILE_TYPES.join(',')}
accept={ACCEPT_ATTRIBUTE}
onChange={handleFileChange}
className='hidden'
multiple
@@ -552,7 +543,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
: 'Drop more files or click to browse'}
</p>
<p className='text-muted-foreground text-xs'>
PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max 100MB
each)
</p>
</div>
</div>

View File

@@ -25,7 +25,7 @@ import {
TooltipProvider,
TooltipTrigger,
} from '@/components/ui'
import { MAX_TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import { useKnowledgeBaseTagDefinitions } from '@/hooks/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/use-next-available-slot'

View File

@@ -6,7 +6,7 @@ import { Button } from '@/components/ui/button'
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
import { TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
import { useKnowledgeBaseTagDefinitions } from '@/hooks/use-knowledge-base-tag-definitions'
export type TagData = {

View File

@@ -12,15 +12,17 @@ import {
extractPathFromOutputId,
parseOutputContentSafely,
} from '@/lib/response-format'
import { ChatMessage } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/chat/components/chat-message/chat-message'
import { OutputSelect } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/chat/components/output-select/output-select'
import {
ChatFileUpload,
ChatMessage,
OutputSelect,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/chat/components'
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution'
import type { BlockLog, ExecutionResult } from '@/executor/types'
import { useExecutionStore } from '@/stores/execution/store'
import { useChatStore } from '@/stores/panel/chat/store'
import { useConsoleStore } from '@/stores/panel/console/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { ChatFileUpload } from './components/chat-file-upload'
const logger = createLogger('ChatPanel')

View File

@@ -0,0 +1,3 @@
export { ChatFileUpload } from './chat-file-upload/chat-file-upload'
export { ChatMessage } from './chat-message/chat-message'
export { OutputSelect } from './output-select/output-select'

View File

@@ -155,7 +155,7 @@ const ImagePreview = ({
className='h-auto w-full rounded-lg border'
unoptimized
onError={(e) => {
console.error('Image failed to load:', imageSrc)
logger.error('Image failed to load:', imageSrc)
setLoadError(true)
onLoadError?.(true)
}}
@@ -333,7 +333,7 @@ export function ConsoleEntry({ entry, consoleWidth }: ConsoleEntryProps) {
// Clean up the URL
setTimeout(() => URL.revokeObjectURL(url), 100)
} catch (error) {
console.error('Error downloading image:', error)
logger.error('Error downloading image:', error)
alert('Failed to download image. Please try again later.')
}
}

View File

@@ -9,6 +9,7 @@ import {
} from '@/components/ui/dropdown-menu'
import { ScrollArea } from '@/components/ui/scroll-area'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { useCopilotStore } from '@/stores/copilot/store'
import { useChatStore } from '@/stores/panel/chat/store'
import { useConsoleStore } from '@/stores/panel/console/store'
@@ -19,6 +20,8 @@ import { Console } from './components/console/console'
import { Copilot } from './components/copilot/copilot'
import { Variables } from './components/variables/variables'
const logger = createLogger('Panel')
export function Panel() {
const [chatMessage, setChatMessage] = useState<string>('')
const [isHistoryDropdownOpen, setIsHistoryDropdownOpen] = useState(false)
@@ -67,7 +70,7 @@ export function Panel() {
try {
await deleteChat(chatId)
} catch (error) {
console.error('Error deleting chat:', error)
logger.error('Error deleting chat:', error)
}
},
[deleteChat]
@@ -101,7 +104,7 @@ export function Panel() {
lastLoadedWorkflowRef.current = activeWorkflowId
}
} catch (error) {
console.error('Failed to load copilot data:', error)
logger.error('Failed to load copilot data:', error)
}
},
[
@@ -134,14 +137,14 @@ export function Panel() {
if (!areChatsFresh(activeWorkflowId)) {
// Don't await - let it load in background while dropdown is already open
ensureCopilotDataLoaded(false).catch((error) => {
console.error('Failed to load chat history:', error)
logger.error('Failed to load chat history:', error)
})
}
}
// If streaming, just log that we're showing cached data
if (open && isSendingMessage) {
console.log('Chat history opened during stream - showing cached data only')
logger.info('Chat history opened during stream - showing cached data only')
}
},
[ensureCopilotDataLoaded, activeWorkflowId, areChatsFresh, isSendingMessage]
@@ -278,7 +281,7 @@ export function Panel() {
// This is a real workflow change, not just a tab switch
if (copilotWorkflowId !== activeWorkflowId || !copilotWorkflowId) {
ensureCopilotDataLoaded().catch((error) => {
console.error('Failed to auto-load copilot data on workflow change:', error)
logger.error('Failed to auto-load copilot data on workflow change:', error)
})
}
}

View File

@@ -6,7 +6,7 @@ import { Button } from '@/components/ui/button'
import { formatDisplayText } from '@/components/ui/formatted-text'
import { Input } from '@/components/ui/input'
import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
import { MAX_TAG_SLOTS } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS } from '@/lib/knowledge/consts'
import { cn } from '@/lib/utils'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'

View File

@@ -235,7 +235,7 @@ export function FileUpload({
})
}
} catch (error) {
console.error(`Error uploading ${file.name}:`, error)
logger.error(`Error uploading ${file.name}:`, error)
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
uploadErrors.push(`${file.name}: ${errorMessage}`)
}
@@ -428,7 +428,7 @@ export function FileUpload({
deletionResults.failures.push(`${file.name}: ${errorMessage}`)
}
} catch (error) {
console.error(`Failed to delete file ${file.name}:`, error)
logger.error(`Failed to delete file ${file.name}:`, error)
deletionResults.failures.push(
`${file.name}: ${error instanceof Error ? error.message : 'Unknown error'}`
)

View File

@@ -483,7 +483,7 @@ export function ToolInput({
try {
return block.tools.config.tool({ operation })
} catch (error) {
console.error('Error selecting tool for operation:', error)
logger.error('Error selecting tool for operation:', error)
}
}

View File

@@ -6,6 +6,7 @@ import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { Card } from '@/components/ui/card'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { parseCronToHumanReadable } from '@/lib/schedules/utils'
import { cn, validateName } from '@/lib/utils'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'
@@ -23,6 +24,8 @@ import { ActionBar } from './components/action-bar/action-bar'
import { ConnectionBlocks } from './components/connection-blocks/connection-blocks'
import { SubBlock } from './components/sub-block/sub-block'
const logger = createLogger('WorkflowBlock')
interface WorkflowBlockProps {
type: string
config: BlockConfig
@@ -232,10 +235,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
fetchScheduleInfo(currentWorkflowId)
}
} else {
console.error('Failed to reactivate schedule')
logger.error('Failed to reactivate schedule')
}
} catch (error) {
console.error('Error reactivating schedule:', error)
logger.error('Error reactivating schedule:', error)
}
}
@@ -255,10 +258,10 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
fetchScheduleInfo(currentWorkflowId)
}
} else {
console.error('Failed to disable schedule')
logger.error('Failed to disable schedule')
}
} catch (error) {
console.error('Error disabling schedule:', error)
logger.error('Error disabling schedule:', error)
}
}
@@ -328,12 +331,12 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
return
}
} catch (err) {
console.error('Error fetching schedule status:', err)
logger.error('Error fetching schedule status:', err)
}
setScheduleInfo(baseInfo)
} catch (error) {
console.error('Error fetching schedule info:', error)
logger.error('Error fetching schedule info:', error)
setScheduleInfo(null)
} finally {
setIsLoadingScheduleInfo(false)

View File

@@ -26,7 +26,7 @@ import {
AlertDialogTitle,
} from '@/components/ui/alert-dialog'
import { ScrollArea } from '@/components/ui/scroll-area'
import { MAX_TAG_SLOTS } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'

View File

@@ -17,7 +17,7 @@ import {
SelectValue,
} from '@/components/ui'
import { ScrollArea } from '@/components/ui/scroll-area'
import { MAX_TAG_SLOTS, TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS, TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import type { DocumentTag } from '@/app/workspace/[workspaceId]/knowledge/components/document-tag-entry/document-tag-entry'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'

View File

@@ -15,9 +15,12 @@ import {
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Skeleton } from '@/components/ui/skeleton'
import { createLogger } from '@/lib/logs/console/logger'
import { useEnvironmentStore } from '@/stores/settings/environment/store'
import type { EnvironmentVariable as StoreEnvironmentVariable } from '@/stores/settings/environment/types'
const logger = createLogger('EnvironmentVariables')
// Constants
const GRID_COLS = 'grid grid-cols-[minmax(0,1fr),minmax(0,1fr),40px] gap-4'
const INITIAL_ENV_VAR: UIEnvironmentVariable = { key: '', value: '' }
@@ -263,7 +266,7 @@ export function EnvironmentVariables({
// Single store update that triggers sync
useEnvironmentStore.getState().setVariables(validVariables)
} catch (error) {
console.error('Failed to save environment variables:', error)
logger.error('Failed to save environment variables:', error)
}
}

View File

@@ -197,10 +197,10 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
const activeOrgId = activeOrganization?.id
useEffect(() => {
if (subscription.isTeam && activeOrgId) {
if ((subscription.isTeam || subscription.isEnterprise) && activeOrgId) {
loadOrganizationBillingData(activeOrgId)
}
}, [activeOrgId, subscription.isTeam, loadOrganizationBillingData])
}, [activeOrgId, subscription.isTeam, subscription.isEnterprise, loadOrganizationBillingData])
// Auto-clear upgrade error
useEffect(() => {
@@ -349,22 +349,39 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
badgeText={badgeText}
onBadgeClick={handleBadgeClick}
seatsText={
permissions.canManageTeam
permissions.canManageTeam || subscription.isEnterprise
? `${organizationBillingData?.totalSeats || subscription.seats || 1} seats`
: undefined
}
current={usage.current}
current={
subscription.isEnterprise || subscription.isTeam
? organizationBillingData?.totalCurrentUsage || 0
: usage.current
}
limit={
!subscription.isFree &&
(permissions.canEditUsageLimit ||
permissions.showTeamMemberView ||
subscription.isEnterprise)
? usage.current // placeholder; rightContent will render UsageLimit
: usage.limit
subscription.isEnterprise || subscription.isTeam
? organizationBillingData?.totalUsageLimit ||
organizationBillingData?.minimumBillingAmount ||
0
: !subscription.isFree &&
(permissions.canEditUsageLimit || permissions.showTeamMemberView)
? usage.current // placeholder; rightContent will render UsageLimit
: usage.limit
}
isBlocked={Boolean(subscriptionData?.billingBlocked)}
status={billingStatus === 'unknown' ? 'ok' : billingStatus}
percentUsed={Math.round(usage.percentUsed)}
percentUsed={
subscription.isEnterprise || subscription.isTeam
? organizationBillingData?.totalUsageLimit &&
organizationBillingData.totalUsageLimit > 0
? Math.round(
(organizationBillingData.totalCurrentUsage /
organizationBillingData.totalUsageLimit) *
100
)
: 0
: Math.round(usage.percentUsed)
}
onResolvePayment={async () => {
try {
const res = await fetch('/api/billing/portal', {
@@ -387,9 +404,7 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
}}
rightContent={
!subscription.isFree &&
(permissions.canEditUsageLimit ||
permissions.showTeamMemberView ||
subscription.isEnterprise) ? (
(permissions.canEditUsageLimit || permissions.showTeamMemberView) ? (
<UsageLimit
ref={usageLimitRef}
currentLimit={
@@ -398,7 +413,7 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
: usageLimitData?.currentLimit || usage.limit
}
currentUsage={usage.current}
canEdit={permissions.canEditUsageLimit && !subscription.isEnterprise}
canEdit={permissions.canEditUsageLimit}
minimumLimit={
subscription.isTeam && isTeamAdmin
? organizationBillingData?.minimumBillingAmount ||
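The inline ternaries above are dense; the org-level percentage reduces to one guard against a zero or missing limit. A hedged extraction of that branch, with types narrowed to the fields used here:

```ts
interface OrgBillingData {
  totalCurrentUsage: number
  totalUsageLimit: number | null
  minimumBillingAmount: number | null
}

// Equivalent to the percentUsed branch above for team/enterprise plans:
// returns 0 when no positive org-wide limit exists.
function orgPercentUsed(data: OrgBillingData | null | undefined): number {
  if (!data?.totalUsageLimit || data.totalUsageLimit <= 0) return 0
  return Math.round((data.totalCurrentUsage / data.totalUsageLimit) * 100)
}
```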

View File

@@ -1039,6 +1039,7 @@ export function Sidebar() {
<HelpModal open={showHelp} onOpenChange={setShowHelp} />
<InviteModal open={showInviteMembers} onOpenChange={setShowInviteMembers} />
<SubscriptionModal open={showSubscriptionModal} onOpenChange={setShowSubscriptionModal} />
<SearchModal
open={showSearchModal}
onOpenChange={setShowSearchModal}

View File

@@ -26,15 +26,15 @@ export type DocumentProcessingPayload = {
export const processDocument = task({
id: 'knowledge-process-document',
maxDuration: env.KB_CONFIG_MAX_DURATION,
maxDuration: env.KB_CONFIG_MAX_DURATION || 300,
retry: {
maxAttempts: env.KB_CONFIG_MAX_ATTEMPTS,
factor: env.KB_CONFIG_RETRY_FACTOR,
minTimeoutInMs: env.KB_CONFIG_MIN_TIMEOUT,
maxTimeoutInMs: env.KB_CONFIG_MAX_TIMEOUT,
maxAttempts: env.KB_CONFIG_MAX_ATTEMPTS || 3,
factor: env.KB_CONFIG_RETRY_FACTOR || 2,
minTimeoutInMs: env.KB_CONFIG_MIN_TIMEOUT || 1000,
maxTimeoutInMs: env.KB_CONFIG_MAX_TIMEOUT || 10000,
},
queue: {
concurrencyLimit: env.KB_CONFIG_CONCURRENCY_LIMIT,
concurrencyLimit: env.KB_CONFIG_CONCURRENCY_LIMIT || 20,
name: 'document-processing-queue',
},
run: async (payload: DocumentProcessingPayload) => {
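The fallbacks above make the task config resilient to unset env vars: `env.KB_CONFIG_* || default` substitutes the default when the variable is undefined (and also when it is 0 or empty, which is acceptable for these settings). A minimal sketch of the pattern outside the typed env wrapper:

```ts
// Pattern used above: env values may be undefined in deployments that
// never set them, so each knob carries an inline default.
const maxDuration = Number(process.env.KB_CONFIG_MAX_DURATION) || 300 // seconds
const maxAttempts = Number(process.env.KB_CONFIG_MAX_ATTEMPTS) || 3
const concurrencyLimit = Number(process.env.KB_CONFIG_CONCURRENCY_LIMIT) || 20
```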

View File

@@ -0,0 +1,122 @@
import {
Body,
Column,
Container,
Head,
Html,
Img,
Link,
Preview,
Row,
Section,
Text,
} from '@react-email/components'
import { format } from 'date-fns'
import { getBrandConfig } from '@/lib/branding/branding'
import { env } from '@/lib/env'
import { getAssetUrl } from '@/lib/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
interface EnterpriseSubscriptionEmailProps {
userName?: string
userEmail?: string
loginLink?: string
createdDate?: Date
}
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
export const EnterpriseSubscriptionEmail = ({
userName = 'Valued User',
userEmail = '',
loginLink = `${baseUrl}/login`,
createdDate = new Date(),
}: EnterpriseSubscriptionEmailProps) => {
const brand = getBrandConfig()
return (
<Html>
<Head />
<Body style={baseStyles.main}>
<Preview>Your Enterprise Plan is now active on Sim</Preview>
<Container style={baseStyles.container}>
<Section style={{ padding: '30px 0', textAlign: 'center' }}>
<Row>
<Column style={{ textAlign: 'center' }}>
<Img
src={brand.logoUrl || getAssetUrl('static/sim.png')}
width='114'
alt={brand.name}
style={{
margin: '0 auto',
}}
/>
</Column>
</Row>
</Section>
<Section style={baseStyles.sectionsBorders}>
<Row>
<Column style={baseStyles.sectionBorder} />
<Column style={baseStyles.sectionCenter} />
<Column style={baseStyles.sectionBorder} />
</Row>
</Section>
<Section style={baseStyles.content}>
<Text style={baseStyles.paragraph}>Hello {userName},</Text>
<Text style={baseStyles.paragraph}>
Great news! Your <strong>Enterprise Plan</strong> has been activated on Sim. You now
have access to advanced features and increased capacity for your workflows.
</Text>
<Text style={baseStyles.paragraph}>
Your account has been set up with full access to your organization. Click below to log
in and start exploring your new Enterprise features:
</Text>
<Link href={loginLink} style={{ textDecoration: 'none' }}>
<Text style={baseStyles.button}>Access Your Enterprise Account</Text>
</Link>
<Text style={baseStyles.paragraph}>
<strong>What's next?</strong>
</Text>
<Text style={baseStyles.paragraph}>
• Invite team members to your organization
<br />• Begin building your workflows
</Text>
<Text style={baseStyles.paragraph}>
If you have any questions or need assistance getting started, our support team is here
to help.
</Text>
<Text style={baseStyles.paragraph}>
Welcome to Sim Enterprise!
<br />
The Sim Team
</Text>
<Text
style={{
...baseStyles.footerText,
marginTop: '40px',
textAlign: 'left',
color: '#666666',
}}
>
This email was sent on {format(createdDate, 'MMMM do, yyyy')} to {userEmail}
regarding your Enterprise plan activation on Sim.
</Text>
</Section>
</Container>
<EmailFooter baseUrl={baseUrl} />
</Body>
</Html>
)
}
export default EnterpriseSubscriptionEmail

View File

@@ -1,5 +1,6 @@
export * from './base-styles'
export { BatchInvitationEmail } from './batch-invitation-email'
export { EnterpriseSubscriptionEmail } from './enterprise-subscription-email'
export { default as EmailFooter } from './footer'
export { HelpConfirmationEmail } from './help-confirmation-email'
export { InvitationEmail } from './invitation-email'

View File

@@ -14,6 +14,7 @@ import {
import { format } from 'date-fns'
import { getBrandConfig } from '@/lib/branding/branding'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -28,6 +29,8 @@ interface InvitationEmailProps {
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const logger = createLogger('InvitationEmail')
export const InvitationEmail = ({
inviterName = 'A team member',
organizationName = 'an organization',
@@ -49,7 +52,7 @@ export const InvitationEmail = ({
enhancedLink = `${baseUrl}/invite/${invitationId}?token=${invitationId}`
}
} catch (e) {
console.error('Error parsing invite link:', e)
logger.error('Error parsing invite link:', e)
}
}

View File

@@ -1,6 +1,7 @@
import { render } from '@react-email/components'
import {
BatchInvitationEmail,
EnterpriseSubscriptionEmail,
HelpConfirmationEmail,
InvitationEmail,
OTPVerificationEmail,
@@ -82,6 +83,23 @@ export async function renderHelpConfirmationEmail(
)
}
export async function renderEnterpriseSubscriptionEmail(
userName: string,
userEmail: string
): Promise<string> {
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const loginLink = `${baseUrl}/login`
return await render(
EnterpriseSubscriptionEmail({
userName,
userEmail,
loginLink,
createdDate: new Date(),
})
)
}
export function getEmailSubject(
type:
| 'sign-in'
@@ -91,6 +109,7 @@ export function getEmailSubject(
| 'invitation'
| 'batch-invitation'
| 'help-confirmation'
| 'enterprise-subscription'
): string {
const brandName = getBrandConfig().name
@@ -109,6 +128,8 @@ export function getEmailSubject(
return `You've been invited to join a team and workspaces on ${brandName}`
case 'help-confirmation':
return 'Your request has been received'
case 'enterprise-subscription':
return `Your Enterprise Plan is now active on ${brandName}`
default:
return brandName
}
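Putting the new pieces together, sending the activation email might look like the sketch below. Only renderEnterpriseSubscriptionEmail and getEmailSubject come from this diff; the mailer is a hypothetical stand-in:

```ts
// Illustrative send path for the new enterprise activation email.
declare function sendEmail(opts: { to: string; subject: string; html: string }): Promise<void> // hypothetical mailer

async function notifyEnterpriseUser(userName: string, userEmail: string) {
  const html = await renderEnterpriseSubscriptionEmail(userName, userEmail)
  const subject = getEmailSubject('enterprise-subscription')
  await sendEmail({ to: userEmail, subject, html })
}
```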

View File

@@ -13,10 +13,13 @@ import {
} from '@react-email/components'
import { getBrandConfig } from '@/lib/branding/branding'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
const logger = createLogger('WorkspaceInvitationEmail')
interface WorkspaceInvitationEmailProps {
workspaceName?: string
inviterName?: string
@@ -45,7 +48,7 @@ export const WorkspaceInvitationEmail = ({
}
}
} catch (e) {
console.error('Error enhancing invitation link:', e)
logger.error('Error enhancing invitation link:', e)
}
return (

View File

@@ -1254,7 +1254,7 @@ export class InputResolver {
return JSON.parse(normalizedExpression)
} catch (jsonError) {
console.error('Error parsing JSON for loop:', jsonError)
logger.error('Error parsing JSON for loop:', jsonError)
// If JSON parsing fails, continue with expression evaluation
}
}
@@ -1267,7 +1267,7 @@ export class InputResolver {
}
}
} catch (e) {
console.error('Error evaluating forEach items:', e)
logger.error('Error evaluating forEach items:', e)
}
}
}
@@ -1712,7 +1712,7 @@ export class InputResolver {
}
}
} catch (e) {
console.error('Error evaluating parallel distribution items:', e)
logger.error('Error evaluating parallel distribution items:', e)
}
}

View File

@@ -175,10 +175,7 @@ describe('Full Executor Test', () => {
} else {
expect(result).toBeDefined()
}
} catch (error) {
console.error('Execution error:', error)
// Log the error but don't fail the test - we want to see what happens
}
} catch (error) {}
})
it('should test the executor getNextExecutionLayer method directly', async () => {

View File

@@ -621,7 +621,7 @@ export function useCollaborativeWorkflow() {
}
if (!blockConfig) {
console.error(`Block type ${type} not found`)
logger.error(`Block type ${type} not found`)
return
}

View File

@@ -1,7 +1,7 @@
'use client'
import { useCallback, useEffect, useState } from 'react'
import type { TagSlot } from '@/lib/constants/knowledge'
import type { TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('useKnowledgeBaseTagDefinitions')

View File

@@ -1,7 +1,10 @@
import { useCallback, useEffect, useMemo, useState } from 'react'
import Fuse from 'fuse.js'
import { createLogger } from '@/lib/logs/console/logger'
import { type ChunkData, type DocumentData, useKnowledgeStore } from '@/stores/knowledge/store'
const logger = createLogger('UseKnowledgeBase')
export function useKnowledgeBase(id: string) {
const { getKnowledgeBase, getCachedKnowledgeBase, loadingKnowledgeBases } = useKnowledgeStore()
@@ -22,6 +25,7 @@ export function useKnowledgeBase(id: string) {
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load knowledge base')
logger.error(`Failed to load knowledge base ${id}:`, err)
}
}
}
@@ -86,6 +90,7 @@ export function useKnowledgeBaseDocuments(
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load documents')
logger.error(`Failed to load documents for knowledge base ${knowledgeBaseId}:`, err)
}
}
}
@@ -127,6 +132,7 @@ export function useKnowledgeBaseDocuments(
})
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to refresh documents')
logger.error(`Failed to refresh documents for knowledge base ${knowledgeBaseId}:`, err)
}
}, [
knowledgeBaseId,
@@ -141,6 +147,7 @@ export function useKnowledgeBaseDocuments(
const updateDocumentLocal = useCallback(
(documentId: string, updates: Partial<DocumentData>) => {
updateDocument(knowledgeBaseId, documentId, updates)
logger.info(`Updated document ${documentId} for knowledge base ${knowledgeBaseId}`)
},
[knowledgeBaseId, updateDocument]
)
@@ -204,10 +211,11 @@ export function useKnowledgeBasesList(workspaceId?: string) {
retryTimeoutId = setTimeout(() => {
if (isMounted) {
loadData(attempt + 1)
logger.warn(`Failed to load knowledge bases list, retrying... ${attempt + 1}`)
}
}, delay)
} else {
console.error('All retry attempts failed for knowledge bases list:', err)
logger.error('All retry attempts failed for knowledge bases list:', err)
setError(errorMessage)
setRetryCount(maxRetries)
}
@@ -235,7 +243,7 @@ export function useKnowledgeBasesList(workspaceId?: string) {
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Failed to refresh knowledge bases'
setError(errorMessage)
console.error('Error refreshing knowledge bases list:', err)
logger.error('Error refreshing knowledge bases list:', err)
}
}
@@ -257,7 +265,7 @@ export function useKnowledgeBasesList(workspaceId?: string) {
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Failed to refresh knowledge bases'
setError(errorMessage)
console.error('Error force refreshing knowledge bases list:', err)
logger.error('Error force refreshing knowledge bases list:', err)
}
}
@@ -361,6 +369,7 @@ export function useDocumentChunks(
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load chunks')
logger.error(`Failed to load chunks for document ${documentId}:`, err)
}
} finally {
if (isMounted) {
@@ -559,6 +568,7 @@ export function useDocumentChunks(
} catch (err) {
if (isMounted) {
setError(err instanceof Error ? err.message : 'Failed to load chunks')
logger.error(`Failed to load chunks for document ${documentId}:`, err)
}
} finally {
if (isMounted) {
@@ -599,6 +609,7 @@ export function useDocumentChunks(
// Update loading state based on store
if (!isStoreLoading && isLoading) {
logger.info(`Chunks loaded for document ${documentId}`)
setIsLoading(false)
}
}, [documentId, isStoreLoading, isLoading, initialLoadDone, serverSearchQuery, serverCurrentPage])
@@ -629,6 +640,7 @@ export function useDocumentChunks(
return fetchedChunks
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load page')
logger.error(`Failed to load page for document ${documentId}:`, err)
throw err
} finally {
setIsLoading(false)
@@ -676,6 +688,7 @@ export function useDocumentChunks(
return fetchedChunks
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to refresh chunks')
logger.error(`Failed to refresh chunks for document ${documentId}:`, err)
throw err
} finally {
setIsLoading(false)
@@ -704,6 +717,7 @@ export function useDocumentChunks(
return searchResults
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to search chunks')
logger.error(`Failed to search chunks for document ${documentId}:`, err)
throw err
} finally {
setIsLoading(false)

View File

@@ -1,7 +1,7 @@
'use client'
import { useCallback, useEffect, useState } from 'react'
import type { TagSlot } from '@/lib/constants/knowledge'
import type { TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('useTagDefinitions')

View File

@@ -10,7 +10,7 @@ import { createAuthClient } from 'better-auth/react'
import type { auth } from '@/lib/auth'
import { env, getEnv } from '@/lib/env'
import { isProd } from '@/lib/environment'
import { SessionContext, type SessionHookResult } from '@/lib/session-context'
import { SessionContext, type SessionHookResult } from '@/lib/session/session-context'
export function getBaseURL() {
let baseURL

View File

@@ -24,7 +24,7 @@ import { authorizeSubscriptionReference } from '@/lib/billing/authorization'
import { handleNewUser } from '@/lib/billing/core/usage'
import { syncSubscriptionUsageLimits } from '@/lib/billing/organization'
import { getPlans } from '@/lib/billing/plans'
import type { EnterpriseSubscriptionMetadata } from '@/lib/billing/types'
import { handleManualEnterpriseSubscription } from '@/lib/billing/webhooks/enterprise'
import {
handleInvoiceFinalized,
handleInvoicePaymentFailed,
@@ -52,121 +52,6 @@ if (validStripeKey) {
})
}
function isEnterpriseMetadata(value: unknown): value is EnterpriseSubscriptionMetadata {
return (
!!value &&
typeof (value as any).plan === 'string' &&
(value as any).plan.toLowerCase() === 'enterprise'
)
}
async function handleManualEnterpriseSubscription(event: Stripe.Event) {
const stripeSubscription = event.data.object as Stripe.Subscription
const metaPlan = (stripeSubscription.metadata?.plan as string | undefined)?.toLowerCase() || ''
if (metaPlan !== 'enterprise') {
logger.info('[subscription.created] Skipping non-enterprise subscription', {
subscriptionId: stripeSubscription.id,
plan: metaPlan || 'unknown',
})
return
}
const stripeCustomerId = stripeSubscription.customer as string
if (!stripeCustomerId) {
logger.error('[subscription.created] Missing Stripe customer ID', {
subscriptionId: stripeSubscription.id,
})
throw new Error('Missing Stripe customer ID on subscription')
}
const metadata = stripeSubscription.metadata || {}
const referenceId =
typeof metadata.referenceId === 'string' && metadata.referenceId.length > 0
? metadata.referenceId
: null
if (!referenceId) {
logger.error('[subscription.created] Unable to resolve referenceId', {
subscriptionId: stripeSubscription.id,
stripeCustomerId,
})
throw new Error('Unable to resolve referenceId for subscription')
}
const firstItem = stripeSubscription.items?.data?.[0]
const seats = typeof firstItem?.quantity === 'number' ? firstItem.quantity : null
if (!isEnterpriseMetadata(metadata)) {
logger.error('[subscription.created] Invalid enterprise metadata shape', {
subscriptionId: stripeSubscription.id,
metadata,
})
throw new Error('Invalid enterprise metadata for subscription')
}
const enterpriseMetadata = metadata
const metadataJson: Record<string, unknown> = { ...enterpriseMetadata }
const subscriptionRow = {
id: crypto.randomUUID(),
plan: 'enterprise',
referenceId,
stripeCustomerId,
stripeSubscriptionId: stripeSubscription.id,
status: stripeSubscription.status || null,
periodStart: stripeSubscription.current_period_start
? new Date(stripeSubscription.current_period_start * 1000)
: null,
periodEnd: stripeSubscription.current_period_end
? new Date(stripeSubscription.current_period_end * 1000)
: null,
cancelAtPeriodEnd: stripeSubscription.cancel_at_period_end ?? null,
seats,
trialStart: stripeSubscription.trial_start
? new Date(stripeSubscription.trial_start * 1000)
: null,
trialEnd: stripeSubscription.trial_end ? new Date(stripeSubscription.trial_end * 1000) : null,
metadata: metadataJson,
}
const existing = await db
.select({ id: schema.subscription.id })
.from(schema.subscription)
.where(eq(schema.subscription.stripeSubscriptionId, stripeSubscription.id))
.limit(1)
if (existing.length > 0) {
await db
.update(schema.subscription)
.set({
plan: subscriptionRow.plan,
referenceId: subscriptionRow.referenceId,
stripeCustomerId: subscriptionRow.stripeCustomerId,
status: subscriptionRow.status,
periodStart: subscriptionRow.periodStart,
periodEnd: subscriptionRow.periodEnd,
cancelAtPeriodEnd: subscriptionRow.cancelAtPeriodEnd,
seats: subscriptionRow.seats,
trialStart: subscriptionRow.trialStart,
trialEnd: subscriptionRow.trialEnd,
metadata: subscriptionRow.metadata,
})
.where(eq(schema.subscription.stripeSubscriptionId, stripeSubscription.id))
} else {
await db.insert(schema.subscription).values(subscriptionRow)
}
logger.info('[subscription.created] Upserted subscription', {
subscriptionId: subscriptionRow.id,
referenceId: subscriptionRow.referenceId,
plan: subscriptionRow.plan,
status: subscriptionRow.status,
})
}
export const auth = betterAuth({
baseURL: getBaseURL(),
trustedOrigins: [
@@ -1161,7 +1046,7 @@ export const auth = betterAuth({
if (!response.ok) {
const errorText = await response.text()
console.error('Linear API error:', {
logger.error('Linear API error:', {
status: response.status,
statusText: response.statusText,
body: errorText,
@@ -1172,12 +1057,12 @@ export const auth = betterAuth({
const { data, errors } = await response.json()
if (errors) {
console.error('GraphQL errors:', errors)
logger.error('GraphQL errors:', errors)
throw new Error(`GraphQL errors: ${JSON.stringify(errors)}`)
}
if (!data?.viewer) {
console.error('No viewer data in response:', data)
logger.error('No viewer data in response:', data)
throw new Error('No viewer data in response')
}
@@ -1193,7 +1078,7 @@ export const auth = betterAuth({
image: viewer.avatarUrl || null,
}
} catch (error) {
console.error('Error in getUserInfo:', error)
logger.error('Error in getUserInfo:', error)
throw error
}
},

View File

@@ -31,9 +31,7 @@ export async function checkUsageStatus(userId: string): Promise<UsageData> {
const statsRecords = await db.select().from(userStats).where(eq(userStats.userId, userId))
const currentUsage =
statsRecords.length > 0
? Number.parseFloat(
statsRecords[0].currentPeriodCost?.toString() || statsRecords[0].totalCost.toString()
)
? Number.parseFloat(statsRecords[0].currentPeriodCost?.toString())
: 0
return {
@@ -117,7 +115,7 @@ export async function checkUsageStatus(userId: string): Promise<UsageData> {
// Fall back to minimum billing amount from Stripe subscription
const orgSub = await getOrganizationSubscription(org.id)
if (orgSub?.seats) {
const { basePrice } = getPlanPricing(orgSub.plan, orgSub)
const { basePrice } = getPlanPricing(orgSub.plan)
orgCap = (orgSub.seats || 1) * basePrice
} else {
// If no subscription, use team default

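For context, a sketch of the fallback order this hunk establishes: the org cap now comes from the Stripe subscription's seat minimum rather than metadata-driven pricing, with a team default when no subscription exists. The helper name and the exact default are assumptions, not code from this PR:

// Sketch only: resolving an organization's usage cap after this change.
async function resolveOrgCap(orgId: string): Promise<number> {
  const orgSub = await getOrganizationSubscription(orgId)
  if (orgSub?.seats) {
    // Minimum billing amount derived from the Stripe subscription
    const { basePrice } = getPlanPricing(orgSub.plan)
    return (orgSub.seats || 1) * basePrice
  }
  // No subscription: fall back to the team default (assumed here to be the per-seat team limit)
  return getTeamTierLimitPerSeat()
}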
View File

@@ -2,12 +2,10 @@ import { and, eq } from 'drizzle-orm'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { getUserUsageData } from '@/lib/billing/core/usage'
import {
getEnterpriseTierLimitPerSeat,
getFreeTierLimit,
getProTierLimit,
getTeamTierLimitPerSeat,
} from '@/lib/billing/subscriptions/utils'
import type { EnterpriseSubscriptionMetadata } from '@/lib/billing/types'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { member, subscription, user } from '@/db/schema'
@@ -43,11 +41,8 @@ export async function getOrganizationSubscription(organizationId: string) {
/**
* Get plan pricing information
*/
export function getPlanPricing(
plan: string,
subscription?: any
): {
basePrice: number // What they pay upfront via Stripe subscription (per seat for team/enterprise)
export function getPlanPricing(plan: string): {
basePrice: number // What they pay upfront via Stripe subscription
} {
switch (plan) {
case 'free':
@@ -55,25 +50,7 @@ export function getPlanPricing(
case 'pro':
return { basePrice: getProTierLimit() }
case 'team':
return { basePrice: getTeamTierLimitPerSeat() }
case 'enterprise':
// Enterprise uses per-seat pricing like Team plans
// Custom per-seat price can be set in metadata
if (subscription?.metadata) {
const metadata: EnterpriseSubscriptionMetadata =
typeof subscription.metadata === 'string'
? JSON.parse(subscription.metadata)
: subscription.metadata
const perSeatPrice = metadata.perSeatPrice
? Number.parseFloat(String(metadata.perSeatPrice))
: undefined
if (perSeatPrice && perSeatPrice > 0 && !Number.isNaN(perSeatPrice)) {
return { basePrice: perSeatPrice }
}
}
// Default enterprise per-seat pricing
return { basePrice: getEnterpriseTierLimitPerSeat() }
return { basePrice: getTeamTierLimitPerSeat() } // Per-seat pricing
default:
return { basePrice: 0 }
}
@@ -103,7 +80,7 @@ export async function calculateUserOverage(userId: string): Promise<{
}
const plan = subscription?.plan || 'free'
const { basePrice } = getPlanPricing(plan, subscription)
const { basePrice } = getPlanPricing(plan)
const actualUsage = usageData.currentUsage
// Calculate overage: any usage beyond what they already paid for
@@ -197,7 +174,7 @@ export async function getSimplifiedBillingSummary(
.from(member)
.where(eq(member.organizationId, organizationId))
const { basePrice: basePricePerSeat } = getPlanPricing(subscription.plan, subscription)
const { basePrice: basePricePerSeat } = getPlanPricing(subscription.plan)
// Use licensed seats from Stripe as source of truth
const licensedSeats = subscription.seats || 1
const totalBasePrice = basePricePerSeat * licensedSeats // Based on Stripe subscription
@@ -270,7 +247,7 @@ export async function getSimplifiedBillingSummary(
}
// Individual billing summary
const { basePrice } = getPlanPricing(plan, subscription)
const { basePrice } = getPlanPricing(plan)
// For team and enterprise plans, calculate total team usage instead of individual usage
let currentUsage = usageData.currentUsage

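The simplified signature makes pricing a pure function of the plan name. A sketch of the expected lookups under the new switch:

// Hypothetical usage of the one-argument signature:
getPlanPricing('pro') // { basePrice: getProTierLimit() }
getPlanPricing('team') // { basePrice: getTeamTierLimitPerSeat() }, per seat
getPlanPricing('other') // { basePrice: 0 } via the default branch
// Enterprise pricing is no longer derived here; it is fixed-cost and comes
// from the organization's configured usage limit (see organization.ts below).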
View File

@@ -131,35 +131,38 @@ export async function getOrganizationBillingData(
const totalCurrentUsage = members.reduce((sum, member) => sum + member.currentUsage, 0)
// Get per-seat pricing for the plan
const { basePrice: pricePerSeat } = getPlanPricing(subscription.plan, subscription)
const { basePrice: pricePerSeat } = getPlanPricing(subscription.plan)
// Use Stripe subscription seats as source of truth
// Ensure we always have at least 1 seat (protect against 0 or falsy values)
const licensedSeats = Math.max(subscription.seats || 1, 1)
// Validate seat capacity - warn if members exceed licensed seats
if (members.length > licensedSeats) {
logger.warn('Organization has more members than licensed seats', {
organizationId,
licensedSeats,
actualMembers: members.length,
plan: subscription.plan,
})
}
// Calculate minimum billing amount
let minimumBillingAmount: number
let totalUsageLimit: number
if (subscription.plan === 'enterprise') {
// Enterprise has fixed pricing set through custom Stripe product
// Their usage limit is configured to match their monthly cost
const configuredLimit = organizationData.orgUsageLimit
? Number.parseFloat(organizationData.orgUsageLimit)
: 0
minimumBillingAmount = configuredLimit // For enterprise, this equals their fixed monthly cost
totalUsageLimit = configuredLimit // Same as their monthly cost
} else {
// Team plan: Billing is based on licensed seats from Stripe
minimumBillingAmount = licensedSeats * pricePerSeat
// Total usage limit: never below the minimum based on licensed seats
const configuredLimit = organizationData.orgUsageLimit
? Number.parseFloat(organizationData.orgUsageLimit)
: null
totalUsageLimit =
configuredLimit !== null
? Math.max(configuredLimit, minimumBillingAmount)
: minimumBillingAmount
}
// Billing is based on licensed seats from Stripe, not actual member count
// This ensures organizations pay for their seat capacity regardless of utilization
const minimumBillingAmount = licensedSeats * pricePerSeat
// Total usage limit: never below the minimum based on licensed seats
const configuredLimit = organizationData.orgUsageLimit
? Number.parseFloat(organizationData.orgUsageLimit)
: null
const totalUsageLimit =
configuredLimit !== null
? Math.max(configuredLimit, minimumBillingAmount)
: minimumBillingAmount
const averageUsagePerMember = members.length > 0 ? totalCurrentUsage / members.length : 0
// Billing period comes from the organization's subscription
@@ -213,8 +216,24 @@ export async function updateOrganizationUsageLimit(
return { success: false, error: 'No active subscription found' }
}
// Calculate minimum based on seats
const { basePrice } = getPlanPricing(subscription.plan, subscription)
// Enterprise plans have fixed usage limits that cannot be changed
if (subscription.plan === 'enterprise') {
return {
success: false,
error: 'Enterprise plans have fixed usage limits that cannot be changed',
}
}
// Only team plans can update their usage limits
if (subscription.plan !== 'team') {
return {
success: false,
error: 'Only team organizations can update usage limits',
}
}
// Team plans have a minimum based on seats
const { basePrice } = getPlanPricing(subscription.plan)
const minimumLimit = Math.max(subscription.seats || 1, 1) * basePrice
// Validate new limit is not below minimum
@@ -315,3 +334,33 @@ export async function getOrganizationBillingSummary(organizationId: string) {
throw error
}
}
/**
* Check if a user is an owner or admin of a specific organization
*
* @param userId - The ID of the user to check
* @param organizationId - The ID of the organization
* @returns Promise<boolean> - True if the user is an owner or admin of the organization
*/
export async function isOrganizationOwnerOrAdmin(
userId: string,
organizationId: string
): Promise<boolean> {
try {
const memberRecord = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, organizationId)))
.limit(1)
if (memberRecord.length === 0) {
return false
}
const userRole = memberRecord[0].role
return ['owner', 'admin'].includes(userRole)
} catch (error) {
logger.error('Error checking organization ownership/admin status:', error)
return false
}
}

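Condensing the branch above into one hypothetical helper (not code from this PR) to show the two billing models side by side:

// Sketch: enterprise is fixed-cost, team is seat-based with a floor.
function computeOrgLimits(
  plan: string,
  licensedSeats: number,
  pricePerSeat: number,
  configuredLimit: number | null
): { minimumBillingAmount: number; totalUsageLimit: number } {
  if (plan === 'enterprise') {
    // Fixed cost: the configured org limit doubles as the monthly price.
    const fixed = configuredLimit ?? 0
    return { minimumBillingAmount: fixed, totalUsageLimit: fixed }
  }
  // Team: licensed seats from Stripe set the floor.
  const minimum = licensedSeats * pricePerSeat
  return {
    minimumBillingAmount: minimum,
    totalUsageLimit: configuredLimit !== null ? Math.max(configuredLimit, minimum) : minimum,
  }
}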
View File

@@ -157,14 +157,26 @@ export async function hasExceededCostLimit(userId: string): Promise<boolean> {
// Calculate usage limit
let limit = getFreeTierLimit() // Default free tier limit
if (subscription) {
limit = getPerUserMinimumLimit(subscription)
logger.info('Using subscription-based limit', {
userId,
plan: subscription.plan,
seats: subscription.seats || 1,
limit,
})
// Team/Enterprise: Use organization limit
if (subscription.plan === 'team' || subscription.plan === 'enterprise') {
const { getUserUsageLimit } = await import('@/lib/billing/core/usage')
limit = await getUserUsageLimit(userId)
logger.info('Using organization limit', {
userId,
plan: subscription.plan,
limit,
})
} else {
// Pro/Free: Use individual limit
limit = getPerUserMinimumLimit(subscription)
logger.info('Using subscription-based limit', {
userId,
plan: subscription.plan,
limit,
})
}
} else {
logger.info('Using free tier limit', { userId, limit })
}
@@ -231,7 +243,14 @@ export async function getUserSubscriptionState(userId: string): Promise<UserSubs
if (isProd && statsRecords.length > 0) {
let limit = getFreeTierLimit() // Default free tier limit
if (subscription) {
limit = getPerUserMinimumLimit(subscription)
// Team/Enterprise: Use organization limit
if (subscription.plan === 'team' || subscription.plan === 'enterprise') {
const { getUserUsageLimit } = await import('@/lib/billing/core/usage')
limit = await getUserUsageLimit(userId)
} else {
// Pro/Free: Use individual limit
limit = getPerUserMinimumLimit(subscription)
}
}
const currentCost = Number.parseFloat(

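A condensed sketch of the limit-resolution order this hunk establishes (helper name and parameter shape hypothetical):

// Sketch: how a user's effective limit is now chosen.
async function resolveEffectiveLimit(userId: string, sub: { plan: string } | null): Promise<number> {
  if (!sub) return getFreeTierLimit()
  if (sub.plan === 'team' || sub.plan === 'enterprise') {
    const { getUserUsageLimit } = await import('@/lib/billing/core/usage')
    return getUserUsageLimit(userId) // pooled organization limit
  }
  return getPerUserMinimumLimit(sub) // individual Pro (or free-tier) limit
}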
View File

@@ -71,7 +71,7 @@ export async function getUserUsageData(userId: string): Promise<UsageData> {
.limit(1)
const { getPlanPricing } = await import('@/lib/billing/core/billing')
const { basePrice } = getPlanPricing(subscription.plan, subscription)
const { basePrice } = getPlanPricing(subscription.plan)
const minimum = (subscription.seats || 1) * basePrice
if (orgData.length > 0 && orgData[0].orgUsageLimit) {
@@ -144,7 +144,7 @@ export async function getUserUsageLimitInfo(userId: string): Promise<UsageLimitI
.limit(1)
const { getPlanPricing } = await import('@/lib/billing/core/billing')
const { basePrice } = getPlanPricing(subscription.plan, subscription)
const { basePrice } = getPlanPricing(subscription.plan)
const minimum = (subscription.seats || 1) * basePrice
if (orgData.length > 0 && orgData[0].orgUsageLimit) {
@@ -335,14 +335,14 @@ export async function getUserUsageLimit(userId: string): Promise<number> {
if (orgData[0].orgUsageLimit) {
const configured = Number.parseFloat(orgData[0].orgUsageLimit)
const { getPlanPricing } = await import('@/lib/billing/core/billing')
const { basePrice } = getPlanPricing(subscription.plan, subscription)
const { basePrice } = getPlanPricing(subscription.plan)
const minimum = (subscription.seats || 1) * basePrice
return Math.max(configured, minimum)
}
// If org hasn't set a custom limit, use minimum (seats × cost per seat)
const { getPlanPricing } = await import('@/lib/billing/core/billing')
const { basePrice } = getPlanPricing(subscription.plan, subscription)
const { basePrice } = getPlanPricing(subscription.plan)
return (subscription.seats || 1) * basePrice
}

View File

@@ -5,7 +5,7 @@
export * from '@/lib/billing/calculations/usage-monitor'
export * from '@/lib/billing/core/billing'
export * from '@/lib/billing/core/organization-billing'
export * from '@/lib/billing/core/organization'
export * from '@/lib/billing/core/subscription'
export {
getHighestPrioritySubscription as getActiveSubscription,
@@ -23,10 +23,6 @@ export {
updateUserUsageLimit as updateUsageLimit,
} from '@/lib/billing/core/usage'
export * from '@/lib/billing/subscriptions/utils'
export {
canEditUsageLimit as canEditLimit,
getMinimumUsageLimit as getMinimumLimit,
getSubscriptionAllowance as getDefaultLimit,
} from '@/lib/billing/subscriptions/utils'
export { canEditUsageLimit as canEditLimit } from '@/lib/billing/subscriptions/utils'
export * from '@/lib/billing/types'
export * from '@/lib/billing/validation/seat-management'

View File

@@ -1,77 +0,0 @@
import { describe, expect, it, vi } from 'vitest'
import { checkEnterprisePlan, getSubscriptionAllowance } from '@/lib/billing/subscriptions/utils'
vi.mock('@/lib/env', () => ({
env: {
FREE_TIER_COST_LIMIT: 10,
PRO_TIER_COST_LIMIT: 20,
TEAM_TIER_COST_LIMIT: 40,
ENTERPRISE_TIER_COST_LIMIT: 200,
},
isTruthy: (value: string | boolean | number | undefined) =>
typeof value === 'string' ? value.toLowerCase() === 'true' || value === '1' : Boolean(value),
getEnv: (variable: string) => process.env[variable],
}))
describe('Subscription Utilities', () => {
describe('checkEnterprisePlan', () => {
it.concurrent('returns true for active enterprise subscription', () => {
expect(checkEnterprisePlan({ plan: 'enterprise', status: 'active' })).toBeTruthy()
})
it.concurrent('returns false for inactive enterprise subscription', () => {
expect(checkEnterprisePlan({ plan: 'enterprise', status: 'canceled' })).toBeFalsy()
})
it.concurrent('returns false when plan is not enterprise', () => {
expect(checkEnterprisePlan({ plan: 'pro', status: 'active' })).toBeFalsy()
})
})
describe('getSubscriptionAllowance', () => {
it.concurrent('returns free-tier limit when subscription is null', () => {
expect(getSubscriptionAllowance(null)).toBe(10)
})
it.concurrent('returns free-tier limit when subscription is undefined', () => {
expect(getSubscriptionAllowance(undefined)).toBe(10)
})
it.concurrent('returns free-tier limit when subscription is not active', () => {
expect(getSubscriptionAllowance({ plan: 'pro', status: 'canceled', seats: 1 })).toBe(10)
})
it.concurrent('returns pro limit for active pro plan', () => {
expect(getSubscriptionAllowance({ plan: 'pro', status: 'active', seats: 1 })).toBe(20)
})
it.concurrent('returns team limit multiplied by seats', () => {
expect(getSubscriptionAllowance({ plan: 'team', status: 'active', seats: 3 })).toBe(3 * 40)
})
it.concurrent('returns enterprise limit using perSeatPrice metadata', () => {
const sub = {
plan: 'enterprise',
status: 'active',
seats: 10,
metadata: { perSeatPrice: 150 },
}
expect(getSubscriptionAllowance(sub)).toBe(10 * 150)
})
it.concurrent('returns enterprise limit using perSeatPrice as string', () => {
const sub = {
plan: 'enterprise',
status: 'active',
seats: 8,
metadata: { perSeatPrice: '250' },
}
expect(getSubscriptionAllowance(sub)).toBe(8 * 250)
})
it.concurrent('falls back to default enterprise tier when metadata missing', () => {
const sub = { plan: 'enterprise', status: 'active', seats: 2, metadata: {} }
expect(getSubscriptionAllowance(sub)).toBe(2 * 200)
})
})
})

View File

@@ -4,7 +4,6 @@ import {
DEFAULT_PRO_TIER_COST_LIMIT,
DEFAULT_TEAM_TIER_COST_LIMIT,
} from '@/lib/billing/constants'
import type { EnterpriseSubscriptionMetadata } from '@/lib/billing/types'
import { env } from '@/lib/env'
/**
@@ -47,51 +46,10 @@ export function checkTeamPlan(subscription: any): boolean {
return subscription?.plan === 'team' && subscription?.status === 'active'
}
/**
* Calculate the total subscription-level allowance (what the org/user gets for their base payment)
* - Pro: Fixed amount per user
* - Team: Seats * base price (pooled for the org)
* - Enterprise: Seats * per-seat price (pooled, with optional custom pricing in metadata)
* @param subscription The subscription object
* @returns The total subscription allowance in dollars
*/
export function getSubscriptionAllowance(subscription: any): number {
if (!subscription || subscription.status !== 'active') {
return getFreeTierLimit()
}
const seats = subscription.seats || 1
if (subscription.plan === 'pro') {
return getProTierLimit()
}
if (subscription.plan === 'team') {
return seats * getTeamTierLimitPerSeat()
}
if (subscription.plan === 'enterprise') {
const metadata = subscription.metadata as EnterpriseSubscriptionMetadata | undefined
// Enterprise uses per-seat pricing (pooled like Team)
// Custom per-seat price can be set in metadata
let perSeatPrice = getEnterpriseTierLimitPerSeat()
if (metadata?.perSeatPrice) {
const parsed = Number.parseFloat(String(metadata.perSeatPrice))
if (parsed > 0 && !Number.isNaN(parsed)) {
perSeatPrice = parsed
}
}
return seats * perSeatPrice
}
return getFreeTierLimit()
}
/**
* Get the minimum usage limit for an individual user (used for validation)
* - Pro: User's plan minimum
* - Team: 0 (pooled model, no individual minimums)
* - Enterprise: 0 (pooled model, no individual minimums)
* Only applicable for plans with individual limits (Free/Pro)
* Team and Enterprise plans use organization-level limits instead
* @param subscription The subscription object
* @returns The per-user minimum limit in dollars
*/
@@ -100,27 +58,15 @@ export function getPerUserMinimumLimit(subscription: any): number {
return getFreeTierLimit()
}
const seats = subscription.seats || 1
if (subscription.plan === 'pro') {
return getProTierLimit()
}
if (subscription.plan === 'team') {
// For team plans, return the total pooled limit (seats * cost per seat)
// This becomes the user's individual limit representing their share of the team pool
return seats * getTeamTierLimitPerSeat()
}
if (subscription.plan === 'enterprise') {
// For enterprise plans, return the total pooled limit (seats * cost per seat)
// This becomes the user's individual limit representing their share of the enterprise pool
let perSeatPrice = getEnterpriseTierLimitPerSeat()
if (subscription.metadata?.perSeatPrice) {
const parsed = Number.parseFloat(String(subscription.metadata.perSeatPrice))
if (parsed > 0 && !Number.isNaN(parsed)) {
perSeatPrice = parsed
}
}
return seats * perSeatPrice
if (subscription.plan === 'team' || subscription.plan === 'enterprise') {
// Team and Enterprise don't have individual limits - they use organization limits
// This function should not be called for these plans
// Returning 0 to indicate no individual minimum
return 0
}
return getFreeTierLimit()
@@ -128,7 +74,8 @@ export function getPerUserMinimumLimit(subscription: any): number {
/**
* Check if a user can edit their usage limits based on their subscription
* Free plan users cannot edit limits, paid plan users can
* Free and Enterprise plans cannot edit limits
* Pro and Team plans can increase their limits
* @param subscription The subscription object
* @returns Whether the user can edit their usage limits
*/
@@ -137,19 +84,7 @@ export function canEditUsageLimit(subscription: any): boolean {
return false // Free plan users cannot edit limits
}
return (
subscription.plan === 'pro' ||
subscription.plan === 'team' ||
subscription.plan === 'enterprise'
)
}
/**
* Get the minimum allowed usage limit for a subscription
* This prevents users from setting limits below their plan's base amount
* @param subscription The subscription object
* @returns The minimum allowed usage limit in dollars
*/
export function getMinimumUsageLimit(subscription: any): number {
return getPerUserMinimumLimit(subscription)
// Only Pro and Team plans can edit limits
// Enterprise has fixed limits that match their monthly cost
return subscription.plan === 'pro' || subscription.plan === 'team'
}

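Expected results after the simplification, as a sketch (statuses assumed active; exact guard behavior for missing subscriptions is not shown in this hunk):

canEditUsageLimit({ plan: 'pro', status: 'active' }) // true
canEditUsageLimit({ plan: 'team', status: 'active' }) // true
canEditUsageLimit({ plan: 'enterprise', status: 'active' }) // false, fixed limits
getPerUserMinimumLimit({ plan: 'pro', status: 'active' }) // getProTierLimit()
getPerUserMinimumLimit({ plan: 'team', status: 'active', seats: 3 }) // 0, pooled org-level limits apply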
View File

@@ -5,15 +5,15 @@
export interface EnterpriseSubscriptionMetadata {
plan: 'enterprise'
// Custom per-seat pricing (defaults to DEFAULT_ENTERPRISE_TIER_COST_LIMIT)
// The referenceId must be provided in Stripe metadata to link to the organization
// This gets stored in the subscription.referenceId column
referenceId: string
perSeatPrice?: number
// Maximum allowed seats (defaults to subscription.seats)
maxSeats?: number
// Whether seats are fixed and cannot be changed
fixedSeats?: boolean
// The fixed monthly price for this enterprise customer (as string from Stripe metadata)
// This will be used to set the organization's usage limit
monthlyPrice: string
// Number of seats for invitation limits (not for billing) (as string from Stripe metadata)
// We set Stripe quantity to 1 and use this for actual seat count
seats: string
}
export interface UsageData {

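What the Stripe metadata is expected to look like under the new shape. Values are strings because Stripe metadata is string-valued; the example values are hypothetical:

const metadata: EnterpriseSubscriptionMetadata = {
  plan: 'enterprise',
  referenceId: 'org_123', // the organization ID
  monthlyPrice: '2000.00', // fixed monthly cost, drives the org usage limit
  seats: '25', // invitation cap only; Stripe quantity stays 1
}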
View File

@@ -1,6 +1,5 @@
import { and, count, eq } from 'drizzle-orm'
import { getOrganizationSubscription } from '@/lib/billing/core/billing'
import type { EnterpriseSubscriptionMetadata } from '@/lib/billing/types'
import { quickValidateEmail } from '@/lib/email/validation'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
@@ -67,26 +66,9 @@ export async function validateSeatAvailability(
const currentSeats = memberCount[0]?.count || 0
// Determine seat limits based on subscription
let maxSeats = subscription.seats || 1
// For enterprise plans, check metadata for custom seat allowances
if (subscription.plan === 'enterprise' && subscription.metadata) {
try {
const metadata: EnterpriseSubscriptionMetadata =
typeof subscription.metadata === 'string'
? JSON.parse(subscription.metadata)
: subscription.metadata
if (metadata.maxSeats && typeof metadata.maxSeats === 'number') {
maxSeats = metadata.maxSeats
}
} catch (error) {
logger.warn('Failed to parse enterprise subscription metadata', {
organizationId,
metadata: subscription.metadata,
error,
})
}
}
// Team: seats from Stripe subscription quantity
// Enterprise: seats from metadata (stored in subscription.seats)
const maxSeats = subscription.seats || 1
const availableSeats = Math.max(0, maxSeats - currentSeats)
const canInvite = availableSeats >= additionalSeats
@@ -162,24 +144,11 @@ export async function getOrganizationSeatInfo(
const currentSeats = memberCount[0]?.count || 0
// Determine seat limits
let maxSeats = subscription.seats || 1
let canAddSeats = true
const maxSeats = subscription.seats || 1
if (subscription.plan === 'enterprise' && subscription.metadata) {
try {
const metadata: EnterpriseSubscriptionMetadata =
typeof subscription.metadata === 'string'
? JSON.parse(subscription.metadata)
: subscription.metadata
if (metadata.maxSeats && typeof metadata.maxSeats === 'number') {
maxSeats = metadata.maxSeats
}
// Enterprise plans might have fixed seat counts
canAddSeats = !metadata.fixedSeats
} catch (error) {
logger.warn('Failed to parse enterprise subscription metadata', { organizationId, error })
}
}
// Enterprise plans have fixed seats (can't self-serve changes)
// Team plans can add seats through Stripe
const canAddSeats = subscription.plan !== 'enterprise'
const availableSeats = Math.max(0, maxSeats - currentSeats)

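The seat math that remains after dropping the metadata parsing, condensed into a sketch:

// Both plans now read seats straight off the subscription row:
const maxSeats = subscription.seats || 1 // team: Stripe quantity; enterprise: parsed from metadata at webhook time
const availableSeats = Math.max(0, maxSeats - currentSeats)
const canAddSeats = subscription.plan !== 'enterprise' // enterprise seats are fixed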
View File

@@ -0,0 +1,251 @@
import { eq } from 'drizzle-orm'
import type Stripe from 'stripe'
import {
getEmailSubject,
renderEnterpriseSubscriptionEmail,
} from '@/components/emails/render-email'
import { sendEmail } from '@/lib/email/mailer'
import { getFromEmailAddress } from '@/lib/email/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { organization, subscription, user } from '@/db/schema'
import type { EnterpriseSubscriptionMetadata } from '../types'
const logger = createLogger('BillingEnterprise')
function isEnterpriseMetadata(value: unknown): value is EnterpriseSubscriptionMetadata {
return (
!!value &&
typeof value === 'object' &&
'plan' in value &&
'referenceId' in value &&
'monthlyPrice' in value &&
'seats' in value &&
typeof value.plan === 'string' &&
value.plan.toLowerCase() === 'enterprise' &&
typeof value.referenceId === 'string' &&
typeof value.monthlyPrice === 'string' &&
typeof value.seats === 'string'
)
}
export async function handleManualEnterpriseSubscription(event: Stripe.Event) {
const stripeSubscription = event.data.object as Stripe.Subscription
const metaPlan = (stripeSubscription.metadata?.plan as string | undefined)?.toLowerCase() || ''
if (metaPlan !== 'enterprise') {
logger.info('[subscription.created] Skipping non-enterprise subscription', {
subscriptionId: stripeSubscription.id,
plan: metaPlan || 'unknown',
})
return
}
const stripeCustomerId = stripeSubscription.customer as string
if (!stripeCustomerId) {
logger.error('[subscription.created] Missing Stripe customer ID', {
subscriptionId: stripeSubscription.id,
})
throw new Error('Missing Stripe customer ID on subscription')
}
const metadata = stripeSubscription.metadata || {}
const referenceId =
typeof metadata.referenceId === 'string' && metadata.referenceId.length > 0
? metadata.referenceId
: null
if (!referenceId) {
logger.error('[subscription.created] Unable to resolve referenceId', {
subscriptionId: stripeSubscription.id,
stripeCustomerId,
})
throw new Error('Unable to resolve referenceId for subscription')
}
if (!isEnterpriseMetadata(metadata)) {
logger.error('[subscription.created] Invalid enterprise metadata shape', {
subscriptionId: stripeSubscription.id,
metadata,
})
throw new Error('Invalid enterprise metadata for subscription')
}
const enterpriseMetadata = metadata
const metadataJson: Record<string, unknown> = { ...enterpriseMetadata }
// Extract and parse seats and monthly price from metadata (they come as strings from Stripe)
const seats = Number.parseInt(enterpriseMetadata.seats, 10)
const monthlyPrice = Number.parseFloat(enterpriseMetadata.monthlyPrice)
if (!seats || seats <= 0 || Number.isNaN(seats)) {
logger.error('[subscription.created] Invalid or missing seats in enterprise metadata', {
subscriptionId: stripeSubscription.id,
seatsRaw: enterpriseMetadata.seats,
seatsParsed: seats,
})
throw new Error('Enterprise subscription must include valid seats in metadata')
}
if (!monthlyPrice || monthlyPrice <= 0 || Number.isNaN(monthlyPrice)) {
logger.error('[subscription.created] Invalid or missing monthlyPrice in enterprise metadata', {
subscriptionId: stripeSubscription.id,
monthlyPriceRaw: enterpriseMetadata.monthlyPrice,
monthlyPriceParsed: monthlyPrice,
})
throw new Error('Enterprise subscription must include valid monthlyPrice in metadata')
}
const subscriptionRow = {
id: crypto.randomUUID(),
plan: 'enterprise',
referenceId,
stripeCustomerId,
stripeSubscriptionId: stripeSubscription.id,
status: stripeSubscription.status || null,
periodStart: stripeSubscription.current_period_start
? new Date(stripeSubscription.current_period_start * 1000)
: null,
periodEnd: stripeSubscription.current_period_end
? new Date(stripeSubscription.current_period_end * 1000)
: null,
cancelAtPeriodEnd: stripeSubscription.cancel_at_period_end ?? null,
seats,
trialStart: stripeSubscription.trial_start
? new Date(stripeSubscription.trial_start * 1000)
: null,
trialEnd: stripeSubscription.trial_end ? new Date(stripeSubscription.trial_end * 1000) : null,
metadata: metadataJson,
}
const existing = await db
.select({ id: subscription.id })
.from(subscription)
.where(eq(subscription.stripeSubscriptionId, stripeSubscription.id))
.limit(1)
if (existing.length > 0) {
await db
.update(subscription)
.set({
plan: subscriptionRow.plan,
referenceId: subscriptionRow.referenceId,
stripeCustomerId: subscriptionRow.stripeCustomerId,
status: subscriptionRow.status,
periodStart: subscriptionRow.periodStart,
periodEnd: subscriptionRow.periodEnd,
cancelAtPeriodEnd: subscriptionRow.cancelAtPeriodEnd,
seats: subscriptionRow.seats,
trialStart: subscriptionRow.trialStart,
trialEnd: subscriptionRow.trialEnd,
metadata: subscriptionRow.metadata,
})
.where(eq(subscription.stripeSubscriptionId, stripeSubscription.id))
} else {
await db.insert(subscription).values(subscriptionRow)
}
// Update the organization's usage limit to match the monthly price
// The referenceId for enterprise plans is the organization ID
try {
await db
.update(organization)
.set({
orgUsageLimit: monthlyPrice.toFixed(2),
updatedAt: new Date(),
})
.where(eq(organization.id, referenceId))
logger.info('[subscription.created] Updated organization usage limit', {
organizationId: referenceId,
usageLimit: monthlyPrice,
})
} catch (error) {
logger.error('[subscription.created] Failed to update organization usage limit', {
organizationId: referenceId,
usageLimit: monthlyPrice,
error,
})
// Don't throw - the subscription was created successfully, just log the error
}
logger.info('[subscription.created] Upserted enterprise subscription', {
subscriptionId: subscriptionRow.id,
referenceId: subscriptionRow.referenceId,
plan: subscriptionRow.plan,
status: subscriptionRow.status,
monthlyPrice,
seats,
note: 'Seats from metadata, Stripe quantity set to 1',
})
try {
const userDetails = await db
.select({
id: user.id,
name: user.name,
email: user.email,
})
.from(user)
.where(eq(user.stripeCustomerId, stripeCustomerId))
.limit(1)
const orgDetails = await db
.select({
id: organization.id,
name: organization.name,
})
.from(organization)
.where(eq(organization.id, referenceId))
.limit(1)
if (userDetails.length > 0 && orgDetails.length > 0) {
const user = userDetails[0]
const org = orgDetails[0]
const html = await renderEnterpriseSubscriptionEmail(user.name || user.email, user.email)
const emailResult = await sendEmail({
to: user.email,
subject: getEmailSubject('enterprise-subscription'),
html,
from: getFromEmailAddress(),
emailType: 'transactional',
})
if (emailResult.success) {
logger.info('[subscription.created] Enterprise subscription email sent successfully', {
userId: user.id,
email: user.email,
organizationId: org.id,
subscriptionId: subscriptionRow.id,
})
} else {
logger.warn('[subscription.created] Failed to send enterprise subscription email', {
userId: user.id,
email: user.email,
error: emailResult.message,
})
}
} else {
logger.warn(
'[subscription.created] Could not find user or organization for email notification',
{
userFound: userDetails.length > 0,
orgFound: orgDetails.length > 0,
stripeCustomerId,
referenceId,
}
)
}
} catch (emailError) {
logger.error('[subscription.created] Error sending enterprise subscription email', {
error: emailError,
stripeCustomerId,
referenceId,
subscriptionId: subscriptionRow.id,
})
}
}

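How the type guard is expected to narrow raw Stripe metadata before the numeric parsing above (the object literal is hypothetical):

const raw: unknown = {
  plan: 'enterprise',
  referenceId: 'org_123',
  monthlyPrice: '2000.00', // Stripe metadata values are always strings
  seats: '25',
}
if (isEnterpriseMetadata(raw)) {
  // Narrowed to EnterpriseSubscriptionMetadata
  const seats = Number.parseInt(raw.seats, 10) // 25, used for invitation limits
  const monthlyPrice = Number.parseFloat(raw.monthlyPrice) // 2000, becomes orgUsageLimit
}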
View File

@@ -197,6 +197,7 @@ export async function handleInvoicePaymentFailed(event: Stripe.Event) {
/**
* Handle base invoice finalized → create a separate overage-only invoice
* Note: Enterprise plans no longer have overages
*/
export async function handleInvoiceFinalized(event: Stripe.Event) {
try {
@@ -215,14 +216,22 @@ export async function handleInvoiceFinalized(event: Stripe.Event) {
if (records.length === 0) return
const sub = records[0]
// Always reset usage at cycle end for all plans
await resetUsageForSubscription({ plan: sub.plan, referenceId: sub.referenceId })
// Enterprise plans have no overages - skip overage invoice creation
if (sub.plan === 'enterprise') {
return
}
const stripe = requireStripeClient()
const periodEnd =
invoice.lines?.data?.[0]?.period?.end || invoice.period_end || Math.floor(Date.now() / 1000)
const billingPeriod = new Date(periodEnd * 1000).toISOString().slice(0, 7)
// Compute overage
// Compute overage (only for team and pro plans)
let totalOverage = 0
if (sub.plan === 'team' || sub.plan === 'enterprise') {
if (sub.plan === 'team') {
const members = await db
.select({ userId: member.userId })
.from(member)
@@ -235,19 +244,16 @@ export async function handleInvoiceFinalized(event: Stripe.Event) {
}
const { getPlanPricing } = await import('@/lib/billing/core/billing')
const { basePrice } = getPlanPricing(sub.plan, sub)
const { basePrice } = getPlanPricing(sub.plan)
const baseSubscriptionAmount = (sub.seats || 1) * basePrice
totalOverage = Math.max(0, totalTeamUsage - baseSubscriptionAmount)
} else {
const usage = await getUserUsageData(sub.referenceId)
const { getPlanPricing } = await import('@/lib/billing/core/billing')
const { basePrice } = getPlanPricing(sub.plan, sub)
const { basePrice } = getPlanPricing(sub.plan)
totalOverage = Math.max(0, usage.currentUsage - basePrice)
}
// Always reset usage at cycle end, regardless of whether overage > 0
await resetUsageForSubscription({ plan: sub.plan, referenceId: sub.referenceId })
if (totalOverage <= 0) return
const customerId = String(invoice.customer)

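The overage computation that survives this change, reduced to one hypothetical helper:

// Sketch: enterprise is fixed-cost, so only team and pro can accrue overage.
function computeOverage(plan: string, usage: number, seats: number, basePrice: number): number {
  if (plan === 'enterprise') return 0
  const prepaid = plan === 'team' ? (seats || 1) * basePrice : basePrice
  return Math.max(0, usage - prepaid)
}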
View File

@@ -1,53 +0,0 @@
/**
* Knowledge base and document constants
*/
// Tag slot configuration by field type
// Each field type maps to specific database columns
export const TAG_SLOT_CONFIG = {
text: {
slots: ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] as const,
maxSlots: 7,
},
// Future field types would be added here with their own database columns
// date: {
// slots: ['tag8', 'tag9'] as const,
// maxSlots: 2,
// },
// number: {
// slots: ['tag10', 'tag11'] as const,
// maxSlots: 2,
// },
} as const
// Currently supported field types
export const SUPPORTED_FIELD_TYPES = Object.keys(TAG_SLOT_CONFIG) as Array<
keyof typeof TAG_SLOT_CONFIG
>
// All tag slots (for backward compatibility)
export const TAG_SLOTS = TAG_SLOT_CONFIG.text.slots
// Maximum number of tag slots for text type (for backward compatibility)
export const MAX_TAG_SLOTS = TAG_SLOT_CONFIG.text.maxSlots
// Type for tag slot names
export type TagSlot = (typeof TAG_SLOTS)[number]
// Helper function to get available slots for a field type
export function getSlotsForFieldType(fieldType: string): readonly string[] {
const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
if (!config) {
return [] // Return empty array for unsupported field types - system will naturally handle this
}
return config.slots
}
// Helper function to get max slots for a field type
export function getMaxSlotsForFieldType(fieldType: string): number {
const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
if (!config) {
return 0 // Return 0 for unsupported field types
}
return config.maxSlots
}

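This file appears to move to @/lib/knowledge/consts (the new import path seen in the hook diffs above) with the helper behavior unchanged. A quick sketch of what the helpers return:

getSlotsForFieldType('text') // ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7']
getSlotsForFieldType('date') // [] until a date slot config is added
getMaxSlotsForFieldType('text') // 7
getMaxSlotsForFieldType('number') // 0 for unsupported field types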
View File

@@ -1,139 +1,108 @@
import { createReadStream, existsSync } from 'fs'
import { Readable } from 'stream'
import csvParser from 'csv-parser'
import { existsSync, readFileSync } from 'fs'
import * as Papa from 'papaparse'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('CsvParser')
const PARSE_OPTIONS = {
header: true,
skipEmptyLines: true,
transformHeader: (header: string) => sanitizeTextForUTF8(String(header)),
transform: (value: string) => sanitizeTextForUTF8(String(value || '')),
}
export class CsvParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
return new Promise((resolve, reject) => {
try {
// Validate input
if (!filePath) {
return reject(new Error('No file path provided'))
}
// Check if file exists
if (!existsSync(filePath)) {
return reject(new Error(`File not found: ${filePath}`))
}
const results: Record<string, any>[] = []
const headers: string[] = []
createReadStream(filePath)
.on('error', (error: Error) => {
logger.error('CSV stream error:', error)
reject(new Error(`Failed to read CSV file: ${error.message}`))
})
.pipe(csvParser())
.on('headers', (headerList: string[]) => {
headers.push(...headerList)
})
.on('data', (data: Record<string, any>) => {
results.push(data)
})
.on('end', () => {
// Convert CSV data to a formatted string representation
let content = ''
// Add headers
if (headers.length > 0) {
const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
content += `${cleanHeaders.join(', ')}\n`
}
// Add rows
results.forEach((row) => {
const cleanValues = Object.values(row).map((v) =>
sanitizeTextForUTF8(String(v || ''))
)
content += `${cleanValues.join(', ')}\n`
})
resolve({
content: sanitizeTextForUTF8(content),
metadata: {
rowCount: results.length,
headers: headers,
rawData: results,
},
})
})
.on('error', (error: Error) => {
logger.error('CSV parsing error:', error)
reject(new Error(`Failed to parse CSV file: ${error.message}`))
})
} catch (error) {
logger.error('CSV general error:', error)
reject(new Error(`Failed to process CSV file: ${(error as Error).message}`))
try {
if (!filePath) {
throw new Error('No file path provided')
}
})
if (!existsSync(filePath)) {
throw new Error(`File not found: ${filePath}`)
}
const fileContent = readFileSync(filePath, 'utf8')
const parseResult = Papa.parse(fileContent, PARSE_OPTIONS)
if (parseResult.errors && parseResult.errors.length > 0) {
const errorMessages = parseResult.errors.map((err) => err.message).join(', ')
logger.error('CSV parsing errors:', parseResult.errors)
throw new Error(`Failed to parse CSV file: ${errorMessages}`)
}
const results = parseResult.data as Record<string, any>[]
const headers = parseResult.meta.fields || []
let content = ''
if (headers.length > 0) {
const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
content += `${cleanHeaders.join(', ')}\n`
}
results.forEach((row) => {
const cleanValues = Object.values(row).map((v) => sanitizeTextForUTF8(String(v || '')))
content += `${cleanValues.join(', ')}\n`
})
return {
content: sanitizeTextForUTF8(content),
metadata: {
rowCount: results.length,
headers: headers,
rawData: results,
},
}
} catch (error) {
logger.error('CSV general error:', error)
throw new Error(`Failed to process CSV file: ${(error as Error).message}`)
}
}
async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
return new Promise((resolve, reject) => {
try {
logger.info('Parsing buffer, size:', buffer.length)
try {
logger.info('Parsing buffer, size:', buffer.length)
const results: Record<string, any>[] = []
const headers: string[] = []
const fileContent = buffer.toString('utf8')
// Create a readable stream from the buffer
const bufferStream = new Readable()
bufferStream.push(buffer)
bufferStream.push(null) // Signal the end of the stream
const parseResult = Papa.parse(fileContent, PARSE_OPTIONS)
bufferStream
.on('error', (error: Error) => {
logger.error('CSV buffer stream error:', error)
reject(new Error(`Failed to read CSV buffer: ${error.message}`))
})
.pipe(csvParser())
.on('headers', (headerList: string[]) => {
headers.push(...headerList)
})
.on('data', (data: Record<string, any>) => {
results.push(data)
})
.on('end', () => {
// Convert CSV data to a formatted string representation
let content = ''
// Add headers
if (headers.length > 0) {
const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
content += `${cleanHeaders.join(', ')}\n`
}
// Add rows
results.forEach((row) => {
const cleanValues = Object.values(row).map((v) =>
sanitizeTextForUTF8(String(v || ''))
)
content += `${cleanValues.join(', ')}\n`
})
resolve({
content: sanitizeTextForUTF8(content),
metadata: {
rowCount: results.length,
headers: headers,
rawData: results,
},
})
})
.on('error', (error: Error) => {
logger.error('CSV parsing error:', error)
reject(new Error(`Failed to parse CSV buffer: ${error.message}`))
})
} catch (error) {
logger.error('CSV buffer parsing error:', error)
reject(new Error(`Failed to process CSV buffer: ${(error as Error).message}`))
if (parseResult.errors && parseResult.errors.length > 0) {
const errorMessages = parseResult.errors.map((err) => err.message).join(', ')
logger.error('CSV parsing errors:', parseResult.errors)
throw new Error(`Failed to parse CSV buffer: ${errorMessages}`)
}
})
const results = parseResult.data as Record<string, any>[]
const headers = parseResult.meta.fields || []
let content = ''
if (headers.length > 0) {
const cleanHeaders = headers.map((h) => sanitizeTextForUTF8(String(h)))
content += `${cleanHeaders.join(', ')}\n`
}
results.forEach((row) => {
const cleanValues = Object.values(row).map((v) => sanitizeTextForUTF8(String(v || '')))
content += `${cleanValues.join(', ')}\n`
})
return {
content: sanitizeTextForUTF8(content),
metadata: {
rowCount: results.length,
headers: headers,
rawData: results,
},
}
} catch (error) {
logger.error('CSV buffer parsing error:', error)
throw new Error(`Failed to process CSV buffer: ${(error as Error).message}`)
}
}
}

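A minimal sketch of the synchronous papaparse flow the rewrite relies on, with the options trimmed to the essentials and a hypothetical input:

import * as Papa from 'papaparse'

const sample = 'name,age\nAda,36\nAlan,41\n'
const parsed = Papa.parse(sample, { header: true, skipEmptyLines: true })
parsed.meta.fields // ['name', 'age']
parsed.data // [{ name: 'Ada', age: '36' }, { name: 'Alan', age: '41' }]
parsed.errors // [] on clean input; the parser class above turns any entries into thrown errors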
View File

@@ -9,19 +9,16 @@ const logger = createLogger('DocParser')
export class DocParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
// Validate input
if (!filePath) {
throw new Error('No file path provided')
}
// Check if file exists
if (!existsSync(filePath)) {
throw new Error(`File not found: ${filePath}`)
}
logger.info(`Parsing DOC file: ${filePath}`)
// Read the file
const buffer = await readFile(filePath)
return this.parseBuffer(buffer)
} catch (error) {
@@ -38,45 +35,37 @@ export class DocParser implements FileParser {
throw new Error('Empty buffer provided')
}
// Try to dynamically import the word extractor
let WordExtractor
let parseOfficeAsync
try {
WordExtractor = (await import('word-extractor')).default
const officeParser = await import('officeparser')
parseOfficeAsync = officeParser.parseOfficeAsync
} catch (importError) {
logger.warn('word-extractor not available, using fallback extraction')
logger.warn('officeparser not available, using fallback extraction')
return this.fallbackExtraction(buffer)
}
try {
const extractor = new WordExtractor()
const extracted = await extractor.extract(buffer)
const result = await parseOfficeAsync(buffer)
const content = sanitizeTextForUTF8(extracted.getBody())
const headers = extracted.getHeaders()
const footers = extracted.getFooters()
// Combine body with headers/footers if they exist
let fullContent = content
if (headers?.trim()) {
fullContent = `${sanitizeTextForUTF8(headers)}\n\n${fullContent}`
}
if (footers?.trim()) {
fullContent = `${fullContent}\n\n${sanitizeTextForUTF8(footers)}`
if (!result) {
throw new Error('officeparser returned no result')
}
logger.info('DOC parsing completed successfully')
const resultString = typeof result === 'string' ? result : String(result)
const content = sanitizeTextForUTF8(resultString.trim())
logger.info('DOC parsing completed successfully with officeparser')
return {
content: fullContent.trim(),
content: content,
metadata: {
hasHeaders: !!headers?.trim(),
hasFooters: !!footers?.trim(),
characterCount: fullContent.length,
extractionMethod: 'word-extractor',
characterCount: content.length,
extractionMethod: 'officeparser',
},
}
} catch (extractError) {
logger.warn('word-extractor failed, using fallback:', extractError)
logger.warn('officeparser failed, using fallback:', extractError)
return this.fallbackExtraction(buffer)
}
} catch (error) {
@@ -85,25 +74,16 @@ export class DocParser implements FileParser {
}
}
/**
* Fallback extraction method for when word-extractor is not available
* This is a very basic extraction that looks for readable text in the binary
*/
private fallbackExtraction(buffer: Buffer): FileParseResult {
logger.info('Using fallback text extraction for DOC file')
// Convert buffer to string and try to extract readable text
// This is very basic and won't work well for complex DOC files
const text = buffer.toString('utf8', 0, Math.min(buffer.length, 100000)) // Limit to first 100KB
const text = buffer.toString('utf8', 0, Math.min(buffer.length, 100000))
// Extract sequences of printable ASCII characters
const readableText = text
.match(/[\x20-\x7E\s]{4,}/g) // Find sequences of 4+ printable characters
.match(/[\x20-\x7E\s]{4,}/g)
?.filter(
(chunk) =>
chunk.trim().length > 10 && // Minimum length
/[a-zA-Z]/.test(chunk) && // Must contain letters
!/^[\x00-\x1F]*$/.test(chunk) // Not just control characters
chunk.trim().length > 10 && /[a-zA-Z]/.test(chunk) && !/^[\x00-\x1F]*$/.test(chunk)
)
.join(' ')
.replace(/\s+/g, ' ')
@@ -118,8 +98,7 @@ export class DocParser implements FileParser {
metadata: {
extractionMethod: 'fallback',
characterCount: content.length,
warning:
'Basic text extraction used. For better results, install word-extractor package or convert to DOCX format.',
warning: 'Basic text extraction used. For better results, convert to DOCX format.',
},
}
}

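For reference, the officeparser entry point used above takes a Buffer and resolves to the extracted text. The snippet below is a sketch of that call, not code from this PR:

import { parseOfficeAsync } from 'officeparser'

const text = await parseOfficeAsync(buffer) // resolves to a string of extracted text
// Legacy binary .doc files that officeparser cannot handle still fall through
// to the regex-based fallbackExtraction above.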
View File

@@ -14,15 +14,12 @@ interface MammothResult {
export class DocxParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
// Validate input
if (!filePath) {
throw new Error('No file path provided')
}
// Read the file
const buffer = await readFile(filePath)
// Use parseBuffer for consistent implementation
return this.parseBuffer(buffer)
} catch (error) {
logger.error('DOCX file error:', error)
@@ -34,10 +31,8 @@ export class DocxParser implements FileParser {
try {
logger.info('Parsing buffer, size:', buffer.length)
// Extract text with mammoth
const result = await mammoth.extractRawText({ buffer })
// Extract HTML for metadata (optional - won't fail if this fails)
let htmlResult: MammothResult = { value: '', messages: [] }
try {
htmlResult = await mammoth.convertToHtml({ buffer })

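The two mammoth calls above are independent: raw-text extraction is the primary path, and HTML conversion is best-effort metadata whose failure is tolerated. A sketch, with the import style assumed:

import mammoth from 'mammoth'

const { value: text } = await mammoth.extractRawText({ buffer }) // plain text, required
const { value: html } = await mammoth.convertToHtml({ buffer }) // optional; errors are swallowed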
View File

@@ -0,0 +1,283 @@
import { readFile } from 'fs/promises'
import * as cheerio from 'cheerio'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('HtmlParser')
export class HtmlParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
if (!filePath) {
throw new Error('No file path provided')
}
const buffer = await readFile(filePath)
return this.parseBuffer(buffer)
} catch (error) {
logger.error('HTML file error:', error)
throw new Error(`Failed to parse HTML file: ${(error as Error).message}`)
}
}
async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
try {
logger.info('Parsing HTML buffer, size:', buffer.length)
const htmlContent = buffer.toString('utf-8')
const $ = cheerio.load(htmlContent)
// Extract meta information before removing tags
const title = $('title').text().trim()
const metaDescription = $('meta[name="description"]').attr('content') || ''
$('script, style, noscript, meta, link, iframe, object, embed, svg').remove()
$.root()
.contents()
.filter(function () {
return this.type === 'comment'
})
.remove()
const content = this.extractStructuredText($)
const sanitizedContent = sanitizeTextForUTF8(content)
const characterCount = sanitizedContent.length
const wordCount = sanitizedContent.split(/\s+/).filter((word) => word.length > 0).length
const estimatedTokenCount = Math.ceil(characterCount / 4)
const headings = this.extractHeadings($)
const links = this.extractLinks($)
return {
content: sanitizedContent,
metadata: {
title,
metaDescription,
characterCount,
wordCount,
tokenCount: estimatedTokenCount,
headings,
links: links.slice(0, 50),
hasImages: $('img').length > 0,
imageCount: $('img').length,
hasTable: $('table').length > 0,
tableCount: $('table').length,
hasList: $('ul, ol').length > 0,
listCount: $('ul, ol').length,
},
}
} catch (error) {
logger.error('HTML buffer parsing error:', error)
throw new Error(`Failed to parse HTML buffer: ${(error as Error).message}`)
}
}
/**
* Extract structured text content preserving document hierarchy
*/
private extractStructuredText($: cheerio.CheerioAPI): string {
const contentParts: string[] = []
const rootElement = $('body').length > 0 ? $('body') : $.root()
this.processElement($, rootElement, contentParts, 0)
return contentParts.join('\n').trim()
}
/**
* Recursively process elements to extract text with structure
*/
private processElement(
$: cheerio.CheerioAPI,
element: cheerio.Cheerio<any>,
contentParts: string[],
depth: number
): void {
element.contents().each((_, node) => {
if (node.type === 'text') {
const text = $(node).text().trim()
if (text) {
contentParts.push(text)
}
} else if (node.type === 'tag') {
const $node = $(node)
const tagName = node.tagName?.toLowerCase()
switch (tagName) {
case 'h1':
case 'h2':
case 'h3':
case 'h4':
case 'h5':
case 'h6': {
const headingText = $node.text().trim()
if (headingText) {
contentParts.push(`\n${headingText}\n`)
}
break
}
case 'p': {
const paragraphText = $node.text().trim()
if (paragraphText) {
contentParts.push(`${paragraphText}\n`)
}
break
}
case 'br':
contentParts.push('\n')
break
case 'hr':
contentParts.push('\n---\n')
break
case 'li': {
const listItemText = $node.text().trim()
if (listItemText) {
const indent = ' '.repeat(Math.min(depth, 3))
contentParts.push(`${indent}${listItemText}`)
}
break
}
case 'ul':
case 'ol':
contentParts.push('\n')
this.processElement($, $node, contentParts, depth + 1)
contentParts.push('\n')
break
case 'table':
this.processTable($, $node, contentParts)
break
case 'blockquote': {
const quoteText = $node.text().trim()
if (quoteText) {
contentParts.push(`\n> ${quoteText}\n`)
}
break
}
case 'pre':
case 'code': {
const codeText = $node.text().trim()
if (codeText) {
contentParts.push(`\n\`\`\`\n${codeText}\n\`\`\`\n`)
}
break
}
case 'div':
case 'section':
case 'article':
case 'main':
case 'aside':
case 'nav':
case 'header':
case 'footer':
this.processElement($, $node, contentParts, depth)
break
case 'a': {
const linkText = $node.text().trim()
const href = $node.attr('href')
if (linkText) {
if (href?.startsWith('http')) {
contentParts.push(`${linkText} (${href})`)
} else {
contentParts.push(linkText)
}
}
break
}
case 'img': {
const alt = $node.attr('alt')
if (alt) {
contentParts.push(`[Image: ${alt}]`)
}
break
}
default:
this.processElement($, $node, contentParts, depth)
}
}
})
}
/**
* Process table elements to extract structured data
*/
private processTable(
$: cheerio.CheerioAPI,
table: cheerio.Cheerio<any>,
contentParts: string[]
): void {
contentParts.push('\n[Table]')
table.find('tr').each((_, row) => {
const $row = $(row)
const cells: string[] = []
$row.find('td, th').each((_, cell) => {
const cellText = $(cell).text().trim()
cells.push(cellText || '')
})
if (cells.length > 0) {
contentParts.push(`| ${cells.join(' | ')} |`)
}
})
contentParts.push('[/Table]\n')
}
/**
* Extract heading structure for metadata
*/
private extractHeadings($: cheerio.CheerioAPI): Array<{ level: number; text: string }> {
const headings: Array<{ level: number; text: string }> = []
$('h1, h2, h3, h4, h5, h6').each((_, element) => {
const $element = $(element)
const tagName = element.tagName?.toLowerCase()
const level = Number.parseInt(tagName?.charAt(1) || '1', 10)
const text = $element.text().trim()
if (text) {
headings.push({ level, text })
}
})
return headings
}
/**
* Extract links from the document
*/
private extractLinks($: cheerio.CheerioAPI): Array<{ text: string; href: string }> {
const links: Array<{ text: string; href: string }> = []
$('a[href]').each((_, element) => {
const $element = $(element)
const href = $element.attr('href')
const text = $element.text().trim()
if (href && text && href.startsWith('http')) {
links.push({ text, href })
}
})
return links
}
}
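A minimal usage sketch for the parser above; the HTML string and the demo wrapper are hypothetical, and parseBuffer is exercised directly so no file I/O is needed:

import { HtmlParser } from '@/lib/file-parsers/html-parser'

async function demo(): Promise<void> {
  const parser = new HtmlParser()
  const html = Buffer.from(
    '<html><head><title>Hello</title></head><body><h1>Intro</h1><p>Some text.</p></body></html>',
    'utf-8'
  )
  const { content, metadata } = await parser.parseBuffer(html)
  // content is roughly 'Intro\n\nSome text.'; the <title> lives in <head>,
  // so it surfaces only in metadata, not in the extracted body text.
  console.log(content, metadata.title, metadata.headings)
}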

View File

@@ -51,6 +51,23 @@ const mockMdParseFile = vi.fn().mockResolvedValue({
},
})
const mockPptxParseFile = vi.fn().mockResolvedValue({
content: 'Parsed PPTX content',
metadata: {
slideCount: 5,
extractionMethod: 'officeparser',
},
})
const mockHtmlParseFile = vi.fn().mockResolvedValue({
content: 'Parsed HTML content',
metadata: {
title: 'Test HTML Document',
headingCount: 3,
linkCount: 2,
},
})
const createMockModule = () => {
const mockParsers: Record<string, FileParser> = {
pdf: { parseFile: mockPdfParseFile },
@@ -58,6 +75,10 @@ const createMockModule = () => {
docx: { parseFile: mockDocxParseFile },
txt: { parseFile: mockTxtParseFile },
md: { parseFile: mockMdParseFile },
pptx: { parseFile: mockPptxParseFile },
ppt: { parseFile: mockPptxParseFile },
html: { parseFile: mockHtmlParseFile },
htm: { parseFile: mockHtmlParseFile },
}
return {
@@ -143,6 +164,18 @@ describe('File Parsers', () => {
})),
}))
vi.doMock('@/lib/file-parsers/pptx-parser', () => ({
PptxParser: vi.fn().mockImplementation(() => ({
parseFile: mockPptxParseFile,
})),
}))
vi.doMock('@/lib/file-parsers/html-parser', () => ({
HtmlParser: vi.fn().mockImplementation(() => ({
parseFile: mockHtmlParseFile,
})),
}))
global.console = {
...console,
log: vi.fn(),
@@ -261,6 +294,82 @@ describe('File Parsers', () => {
const { parseFile } = await import('@/lib/file-parsers/index')
const result = await parseFile('/test/files/document.md')
expect(result).toEqual(expectedResult)
})
it('should parse PPTX files successfully', async () => {
const expectedResult = {
content: 'Parsed PPTX content',
metadata: {
slideCount: 5,
extractionMethod: 'officeparser',
},
}
mockPptxParseFile.mockResolvedValueOnce(expectedResult)
mockExistsSync.mockReturnValue(true)
const { parseFile } = await import('@/lib/file-parsers/index')
const result = await parseFile('/test/files/presentation.pptx')
expect(result).toEqual(expectedResult)
})
it('should parse PPT files successfully', async () => {
const expectedResult = {
content: 'Parsed PPTX content',
metadata: {
slideCount: 5,
extractionMethod: 'officeparser',
},
}
mockPptxParseFile.mockResolvedValueOnce(expectedResult)
mockExistsSync.mockReturnValue(true)
const { parseFile } = await import('@/lib/file-parsers/index')
const result = await parseFile('/test/files/presentation.ppt')
expect(result).toEqual(expectedResult)
})
it('should parse HTML files successfully', async () => {
const expectedResult = {
content: 'Parsed HTML content',
metadata: {
title: 'Test HTML Document',
headingCount: 3,
linkCount: 2,
},
}
mockHtmlParseFile.mockResolvedValueOnce(expectedResult)
mockExistsSync.mockReturnValue(true)
const { parseFile } = await import('@/lib/file-parsers/index')
const result = await parseFile('/test/files/document.html')
expect(result).toEqual(expectedResult)
})
it('should parse HTM files successfully', async () => {
const expectedResult = {
content: 'Parsed HTML content',
metadata: {
title: 'Test HTML Document',
headingCount: 3,
linkCount: 2,
},
}
mockHtmlParseFile.mockResolvedValueOnce(expectedResult)
mockExistsSync.mockReturnValue(true)
const { parseFile } = await import('@/lib/file-parsers/index')
const result = await parseFile('/test/files/document.htm')
expect(result).toEqual(expectedResult)
})
it('should throw error for unsupported file types', async () => {
@@ -292,6 +401,10 @@ describe('File Parsers', () => {
expect(isSupportedFileType('docx')).toBe(true)
expect(isSupportedFileType('txt')).toBe(true)
expect(isSupportedFileType('md')).toBe(true)
expect(isSupportedFileType('pptx')).toBe(true)
expect(isSupportedFileType('ppt')).toBe(true)
expect(isSupportedFileType('html')).toBe(true)
expect(isSupportedFileType('htm')).toBe(true)
})
it('should return false for unsupported file types', async () => {
@@ -308,6 +421,8 @@ describe('File Parsers', () => {
expect(isSupportedFileType('CSV')).toBe(true)
expect(isSupportedFileType('TXT')).toBe(true)
expect(isSupportedFileType('MD')).toBe(true)
expect(isSupportedFileType('PPTX')).toBe(true)
expect(isSupportedFileType('HTML')).toBe(true)
})
it('should handle errors gracefully', async () => {

View File

@@ -7,7 +7,6 @@ import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('FileParser')
// Lazy-loaded parsers to avoid initialization issues
let parserInstances: Record<string, FileParser> | null = null
/**
@@ -18,25 +17,20 @@ function getParserInstances(): Record<string, FileParser> {
parserInstances = {}
try {
// Import parsers only when needed - with try/catch for each one
try {
logger.info('Attempting to load PDF parser...')
try {
// First try to use the pdf-parse library
// Import the PdfParser lazily via require to avoid test file access
const { PdfParser } = require('@/lib/file-parsers/pdf-parser')
parserInstances.pdf = new PdfParser()
logger.info('PDF parser loaded successfully')
} catch (pdfParseError) {
// If that fails, fallback to our raw PDF parser
logger.error('Failed to load primary PDF parser:', pdfParseError)
} catch (pdfLibError) {
logger.error('Failed to load primary PDF parser:', pdfLibError)
logger.info('Falling back to raw PDF parser')
parserInstances.pdf = new RawPdfParser()
logger.info('Raw PDF parser loaded successfully')
}
} catch (error) {
logger.error('Failed to load any PDF parser:', error)
// Create a simple fallback that just returns the file size and a message
parserInstances.pdf = {
async parseFile(filePath: string): Promise<FileParseResult> {
const buffer = await readFile(filePath)
@@ -100,10 +94,26 @@ function getParserInstances(): Record<string, FileParser> {
try {
const { XlsxParser } = require('@/lib/file-parsers/xlsx-parser')
parserInstances.xlsx = new XlsxParser()
parserInstances.xls = new XlsxParser() // Both xls and xlsx use the same parser
parserInstances.xls = new XlsxParser()
} catch (error) {
logger.error('Failed to load XLSX parser:', error)
}
try {
const { PptxParser } = require('@/lib/file-parsers/pptx-parser')
parserInstances.pptx = new PptxParser()
parserInstances.ppt = new PptxParser()
} catch (error) {
logger.error('Failed to load PPTX parser:', error)
}
try {
const { HtmlParser } = require('@/lib/file-parsers/html-parser')
parserInstances.html = new HtmlParser()
parserInstances.htm = new HtmlParser()
} catch (error) {
logger.error('Failed to load HTML parser:', error)
}
} catch (error) {
logger.error('Error loading file parsers:', error)
}
@@ -119,12 +129,10 @@ function getParserInstances(): Record<string, FileParser> {
*/
export async function parseFile(filePath: string): Promise<FileParseResult> {
try {
// Validate input
if (!filePath) {
throw new Error('No file path provided')
}
// Check if file exists
if (!existsSync(filePath)) {
throw new Error(`File not found: ${filePath}`)
}
@@ -158,7 +166,6 @@ export async function parseFile(filePath: string): Promise<FileParseResult> {
*/
export async function parseBuffer(buffer: Buffer, extension: string): Promise<FileParseResult> {
try {
// Validate input
if (!buffer || buffer.length === 0) {
throw new Error('Empty buffer provided')
}
@@ -182,7 +189,6 @@ export async function parseBuffer(buffer: Buffer, extension: string): Promise<Fi
logger.info('Using parser for extension:', normalizedExtension)
const parser = parsers[normalizedExtension]
// Check if parser supports buffer parsing
if (parser.parseBuffer) {
return await parser.parseBuffer(buffer)
}
@@ -207,5 +213,4 @@ export function isSupportedFileType(extension: string): extension is SupportedFi
}
}
// Type exports
export type { FileParseResult, FileParser, SupportedFileType }
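Taken together, the registry above builds one parser per extension on first use and reuses it; a short sketch of the intended call path, with parseUpload as a hypothetical wrapper:

import { isSupportedFileType, parseBuffer } from '@/lib/file-parsers/index'

async function parseUpload(buffer: Buffer, extension: string) {
  if (!isSupportedFileType(extension)) {
    throw new Error(`Unsupported file type: ${extension}`)
  }
  // Dispatches to the lazily instantiated parser registered for this extension.
  return parseBuffer(buffer, extension)
}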

View File

@@ -1,5 +1,6 @@
import { readFile } from 'fs/promises'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('MdParser')
@@ -7,15 +8,12 @@ const logger = createLogger('MdParser')
export class MdParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
// Validate input
if (!filePath) {
throw new Error('No file path provided')
}
// Read the file
const buffer = await readFile(filePath)
// Use parseBuffer for consistent implementation
return this.parseBuffer(buffer)
} catch (error) {
logger.error('MD file error:', error)
@@ -27,14 +25,14 @@ export class MdParser implements FileParser {
try {
logger.info('Parsing buffer, size:', buffer.length)
// Extract content
const result = buffer.toString('utf-8')
const content = sanitizeTextForUTF8(result)
return {
content: result,
content,
metadata: {
characterCount: result.length,
tokenCount: result.length / 4,
characterCount: content.length,
tokenCount: Math.floor(content.length / 4),
},
}
} catch (error) {

View File

@@ -1,22 +1,21 @@
import { readFile } from 'fs/promises'
// @ts-ignore
import * as pdfParseLib from 'pdf-parse/lib/pdf-parse.js'
import { PDFDocument } from 'pdf-lib'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { createLogger } from '@/lib/logs/console/logger'
import { RawPdfParser } from './raw-pdf-parser'
const logger = createLogger('PdfParser')
const rawPdfParser = new RawPdfParser()
export class PdfParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
logger.info('Starting to parse file:', filePath)
// Make sure we're only parsing the provided file path
if (!filePath) {
throw new Error('No file path provided')
}
// Read the file
logger.info('Reading file...')
const dataBuffer = await readFile(filePath)
logger.info('File read successfully, size:', dataBuffer.length)
@@ -32,93 +31,66 @@ export class PdfParser implements FileParser {
try {
logger.info('Starting to parse buffer, size:', dataBuffer.length)
// Try to parse with pdf-parse library first
try {
logger.info('Attempting to parse with pdf-parse library...')
logger.info('Attempting to parse with pdf-lib library...')
// Parse PDF with direct function call to avoid test file access
logger.info('Starting PDF parsing...')
const data = await pdfParseLib.default(dataBuffer)
logger.info('PDF parsed successfully with pdf-parse, pages:', data.numpages)
const pdfDoc = await PDFDocument.load(dataBuffer)
const pages = pdfDoc.getPages()
const pageCount = pages.length
logger.info('PDF parsed successfully with pdf-lib, pages:', pageCount)
const metadata: Record<string, any> = {
pageCount,
}
try {
const title = pdfDoc.getTitle()
const author = pdfDoc.getAuthor()
const subject = pdfDoc.getSubject()
const creator = pdfDoc.getCreator()
const producer = pdfDoc.getProducer()
const creationDate = pdfDoc.getCreationDate()
const modificationDate = pdfDoc.getModificationDate()
if (title) metadata.title = title
if (author) metadata.author = author
if (subject) metadata.subject = subject
if (creator) metadata.creator = creator
if (producer) metadata.producer = producer
if (creationDate) metadata.creationDate = creationDate.toISOString()
if (modificationDate) metadata.modificationDate = modificationDate.toISOString()
} catch (metadataError) {
logger.warn('Could not extract PDF metadata:', metadataError)
}
logger.info(
'pdf-lib loaded successfully, but text extraction requires fallback to raw parser'
)
const rawResult = await rawPdfParser.parseBuffer(dataBuffer)
return {
content: data.text,
content: rawResult.content,
metadata: {
pageCount: data.numpages,
info: data.info,
version: data.version,
...rawResult.metadata,
...metadata,
source: 'pdf-lib + raw-parser',
},
}
} catch (pdfParseError: unknown) {
logger.error('PDF-parse library failed:', pdfParseError)
} catch (pdfLibError: unknown) {
logger.error('PDF-lib library failed:', pdfLibError)
// Fallback to manual text extraction
logger.info('Falling back to manual text extraction...')
// Extract basic PDF info from raw content
const rawContent = dataBuffer.toString('utf-8', 0, Math.min(10000, dataBuffer.length))
let version = 'Unknown'
let pageCount = 0
// Try to extract PDF version
const versionMatch = rawContent.match(/%PDF-(\d+\.\d+)/)
if (versionMatch?.[1]) {
version = versionMatch[1]
}
// Try to get page count
const pageMatches = rawContent.match(/\/Type\s*\/Page\b/g)
if (pageMatches) {
pageCount = pageMatches.length
}
// Try to extract text by looking for text-related operators in the PDF
let extractedText = ''
// Look for text in the PDF content using common patterns
const textMatches = rawContent.match(/BT[\s\S]*?ET/g)
if (textMatches && textMatches.length > 0) {
extractedText = textMatches
.map((textBlock) => {
// Extract text objects (Tj, TJ) from the text block
const textObjects = textBlock.match(/\([^)]*\)\s*Tj|\[[^\]]*\]\s*TJ/g)
if (textObjects) {
return textObjects
.map((obj) => {
// Clean up text objects
return (
obj
.replace(
/\(([^)]*)\)\s*Tj|\[([^\]]*)\]\s*TJ/g,
(match, p1, p2) => p1 || p2 || ''
)
// Clean up PDF escape sequences
.replace(/\\(\d{3}|[()\\])/g, '')
.replace(/\\\\/g, '\\')
.replace(/\\\(/g, '(')
.replace(/\\\)/g, ')')
)
})
.join(' ')
}
return ''
})
.join('\n')
}
// If we couldn't extract text or the text is too short, return a fallback message
if (!extractedText || extractedText.length < 50) {
extractedText = `This PDF contains ${pageCount} page(s) but text extraction was not successful.`
}
logger.info('Falling back to raw PDF parser...')
const rawResult = await rawPdfParser.parseBuffer(dataBuffer)
return {
content: extractedText,
...rawResult,
metadata: {
pageCount,
version,
...rawResult.metadata,
fallback: true,
error: (pdfParseError as Error).message || 'Unknown error',
source: 'raw-parser-only',
error: (pdfLibError as Error).message || 'Unknown error',
},
}
}
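In the rewritten flow, pdf-lib supplies only structural metadata (page count plus Info-dictionary fields) while text extraction always goes through RawPdfParser. A sketch of the metadata pass in isolation, using only pdf-lib calls that already appear above; readPdfInfo is a hypothetical name:

import { PDFDocument } from 'pdf-lib'

async function readPdfInfo(dataBuffer: Buffer) {
  const pdfDoc = await PDFDocument.load(dataBuffer)
  return {
    pageCount: pdfDoc.getPages().length,
    title: pdfDoc.getTitle(), // undefined when the Info dictionary has no title
    author: pdfDoc.getAuthor(),
    creationDate: pdfDoc.getCreationDate()?.toISOString(),
  }
}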

View File

@@ -0,0 +1,106 @@
import { existsSync } from 'fs'
import { readFile } from 'fs/promises'
import type { FileParseResult, FileParser } from '@/lib/file-parsers/types'
import { sanitizeTextForUTF8 } from '@/lib/file-parsers/utils'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('PptxParser')
export class PptxParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
if (!filePath) {
throw new Error('No file path provided')
}
if (!existsSync(filePath)) {
throw new Error(`File not found: ${filePath}`)
}
logger.info(`Parsing PowerPoint file: ${filePath}`)
const buffer = await readFile(filePath)
return this.parseBuffer(buffer)
} catch (error) {
logger.error('PowerPoint file parsing error:', error)
throw new Error(`Failed to parse PowerPoint file: ${(error as Error).message}`)
}
}
async parseBuffer(buffer: Buffer): Promise<FileParseResult> {
try {
logger.info('Parsing PowerPoint buffer, size:', buffer.length)
if (!buffer || buffer.length === 0) {
throw new Error('Empty buffer provided')
}
let parseOfficeAsync
try {
const officeParser = await import('officeparser')
parseOfficeAsync = officeParser.parseOfficeAsync
} catch (importError) {
logger.warn('officeparser not available, using fallback extraction')
return this.fallbackExtraction(buffer)
}
try {
const result = await parseOfficeAsync(buffer)
if (!result || typeof result !== 'string') {
throw new Error('officeparser returned invalid result')
}
const content = sanitizeTextForUTF8(result.trim())
logger.info('PowerPoint parsing completed successfully with officeparser')
return {
content: content,
metadata: {
characterCount: content.length,
extractionMethod: 'officeparser',
},
}
} catch (extractError) {
logger.warn('officeparser failed, using fallback:', extractError)
return this.fallbackExtraction(buffer)
}
} catch (error) {
logger.error('PowerPoint buffer parsing error:', error)
throw new Error(`Failed to parse PowerPoint buffer: ${(error as Error).message}`)
}
}
private fallbackExtraction(buffer: Buffer): FileParseResult {
logger.info('Using fallback text extraction for PowerPoint file')
const text = buffer.toString('utf8', 0, Math.min(buffer.length, 200000))
const readableText = text
.match(/[\x20-\x7E\s]{4,}/g)
?.filter(
(chunk) =>
chunk.trim().length > 10 &&
/[a-zA-Z]/.test(chunk) &&
!/^[\x00-\x1F]*$/.test(chunk) &&
!/^[^\w\s]*$/.test(chunk)
)
.join(' ')
.replace(/\s+/g, ' ')
.trim()
const content = readableText
? sanitizeTextForUTF8(readableText)
: 'Unable to extract text from PowerPoint file. Please ensure the file contains readable text content.'
return {
content,
metadata: {
extractionMethod: 'fallback',
characterCount: content.length,
warning: 'Basic text extraction used',
},
}
}
}
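Usage mirrors the other parsers; a brief sketch with a hypothetical path. The dynamic import keeps officeparser an optional dependency: when it is missing or throws, parsing degrades to the fallback rather than failing outright.

import { PptxParser } from '@/lib/file-parsers/pptx-parser'

async function demo(): Promise<string> {
  const pptx = new PptxParser()
  const { content, metadata } = await pptx.parseFile('/tmp/deck.pptx')
  if (metadata.extractionMethod === 'fallback') {
    // Degraded output: ASCII-ish runs scraped from the raw container,
    // not true slide text.
  }
  return content
}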

View File

@@ -6,14 +6,9 @@ import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('RawPdfParser')
// Promisify zlib functions
const inflateAsync = promisify(zlib.inflate)
const unzipAsync = promisify(zlib.unzip)
/**
* A simple PDF parser that extracts readable text from a PDF file.
* This is used as a fallback when the pdf-parse library fails.
*/
export class RawPdfParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
@@ -23,7 +18,6 @@ export class RawPdfParser implements FileParser {
throw new Error('No file path provided')
}
// Read the file
logger.info('Reading file...')
const dataBuffer = await readFile(filePath)
logger.info('File read successfully, size:', dataBuffer.length)
@@ -46,31 +40,22 @@ export class RawPdfParser implements FileParser {
try {
logger.info('Starting to parse buffer, size:', dataBuffer.length)
// Instead of trying to parse the binary PDF data directly,
// we'll extract only the text sections that are readable
// First convert to string but only for pattern matching, not for display
const rawContent = dataBuffer.toString('utf-8')
// Extract basic PDF info
let version = 'Unknown'
let pageCount = 0
// Try to extract PDF version
const versionMatch = rawContent.match(/%PDF-(\d+\.\d+)/)
if (versionMatch?.[1]) {
version = versionMatch[1]
}
// Count pages using multiple methods for redundancy
// Method 1: Count "/Type /Page" occurrences (most reliable)
const typePageMatches = rawContent.match(/\/Type\s*\/Page\b/gi)
if (typePageMatches) {
pageCount = typePageMatches.length
logger.info('Found page count using /Type /Page:', pageCount)
}
// Method 2: Look for "/Page" dictionary references
if (pageCount === 0) {
const pageMatches = rawContent.match(/\/Page\s*\//gi)
if (pageMatches) {
@@ -79,19 +64,15 @@ export class RawPdfParser implements FileParser {
}
}
// Method 3: Look for "/Pages" object references
if (pageCount === 0) {
const pagesObjMatches = rawContent.match(/\/Pages\s+\d+\s+\d+\s+R/gi)
if (pagesObjMatches && pagesObjMatches.length > 0) {
// Extract the object reference
const pagesObjRef = pagesObjMatches[0].match(/\/Pages\s+(\d+)\s+\d+\s+R/i)
if (pagesObjRef?.[1]) {
const objNum = pagesObjRef[1]
// Find the referenced object
const objRegex = new RegExp(`${objNum}\\s+0\\s+obj[\\s\\S]*?endobj`, 'i')
const objMatch = rawContent.match(objRegex)
if (objMatch) {
// Look for /Count within the Pages object
const countMatch = objMatch[0].match(/\/Count\s+(\d+)/i)
if (countMatch?.[1]) {
pageCount = Number.parseInt(countMatch[1], 10)
@@ -102,50 +83,40 @@ export class RawPdfParser implements FileParser {
}
}
// Method 4: Count trailer references to get an approximate count
if (pageCount === 0) {
const trailerMatches = rawContent.match(/trailer/gi)
if (trailerMatches) {
// This is just a rough estimate, not accurate
pageCount = Math.max(1, Math.ceil(trailerMatches.length / 2))
logger.info('Estimated page count using trailer references:', pageCount)
}
}
// Default to at least 1 page if we couldn't find any
if (pageCount === 0) {
pageCount = 1
logger.info('Defaulting to 1 page as no count was found')
}
// Extract text content using text markers commonly found in PDFs
let extractedText = ''
// Method 1: Extract text between BT (Begin Text) and ET (End Text) markers
const textMatches = rawContent.match(/BT[\s\S]*?ET/g)
if (textMatches && textMatches.length > 0) {
logger.info('Found', textMatches.length, 'text blocks')
extractedText = textMatches
.map((textBlock) => {
// Extract text objects (Tj, TJ) from the text block
const textObjects = textBlock.match(/(\([^)]*\)|\[[^\]]*\])\s*(Tj|TJ)/g)
if (textObjects && textObjects.length > 0) {
return textObjects
.map((obj) => {
// Clean up text objects
let text = ''
if (obj.includes('Tj')) {
// Handle Tj operator (simple string)
const match = obj.match(/\(([^)]*)\)\s*Tj/)
if (match?.[1]) {
text = match[1]
}
} else if (obj.includes('TJ')) {
// Handle TJ operator (array of strings and positioning)
const match = obj.match(/\[(.*)\]\s*TJ/)
if (match?.[1]) {
// Extract only the string parts from the array
const parts = match[1].match(/\([^)]*\)/g)
if (parts) {
text = parts.map((p) => p.slice(1, -1)).join(' ')
@@ -153,7 +124,6 @@ export class RawPdfParser implements FileParser {
}
}
// Clean up PDF escape sequences
return text
.replace(/\\(\d{3})/g, (_, octal) =>
String.fromCharCode(Number.parseInt(octal, 8))
@@ -170,50 +140,42 @@ export class RawPdfParser implements FileParser {
.trim()
}
// Try to extract metadata from XML
let metadataText = ''
const xmlMatch = rawContent.match(/<x:xmpmeta[\s\S]*?<\/x:xmpmeta>/)
if (xmlMatch) {
const xmlContent = xmlMatch[0]
logger.info('Found XML metadata')
// Extract document title
const titleMatch = xmlContent.match(/<dc:title>[\s\S]*?<rdf:li[^>]*>(.*?)<\/rdf:li>/i)
if (titleMatch?.[1]) {
const title = titleMatch[1].replace(/<[^>]+>/g, '').trim()
metadataText += `Document Title: ${title}\n\n`
}
// Extract creator/author
const creatorMatch = xmlContent.match(/<dc:creator>[\s\S]*?<rdf:li[^>]*>(.*?)<\/rdf:li>/i)
if (creatorMatch?.[1]) {
const creator = creatorMatch[1].replace(/<[^>]+>/g, '').trim()
metadataText += `Author: ${creator}\n`
}
// Extract creation date
const dateMatch = xmlContent.match(/<xmp:CreateDate>(.*?)<\/xmp:CreateDate>/i)
if (dateMatch?.[1]) {
metadataText += `Created: ${dateMatch[1].trim()}\n`
}
// Extract producer
const producerMatch = xmlContent.match(/<pdf:Producer>(.*?)<\/pdf:Producer>/i)
if (producerMatch?.[1]) {
metadataText += `Producer: ${producerMatch[1].trim()}\n`
}
}
// Try to extract actual text content from content streams
if (!extractedText || extractedText.length < 100 || extractedText.includes('/Type /Page')) {
logger.info('Trying advanced text extraction from content streams')
// Find content stream references
const contentRefs = rawContent.match(/\/Contents\s+\[?\s*(\d+)\s+\d+\s+R\s*\]?/g)
if (contentRefs && contentRefs.length > 0) {
logger.info('Found', contentRefs.length, 'content stream references')
// Extract object numbers from content references
const objNumbers = contentRefs
.map((ref) => {
const match = ref.match(/\/Contents\s+\[?\s*(\d+)\s+\d+\s+R\s*\]?/)
@@ -223,7 +185,6 @@ export class RawPdfParser implements FileParser {
logger.info('Content stream object numbers:', objNumbers)
// Try to find those objects in the content
if (objNumbers.length > 0) {
let textFromStreams = ''
@@ -232,12 +193,10 @@ export class RawPdfParser implements FileParser {
const objMatch = rawContent.match(objRegex)
if (objMatch) {
// Look for stream content within the object
const streamMatch = objMatch[0].match(/stream\r?\n([\s\S]*?)\r?\nendstream/)
if (streamMatch?.[1]) {
const streamContent = streamMatch[1]
// Look for text operations in the stream (Tj, TJ, etc.)
const textFragments = streamContent.match(/\([^)]+\)\s*Tj|\[[^\]]*\]\s*TJ/g)
if (textFragments && textFragments.length > 0) {
const extractedFragments = textFragments
@@ -290,35 +249,27 @@ export class RawPdfParser implements FileParser {
}
}
// Try to decompress PDF streams
// This is especially helpful for PDFs with compressed content
if (!extractedText || extractedText.length < 100) {
logger.info('Trying to decompress PDF streams')
// Find compressed streams (FlateDecode)
const compressedStreams = rawContent.match(
/\/Filter\s*\/FlateDecode[\s\S]*?stream[\s\S]*?endstream/g
)
if (compressedStreams && compressedStreams.length > 0) {
logger.info('Found', compressedStreams.length, 'compressed streams')
// Process each stream
const decompressedContents = await Promise.all(
compressedStreams.map(async (stream) => {
try {
// Extract stream content between stream and endstream
const streamMatch = stream.match(/stream\r?\n([\s\S]*?)\r?\nendstream/)
if (!streamMatch || !streamMatch[1]) return ''
const compressedData = Buffer.from(streamMatch[1], 'binary')
// Try different decompression methods
try {
// Try inflate (most common)
const decompressed = await inflateAsync(compressedData)
const content = decompressed.toString('utf-8')
// Check if it contains readable text
const readable = content.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()
if (
readable.length > 50 &&
@@ -329,12 +280,10 @@ export class RawPdfParser implements FileParser {
return readable
}
} catch (_inflateErr) {
// Try unzip as fallback
try {
const decompressed = await unzipAsync(compressedData)
const content = decompressed.toString('utf-8')
// Check if it contains readable text
const readable = content.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()
if (
readable.length > 50 &&
@@ -345,12 +294,10 @@ export class RawPdfParser implements FileParser {
return readable
}
} catch (_unzipErr) {
// Both methods failed, continue to next stream
return ''
}
}
} catch (_error) {
// Error processing this stream, skip it
return ''
}
@@ -358,7 +305,6 @@ export class RawPdfParser implements FileParser {
})
)
// Filter out empty results and combine
const decompressedText = decompressedContents
.filter((text) => text && text.length > 0)
.join('\n\n')
@@ -370,26 +316,19 @@ export class RawPdfParser implements FileParser {
}
}
// Method 2: Look for text stream data
if (!extractedText || extractedText.length < 50) {
logger.info('Trying alternative text extraction method with streams')
// Find text streams
const streamMatches = rawContent.match(/stream[\s\S]*?endstream/g)
if (streamMatches && streamMatches.length > 0) {
logger.info('Found', streamMatches.length, 'streams')
// Process each stream to look for text content
const textContent = streamMatches
.map((stream) => {
// Remove 'stream' and 'endstream' markers
const content = stream.replace(/^stream\r?\n|\r?\nendstream$/g, '')
// Look for readable ASCII text (more strict heuristic)
// Only keep ASCII printable characters
const readable = content.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()
// Only keep content that looks like real text (has spaces, periods, etc.)
if (
readable.length > 20 &&
readable.includes(' ') &&
@@ -400,7 +339,7 @@ export class RawPdfParser implements FileParser {
}
return ''
})
.filter((text) => text.length > 0 && text.split(' ').length > 5) // Must have at least 5 words
.filter((text) => text.length > 0 && text.split(' ').length > 5)
.join('\n\n')
if (textContent.length > 0) {
@@ -409,22 +348,17 @@ export class RawPdfParser implements FileParser {
}
}
// Method 3: Look for object streams
if (!extractedText || extractedText.length < 50) {
logger.info('Trying object streams for text')
// Find object stream content
const objMatches = rawContent.match(/\d+\s+\d+\s+obj[\s\S]*?endobj/g)
if (objMatches && objMatches.length > 0) {
logger.info('Found', objMatches.length, 'objects')
// Process objects looking for text content
const textContent = objMatches
.map((obj) => {
// Find readable text in the object - only keep ASCII printable characters
const readable = obj.replace(/[^\x20-\x7E\r\n]/g, ' ').trim()
// Only include if it looks like actual text (strict heuristic)
if (
readable.length > 50 &&
readable.includes(' ') &&
@@ -445,8 +379,6 @@ export class RawPdfParser implements FileParser {
}
}
// If what we extracted is just PDF structure information rather than readable text,
// provide a clearer message
if (
extractedText &&
(extractedText.includes('endobj') ||
@@ -459,53 +391,41 @@ export class RawPdfParser implements FileParser {
)
extractedText = metadataText
} else if (metadataText && !extractedText.includes('Document Title:')) {
// Prepend metadata to extracted text if available
extractedText = metadataText + (extractedText ? `\n\n${extractedText}` : '')
}
// Validate that the extracted text looks meaningful
// Count how many recognizable words/characters it contains
const validCharCount = (extractedText || '').replace(/[^\x20-\x7E\r\n]/g, '').length
const totalCharCount = (extractedText || '').length
const validRatio = validCharCount / (totalCharCount || 1)
// Check for common PDF artifacts that indicate binary corruption
const hasBinaryArtifacts =
extractedText &&
(extractedText.includes('\\u') ||
extractedText.includes('\\x') ||
extractedText.includes('\0') ||
/[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\xFF]{10,}/g.test(extractedText) ||
validRatio < 0.7) // Less than 70% valid characters
validRatio < 0.7)
// Check if the content looks like gibberish
const looksLikeGibberish =
extractedText &&
// Too many special characters
(extractedText.replace(/[a-zA-Z0-9\s.,:'"()[\]{}]/g, '').length / extractedText.length >
0.3 ||
// Not enough spaces (real text has spaces between words)
extractedText.split(' ').length < extractedText.length / 20)
// If no text was extracted, or if it's binary/gibberish,
// provide a helpful message instead
if (!extractedText || extractedText.length < 50 || hasBinaryArtifacts || looksLikeGibberish) {
logger.info('Could not extract meaningful text, providing fallback message')
logger.info('Valid character ratio:', validRatio)
logger.info('Has binary artifacts:', hasBinaryArtifacts)
logger.info('Looks like gibberish:', looksLikeGibberish)
// Start with metadata if available
if (metadataText) {
extractedText = `${metadataText}\n`
} else {
extractedText = ''
}
// Add basic PDF info
extractedText += `This is a PDF document with ${pageCount} page(s) and version ${version}.\n\n`
// Try to find a title in the PDF structure that we might have missed
const titleInStructure =
rawContent.match(/title\s*:\s*([^\n]+)/i) ||
rawContent.match(/Microsoft Word -\s*([^\n]+)/i)

View File

@@ -8,15 +8,12 @@ const logger = createLogger('TxtParser')
export class TxtParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
// Validate input
if (!filePath) {
throw new Error('No file path provided')
}
// Read the file
const buffer = await readFile(filePath)
// Use parseBuffer for consistent implementation
return this.parseBuffer(buffer)
} catch (error) {
logger.error('TXT file error:', error)
@@ -28,7 +25,6 @@ export class TxtParser implements FileParser {
try {
logger.info('Parsing buffer, size:', buffer.length)
// Extract content and sanitize for UTF-8 storage
const rawContent = buffer.toString('utf-8')
const result = sanitizeTextForUTF8(rawContent)

View File

@@ -8,4 +8,16 @@ export interface FileParser {
parseBuffer?(buffer: Buffer): Promise<FileParseResult>
}
export type SupportedFileType = 'pdf' | 'csv' | 'doc' | 'docx' | 'txt' | 'md' | 'xlsx' | 'xls'
export type SupportedFileType =
| 'pdf'
| 'csv'
| 'doc'
| 'docx'
| 'txt'
| 'md'
| 'xlsx'
| 'xls'
| 'html'
| 'htm'
| 'pptx'
| 'ppt'
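The widened union pairs with the isSupportedFileType guard exported from the index; a sketch of the narrowing it enables, where normalize is a hypothetical helper:

import { isSupportedFileType, type SupportedFileType } from '@/lib/file-parsers/index'

function normalize(ext: string): SupportedFileType | null {
  const cleaned = ext.toLowerCase().replace(/^\./, '')
  // The type guard narrows the plain string to the union above.
  return isSupportedFileType(cleaned) ? cleaned : null
}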

View File

@@ -9,19 +9,16 @@ const logger = createLogger('XlsxParser')
export class XlsxParser implements FileParser {
async parseFile(filePath: string): Promise<FileParseResult> {
try {
// Validate input
if (!filePath) {
throw new Error('No file path provided')
}
// Check if file exists
if (!existsSync(filePath)) {
throw new Error(`File not found: ${filePath}`)
}
logger.info(`Parsing XLSX file: ${filePath}`)
// Read the workbook
const workbook = XLSX.readFile(filePath)
return this.processWorkbook(workbook)
} catch (error) {
@@ -38,7 +35,6 @@ export class XlsxParser implements FileParser {
throw new Error('Empty buffer provided')
}
// Read the workbook from buffer
const workbook = XLSX.read(buffer, { type: 'buffer' })
return this.processWorkbook(workbook)
} catch (error) {
@@ -53,25 +49,20 @@ export class XlsxParser implements FileParser {
let content = ''
let totalRows = 0
// Process each worksheet
for (const sheetName of sheetNames) {
const worksheet = workbook.Sheets[sheetName]
// Convert to array of objects
const sheetData = XLSX.utils.sheet_to_json(worksheet, { header: 1 })
sheets[sheetName] = sheetData
totalRows += sheetData.length
// Add sheet content to the overall content string (clean sheet name)
const cleanSheetName = sanitizeTextForUTF8(sheetName)
content += `Sheet: ${cleanSheetName}\n`
content += `=${'='.repeat(cleanSheetName.length + 6)}\n\n`
if (sheetData.length > 0) {
// Process each row
sheetData.forEach((row: unknown, rowIndex: number) => {
if (Array.isArray(row) && row.length > 0) {
// Convert row to string, handling undefined/null values and cleaning non-UTF8 characters
const rowString = row
.map((cell) => {
if (cell === null || cell === undefined) {
@@ -93,7 +84,6 @@ export class XlsxParser implements FileParser {
logger.info(`XLSX parsing completed: ${sheetNames.length} sheets, ${totalRows} total rows`)
// Final cleanup of the entire content to ensure UTF-8 compatibility
const cleanContent = sanitizeTextForUTF8(content).trim()
return {

View File

@@ -0,0 +1,24 @@
export const TAG_SLOT_CONFIG = {
text: {
slots: ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] as const,
maxSlots: 7,
},
} as const
export const SUPPORTED_FIELD_TYPES = Object.keys(TAG_SLOT_CONFIG) as Array<
keyof typeof TAG_SLOT_CONFIG
>
export const TAG_SLOTS = TAG_SLOT_CONFIG.text.slots
export const MAX_TAG_SLOTS = TAG_SLOT_CONFIG.text.maxSlots
export type TagSlot = (typeof TAG_SLOTS)[number]
export function getSlotsForFieldType(fieldType: string): readonly string[] {
const config = TAG_SLOT_CONFIG[fieldType as keyof typeof TAG_SLOT_CONFIG]
if (!config) {
return []
}
return config.slots
}
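A sketch of how the slot helpers are meant to be consumed; nextSlot and usedSlots are hypothetical, and the real allocation logic lives in getNextAvailableSlot in the tags service:

import { getSlotsForFieldType } from '@/lib/knowledge/consts'

function nextSlot(fieldType: string, usedSlots: Set<string>): string | null {
  for (const slot of getSlotsForFieldType(fieldType)) {
    if (!usedSlots.has(slot)) return slot // first free slot wins
  }
  return null // unknown field type, or all seven slots taken
}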

View File

@@ -1,9 +1,9 @@
import crypto, { randomUUID } from 'crypto'
import { tasks } from '@trigger.dev/sdk'
import { and, asc, desc, eq, inArray, isNull, sql } from 'drizzle-orm'
import { getSlotsForFieldType, type TAG_SLOT_CONFIG } from '@/lib/constants/knowledge'
import { generateEmbeddings } from '@/lib/embeddings/utils'
import { env } from '@/lib/env'
import { getSlotsForFieldType, type TAG_SLOT_CONFIG } from '@/lib/knowledge/consts'
import { processDocument } from '@/lib/knowledge/documents/document-processor'
import { getNextAvailableSlot } from '@/lib/knowledge/tags/service'
import { createLogger } from '@/lib/logs/console/logger'
@@ -17,8 +17,8 @@ import type { DocumentSortField, SortOrder } from './types'
const logger = createLogger('DocumentService')
const TIMEOUTS = {
OVERALL_PROCESSING: env.KB_CONFIG_MAX_DURATION * 1000,
EMBEDDINGS_API: env.KB_CONFIG_MAX_TIMEOUT * 18,
OVERALL_PROCESSING: (env.KB_CONFIG_MAX_DURATION || 300) * 1000,
EMBEDDINGS_API: (env.KB_CONFIG_MAX_TIMEOUT || 10000) * 18,
} as const
/**
@@ -38,17 +38,17 @@ function withTimeout<T>(
}
const PROCESSING_CONFIG = {
maxConcurrentDocuments: Math.max(1, Math.floor(env.KB_CONFIG_CONCURRENCY_LIMIT / 5)) || 4,
batchSize: Math.max(1, Math.floor(env.KB_CONFIG_BATCH_SIZE / 2)) || 10,
delayBetweenBatches: env.KB_CONFIG_DELAY_BETWEEN_BATCHES * 2,
delayBetweenDocuments: env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS * 2,
maxConcurrentDocuments: Math.max(1, Math.floor((env.KB_CONFIG_CONCURRENCY_LIMIT || 20) / 5)) || 4,
batchSize: Math.max(1, Math.floor((env.KB_CONFIG_BATCH_SIZE || 20) / 2)) || 10,
delayBetweenBatches: (env.KB_CONFIG_DELAY_BETWEEN_BATCHES || 100) * 2,
delayBetweenDocuments: (env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS || 50) * 2,
}
const REDIS_PROCESSING_CONFIG = {
maxConcurrentDocuments: env.KB_CONFIG_CONCURRENCY_LIMIT,
batchSize: env.KB_CONFIG_BATCH_SIZE,
delayBetweenBatches: env.KB_CONFIG_DELAY_BETWEEN_BATCHES,
delayBetweenDocuments: env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS,
maxConcurrentDocuments: env.KB_CONFIG_CONCURRENCY_LIMIT || 20,
batchSize: env.KB_CONFIG_BATCH_SIZE || 20,
delayBetweenBatches: env.KB_CONFIG_DELAY_BETWEEN_BATCHES || 100,
delayBetweenDocuments: env.KB_CONFIG_DELAY_BETWEEN_DOCUMENTS || 50,
}
let documentQueue: DocumentProcessingQueue | null = null
@@ -59,8 +59,8 @@ export function getDocumentQueue(): DocumentProcessingQueue {
const config = redisClient ? REDIS_PROCESSING_CONFIG : PROCESSING_CONFIG
documentQueue = new DocumentProcessingQueue({
maxConcurrent: config.maxConcurrentDocuments,
retryDelay: env.KB_CONFIG_MIN_TIMEOUT,
maxRetries: env.KB_CONFIG_MAX_ATTEMPTS,
retryDelay: env.KB_CONFIG_MIN_TIMEOUT || 1000,
maxRetries: env.KB_CONFIG_MAX_ATTEMPTS || 3,
})
}
return documentQueue
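The fix above defaults every KB_CONFIG_* read, since an unset variable otherwise yields NaN (for example env.KB_CONFIG_MAX_DURATION * 1000). Two nuances: || also overrides an explicit 0, where ?? would preserve it; and the trailing || 4 on maxConcurrentDocuments was the old NaN guard (Math.max(1, NaN) evaluates to NaN, which is falsy) and becomes unreachable once the inner default guarantees a finite value. A condensed sketch of the same guard written once, with withDefault as a hypothetical helper:

import { env } from '@/lib/env'

const withDefault = (value: number | undefined, fallback: number): number =>
  value ?? fallback

// Equivalent to the defaulted read above, minus the ||-versus-0 edge case.
const maxConcurrentDocuments = Math.max(
  1,
  Math.floor(withDefault(env.KB_CONFIG_CONCURRENCY_LIMIT, 20) / 5)
)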

View File

@@ -4,7 +4,7 @@ import {
getSlotsForFieldType,
SUPPORTED_FIELD_TYPES,
type TAG_SLOT_CONFIG,
} from '@/lib/constants/knowledge'
} from '@/lib/knowledge/consts'
import type { BulkTagDefinitionsData, DocumentTagDefinition } from '@/lib/knowledge/tags/types'
import type {
CreateTagDefinitionData,

View File

@@ -7,6 +7,7 @@ vi.mock('@/db', () => ({
where: vi.fn(),
limit: vi.fn(),
innerJoin: vi.fn(),
leftJoin: vi.fn(),
orderBy: vi.fn(),
},
}))
@@ -17,6 +18,7 @@ vi.mock('@/db/schema', () => ({
userId: 'user_id',
entityType: 'entity_type',
entityId: 'entity_id',
id: 'permission_id',
},
permissionTypeEnum: {
enumValues: ['admin', 'write', 'read'] as const,
@@ -25,23 +27,18 @@ vi.mock('@/db/schema', () => ({
id: 'user_id',
email: 'user_email',
name: 'user_name',
image: 'user_image',
},
workspace: {
id: 'workspace_id',
name: 'workspace_name',
ownerId: 'workspace_owner_id',
},
member: {
userId: 'member_user_id',
organizationId: 'member_organization_id',
role: 'member_role',
},
}))
vi.mock('drizzle-orm', () => ({
and: vi.fn().mockReturnValue('and-condition'),
eq: vi.fn().mockReturnValue('eq-condition'),
or: vi.fn().mockReturnValue('or-condition'),
}))
import {
@@ -50,8 +47,6 @@ import {
getUsersWithPermissions,
hasAdminPermission,
hasWorkspaceAdminAccess,
isOrganizationAdminForWorkspace,
isOrganizationOwnerOrAdmin,
} from '@/lib/permissions/utils'
import { db } from '@/db'
@@ -124,11 +119,64 @@ describe('Permission Utils', () => {
expect(result).toBe('admin')
})
it('should return write permission when user only has write access', async () => {
const mockResults = [{ permissionType: 'write' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)
const result = await getUserEntityPermissions('user123', 'workspace', 'workspace456')
expect(result).toBe('write')
})
it('should prioritize write over read permissions', async () => {
const mockResults = [
{ permissionType: 'read' as PermissionType },
{ permissionType: 'write' as PermissionType },
]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)
const result = await getUserEntityPermissions('user123', 'workspace', 'workspace456')
expect(result).toBe('write')
})
it('should work with workflow entity type', async () => {
const mockResults = [{ permissionType: 'admin' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)
const result = await getUserEntityPermissions('user123', 'workflow', 'workflow789')
expect(result).toBe('admin')
})
it('should work with organization entity type', async () => {
const mockResults = [{ permissionType: 'read' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)
const result = await getUserEntityPermissions('user123', 'organization', 'org456')
expect(result).toBe('read')
})
it('should handle generic entity types', async () => {
const mockResults = [{ permissionType: 'write' as PermissionType }]
const chain = createMockChain(mockResults)
mockDb.select.mockReturnValue(chain)
const result = await getUserEntityPermissions('user123', 'custom_entity', 'entity123')
expect(result).toBe('write')
})
})
describe('hasAdminPermission', () => {
it('should return true when user has admin permission for workspace', async () => {
const chain = createMockChain([{ permissionType: 'admin' }])
const chain = createMockChain([{ id: 'perm1' }])
mockDb.select.mockReturnValue(chain)
const result = await hasAdminPermission('admin-user', 'workspace123')
@@ -144,6 +192,42 @@ describe('Permission Utils', () => {
expect(result).toBe(false)
})
it('should return false when user has write permission but not admin', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await hasAdminPermission('write-user', 'workspace123')
expect(result).toBe(false)
})
it('should return false when user has read permission but not admin', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await hasAdminPermission('read-user', 'workspace123')
expect(result).toBe(false)
})
it('should handle non-existent workspace', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await hasAdminPermission('user123', 'non-existent-workspace')
expect(result).toBe(false)
})
it('should handle empty user ID', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await hasAdminPermission('', 'workspace123')
expect(result).toBe(false)
})
})
describe('getUsersWithPermissions', () => {
@@ -162,7 +246,6 @@ describe('Permission Utils', () => {
userId: 'user1',
email: 'alice@example.com',
name: 'Alice Smith',
image: 'https://example.com/alice.jpg',
permissionType: 'admin' as PermissionType,
},
]
@@ -177,43 +260,66 @@ describe('Permission Utils', () => {
userId: 'user1',
email: 'alice@example.com',
name: 'Alice Smith',
image: 'https://example.com/alice.jpg',
permissionType: 'admin',
},
])
})
})
describe('isOrganizationAdminForWorkspace', () => {
it('should return false when workspace does not exist', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
it('should return multiple users with different permission levels', async () => {
const mockUsersResults = [
{
userId: 'user1',
email: 'admin@example.com',
name: 'Admin User',
permissionType: 'admin' as PermissionType,
},
{
userId: 'user2',
email: 'writer@example.com',
name: 'Writer User',
permissionType: 'write' as PermissionType,
},
{
userId: 'user3',
email: 'reader@example.com',
name: 'Reader User',
permissionType: 'read' as PermissionType,
},
]
const result = await isOrganizationAdminForWorkspace('user123', 'workspace456')
const usersChain = createMockChain(mockUsersResults)
mockDb.select.mockReturnValue(usersChain)
expect(result).toBe(false)
const result = await getUsersWithPermissions('workspace456')
expect(result).toHaveLength(3)
expect(result[0].permissionType).toBe('admin')
expect(result[1].permissionType).toBe('write')
expect(result[2].permissionType).toBe('read')
})
it('should return false when user has no organization memberships', async () => {
// Mock workspace exists, but user has no org memberships
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'workspace-owner-123' }])
}
return createMockChain([]) // No memberships
})
it('should handle users with empty names', async () => {
const mockUsersResults = [
{
userId: 'user1',
email: 'test@example.com',
name: '',
permissionType: 'read' as PermissionType,
},
]
const result = await isOrganizationAdminForWorkspace('user123', 'workspace456')
const usersChain = createMockChain(mockUsersResults)
mockDb.select.mockReturnValue(usersChain)
expect(result).toBe(false)
const result = await getUsersWithPermissions('workspace123')
expect(result[0].name).toBe('')
})
})
describe('hasWorkspaceAdminAccess', () => {
it('should return true when user has direct admin permission', async () => {
const chain = createMockChain([{ permissionType: 'admin' }])
it('should return true when user owns the workspace', async () => {
const chain = createMockChain([{ ownerId: 'user123' }])
mockDb.select.mockReturnValue(chain)
const result = await hasWorkspaceAdminAccess('user123', 'workspace456')
@@ -221,7 +327,22 @@ describe('Permission Utils', () => {
expect(result).toBe(true)
})
it('should return false when user has neither direct nor organization admin access', async () => {
it('should return true when user has direct admin permission', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([{ id: 'perm1' }])
})
const result = await hasWorkspaceAdminAccess('user123', 'workspace456')
expect(result).toBe(true)
})
it('should return false when workspace does not exist', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
@@ -229,51 +350,137 @@ describe('Permission Utils', () => {
expect(result).toBe(false)
})
it('should return false when user has no admin access', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})
const result = await hasWorkspaceAdminAccess('user123', 'workspace456')
expect(result).toBe(false)
})
it('should return false when user has write permission but not admin', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})
const result = await hasWorkspaceAdminAccess('user123', 'workspace456')
expect(result).toBe(false)
})
it('should return false when user has read permission but not admin', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})
const result = await hasWorkspaceAdminAccess('user123', 'workspace456')
expect(result).toBe(false)
})
it('should handle empty workspace ID', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await hasWorkspaceAdminAccess('user123', '')
expect(result).toBe(false)
})
it('should handle empty user ID', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await hasWorkspaceAdminAccess('', 'workspace456')
expect(result).toBe(false)
})
})
describe('isOrganizationOwnerOrAdmin', () => {
it('should return true when user is owner of organization', async () => {
const chain = createMockChain([{ role: 'owner' }])
mockDb.select.mockReturnValue(chain)
const result = await isOrganizationOwnerOrAdmin('user123', 'org456')
expect(result).toBe(true)
})
it('should return true when user is admin of organization', async () => {
const chain = createMockChain([{ role: 'admin' }])
mockDb.select.mockReturnValue(chain)
const result = await isOrganizationOwnerOrAdmin('user123', 'org456')
expect(result).toBe(true)
})
it('should return false when user is regular member of organization', async () => {
const chain = createMockChain([{ role: 'member' }])
mockDb.select.mockReturnValue(chain)
const result = await isOrganizationOwnerOrAdmin('user123', 'org456')
expect(result).toBe(false)
})
it('should return false when user is not member of organization', async () => {
describe('Edge Cases and Security Tests', () => {
it('should handle SQL injection attempts in user IDs', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await isOrganizationOwnerOrAdmin('user123', 'org456')
const result = await getUserEntityPermissions(
"'; DROP TABLE users; --",
'workspace',
'workspace123'
)
expect(result).toBeNull()
})
it('should handle very long entity IDs', async () => {
const longEntityId = 'a'.repeat(1000)
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await getUserEntityPermissions('user123', 'workspace', longEntityId)
expect(result).toBeNull()
})
it('should handle unicode characters in entity names', async () => {
const chain = createMockChain([{ permissionType: 'read' as PermissionType }])
mockDb.select.mockReturnValue(chain)
const result = await getUserEntityPermissions('user123', '📝workspace', '🏢org-id')
expect(result).toBe('read')
})
it('should verify permission hierarchy ordering is consistent', () => {
const permissionOrder: Record<PermissionType, number> = { admin: 3, write: 2, read: 1 }
expect(permissionOrder.admin).toBeGreaterThan(permissionOrder.write)
expect(permissionOrder.write).toBeGreaterThan(permissionOrder.read)
})
it('should handle workspace ownership checks with null owner IDs', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: null }])
}
return createMockChain([])
})
const result = await hasWorkspaceAdminAccess('user123', 'workspace456')
expect(result).toBe(false)
})
it('should handle errors gracefully', async () => {
it('should handle null user ID correctly when owner ID is different', async () => {
let callCount = 0
mockDb.select.mockImplementation(() => {
throw new Error('Database error')
callCount++
if (callCount === 1) {
return createMockChain([{ ownerId: 'other-user' }])
}
return createMockChain([])
})
const result = await isOrganizationOwnerOrAdmin('user123', 'org456')
const result = await hasWorkspaceAdminAccess(null as any, 'workspace456')
expect(result).toBe(false)
})
@@ -289,27 +496,121 @@ describe('Permission Utils', () => {
expect(result).toEqual([])
})
it('should return direct admin workspaces', async () => {
const mockDirectWorkspaces = [
{ id: 'ws1', name: 'Workspace 1', ownerId: 'owner1' },
{ id: 'ws2', name: 'Workspace 2', ownerId: 'owner2' },
it('should return owned workspaces', async () => {
const mockWorkspaces = [
{ id: 'ws1', name: 'My Workspace 1', ownerId: 'user123' },
{ id: 'ws2', name: 'My Workspace 2', ownerId: 'user123' },
]
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain(mockDirectWorkspaces) // direct admin workspaces
return createMockChain(mockWorkspaces) // Owned workspaces
}
return createMockChain([]) // no organization memberships
return createMockChain([]) // No admin workspaces
})
const result = await getManageableWorkspaces('user123')
expect(result).toEqual([
{ id: 'ws1', name: 'Workspace 1', ownerId: 'owner1', accessType: 'direct' },
{ id: 'ws2', name: 'Workspace 2', ownerId: 'owner2', accessType: 'direct' },
{ id: 'ws1', name: 'My Workspace 1', ownerId: 'user123', accessType: 'owner' },
{ id: 'ws2', name: 'My Workspace 2', ownerId: 'user123', accessType: 'owner' },
])
})
it('should return workspaces with direct admin permissions', async () => {
const mockAdminWorkspaces = [{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user' }]
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([]) // No owned workspaces
}
return createMockChain(mockAdminWorkspaces) // Admin workspaces
})
const result = await getManageableWorkspaces('user123')
expect(result).toEqual([
{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user', accessType: 'direct' },
])
})
it('should combine owned and admin workspaces without duplicates', async () => {
const mockOwnedWorkspaces = [
{ id: 'ws1', name: 'My Workspace', ownerId: 'user123' },
{ id: 'ws2', name: 'Another Workspace', ownerId: 'user123' },
]
const mockAdminWorkspaces = [
{ id: 'ws1', name: 'My Workspace', ownerId: 'user123' }, // Duplicate (should be filtered)
{ id: 'ws3', name: 'Shared Workspace', ownerId: 'other-user' },
]
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain(mockOwnedWorkspaces) // Owned workspaces
}
return createMockChain(mockAdminWorkspaces) // Admin workspaces
})
const result = await getManageableWorkspaces('user123')
expect(result).toHaveLength(3)
expect(result).toEqual([
{ id: 'ws1', name: 'My Workspace', ownerId: 'user123', accessType: 'owner' },
{ id: 'ws2', name: 'Another Workspace', ownerId: 'user123', accessType: 'owner' },
{ id: 'ws3', name: 'Shared Workspace', ownerId: 'other-user', accessType: 'direct' },
])
})
it('should handle empty workspace names', async () => {
const mockWorkspaces = [{ id: 'ws1', name: '', ownerId: 'user123' }]
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain(mockWorkspaces)
}
return createMockChain([])
})
const result = await getManageableWorkspaces('user123')
expect(result[0].name).toBe('')
})
it('should handle multiple admin permissions for same workspace', async () => {
const mockAdminWorkspaces = [
{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user' },
{ id: 'ws1', name: 'Shared Workspace', ownerId: 'other-user' }, // Duplicate
]
let callCount = 0
mockDb.select.mockImplementation(() => {
callCount++
if (callCount === 1) {
return createMockChain([]) // No owned workspaces
}
return createMockChain(mockAdminWorkspaces) // Admin workspaces with duplicates
})
const result = await getManageableWorkspaces('user123')
expect(result).toHaveLength(2) // Should include duplicates from admin permissions
})
it('should handle empty user ID gracefully', async () => {
const chain = createMockChain([])
mockDb.select.mockReturnValue(chain)
const result = await getManageableWorkspaces('')
expect(result).toEqual([])
})
})
})
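The createMockChain helper used throughout these tests is defined earlier in the file and not shown in this diff; a plausible reconstruction, offered as an assumption only, is a self-returning thenable builder:

import { vi } from 'vitest'

// Hypothetical shape: every query-builder method returns the chain itself,
// and awaiting the chain resolves to the canned rows.
function createMockChain(results: unknown[]) {
  const chain: any = {
    from: vi.fn(() => chain),
    where: vi.fn(() => chain),
    limit: vi.fn(() => chain),
    innerJoin: vi.fn(() => chain),
    leftJoin: vi.fn(() => chain),
    orderBy: vi.fn(() => chain),
    then: (resolve: (rows: unknown[]) => unknown) => resolve(results),
  }
  return chain
}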

View File

@@ -1,6 +1,6 @@
import { and, eq } from 'drizzle-orm'
import { db } from '@/db'
import { member, permissions, type permissionTypeEnum, user, workspace } from '@/db/schema'
import { permissions, type permissionTypeEnum, user, workspace } from '@/db/schema'
export type PermissionType = (typeof permissionTypeEnum.enumValues)[number]
@@ -32,7 +32,6 @@ export async function getUserEntityPermissions(
return null
}
// If multiple permissions exist (legacy data), return the highest one
const permissionOrder: Record<PermissionType, number> = { admin: 3, write: 2, read: 1 }
const highestPermission = result.reduce((highest, current) => {
return permissionOrder[current.permissionType] > permissionOrder[highest.permissionType]
@@ -46,13 +45,13 @@ export async function getUserEntityPermissions(
/**
* Check if a user has admin permission for a specific workspace
*
* @param userId - The ID of the user to check permissions for
* @param workspaceId - The ID of the workspace to check admin permission for
* @param userId - The ID of the user to check
* @param workspaceId - The ID of the workspace to check
* @returns Promise<boolean> - True if the user has admin permission for the workspace, false otherwise
*/
export async function hasAdminPermission(userId: string, workspaceId: string): Promise<boolean> {
const result = await db
.select()
.select({ id: permissions.id })
.from(permissions)
.where(
and(
@@ -73,13 +72,19 @@ export async function hasAdminPermission(userId: string, workspaceId: string): P
* @param workspaceId - The ID of the workspace to retrieve user permissions for.
* @returns A promise that resolves to an array of user objects, each containing user details and their permission type.
*/
export async function getUsersWithPermissions(workspaceId: string) {
export async function getUsersWithPermissions(workspaceId: string): Promise<
Array<{
userId: string
email: string
name: string
permissionType: PermissionType
}>
> {
const usersWithPermissions = await db
.select({
userId: user.id,
email: user.email,
name: user.name,
image: user.image,
permissionType: permissions.permissionType,
})
.from(permissions)
@@ -87,141 +92,71 @@ export async function getUsersWithPermissions(workspaceId: string) {
.where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId)))
.orderBy(user.email)
// Since each user has only one permission, we can use the results directly
return usersWithPermissions.map((row) => ({
userId: row.userId,
email: row.email,
name: row.name,
image: row.image,
permissionType: row.permissionType,
}))
}
/**
* Check if a user is an admin or owner of any organization that has access to a workspace
* Check if a user has admin access to a specific workspace
*
* @param userId - The ID of the user to check
* @param workspaceId - The ID of the workspace
* @returns Promise<boolean> - True if the user is an organization admin with access to the workspace
*/
export async function isOrganizationAdminForWorkspace(
userId: string,
workspaceId: string
): Promise<boolean> {
try {
// Get the workspace owner
const workspaceRecord = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
if (workspaceRecord.length === 0) {
return false
}
const workspaceOwnerId = workspaceRecord[0].ownerId
// Check if the user is an admin/owner of any organization that the workspace owner belongs to
const orgMemberships = await db
.select({
organizationId: member.organizationId,
role: member.role,
})
.from(member)
.where(
and(
eq(member.userId, userId),
// Only admin and owner roles can manage workspace permissions
eq(member.role, 'admin') // We'll also check for 'owner' separately
)
)
// Also check for owner role
const ownerMemberships = await db
.select({
organizationId: member.organizationId,
role: member.role,
})
.from(member)
.where(and(eq(member.userId, userId), eq(member.role, 'owner')))
const allOrgMemberships = [...orgMemberships, ...ownerMemberships]
if (allOrgMemberships.length === 0) {
return false
}
// Check if the workspace owner is a member of any of these organizations
for (const membership of allOrgMemberships) {
const workspaceOwnerInOrg = await db
.select()
.from(member)
.where(
and(
eq(member.userId, workspaceOwnerId),
eq(member.organizationId, membership.organizationId)
)
)
.limit(1)
if (workspaceOwnerInOrg.length > 0) {
return true
}
}
return false
} catch (error) {
console.error('Error checking organization admin status for workspace:', error)
return false
}
}
/**
* Check if a user has admin permissions (either direct workspace admin or organization admin)
*
* @param userId - The ID of the user to check permissions for
* @param workspaceId - The ID of the workspace to check admin permission for
* @returns Promise<boolean> - True if the user has admin permission for the workspace, false otherwise
* @param workspaceId - The ID of the workspace to check
* @returns Promise<boolean> - True if the user has admin access to the workspace, false otherwise
*/
export async function hasWorkspaceAdminAccess(
userId: string,
workspaceId: string
): Promise<boolean> {
// Check direct workspace admin permission
const directAdmin = await hasAdminPermission(userId, workspaceId)
if (directAdmin) {
const workspaceResult = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
if (workspaceResult.length === 0) {
return false
}
if (workspaceResult[0].ownerId === userId) {
return true
}
// Check organization admin permission
const orgAdmin = await isOrganizationAdminForWorkspace(userId, workspaceId)
return orgAdmin
return await hasAdminPermission(userId, workspaceId)
}
/**
* Get all workspaces that a user can manage (either as direct admin or organization admin)
* Get a list of workspaces that the user has access to
*
* @param userId - The ID of the user
* @returns Promise<Array<{id: string, name: string, ownerId: string}>> - Array of workspaces the user can manage
* @param userId - The ID of the user to check
* @returns Promise<Array<{
* id: string
* name: string
* ownerId: string
* accessType: 'direct' | 'owner'
* }>> - A list of workspaces that the user has access to
*/
export async function getManageableWorkspaces(userId: string): Promise<
Array<{
id: string
name: string
ownerId: string
accessType: 'direct' | 'organization'
accessType: 'direct' | 'owner'
}>
> {
const manageableWorkspaces: Array<{
id: string
name: string
ownerId: string
accessType: 'direct' | 'organization'
}> = []
const ownedWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
})
.from(workspace)
.where(eq(workspace.ownerId, userId))
// Get workspaces where user has direct admin permissions
const directWorkspaces = await db
const adminWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
@@ -237,86 +172,13 @@ export async function getManageableWorkspaces(userId: string): Promise<
)
)
directWorkspaces.forEach((ws) => {
manageableWorkspaces.push({
...ws,
accessType: 'direct',
})
})
const ownedSet = new Set(ownedWorkspaces.map((w) => w.id))
const combined = [
...ownedWorkspaces.map((ws) => ({ ...ws, accessType: 'owner' as const })),
...adminWorkspaces
.filter((ws) => !ownedSet.has(ws.id))
.map((ws) => ({ ...ws, accessType: 'direct' as const })),
]
// Get workspaces where user has organization admin access
// First, get organizations where the user is admin/owner
const adminOrgs = await db
.select({ organizationId: member.organizationId })
.from(member)
.where(
and(
eq(member.userId, userId)
// Check for both admin and owner roles
)
)
// Get all organization workspaces for these orgs
for (const org of adminOrgs) {
// Get all members of this organization
const orgMembers = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, org.organizationId))
// Get workspaces owned by org members
const orgWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
})
.from(workspace)
.where(
// Find workspaces owned by any org member
eq(workspace.ownerId, orgMembers.length > 0 ? orgMembers[0].userId : 'none')
)
// Add these workspaces if not already included
orgWorkspaces.forEach((ws) => {
if (!manageableWorkspaces.find((existing) => existing.id === ws.id)) {
manageableWorkspaces.push({
...ws,
accessType: 'organization',
})
}
})
}
return manageableWorkspaces
}
/**
* Check if a user is an owner or admin of a specific organization
*
* @param userId - The ID of the user to check
* @param organizationId - The ID of the organization
* @returns Promise<boolean> - True if the user is an owner or admin of the organization
*/
export async function isOrganizationOwnerOrAdmin(
userId: string,
organizationId: string
): Promise<boolean> {
try {
const memberRecord = await db
.select({ role: member.role })
.from(member)
.where(and(eq(member.userId, userId), eq(member.organizationId, organizationId)))
.limit(1)
if (memberRecord.length === 0) {
return false // User is not a member of the organization
}
const userRole = memberRecord[0].role
return ['owner', 'admin'].includes(userRole)
} catch (error) {
console.error('Error checking organization ownership/admin status:', error)
return false
}
return combined
}
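With the organization-membership path removed, hasWorkspaceAdminAccess reduces to workspace ownership plus explicit admin permission rows. A minimal sketch of a caller, assuming a Next.js route handler; the import path and session resolution are illustrative, not from this diff:

import { NextResponse } from 'next/server'
import { hasWorkspaceAdminAccess } from '@/lib/permissions/utils' // assumed path

export async function DELETE(
  _request: Request,
  { params }: { params: { workspaceId: string } }
) {
  const userId = 'user123' // resolve from the session in real code
  // Owner or direct workspace admin only; org roles no longer short-circuit this
  if (!(await hasWorkspaceAdminAccess(userId, params.workspaceId))) {
    return new NextResponse('Forbidden', { status: 403 })
  }
  return NextResponse.json({ ok: true })
}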

View File

@@ -4,7 +4,6 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
const logger = createLogger('SimAgentClient')
// Base URL for the sim-agent service
const SIM_AGENT_BASE_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export interface SimAgentRequest {
@@ -45,7 +44,6 @@ class SimAgentClient {
try {
const url = `${this.baseUrl}${endpoint}`
// Use provided API key or try to get it from environment
const requestHeaders: Record<string, string> = {
'Content-Type': 'application/json',
...headers,

View File

@@ -1,5 +1,8 @@
import OpenAI, { AzureOpenAI } from 'openai'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('SimAgentUtils')
const azureApiKey = env.AZURE_OPENAI_API_KEY
const azureEndpoint = env.AZURE_OPENAI_ENDPOINT
@@ -52,7 +55,7 @@ export async function generateChatTitle(message: string): Promise<string | null>
const title = response.choices[0]?.message?.content?.trim() || null
return title
} catch (error) {
console.error('Error generating chat title:', error)
logger.error('Error generating chat title:', error)
return null
}
}
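The relocated title generator is typically consumed when a chat is first created. A minimal usage sketch; the call site and import path are assumptions, but the null-on-error contract matches the code above:

import { generateChatTitle } from '@/lib/sim-agent/utils' // assumed path

// The helper returns null instead of throwing, so callers can fall back
// to a static label without a try/catch of their own.
const title = (await generateChatTitle(firstUserMessage)) ?? 'New chat'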

View File

@@ -154,9 +154,8 @@ export function useSubscriptionUpgrade() {
} catch (error) {
logger.error('Failed to initiate subscription upgrade:', error)
// Log detailed error information for debugging
if (error instanceof Error) {
console.error('Detailed error:', {
logger.error('Detailed error:', {
message: error.message,
stack: error.stack,
cause: error.cause,

View File

@@ -1,5 +1,7 @@
import path from 'path'
export const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB
export const SUPPORTED_DOCUMENT_EXTENSIONS = [
'pdf',
'csv',
@@ -9,21 +11,49 @@ export const SUPPORTED_DOCUMENT_EXTENSIONS = [
'md',
'xlsx',
'xls',
'ppt',
'pptx',
'html',
'htm',
] as const
export type SupportedDocumentExtension = (typeof SUPPORTED_DOCUMENT_EXTENSIONS)[number]
export const SUPPORTED_MIME_TYPES: Record<SupportedDocumentExtension, string[]> = {
pdf: ['application/pdf'],
csv: ['text/csv', 'application/csv'],
doc: ['application/msword'],
docx: ['application/vnd.openxmlformats-officedocument.wordprocessingml.document'],
txt: ['text/plain'],
md: ['text/markdown', 'text/x-markdown'],
xlsx: ['application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'],
xls: ['application/vnd.ms-excel'],
pdf: ['application/pdf', 'application/x-pdf'],
csv: ['text/csv', 'application/csv', 'text/comma-separated-values'],
doc: ['application/msword', 'application/doc', 'application/vnd.ms-word'],
docx: [
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'application/octet-stream',
],
txt: ['text/plain', 'text/x-plain', 'application/txt'],
md: ['text/markdown', 'text/x-markdown', 'text/plain', 'application/markdown'],
xlsx: [
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'application/octet-stream',
],
xls: [
'application/vnd.ms-excel',
'application/excel',
'application/x-excel',
'application/x-msexcel',
],
ppt: ['application/vnd.ms-powerpoint', 'application/powerpoint', 'application/x-mspowerpoint'],
pptx: [
'application/vnd.openxmlformats-officedocument.presentationml.presentation',
'application/octet-stream',
],
html: ['text/html', 'application/xhtml+xml'],
htm: ['text/html', 'application/xhtml+xml'],
}
export const ACCEPTED_FILE_TYPES = Object.values(SUPPORTED_MIME_TYPES).flat()
export const ACCEPTED_FILE_EXTENSIONS = SUPPORTED_DOCUMENT_EXTENSIONS.map((ext) => `.${ext}`)
export const ACCEPT_ATTRIBUTE = [...ACCEPTED_FILE_TYPES, ...ACCEPTED_FILE_EXTENSIONS].join(',')
export interface FileValidationError {
code: 'UNSUPPORTED_FILE_TYPE' | 'MIME_TYPE_MISMATCH'
message: string
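These widened MIME lists (note the application/octet-stream fallbacks some browsers report for Office formats) are meant to be checked extension-first. A sketch of a validator built on the constants above, returning the FileValidationError shape; the function itself is illustrative, not part of this diff:

import path from 'path'

function validateUploadedFile(fileName: string, mimeType: string): FileValidationError | null {
  const ext = path.extname(fileName).toLowerCase().replace('.', '')
  if (!SUPPORTED_DOCUMENT_EXTENSIONS.includes(ext as SupportedDocumentExtension)) {
    return { code: 'UNSUPPORTED_FILE_TYPE', message: `Unsupported extension: .${ext}` }
  }
  // Extension is known; confirm the browser-reported MIME type is accepted for it.
  if (!SUPPORTED_MIME_TYPES[ext as SupportedDocumentExtension].includes(mimeType)) {
    return { code: 'MIME_TYPE_MISMATCH', message: `Unexpected MIME type ${mimeType} for .${ext}` }
  }
  return null
}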

View File

@@ -134,7 +134,7 @@ export async function validateSlackSignature(
return result === 0
} catch (error) {
console.error('Error validating Slack signature:', error)
logger.error('Error validating Slack signature:', error)
return false
}
}
@@ -149,7 +149,6 @@ export function formatWebhookInput(
request: NextRequest
): any {
if (foundWebhook.provider === 'whatsapp') {
// WhatsApp input formatting logic
const data = body?.entry?.[0]?.changes?.[0]?.value
const messages = data?.messages || []
@@ -189,12 +188,10 @@ export function formatWebhookInput(
}
if (foundWebhook.provider === 'telegram') {
// Telegram input formatting logic
const message =
body?.message || body?.edited_message || body?.channel_post || body?.edited_channel_post
if (message) {
// Extract message text with fallbacks for different content types
let input = ''
if (message.text) {
@@ -223,7 +220,6 @@ export function formatWebhookInput(
input = 'Message received'
}
// Create the message object for easier access
const messageObj = {
id: message.message_id,
text: message.text,
@@ -251,7 +247,6 @@ export function formatWebhookInput(
raw: message,
}
// Create sender object
const senderObj = message.from
? {
id: message.from.id,
@@ -263,7 +258,6 @@ export function formatWebhookInput(
}
: null
// Create chat object
const chatObj = message.chat
? {
id: message.chat.id,
@@ -276,9 +270,9 @@ export function formatWebhookInput(
: null
return {
input, // Primary workflow input - the message content
input,
// NEW: Top-level properties for backward compatibility with <blockName.message> syntax
// Top-level properties for backward compatibility with <blockName.message> syntax
message: messageObj,
sender: senderObj,
chat: chatObj,
@@ -683,7 +677,7 @@ export function validateMicrosoftTeamsSignature(
return result === 0
} catch (error) {
console.error('Error validating Microsoft Teams signature:', error)
logger.error('Error validating Microsoft Teams signature:', error)
return false
}
}
@@ -698,12 +692,11 @@ export function verifyProviderWebhook(
): NextResponse | null {
const authHeader = request.headers.get('authorization')
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
// Keep existing switch statement for github, stripe, generic, default
switch (foundWebhook.provider) {
case 'github':
break // No specific auth here
break
case 'stripe':
break // Stripe verification would go here
break
case 'gmail':
if (providerConfig.secret) {
const secretHeader = request.headers.get('X-Webhook-Secret')
@@ -723,22 +716,16 @@ export function verifyProviderWebhook(
break
case 'telegram': {
// Check User-Agent to ensure it's not blocked by middleware
// Log the user agent for debugging purposes
const userAgent = request.headers.get('user-agent') || ''
logger.debug(`[${requestId}] Telegram webhook request received with User-Agent: ${userAgent}`)
// Check if the user agent is empty and warn about it
if (!userAgent) {
logger.warn(
`[${requestId}] Telegram webhook request has empty User-Agent header. This may be blocked by middleware.`
)
}
// We'll accept the request anyway since we're in the provider-specific logic,
// but we'll log the information for debugging
// Telegram uses IP addresses in specific ranges
// This is optional verification that could be added if IP verification is needed
const clientIp =
request.headers.get('x-forwarded-for')?.split(',')[0].trim() ||
request.headers.get('x-real-ip') ||
@@ -749,34 +736,27 @@ export function verifyProviderWebhook(
break
}
case 'microsoftteams':
// Microsoft Teams webhook authentication is handled separately in the main flow
// due to the need for raw body access for HMAC verification
break
case 'generic':
// Generic auth logic: requireAuth, token, secretHeaderName, allowedIps
if (providerConfig.requireAuth) {
let isAuthenticated = false
// Check for token in Authorization header (Bearer token)
if (providerConfig.token) {
const providedToken = authHeader?.startsWith('Bearer ') ? authHeader.substring(7) : null
if (providedToken === providerConfig.token) {
isAuthenticated = true
}
// Check for token in custom header if specified
if (!isAuthenticated && providerConfig.secretHeaderName) {
const customHeaderValue = request.headers.get(providerConfig.secretHeaderName)
if (customHeaderValue === providerConfig.token) {
isAuthenticated = true
}
}
// Return 401 if authentication failed
if (!isAuthenticated) {
logger.warn(`[${requestId}] Unauthorized webhook access attempt - invalid token`)
return new NextResponse('Unauthorized', { status: 401 })
}
}
}
// IP restriction check
if (
providerConfig.allowedIps &&
Array.isArray(providerConfig.allowedIps) &&
@@ -821,7 +801,7 @@ export async function fetchAndProcessAirtablePayloads(
// Logging handles all error logging
let currentCursor: number | null = null
let mightHaveMore = true
let payloadsFetched = 0 // Track total payloads fetched
let payloadsFetched = 0
let apiCallCount = 0
// Use a Map to consolidate changes per record ID
const consolidatedChangesMap = new Map<string, AirtableChange>()
@@ -829,15 +809,7 @@ export async function fetchAndProcessAirtablePayloads(
const allPayloads = []
const localProviderConfig = {
...((webhookData.providerConfig as Record<string, any>) || {}),
} // Local copy
// DEBUG: Log start of function execution with critical info
logger.debug(`[${requestId}] TRACE: fetchAndProcessAirtablePayloads started`, {
webhookId: webhookData.id,
workflowId: workflowData.id,
hasBaseId: !!localProviderConfig.baseId,
hasExternalId: !!localProviderConfig.externalId,
})
}
try {
// --- Essential IDs & Config from localProviderConfig ---
@@ -848,11 +820,9 @@ export async function fetchAndProcessAirtablePayloads(
logger.error(
`[${requestId}] Missing baseId or externalId in providerConfig for webhook ${webhookData.id}. Cannot fetch payloads.`
)
// Error logging handled by logging session
return // Exit early
return
}
// Require credentialId
const credentialId: string | undefined = localProviderConfig.credentialId
if (!credentialId) {
logger.error(
@@ -861,7 +831,6 @@ export async function fetchAndProcessAirtablePayloads(
return
}
// Resolve owner and access token strictly via credentialId (no fallback)
let ownerUserId: string | null = null
try {
const rows = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
@@ -877,18 +846,14 @@ export async function fetchAndProcessAirtablePayloads(
return
}
// --- Retrieve Stored Cursor from localProviderConfig ---
const storedCursor = localProviderConfig.externalWebhookCursor
// Initialize cursor in provider config if missing
if (storedCursor === undefined || storedCursor === null) {
logger.info(
`[${requestId}] No cursor found in providerConfig for webhook ${webhookData.id}, initializing...`
)
// Update the local copy
localProviderConfig.externalWebhookCursor = null
// Add cursor to the database immediately to fix the configuration
try {
await db
.update(webhook)
@@ -901,7 +866,7 @@ export async function fetchAndProcessAirtablePayloads(
})
.where(eq(webhook.id, webhookData.id))
localProviderConfig.externalWebhookCursor = null // Update local copy too
localProviderConfig.externalWebhookCursor = null
logger.info(`[${requestId}] Successfully initialized cursor for webhook ${webhookData.id}`)
} catch (initError: any) {
logger.error(`[${requestId}] Failed to initialize cursor in DB`, {
@@ -909,7 +874,6 @@ export async function fetchAndProcessAirtablePayloads(
error: initError.message,
stack: initError.stack,
})
// Error logging handled by logging session
}
}
@@ -919,13 +883,12 @@ export async function fetchAndProcessAirtablePayloads(
`[${requestId}] Using stored cursor: ${currentCursor} for webhook ${webhookData.id}`
)
} else {
currentCursor = null // Airtable API defaults to 1 if omitted
currentCursor = null
logger.debug(
`[${requestId}] No valid stored cursor for webhook ${webhookData.id}, starting from beginning`
)
}
// --- Get OAuth Token (strict via credentialId) ---
let accessToken: string | null = null
try {
accessToken = await refreshAccessTokenIfNeeded(credentialId, ownerUserId, requestId)
@@ -946,8 +909,7 @@ export async function fetchAndProcessAirtablePayloads(
credentialId,
}
)
// Error logging handled by logging session
return // Exit early
return
}
const airtableApiBase = 'https://api.airtable.com/v0'
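From here the function pages through Airtable's list-payloads endpoint until mightHaveMore goes false. A condensed sketch of that loop using the variables set up above; the endpoint shape follows Airtable's public Web API, and treating externalId as the webhook ID is an assumption read off this config:

// Sketch of the pagination loop; the real implementation also persists the
// cursor back to the webhook row and applies its own call limits.
while (mightHaveMore) {
  apiCallCount++
  const qs = currentCursor !== null ? `?cursor=${currentCursor}` : ''
  const res = await fetch(
    `${airtableApiBase}/bases/${localProviderConfig.baseId}/webhooks/${localProviderConfig.externalId}/payloads${qs}`,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  )
  if (!res.ok) break
  const page = await res.json()
  allPayloads.push(...(page.payloads ?? []))
  payloadsFetched += page.payloads?.length ?? 0
  currentCursor = page.cursor ?? currentCursor
  mightHaveMore = Boolean(page.mightHaveMore)
}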

View File

@@ -75,12 +75,12 @@
"ai": "^4.3.2",
"better-auth": "^1.2.9",
"browser-image-compression": "^2.0.2",
"cheerio": "1.1.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.0.0",
"croner": "^9.0.0",
"csv-parse": "^5.6.0",
"csv-parser": "^3.2.0",
"dat.gui": "0.7.9",
"date-fns": "4.1.0",
"drizzle-orm": "^0.41.0",
@@ -89,6 +89,7 @@
"geist": "1.4.2",
"groq-sdk": "^0.15.0",
"html-to-text": "^9.0.5",
"iconv-lite": "0.7.0",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
"jose": "6.0.11",
@@ -98,11 +99,13 @@
"lucide-react": "^0.479.0",
"mammoth": "^1.9.0",
"mysql2": "3.14.3",
"next": "^15.3.2",
"next": "^15.4.1",
"next-runtime-env": "3.3.0",
"next-themes": "^0.4.6",
"officeparser": "^5.2.0",
"openai": "^4.91.1",
"pdf-parse": "^1.1.1",
"papaparse": "5.5.3",
"pdf-lib": "^1.17.1",
"postgres": "^3.4.5",
"prismjs": "^1.30.0",
"react": "19.1.0",
@@ -117,27 +120,30 @@
"rehype-highlight": "7.0.2",
"remark-gfm": "4.0.1",
"resend": "^4.1.2",
"rtf-parser": "1.3.3",
"rtf-stream-parser": "3.8.0",
"socket.io": "^4.8.1",
"stripe": "^17.7.0",
"tailwind-merge": "^2.6.0",
"tailwindcss-animate": "^1.0.7",
"three": "0.177.0",
"uuid": "^11.1.0",
"word-extractor": "1.0.4",
"xlsx": "0.18.5",
"zod": "^3.24.2"
},
"devDependencies": {
"@react-email/preview-server": "4.2.4",
"@react-email/preview-server": "4.2.8",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^14.6.1",
"@trigger.dev/build": "4.0.1",
"@types/html-to-text": "^9.0.4",
"@types/iconv-lite": "0.0.1",
"@types/js-yaml": "4.0.9",
"@types/jsdom": "21.1.7",
"@types/lodash": "^4.17.16",
"@types/node": "24.2.1",
"@types/papaparse": "5.3.16",
"@types/prismjs": "^1.26.5",
"@types/react": "^19",
"@types/react-dom": "^19",
@@ -160,5 +166,9 @@
"canvas",
"better-sqlite3",
"sharp"
]
],
"overrides": {
"next": "^15.4.1",
"@next/env": "^15.4.1"
}
}

View File

@@ -38,7 +38,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
const config = {
clearExisting: options.clearExisting ?? false,
docsPath: options.docsPath ?? path.join(process.cwd(), '../../apps/docs/content/docs'),
// Use localhost docs in development, production docs otherwise
baseUrl: options.baseUrl ?? (isDev ? 'http://localhost:3001' : 'https://docs.sim.ai'),
chunkSize: options.chunkSize ?? 300, // Max 300 tokens per chunk
minChunkSize: options.minChunkSize ?? 100,
@@ -53,7 +52,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
clearExisting: config.clearExisting,
})
// Initialize the docs chunker
const chunker = new DocsChunker({
chunkSize: config.chunkSize,
minChunkSize: config.minChunkSize,
@@ -61,7 +59,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
baseUrl: config.baseUrl,
})
// Process all .mdx files first (compute embeddings before clearing)
logger.info(`📚 Processing docs from: ${config.docsPath}`)
const chunks = await chunker.chunkAllDocs(config.docsPath)
@@ -72,7 +69,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
logger.info(`📊 Generated ${chunks.length} chunks with embeddings`)
// Clear existing embeddings if requested (after computing new ones to minimize downtime)
if (config.clearExisting) {
logger.info('🗑️ Clearing existing docs embeddings...')
try {
@@ -84,7 +80,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
}
}
// Save chunks to database in batches for better performance
const batchSize = 10
logger.info(`💾 Saving chunks to database (batch size: ${batchSize})...`)
@@ -92,7 +87,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
const batch = chunks.slice(i, i + batchSize)
try {
// Prepare batch data
const batchData = batch.map((chunk) => ({
chunkText: chunk.text,
sourceDocument: chunk.sourceDocument,
@@ -105,7 +99,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
metadata: chunk.metadata,
}))
// Insert batch
await db.insert(docsEmbeddings).values(batchData)
processedChunks += batch.length
@@ -121,7 +114,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
}
}
// Verify results
const savedCount = await db
.select({ count: sql<number>`count(*)` })
.from(docsEmbeddings)
@@ -137,7 +129,6 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
logger.info(` • Database total: ${savedCount}`)
logger.info(` • Duration: ${Math.round(duration / 1000)}s`)
// Summary by document
const documentStats = chunks.reduce(
(acc, chunk) => {
if (!acc[chunk.sourceDocument]) {
@@ -153,7 +144,7 @@ async function processDocsEmbeddings(options: ProcessingOptions = {}) {
logger.info(`📋 Document breakdown:`)
Object.entries(documentStats)
.sort(([, a], [, b]) => b.chunks - a.chunks)
.slice(0, 10) // Top 10 documents
.slice(0, 10)
.forEach(([doc, stats]) => {
logger.info(`${doc}: ${stats.chunks} chunks, ${stats.tokens} tokens`)
})
@@ -188,7 +179,6 @@ async function main() {
const args = process.argv.slice(2)
const options: ProcessingOptions = {}
// Parse command line arguments
if (args.includes('--clear')) {
options.clearExisting = true
}
@@ -215,10 +205,9 @@ Examples:
}
}
// Run the script if executed directly
if (import.meta.url.includes('process-docs-embeddings.ts')) {
main().catch((error) => {
console.error('Script failed:', error)
logger.error('Script failed:', error)
process.exit(1)
})
}

View File

@@ -1070,12 +1070,12 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
})
if (!updateResponse.ok) {
console.error('Failed to update webhook status')
logger.error('Failed to update webhook status')
}
}
}
} catch (error) {
console.error('Error toggling webhook status:', error)
logger.error('Error toggling webhook status:', error)
}
}

View File

@@ -1,6 +1,9 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { LatestCommitParams, LatestCommitResponse } from '@/tools/github/types'
import type { ToolConfig } from '@/tools/types'
const logger = createLogger('GitHubLatestCommitTool')
export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitResponse> = {
id: 'github_latest_commit',
name: 'GitHub Latest Commit',
@@ -50,14 +53,11 @@ export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitRespon
transformResponse: async (response, params) => {
const data = await response.json()
// Create a human-readable content string
const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`
// Initialize files array and add file information
const files = data.files || []
const fileDetailsWithContent = []
// Fetch raw content for each file if includeFileContent is true
if (files.length > 0) {
for (const file of files) {
const fileDetail = {
@@ -72,10 +72,8 @@ export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitRespon
content: undefined as string | undefined,
}
// Only try to fetch content for files that are not too large and not deleted
if (file.status !== 'removed' && file.raw_url) {
try {
// Fetch the raw file content
const contentResponse = await fetch(file.raw_url, {
headers: {
Authorization: `Bearer ${params?.apiKey}`,
@@ -87,7 +85,7 @@ export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitRespon
fileDetail.content = await contentResponse.text()
}
} catch (error) {
console.error(`Failed to fetch content for ${file.filename}:`, error)
logger.error(`Failed to fetch content for ${file.filename}:`, error)
}
}

View File

@@ -1,3 +1,4 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { GmailSearchParams, GmailToolResponse } from '@/tools/gmail/types'
import {
createMessagesSummary,
@@ -6,6 +7,8 @@ import {
} from '@/tools/gmail/utils'
import type { ToolConfig } from '@/tools/types'
const logger = createLogger('GmailSearchTool')
export const gmailSearchTool: ToolConfig<GmailSearchParams, GmailToolResponse> = {
id: 'gmail_search',
name: 'Gmail Search',
@@ -109,7 +112,7 @@ export const gmailSearchTool: ToolConfig<GmailSearchParams, GmailToolResponse> =
},
}
} catch (error: any) {
console.error('Error fetching message details:', error)
logger.error('Error fetching message details:', error)
return {
success: true,
output: {

View File

@@ -1,6 +1,9 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { ToolConfig } from '@/tools/types'
import type { XSearchParams, XSearchResponse, XTweet, XUser } from '@/tools/x/types'
const logger = createLogger('XSearchTool')
export const xSearchTool: ToolConfig<XSearchParams, XSearchResponse> = {
id: 'x_search',
name: 'X Search',
@@ -92,7 +95,7 @@ export const xSearchTool: ToolConfig<XSearchParams, XSearchResponse> = {
// Check if data.data is undefined/null or not an array
if (!data.data || !Array.isArray(data.data)) {
console.error('X Search API Error:', JSON.stringify(data, null, 2))
logger.error('X Search API Error:', JSON.stringify(data, null, 2))
return {
success: false,
error:

bun.lock (222 lines changed)
View File

@@ -14,9 +14,7 @@
},
"devDependencies": {
"@biomejs/biome": "2.0.0-beta.5",
"@next/env": "^15.3.2",
"@types/word-extractor": "1.0.6",
"dotenv-cli": "^8.0.0",
"@next/env": "^15.4.1",
"husky": "9.1.7",
"lint-staged": "16.0.0",
"turbo": "2.5.6",
@@ -105,12 +103,12 @@
"ai": "^4.3.2",
"better-auth": "^1.2.9",
"browser-image-compression": "^2.0.2",
"cheerio": "1.1.2",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.0.0",
"croner": "^9.0.0",
"csv-parse": "^5.6.0",
"csv-parser": "^3.2.0",
"dat.gui": "0.7.9",
"date-fns": "4.1.0",
"drizzle-orm": "^0.41.0",
@@ -119,6 +117,7 @@
"geist": "1.4.2",
"groq-sdk": "^0.15.0",
"html-to-text": "^9.0.5",
"iconv-lite": "0.7.0",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
"jose": "6.0.11",
@@ -128,11 +127,13 @@
"lucide-react": "^0.479.0",
"mammoth": "^1.9.0",
"mysql2": "3.14.3",
"next": "^15.3.2",
"next": "^15.4.1",
"next-runtime-env": "3.3.0",
"next-themes": "^0.4.6",
"officeparser": "^5.2.0",
"openai": "^4.91.1",
"pdf-parse": "^1.1.1",
"papaparse": "5.5.3",
"pdf-lib": "^1.17.1",
"postgres": "^3.4.5",
"prismjs": "^1.30.0",
"react": "19.1.0",
@@ -147,27 +148,30 @@
"rehype-highlight": "7.0.2",
"remark-gfm": "4.0.1",
"resend": "^4.1.2",
"rtf-parser": "1.3.3",
"rtf-stream-parser": "3.8.0",
"socket.io": "^4.8.1",
"stripe": "^17.7.0",
"tailwind-merge": "^2.6.0",
"tailwindcss-animate": "^1.0.7",
"three": "0.177.0",
"uuid": "^11.1.0",
"word-extractor": "1.0.4",
"xlsx": "0.18.5",
"zod": "^3.24.2",
},
"devDependencies": {
"@react-email/preview-server": "4.2.4",
"@react-email/preview-server": "4.2.8",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0",
"@testing-library/user-event": "^14.6.1",
"@trigger.dev/build": "4.0.1",
"@types/html-to-text": "^9.0.4",
"@types/iconv-lite": "0.0.1",
"@types/js-yaml": "4.0.9",
"@types/jsdom": "21.1.7",
"@types/lodash": "^4.17.16",
"@types/node": "24.2.1",
"@types/papaparse": "5.3.16",
"@types/prismjs": "^1.26.5",
"@types/react": "^19",
"@types/react-dom": "^19",
@@ -224,6 +228,8 @@
"sharp",
],
"overrides": {
"@next/env": "^15.4.1",
"next": "^15.4.1",
"react": "19.1.0",
"react-dom": "19.1.0",
},
@@ -654,25 +660,45 @@
"@mdx-js/mdx": ["@mdx-js/mdx@3.1.1", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdx": "^2.0.0", "acorn": "^8.0.0", "collapse-white-space": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "estree-util-scope": "^1.0.0", "estree-walker": "^3.0.0", "hast-util-to-jsx-runtime": "^2.0.0", "markdown-extensions": "^2.0.0", "recma-build-jsx": "^1.0.0", "recma-jsx": "^1.0.0", "recma-stringify": "^1.0.0", "rehype-recma": "^1.0.0", "remark-mdx": "^3.0.0", "remark-parse": "^11.0.0", "remark-rehype": "^11.0.0", "source-map": "^0.7.0", "unified": "^11.0.0", "unist-util-position-from-estree": "^2.0.0", "unist-util-stringify-position": "^4.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, "sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ=="],
"@next/env": ["@next/env@15.5.2", "", {}, "sha512-Qe06ew4zt12LeO6N7j8/nULSOe3fMXE4dM6xgpBQNvdzyK1sv5y4oAP3bq4LamrvGCZtmRYnW8URFCeX5nFgGg=="],
"@napi-rs/canvas": ["@napi-rs/canvas@0.1.78", "", { "optionalDependencies": { "@napi-rs/canvas-android-arm64": "0.1.78", "@napi-rs/canvas-darwin-arm64": "0.1.78", "@napi-rs/canvas-darwin-x64": "0.1.78", "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.78", "@napi-rs/canvas-linux-arm64-gnu": "0.1.78", "@napi-rs/canvas-linux-arm64-musl": "0.1.78", "@napi-rs/canvas-linux-riscv64-gnu": "0.1.78", "@napi-rs/canvas-linux-x64-gnu": "0.1.78", "@napi-rs/canvas-linux-x64-musl": "0.1.78", "@napi-rs/canvas-win32-x64-msvc": "0.1.78" } }, "sha512-YaBHJvT+T1DoP16puvWM6w46Lq3VhwKIJ8th5m1iEJyGh7mibk5dT7flBvMQ1EH1LYmMzXJ+OUhu+8wQ9I6u7g=="],
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-8bGt577BXGSd4iqFygmzIfTYizHb0LGWqH+qgIF/2EDxS5JsSdERJKA8WgwDyNBZgTIIA4D8qUtoQHmxIIquoQ=="],
"@napi-rs/canvas-android-arm64": ["@napi-rs/canvas-android-arm64@0.1.78", "", { "os": "android", "cpu": "arm64" }, "sha512-N1ikxztjrRmh8xxlG5kYm1RuNr8ZW1EINEDQsLhhuy7t0pWI/e7SH91uFVLZKCMDyjel1tyWV93b5fdCAi7ggw=="],
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-2DjnmR6JHK4X+dgTXt5/sOCu/7yPtqpYt8s8hLkHFK3MGkka2snTv3yRMdHvuRtJVkPwCGsvBSwmoQCHatauFQ=="],
"@napi-rs/canvas-darwin-arm64": ["@napi-rs/canvas-darwin-arm64@0.1.78", "", { "os": "darwin", "cpu": "arm64" }, "sha512-FA3aCU3G5yGc74BSmnLJTObnZRV+HW+JBTrsU+0WVVaNyVKlb5nMvYAQuieQlRVemsAA2ek2c6nYtHh6u6bwFw=="],
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-3j7SWDBS2Wov/L9q0mFJtEvQ5miIqfO4l7d2m9Mo06ddsgUK8gWfHGgbjdFlCp2Ek7MmMQZSxpGFqcC8zGh2AA=="],
"@napi-rs/canvas-darwin-x64": ["@napi-rs/canvas-darwin-x64@0.1.78", "", { "os": "darwin", "cpu": "x64" }, "sha512-xVij69o9t/frixCDEoyWoVDKgE3ksLGdmE2nvBWVGmoLu94MWUlv2y4Qzf5oozBmydG5Dcm4pRHFBM7YWa1i6g=="],
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-s6N8k8dF9YGc5T01UPQ08yxsK6fUow5gG1/axWc1HVVBYQBgOjca4oUZF7s4p+kwhkB1bDSGR8QznWrFZ/Rt5g=="],
"@napi-rs/canvas-linux-arm-gnueabihf": ["@napi-rs/canvas-linux-arm-gnueabihf@0.1.78", "", { "os": "linux", "cpu": "arm" }, "sha512-aSEXrLcIpBtXpOSnLhTg4jPsjJEnK7Je9KqUdAWjc7T8O4iYlxWxrXFIF8rV8J79h5jNdScgZpAUWYnEcutR3g=="],
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-o1RV/KOODQh6dM6ZRJGZbc+MOAHww33Vbs5JC9Mp1gDk8cpEO+cYC/l7rweiEalkSm5/1WGa4zY7xrNwObN4+Q=="],
"@napi-rs/canvas-linux-arm64-gnu": ["@napi-rs/canvas-linux-arm64-gnu@0.1.78", "", { "os": "linux", "cpu": "arm64" }, "sha512-dlEPRX1hLGKaY3UtGa1dtkA1uGgFITn2mDnfI6YsLlYyLJQNqHx87D1YTACI4zFCUuLr/EzQDzuX+vnp9YveVg=="],
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-/VUnh7w8RElYZ0IV83nUcP/J4KJ6LLYliiBIri3p3aW2giF+PAVgZb6mk8jbQSB3WlTai8gEmCAr7kptFa1H6g=="],
"@napi-rs/canvas-linux-arm64-musl": ["@napi-rs/canvas-linux-arm64-musl@0.1.78", "", { "os": "linux", "cpu": "arm64" }, "sha512-TsCfjOPZtm5Q/NO1EZHR5pwDPSPjPEttvnv44GL32Zn1uvudssjTLbvaG1jHq81Qxm16GTXEiYLmx4jOLZQYlg=="],
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-sMPyTvRcNKXseNQ/7qRfVRLa0VhR0esmQ29DD6pqvG71+JdVnESJaHPA8t7bc67KD5spP3+DOCNLhqlEI2ZgQg=="],
"@napi-rs/canvas-linux-riscv64-gnu": ["@napi-rs/canvas-linux-riscv64-gnu@0.1.78", "", { "os": "linux", "cpu": "none" }, "sha512-+cpTTb0GDshEow/5Fy8TpNyzaPsYb3clQIjgWRmzRcuteLU+CHEU/vpYvAcSo7JxHYPJd8fjSr+qqh+nI5AtmA=="],
"@next/swc-win32-ia32-msvc": ["@next/swc-win32-ia32-msvc@14.2.32", "", { "os": "win32", "cpu": "ia32" }, "sha512-jHUeDPVHrgFltqoAqDB6g6OStNnFxnc7Aks3p0KE0FbwAvRg6qWKYF5mSTdCTxA3axoSAUwxYdILzXJfUwlHhA=="],
"@napi-rs/canvas-linux-x64-gnu": ["@napi-rs/canvas-linux-x64-gnu@0.1.78", "", { "os": "linux", "cpu": "x64" }, "sha512-wxRcvKfvYBgtrO0Uy8OmwvjlnTcHpY45LLwkwVNIWHPqHAsyoTyG/JBSfJ0p5tWRzMOPDCDqdhpIO4LOgXjeyg=="],
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.2", "", { "os": "win32", "cpu": "x64" }, "sha512-W5VvyZHnxG/2ukhZF/9Ikdra5fdNftxI6ybeVKYvBPDtyx7x4jPPSNduUkfH5fo3zG0JQ0bPxgy41af2JX5D4Q=="],
"@napi-rs/canvas-linux-x64-musl": ["@napi-rs/canvas-linux-x64-musl@0.1.78", "", { "os": "linux", "cpu": "x64" }, "sha512-vQFOGwC9QDP0kXlhb2LU1QRw/humXgcbVp8mXlyBqzc/a0eijlLF9wzyarHC1EywpymtS63TAj8PHZnhTYN6hg=="],
"@napi-rs/canvas-win32-x64-msvc": ["@napi-rs/canvas-win32-x64-msvc@0.1.78", "", { "os": "win32", "cpu": "x64" }, "sha512-/eKlTZBtGUgpRKalzOzRr6h7KVSuziESWXgBcBnXggZmimwIJWPJlEcbrx5Tcwj8rPuZiANXQOG9pPgy9Q4LTQ=="],
"@next/env": ["@next/env@15.4.1", "", {}, "sha512-DXQwFGAE2VH+f2TJsKepRXpODPU+scf5fDbKOME8MMyeyswe4XwgRdiiIYmBfkXU+2ssliLYznajTrOQdnLR5A=="],
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.4.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-L+81yMsiHq82VRXS2RVq6OgDwjvA4kDksGU8hfiDHEXP+ncKIUhUsadAVB+MRIp2FErs/5hpXR0u2eluWPAhig=="],
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.4.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-jfz1RXu6SzL14lFl05/MNkcN35lTLMJWPbqt7Xaj35+ZWAX342aePIJrN6xBdGeKl6jPXJm0Yqo3Xvh3Gpo3Uw=="],
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-k0tOFn3dsnkaGfs6iQz8Ms6f1CyQe4GacXF979sL8PNQxjYS1swx9VsOyUQYaPoGV8nAZ7OX8cYaeiXGq9ahPQ=="],
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-4ogGQ/3qDzbbK3IwV88ltihHFbQVq6Qr+uEapzXHXBH1KsVBZOB50sn6BWHPcFjwSoMX2Tj9eH/fZvQnSIgc3g=="],
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-Jj0Rfw3wIgp+eahMz/tOGwlcYYEFjlBPKU7NqoOkTX0LY45i5W0WcDpgiDWSLrN8KFQq/LW7fZq46gxGCiOYlQ=="],
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-9WlEZfnw1vFqkWsTMzZDgNL7AUI1aiBHi0S2m8jvycPyCq/fbZjtE/nDkhJRYbSjXbtRHYLDBlmP95kpjEmJbw=="],
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.4.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-WodRbZ9g6CQLRZsG3gtrA9w7Qfa9BwDzhFVdlI6sV0OCPq9JrOrJSp9/ioLsezbV8w9RCJ8v55uzJuJ5RgWLZg=="],
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.4.1", "", { "os": "win32", "cpu": "x64" }, "sha512-y+wTBxelk2xiNofmDOVU7O5WxTHcvOoL3srOM0kxTzKDjQ57kPU0tpnPJ/BWrRnsOwXEv0+3QSbGR7hY4n9LkQ=="],
"@noble/ciphers": ["@noble/ciphers@0.6.0", "", {}, "sha512-mIbq/R9QXk5/cTfESb1OKtyFnk7oc1Om/8onA1158K9/OZUQFDEVy55jVTato+xmp3XX6F6Qh0zz0Nc1AxAlRQ=="],
@@ -804,6 +830,10 @@
"@orama/orama": ["@orama/orama@3.1.12", "", {}, "sha512-U7PY8FwXHuJ6bNBpbsqe0KLzb91IcJuORDggqHHkFy1waokY5SpWLN9tzB3AOW776awp6s1bjwts9I9Davy3lw=="],
"@pdf-lib/standard-fonts": ["@pdf-lib/standard-fonts@1.0.0", "", { "dependencies": { "pako": "^1.0.6" } }, "sha512-hU30BK9IUN/su0Mn9VdlVKsWBS6GyhVfqjwl1FjZN4TxP6cCw0jP2w7V3Hf5uX7M0AZJ16vey9yE0ny7Sa59ZA=="],
"@pdf-lib/upng": ["@pdf-lib/upng@1.0.1", "", { "dependencies": { "pako": "^1.0.10" } }, "sha512-dQK2FUMQtowVP00mtIksrlZhdFXQZPC+taih1q4CvPZ5vqdxR/LKBaFg0oAfzd1GlHZXXSPdQfzQnt+ViGvEIQ=="],
"@peculiar/asn1-android": ["@peculiar/asn1-android@2.4.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.4.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-YFueREq97CLslZZBI8dKzis7jMfEHSLxM+nr0Zdx1POiXFLjqqwoY5s0F1UimdBiEw/iKlHey2m56MRDv7Jtyg=="],
"@peculiar/asn1-ecc": ["@peculiar/asn1-ecc@2.4.0", "", { "dependencies": { "@peculiar/asn1-schema": "^2.4.0", "@peculiar/asn1-x509": "^2.4.0", "asn1js": "^3.0.6", "tslib": "^2.8.1" } }, "sha512-fJiYUBCJBDkjh347zZe5H81BdJ0+OGIg0X9z06v8xXUoql3MFeENUX0JsjCaVaU9A0L85PefLPGYkIoGpTnXLQ=="],
@@ -972,7 +1002,7 @@
"@react-email/preview": ["@react-email/preview@0.0.12", "", { "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-g/H5fa9PQPDK6WUEG7iTlC19sAktI23qyoiJtMLqQiXFCfWeQMhqjLGKeLSKkfzszqmfJCjZtpSiKtBoOdxp3Q=="],
"@react-email/preview-server": ["@react-email/preview-server@4.2.4", "", { "dependencies": { "@babel/core": "7.26.10", "@babel/parser": "7.27.0", "@babel/traverse": "7.27.0", "@lottiefiles/dotlottie-react": "0.13.3", "@radix-ui/colors": "3.0.0", "@radix-ui/react-collapsible": "1.1.7", "@radix-ui/react-dropdown-menu": "2.1.10", "@radix-ui/react-popover": "1.1.10", "@radix-ui/react-slot": "1.2.0", "@radix-ui/react-tabs": "1.1.7", "@radix-ui/react-toggle-group": "1.1.6", "@radix-ui/react-tooltip": "1.2.3", "@types/node": "22.14.1", "@types/normalize-path": "3.0.2", "@types/react": "19.0.10", "@types/react-dom": "19.0.4", "@types/webpack": "5.28.5", "autoprefixer": "10.4.21", "chalk": "4.1.2", "clsx": "2.1.1", "esbuild": "0.25.0", "framer-motion": "12.7.5", "json5": "2.2.3", "log-symbols": "4.1.0", "module-punycode": "npm:punycode@2.3.1", "next": "15.4.1", "node-html-parser": "7.0.1", "ora": "5.4.1", "pretty-bytes": "6.1.1", "prism-react-renderer": "2.4.1", "react": "19.0.0", "react-dom": "19.0.0", "sharp": "0.34.1", "socket.io-client": "4.8.1", "sonner": "2.0.3", "source-map-js": "1.2.1", "spamc": "0.0.5", "stacktrace-parser": "0.1.11", "tailwind-merge": "3.2.0", "tailwindcss": "3.4.0", "use-debounce": "10.0.4", "zod": "3.24.3" } }, "sha512-QRh7MUK9rG48lwIvwHoL8ByNCNkQzX9G7hl8T+IsleI55lGeAtlAzze/QHeLfoYZ7wl5LCG05ok/00DP06Xogw=="],
"@react-email/preview-server": ["@react-email/preview-server@4.2.8", "", { "dependencies": { "@babel/core": "7.26.10", "@babel/parser": "7.27.0", "@babel/traverse": "7.27.0", "@lottiefiles/dotlottie-react": "0.13.3", "@radix-ui/colors": "3.0.0", "@radix-ui/react-collapsible": "1.1.7", "@radix-ui/react-dropdown-menu": "2.1.10", "@radix-ui/react-popover": "1.1.10", "@radix-ui/react-slot": "1.2.0", "@radix-ui/react-tabs": "1.1.7", "@radix-ui/react-toggle-group": "1.1.6", "@radix-ui/react-tooltip": "1.2.3", "@types/node": "22.14.1", "@types/normalize-path": "3.0.2", "@types/react": "19.0.10", "@types/react-dom": "19.0.4", "@types/webpack": "5.28.5", "autoprefixer": "10.4.21", "chalk": "4.1.2", "clsx": "2.1.1", "esbuild": "0.25.0", "framer-motion": "12.23.12", "json5": "2.2.3", "log-symbols": "4.1.0", "module-punycode": "npm:punycode@2.3.1", "next": "15.4.1", "node-html-parser": "7.0.1", "ora": "5.4.1", "pretty-bytes": "6.1.1", "prism-react-renderer": "2.4.1", "react": "19.0.0", "react-dom": "19.0.0", "sharp": "0.34.1", "socket.io-client": "4.8.1", "sonner": "2.0.3", "source-map-js": "1.2.1", "spamc": "0.0.5", "stacktrace-parser": "0.1.11", "tailwind-merge": "3.2.0", "tailwindcss": "3.4.0", "use-debounce": "10.0.4", "zod": "3.24.3" } }, "sha512-q/Y4VQtFsrOiTYAAh84M+acu04OROz1Ay2RQCWX6+5GlM+gZkq4tXiE7TXfTj4dFdPkPvU3mCr6LP6Y2yPnXNg=="],
"@react-email/render": ["@react-email/render@1.0.5", "", { "dependencies": { "html-to-text": "9.0.5", "prettier": "3.4.2", "react-promise-suspense": "0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-CA69HYXPk21HhtAXATIr+9JJwpDNmAFCvdMUjWmeoD1+KhJ9NAxusMRxKNeibdZdslmq3edaeOKGbdQ9qjK8LQ=="],
@@ -1224,8 +1254,6 @@
"@standard-schema/utils": ["@standard-schema/utils@0.3.0", "", {}, "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g=="],
"@swc/counter": ["@swc/counter@0.1.3", "", {}, "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ=="],
"@swc/helpers": ["@swc/helpers@0.5.15", "", { "dependencies": { "tslib": "^2.8.0" } }, "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g=="],
"@t3-oss/env-core": ["@t3-oss/env-core@0.13.4", "", { "peerDependencies": { "arktype": "^2.1.0", "typescript": ">=5.0.0", "valibot": "^1.0.0-beta.7 || ^1.0.0", "zod": "^3.24.0 || ^4.0.0-beta.0" }, "optionalPeers": ["typescript", "valibot", "zod"] }, "sha512-zVOiYO0+CF7EnBScz8s0O5JnJLPTU0lrUi8qhKXfIxIJXvI/jcppSiXXsEJwfB4A6XZawY/Wg/EQGKANi/aPmQ=="],
@@ -1274,6 +1302,8 @@
"@testing-library/user-event": ["@testing-library/user-event@14.6.1", "", { "peerDependencies": { "@testing-library/dom": ">=7.21.4" } }, "sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw=="],
"@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="],
"@trigger.dev/build": ["@trigger.dev/build@4.0.1", "", { "dependencies": { "@trigger.dev/core": "4.0.1", "pkg-types": "^1.1.3", "tinyglobby": "^0.2.2", "tsconfck": "3.1.3" } }, "sha512-PGOnCPjVSKkj72xmJb6mdRbzDSP3Ti/C5/tfaBFdSZ7qcoVctSzDfS5iwEGsSoSWSIv+MVy12c4v7Ji/r7MO1A=="],
"@trigger.dev/core": ["@trigger.dev/core@4.0.1", "", { "dependencies": { "@bugsnag/cuid": "^3.1.1", "@electric-sql/client": "1.0.0-beta.1", "@google-cloud/precise-date": "^4.0.0", "@jsonhero/path": "^1.0.21", "@opentelemetry/api": "1.9.0", "@opentelemetry/api-logs": "0.203.0", "@opentelemetry/core": "2.0.1", "@opentelemetry/exporter-logs-otlp-http": "0.203.0", "@opentelemetry/exporter-trace-otlp-http": "0.203.0", "@opentelemetry/instrumentation": "0.203.0", "@opentelemetry/resources": "2.0.1", "@opentelemetry/sdk-logs": "0.203.0", "@opentelemetry/sdk-trace-base": "2.0.1", "@opentelemetry/sdk-trace-node": "2.0.1", "@opentelemetry/semantic-conventions": "1.36.0", "dequal": "^2.0.3", "eventsource": "^3.0.5", "eventsource-parser": "^3.0.0", "execa": "^8.0.1", "humanize-duration": "^3.27.3", "jose": "^5.4.0", "nanoid": "3.3.8", "prom-client": "^15.1.0", "socket.io": "4.7.4", "socket.io-client": "4.7.5", "std-env": "^3.8.1", "superjson": "^2.2.1", "tinyexec": "^0.3.2", "uncrypto": "^0.1.3", "zod": "3.25.76", "zod-error": "1.5.0", "zod-validation-error": "^1.5.0" } }, "sha512-NTffiVPy/zFopujdptGGoy3lj3/CKV16JA8CobCfsEpDfu+K+wEys+9p8PFY8j5I0UI86aqlFpJu9/VRqUQ/yQ=="],
@@ -1382,6 +1412,8 @@
"@types/html-to-text": ["@types/html-to-text@9.0.4", "", {}, "sha512-pUY3cKH/Nm2yYrEmDlPR1mR7yszjGx4DrwPjQ702C4/D5CwHuZTgZdIdwPkRbcuhs7BAh2L5rg3CL5cbRiGTCQ=="],
"@types/iconv-lite": ["@types/iconv-lite@0.0.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-SsRBQxGw7/2/NxYJfBdiUx5a7Ms/voaUhOO9u2y9FTeTNBO1PXohzE4i3JfD8q2Te42HLTn5pyZtDf8j1bPKgQ=="],
"@types/inquirer": ["@types/inquirer@8.2.12", "", { "dependencies": { "@types/through": "*", "rxjs": "^7.2.0" } }, "sha512-YxURZF2ZsSjU5TAe06tW0M3sL4UI9AMPA6dd8I72uOtppzNafcY38xkYgCZ/vsVOAyNdzHmvtTpLWilOrbP0dQ=="],
"@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="],
@@ -1406,6 +1438,8 @@
"@types/normalize-path": ["@types/normalize-path@3.0.2", "", {}, "sha512-DO++toKYPaFn0Z8hQ7Tx+3iT9t77IJo/nDiqTXilgEP+kPNIYdpS9kh3fXuc53ugqwp9pxC1PVjCpV1tQDyqMA=="],
"@types/papaparse": ["@types/papaparse@5.3.16", "", { "dependencies": { "@types/node": "*" } }, "sha512-T3VuKMC2H0lgsjI9buTB3uuKj3EMD2eap1MOuEQuBQ44EnDx/IkGhU6EwiTf9zG3za4SKlmwKAImdDKdNnCsXg=="],
"@types/pg": ["@types/pg@8.6.1", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w=="],
"@types/pg-pool": ["@types/pg-pool@2.0.6", "", { "dependencies": { "@types/pg": "*" } }, "sha512-TaAUE5rq2VQYxab5Ts7WZhKNmuN78Q6PiFonTDdpbx8a1H0M1vhy3rhiMjl+e2iHmogyMw7jZF4FrE6eJUy5HQ=="],
@@ -1436,8 +1470,6 @@
"@types/webxr": ["@types/webxr@0.5.23", "", {}, "sha512-GPe4AsfOSpqWd3xA/0gwoKod13ChcfV67trvxaW2krUbgb9gxQjnCx8zGshzMl8LSHZlNH5gQ8LNScsDuc7nGQ=="],
"@types/word-extractor": ["@types/word-extractor@1.0.6", "", { "dependencies": { "@types/node": "*" } }, "sha512-NDrvZXGJi7cTKXGr8GTP08HiqiueggR1wfHZvBj1sfL8e52qecBSlvl1rBWrvOY0LLkk1DISkKVlFqMTfipLbQ=="],
"@types/xlsx": ["@types/xlsx@0.0.36", "", { "dependencies": { "xlsx": "*" } }, "sha512-mvfrKiKKMErQzLMF8ElYEH21qxWCZtN59pHhWGmWCWFJStYdMWjkDSAy6mGowFxHXaXZWe5/TW7pBUiWclIVOw=="],
"@typespec/ts-http-runtime": ["@typespec/ts-http-runtime@0.3.0", "", { "dependencies": { "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.0", "tslib": "^2.6.2" } }, "sha512-sOx1PKSuFwnIl7z4RN0Ls7N9AQawmR9r66eI5rFCzLDIs8HTIYrIpH9QjYWoX0lkgGrkLxXhi4QnK7MizPRrIg=="],
@@ -1614,8 +1646,6 @@
"bufrw": ["bufrw@1.4.0", "", { "dependencies": { "ansi-color": "^0.2.1", "error": "^7.0.0", "hexer": "^1.5.0", "xtend": "^4.0.0" } }, "sha512-sWm8iPbqvL9+5SiYxXH73UOkyEbGQg7kyHQmReF89WJHQJw2eV4P/yZ0E+b71cczJ4pPobVhXxgQcmfSTgGHxQ=="],
"busboy": ["busboy@1.6.0", "", { "dependencies": { "streamsearch": "^1.1.0" } }, "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA=="],
"cac": ["cac@6.7.14", "", {}, "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ=="],
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
@@ -1648,6 +1678,10 @@
"check-error": ["check-error@2.1.1", "", {}, "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw=="],
"cheerio": ["cheerio@1.1.2", "", { "dependencies": { "cheerio-select": "^2.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.3", "domutils": "^3.2.2", "encoding-sniffer": "^0.2.1", "htmlparser2": "^10.0.0", "parse5": "^7.3.0", "parse5-htmlparser2-tree-adapter": "^7.1.0", "parse5-parser-stream": "^7.1.2", "undici": "^7.12.0", "whatwg-mimetype": "^4.0.0" } }, "sha512-IkxPpb5rS/d1IiLbHMgfPuS0FgiWTtFIm/Nj+2woXDLTZ7fOT2eqzgYbdMlLweqlHbsZjxEChoVK+7iph7jyQg=="],
"cheerio-select": ["cheerio-select@2.1.0", "", { "dependencies": { "boolbase": "^1.0.0", "css-select": "^5.1.0", "css-what": "^6.1.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1" } }, "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g=="],
"chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="],
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
@@ -1708,6 +1742,8 @@
"compute-scroll-into-view": ["compute-scroll-into-view@3.1.1", "", {}, "sha512-VRhuHOLoKYOy4UbilLbUzbYg93XLjv2PncJC50EuTWPA3gaja1UjBsUP/D/9/juV3vQFr6XBEzn9KCAHdUvOHw=="],
"concat-stream": ["concat-stream@2.0.0", "", { "dependencies": { "buffer-from": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.0.2", "typedarray": "^0.0.6" } }, "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A=="],
"concurrently": ["concurrently@9.2.1", "", { "dependencies": { "chalk": "4.1.2", "rxjs": "7.8.2", "shell-quote": "1.8.3", "supports-color": "8.1.1", "tree-kill": "1.2.2", "yargs": "17.7.2" }, "bin": { "concurrently": "dist/bin/concurrently.js", "conc": "dist/bin/concurrently.js" } }, "sha512-fsfrO0MxV64Znoy8/l1vVIjjHa29SZyyqPgQBwhiDcaW8wJc2W3XWVOGx4M3oJBnv/zdUZIIp1gDeS98GzP8Ng=="],
"confbox": ["confbox@0.1.8", "", {}, "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w=="],
@@ -1758,8 +1794,6 @@
"csv-parse": ["csv-parse@5.6.0", "", {}, "sha512-l3nz3euub2QMg5ouu5U09Ew9Wf6/wQ8I++ch1loQ0ljmzhmfZYrH9fflS22i/PQEvsPvxCwxgz5q7UB8K1JO4Q=="],
"csv-parser": ["csv-parser@3.2.0", "", { "bin": { "csv-parser": "bin/csv-parser" } }, "sha512-fgKbp+AJbn1h2dcAHKIdKNSSjfp43BZZykXsCjzALjKy80VXQNHPFJ6T9Afwdzoj24aMkq8GwDS7KGcDPpejrA=="],
"d3-array": ["d3-array@3.2.4", "", { "dependencies": { "internmap": "1 - 2" } }, "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg=="],
"d3-color": ["d3-color@3.1.0", "", {}, "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA=="],
@@ -1878,6 +1912,8 @@
"emoji-regex": ["emoji-regex@10.5.0", "", {}, "sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg=="],
"encoding-sniffer": ["encoding-sniffer@0.2.1", "", { "dependencies": { "iconv-lite": "^0.6.3", "whatwg-encoding": "^3.1.1" } }, "sha512-5gvq20T6vfpekVtqrYQsSCFZ1wEg5+wW0/QaZMWkFr6BqD3NfKs0rLCx4rrVlSWJeZb5NBJgVLswK/w2MWU+Gw=="],
"end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="],
"engine.io": ["engine.io@6.6.4", "", { "dependencies": { "@types/cors": "^2.8.12", "@types/node": ">=10.0.0", "accepts": "~1.3.4", "base64id": "2.0.0", "cookie": "~0.7.2", "cors": "~2.8.5", "debug": "~4.3.1", "engine.io-parser": "~5.2.1", "ws": "~8.17.1" } }, "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g=="],
@@ -1978,8 +2014,6 @@
"fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
"fd-slicer": ["fd-slicer@1.1.0", "", { "dependencies": { "pend": "~1.2.0" } }, "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g=="],
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
"fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="],
@@ -1990,6 +2024,8 @@
"figures": ["figures@3.2.0", "", { "dependencies": { "escape-string-regexp": "^1.0.5" } }, "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg=="],
"file-type": ["file-type@16.5.4", "", { "dependencies": { "readable-web-to-node-stream": "^3.0.0", "strtok3": "^6.2.4", "token-types": "^4.1.1" } }, "sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw=="],
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
"find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="],
@@ -2134,7 +2170,7 @@
"husky": ["husky@9.1.7", "", { "bin": { "husky": "bin.js" } }, "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA=="],
"iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"iconv-lite": ["iconv-lite@0.7.0", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ=="],
"ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
@@ -2510,7 +2546,7 @@
"neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="],
"next": ["next@15.5.2", "", { "dependencies": { "@next/env": "15.5.2", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.2", "@next/swc-darwin-x64": "15.5.2", "@next/swc-linux-arm64-gnu": "15.5.2", "@next/swc-linux-arm64-musl": "15.5.2", "@next/swc-linux-x64-gnu": "15.5.2", "@next/swc-linux-x64-musl": "15.5.2", "@next/swc-win32-arm64-msvc": "15.5.2", "@next/swc-win32-x64-msvc": "15.5.2", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-H8Otr7abj1glFhbGnvUt3gz++0AF1+QoCXEBmd/6aKbfdFwrn0LpA836Ed5+00va/7HQSDD+mOoVhn3tNy3e/Q=="],
"next": ["next@15.4.1", "", { "dependencies": { "@next/env": "15.4.1", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.4.1", "@next/swc-darwin-x64": "15.4.1", "@next/swc-linux-arm64-gnu": "15.4.1", "@next/swc-linux-arm64-musl": "15.4.1", "@next/swc-linux-x64-gnu": "15.4.1", "@next/swc-linux-x64-musl": "15.4.1", "@next/swc-win32-arm64-msvc": "15.4.1", "@next/swc-win32-x64-msvc": "15.4.1", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-eNKB1q8C7o9zXF8+jgJs2CzSLIU3T6bQtX6DcTnCq1sIR1CJ0GlSyRs1BubQi3/JgCnr9Vr+rS5mOMI38FFyQw=="],
"next-runtime-env": ["next-runtime-env@3.3.0", "", { "dependencies": { "next": "^14", "react": "^18" } }, "sha512-JgKVnog9mNbjbjH9csVpMnz2tB2cT5sLF+7O47i6Ze/s/GoiKdV7dHhJHk1gwXpo6h5qPj5PTzryldtSjvrHuQ=="],
@@ -2548,6 +2584,8 @@
"object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="],
"officeparser": ["officeparser@5.2.0", "", { "dependencies": { "@xmldom/xmldom": "^0.8.10", "concat-stream": "^2.0.0", "file-type": "^16.5.4", "node-ensure": "^0.0.0", "pdfjs-dist": "^5.3.31", "yauzl": "^3.1.3" }, "bin": { "officeparser": "officeParser.js" } }, "sha512-EGdHj4RgP5FtyTHsqgDz2ZXkV2q2o2Ktwk4ogHpVcRT1+udwb3pRLfmlNO9ZMDZtDhJz5qNIUAs/+ItrUWoHiQ=="],
"ollama-ai-provider": ["ollama-ai-provider@1.2.0", "", { "dependencies": { "@ai-sdk/provider": "^1.0.0", "@ai-sdk/provider-utils": "^2.0.0", "partial-json": "0.1.7" }, "peerDependencies": { "zod": "^3.0.0" }, "optionalPeers": ["zod"] }, "sha512-jTNFruwe3O/ruJeppI/quoOUxG7NA6blG3ZyQj3lei4+NnJo7bi3eIRWqlVpRlu/mbzbFXeJSBuYQWF6pzGKww=="],
"on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="],
@@ -2576,12 +2614,18 @@
"pako": ["pako@1.0.11", "", {}, "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw=="],
"papaparse": ["papaparse@5.5.3", "", {}, "sha512-5QvjGxYVjxO59MGU2lHVYpRWBBtKHnlIAcSe1uNFCkkptUh63NFRj0FJQm7nR67puEruUci/ZkjmEFrjCAyP4A=="],
"parse-css-color": ["parse-css-color@0.2.1", "", { "dependencies": { "color-name": "^1.1.4", "hex-rgb": "^4.1.0" } }, "sha512-bwS/GGIFV3b6KS4uwpzCFj4w297Yl3uqnSgIPsoQkx7GMLROXfMnWvxfNkL0oh8HVhZA4hvJoEoEIqonfJ3BWg=="],
"parse-entities": ["parse-entities@4.0.2", "", { "dependencies": { "@types/unist": "^2.0.0", "character-entities-legacy": "^3.0.0", "character-reference-invalid": "^2.0.0", "decode-named-character-reference": "^1.0.0", "is-alphanumerical": "^2.0.0", "is-decimal": "^2.0.0", "is-hexadecimal": "^2.0.0" } }, "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw=="],
"parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="],
"parse5-htmlparser2-tree-adapter": ["parse5-htmlparser2-tree-adapter@7.1.0", "", { "dependencies": { "domhandler": "^5.0.3", "parse5": "^7.0.0" } }, "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g=="],
"parse5-parser-stream": ["parse5-parser-stream@7.1.2", "", { "dependencies": { "parse5": "^7.0.0" } }, "sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow=="],
"parseley": ["parseley@0.12.1", "", { "dependencies": { "leac": "^0.6.0", "peberminta": "^0.9.0" } }, "sha512-e6qHKe3a9HWr0oMRVDTRhKce+bRO8VGQR3NyVwcjwrbhMmFCX9KszEV35+rn4AdilFAq9VPxP/Fe1wC9Qjd2lw=="],
"partial-json": ["partial-json@0.1.7", "", {}, "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA=="],
@@ -2600,10 +2644,14 @@
"pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="],
"pdf-parse": ["pdf-parse@1.1.1", "", { "dependencies": { "debug": "^3.1.0", "node-ensure": "^0.0.0" } }, "sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A=="],
"pdf-lib": ["pdf-lib@1.17.1", "", { "dependencies": { "@pdf-lib/standard-fonts": "^1.0.0", "@pdf-lib/upng": "^1.0.1", "pako": "^1.0.11", "tslib": "^1.11.1" } }, "sha512-V/mpyJAoTsN4cnP31vc0wfNA1+p20evqqnap0KLoRUN0Yk/p3wN52DOEsL4oBFcLdb76hlpKPtzJIgo67j/XLw=="],
"pdfjs-dist": ["pdfjs-dist@5.4.54", "", { "optionalDependencies": { "@napi-rs/canvas": "^0.1.74" } }, "sha512-TBAiTfQw89gU/Z4LW98Vahzd2/LoCFprVGvGbTgFt+QCB1F+woyOPmNNVgLa6djX9Z9GGTnj7qE1UzpOVJiINw=="],
"peberminta": ["peberminta@0.9.0", "", {}, "sha512-XIxfHpEuSJbITd1H3EeQwpcZbTLHc+VVr8ANI9t5sit565tsI4/xK3KWTUFE2e6QiangUkh3B0jihzmGnNrRsQ=="],
"peek-readable": ["peek-readable@4.1.0", "", {}, "sha512-ZI3LnwUv5nOGbQzD9c2iDG6toheuXSZP5esSHBjopsXH4dg19soufvpUGA3uohi5anFtGb2lhAVdHzH6R/Evvg=="],
"pend": ["pend@1.2.0", "", {}, "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="],
"pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="],
@@ -2672,7 +2720,7 @@
"prismjs": ["prismjs@1.30.0", "", {}, "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw=="],
"process": ["process@0.10.1", "", {}, "sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA=="],
"process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="],
"process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="],
@@ -2748,6 +2796,8 @@
"readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
"readable-web-to-node-stream": ["readable-web-to-node-stream@3.0.4", "", { "dependencies": { "readable-stream": "^4.7.0" } }, "sha512-9nX56alTf5bwXQ3ZDipHJhusu9NTQJ/CVPtb/XHAJCXihZeitfJvIRS4GqQ/mfIoOE3IelHMrpayVrosdHBuLw=="],
"readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="],
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
@@ -2816,6 +2866,10 @@
"rrweb-cssom": ["rrweb-cssom@0.8.0", "", {}, "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw=="],
"rtf-parser": ["rtf-parser@1.3.3", "", { "dependencies": { "iconv-lite": "^0.4.15", "readable-stream": "^2.2.2" } }, "sha512-sz2eb4tcCFtwVfs5Ei/l3JnSQGqpDv+drFuNz/zwn2tA24cL2WTuk2VMo2bA4IcRgkn38juAOri2hB9nv85u2Q=="],
"rtf-stream-parser": ["rtf-stream-parser@3.8.0", "", {}, "sha512-Hj+FWJ8IhywyxTy0/J1ZbPQLt1+2S8uagOwrU5u1WLBs2hSo0bOc4ZA06sWZHOypi22M60WAgkzUoTclWiqI2Q=="],
"run-async": ["run-async@2.4.1", "", {}, "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ=="],
"run-exclusive": ["run-exclusive@2.2.19", "", { "dependencies": { "minimal-polyfills": "^2.2.3" } }, "sha512-K3mdoAi7tjJ/qT7Flj90L7QyPozwUaAG+CVhkdDje4HLKXUYC3N/Jzkau3flHVDLQVhiHBtcimVodMjN9egYbA=="],
@@ -2932,8 +2986,6 @@
"stdin-discarder": ["stdin-discarder@0.2.2", "", {}, "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ=="],
"streamsearch": ["streamsearch@1.1.0", "", {}, "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg=="],
"string-argv": ["string-argv@0.3.2", "", {}, "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q=="],
"string-template": ["string-template@0.2.1", "", {}, "sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw=="],
@@ -2966,6 +3018,8 @@
"strnum": ["strnum@2.1.1", "", {}, "sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw=="],
"strtok3": ["strtok3@6.3.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^4.1.0" } }, "sha512-fZtbhtvI9I48xDSywd/somNqgUHl2L2cstmXCCif0itOf96jeW18MBSyrLuNicYQVkvpOxkZtkzujiTJ9LW5Jw=="],
"style-to-js": ["style-to-js@1.1.17", "", { "dependencies": { "style-to-object": "1.0.9" } }, "sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA=="],
"style-to-object": ["style-to-object@1.0.9", "", { "dependencies": { "inline-style-parser": "0.2.4" } }, "sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw=="],
@@ -3038,6 +3092,8 @@
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
"token-types": ["token-types@4.2.1", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-6udB24Q737UD/SDsKAHI9FCRP7Bqc9D/MQUV02ORQg5iskjtLJlZJNdN4kKtcdtwCeWIwIHDGaUsTsCCAa8sFQ=="],
"tough-cookie": ["tough-cookie@5.1.2", "", { "dependencies": { "tldts": "^6.1.32" } }, "sha512-FVDYdxtnj0G6Qm/DhNPSb8Ju59ULcup3tuJxkFb5K8Bv2pUXILbf0xZWU8PX8Ov19OXljbUyveOFwRMwkXzO+A=="],
"tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="],
@@ -3074,6 +3130,8 @@
"type-fest": ["type-fest@0.7.1", "", {}, "sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg=="],
"typedarray": ["typedarray@0.0.6", "", {}, "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA=="],
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
"ufo": ["ufo@1.6.1", "", {}, "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA=="],
@@ -3084,6 +3142,8 @@
"underscore": ["underscore@1.13.7", "", {}, "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g=="],
"undici": ["undici@7.15.0", "", {}, "sha512-7oZJCPvvMvTd0OlqWsIxTuItTpJBpU1tcbVl24FMn3xt3+VSunwUasmfPJRE57oNO1KsZ4PgA1xTdAX4hq8NyQ=="],
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
"unfetch": ["unfetch@4.2.0", "", {}, "sha512-F9p7yYCn6cIW9El1zi0HI6vqpeIvBsr3dSuRO6Xuppb1u5rXpCPmMvLSyECLhybr9isec8Ohl0hPekMVrEinDA=="],
@@ -3170,8 +3230,6 @@
"word": ["word@0.3.0", "", {}, "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA=="],
"word-extractor": ["word-extractor@1.0.4", "", { "dependencies": { "saxes": "^5.0.1", "yauzl": "^2.10.0" } }, "sha512-PyAGZQ2gjnVA5kcZAOAxoYciCMaAvu0dbVlw/zxHphhy+3be8cDeYKHJPO8iedIM3Sx0arA/ugKTJyXhZNgo6g=="],
"wrap-ansi": ["wrap-ansi@6.2.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA=="],
"wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
@@ -3204,7 +3262,7 @@
"yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
"yauzl": ["yauzl@2.10.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "fd-slicer": "~1.1.0" } }, "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g=="],
"yauzl": ["yauzl@3.2.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "pend": "~1.2.0" } }, "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w=="],
"yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
@@ -3272,6 +3330,8 @@
"@esbuild-kit/core-utils/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { "@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="],
"@inquirer/external-editor/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="],
"@isaacs/cliui/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
@@ -3502,8 +3562,6 @@
"@react-email/preview-server/framer-motion": ["framer-motion@12.7.5", "", { "dependencies": { "motion-dom": "^12.7.5", "motion-utils": "^12.7.5", "tslib": "^2.4.0" }, "peerDependencies": { "@emotion/is-prop-valid": "*", "react": "^18.0.0 || ^19.0.0", "react-dom": "^18.0.0 || ^19.0.0" }, "optionalPeers": ["@emotion/is-prop-valid", "react", "react-dom"] }, "sha512-iD+vBOLn8E8bwBAFUQ1DYXjivm+cGGPgQUQ4Doleq7YP/zHdozUVwAMBJwOOfCTbtM8uOooMi77noD261Kxiyw=="],
"@react-email/preview-server/next": ["next@15.4.1", "", { "dependencies": { "@next/env": "15.4.1", "@swc/helpers": "0.5.15", "caniuse-lite": "^1.0.30001579", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.4.1", "@next/swc-darwin-x64": "15.4.1", "@next/swc-linux-arm64-gnu": "15.4.1", "@next/swc-linux-arm64-musl": "15.4.1", "@next/swc-linux-x64-gnu": "15.4.1", "@next/swc-linux-x64-musl": "15.4.1", "@next/swc-win32-arm64-msvc": "15.4.1", "@next/swc-win32-x64-msvc": "15.4.1", "sharp": "^0.34.3" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.51.1", "babel-plugin-react-compiler": "*", "react": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "react-dom": "^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "babel-plugin-react-compiler", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-eNKB1q8C7o9zXF8+jgJs2CzSLIU3T6bQtX6DcTnCq1sIR1CJ0GlSyRs1BubQi3/JgCnr9Vr+rS5mOMI38FFyQw=="],
"@react-email/preview-server/sharp": ["sharp@0.34.1", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.3", "semver": "^7.7.1" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.1", "@img/sharp-darwin-x64": "0.34.1", "@img/sharp-libvips-darwin-arm64": "1.1.0", "@img/sharp-libvips-darwin-x64": "1.1.0", "@img/sharp-libvips-linux-arm": "1.1.0", "@img/sharp-libvips-linux-arm64": "1.1.0", "@img/sharp-libvips-linux-ppc64": "1.1.0", "@img/sharp-libvips-linux-s390x": "1.1.0", "@img/sharp-libvips-linux-x64": "1.1.0", "@img/sharp-libvips-linuxmusl-arm64": "1.1.0", "@img/sharp-libvips-linuxmusl-x64": "1.1.0", "@img/sharp-linux-arm": "0.34.1", "@img/sharp-linux-arm64": "0.34.1", "@img/sharp-linux-s390x": "0.34.1", "@img/sharp-linux-x64": "0.34.1", "@img/sharp-linuxmusl-arm64": "0.34.1", "@img/sharp-linuxmusl-x64": "0.34.1", "@img/sharp-wasm32": "0.34.1", "@img/sharp-win32-ia32": "0.34.1", "@img/sharp-win32-x64": "0.34.1" } }, "sha512-1j0w61+eVxu7DawFJtnfYcvSv6qPFvfTaqzTQ2BLknVhHTwGS8sc63ZBF4rzkWMBVKybo4S5OBtDdZahh2A1xg=="],
"@react-email/preview-server/tailwind-merge": ["tailwind-merge@3.2.0", "", {}, "sha512-FQT/OVqCD+7edmmJpsgCsY820RTD5AkBryuG5IUqR5YQZSdj5xlH5nLgH7YPths7WsLPSpSBNneJdM8aS8aeFA=="],
@@ -3612,12 +3670,16 @@
"@types/cors/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/iconv-lite/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/jsdom/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/mysql/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/node-fetch/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/papaparse/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/pg/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/tedious/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
@@ -3626,8 +3688,6 @@
"@types/webpack/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/word-extractor/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@vitejs/plugin-react/@babel/core": ["@babel/core@7.28.3", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.28.3", "@babel/helpers": "^7.28.3", "@babel/parser": "^7.28.3", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.3", "@babel/types": "^7.28.2", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ=="],
"accepts/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
@@ -3648,12 +3708,18 @@
"chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
"cheerio/htmlparser2": ["htmlparser2@10.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.2.1", "entities": "^6.0.0" } }, "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g=="],
"cli-truncate/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="],
"cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
"concat-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
"encoding-sniffer/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"engine.io/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"engine.io/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="],
@@ -3688,6 +3754,8 @@
"groq-sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
"hexer/process": ["process@0.10.1", "", {}, "sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA=="],
"hoist-non-react-statics/react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],
"htmlparser2/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
@@ -3730,12 +3798,12 @@
"micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"mysql2/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"named-placeholders/lru-cache": ["lru-cache@7.18.3", "", {}, "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA=="],
"next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
"next-runtime-env/next": ["next@14.2.32", "", { "dependencies": { "@next/env": "14.2.32", "@swc/helpers": "0.5.5", "busboy": "1.6.0", "caniuse-lite": "^1.0.30001579", "graceful-fs": "^4.2.11", "postcss": "8.4.31", "styled-jsx": "5.1.1" }, "optionalDependencies": { "@next/swc-darwin-arm64": "14.2.32", "@next/swc-darwin-x64": "14.2.32", "@next/swc-linux-arm64-gnu": "14.2.32", "@next/swc-linux-arm64-musl": "14.2.32", "@next/swc-linux-x64-gnu": "14.2.32", "@next/swc-linux-x64-musl": "14.2.32", "@next/swc-win32-arm64-msvc": "14.2.32", "@next/swc-win32-ia32-msvc": "14.2.32", "@next/swc-win32-x64-msvc": "14.2.32" }, "peerDependencies": { "@opentelemetry/api": "^1.1.0", "@playwright/test": "^1.41.2", "react": "^18.2.0", "react-dom": "^18.2.0", "sass": "^1.3.0" }, "optionalPeers": ["@opentelemetry/api", "@playwright/test", "sass"], "bin": { "next": "dist/bin/next" } }, "sha512-fg5g0GZ7/nFc09X8wLe6pNSU8cLWbLRG3TZzPJ1BJvi2s9m7eF991se67wliM9kR5yLHRkyGKU49MMx58s3LJg=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"nypm/pkg-types": ["pkg-types@2.3.0", "", { "dependencies": { "confbox": "^0.2.2", "exsolve": "^1.0.7", "pathe": "^2.0.3" } }, "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig=="],
@@ -3748,7 +3816,7 @@
"parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],
"pdf-parse/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="],
"pdf-lib/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="],
"playwright/fsevents": ["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="],
@@ -3770,12 +3838,16 @@
"react-email/ora": ["ora@8.2.0", "", { "dependencies": { "chalk": "^5.3.0", "cli-cursor": "^5.0.0", "cli-spinners": "^2.9.2", "is-interactive": "^2.0.0", "is-unicode-supported": "^2.0.0", "log-symbols": "^6.0.0", "stdin-discarder": "^0.2.2", "string-width": "^7.2.0", "strip-ansi": "^7.1.0" } }, "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw=="],
"readable-web-to-node-stream/readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="],
"resend/@react-email/render": ["@react-email/render@1.1.2", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3", "react-promise-suspense": "^0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-RnRehYN3v9gVlNMehHPHhyp2RQo7+pSkHDtXPvg3s0GbzM9SQMW4Qrf8GRNvtpLC4gsI+Wt0VatNRUFqjvevbw=="],
"restore-cursor/onetime": ["onetime@5.1.2", "", { "dependencies": { "mimic-fn": "^2.1.0" } }, "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg=="],
"restore-cursor/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="],
"rtf-parser/iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
"sim/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"sim/lucide-react": ["lucide-react@0.479.0", "", { "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-aBhNnveRhorBOK7uA4gDjgaf+YlHMdMhQ/3cupk6exM10hWlEU+2QtWYOfhXhjAsmdb6LeKR+NZnow4UxRRiTQ=="],
@@ -3828,7 +3900,7 @@
"webpack/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"word-extractor/saxes": ["saxes@5.0.1", "", { "dependencies": { "xmlchars": "^2.2.0" } }, "sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw=="],
"whatwg-encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
"@anthropic-ai/sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
@@ -4080,28 +4152,6 @@
"@react-email/preview-server/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.0", "", { "os": "win32", "cpu": "x64" }, "sha512-ZENoHJBxA20C2zFzh6AI4fT6RraMzjYw4xKWemRTRmRVtN9c5DcH9r/f2ihEkMjOW5eGgrwCslG/+Y/3bL+DHQ=="],
"@react-email/preview-server/next/@next/env": ["@next/env@15.4.1", "", {}, "sha512-DXQwFGAE2VH+f2TJsKepRXpODPU+scf5fDbKOME8MMyeyswe4XwgRdiiIYmBfkXU+2ssliLYznajTrOQdnLR5A=="],
"@react-email/preview-server/next/@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.4.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-L+81yMsiHq82VRXS2RVq6OgDwjvA4kDksGU8hfiDHEXP+ncKIUhUsadAVB+MRIp2FErs/5hpXR0u2eluWPAhig=="],
"@react-email/preview-server/next/@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.4.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-jfz1RXu6SzL14lFl05/MNkcN35lTLMJWPbqt7Xaj35+ZWAX342aePIJrN6xBdGeKl6jPXJm0Yqo3Xvh3Gpo3Uw=="],
"@react-email/preview-server/next/@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-k0tOFn3dsnkaGfs6iQz8Ms6f1CyQe4GacXF979sL8PNQxjYS1swx9VsOyUQYaPoGV8nAZ7OX8cYaeiXGq9ahPQ=="],
"@react-email/preview-server/next/@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.4.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-4ogGQ/3qDzbbK3IwV88ltihHFbQVq6Qr+uEapzXHXBH1KsVBZOB50sn6BWHPcFjwSoMX2Tj9eH/fZvQnSIgc3g=="],
"@react-email/preview-server/next/@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-Jj0Rfw3wIgp+eahMz/tOGwlcYYEFjlBPKU7NqoOkTX0LY45i5W0WcDpgiDWSLrN8KFQq/LW7fZq46gxGCiOYlQ=="],
"@react-email/preview-server/next/@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.4.1", "", { "os": "linux", "cpu": "x64" }, "sha512-9WlEZfnw1vFqkWsTMzZDgNL7AUI1aiBHi0S2m8jvycPyCq/fbZjtE/nDkhJRYbSjXbtRHYLDBlmP95kpjEmJbw=="],
"@react-email/preview-server/next/@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.4.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-WodRbZ9g6CQLRZsG3gtrA9w7Qfa9BwDzhFVdlI6sV0OCPq9JrOrJSp9/ioLsezbV8w9RCJ8v55uzJuJ5RgWLZg=="],
"@react-email/preview-server/next/@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.4.1", "", { "os": "win32", "cpu": "x64" }, "sha512-y+wTBxelk2xiNofmDOVU7O5WxTHcvOoL3srOM0kxTzKDjQ57kPU0tpnPJ/BWrRnsOwXEv0+3QSbGR7hY4n9LkQ=="],
"@react-email/preview-server/next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
"@react-email/preview-server/next/sharp": ["sharp@0.34.3", "", { "dependencies": { "color": "^4.2.3", "detect-libc": "^2.0.4", "semver": "^7.7.2" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.3", "@img/sharp-darwin-x64": "0.34.3", "@img/sharp-libvips-darwin-arm64": "1.2.0", "@img/sharp-libvips-darwin-x64": "1.2.0", "@img/sharp-libvips-linux-arm": "1.2.0", "@img/sharp-libvips-linux-arm64": "1.2.0", "@img/sharp-libvips-linux-ppc64": "1.2.0", "@img/sharp-libvips-linux-s390x": "1.2.0", "@img/sharp-libvips-linux-x64": "1.2.0", "@img/sharp-libvips-linuxmusl-arm64": "1.2.0", "@img/sharp-libvips-linuxmusl-x64": "1.2.0", "@img/sharp-linux-arm": "0.34.3", "@img/sharp-linux-arm64": "0.34.3", "@img/sharp-linux-ppc64": "0.34.3", "@img/sharp-linux-s390x": "0.34.3", "@img/sharp-linux-x64": "0.34.3", "@img/sharp-linuxmusl-arm64": "0.34.3", "@img/sharp-linuxmusl-x64": "0.34.3", "@img/sharp-wasm32": "0.34.3", "@img/sharp-win32-arm64": "0.34.3", "@img/sharp-win32-ia32": "0.34.3", "@img/sharp-win32-x64": "0.34.3" } }, "sha512-eX2IQ6nFohW4DbvHIOLRB3MHFpYqaqvXd3Tp5e/T/dSH83fxaNJQRvDMhASmkNTsNTVF2/OOopzRCt7xokgPfg=="],
"@react-email/preview-server/sharp/@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.1.0" }, "os": "darwin", "cpu": "arm64" }, "sha512-pn44xgBtgpEbZsu+lWf2KNb6OAf70X68k+yk69Ic2Xz11zHR/w24/U49XT7AeRwJ0Px+mhALhU5LPci1Aymk7A=="],
"@react-email/preview-server/sharp/@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.1", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.1.0" }, "os": "darwin", "cpu": "x64" }, "sha512-VfuYgG2r8BpYiOUN+BfYeFo69nP/MIwAtSJ7/Zpxc5QF3KS22z8Pvg3FkrSFJBPNQ7mmcUcYQFBmEQp7eu1F8Q=="],
@@ -4202,12 +4252,16 @@
"@types/cors/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/iconv-lite/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/jsdom/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/mysql/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/node-fetch/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/papaparse/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/pg/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/tedious/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
@@ -4216,8 +4270,6 @@
"@types/webpack/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@types/word-extractor/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"@vitejs/plugin-react/@babel/core/@babel/parser": ["@babel/parser@7.28.3", "", { "dependencies": { "@babel/types": "^7.28.2" }, "bin": "./bin/babel-parser.js" }, "sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA=="],
"@vitejs/plugin-react/@babel/core/@babel/traverse": ["@babel/traverse@7.28.3", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.3", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.3", "@babel/template": "^7.27.2", "@babel/types": "^7.28.2", "debug": "^4.3.1" } }, "sha512-7w4kZYHneL3A6NP2nxzHvT3HCZ7puDZZjFMqDpBPECub79sTtSO5CGXDkKrTQq8ksAwfD/XI2MRFX23njdDaIQ=="],
@@ -4268,30 +4320,6 @@
"log-update/wrap-ansi/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="],
"next-runtime-env/next/@next/env": ["@next/env@14.2.32", "", {}, "sha512-n9mQdigI6iZ/DF6pCTwMKeWgF2e8lg7qgt5M7HXMLtyhZYMnf/u905M18sSpPmHL9MKp9JHo56C6jrD2EvWxng=="],
"next-runtime-env/next/@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@14.2.32", "", { "os": "darwin", "cpu": "arm64" }, "sha512-osHXveM70zC+ilfuFa/2W6a1XQxJTvEhzEycnjUaVE8kpUS09lDpiDDX2YLdyFCzoUbvbo5r0X1Kp4MllIOShw=="],
"next-runtime-env/next/@next/swc-darwin-x64": ["@next/swc-darwin-x64@14.2.32", "", { "os": "darwin", "cpu": "x64" }, "sha512-P9NpCAJuOiaHHpqtrCNncjqtSBi1f6QUdHK/+dNabBIXB2RUFWL19TY1Hkhu74OvyNQEYEzzMJCMQk5agjw1Qg=="],
"next-runtime-env/next/@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@14.2.32", "", { "os": "linux", "cpu": "arm64" }, "sha512-v7JaO0oXXt6d+cFjrrKqYnR2ubrD+JYP7nQVRZgeo5uNE5hkCpWnHmXm9vy3g6foMO8SPwL0P3MPw1c+BjbAzA=="],
"next-runtime-env/next/@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@14.2.32", "", { "os": "linux", "cpu": "arm64" }, "sha512-tA6sIKShXtSJBTH88i0DRd6I9n3ZTirmwpwAqH5zdJoQF7/wlJXR8DkPmKwYl5mFWhEKr5IIa3LfpMW9RRwKmQ=="],
"next-runtime-env/next/@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@14.2.32", "", { "os": "linux", "cpu": "x64" }, "sha512-7S1GY4TdnlGVIdeXXKQdDkfDysoIVFMD0lJuVVMeb3eoVjrknQ0JNN7wFlhCvea0hEk0Sd4D1hedVChDKfV2jw=="],
"next-runtime-env/next/@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@14.2.32", "", { "os": "linux", "cpu": "x64" }, "sha512-OHHC81P4tirVa6Awk6eCQ6RBfWl8HpFsZtfEkMpJ5GjPsJ3nhPe6wKAJUZ/piC8sszUkAgv3fLflgzPStIwfWg=="],
"next-runtime-env/next/@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@14.2.32", "", { "os": "win32", "cpu": "arm64" }, "sha512-rORQjXsAFeX6TLYJrCG5yoIDj+NKq31Rqwn8Wpn/bkPNy5rTHvOXkW8mLFonItS7QC6M+1JIIcLe+vOCTOYpvg=="],
"next-runtime-env/next/@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@14.2.32", "", { "os": "win32", "cpu": "x64" }, "sha512-2N0lSoU4GjfLSO50wvKpMQgKd4HdI2UHEhQPPPnlgfBJlOgJxkjpkYBqzk08f1gItBB6xF/n+ykso2hgxuydsA=="],
"next-runtime-env/next/@swc/helpers": ["@swc/helpers@0.5.5", "", { "dependencies": { "@swc/counter": "^0.1.3", "tslib": "^2.4.0" } }, "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A=="],
"next-runtime-env/next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
"next-runtime-env/next/styled-jsx": ["styled-jsx@5.1.1", "", { "dependencies": { "client-only": "0.0.1" }, "peerDependencies": { "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" } }, "sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw=="],
"nypm/pkg-types/confbox": ["confbox@0.2.2", "", {}, "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ=="],
"openai/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
@@ -4314,6 +4342,10 @@
"react-email/ora/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
"readable-web-to-node-stream/readable-stream/buffer": ["buffer@6.0.3", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA=="],
"readable-web-to-node-stream/readable-stream/string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
"resend/@react-email/render/prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="],
"restore-cursor/onetime/mimic-fn": ["mimic-fn@2.1.0", "", {}, "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="],
@@ -4460,6 +4492,8 @@
"react-email/ora/strip-ansi/ansi-regex": ["ansi-regex@6.2.0", "", {}, "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg=="],
"readable-web-to-node-stream/readable-stream/string_decoder/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
"sim/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"sim/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],


@@ -27,7 +27,9 @@
},
"overrides": {
"react": "19.1.0",
"react-dom": "19.1.0"
"react-dom": "19.1.0",
"next": "^15.4.1",
"@next/env": "^15.4.1"
},
"dependencies": {
"@linear/sdk": "40.0.0",
@@ -40,9 +42,7 @@
},
"devDependencies": {
"@biomejs/biome": "2.0.0-beta.5",
"@next/env": "^15.3.2",
"@types/word-extractor": "1.0.6",
"dotenv-cli": "^8.0.0",
"@next/env": "^15.4.1",
"husky": "9.1.7",
"lint-staged": "16.0.0",
"turbo": "2.5.6"