mirror of https://github.com/simstudioai/sim.git, synced 2026-01-09 06:58:07 -05:00
fix(logs): fix log filtering by moving filtering to server-side (#424)
* moved log filtering from the client to the server so pagination works correctly with large numbers of logs; added tests
* fix docs
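For context, a minimal sketch of how a client can exercise the new server-side filters. The parameter names and the response shape come from the query schema and tests in this commit; the filter values themselves are hypothetical:

```ts
// All filtering now happens in the API route, so the client just encodes its
// filters as query parameters and renders whatever page comes back.
const params = new URLSearchParams({
  includeWorkflow: 'true',
  level: 'error',
  workflowIds: 'workflow-1,workflow-2', // comma-separated; must be owned by the caller
  search: 'failed',
  limit: '50',
  offset: '0',
})
const res = await fetch(`/api/logs?${params.toString()}`)
if (!res.ok) throw new Error(`Error fetching logs: ${res.statusText}`)
const { data, total, page, pageSize, totalPages } = await res.json()
```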
@@ -50,11 +50,7 @@ The File Parser tool is particularly useful for scenarios where your agents need
## Usage Instructions

Upload and extract contents from structured file formats including PDFs, CSV spreadsheets, and Word documents (DOCX). ${
  shouldEnableURLInput
    ? 'You can either provide a URL to a file or upload files directly. '
    : 'Upload files directly. '
}Specialized parsers extract text and metadata from each format. You can upload multiple files at once and access them individually or as a combined document.

Upload and extract contents from structured file formats including PDFs, CSV spreadsheets, and Word documents (DOCX). Upload files directly. Specialized parsers extract text and metadata from each format. You can upload multiple files at once and access them individually or as a combined document.
@@ -79,11 +79,7 @@ The Mistral Parse tool is particularly useful for scenarios where your agents ne
## Usage Instructions

Extract text and structure from PDF documents using Mistral's OCR API.${
  shouldEnableFileUpload
    ? ' Either enter a URL to a PDF document or upload a PDF file directly.'
    : ' Enter a URL to a PDF document (.pdf extension required).'
} Configure processing options and get the content in your preferred format. For URLs, they must be publicly accessible and point to a valid PDF file. Note: Google Drive, Dropbox, and other cloud storage links are not supported; use a direct download URL from a web server instead.

Extract text and structure from PDF documents using Mistral's OCR API. Configure processing options and get the content in your preferred format. For URLs, they must be publicly accessible and point to a valid PDF file. Note: Google Drive, Dropbox, and other cloud storage links are not supported; use a direct download URL from a web server instead.
474
apps/sim/app/api/logs/route.test.ts
Normal file
@@ -0,0 +1,474 @@
/**
 * Tests for workflow logs API route
 *
 * @vitest-environment node
 */
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'

describe('Workflow Logs API Route', () => {
  const mockWorkflowLogs = [
    {
      id: 'log-1',
      workflowId: 'workflow-1',
      executionId: 'exec-1',
      level: 'info',
      message: 'Workflow started',
      duration: '1.2s',
      trigger: 'manual',
      createdAt: new Date('2024-01-01T10:00:00.000Z'),
    },
    {
      id: 'log-2',
      workflowId: 'workflow-1',
      executionId: 'exec-1',
      level: 'error',
      message: 'API call failed',
      duration: '0.5s',
      trigger: 'manual',
      createdAt: new Date('2024-01-01T10:01:00.000Z'),
    },
    {
      id: 'log-3',
      workflowId: 'workflow-2',
      executionId: 'exec-2',
      level: 'info',
      message: 'Task completed',
      duration: '2.1s',
      trigger: 'api',
      createdAt: new Date('2024-01-01T10:02:00.000Z'),
    },
  ]

  const mockWorkflows = [
    {
      id: 'workflow-1',
      userId: 'user-123',
      name: 'Test Workflow 1',
      color: '#3972F6',
      description: 'First test workflow',
      state: {},
      createdAt: new Date('2024-01-01T00:00:00.000Z'),
      updatedAt: new Date('2024-01-01T00:00:00.000Z'),
    },
    {
      id: 'workflow-2',
      userId: 'user-123',
      name: 'Test Workflow 2',
      color: '#FF6B6B',
      description: 'Second test workflow',
      state: {},
      createdAt: new Date('2024-01-01T00:00:00.000Z'),
      updatedAt: new Date('2024-01-01T00:00:00.000Z'),
    },
  ]

  beforeEach(() => {
    vi.resetModules()
    vi.clearAllMocks()

    vi.stubGlobal('crypto', {
      randomUUID: vi.fn().mockReturnValue('mock-request-id-12345678'),
    })

    vi.doMock('@/lib/logs/console-logger', () => ({
      createLogger: vi.fn().mockReturnValue({
        debug: vi.fn(),
        info: vi.fn(),
        warn: vi.fn(),
        error: vi.fn(),
      }),
    }))

    vi.doMock('@/lib/auth', () => ({
      getSession: vi.fn().mockResolvedValue({
        user: { id: 'user-123' },
      }),
    }))
  })

  afterEach(() => {
    vi.clearAllMocks()
  })

  function setupDatabaseMock({
    userWorkflows = mockWorkflows.filter((w) => w.userId === 'user-123'),
    logs = mockWorkflowLogs,
    workflows = mockWorkflows,
    throwError = false,
  } = {}) {
    const createChainableMock = (data: any[]) => {
      const mock = {
        select: vi.fn().mockReturnThis(),
        from: vi.fn().mockReturnThis(),
        where: vi.fn().mockReturnThis(),
        orderBy: vi.fn().mockReturnThis(),
        limit: vi.fn().mockReturnThis(),
        offset: vi.fn().mockReturnThis(),
        then: vi.fn((resolve) => resolve(data)),
      }
      return mock
    }

    let dbCallCount = 0

    vi.doMock('@/db', () => ({
      db: {
        select: vi.fn().mockImplementation((selection?: any) => {
          if (throwError) {
            throw new Error('Database connection failed')
          }

          dbCallCount++

          // First call: get user workflows
          if (dbCallCount === 1) {
            return createChainableMock(userWorkflows.map((w) => ({ id: w.id })))
          }

          // Second call: get logs
          if (dbCallCount === 2) {
            return createChainableMock(logs)
          }

          // Third call: get count
          if (dbCallCount === 3) {
            // If selection is provided and has count property, return count result
            if (selection && Object.keys(selection).some((key) => key === 'count')) {
              return createChainableMock([{ count: logs.length }])
            }
            return createChainableMock([{ count: logs.length }])
          }

          // Fourth call: get workflows for includeWorkflow
          if (dbCallCount === 4) {
            return createChainableMock(workflows)
          }

          return createChainableMock([])
        }),
      },
    }))

    vi.doMock('drizzle-orm', () => ({
      eq: vi.fn().mockImplementation((field, value) => ({ type: 'eq', field, value })),
      and: vi.fn().mockImplementation((...conditions) => ({ type: 'and', conditions })),
      or: vi.fn().mockImplementation((...conditions) => ({ type: 'or', conditions })),
      gte: vi.fn().mockImplementation((field, value) => ({ type: 'gte', field, value })),
      lte: vi.fn().mockImplementation((field, value) => ({ type: 'lte', field, value })),
      sql: vi.fn().mockImplementation((strings, ...values) => ({
        type: 'sql',
        sql: strings,
        values,
      })),
    }))

    vi.doMock('@/db/schema', () => ({
      workflow: {
        id: 'workflow.id',
        userId: 'workflow.userId',
        name: 'workflow.name',
        color: 'workflow.color',
        description: 'workflow.description',
      },
      workflowLogs: {
        id: 'workflowLogs.id',
        workflowId: 'workflowLogs.workflowId',
        level: 'workflowLogs.level',
        createdAt: 'workflowLogs.createdAt',
        message: 'workflowLogs.message',
        executionId: 'workflowLogs.executionId',
      },
    }))
  }

  describe('GET /api/logs', () => {
    it('should return logs successfully with default parameters', async () => {
      setupDatabaseMock()

      const req = createMockRequest('GET')

      const { GET } = await import('./route')
      const response = await GET(req)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data).toHaveProperty('data')
      expect(data).toHaveProperty('total', 3)
      expect(data).toHaveProperty('page', 1)
      expect(data).toHaveProperty('pageSize', 100)
      expect(data).toHaveProperty('totalPages', 1)
      expect(Array.isArray(data.data)).toBe(true)
      expect(data.data).toHaveLength(3)
    })

    it('should include workflow data when includeWorkflow=true', async () => {
      setupDatabaseMock()

      const url = new URL('http://localhost:3000/api/logs?includeWorkflow=true')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data[0]).toHaveProperty('workflow')
      expect(data.data[0].workflow).toHaveProperty('name')
      expect(data.data[0].workflow).toHaveProperty('color')
    })

    it('should filter logs by level', async () => {
      const errorLogs = mockWorkflowLogs.filter((log) => log.level === 'error')
      setupDatabaseMock({ logs: errorLogs })

      const url = new URL('http://localhost:3000/api/logs?level=error')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(1)
      expect(data.data[0].level).toBe('error')
    })

    it('should filter logs by specific workflow IDs', async () => {
      const workflow1Logs = mockWorkflowLogs.filter((log) => log.workflowId === 'workflow-1')
      setupDatabaseMock({ logs: workflow1Logs })

      const url = new URL('http://localhost:3000/api/logs?workflowIds=workflow-1')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(2)
      expect(data.data.every((log: any) => log.workflowId === 'workflow-1')).toBe(true)
    })

    it('should filter logs by multiple workflow IDs', async () => {
      setupDatabaseMock()

      const url = new URL('http://localhost:3000/api/logs?workflowIds=workflow-1,workflow-2')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(3)
    })

    it('should filter logs by date range', async () => {
      const startDate = '2024-01-01T10:00:30.000Z'
      const filteredLogs = mockWorkflowLogs.filter(
        (log) => new Date(log.createdAt) >= new Date(startDate)
      )
      setupDatabaseMock({ logs: filteredLogs })

      const url = new URL(`http://localhost:3000/api/logs?startDate=${startDate}`)
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(2)
    })

    it('should search logs by message content', async () => {
      const searchLogs = mockWorkflowLogs.filter((log) =>
        log.message.toLowerCase().includes('failed')
      )
      setupDatabaseMock({ logs: searchLogs })

      const url = new URL('http://localhost:3000/api/logs?search=failed')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(1)
      expect(data.data[0].message).toContain('failed')
    })

    it('should handle pagination correctly', async () => {
      const paginatedLogs = mockWorkflowLogs.slice(1, 3)
      setupDatabaseMock({ logs: paginatedLogs })

      const url = new URL('http://localhost:3000/api/logs?limit=2&offset=1')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(2)
      expect(data.page).toBe(1)
      expect(data.pageSize).toBe(2)
      expect(data.total).toBe(2)
      expect(data.totalPages).toBe(1)
    })

    it('should return empty array when user has no workflows', async () => {
      setupDatabaseMock({ userWorkflows: [], logs: [], workflows: [] })

      const req = createMockRequest('GET')

      const { GET } = await import('./route')
      const response = await GET(req)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toEqual([])
      expect(data.total).toBe(0)
    })

    it('should return 403 for unauthorized workflow access', async () => {
      // Set up mock to simulate user not owning the requested workflow
      setupDatabaseMock({
        userWorkflows: mockWorkflows.filter((w) => w.id !== 'unauthorized-workflow'),
      })

      const url = new URL('http://localhost:3000/api/logs?workflowIds=unauthorized-workflow')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(403)
      expect(data).toHaveProperty('error', 'Unauthorized access to workflows')
    })

    it('should return 401 for unauthenticated requests', async () => {
      // Mock auth to return no session
      vi.doMock('@/lib/auth', () => ({
        getSession: vi.fn().mockResolvedValue(null),
      }))

      setupDatabaseMock()

      const req = createMockRequest('GET')

      const { GET } = await import('./route')
      const response = await GET(req)
      const data = await response.json()

      expect(response.status).toBe(401)
      expect(data).toHaveProperty('error', 'Unauthorized')
    })

    it('should validate query parameters', async () => {
      setupDatabaseMock()

      const url = new URL('http://localhost:3000/api/logs?limit=invalid')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(400)
      expect(data).toHaveProperty('error', 'Invalid request parameters')
      expect(data).toHaveProperty('details')
    })

    it('should handle database errors gracefully', async () => {
      setupDatabaseMock({ throwError: true })

      const req = createMockRequest('GET')

      const { GET } = await import('./route')
      const response = await GET(req)
      const data = await response.json()

      expect(response.status).toBe(500)
      expect(data).toHaveProperty('error')
    })

    it('should combine multiple filters correctly', async () => {
      const filteredLogs = mockWorkflowLogs.filter(
        (log) =>
          log.level === 'info' &&
          log.workflowId === 'workflow-1' &&
          log.message.toLowerCase().includes('started')
      )
      setupDatabaseMock({ logs: filteredLogs })

      const url = new URL(
        'http://localhost:3000/api/logs?level=info&workflowIds=workflow-1&search=started'
      )
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(1)
      expect(data.data[0].level).toBe('info')
      expect(data.data[0].workflowId).toBe('workflow-1')
      expect(data.data[0].message).toContain('started')
    })

    it('should handle end date filter', async () => {
      const endDate = '2024-01-01T10:01:30.000Z'
      const filteredLogs = mockWorkflowLogs.filter(
        (log) => new Date(log.createdAt) <= new Date(endDate)
      )
      setupDatabaseMock({ logs: filteredLogs })

      const url = new URL(`http://localhost:3000/api/logs?endDate=${endDate}`)
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(2)
    })

    it('should handle large offset values', async () => {
      setupDatabaseMock({ logs: [] })

      const url = new URL('http://localhost:3000/api/logs?limit=10&offset=1000')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toEqual([])
      expect(data.page).toBe(101) // (1000 / 10) + 1
      expect(data.total).toBe(0)
    })

    it('should handle search by execution ID', async () => {
      const searchLogs = mockWorkflowLogs.filter((log) => log.executionId?.includes('exec-1'))
      setupDatabaseMock({ logs: searchLogs })

      const url = new URL('http://localhost:3000/api/logs?search=exec-1')
      const req = new Request(url.toString())

      const { GET } = await import('./route')
      const response = await GET(req as any)
      const data = await response.json()

      expect(response.status).toBe(200)
      expect(data.data).toHaveLength(2)
      expect(data.data.every((log: any) => log.executionId === 'exec-1')).toBe(true)
    })
  })
})
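One detail worth calling out in the test setup above: the chainable database mock is awaitable only because it exposes a `then` method, which `await` invokes directly. A minimal standalone illustration of that thenable trick:

```ts
// Any object with a then(resolve) method is a "thenable": `await` calls then(),
// so the chain mock resolves to whatever data it was seeded with.
const thenable = {
  then: (resolve: (rows: string[]) => void) => resolve(['log-1', 'log-2']),
}
const rows = await thenable
console.log(rows) // ['log-1', 'log-2']
```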
@@ -6,23 +6,20 @@ import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { workflow, workflowLogs } from '@/db/schema'

// Create a logger for this module
const logger = createLogger('WorkflowLogsAPI')

// No cache
export const dynamic = 'force-dynamic'
export const revalidate = 0

// Schema for query parameters
const QueryParamsSchema = z.object({
  includeWorkflow: z.enum(['true', 'false']).optional().default('false'),
  limit: z.coerce.number().optional().default(100),
  offset: z.coerce.number().optional().default(0),
  // Add more filters as needed (e.g., by level, date range, etc.)
  level: z.string().optional(),
  workflowId: z.string().optional(),
  workflowIds: z.string().optional(), // Comma-separated list of workflow IDs
  startDate: z.string().optional(),
  endDate: z.string().optional(),
  search: z.string().optional(),
})

// Used to retrieve and display workflow logs
@@ -30,7 +27,6 @@ export async function GET(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    // Get the session directly in the API route
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized workflow logs access attempt`)
@@ -40,32 +36,42 @@ export async function GET(request: NextRequest) {
    const userId = session.user.id

    try {
      // Parse query parameters
      const { searchParams } = new URL(request.url)
      const params = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

      // Start building the query to get all workflows for the user
      const userWorkflows = await db
        .select({ id: workflow.id })
        .from(workflow)
        .where(eq(workflow.userId, userId))

      const workflowIds = userWorkflows.map((w) => w.id)
      const userWorkflowIds = userWorkflows.map((w) => w.id)

      if (workflowIds.length === 0) {
      if (userWorkflowIds.length === 0) {
        return NextResponse.json({ data: [], total: 0 }, { status: 200 })
      }

      // Build the conditions for the query
      let conditions: SQL<unknown> | undefined

      // Start with the first workflowId
      conditions = eq(workflowLogs.workflowId, workflowIds[0])

      // Add additional workflowIds if there are more than one
      if (workflowIds.length > 1) {
        const workflowConditions = workflowIds.map((id) => eq(workflowLogs.workflowId, id))
        conditions = or(...workflowConditions)
      // Apply workflow filtering
      if (params.workflowIds) {
        const requestedWorkflowIds = params.workflowIds.split(',').map((id) => id.trim())
        // Ensure all requested workflows belong to the user
        const unauthorizedIds = requestedWorkflowIds.filter((id) => !userWorkflowIds.includes(id))
        if (unauthorizedIds.length > 0) {
          logger.warn(`[${requestId}] Unauthorized access to workflow logs`, {
            unauthorizedWorkflowIds: unauthorizedIds,
          })
          return NextResponse.json({ error: 'Unauthorized access to workflows' }, { status: 403 })
        }
        conditions = or(...requestedWorkflowIds.map((id) => eq(workflowLogs.workflowId, id)))
      } else {
        // No specific workflows requested, filter by all user workflows
        if (userWorkflowIds.length === 1) {
          conditions = eq(workflowLogs.workflowId, userWorkflowIds[0])
        } else {
          conditions = or(...userWorkflowIds.map((id) => eq(workflowLogs.workflowId, id)))
        }
      }

      // Apply additional filters if provided
@@ -73,18 +79,6 @@ export async function GET(request: NextRequest) {
        conditions = and(conditions, eq(workflowLogs.level, params.level))
      }

      if (params.workflowId) {
        // Ensure the requested workflow belongs to the user
        if (workflowIds.includes(params.workflowId)) {
          conditions = and(conditions, eq(workflowLogs.workflowId, params.workflowId))
        } else {
          logger.warn(`[${requestId}] Unauthorized access to workflow logs`, {
            requestedWorkflowId: params.workflowId,
          })
          return NextResponse.json({ error: 'Unauthorized access to workflow' }, { status: 403 })
        }
      }

      if (params.startDate) {
        const startDate = new Date(params.startDate)
        conditions = and(conditions, gte(workflowLogs.createdAt, startDate))
@@ -95,6 +89,17 @@ export async function GET(request: NextRequest) {
        conditions = and(conditions, lte(workflowLogs.createdAt, endDate))
      }

      if (params.search) {
        const searchTerm = `%${params.search}%`
        conditions = and(
          conditions,
          or(
            sql`${workflowLogs.message} ILIKE ${searchTerm}`,
            sql`${workflowLogs.executionId} ILIKE ${searchTerm}`
          )
        )
      }

      // Execute the query with all conditions
      const logs = await db
        .select()
@@ -24,9 +24,8 @@ export function ControlBar() {
  const {
    setSearchQuery: setStoreSearchQuery,
    setLogs,
    logs,
    setError,
    applyFilters,
    buildQueryParams,
  } = useFilterStore()

  // Update store when debounced search query changes
@@ -36,8 +35,8 @@
  const fetchLogs = async () => {
    try {
      // Include workflow data in the response
      const response = await fetch('/api/logs?includeWorkflow=true')
      const queryParams = buildQueryParams(1, 50) // Get first 50 logs for refresh
      const response = await fetch(`/api/logs?${queryParams}`)

      if (!response.ok) {
        throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -66,12 +65,8 @@
      // Wait for minimum loading time
      await minLoadingTime

      // Merge new logs with existing logs (avoid duplicates by ID)
      const existingLogIds = new Set(logs.map((log) => log.id))
      const newLogs = logsResponse.data.filter((log) => !existingLogIds.has(log.id))

      // Update logs in the store with merged logs
      setLogs([...newLogs, ...logs])
      // Replace logs with fresh filtered results from server
      setLogs(logsResponse.data)
      setError(null)
    } catch (err) {
      // Wait for minimum loading time
@@ -1,4 +1,4 @@
import { useMemo } from 'react'
import { useEffect, useState } from 'react'
import { Check, ChevronDown } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
@@ -10,25 +10,41 @@ import {
} from '@/components/ui/dropdown-menu'
import { useFilterStore } from '@/app/w/logs/stores/store'

interface WorkflowOption {
  id: string
  name: string
  color: string
}

export default function Workflow() {
  const { logs, workflowIds, toggleWorkflowId, setWorkflowIds } = useFilterStore()
  const { workflowIds, toggleWorkflowId, setWorkflowIds } = useFilterStore()
  const [workflows, setWorkflows] = useState<WorkflowOption[]>([])
  const [loading, setLoading] = useState(true)

  // Extract unique workflows from logs
  const workflows = useMemo(() => {
    const uniqueWorkflows = new Map()

    logs.forEach((log) => {
      if (log.workflow && !uniqueWorkflows.has(log.workflowId)) {
        uniqueWorkflows.set(log.workflowId, {
          id: log.workflowId,
          name: log.workflow.name,
          color: log.workflow.color,
        })
  // Fetch all available workflows from the API
  useEffect(() => {
    const fetchWorkflows = async () => {
      try {
        setLoading(true)
        const response = await fetch('/api/workflows/sync')
        if (response.ok) {
          const { data } = await response.json()
          const workflowOptions: WorkflowOption[] = data.map((workflow: any) => ({
            id: workflow.id,
            name: workflow.name,
            color: workflow.color || '#3972F6',
          }))
          setWorkflows(workflowOptions)
        }
      } catch (error) {
        console.error('Failed to fetch workflows:', error)
      } finally {
        setLoading(false)
      }
    })
    }

    return Array.from(uniqueWorkflows.values())
  }, [logs])
    fetchWorkflows()
  }, [])

  // Get display text for the dropdown button
  const getSelectedWorkflowsText = () => {
@@ -54,7 +70,7 @@
    <DropdownMenu>
      <DropdownMenuTrigger asChild>
        <Button variant='outline' size='sm' className='w-full justify-between font-normal text-sm'>
          {getSelectedWorkflowsText()}
          {loading ? 'Loading workflows...' : getSelectedWorkflowsText()}
          <ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
        </Button>
      </DropdownMenuTrigger>
@@ -71,27 +87,34 @@
          {workflowIds.length === 0 && <Check className='h-4 w-4 text-primary' />}
        </DropdownMenuItem>

        {workflows.length > 0 && <DropdownMenuSeparator />}
        {!loading && workflows.length > 0 && <DropdownMenuSeparator />}

        {workflows.map((workflow) => (
          <DropdownMenuItem
            key={workflow.id}
            onSelect={(e) => {
              e.preventDefault()
              toggleWorkflowId(workflow.id)
            }}
            className='flex cursor-pointer items-center justify-between p-2 text-sm'
          >
            <div className='flex items-center'>
              <div
                className='mr-2 h-2 w-2 rounded-full'
                style={{ backgroundColor: workflow.color }}
              />
              {workflow.name}
            </div>
            {isWorkflowSelected(workflow.id) && <Check className='h-4 w-4 text-primary' />}
        {!loading &&
          workflows.map((workflow) => (
            <DropdownMenuItem
              key={workflow.id}
              onSelect={(e) => {
                e.preventDefault()
                toggleWorkflowId(workflow.id)
              }}
              className='flex cursor-pointer items-center justify-between p-2 text-sm'
            >
              <div className='flex items-center'>
                <div
                  className='mr-2 h-2 w-2 rounded-full'
                  style={{ backgroundColor: workflow.color }}
                />
                {workflow.name}
              </div>
              {isWorkflowSelected(workflow.id) && <Check className='h-4 w-4 text-primary' />}
            </DropdownMenuItem>
          ))}

        {loading && (
          <DropdownMenuItem disabled className='p-2 text-muted-foreground text-sm'>
            Loading workflows...
          </DropdownMenuItem>
          ))}
        )}
      </DropdownMenuContent>
    </DropdownMenu>
  )
@@ -56,7 +56,6 @@ const selectedRowAnimation = `

export default function Logs() {
  const {
    filteredLogs,
    logs,
    loading,
    error,
@@ -69,6 +68,11 @@ export default function Logs() {
    setHasMore,
    isFetchingMore,
    setIsFetchingMore,
    buildQueryParams,
    timeRange,
    level,
    workflowIds,
    searchQuery,
  } = useFilterStore()

  const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
@@ -85,7 +89,7 @@ export default function Logs() {
    const groups: Record<string, WorkflowLog[]> = {}

    // Group logs by executionId
    filteredLogs.forEach((log) => {
    logs.forEach((log) => {
      if (log.executionId) {
        if (!groups[log.executionId]) {
          groups[log.executionId] = []
@@ -101,20 +105,20 @@ export default function Logs() {
    })

    return groups
  }, [filteredLogs])
  }, [logs])

  const handleLogClick = (log: WorkflowLog) => {
    setSelectedLog(log)
    const index = filteredLogs.findIndex((l) => l.id === log.id)
    const index = logs.findIndex((l) => l.id === log.id)
    setSelectedLogIndex(index)
    setIsSidebarOpen(true)
  }

  const handleNavigateNext = () => {
    if (selectedLogIndex < filteredLogs.length - 1) {
    if (selectedLogIndex < logs.length - 1) {
      const nextIndex = selectedLogIndex + 1
      setSelectedLogIndex(nextIndex)
      setSelectedLog(filteredLogs[nextIndex])
      setSelectedLog(logs[nextIndex])
    }
  }

@@ -122,7 +126,7 @@ export default function Logs() {
    if (selectedLogIndex > 0) {
      const prevIndex = selectedLogIndex - 1
      setSelectedLogIndex(prevIndex)
      setSelectedLog(filteredLogs[prevIndex])
      setSelectedLog(logs[prevIndex])
    }
  }

@@ -148,9 +152,8 @@ export default function Logs() {
        setIsFetchingMore(true)
      }

      const response = await fetch(
        `/api/logs?includeWorkflow=true&limit=${LOGS_PER_PAGE}&offset=${(pageNum - 1) * LOGS_PER_PAGE}`
      )
      const queryParams = buildQueryParams(pageNum, LOGS_PER_PAGE)
      const response = await fetch(`/api/logs?${queryParams}`)

      if (!response.ok) {
        throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -161,6 +164,7 @@ export default function Logs() {
      setHasMore(data.data.length === LOGS_PER_PAGE && data.page < data.totalPages)

      setLogs(data.data, append)

      setError(null)
    } catch (err) {
      logger.error('Failed to fetch logs:', { err })
@@ -173,9 +177,62 @@ export default function Logs() {
        }
      }
    },
    [setLogs, setLoading, setError, setHasMore, setIsFetchingMore]
    [setLogs, setLoading, setError, setHasMore, setIsFetchingMore, buildQueryParams]
  )

  useEffect(() => {
    fetchLogs(1)
  }, [fetchLogs])

  // Refetch when filters change (but not on initial load)
  const isInitialMount = useRef(true)
  useEffect(() => {
    if (isInitialMount.current) {
      isInitialMount.current = false
      return
    }

    // Reset pagination and fetch from beginning when filters change
    setPage(1)
    setHasMore(true)

    // Fetch logs with new filters
    const fetchWithNewFilters = async () => {
      try {
        setLoading(true)
        const queryParams = buildQueryParams(1, LOGS_PER_PAGE)
        const response = await fetch(`/api/logs?${queryParams}`)

        if (!response.ok) {
          throw new Error(`Error fetching logs: ${response.statusText}`)
        }

        const data: LogsResponse = await response.json()
        setHasMore(data.data.length === LOGS_PER_PAGE && data.page < data.totalPages)
        setLogs(data.data, false)
        setError(null)
      } catch (err) {
        logger.error('Failed to fetch logs:', { err })
        setError(err instanceof Error ? err.message : 'An unknown error occurred')
      } finally {
        setLoading(false)
      }
    }

    fetchWithNewFilters()
  }, [
    timeRange,
    level,
    workflowIds,
    searchQuery,
    setPage,
    setHasMore,
    setLoading,
    setLogs,
    setError,
    buildQueryParams,
  ])

  const loadMoreLogs = useCallback(() => {
    if (!isFetchingMore && hasMore) {
      const nextPage = page + 1
@@ -238,18 +295,14 @@ export default function Logs() {
    }
  }, [loading, hasMore, isFetchingMore, loadMoreLogs])

  useEffect(() => {
    fetchLogs(1)
  }, [fetchLogs])

  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {
      if (filteredLogs.length === 0) return
      if (logs.length === 0) return

      if (selectedLogIndex === -1 && (e.key === 'ArrowUp' || e.key === 'ArrowDown')) {
        e.preventDefault()
        setSelectedLogIndex(0)
        setSelectedLog(filteredLogs[0])
        setSelectedLog(logs[0])
        return
      }

@@ -258,12 +311,7 @@ export default function Logs() {
        handleNavigatePrev()
      }

      if (
        e.key === 'ArrowDown' &&
        !e.metaKey &&
        !e.ctrlKey &&
        selectedLogIndex < filteredLogs.length - 1
      ) {
      if (e.key === 'ArrowDown' && !e.metaKey && !e.ctrlKey && selectedLogIndex < logs.length - 1) {
        e.preventDefault()
        handleNavigateNext()
      }
@@ -277,7 +325,7 @@ export default function Logs() {
    window.addEventListener('keydown', handleKeyDown)
    return () => window.removeEventListener('keydown', handleKeyDown)
  }, [
    filteredLogs,
    logs,
    selectedLogIndex,
    isSidebarOpen,
    selectedLog,
@@ -359,7 +407,7 @@ export default function Logs() {
            <span className='text-sm'>Error: {error}</span>
          </div>
        </div>
      ) : filteredLogs.length === 0 ? (
      ) : logs.length === 0 ? (
        <div className='flex h-full items-center justify-center'>
          <div className='flex items-center gap-2 text-muted-foreground'>
            <Info className='h-5 w-5' />
@@ -380,7 +428,7 @@ export default function Logs() {
            <col className='w-[8%] md:w-[10%]' />
          </colgroup>
          <tbody>
            {filteredLogs.map((log) => {
            {logs.map((log) => {
              const formattedDate = formatDate(log.createdAt)
              const isSelected = selectedLog?.id === log.id
              const _isWorkflowExecutionLog =
@@ -503,7 +551,7 @@ export default function Logs() {
            <tr className='border-t'>
              <td colSpan={7}>
                <div className='flex items-center justify-between px-4 py-2 text-muted-foreground text-xs'>
                  <span>Showing {filteredLogs.length} logs</span>
                  <span>Showing {logs.length} logs</span>
                  <div className='flex items-center gap-4'>
                    {isFetchingMore ? (
                      <div className='flex items-center gap-2' />
@@ -537,7 +585,7 @@ export default function Logs() {
        onClose={handleCloseSidebar}
        onNavigateNext={handleNavigateNext}
        onNavigatePrev={handleNavigatePrev}
        hasNext={selectedLogIndex < filteredLogs.length - 1}
        hasNext={selectedLogIndex < logs.length - 1}
        hasPrev={selectedLogIndex > 0}
      />
    </div>
@@ -3,7 +3,6 @@ import type { FilterState } from './types'

export const useFilterStore = create<FilterState>((set, get) => ({
  logs: [],
  filteredLogs: [],
  timeRange: 'All time',
  level: 'all',
  workflowIds: [],
@@ -19,28 +18,24 @@ export const useFilterStore = create<FilterState>((set, get) => ({
      const currentLogs = [...get().logs]
      const newLogs = [...currentLogs, ...logs]
      set({ logs: newLogs })
      get().applyFilters()
    } else {
      set({ logs, filteredLogs: logs, loading: false })
      set({ logs, loading: false })
    }
  },

  setTimeRange: (timeRange) => {
    set({ timeRange })
    get().resetPagination()
    get().applyFilters()
  },

  setLevel: (level) => {
    set({ level })
    get().resetPagination()
    get().applyFilters()
  },

  setWorkflowIds: (workflowIds) => {
    set({ workflowIds })
    get().resetPagination()
    get().applyFilters()
  },

  toggleWorkflowId: (workflowId) => {
@@ -55,13 +50,11 @@ export const useFilterStore = create<FilterState>((set, get) => ({

    set({ workflowIds: currentWorkflowIds })
    get().resetPagination()
    get().applyFilters()
  },

  setSearchQuery: (searchQuery) => {
    set({ searchQuery })
    get().resetPagination()
    get().applyFilters()
  },

  setLoading: (loading) => set({ loading }),
@@ -76,53 +69,52 @@ export const useFilterStore = create<FilterState>((set, get) => ({

  resetPagination: () => set({ page: 1, hasMore: true }),

  applyFilters: () => {
    const { logs, timeRange, level, workflowIds, searchQuery } = get()
  // Build query parameters for server-side filtering
  buildQueryParams: (page: number, limit: number) => {
    const { timeRange, level, workflowIds, searchQuery } = get()
    const params = new URLSearchParams()

    let filtered = [...logs]
    params.set('includeWorkflow', 'true')
    params.set('limit', limit.toString())
    params.set('offset', ((page - 1) * limit).toString())

    // Apply time range filter
    // Add level filter
    if (level !== 'all') {
      params.set('level', level)
    }

    // Add workflow filter
    if (workflowIds.length > 0) {
      params.set('workflowIds', workflowIds.join(','))
    }

    // Add time range filter
    if (timeRange !== 'All time') {
      const now = new Date()
      let cutoffTime: Date
      let startDate: Date

      switch (timeRange) {
        case 'Past 30 minutes':
          cutoffTime = new Date(now.getTime() - 30 * 60 * 1000)
          startDate = new Date(now.getTime() - 30 * 60 * 1000)
          break
        case 'Past hour':
          cutoffTime = new Date(now.getTime() - 60 * 60 * 1000)
          startDate = new Date(now.getTime() - 60 * 60 * 1000)
          break
        case 'Past 24 hours':
          cutoffTime = new Date(now.getTime() - 24 * 60 * 60 * 1000)
          startDate = new Date(now.getTime() - 24 * 60 * 60 * 1000)
          break
        default:
          cutoffTime = new Date(0) // Beginning of time
          startDate = new Date(0)
      }

      filtered = filtered.filter((log) => new Date(log.createdAt) >= cutoffTime)
      params.set('startDate', startDate.toISOString())
    }

    // Apply level filter
    if (level !== 'all') {
      filtered = filtered.filter((log) => log.level.toLowerCase() === level)
    }

    // Apply workflow filter
    if (workflowIds.length > 0) {
      filtered = filtered.filter((log) => workflowIds.includes(log.workflowId))
    }

    // Apply search query filter
    // Add search filter
    if (searchQuery.trim()) {
      const query = searchQuery.toLowerCase().trim()
      filtered = filtered.filter(
        (log) =>
          log.message.toLowerCase().includes(query) ||
          log.executionId?.toLowerCase().includes(query)
      )
      params.set('search', searchQuery.trim())
    }

    set({ filteredLogs: filtered })
    return params.toString()
  },
}))
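As a quick sanity check on the offset arithmetic above, a standalone sketch of what buildQueryParams produces for page 2 with 50 logs per page (the 'error' level filter is an assumed store value):

```ts
// page/limit -> offset translation used by buildQueryParams.
const page = 2
const limit = 50
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('limit', limit.toString())
params.set('offset', ((page - 1) * limit).toString()) // offset = 50
params.set('level', 'error') // hypothetical filter value from store state
console.log(params.toString())
// includeWorkflow=true&limit=50&offset=50&level=error
```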
@@ -88,9 +88,6 @@ export interface FilterState {
  // Original logs from API
  logs: WorkflowLog[]

  // Filtered logs to display
  filteredLogs: WorkflowLog[]

  // Filter states
  timeRange: TimeRange
  level: LogLevel
@@ -120,6 +117,6 @@ export interface FilterState {
  setIsFetchingMore: (isFetchingMore: boolean) => void
  resetPagination: () => void

  // Apply filters
  applyFilters: () => void
  // Build query parameters for server-side filtering
  buildQueryParams: (page: number, limit: number) => string
}
@@ -220,8 +220,28 @@ function extractStringProperty(content: string, propName: string): string | null
  if (simpleMatch) return simpleMatch[1]

  // Try to match multi-line string with template literals
  const templateMatch = content.match(new RegExp(`${propName}\\s*:\\s*\`([^\`]+)\``, 'm'))
  return templateMatch ? templateMatch[1] : null
  const templateMatch = content.match(new RegExp(`${propName}\\s*:\\s*\`([^\`]+)\``, 's'))
  if (templateMatch) {
    let templateContent = templateMatch[1]

    // Handle template literals with expressions by replacing them with reasonable defaults
    // This is a simple approach - we'll replace common variable references with sensible defaults
    templateContent = templateContent.replace(
      /\$\{[^}]*shouldEnableURLInput[^}]*\?[^:]*:[^}]*\}/g,
      'Upload files directly. '
    )
    templateContent = templateContent.replace(/\$\{[^}]*shouldEnableURLInput[^}]*\}/g, 'false')

    // Remove any remaining template expressions that we can't safely evaluate
    templateContent = templateContent.replace(/\$\{[^}]+\}/g, '')

    // Clean up any extra whitespace
    templateContent = templateContent.replace(/\s+/g, ' ').trim()

    return templateContent
  }

  return null
}

// Helper to extract icon name from content
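A hypothetical input/output pair for the patched extractStringProperty (the source string below is invented for illustration; the expected result follows from the replacements above):

```ts
// The 's' flag lets the backtick match span newlines; the ternary on
// shouldEnableURLInput is then collapsed to 'Upload files directly. ' and
// whitespace is normalized.
const source = 'description: `Extract text.${shouldEnableURLInput ? " a" : " b"} Done.`'
// extractStringProperty(source, 'description')
// -> roughly 'Extract text.Upload files directly. Done.'
```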