improvement(logs): update logs export route to respect filters (#2550)
@@ -1,28 +1,15 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'

const logger = createLogger('LogsExportAPI')

export const revalidate = 0

const ExportParamsSchema = z.object({
  level: z.string().optional(),
  workflowIds: z.string().optional(),
  folderIds: z.string().optional(),
  triggers: z.string().optional(),
  startDate: z.string().optional(),
  endDate: z.string().optional(),
  search: z.string().optional(),
  workflowName: z.string().optional(),
  folderName: z.string().optional(),
  workspaceId: z.string(),
})

function escapeCsv(value: any): string {
  if (value === null || value === undefined) return ''
  const str = String(value)
@@ -41,7 +28,7 @@ export async function GET(request: NextRequest) {

  const userId = session.user.id
  const { searchParams } = new URL(request.url)
  const params = ExportParamsSchema.parse(Object.fromEntries(searchParams.entries()))
  const params = LogFilterParamsSchema.parse(Object.fromEntries(searchParams.entries()))

  const selectColumns = {
    id: workflowExecutionLogs.id,
@@ -57,53 +44,11 @@ export async function GET(request: NextRequest) {
    workflowName: workflow.name,
  }

  let conditions: SQL | undefined = eq(workflowExecutionLogs.workspaceId, params.workspaceId)

  if (params.level && params.level !== 'all') {
    const levels = params.level.split(',').filter(Boolean)
    if (levels.length === 1) {
      conditions = and(conditions, eq(workflowExecutionLogs.level, levels[0]))
    } else if (levels.length > 1) {
      conditions = and(conditions, inArray(workflowExecutionLogs.level, levels))
    }
  }

  if (params.workflowIds) {
    const workflowIds = params.workflowIds.split(',').filter(Boolean)
    if (workflowIds.length > 0) conditions = and(conditions, inArray(workflow.id, workflowIds))
  }

  if (params.folderIds) {
    const folderIds = params.folderIds.split(',').filter(Boolean)
    if (folderIds.length > 0) conditions = and(conditions, inArray(workflow.folderId, folderIds))
  }

  if (params.triggers) {
    const triggers = params.triggers.split(',').filter(Boolean)
    if (triggers.length > 0 && !triggers.includes('all')) {
      conditions = and(conditions, inArray(workflowExecutionLogs.trigger, triggers))
    }
  }

  if (params.startDate) {
    conditions = and(conditions, gte(workflowExecutionLogs.startedAt, new Date(params.startDate)))
  }
  if (params.endDate) {
    conditions = and(conditions, lte(workflowExecutionLogs.startedAt, new Date(params.endDate)))
  }

  if (params.search) {
    const term = `%${params.search}%`
    conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${term}`)
  }
  if (params.workflowName) {
    const nameTerm = `%${params.workflowName}%`
    conditions = and(conditions, sql`${workflow.name} ILIKE ${nameTerm}`)
  }
  if (params.folderName) {
    const folderTerm = `%${params.folderName}%`
    conditions = and(conditions, sql`${workflow.name} ILIKE ${folderTerm}`)
  }
  const workspaceCondition = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
  const filterConditions = buildFilterConditions(params)
  const conditions = filterConditions
    ? and(workspaceCondition, filterConditions)
    : workspaceCondition

  const header = [
    'startedAt',
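
With the export route now parsing LogFilterParamsSchema and delegating to buildFilterConditions, the CSV export accepts the same filter query parameters as the logs list view. A minimal caller-side sketch follows; the '/api/logs/export' path and the helper name are illustrative assumptions, not taken from this diff.

// Hypothetical helper: build an export URL from the filter params accepted by
// LogFilterParamsSchema (workspaceId is the only required field). The route
// path '/api/logs/export' is an assumption for illustration.
interface ExportFilters {
  workspaceId: string
  level?: string // e.g. 'error' or 'error,info'
  triggers?: string // e.g. 'api,schedule' ('all' disables the filter)
  workflowIds?: string // comma-separated workflow ids
  startDate?: string // ISO timestamp
  endDate?: string // ISO timestamp
  search?: string // matched against executionId via ILIKE
}

function buildExportUrl(filters: ExportFilters): string {
  const params = new URLSearchParams()
  for (const [key, value] of Object.entries(filters)) {
    if (value !== undefined && value !== '') params.set(key, value)
  }
  return `/api/logs/export?${params.toString()}`
}

// Example: export only error-level logs from the past 24 hours.
const exportUrl = buildExportUrl({
  workspaceId: 'ws_123',
  level: 'error',
  startDate: new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(),
})
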
@@ -6,51 +6,22 @@ import {
  workflowDeploymentVersion,
  workflowExecutionLogs,
} from '@sim/db/schema'
import {
  and,
  desc,
  eq,
  gt,
  gte,
  inArray,
  isNotNull,
  isNull,
  lt,
  lte,
  ne,
  or,
  type SQL,
  sql,
} from 'drizzle-orm'
import { and, desc, eq, isNotNull, isNull, or, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'

const logger = createLogger('LogsAPI')

export const revalidate = 0

const QueryParamsSchema = z.object({
const QueryParamsSchema = LogFilterParamsSchema.extend({
  details: z.enum(['basic', 'full']).optional().default('basic'),
  limit: z.coerce.number().optional().default(100),
  offset: z.coerce.number().optional().default(0),
  level: z.string().optional(),
  workflowIds: z.string().optional(),
  folderIds: z.string().optional(),
  triggers: z.string().optional(),
  startDate: z.string().optional(),
  endDate: z.string().optional(),
  search: z.string().optional(),
  workflowName: z.string().optional(),
  folderName: z.string().optional(),
  executionId: z.string().optional(),
  costOperator: z.enum(['=', '>', '<', '>=', '<=', '!=']).optional(),
  costValue: z.coerce.number().optional(),
  durationOperator: z.enum(['=', '>', '<', '>=', '<=', '!=']).optional(),
  durationValue: z.coerce.number().optional(),
  workspaceId: z.string(),
})

export async function GET(request: NextRequest) {
@@ -197,102 +168,11 @@ export async function GET(request: NextRequest) {
    }
  }

  if (params.workflowIds) {
    const workflowIds = params.workflowIds.split(',').filter(Boolean)
    if (workflowIds.length > 0) {
      conditions = and(conditions, inArray(workflow.id, workflowIds))
    }
  }

  if (params.folderIds) {
    const folderIds = params.folderIds.split(',').filter(Boolean)
    if (folderIds.length > 0) {
      conditions = and(conditions, inArray(workflow.folderId, folderIds))
    }
  }

  if (params.triggers) {
    const triggers = params.triggers.split(',').filter(Boolean)
    if (triggers.length > 0 && !triggers.includes('all')) {
      conditions = and(conditions, inArray(workflowExecutionLogs.trigger, triggers))
    }
  }

  if (params.startDate) {
    conditions = and(
      conditions,
      gte(workflowExecutionLogs.startedAt, new Date(params.startDate))
    )
  }
  if (params.endDate) {
    conditions = and(conditions, lte(workflowExecutionLogs.startedAt, new Date(params.endDate)))
  }

  if (params.search) {
    const searchTerm = `%${params.search}%`
    conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
  }

  if (params.workflowName) {
    const nameTerm = `%${params.workflowName}%`
    conditions = and(conditions, sql`${workflow.name} ILIKE ${nameTerm}`)
  }

  if (params.folderName) {
    const folderTerm = `%${params.folderName}%`
    conditions = and(conditions, sql`${workflow.name} ILIKE ${folderTerm}`)
  }

  if (params.executionId) {
    conditions = and(conditions, eq(workflowExecutionLogs.executionId, params.executionId))
  }

  if (params.costOperator && params.costValue !== undefined) {
    const costField = sql`(${workflowExecutionLogs.cost}->>'total')::numeric`
    switch (params.costOperator) {
      case '=':
        conditions = and(conditions, sql`${costField} = ${params.costValue}`)
        break
      case '>':
        conditions = and(conditions, sql`${costField} > ${params.costValue}`)
        break
      case '<':
        conditions = and(conditions, sql`${costField} < ${params.costValue}`)
        break
      case '>=':
        conditions = and(conditions, sql`${costField} >= ${params.costValue}`)
        break
      case '<=':
        conditions = and(conditions, sql`${costField} <= ${params.costValue}`)
        break
      case '!=':
        conditions = and(conditions, sql`${costField} != ${params.costValue}`)
        break
    }
  }

  if (params.durationOperator && params.durationValue !== undefined) {
    const durationField = workflowExecutionLogs.totalDurationMs
    switch (params.durationOperator) {
      case '=':
        conditions = and(conditions, eq(durationField, params.durationValue))
        break
      case '>':
        conditions = and(conditions, gt(durationField, params.durationValue))
        break
      case '<':
        conditions = and(conditions, lt(durationField, params.durationValue))
        break
      case '>=':
        conditions = and(conditions, gte(durationField, params.durationValue))
        break
      case '<=':
        conditions = and(conditions, lte(durationField, params.durationValue))
        break
      case '!=':
        conditions = and(conditions, ne(durationField, params.durationValue))
        break
    }
  // Apply common filters (workflowIds, folderIds, triggers, dates, search, cost, duration)
  // Level filtering is handled above with advanced running/pending state logic
  const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: false })
  if (commonFilters) {
    conditions = and(conditions, commonFilters)
  }

  const logs = await baseQuery
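
The two comment lines above summarize the split: the list route keeps its own running/pending-aware level handling and takes every other filter from the shared helper. A minimal sketch of that composition (not the route's exact code; levelConditions stands in for the route's level logic):

import { workflowExecutionLogs } from '@sim/db/schema'
import { and, eq, type SQL } from 'drizzle-orm'
import { buildFilterConditions, type LogFilterParams } from '@/lib/logs/filters'

// Sketch: scope to the workspace, add the route-specific level conditions,
// then append the shared filters with level filtering disabled.
function buildListWhere(params: LogFilterParams, levelConditions?: SQL): SQL | undefined {
  let conditions: SQL | undefined = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
  if (levelConditions) conditions = and(conditions, levelConditions)

  const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: false })
  if (commonFilters) conditions = and(conditions, commonFilters)

  return conditions
}
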
@@ -4,6 +4,7 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { AlertCircle, Loader2 } from 'lucide-react'
import { useParams } from 'next/navigation'
import { cn } from '@/lib/core/utils/cn'
import { getStartDateFromTimeRange } from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { useFolders } from '@/hooks/queries/folders'
import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs'
@@ -262,6 +263,11 @@ export default function Logs() {
    if (workflowIds.length > 0) params.set('workflowIds', workflowIds.join(','))
    if (folderIds.length > 0) params.set('folderIds', folderIds.join(','))

    const startDate = getStartDateFromTimeRange(timeRange)
    if (startDate) {
      params.set('startDate', startDate.toISOString())
    }

    const parsed = parseQuery(debouncedSearchQuery)
    const extra = queryToApiParams(parsed)
    Object.entries(extra).forEach(([k, v]) => params.set(k, v))
@@ -1,6 +1,7 @@
import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query'
import { getStartDateFromTimeRange } from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import type { LogsResponse, WorkflowLog } from '@/stores/logs/filters/types'
import type { LogsResponse, TimeRange, WorkflowLog } from '@/stores/logs/filters/types'

export const logKeys = {
  all: ['logs'] as const,
@@ -14,7 +15,7 @@ export const logKeys = {
}

interface LogFilters {
  timeRange: string
  timeRange: TimeRange
  level: string
  workflowIds: string[]
  folderIds: string[]
@@ -23,39 +24,6 @@ interface LogFilters {
  limit: number
}

/**
 * Calculates start date from a time range string.
 * Returns null for 'All time' to indicate no date filtering.
 */
function getStartDateFromTimeRange(timeRange: string): Date | null {
  if (timeRange === 'All time') return null

  const now = new Date()

  switch (timeRange) {
    case 'Past 30 minutes':
      return new Date(now.getTime() - 30 * 60 * 1000)
    case 'Past hour':
      return new Date(now.getTime() - 60 * 60 * 1000)
    case 'Past 6 hours':
      return new Date(now.getTime() - 6 * 60 * 60 * 1000)
    case 'Past 12 hours':
      return new Date(now.getTime() - 12 * 60 * 60 * 1000)
    case 'Past 24 hours':
      return new Date(now.getTime() - 24 * 60 * 60 * 1000)
    case 'Past 3 days':
      return new Date(now.getTime() - 3 * 24 * 60 * 60 * 1000)
    case 'Past 7 days':
      return new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000)
    case 'Past 14 days':
      return new Date(now.getTime() - 14 * 24 * 60 * 60 * 1000)
    case 'Past 30 days':
      return new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000)
    default:
      return new Date(0)
  }
}

/**
 * Applies common filter parameters to a URLSearchParams object.
 * Shared between paginated and non-paginated log fetches.
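
The hunk cuts off inside the doc comment above, so the helper it documents is not shown in this diff. Purely as an illustration of what applying the common filter parameters to a URLSearchParams object might look like after this change (a hypothetical sketch, not the file's actual implementation, limited to the filter fields visible above):

import { getStartDateFromTimeRange } from '@/lib/logs/filters'
import type { TimeRange } from '@/stores/logs/filters/types'

// Hypothetical sketch only; the real helper in this hooks file is not shown
// in the diff.
interface SketchLogFilters {
  timeRange: TimeRange
  level: string
  workflowIds: string[]
  folderIds: string[]
}

function applyCommonFilters(params: URLSearchParams, filters: SketchLogFilters): void {
  if (filters.level && filters.level !== 'all') params.set('level', filters.level)
  if (filters.workflowIds.length > 0) params.set('workflowIds', filters.workflowIds.join(','))
  if (filters.folderIds.length > 0) params.set('folderIds', filters.folderIds.join(','))

  const startDate = getStartDateFromTimeRange(filters.timeRange)
  if (startDate) params.set('startDate', startDate.toISOString())
}
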
apps/sim/lib/logs/filters.ts (new file, 258 lines)
@@ -0,0 +1,258 @@
import { workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, eq, gt, gte, inArray, lt, lte, ne, type SQL, sql } from 'drizzle-orm'
import { z } from 'zod'
import type { TimeRange } from '@/stores/logs/filters/types'

/**
 * Shared schema for log filter parameters.
 * Used by both the logs list API and export API.
 */
export const LogFilterParamsSchema = z.object({
  workspaceId: z.string(),
  level: z.string().optional(),
  workflowIds: z.string().optional(),
  folderIds: z.string().optional(),
  triggers: z.string().optional(),
  startDate: z.string().optional(),
  endDate: z.string().optional(),
  search: z.string().optional(),
  workflowName: z.string().optional(),
  folderName: z.string().optional(),
  executionId: z.string().optional(),
  costOperator: z.enum(['=', '>', '<', '>=', '<=', '!=']).optional(),
  costValue: z.coerce.number().optional(),
  durationOperator: z.enum(['=', '>', '<', '>=', '<=', '!=']).optional(),
  durationValue: z.coerce.number().optional(),
})

export type LogFilterParams = z.infer<typeof LogFilterParamsSchema>

/**
 * Calculates start date from a time range string.
 * Returns null for 'All time' to indicate no date filtering.
 * @param timeRange - The time range option selected by the user
 * @returns Date object for the start of the range, or null for 'All time'
 */
export function getStartDateFromTimeRange(timeRange: TimeRange): Date | null {
  if (timeRange === 'All time') return null

  const now = new Date()

  switch (timeRange) {
    case 'Past 30 minutes':
      return new Date(now.getTime() - 30 * 60 * 1000)
    case 'Past hour':
      return new Date(now.getTime() - 60 * 60 * 1000)
    case 'Past 6 hours':
      return new Date(now.getTime() - 6 * 60 * 60 * 1000)
    case 'Past 12 hours':
      return new Date(now.getTime() - 12 * 60 * 60 * 1000)
    case 'Past 24 hours':
      return new Date(now.getTime() - 24 * 60 * 60 * 1000)
    case 'Past 3 days':
      return new Date(now.getTime() - 3 * 24 * 60 * 60 * 1000)
    case 'Past 7 days':
      return new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000)
    case 'Past 14 days':
      return new Date(now.getTime() - 14 * 24 * 60 * 60 * 1000)
    case 'Past 30 days':
      return new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000)
    default:
      return new Date(0)
  }
}

type ComparisonOperator = '=' | '>' | '<' | '>=' | '<=' | '!='

function buildWorkflowIdsCondition(workflowIds: string): SQL | undefined {
  const ids = workflowIds.split(',').filter(Boolean)
  if (ids.length > 0) {
    return inArray(workflow.id, ids)
  }
  return undefined
}

function buildFolderIdsCondition(folderIds: string): SQL | undefined {
  const ids = folderIds.split(',').filter(Boolean)
  if (ids.length > 0) {
    return inArray(workflow.folderId, ids)
  }
  return undefined
}

function buildTriggersCondition(triggers: string): SQL | undefined {
  const triggerList = triggers.split(',').filter(Boolean)
  if (triggerList.length > 0 && !triggerList.includes('all')) {
    return inArray(workflowExecutionLogs.trigger, triggerList)
  }
  return undefined
}

function buildDateConditions(
  startDate?: string,
  endDate?: string
): { startCondition?: SQL; endCondition?: SQL } {
  const result: { startCondition?: SQL; endCondition?: SQL } = {}

  if (startDate) {
    result.startCondition = gte(workflowExecutionLogs.startedAt, new Date(startDate))
  }
  if (endDate) {
    result.endCondition = lte(workflowExecutionLogs.startedAt, new Date(endDate))
  }

  return result
}

function buildSearchConditions(params: {
  search?: string
  workflowName?: string
  folderName?: string
  executionId?: string
}): SQL[] {
  const conditions: SQL[] = []

  if (params.search) {
    const searchTerm = `%${params.search}%`
    conditions.push(sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
  }

  if (params.workflowName) {
    const nameTerm = `%${params.workflowName}%`
    conditions.push(sql`${workflow.name} ILIKE ${nameTerm}`)
  }

  if (params.folderName) {
    const folderTerm = `%${params.folderName}%`
    conditions.push(sql`${workflow.name} ILIKE ${folderTerm}`)
  }

  if (params.executionId) {
    conditions.push(eq(workflowExecutionLogs.executionId, params.executionId))
  }

  return conditions
}

function buildCostCondition(operator: ComparisonOperator, value: number): SQL {
  const costField = sql`(${workflowExecutionLogs.cost}->>'total')::numeric`

  switch (operator) {
    case '=':
      return sql`${costField} = ${value}`
    case '>':
      return sql`${costField} > ${value}`
    case '<':
      return sql`${costField} < ${value}`
    case '>=':
      return sql`${costField} >= ${value}`
    case '<=':
      return sql`${costField} <= ${value}`
    case '!=':
      return sql`${costField} != ${value}`
  }
}

function buildDurationCondition(operator: ComparisonOperator, value: number): SQL | undefined {
  const durationField = workflowExecutionLogs.totalDurationMs

  switch (operator) {
    case '=':
      return eq(durationField, value)
    case '>':
      return gt(durationField, value)
    case '<':
      return lt(durationField, value)
    case '>=':
      return gte(durationField, value)
    case '<=':
      return lte(durationField, value)
    case '!=':
      return ne(durationField, value)
  }
}

/**
 * Builds SQL conditions for simple level filtering (used by export API).
 * Does not handle complex running/pending states.
 */
export function buildSimpleLevelCondition(level: string): SQL | undefined {
  if (!level || level === 'all') return undefined

  const levels = level.split(',').filter(Boolean)
  if (levels.length === 1) {
    return eq(workflowExecutionLogs.level, levels[0])
  }
  if (levels.length > 1) {
    return inArray(workflowExecutionLogs.level, levels)
  }
  return undefined
}

export interface BuildFilterConditionsOptions {
  /**
   * Whether to use simple level filtering (just matches level string).
   * Set to false to skip level filtering (caller will handle it separately).
   */
  useSimpleLevelFilter?: boolean
}

/**
 * Builds combined SQL conditions from log filter parameters.
 * Returns a single SQL condition that can be used in a WHERE clause.
 * @param params - The filter parameters from the request
 * @param options - Configuration options for filter building
 * @returns Combined SQL condition or undefined if no filters
 */
export function buildFilterConditions(
  params: LogFilterParams,
  options: BuildFilterConditionsOptions = {}
): SQL | undefined {
  const { useSimpleLevelFilter = true } = options
  const conditions: SQL[] = []

  if (useSimpleLevelFilter && params.level) {
    const levelCondition = buildSimpleLevelCondition(params.level)
    if (levelCondition) conditions.push(levelCondition)
  }

  if (params.workflowIds) {
    const condition = buildWorkflowIdsCondition(params.workflowIds)
    if (condition) conditions.push(condition)
  }

  if (params.folderIds) {
    const condition = buildFolderIdsCondition(params.folderIds)
    if (condition) conditions.push(condition)
  }

  if (params.triggers) {
    const condition = buildTriggersCondition(params.triggers)
    if (condition) conditions.push(condition)
  }

  const { startCondition, endCondition } = buildDateConditions(params.startDate, params.endDate)
  if (startCondition) conditions.push(startCondition)
  if (endCondition) conditions.push(endCondition)

  const searchConditions = buildSearchConditions({
    search: params.search,
    workflowName: params.workflowName,
    folderName: params.folderName,
    executionId: params.executionId,
  })
  conditions.push(...searchConditions)

  if (params.costOperator && params.costValue !== undefined) {
    conditions.push(buildCostCondition(params.costOperator, params.costValue))
  }

  if (params.durationOperator && params.durationValue !== undefined) {
    const condition = buildDurationCondition(params.durationOperator, params.durationValue)
    if (condition) conditions.push(condition)
  }

  if (conditions.length === 0) return undefined
  if (conditions.length === 1) return conditions[0]
  return and(...conditions)
}
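
Taken together, the new module gives both routes a single place to turn raw query params into a WHERE clause. A minimal usage sketch, assuming a Drizzle db instance and the tables above (the real routes also join the workflow table, which the name/folder filters rely on, and select specific columns):

import { db } from '@sim/db'
import { workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq } from 'drizzle-orm'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'

// Sketch: parse the query string, scope to the workspace, then apply the
// shared filters. Filters that reference the workflow table (workflowIds,
// folderIds, workflowName, folderName) need the join the real routes perform.
async function fetchFilteredLogs(searchParams: URLSearchParams) {
  const params = LogFilterParamsSchema.parse(Object.fromEntries(searchParams.entries()))

  const workspaceCondition = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
  const filterConditions = buildFilterConditions(params)
  const conditions = filterConditions
    ? and(workspaceCondition, filterConditions)
    : workspaceCondition

  return db
    .select()
    .from(workflowExecutionLogs)
    .where(conditions)
    .orderBy(desc(workflowExecutionLogs.startedAt))
    .limit(100)
}
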