Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-09 06:58:07 -05:00
feat(test-framework): add executions logs for test framework (#1639)
* Starting logs page
* Execution history v0
* Execution dashboard
* Fix scroll
* Add open workflow button
* Lint
* Fix fetchExecutions in dependency array

Co-authored-by: Waleed <walif6@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Committed by GitHub · parent 7595e54dfb · commit 1a05ef97d6
@@ -0,0 +1,305 @@
import { db } from '@sim/db'
import { permissions, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'

const logger = createLogger('WorkflowExecutionDetailsAPI')

const QueryParamsSchema = z.object({
  timeFilter: z.enum(['1h', '12h', '24h', '1w']).optional(),
  startTime: z.string().optional(),
  endTime: z.string().optional(),
  triggers: z.string().optional(),
})

function getTimeRangeMs(filter: string): number {
  switch (filter) {
    case '1h':
      return 60 * 60 * 1000
    case '12h':
      return 12 * 60 * 60 * 1000
    case '24h':
      return 24 * 60 * 60 * 1000
    case '1w':
      return 7 * 24 * 60 * 60 * 1000
    default:
      return 24 * 60 * 60 * 1000
  }
}

export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; workflowId: string }> }
) {
  const requestId = generateRequestId()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized workflow details access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id
    const { id: workspaceId, workflowId } = await params
    const { searchParams } = new URL(request.url)
    const queryParams = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

    // Calculate time range - use custom times if provided, otherwise use timeFilter
    let endTime: Date
    let startTime: Date

    if (queryParams.startTime && queryParams.endTime) {
      startTime = new Date(queryParams.startTime)
      endTime = new Date(queryParams.endTime)
    } else {
      endTime = new Date()
      const timeRangeMs = getTimeRangeMs(queryParams.timeFilter || '24h')
      startTime = new Date(endTime.getTime() - timeRangeMs)
    }

    const timeRangeMs = endTime.getTime() - startTime.getTime()

    // Number of data points for the line charts
    const dataPoints = 30
    const segmentDurationMs = timeRangeMs / dataPoints

    logger.debug(`[${requestId}] Fetching workflow details for ${workflowId}`)

    // Check permissions
    const [permission] = await db
      .select()
      .from(permissions)
      .where(
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .limit(1)

    if (!permission) {
      logger.warn(`[${requestId}] User ${userId} has no permission for workspace ${workspaceId}`)
      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
    }

    // Build conditions for log filtering
    const logConditions = [
      eq(workflowExecutionLogs.workflowId, workflowId),
      gte(workflowExecutionLogs.startedAt, startTime),
    ]

    // Add trigger filter if specified
    if (queryParams.triggers) {
      const triggerList = queryParams.triggers.split(',')
      logConditions.push(inArray(workflowExecutionLogs.trigger, triggerList))
    }

    // Fetch all logs for this workflow in the time range
    const logs = await db
      .select({
        id: workflowExecutionLogs.id,
        executionId: workflowExecutionLogs.executionId,
        level: workflowExecutionLogs.level,
        trigger: workflowExecutionLogs.trigger,
        startedAt: workflowExecutionLogs.startedAt,
        totalDurationMs: workflowExecutionLogs.totalDurationMs,
        executionData: workflowExecutionLogs.executionData,
        cost: workflowExecutionLogs.cost,
      })
      .from(workflowExecutionLogs)
      .where(and(...logConditions))
      .orderBy(desc(workflowExecutionLogs.startedAt))
      .limit(50)

    // Calculate metrics for each time segment
    const errorRates: { timestamp: string; value: number }[] = []
    const durations: { timestamp: string; value: number }[] = []
    const executionCounts: { timestamp: string; value: number }[] = []

    for (let i = 0; i < dataPoints; i++) {
      const segmentStart = new Date(startTime.getTime() + i * segmentDurationMs)
      const segmentEnd = new Date(startTime.getTime() + (i + 1) * segmentDurationMs)

      // Filter logs for this segment
      const segmentLogs = logs.filter((log) => {
        const logTime = log.startedAt.getTime()
        return logTime >= segmentStart.getTime() && logTime < segmentEnd.getTime()
      })

      const totalExecutions = segmentLogs.length
      const errorExecutions = segmentLogs.filter((log) => log.level === 'error').length
      const errorRate = totalExecutions > 0 ? (errorExecutions / totalExecutions) * 100 : 0

      // Calculate average duration for this segment
      const durationsInSegment = segmentLogs
        .filter((log) => log.totalDurationMs !== null)
        .map((log) => log.totalDurationMs!)
      const avgDuration =
        durationsInSegment.length > 0
          ? durationsInSegment.reduce((sum, d) => sum + d, 0) / durationsInSegment.length
          : 0

      errorRates.push({
        timestamp: segmentStart.toISOString(),
        value: errorRate,
      })

      durations.push({
        timestamp: segmentStart.toISOString(),
        value: avgDuration,
      })

      executionCounts.push({
        timestamp: segmentStart.toISOString(),
        value: totalExecutions,
      })
    }

    // Helper function to recursively search for error in trace spans
    const findErrorInSpans = (spans: any[]): string | null => {
      for (const span of spans) {
        if (span.status === 'error' && span.output?.error) {
          return span.output.error
        }
        if (span.children && Array.isArray(span.children)) {
          const childError = findErrorInSpans(span.children)
          if (childError) return childError
        }
      }
      return null
    }

    // Helper function to get all blocks from trace spans (flattened)
    const flattenTraceSpans = (spans: any[]): any[] => {
      const flattened: any[] = []
      for (const span of spans) {
        if (span.type !== 'workflow') {
          flattened.push(span)
        }
        if (span.children && Array.isArray(span.children)) {
          flattened.push(...flattenTraceSpans(span.children))
        }
      }
      return flattened
    }

    // Format logs for response
    const formattedLogs = logs.map((log) => {
      const executionData = log.executionData as any
      const triggerData = executionData?.trigger || {}
      const traceSpans = executionData?.traceSpans || []

      // Extract error message from trace spans
      let errorMessage = null
      if (log.level === 'error') {
        errorMessage = findErrorInSpans(traceSpans)
        // Fallback to executionData.errorDetails
        if (!errorMessage) {
          errorMessage = executionData?.errorDetails?.error || null
        }
      }

      // Extract outputs from the last block in trace spans
      let outputs = null
      let cost = null

      if (traceSpans.length > 0) {
        // Flatten all blocks from trace spans
        const allBlocks = flattenTraceSpans(traceSpans)

        // Find the last successful block execution
        const successBlocks = allBlocks.filter(
          (span: any) =>
            span.status !== 'error' && span.output && Object.keys(span.output).length > 0
        )

        if (successBlocks.length > 0) {
          const lastBlock = successBlocks[successBlocks.length - 1]
          const blockOutput = lastBlock.output || {}

          // Clean up the output to show meaningful data
          // Priority: content > result > data > the whole output object
          if (blockOutput.content) {
            outputs = { content: blockOutput.content }
          } else if (blockOutput.result !== undefined) {
            outputs = { result: blockOutput.result }
          } else if (blockOutput.data !== undefined) {
            outputs = { data: blockOutput.data }
          } else {
            // Filter out internal/metadata fields for cleaner display
            const cleanOutput: any = {}
            for (const [key, value] of Object.entries(blockOutput)) {
              if (
                ![
                  'executionTime',
                  'tokens',
                  'model',
                  'cost',
                  'childTraceSpans',
                  'error',
                  'stackTrace',
                ].includes(key)
              ) {
                cleanOutput[key] = value
              }
            }
            if (Object.keys(cleanOutput).length > 0) {
              outputs = cleanOutput
            }
          }

          // Extract cost from the block output
          if (blockOutput.cost) {
            cost = blockOutput.cost
          }
        }
      }

      // Use the cost stored at the top-level in workflowExecutionLogs table
      // This is the same cost shown in the logs page
      const logCost = log.cost as any

      return {
        id: log.id,
        executionId: log.executionId,
        startedAt: log.startedAt.toISOString(),
        level: log.level,
        trigger: log.trigger,
        triggerUserId: triggerData.userId || null,
        triggerInputs: triggerData.inputs || triggerData.data || null,
        outputs,
        errorMessage,
        duration: log.totalDurationMs,
        cost: logCost
          ? {
              input: logCost.input || 0,
              output: logCost.output || 0,
              total: logCost.total || 0,
            }
          : null,
      }
    })

    logger.debug(`[${requestId}] Successfully calculated workflow details`)

    logger.debug(`[${requestId}] Returning ${formattedLogs.length} execution logs`)

    return NextResponse.json({
      errorRates,
      durations,
      executionCounts,
      logs: formattedLogs,
      startTime: startTime.toISOString(),
      endTime: endTime.toISOString(),
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching workflow details:`, error)
    return NextResponse.json({ error: 'Failed to fetch workflow details' }, { status: 500 })
  }
}
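Illustrative client sketch (not part of the commit): fetching the per-workflow execution details returned by the GET handler above. This route's filesystem path is not visible in this hunk, so the URL used here is an assumed placeholder derived from the `{ id, workflowId }` route params; the query parameters mirror QueryParamsSchema.

interface MetricPoint {
  timestamp: string
  value: number
}

interface WorkflowDetailsResponse {
  errorRates: MetricPoint[]
  durations: MetricPoint[]
  executionCounts: MetricPoint[]
  logs: unknown[] // entries follow the formattedLogs shape built in the handler
  startTime: string
  endTime: string
}

async function fetchWorkflowDetails(
  workspaceId: string,
  workflowId: string,
  timeFilter: '1h' | '12h' | '24h' | '1w' = '24h'
): Promise<WorkflowDetailsResponse> {
  // Query params match QueryParamsSchema: timeFilter OR startTime/endTime, plus optional triggers.
  const params = new URLSearchParams({ timeFilter })
  // Placeholder URL (assumption) -- adjust to wherever this route file actually lives.
  const res = await fetch(
    `/api/workspaces/${workspaceId}/workflows/${workflowId}/details?${params}`
  )
  if (!res.ok) throw new Error(`Request failed: ${res.status}`)
  return (await res.json()) as WorkflowDetailsResponse
}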
apps/sim/app/api/workspaces/[id]/execution-history/route.ts · 223 lines (new file)
@@ -0,0 +1,223 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, eq, gte, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'

const logger = createLogger('ExecutionHistoryAPI')

const QueryParamsSchema = z.object({
  timeFilter: z.enum(['1h', '12h', '24h', '1w']).optional(),
  startTime: z.string().optional(),
  endTime: z.string().optional(),
  segments: z.coerce.number().min(1).max(200).default(120),
  workflowIds: z.string().optional(),
  folderIds: z.string().optional(),
  triggers: z.string().optional(),
})

interface TimeSegment {
  successRate: number
  timestamp: string
  hasExecutions: boolean
  totalExecutions: number
  successfulExecutions: number
}

interface WorkflowExecution {
  workflowId: string
  workflowName: string
  segments: TimeSegment[]
  overallSuccessRate: number
}

function getTimeRangeMs(filter: string): number {
  switch (filter) {
    case '1h':
      return 60 * 60 * 1000
    case '12h':
      return 12 * 60 * 60 * 1000
    case '24h':
      return 24 * 60 * 60 * 1000
    case '1w':
      return 7 * 24 * 60 * 60 * 1000
    default:
      return 24 * 60 * 60 * 1000
  }
}

export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized execution history access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id
    const { id: workspaceId } = await params
    const { searchParams } = new URL(request.url)
    const queryParams = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

    // Calculate time range - use custom times if provided, otherwise use timeFilter
    let endTime: Date
    let startTime: Date

    if (queryParams.startTime && queryParams.endTime) {
      startTime = new Date(queryParams.startTime)
      endTime = new Date(queryParams.endTime)
    } else {
      endTime = new Date()
      const timeRangeMs = getTimeRangeMs(queryParams.timeFilter || '24h')
      startTime = new Date(endTime.getTime() - timeRangeMs)
    }

    const timeRangeMs = endTime.getTime() - startTime.getTime()
    const segmentDurationMs = timeRangeMs / queryParams.segments

    logger.debug(`[${requestId}] Fetching execution history for workspace ${workspaceId}`)
    logger.debug(
      `[${requestId}] Time range: ${startTime.toISOString()} to ${endTime.toISOString()}`
    )
    logger.debug(
      `[${requestId}] Segments: ${queryParams.segments}, duration: ${segmentDurationMs}ms`
    )

    // Check permissions
    const [permission] = await db
      .select()
      .from(permissions)
      .where(
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .limit(1)

    if (!permission) {
      logger.warn(`[${requestId}] User ${userId} has no permission for workspace ${workspaceId}`)
      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
    }

    // Build workflow query conditions
    const workflowConditions = [eq(workflow.workspaceId, workspaceId)]

    // Apply workflow ID filter
    if (queryParams.workflowIds) {
      const workflowIdList = queryParams.workflowIds.split(',')
      workflowConditions.push(inArray(workflow.id, workflowIdList))
    }

    // Apply folder ID filter
    if (queryParams.folderIds) {
      const folderIdList = queryParams.folderIds.split(',')
      workflowConditions.push(inArray(workflow.folderId, folderIdList))
    }

    // Get all workflows in the workspace with optional filters
    const workflows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
      })
      .from(workflow)
      .where(and(...workflowConditions))

    logger.debug(`[${requestId}] Found ${workflows.length} workflows`)

    // Use Promise.all to fetch logs in parallel per workflow
    // This is better than single query when workflows have 10k+ logs each
    const workflowExecutions: WorkflowExecution[] = await Promise.all(
      workflows.map(async (wf) => {
        // Build conditions for log filtering
        const logConditions = [
          eq(workflowExecutionLogs.workflowId, wf.id),
          gte(workflowExecutionLogs.startedAt, startTime),
        ]

        // Add trigger filter if specified
        if (queryParams.triggers) {
          const triggerList = queryParams.triggers.split(',')
          logConditions.push(inArray(workflowExecutionLogs.trigger, triggerList))
        }

        // Fetch logs for this workflow - runs in parallel with others
        const logs = await db
          .select({
            id: workflowExecutionLogs.id,
            level: workflowExecutionLogs.level,
            startedAt: workflowExecutionLogs.startedAt,
          })
          .from(workflowExecutionLogs)
          .where(and(...logConditions))

        // Initialize segments with timestamps
        const segments: TimeSegment[] = []
        let totalSuccess = 0
        let totalExecutions = 0

        for (let i = 0; i < queryParams.segments; i++) {
          const segmentStart = new Date(startTime.getTime() + i * segmentDurationMs)
          const segmentEnd = new Date(startTime.getTime() + (i + 1) * segmentDurationMs)

          // Count executions in this segment
          const segmentLogs = logs.filter((log) => {
            const logTime = log.startedAt.getTime()
            return logTime >= segmentStart.getTime() && logTime < segmentEnd.getTime()
          })

          const segmentTotal = segmentLogs.length
          const segmentErrors = segmentLogs.filter((log) => log.level === 'error').length
          const segmentSuccess = segmentTotal - segmentErrors

          // Calculate success rate (default to 100% if no executions in this segment)
          const hasExecutions = segmentTotal > 0
          const successRate = hasExecutions ? (segmentSuccess / segmentTotal) * 100 : 100

          segments.push({
            successRate,
            timestamp: segmentStart.toISOString(),
            hasExecutions,
            totalExecutions: segmentTotal,
            successfulExecutions: segmentSuccess,
          })

          totalExecutions += segmentTotal
          totalSuccess += segmentSuccess
        }

        // Calculate overall success rate (percentage of non-errored executions)
        const overallSuccessRate =
          totalExecutions > 0 ? (totalSuccess / totalExecutions) * 100 : 100

        return {
          workflowId: wf.id,
          workflowName: wf.name,
          segments,
          overallSuccessRate,
        }
      })
    )

    logger.debug(
      `[${requestId}] Successfully calculated execution history for ${workflowExecutions.length} workflows`
    )

    return NextResponse.json({
      workflows: workflowExecutions,
      segments: queryParams.segments,
      startTime: startTime.toISOString(),
      endTime: endTime.toISOString(),
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching execution history:`, error)
    return NextResponse.json({ error: 'Failed to fetch execution history' }, { status: 500 })
  }
}
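Illustrative client sketch (not part of the commit): querying the execution-history route above, whose path is known from the file name (apps/sim/app/api/workspaces/[id]/execution-history/route.ts). The response typing follows the WorkflowExecution and TimeSegment interfaces defined in the handler; the option names are taken from QueryParamsSchema.

interface ExecutionHistoryResponse {
  workflows: {
    workflowId: string
    workflowName: string
    overallSuccessRate: number
    segments: {
      successRate: number
      timestamp: string
      hasExecutions: boolean
      totalExecutions: number
      successfulExecutions: number
    }[]
  }[]
  segments: number
  startTime: string
  endTime: string
}

async function fetchExecutionHistory(
  workspaceId: string,
  opts: { timeFilter?: '1h' | '12h' | '24h' | '1w'; segments?: number; triggers?: string[] } = {}
): Promise<ExecutionHistoryResponse> {
  const params = new URLSearchParams()
  if (opts.timeFilter) params.set('timeFilter', opts.timeFilter)
  if (opts.segments) params.set('segments', String(opts.segments))
  if (opts.triggers?.length) params.set('triggers', opts.triggers.join(','))

  // Route path taken from the file name in this commit.
  const res = await fetch(`/api/workspaces/${workspaceId}/execution-history?${params}`)
  if (!res.ok) throw new Error(`Request failed: ${res.status}`)
  return (await res.json()) as ExecutionHistoryResponse
}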
@@ -12,7 +12,12 @@ import type { TimeRange } from '@/stores/logs/filters/types'
export default function Timeline() {
  const { timeRange, setTimeRange } = useFilterStore()
  const specificTimeRanges: TimeRange[] = ['Past 30 minutes', 'Past hour', 'Past 24 hours']
  const specificTimeRanges: TimeRange[] = [
    'Past 30 minutes',
    'Past hour',
    'Past 12 hours',
    'Past 24 hours',
  ]

  return (
    <DropdownMenu>
apps/sim/app/workspace/[workspaceId]/logs/executions-dashboard.tsx · 1155 lines (new file)
File diff suppressed because it is too large
@@ -4,12 +4,14 @@ import { useCallback, useEffect, useRef, useState } from 'react'
import { AlertCircle, Info, Loader2, Play, RefreshCw, Square } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { Switch } from '@/components/ui/switch'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { cn } from '@/lib/utils'
import { AutocompleteSearch } from '@/app/workspace/[workspaceId]/logs/components/search/search'
import { Sidebar } from '@/app/workspace/[workspaceId]/logs/components/sidebar/sidebar'
import ExecutionsDashboard from '@/app/workspace/[workspaceId]/logs/executions-dashboard'
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils/format-date'
import { useDebounce } from '@/hooks/use-debounce'
import { useFolderStore } from '@/stores/folders/store'
@@ -76,6 +78,8 @@ export default function Logs() {
    searchQuery: storeSearchQuery,
    setSearchQuery: setStoreSearchQuery,
    triggers,
    viewMode,
    setViewMode,
  } = useFilterStore()

  useEffect(() => {
@@ -661,8 +665,13 @@ export default function Logs() {
    return () => window.removeEventListener('keydown', handleKeyDown)
  }, [logs, selectedLogIndex, isSidebarOpen, selectedLog, handleNavigateNext, handleNavigatePrev])

  // If in dashboard mode, show the dashboard
  if (viewMode === 'dashboard') {
    return <ExecutionsDashboard />
  }

  return (
    <div className='flex h-[100vh] min-w-0 flex-col pl-64'>
    <div className='flex h-full min-w-0 flex-col pl-64'>
      {/* Add the animation styles */}
      <style jsx global>
        {selectedRowAnimation}
@@ -670,13 +679,6 @@ export default function Logs() {
      <div className='flex min-w-0 flex-1 overflow-hidden'>
        <div className='flex flex-1 flex-col overflow-auto p-6'>
          {/* Header */}
          <div className='mb-5'>
            <h1 className='font-sans font-semibold text-3xl text-foreground tracking-[0.01em]'>
              Logs
            </h1>
          </div>

          {/* Search and Controls */}
          <div className='mb-8 flex flex-col items-stretch justify-between gap-4 sm:flex-row sm:items-start'>
            <AutocompleteSearch
@@ -754,6 +756,23 @@ export default function Logs() {
                )}
                <span>Live</span>
              </Button>

              <Tooltip>
                <TooltipTrigger asChild>
                  <div className='flex items-center rounded-[11px] border bg-card p-2'>
                    <Switch
                      checked={(viewMode as string) === 'dashboard'}
                      onCheckedChange={(checked) => setViewMode(checked ? 'dashboard' : 'logs')}
                      className='data-[state=checked]:bg-primary'
                    />
                  </div>
                </TooltipTrigger>
                <TooltipContent>
                  {(viewMode as string) === 'dashboard'
                    ? 'Switch to logs view'
                    : 'Switch to executions dashboard'}
                </TooltipContent>
              </Tooltip>
            </div>
          </div>
@@ -7,21 +7,27 @@ import Level from '@/app/workspace/[workspaceId]/logs/components/filters/compone
import Timeline from '@/app/workspace/[workspaceId]/logs/components/filters/components/timeline'
import Trigger from '@/app/workspace/[workspaceId]/logs/components/filters/components/trigger'
import Workflow from '@/app/workspace/[workspaceId]/logs/components/filters/components/workflow'
import { useFilterStore } from '@/stores/logs/filters/store'

export function LogsFilters() {
  const viewMode = useFilterStore((state) => state.viewMode)

  const sections = [
    { key: 'level', title: 'Level', component: <Level /> },
    { key: 'workflow', title: 'Workflow', component: <Workflow /> },
    { key: 'folder', title: 'Folder', component: <FolderFilter /> },
    { key: 'trigger', title: 'Trigger', component: <Trigger /> },
    { key: 'timeline', title: 'Timeline', component: <Timeline /> },
    { key: 'level', title: 'Level', component: <Level />, showInDashboard: false },
    { key: 'workflow', title: 'Workflow', component: <Workflow />, showInDashboard: true },
    { key: 'folder', title: 'Folder', component: <FolderFilter />, showInDashboard: true },
    { key: 'trigger', title: 'Trigger', component: <Trigger />, showInDashboard: true },
    { key: 'timeline', title: 'Timeline', component: <Timeline />, showInDashboard: true },
  ]

  const filteredSections =
    viewMode === 'dashboard' ? sections.filter((section) => section.showInDashboard) : sections

  return (
    <div className='h-full'>
      <ScrollArea className='h-full' hideScrollbar={true}>
        <div className='space-y-4 px-3 py-3'>
          {sections.map((section) => (
          {filteredSections.map((section) => (
            <FilterSection key={section.key} title={section.title} content={section.component} />
          ))}
        </div>
@@ -21,6 +21,8 @@ const parseTimeRangeFromURL = (value: string | null): TimeRange => {
      return 'Past 30 minutes'
    case 'past-hour':
      return 'Past hour'
    case 'past-12-hours':
      return 'Past 12 hours'
    case 'past-24-hours':
      return 'Past 24 hours'
    default:
@@ -51,6 +53,8 @@ const timeRangeToURL = (timeRange: TimeRange): string => {
      return 'past-30-minutes'
    case 'Past hour':
      return 'past-hour'
    case 'Past 12 hours':
      return 'past-12-hours'
    case 'Past 24 hours':
      return 'past-24-hours'
    default:
@@ -61,6 +65,7 @@ const timeRangeToURL = (timeRange: TimeRange): string => {
export const useFilterStore = create<FilterState>((set, get) => ({
  logs: [],
  workspaceId: '',
  viewMode: 'logs',
  timeRange: 'All time',
  level: 'all',
  workflowIds: [],
@@ -86,6 +91,8 @@ export const useFilterStore = create<FilterState>((set, get) => ({
  setWorkspaceId: (workspaceId) => set({ workspaceId }),

  setViewMode: (viewMode) => set({ viewMode }),

  setTimeRange: (timeRange) => {
    set({ timeRange })
    get().resetPagination()
@@ -298,6 +305,9 @@ export const useFilterStore = create<FilterState>((set, get) => ({
      case 'Past hour':
        startDate = new Date(now.getTime() - 60 * 60 * 1000)
        break
      case 'Past 12 hours':
        startDate = new Date(now.getTime() - 12 * 60 * 60 * 1000)
        break
      case 'Past 24 hours':
        startDate = new Date(now.getTime() - 24 * 60 * 60 * 1000)
        break
@@ -151,7 +151,12 @@ export interface LogsResponse {
  totalPages: number
}

export type TimeRange = 'Past 30 minutes' | 'Past hour' | 'Past 24 hours' | 'All time'
export type TimeRange =
  | 'Past 30 minutes'
  | 'Past hour'
  | 'Past 12 hours'
  | 'Past 24 hours'
  | 'All time'
export type LogLevel = 'error' | 'info' | 'all'
export type TriggerType = 'chat' | 'api' | 'webhook' | 'manual' | 'schedule' | 'all'
@@ -162,6 +167,9 @@ export interface FilterState {
  // Workspace context
  workspaceId: string

  // View mode
  viewMode: 'logs' | 'dashboard'

  // Filter states
  timeRange: TimeRange
  level: LogLevel
@@ -185,6 +193,7 @@ export interface FilterState {
  // Actions
  setLogs: (logs: WorkflowLog[], append?: boolean) => void
  setWorkspaceId: (workspaceId: string) => void
  setViewMode: (viewMode: 'logs' | 'dashboard') => void
  setTimeRange: (timeRange: TimeRange) => void
  setLevel: (level: LogLevel) => void
  setWorkflowIds: (workflowIds: string[]) => void
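Minimal sketch (assumed usage, not from the diff): any client component can read and flip the new view mode through the filter store, mirroring the Switch wiring added to the Logs page above. The store import and the viewMode/setViewMode members come from this commit; the component itself is hypothetical.

'use client'

import { useFilterStore } from '@/stores/logs/filters/store'

export function DashboardToggle() {
  // Select only what this component needs from the zustand store.
  const viewMode = useFilterStore((state) => state.viewMode)
  const setViewMode = useFilterStore((state) => state.setViewMode)

  return (
    <button onClick={() => setViewMode(viewMode === 'dashboard' ? 'logs' : 'dashboard')}>
      {viewMode === 'dashboard' ? 'Show logs' : 'Show executions dashboard'}
    </button>
  )
}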