improvement(logs): cleanup code (#999)

This commit is contained in:
Vikhyath Mondreti
2025-08-16 13:44:00 -07:00
committed by GitHub
parent 8748e1d5f9
commit f254d70624
15 changed files with 6470 additions and 328 deletions

View File

@@ -46,20 +46,7 @@ export async function GET(
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString(),
totalDurationMs: workflowLog.totalDurationMs,
blockStats: {
total: workflowLog.blockCount,
success: workflowLog.successCount,
error: workflowLog.errorCount,
skipped: workflowLog.skippedCount,
},
cost: {
total: workflowLog.totalCost ? Number.parseFloat(workflowLog.totalCost) : null,
input: workflowLog.totalInputCost ? Number.parseFloat(workflowLog.totalInputCost) : null,
output: workflowLog.totalOutputCost
? Number.parseFloat(workflowLog.totalOutputCost)
: null,
},
totalTokens: workflowLog.totalTokens,
cost: workflowLog.cost || null,
},
}

View File

@@ -0,0 +1,102 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
const logger = createLogger('LogDetailsByIdAPI')
export const revalidate = 0
/**
 * GET /api/logs/by-id/[id]
 *
 * Returns full details for a single workflow execution log, joined with a
 * summary of its owning workflow. Access control is enforced in the query
 * itself: an inner join on `permissions` restricts results to logs whose
 * workflow belongs to a workspace the session user has a permission row for.
 *
 * Responses:
 *  - 200 `{ data: {...} }` on success
 *  - 401 when there is no authenticated session
 *  - 404 when the log does not exist OR the user lacks access
 *    (both cases fall through the permission join, which avoids leaking
 *    whether a given log id exists)
 *  - 500 on unexpected errors
 */
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  // Short correlation id so all log lines for this request can be grepped together.
  const requestId = crypto.randomUUID().slice(0, 8)
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized log details access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }
    const userId = session.user.id
    const { id } = await params

    // Single query: log columns + flattened workflow columns, filtered by the
    // permission join described above. limit(1) since `id` is the primary key.
    const rows = await db
      .select({
        id: workflowExecutionLogs.id,
        workflowId: workflowExecutionLogs.workflowId,
        executionId: workflowExecutionLogs.executionId,
        stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
        level: workflowExecutionLogs.level,
        trigger: workflowExecutionLogs.trigger,
        startedAt: workflowExecutionLogs.startedAt,
        endedAt: workflowExecutionLogs.endedAt,
        totalDurationMs: workflowExecutionLogs.totalDurationMs,
        executionData: workflowExecutionLogs.executionData,
        cost: workflowExecutionLogs.cost,
        files: workflowExecutionLogs.files,
        createdAt: workflowExecutionLogs.createdAt,
        workflowName: workflow.name,
        workflowDescription: workflow.description,
        workflowColor: workflow.color,
        workflowFolderId: workflow.folderId,
        workflowUserId: workflow.userId,
        workflowWorkspaceId: workflow.workspaceId,
        workflowCreatedAt: workflow.createdAt,
        workflowUpdatedAt: workflow.updatedAt,
      })
      .from(workflowExecutionLogs)
      .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
      .innerJoin(
        permissions,
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, workflow.workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .where(eq(workflowExecutionLogs.id, id))
      .limit(1)

    const log = rows[0]
    if (!log) {
      // Missing row and missing permission are indistinguishable on purpose.
      return NextResponse.json({ error: 'Not found' }, { status: 404 })
    }

    // Re-nest the flattened workflow columns into a summary object.
    const workflowSummary = {
      id: log.workflowId,
      name: log.workflowName,
      description: log.workflowDescription,
      color: log.workflowColor,
      folderId: log.workflowFolderId,
      userId: log.workflowUserId,
      workspaceId: log.workflowWorkspaceId,
      createdAt: log.workflowCreatedAt,
      updatedAt: log.workflowUpdatedAt,
    }

    const response = {
      id: log.id,
      workflowId: log.workflowId,
      executionId: log.executionId,
      level: log.level,
      duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
      trigger: log.trigger,
      createdAt: log.startedAt.toISOString(),
      files: log.files || undefined,
      workflow: workflowSummary,
      executionData: {
        totalDuration: log.totalDurationMs,
        // NOTE(review): spread after `totalDuration`, so a `totalDuration` key
        // stored inside execution_data overrides the column value — confirm intended.
        ...(log.executionData as any),
        enhanced: true,
      },
      cost: log.cost as any,
    }
    return NextResponse.json({ data: response })
  } catch (error: unknown) {
    // Narrow before reading `.message`: anything can be thrown, and reading
    // `.message` off a non-Error would itself fail. Fall back to a generic
    // message rather than echoing arbitrary thrown values to the client.
    const message = error instanceof Error ? error.message : 'Internal server error'
    logger.error(`[${requestId}] log details fetch error`, error)
    return NextResponse.json({ error: message }, { status: 500 })
  }
}

View File

@@ -99,21 +99,13 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
metadata: workflowExecutionLogs.metadata,
createdAt: workflowExecutionLogs.createdAt,
})
.from(workflowExecutionLogs)

View File

@@ -1,4 +1,4 @@
import { and, desc, eq, gte, inArray, lte, or, type SQL, sql } from 'drizzle-orm'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -44,8 +44,7 @@ function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): any[] {
export const revalidate = 0
const QueryParamsSchema = z.object({
includeWorkflow: z.coerce.boolean().optional().default(false),
includeBlocks: z.coerce.boolean().optional().default(false),
details: z.enum(['basic', 'full']).optional().default('basic'),
limit: z.coerce.number().optional().default(100),
offset: z.coerce.number().optional().default(0),
level: z.string().optional(),
@@ -81,20 +80,12 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
metadata: workflowExecutionLogs.metadata,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
@@ -163,13 +154,8 @@ export async function GET(request: NextRequest) {
// Filter by search query
if (params.search) {
const searchTerm = `%${params.search}%`
conditions = and(
conditions,
or(
sql`${workflowExecutionLogs.message} ILIKE ${searchTerm}`,
sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`
)
)
// With message removed, restrict search to executionId only
conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
}
// Execute the query using the optimized join
@@ -290,31 +276,20 @@ export async function GET(request: NextRequest) {
const enhancedLogs = logs.map((log) => {
const blockExecutions = blockExecutionsByExecution[log.executionId] || []
// Use stored trace spans from metadata if available, otherwise create from block executions
const storedTraceSpans = (log.metadata as any)?.traceSpans
// Use stored trace spans if available, otherwise create from block executions
const storedTraceSpans = (log.executionData as any)?.traceSpans
const traceSpans =
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
? storedTraceSpans
: createTraceSpans(blockExecutions)
// Use extracted cost summary if available, otherwise use stored values
// Prefer stored cost JSON; otherwise synthesize from blocks
const costSummary =
blockExecutions.length > 0
? extractCostSummary(blockExecutions)
: {
input: Number(log.totalInputCost) || 0,
output: Number(log.totalOutputCost) || 0,
total: Number(log.totalCost) || 0,
tokens: {
total: log.totalTokens || 0,
prompt: (log.metadata as any)?.tokenBreakdown?.prompt || 0,
completion: (log.metadata as any)?.tokenBreakdown?.completion || 0,
},
models: (log.metadata as any)?.models || {},
}
log.cost && Object.keys(log.cost as any).length > 0
? (log.cost as any)
: extractCostSummary(blockExecutions)
// Build workflow object from joined data
const workflow = {
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
@@ -329,67 +304,28 @@ export async function GET(request: NextRequest) {
return {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
executionId: params.details === 'full' ? log.executionId : undefined,
level: log.level,
message: log.message,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: log.files || undefined,
workflow: params.includeWorkflow ? workflow : undefined,
metadata: {
totalDuration: log.totalDurationMs,
cost: costSummary,
blockStats: {
total: log.blockCount,
success: log.successCount,
error: log.errorCount,
skipped: log.skippedCount,
},
traceSpans,
blockExecutions,
enhanced: true,
},
files: params.details === 'full' ? log.files || undefined : undefined,
workflow: workflowSummary,
executionData:
params.details === 'full'
? {
totalDuration: log.totalDurationMs,
traceSpans,
blockExecutions,
enhanced: true,
}
: undefined,
cost:
params.details === 'full'
? (costSummary as any)
: { total: (costSummary as any)?.total || 0 },
}
})
// Include block execution data if requested
if (params.includeBlocks) {
// Block executions are now extracted from stored trace spans in metadata
const blockLogsByExecution: Record<string, any[]> = {}
logs.forEach((log) => {
const storedTraceSpans = (log.metadata as any)?.traceSpans
if (storedTraceSpans && Array.isArray(storedTraceSpans)) {
blockLogsByExecution[log.executionId] =
extractBlockExecutionsFromTraceSpans(storedTraceSpans)
} else {
blockLogsByExecution[log.executionId] = []
}
})
// Add block logs to metadata
const logsWithBlocks = enhancedLogs.map((log) => ({
...log,
metadata: {
...log.metadata,
blockExecutions: blockLogsByExecution[log.executionId] || [],
},
}))
return NextResponse.json(
{
data: logsWithBlocks,
total: Number(count),
page: Math.floor(params.offset / params.limit) + 1,
pageSize: params.limit,
totalPages: Math.ceil(Number(count) / params.limit),
},
{ status: 200 }
)
}
// Return basic logs
return NextResponse.json(
{
data: enhancedLogs,

View File

@@ -214,24 +214,16 @@ export function Sidebar({
let blockInput: Record<string, any> | undefined
if (log.metadata?.blockInput) {
blockInput = log.metadata.blockInput
} else if (log.metadata?.traceSpans) {
const blockIdMatch = log.message.match(/Block .+?(\d+)/i)
const blockId = blockIdMatch ? blockIdMatch[1] : null
if (blockId) {
const matchingSpan = log.metadata.traceSpans.find(
(span) => span.blockId === blockId || span.name.includes(`Block ${blockId}`)
)
if (matchingSpan?.input) {
blockInput = matchingSpan.input
}
if (log.executionData?.blockInput) {
blockInput = log.executionData.blockInput
} else if (log.executionData?.traceSpans) {
const firstSpanWithInput = log.executionData.traceSpans.find((s) => s.input)
if (firstSpanWithInput?.input) {
blockInput = firstSpanWithInput.input as any
}
}
return formatJsonContent(log.message, blockInput)
return null
}, [log])
useEffect(() => {
@@ -243,22 +235,16 @@ export function Sidebar({
// Determine if this is a workflow execution log
const isWorkflowExecutionLog = useMemo(() => {
if (!log) return false
// Check if message contains workflow execution phrases (success or failure)
return (
log.message.toLowerCase().includes('workflow executed') ||
log.message.toLowerCase().includes('execution completed') ||
log.message.toLowerCase().includes('workflow execution failed') ||
log.message.toLowerCase().includes('execution failed') ||
(log.trigger === 'manual' && log.duration) ||
// Also check if we have enhanced logging metadata with trace spans
(log.metadata?.enhanced && log.metadata?.traceSpans)
(log.trigger === 'manual' && !!log.duration) ||
(log.executionData?.enhanced && log.executionData?.traceSpans)
)
}, [log])
// Helper to determine if we have cost information to display
// All workflow executions now have cost info (base charge + any model costs)
const hasCostInfo = useMemo(() => {
return isWorkflowExecutionLog && log?.metadata?.cost
return isWorkflowExecutionLog && log?.cost
}, [log, isWorkflowExecutionLog])
const isWorkflowWithCost = useMemo(() => {
@@ -548,12 +534,12 @@ export function Sidebar({
</div>
{/* Trace Spans (if available and this is a workflow execution log) */}
{isWorkflowExecutionLog && log.metadata?.traceSpans && (
{isWorkflowExecutionLog && log.executionData?.traceSpans && (
<div className='w-full'>
<div className='w-full overflow-x-hidden'>
<TraceSpansDisplay
traceSpans={log.metadata.traceSpans}
totalDuration={log.metadata.totalDuration}
traceSpans={log.executionData.traceSpans}
totalDuration={log.executionData.totalDuration}
onExpansionChange={handleTraceSpanToggle}
/>
</div>
@@ -561,11 +547,11 @@ export function Sidebar({
)}
{/* Tool Calls (if available) */}
{log.metadata?.toolCalls && log.metadata.toolCalls.length > 0 && (
{log.executionData?.toolCalls && log.executionData.toolCalls.length > 0 && (
<div className='w-full'>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Tool Calls</h3>
<div className='w-full overflow-x-hidden rounded-md bg-secondary/30 p-3'>
<ToolCallsDisplay metadata={log.metadata} />
<ToolCallsDisplay metadata={log.executionData} />
</div>
</div>
)}
@@ -584,86 +570,80 @@ export function Sidebar({
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Input:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.input || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.input || 0)}</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Output:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.output || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.output || 0)}</span>
</div>
<div className='mt-1 flex items-center justify-between border-t pt-2'>
<span className='text-muted-foreground text-sm'>Total:</span>
<span className='text-foreground text-sm'>
{formatCost(log.metadata?.cost?.total || 0)}
{formatCost(log.cost?.total || 0)}
</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-xs'>Tokens:</span>
<span className='text-muted-foreground text-xs'>
{log.metadata?.cost?.tokens?.prompt || 0} in /{' '}
{log.metadata?.cost?.tokens?.completion || 0} out
{log.cost?.tokens?.prompt || 0} in / {log.cost?.tokens?.completion || 0}{' '}
out
</span>
</div>
</div>
{/* Models Breakdown */}
{log.metadata?.cost?.models &&
Object.keys(log.metadata?.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown (
{Object.keys(log.metadata?.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{log.cost?.models && Object.keys(log.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown ({Object.keys(log.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.metadata?.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
</div>
)
)}
</div>
)}
</div>
)}
</div>
)
)}
</div>
)}
</div>
)}
{isWorkflowWithCost && (
<div className='border-t bg-muted p-3 text-muted-foreground text-xs'>
@@ -688,7 +668,7 @@ export function Sidebar({
executionId={log.executionId}
workflowName={log.workflow?.name}
trigger={log.trigger || undefined}
traceSpans={log.metadata?.traceSpans}
traceSpans={log.executionData?.traceSpans}
isOpen={isFrozenCanvasOpen}
onClose={() => setIsFrozenCanvasOpen(false)}
/>

View File

@@ -85,6 +85,8 @@ export default function Logs() {
const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
const [selectedLogIndex, setSelectedLogIndex] = useState<number>(-1)
const [isSidebarOpen, setIsSidebarOpen] = useState(false)
const detailsCacheRef = useRef<Map<string, any>>(new Map())
const detailsAbortRef = useRef<AbortController | null>(null)
const selectedRowRef = useRef<HTMLTableRowElement | null>(null)
const loaderRef = useRef<HTMLDivElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
@@ -116,13 +118,118 @@ export default function Logs() {
const index = logs.findIndex((l) => l.id === log.id)
setSelectedLogIndex(index)
setIsSidebarOpen(true)
// Fetch details for current, previous, and next concurrently with cache
const currentId = log.id
const prevId = index > 0 ? logs[index - 1]?.id : undefined
const nextId = index < logs.length - 1 ? logs[index + 1]?.id : undefined
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (currentId && !detailsCacheRef.current.has(currentId))
idsToFetch.push({ id: currentId, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (nextId && !detailsCacheRef.current.has(nextId))
idsToFetch.push({ id: nextId, merge: false })
if (idsToFetch.length === 0) {
const cached = detailsCacheRef.current.get(currentId)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === currentId ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
}
return
}
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === currentId) {
setSelectedLog((prev) =>
prev && prev.id === id ? ({ ...(prev as any), ...(detailed as any) } as any) : prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
const handleNavigateNext = useCallback(() => {
if (selectedLogIndex < logs.length - 1) {
const nextIndex = selectedLogIndex + 1
setSelectedLogIndex(nextIndex)
setSelectedLog(logs[nextIndex])
const nextLog = logs[nextIndex]
setSelectedLog(nextLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(nextLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === nextLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const prevId = nextIndex > 0 ? logs[nextIndex - 1]?.id : undefined
const afterId = nextIndex < logs.length - 1 ? logs[nextIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (nextLog.id && !detailsCacheRef.current.has(nextLog.id))
idsToFetch.push({ id: nextLog.id, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === nextLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
@@ -130,7 +237,57 @@ export default function Logs() {
if (selectedLogIndex > 0) {
const prevIndex = selectedLogIndex - 1
setSelectedLogIndex(prevIndex)
setSelectedLog(logs[prevIndex])
const prevLog = logs[prevIndex]
setSelectedLog(prevLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(prevLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === prevLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const beforeId = prevIndex > 0 ? logs[prevIndex - 1]?.id : undefined
const afterId = prevIndex < logs.length - 1 ? logs[prevIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (prevLog.id && !detailsCacheRef.current.has(prevLog.id))
idsToFetch.push({ id: prevLog.id, merge: true })
if (beforeId && !detailsCacheRef.current.has(beforeId))
idsToFetch.push({ id: beforeId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === prevLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
@@ -160,7 +317,7 @@ export default function Logs() {
// Get fresh query params by calling buildQueryParams from store
const { buildQueryParams: getCurrentQueryParams } = useFilterStore.getState()
const queryParams = getCurrentQueryParams(pageNum, LOGS_PER_PAGE)
const response = await fetch(`/api/logs?${queryParams}`)
const response = await fetch(`/api/logs?${queryParams}&details=basic`)
if (!response.ok) {
throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -262,7 +419,7 @@ export default function Logs() {
// Build query params inline to avoid dependency issues
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('details', 'basic')
params.set('limit', LOGS_PER_PAGE.toString())
params.set('offset', '0') // Always start from page 1
params.set('workspaceId', workspaceId)
@@ -482,7 +639,7 @@ export default function Logs() {
{/* Header */}
<div>
<div className='border-border border-b'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Time
</div>
@@ -493,14 +650,12 @@ export default function Logs() {
Workflow
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
ID
Cost
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Trigger
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Message
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Duration
</div>
@@ -547,7 +702,7 @@ export default function Logs() {
}`}
onClick={() => handleLogClick(log)}
>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
{/* Time */}
<div>
<div className='text-[13px]'>
@@ -584,10 +739,12 @@ export default function Logs() {
</div>
</div>
{/* ID */}
{/* Cost */}
<div>
<div className='font-medium text-muted-foreground text-xs'>
#{log.id.slice(-4)}
{typeof (log as any)?.cost?.total === 'number'
? `$${((log as any).cost.total as number).toFixed(4)}`
: '—'}
</div>
</div>
@@ -614,11 +771,6 @@ export default function Logs() {
)}
</div>
{/* Message */}
<div className='min-w-0'>
<div className='truncate font-[420] text-[13px]'>{log.message}</div>
</div>
{/* Duration */}
<div className='hidden xl:block'>
<div className='text-muted-foreground text-xs'>

View File

@@ -0,0 +1,131 @@
-- One-shot data migration to create/populate execution_data & cost, then drop legacy columns
-- Safe on reruns and across differing prior schemas
-- 1) Ensure execution_data exists (prefer rename if only metadata exists)
-- Idempotent column rename: if the legacy `metadata` column still exists and
-- `execution_data` has not been created yet, rename it in place so existing
-- JSON payloads carry over without copying. On databases where either
-- condition fails (already migrated, or fresh schema), this is a no-op and
-- the ADD COLUMN IF NOT EXISTS below creates the column instead.
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'workflow_execution_logs' AND column_name = 'metadata'
) AND NOT EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'workflow_execution_logs' AND column_name = 'execution_data'
) THEN
EXECUTE 'ALTER TABLE workflow_execution_logs RENAME COLUMN metadata TO execution_data';
END IF;
END $$;--> statement-breakpoint
-- Guarantee both target columns exist even when the rename above did not fire
-- (fresh database, or `metadata` never existed). `execution_data` defaults to
-- an empty JSON object and is NOT NULL; `cost` stays nullable — absence means
-- "no cost recorded" and is backfilled below where possible.
ALTER TABLE "workflow_execution_logs"
ADD COLUMN IF NOT EXISTS "execution_data" jsonb NOT NULL DEFAULT '{}'::jsonb,
ADD COLUMN IF NOT EXISTS "cost" jsonb;--> statement-breakpoint
-- 2) Backfill top-level cost from legacy numeric columns, tokenBreakdown/models, and traceSpans aggregates
-- Precedence when building the new `cost` jsonb: legacy numeric columns
-- (total_cost, total_input_cost, ...) win; span-level aggregates fill gaps;
-- jsonb_strip_nulls drops keys with no value so absent data stays absent.
-- `spans` recursively flattens execution_data->'traceSpans', following each
-- span's 'children' array, so nested spans contribute to the aggregates too.
WITH RECURSIVE spans AS (
SELECT l.id, s.span
FROM workflow_execution_logs l
LEFT JOIN LATERAL jsonb_array_elements(
COALESCE(
CASE
WHEN jsonb_typeof(l.execution_data->'traceSpans') = 'array' THEN l.execution_data->'traceSpans'
ELSE '[]'::jsonb
END
)
) s(span) ON true
UNION ALL
SELECT spans.id, c.span
FROM spans
JOIN LATERAL jsonb_array_elements(COALESCE(spans.span->'children','[]'::jsonb)) c(span) ON true
),
-- Per-log sums of span-level cost and token fields. Tokens prefer the
-- span's cost->tokens breakdown and fall back to the span's own 'tokens'.
agg AS (
SELECT id,
SUM(COALESCE((span->'cost'->>'input')::numeric,0)) AS agg_input,
SUM(COALESCE((span->'cost'->>'output')::numeric,0)) AS agg_output,
SUM(COALESCE((span->'cost'->>'total')::numeric,0)) AS agg_total,
SUM(COALESCE((span->'cost'->'tokens'->>'prompt')::numeric, COALESCE((span->'tokens'->>'prompt')::numeric,0))) AS agg_tokens_prompt,
SUM(COALESCE((span->'cost'->'tokens'->>'completion')::numeric, COALESCE((span->'tokens'->>'completion')::numeric,0))) AS agg_tokens_completion,
SUM(COALESCE((span->'cost'->'tokens'->>'total')::numeric, COALESCE((span->'tokens'->>'total')::numeric,0))) AS agg_tokens_total
FROM spans
GROUP BY id
),
-- One row per (log, model) for spans that carry a cost with a model name.
model_rows AS (
SELECT id,
(span->'cost'->>'model') AS model,
COALESCE((span->'cost'->>'input')::numeric,0) AS input,
COALESCE((span->'cost'->>'output')::numeric,0) AS output,
COALESCE((span->'cost'->>'total')::numeric,0) AS total,
COALESCE((span->'cost'->'tokens'->>'prompt')::numeric,0) AS tokens_prompt,
COALESCE((span->'cost'->'tokens'->>'completion')::numeric,0) AS tokens_completion,
COALESCE((span->'cost'->'tokens'->>'total')::numeric,0) AS tokens_total
FROM spans
WHERE span ? 'cost' AND (span->'cost'->>'model') IS NOT NULL
),
-- Collapse multiple spans for the same model into per-model totals.
model_sums AS (
SELECT id,
model,
SUM(input) AS input,
SUM(output) AS output,
SUM(total) AS total,
SUM(tokens_prompt) AS tokens_prompt,
SUM(tokens_completion) AS tokens_completion,
SUM(tokens_total) AS tokens_total
FROM model_rows
GROUP BY id, model
),
-- Fold per-model totals into one jsonb object keyed by model name, matching
-- the shape the application reads back as cost.models.
models AS (
SELECT id,
jsonb_object_agg(model, jsonb_build_object(
'input', input,
'output', output,
'total', total,
'tokens', jsonb_build_object(
'prompt', tokens_prompt,
'completion', tokens_completion,
'total', tokens_total
)
)) AS models
FROM model_sums
GROUP BY id
),
-- Legacy per-log token breakdown from execution_data; NULLIF treats stored
-- zeros as "absent" so span aggregates can take over.
tb AS (
SELECT l.id,
NULLIF((l.execution_data->'tokenBreakdown'->>'prompt')::numeric, 0) AS prompt,
NULLIF((l.execution_data->'tokenBreakdown'->>'completion')::numeric, 0) AS completion
FROM workflow_execution_logs l
)
-- Assemble the consolidated cost jsonb. Every log has a row in `agg`
-- (the LEFT JOIN LATERAL above emits a NULL-span row for logs without
-- trace spans), so the join on agg.id covers the whole table.
UPDATE workflow_execution_logs AS l
SET cost = jsonb_strip_nulls(
jsonb_build_object(
'total', COALESCE(l.total_cost, NULLIF(agg.agg_total,0)),
'input', COALESCE(l.total_input_cost, NULLIF(agg.agg_input,0)),
'output', COALESCE(l.total_output_cost, NULLIF(agg.agg_output,0)),
'tokens', CASE
WHEN l.total_tokens IS NOT NULL OR tb.prompt IS NOT NULL OR tb.completion IS NOT NULL OR NULLIF(agg.agg_tokens_total,0) IS NOT NULL THEN
jsonb_strip_nulls(
jsonb_build_object(
'total', COALESCE(l.total_tokens, NULLIF(agg.agg_tokens_total,0)),
'prompt', COALESCE(tb.prompt, NULLIF(agg.agg_tokens_prompt,0)),
'completion', COALESCE(tb.completion, NULLIF(agg.agg_tokens_completion,0))
)
)
ELSE NULL
END,
'models', models.models
)
)
FROM agg
LEFT JOIN models ON models.id = agg.id
LEFT JOIN tb ON tb.id = agg.id
WHERE l.id = agg.id;--> statement-breakpoint
-- 3) Drop legacy columns now that backfill is complete
-- WARNING: destructive and irreversible once applied — the legacy per-column
-- data survives only in the consolidated `cost` jsonb built above and in
-- `execution_data` (the renamed `metadata`). IF EXISTS keeps reruns safe.
ALTER TABLE "workflow_execution_logs"
DROP COLUMN IF EXISTS "message",
DROP COLUMN IF EXISTS "block_count",
DROP COLUMN IF EXISTS "success_count",
DROP COLUMN IF EXISTS "error_count",
DROP COLUMN IF EXISTS "skipped_count",
DROP COLUMN IF EXISTS "total_cost",
DROP COLUMN IF EXISTS "total_input_cost",
DROP COLUMN IF EXISTS "total_output_cost",
DROP COLUMN IF EXISTS "total_tokens",
DROP COLUMN IF EXISTS "metadata";

File diff suppressed because it is too large Load Diff

View File

@@ -526,6 +526,13 @@
"when": 1755319635487,
"tag": "0075_lush_moonstone",
"breakpoints": true
},
{
"idx": 76,
"version": "7",
"when": 1755375658161,
"tag": "0076_damp_vector",
"breakpoints": true
}
]
}

View File

@@ -282,24 +282,14 @@ export const workflowExecutionLogs = pgTable(
.references(() => workflowExecutionSnapshots.id),
level: text('level').notNull(), // 'info', 'error'
message: text('message').notNull(),
trigger: text('trigger').notNull(), // 'api', 'webhook', 'schedule', 'manual', 'chat'
startedAt: timestamp('started_at').notNull(),
endedAt: timestamp('ended_at'),
totalDurationMs: integer('total_duration_ms'),
blockCount: integer('block_count').notNull().default(0),
successCount: integer('success_count').notNull().default(0),
errorCount: integer('error_count').notNull().default(0),
skippedCount: integer('skipped_count').notNull().default(0),
totalCost: decimal('total_cost', { precision: 10, scale: 6 }),
totalInputCost: decimal('total_input_cost', { precision: 10, scale: 6 }),
totalOutputCost: decimal('total_output_cost', { precision: 10, scale: 6 }),
totalTokens: integer('total_tokens'),
metadata: jsonb('metadata').notNull().default('{}'),
executionData: jsonb('execution_data').notNull().default('{}'),
cost: jsonb('cost'),
files: jsonb('files'), // File metadata for execution files
createdAt: timestamp('created_at').notNull().defaultNow(),
},

View File

@@ -39,15 +39,10 @@ interface ExecutionEntry {
id: string
executionId: string
level: string
message: string
trigger: string
startedAt: string
endedAt: string | null
durationMs: number | null
blockCount: number
successCount: number
errorCount: number
skippedCount: number
totalCost: number | null
totalTokens: number | null
blockExecutions: BlockExecution[]
@@ -124,18 +119,12 @@ async function getWorkflowConsole(
id: workflowExecutionLogs.id,
executionId: workflowExecutionLogs.executionId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalTokens: workflowExecutionLogs.totalTokens,
metadata: workflowExecutionLogs.metadata,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
})
.from(workflowExecutionLogs)
.where(eq(workflowExecutionLogs.workflowId, workflowId))
@@ -144,9 +133,8 @@ async function getWorkflowConsole(
// Format the response with detailed block execution data
const formattedEntries: ExecutionEntry[] = executionLogs.map((log) => {
// Extract trace spans from metadata
const metadata = log.metadata as any
const traceSpans = metadata?.traceSpans || []
// Extract trace spans from execution data
const traceSpans = (log.executionData as any)?.traceSpans || []
const blockExecutions = extractBlockExecutionsFromTraceSpans(traceSpans)
// Try to find the final output from the last executed block
@@ -172,17 +160,12 @@ async function getWorkflowConsole(
id: log.id,
executionId: log.executionId,
level: log.level,
message: log.message,
trigger: log.trigger,
startedAt: log.startedAt.toISOString(),
endedAt: log.endedAt?.toISOString() || null,
durationMs: log.totalDurationMs,
blockCount: log.blockCount,
successCount: log.successCount,
errorCount: log.errorCount,
skippedCount: log.skippedCount || 0,
totalCost: log.totalCost ? Number.parseFloat(log.totalCost.toString()) : null,
totalTokens: log.totalTokens,
totalCost: (log.cost as any)?.total ?? null,
totalTokens: (log.cost as any)?.tokens?.total ?? null,
blockExecutions: includeDetails ? blockExecutions : [],
output: finalOutput,
}

View File

@@ -59,20 +59,11 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId,
stateSnapshotId: snapshotResult.snapshot.id,
level: 'info',
message: `${this.getTriggerPrefix(trigger.type)} execution started`,
trigger: trigger.type,
startedAt: startTime,
endedAt: null,
totalDurationMs: null,
blockCount: 0,
successCount: 0,
errorCount: 0,
skippedCount: 0,
totalCost: null,
totalInputCost: null,
totalOutputCost: null,
totalTokens: null,
metadata: {
executionData: {
environment,
trigger,
},
@@ -88,20 +79,11 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: workflowLog.executionId,
stateSnapshotId: workflowLog.stateSnapshotId,
level: workflowLog.level as 'info' | 'error',
message: workflowLog.message,
trigger: workflowLog.trigger as ExecutionTrigger['type'],
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString() || workflowLog.startedAt.toISOString(),
totalDurationMs: workflowLog.totalDurationMs || 0,
blockCount: workflowLog.blockCount,
successCount: workflowLog.successCount,
errorCount: workflowLog.errorCount,
skippedCount: workflowLog.skippedCount,
totalCost: Number(workflowLog.totalCost) || 0,
totalInputCost: Number(workflowLog.totalInputCost) || 0,
totalOutputCost: Number(workflowLog.totalOutputCost) || 0,
totalTokens: workflowLog.totalTokens || 0,
metadata: workflowLog.metadata as WorkflowExecutionLog['metadata'],
executionData: workflowLog.executionData as WorkflowExecutionLog['executionData'],
createdAt: workflowLog.createdAt.toISOString(),
},
snapshot: snapshotResult.snapshot,
@@ -151,7 +133,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
})
const level = hasErrors ? 'error' : 'info'
const message = hasErrors ? 'Workflow execution failed' : 'Workflow execution completed'
// Extract files from trace spans and final output
const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput)
@@ -160,19 +141,10 @@ export class ExecutionLogger implements IExecutionLoggerService {
.update(workflowExecutionLogs)
.set({
level,
message,
endedAt: new Date(endedAt),
totalDurationMs,
blockCount: 0,
successCount: 0,
errorCount: 0,
skippedCount: 0,
totalCost: costSummary.totalCost.toString(),
totalInputCost: costSummary.totalInputCost.toString(),
totalOutputCost: costSummary.totalOutputCost.toString(),
totalTokens: costSummary.totalTokens,
files: executionFiles.length > 0 ? executionFiles : null,
metadata: {
executionData: {
traceSpans,
finalOutput,
tokenBreakdown: {
@@ -182,6 +154,17 @@ export class ExecutionLogger implements IExecutionLoggerService {
},
models: costSummary.models,
},
cost: {
total: costSummary.totalCost,
input: costSummary.totalInputCost,
output: costSummary.totalOutputCost,
tokens: {
prompt: costSummary.totalPromptTokens,
completion: costSummary.totalCompletionTokens,
total: costSummary.totalTokens,
},
models: costSummary.models,
},
})
.where(eq(workflowExecutionLogs.executionId, executionId))
.returning()
@@ -205,20 +188,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: updatedLog.executionId,
stateSnapshotId: updatedLog.stateSnapshotId,
level: updatedLog.level as 'info' | 'error',
message: updatedLog.message,
trigger: updatedLog.trigger as ExecutionTrigger['type'],
startedAt: updatedLog.startedAt.toISOString(),
endedAt: updatedLog.endedAt?.toISOString() || endedAt,
totalDurationMs: updatedLog.totalDurationMs || totalDurationMs,
blockCount: updatedLog.blockCount,
successCount: updatedLog.successCount,
errorCount: updatedLog.errorCount,
skippedCount: updatedLog.skippedCount,
totalCost: Number(updatedLog.totalCost) || 0,
totalInputCost: Number(updatedLog.totalInputCost) || 0,
totalOutputCost: Number(updatedLog.totalOutputCost) || 0,
totalTokens: updatedLog.totalTokens || 0,
metadata: updatedLog.metadata as WorkflowExecutionLog['metadata'],
executionData: updatedLog.executionData as WorkflowExecutionLog['executionData'],
cost: updatedLog.cost as any,
createdAt: updatedLog.createdAt.toISOString(),
}
}
@@ -238,20 +213,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: workflowLog.executionId,
stateSnapshotId: workflowLog.stateSnapshotId,
level: workflowLog.level as 'info' | 'error',
message: workflowLog.message,
trigger: workflowLog.trigger as ExecutionTrigger['type'],
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString() || workflowLog.startedAt.toISOString(),
totalDurationMs: workflowLog.totalDurationMs || 0,
blockCount: workflowLog.blockCount,
successCount: workflowLog.successCount,
errorCount: workflowLog.errorCount,
skippedCount: workflowLog.skippedCount,
totalCost: Number(workflowLog.totalCost) || 0,
totalInputCost: Number(workflowLog.totalInputCost) || 0,
totalOutputCost: Number(workflowLog.totalOutputCost) || 0,
totalTokens: workflowLog.totalTokens || 0,
metadata: workflowLog.metadata as WorkflowExecutionLog['metadata'],
executionData: workflowLog.executionData as WorkflowExecutionLog['executionData'],
cost: workflowLog.cost as any,
createdAt: workflowLog.createdAt.toISOString(),
}
}

View File

@@ -82,19 +82,10 @@ export interface WorkflowExecutionLog {
executionId: string
stateSnapshotId: string
level: 'info' | 'error'
message: string
trigger: ExecutionTrigger['type']
startedAt: string
endedAt: string
totalDurationMs: number
blockCount: number
successCount: number
errorCount: number
skippedCount: number
totalCost: number
totalInputCost: number
totalOutputCost: number
totalTokens: number
files?: Array<{
id: string
name: string
@@ -107,9 +98,10 @@ export interface WorkflowExecutionLog {
storageProvider?: 's3' | 'blob' | 'local'
bucketName?: string
}>
metadata: {
environment: ExecutionEnvironment
trigger: ExecutionTrigger
// Execution details
executionData: {
environment?: ExecutionEnvironment
trigger?: ExecutionTrigger
traceSpans?: TraceSpan[]
errorDetails?: {
blockId: string
@@ -118,6 +110,22 @@ export interface WorkflowExecutionLog {
stackTrace?: string
}
}
// Top-level cost information
cost?: {
input?: number
output?: number
total?: number
tokens?: { prompt?: number; completion?: number; total?: number }
models?: Record<
string,
{
input?: number
output?: number
total?: number
tokens?: { prompt?: number; completion?: number; total?: number }
}
>
}
duration?: string
createdAt: string
}

View File

@@ -261,8 +261,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({
buildQueryParams: (page: number, limit: number) => {
const { workspaceId, timeRange, level, workflowIds, folderIds, searchQuery, triggers } = get()
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('limit', limit.toString())
params.set('offset', ((page - 1) * limit).toString())

View File

@@ -71,9 +71,8 @@ export interface TraceSpan {
export interface WorkflowLog {
id: string
workflowId: string
executionId: string | null
executionId?: string | null
level: string
message: string
duration: string | null
trigger: string | null
createdAt: string
@@ -90,10 +89,10 @@ export interface WorkflowLog {
storageProvider?: 's3' | 'blob' | 'local'
bucketName?: string
}>
metadata?: ToolCallMetadata & {
cost?: CostMetadata
executionData?: ToolCallMetadata & {
traceSpans?: TraceSpan[]
totalDuration?: number
cost?: CostMetadata
blockInput?: Record<string, any>
enhanced?: boolean