feat(dashboard): added stats endpoint to compute stats on server side and avoid limit (#2823)
* feat(dashboard): added stats endpoint to compute stats on server side and avoid limit
* updated query
apps/sim/app/api/logs/stats/route.ts (new file, 297 additions)
@@ -0,0 +1,297 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'

const logger = createLogger('LogsStatsAPI')

export const revalidate = 0

const StatsQueryParamsSchema = LogFilterParamsSchema.extend({
  segmentCount: z.coerce.number().optional().default(72),
})
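// Example request shape (workspace id is a placeholder; all other filter params
// come from LogFilterParamsSchema, and segmentCount is coerced from the query string):
//   GET /api/logs/stats?workspaceId=ws_123&level=error&segmentCount=96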
export interface SegmentStats {
  timestamp: string
  totalExecutions: number
  successfulExecutions: number
  avgDurationMs: number
}

export interface WorkflowStats {
  workflowId: string
  workflowName: string
  segments: SegmentStats[]
  overallSuccessRate: number
  totalExecutions: number
  totalSuccessful: number
}

export interface DashboardStatsResponse {
  workflows: WorkflowStats[]
  aggregateSegments: SegmentStats[]
  totalRuns: number
  totalErrors: number
  avgLatency: number
  timeBounds: {
    start: string
    end: string
  }
  segmentMs: number
}
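// For orientation, a response might look like this (values are illustrative only):
// {
//   "workflows": [{ "workflowId": "wf_1", "workflowName": "Sync", "segments": [...],
//     "overallSuccessRate": 98.5, "totalExecutions": 200, "totalSuccessful": 197 }],
//   "aggregateSegments": [...],
//   "totalRuns": 200, "totalErrors": 3, "avgLatency": 412.7,
//   "timeBounds": { "start": "2026-04-05T00:00:00.000Z", "end": "2026-04-06T00:00:00.000Z" },
//   "segmentMs": 1200000
// }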
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized logs stats access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id

    try {
      const { searchParams } = new URL(request.url)
      const params = StatsQueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

      const workspaceFilter = eq(workflowExecutionLogs.workspaceId, params.workspaceId)

      const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: true })
      const whereCondition = commonFilters ? and(workspaceFilter, commonFilters) : workspaceFilter

      const boundsQuery = await db
        .select({
          minTime: sql<string>`MIN(${workflowExecutionLogs.startedAt})`,
          maxTime: sql<string>`MAX(${workflowExecutionLogs.startedAt})`,
        })
        .from(workflowExecutionLogs)
        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
        .innerJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
            eq(permissions.userId, userId)
          )
        )
        .where(whereCondition)

      const bounds = boundsQuery[0]
      const now = new Date()

      let startTime: Date
      let endTime: Date

      if (!bounds?.minTime || !bounds?.maxTime) {
        endTime = now
        startTime = new Date(now.getTime() - 24 * 60 * 60 * 1000)
      } else {
        startTime = new Date(bounds.minTime)
        endTime = new Date(Math.max(new Date(bounds.maxTime).getTime(), now.getTime()))
      }

      const totalMs = Math.max(1, endTime.getTime() - startTime.getTime())
      const segmentMs = Math.max(60000, Math.floor(totalMs / params.segmentCount))
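      // Worked example: a 24h window at the default segmentCount of 72 gives
      // totalMs = 86_400_000 and segmentMs = max(60_000, floor(86_400_000 / 72))
      // = 1_200_000 ms, i.e. 20-minute buckets (never finer than one minute).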
      const statsQuery = await db
        .select({
          workflowId: workflowExecutionLogs.workflowId,
          workflowName: workflow.name,
          segmentIndex:
            sql<number>`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`.as(
              'segment_index'
            ),
          totalExecutions: sql<number>`COUNT(*)`.as('total_executions'),
          successfulExecutions:
            sql<number>`COUNT(*) FILTER (WHERE ${workflowExecutionLogs.level} != 'error')`.as(
              'successful_executions'
            ),
          avgDurationMs:
            sql<number>`COALESCE(AVG(${workflowExecutionLogs.totalDurationMs}) FILTER (WHERE ${workflowExecutionLogs.totalDurationMs} > 0), 0)`.as(
              'avg_duration_ms'
            ),
        })
        .from(workflowExecutionLogs)
        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
        .innerJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
            eq(permissions.userId, userId)
          )
        )
        .where(whereCondition)
        .groupBy(
          workflowExecutionLogs.workflowId,
          workflow.name,
          sql`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`
        )
        .orderBy(workflowExecutionLogs.workflowId, sql`segment_index`)
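      // The builder above compiles to roughly this SQL shape (a sketch, not
      // Drizzle's exact output):
      //   SELECT workflow_id, name,
      //          FLOOR(EXTRACT(EPOCH FROM (started_at - $start)) * 1000 / $segmentMs) AS segment_index,
      //          COUNT(*) AS total_executions,
      //          COUNT(*) FILTER (WHERE level != 'error') AS successful_executions,
      //          COALESCE(AVG(total_duration_ms) FILTER (WHERE total_duration_ms > 0), 0) AS avg_duration_ms
      //   FROM workflow_execution_logs
      //   JOIN workflow ON ...
      //   JOIN permissions ON ...   (scopes rows to workspaces the user can read)
      //   WHERE ... GROUP BY workflow_id, name, segment_index
      //   ORDER BY workflow_id, segment_index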
      const workflowMap = new Map<
        string,
        {
          workflowId: string
          workflowName: string
          segments: Map<number, SegmentStats>
          totalExecutions: number
          totalSuccessful: number
        }
      >()

      for (const row of statsQuery) {
        const segmentIndex = Math.min(
          params.segmentCount - 1,
          Math.max(0, Math.floor(Number(row.segmentIndex)))
        )

        if (!workflowMap.has(row.workflowId)) {
          workflowMap.set(row.workflowId, {
            workflowId: row.workflowId,
            workflowName: row.workflowName,
            segments: new Map(),
            totalExecutions: 0,
            totalSuccessful: 0,
          })
        }

        const wf = workflowMap.get(row.workflowId)!
        wf.totalExecutions += Number(row.totalExecutions)
        wf.totalSuccessful += Number(row.successfulExecutions)

        const existing = wf.segments.get(segmentIndex)
        if (existing) {
          const oldTotal = existing.totalExecutions
          const newTotal = oldTotal + Number(row.totalExecutions)
          existing.totalExecutions = newTotal
          existing.successfulExecutions += Number(row.successfulExecutions)
          existing.avgDurationMs =
            newTotal > 0
              ? (existing.avgDurationMs * oldTotal +
                  Number(row.avgDurationMs || 0) * Number(row.totalExecutions)) /
                newTotal
              : 0
        } else {
          wf.segments.set(segmentIndex, {
            timestamp: new Date(startTime.getTime() + segmentIndex * segmentMs).toISOString(),
            totalExecutions: Number(row.totalExecutions),
            successfulExecutions: Number(row.successfulExecutions),
            avgDurationMs: Number(row.avgDurationMs || 0),
          })
        }
      }
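      // Merge check for the weighted mean above: combining a row with avg=100ms over
      // 2 runs and a row with avg=200ms over 1 run yields (100*2 + 200*1) / 3 ≈ 133.33ms,
      // as expected for an execution-weighted average.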
      const workflows: WorkflowStats[] = []
      for (const wf of workflowMap.values()) {
        const segments: SegmentStats[] = []
        for (let i = 0; i < params.segmentCount; i++) {
          const existing = wf.segments.get(i)
          if (existing) {
            segments.push(existing)
          } else {
            segments.push({
              timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
              totalExecutions: 0,
              successfulExecutions: 0,
              avgDurationMs: 0,
            })
          }
        }

        workflows.push({
          workflowId: wf.workflowId,
          workflowName: wf.workflowName,
          segments,
          totalExecutions: wf.totalExecutions,
          totalSuccessful: wf.totalSuccessful,
          overallSuccessRate:
            wf.totalExecutions > 0 ? (wf.totalSuccessful / wf.totalExecutions) * 100 : 100,
        })
      }

      workflows.sort((a, b) => {
        const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
        const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
        if (errA !== errB) return errB - errA
        return a.workflowName.localeCompare(b.workflowName)
      })

      const aggregateSegments: SegmentStats[] = []
      let totalRuns = 0
      let totalErrors = 0
      let weightedLatencySum = 0
      let latencyCount = 0

      for (let i = 0; i < params.segmentCount; i++) {
        let segTotal = 0
        let segSuccess = 0
        let segWeightedLatency = 0
        let segLatencyCount = 0

        for (const wf of workflows) {
          const seg = wf.segments[i]
          segTotal += seg.totalExecutions
          segSuccess += seg.successfulExecutions
          if (seg.avgDurationMs > 0 && seg.totalExecutions > 0) {
            segWeightedLatency += seg.avgDurationMs * seg.totalExecutions
            segLatencyCount += seg.totalExecutions
          }
        }

        totalRuns += segTotal
        totalErrors += segTotal - segSuccess
        weightedLatencySum += segWeightedLatency
        latencyCount += segLatencyCount

        aggregateSegments.push({
          timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
          totalExecutions: segTotal,
          successfulExecutions: segSuccess,
          avgDurationMs: segLatencyCount > 0 ? segWeightedLatency / segLatencyCount : 0,
        })
      }

      const avgLatency = latencyCount > 0 ? weightedLatencySum / latencyCount : 0

      const response: DashboardStatsResponse = {
        workflows,
        aggregateSegments,
        totalRuns,
        totalErrors,
        avgLatency,
        timeBounds: {
          start: startTime.toISOString(),
          end: endTime.toISOString(),
        },
        segmentMs,
      }

      return NextResponse.json(response, { status: 200 })
    } catch (validationError) {
      if (validationError instanceof z.ZodError) {
        logger.warn(`[${requestId}] Invalid logs stats request parameters`, {
          errors: validationError.errors,
        })
        return NextResponse.json(
          {
            error: 'Invalid request parameters',
            details: validationError.errors,
          },
          { status: 400 }
        )
      }
      throw validationError
    }
  } catch (error: any) {
    logger.error(`[${requestId}] logs stats fetch error`, error)
    return NextResponse.json({ error: error.message }, { status: 500 })
  }
}
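A minimal client-side call against the new endpoint, as a sketch (the workspace id is a placeholder; in the app the useDashboardStats hook below wraps this):

const res = await fetch('/api/logs/stats?workspaceId=ws_123&segmentCount=72')
if (!res.ok) throw new Error(`stats request failed: ${res.status}`)
const stats: DashboardStatsResponse = await res.json()
console.log(stats.totalRuns, stats.totalErrors, Math.round(stats.avgLatency))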
@@ -3,9 +3,9 @@
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
 import { Loader2 } from 'lucide-react'
 import { Skeleton } from '@/components/ui/skeleton'
-import { formatLatency, parseDuration } from '@/app/workspace/[workspaceId]/logs/utils'
+import { formatLatency } from '@/app/workspace/[workspaceId]/logs/utils'
+import type { DashboardStatsResponse, WorkflowStats } from '@/hooks/queries/logs'
 import { useFilterStore } from '@/stores/logs/filters/store'
 import type { WorkflowLog } from '@/stores/logs/filters/types'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 import { LineChart, WorkflowsList } from './components'
@@ -26,10 +26,6 @@ interface WorkflowExecution {
   overallSuccessRate: number
 }
 
-const DEFAULT_SEGMENTS = 72
-const MIN_SEGMENT_PX = 10
-const MIN_SEGMENT_MS = 60000
-
 const SKELETON_BAR_HEIGHTS = [
   45, 72, 38, 85, 52, 68, 30, 90, 55, 42, 78, 35, 88, 48, 65, 28, 82, 58, 40, 75, 32, 95, 50, 70,
 ]
@@ -120,13 +116,32 @@ function DashboardSkeleton() {
 }
 
 interface DashboardProps {
-  logs: WorkflowLog[]
+  stats?: DashboardStatsResponse
   isLoading: boolean
   error?: Error | null
 }
 
-export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
-  const [segmentCount, setSegmentCount] = useState<number>(DEFAULT_SEGMENTS)
+/**
+ * Converts server WorkflowStats to the internal WorkflowExecution format.
+ */
+function toWorkflowExecution(wf: WorkflowStats): WorkflowExecution {
+  return {
+    workflowId: wf.workflowId,
+    workflowName: wf.workflowName,
+    overallSuccessRate: wf.overallSuccessRate,
+    segments: wf.segments.map((seg) => ({
+      timestamp: seg.timestamp,
+      totalExecutions: seg.totalExecutions,
+      successfulExecutions: seg.successfulExecutions,
+      hasExecutions: seg.totalExecutions > 0,
+      successRate:
+        seg.totalExecutions > 0 ? (seg.successfulExecutions / seg.totalExecutions) * 100 : 100,
+      avgDurationMs: seg.avgDurationMs,
+    })),
+  }
+}
+
+export default function Dashboard({ stats, isLoading, error }: DashboardProps) {
   const [selectedSegments, setSelectedSegments] = useState<Record<string, number[]>>({})
   const [lastAnchorIndices, setLastAnchorIndices] = useState<Record<string, number>>({})
   const barsAreaRef = useRef<HTMLDivElement | null>(null)
@@ -137,182 +152,32 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
 
   const expandedWorkflowId = workflowIds.length === 1 ? workflowIds[0] : null
 
-  const lastExecutionByWorkflow = useMemo(() => {
-    const map = new Map<string, number>()
-    for (const log of logs) {
-      const wfId = log.workflowId
-      if (!wfId) continue
-      const ts = new Date(log.createdAt).getTime()
-      const existing = map.get(wfId)
-      if (!existing || ts > existing) {
-        map.set(wfId, ts)
-      }
-    }
-    return map
-  }, [logs])
-
-  const timeBounds = useMemo(() => {
-    if (logs.length === 0) {
-      const now = new Date()
-      return { start: now, end: now }
-    }
-
-    let minTime = Number.POSITIVE_INFINITY
-    let maxTime = Number.NEGATIVE_INFINITY
-
-    for (const log of logs) {
-      const ts = new Date(log.createdAt).getTime()
-      if (ts < minTime) minTime = ts
-      if (ts > maxTime) maxTime = ts
-    }
-
-    const end = new Date(Math.max(maxTime, Date.now()))
-    const start = new Date(minTime)
-
-    return { start, end }
-  }, [logs])
-
   const { executions, aggregateSegments, segmentMs } = useMemo(() => {
-    const allWorkflowsList = Object.values(allWorkflows)
-
-    if (allWorkflowsList.length === 0) {
+    if (!stats) {
       return { executions: [], aggregateSegments: [], segmentMs: 0 }
     }
 
-    const { start, end } =
-      logs.length > 0
-        ? timeBounds
-        : { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() }
-
-    const totalMs = Math.max(1, end.getTime() - start.getTime())
-    const calculatedSegmentMs = Math.max(
-      MIN_SEGMENT_MS,
-      Math.floor(totalMs / Math.max(1, segmentCount))
-    )
-
-    const logsByWorkflow = new Map<string, WorkflowLog[]>()
-    for (const log of logs) {
-      const wfId = log.workflowId
-      if (!logsByWorkflow.has(wfId)) {
-        logsByWorkflow.set(wfId, [])
-      }
-      logsByWorkflow.get(wfId)!.push(log)
-    }
-
-    const workflowExecutions: WorkflowExecution[] = []
-
-    for (const workflow of allWorkflowsList) {
-      const workflowLogs = logsByWorkflow.get(workflow.id) || []
-
-      const segments: WorkflowExecution['segments'] = Array.from(
-        { length: segmentCount },
-        (_, i) => ({
-          timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
-          hasExecutions: false,
-          totalExecutions: 0,
-          successfulExecutions: 0,
-          successRate: 100,
-          avgDurationMs: 0,
-        })
-      )
-
-      const durations: number[][] = Array.from({ length: segmentCount }, () => [])
-
-      for (const log of workflowLogs) {
-        const logTime = new Date(log.createdAt).getTime()
-        const idx = Math.min(
-          segmentCount - 1,
-          Math.max(0, Math.floor((logTime - start.getTime()) / calculatedSegmentMs))
-        )
-
-        segments[idx].totalExecutions += 1
-        segments[idx].hasExecutions = true
-
-        if (log.level !== 'error') {
-          segments[idx].successfulExecutions += 1
-        }
-
-        const duration = parseDuration({ duration: log.duration ?? undefined })
-        if (duration !== null && duration > 0) {
-          durations[idx].push(duration)
-        }
-      }
-
-      let totalExecs = 0
-      let totalSuccess = 0
-
-      for (let i = 0; i < segmentCount; i++) {
-        const seg = segments[i]
-        totalExecs += seg.totalExecutions
-        totalSuccess += seg.successfulExecutions
-
-        if (seg.totalExecutions > 0) {
-          seg.successRate = (seg.successfulExecutions / seg.totalExecutions) * 100
-        }
-
-        if (durations[i].length > 0) {
-          seg.avgDurationMs = Math.round(
-            durations[i].reduce((sum, d) => sum + d, 0) / durations[i].length
-          )
-        }
-      }
-
-      const overallSuccessRate = totalExecs > 0 ? (totalSuccess / totalExecs) * 100 : 100
-
-      workflowExecutions.push({
-        workflowId: workflow.id,
-        workflowName: workflow.name,
-        segments,
-        overallSuccessRate,
-      })
-    }
-
-    workflowExecutions.sort((a, b) => {
-      const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
-      const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
-      if (errA !== errB) return errB - errA
-      return a.workflowName.localeCompare(b.workflowName)
-    })
-
-    const aggSegments: {
-      timestamp: string
-      totalExecutions: number
-      successfulExecutions: number
-      avgDurationMs: number
-    }[] = Array.from({ length: segmentCount }, (_, i) => ({
-      timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
-      totalExecutions: 0,
-      successfulExecutions: 0,
-      avgDurationMs: 0,
-    }))
-
-    const weightedDurationSums: number[] = Array(segmentCount).fill(0)
-    const executionCounts: number[] = Array(segmentCount).fill(0)
-
-    for (const wf of workflowExecutions) {
-      wf.segments.forEach((s, i) => {
-        aggSegments[i].totalExecutions += s.totalExecutions
-        aggSegments[i].successfulExecutions += s.successfulExecutions
-
-        if (s.avgDurationMs && s.avgDurationMs > 0 && s.totalExecutions > 0) {
-          weightedDurationSums[i] += s.avgDurationMs * s.totalExecutions
-          executionCounts[i] += s.totalExecutions
-        }
-      })
-    }
-
-    aggSegments.forEach((seg, i) => {
-      if (executionCounts[i] > 0) {
-        seg.avgDurationMs = weightedDurationSums[i] / executionCounts[i]
-      }
-    })
+    const workflowExecutions = stats.workflows.map(toWorkflowExecution)
 
     return {
       executions: workflowExecutions,
-      aggregateSegments: aggSegments,
-      segmentMs: calculatedSegmentMs,
+      aggregateSegments: stats.aggregateSegments,
+      segmentMs: stats.segmentMs,
     }
-  }, [logs, timeBounds, segmentCount, allWorkflows])
+  }, [stats])
+
+  const lastExecutionByWorkflow = useMemo(() => {
+    const map = new Map<string, number>()
+    for (const wf of executions) {
+      for (let i = wf.segments.length - 1; i >= 0; i--) {
+        if (wf.segments[i].totalExecutions > 0) {
+          map.set(wf.workflowId, new Date(wf.segments[i].timestamp).getTime())
+          break
+        }
+      }
+    }
+    return map
+  }, [executions])
 
   const filteredExecutions = useMemo(() => {
     let filtered = executions
@@ -511,37 +376,12 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
   useEffect(() => {
     setSelectedSegments({})
     setLastAnchorIndices({})
-  }, [logs, timeRange, workflowIds, searchQuery])
-
-  useEffect(() => {
-    if (!barsAreaRef.current) return
-    const el = barsAreaRef.current
-    let debounceId: ReturnType<typeof setTimeout> | null = null
-    const ro = new ResizeObserver(([entry]) => {
-      const w = entry?.contentRect?.width || 720
-      const n = Math.max(36, Math.min(96, Math.floor(w / MIN_SEGMENT_PX)))
-      if (debounceId) clearTimeout(debounceId)
-      debounceId = setTimeout(() => {
-        setSegmentCount(n)
-      }, 150)
-    })
-    ro.observe(el)
-    const rect = el.getBoundingClientRect()
-    if (rect?.width) {
-      const n = Math.max(36, Math.min(96, Math.floor(rect.width / MIN_SEGMENT_PX)))
-      setSegmentCount(n)
-    }
-    return () => {
-      if (debounceId) clearTimeout(debounceId)
-      ro.disconnect()
-    }
-  }, [])
+  }, [stats, timeRange, workflowIds, searchQuery])
 
   if (isLoading) {
     return <DashboardSkeleton />
   }
 
   // Show error state
   if (error) {
     return (
       <div className='mt-[24px] flex flex-1 items-center justify-center'>
@@ -11,7 +11,7 @@ import {
 } from '@/lib/logs/filters'
 import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
 import { useFolders } from '@/hooks/queries/folders'
-import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs'
+import { useDashboardStats, useLogDetail, useLogsList } from '@/hooks/queries/logs'
 import { useDebounce } from '@/hooks/use-debounce'
 import { useFilterStore } from '@/stores/logs/filters/store'
 import type { WorkflowLog } from '@/stores/logs/filters/types'
@@ -130,7 +130,7 @@ export default function Logs() {
     [timeRange, startDate, endDate, level, workflowIds, folderIds, triggers, debouncedSearchQuery]
   )
 
-  const dashboardLogsQuery = useDashboardLogs(workspaceId, dashboardFilters, {
+  const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, {
     enabled: Boolean(workspaceId) && isInitialized.current,
     refetchInterval: isLive ? 5000 : false,
   })
@@ -417,9 +417,9 @@ export default function Logs() {
           className={cn('flex min-h-0 flex-1 flex-col pr-[24px]', !isDashboardView && 'hidden')}
         >
           <Dashboard
-            logs={dashboardLogsQuery.data ?? []}
-            isLoading={!dashboardLogsQuery.data}
-            error={dashboardLogsQuery.error}
+            stats={dashboardStatsQuery.data}
+            isLoading={dashboardStatsQuery.isLoading}
+            error={dashboardStatsQuery.error}
           />
         </div>
@@ -199,7 +199,6 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
       switch (params.operation) {
         case 'create_runs_batch':
           return 'langsmith_create_runs_batch'
         case 'create_run':
         default:
           return 'langsmith_create_run'
       }
@@ -1,8 +1,15 @@
 import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query'
+import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters'
 import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
+import type {
+  DashboardStatsResponse,
+  SegmentStats,
+  WorkflowStats,
+} from '@/app/api/logs/stats/route'
 import type { LogsResponse, TimeRange, WorkflowLog } from '@/stores/logs/filters/types'
+
+export type { DashboardStatsResponse, SegmentStats, WorkflowStats }
 
 export const logKeys = {
   all: ['logs'] as const,
   lists: () => [...logKeys.all, 'list'] as const,
@@ -10,8 +17,8 @@ export const logKeys = {
     [...logKeys.lists(), workspaceId ?? '', filters] as const,
   details: () => [...logKeys.all, 'detail'] as const,
   detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const,
-  dashboard: (workspaceId: string | undefined, filters: Record<string, unknown>) =>
-    [...logKeys.all, 'dashboard', workspaceId ?? '', filters] as const,
+  stats: (workspaceId: string | undefined, filters: object) =>
+    [...logKeys.all, 'stats', workspaceId ?? '', filters] as const,
   executionSnapshots: () => [...logKeys.all, 'executionSnapshot'] as const,
   executionSnapshot: (executionId: string | undefined) =>
     [...logKeys.executionSnapshots(), executionId ?? ''] as const,
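One payoff of the key factory: related cache entries can be invalidated by key prefix. A hypothetical example (queryClient comes from @tanstack/react-query's useQueryClient; workspaceId is a placeholder):

// Drop every cached stats query for this workspace, across all filter combinations:
queryClient.invalidateQueries({ queryKey: [...logKeys.all, 'stats', workspaceId] })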
@@ -147,52 +154,96 @@ export function useLogDetail(logId: string | undefined) {
   })
 }
 
-const DASHBOARD_LOGS_LIMIT = 10000
+interface DashboardFilters {
+  timeRange: TimeRange
+  startDate?: string
+  endDate?: string
+  level: string
+  workflowIds: string[]
+  folderIds: string[]
+  triggers: string[]
+  searchQuery: string
+  segmentCount?: number
+}
 
 /**
- * Fetches all logs for dashboard metrics (non-paginated).
- * Uses same filters as the logs list but with a high limit to get all data.
+ * Fetches aggregated dashboard statistics from the server.
+ * Uses SQL aggregation for efficient computation without row limits.
  */
-async function fetchAllLogs(
+async function fetchDashboardStats(
   workspaceId: string,
-  filters: Omit<LogFilters, 'limit'>
-): Promise<WorkflowLog[]> {
+  filters: DashboardFilters
+): Promise<DashboardStatsResponse> {
   const params = new URLSearchParams()
 
   params.set('workspaceId', workspaceId)
-  params.set('limit', DASHBOARD_LOGS_LIMIT.toString())
-  params.set('offset', '0')
 
-  applyFilterParams(params, filters)
+  if (filters.segmentCount) {
+    params.set('segmentCount', filters.segmentCount.toString())
+  }
 
-  const response = await fetch(`/api/logs?${params.toString()}`)
+  if (filters.level !== 'all') {
+    params.set('level', filters.level)
+  }
 
-  if (!response.ok) {
-    throw new Error('Failed to fetch logs for dashboard')
+  if (filters.triggers.length > 0) {
+    params.set('triggers', filters.triggers.join(','))
+  }
+
+  if (filters.workflowIds.length > 0) {
+    params.set('workflowIds', filters.workflowIds.join(','))
+  }
+
+  if (filters.folderIds.length > 0) {
+    params.set('folderIds', filters.folderIds.join(','))
   }
 
-  const apiData: LogsResponse = await response.json()
-  return apiData.data || []
+  const startDate = getStartDateFromTimeRange(filters.timeRange, filters.startDate)
+  if (startDate) {
+    params.set('startDate', startDate.toISOString())
+  }
+
+  const endDate = getEndDateFromTimeRange(filters.timeRange, filters.endDate)
+  if (endDate) {
+    params.set('endDate', endDate.toISOString())
+  }
+
+  if (filters.searchQuery.trim()) {
+    const parsedQuery = parseQuery(filters.searchQuery.trim())
+    const searchParams = queryToApiParams(parsedQuery)
+
+    for (const [key, value] of Object.entries(searchParams)) {
+      params.set(key, value)
+    }
+  }
+
+  const response = await fetch(`/api/logs/stats?${params.toString()}`)
+
+  if (!response.ok) {
+    throw new Error('Failed to fetch dashboard stats')
+  }
+
+  return response.json()
 }
 
-interface UseDashboardLogsOptions {
+interface UseDashboardStatsOptions {
   enabled?: boolean
   refetchInterval?: number | false
 }
 
 /**
- * Hook for fetching all logs for dashboard metrics.
- * Unlike useLogsList, this fetches all logs in a single request
- * to ensure dashboard metrics are computed from complete data.
+ * Hook for fetching aggregated dashboard statistics.
+ * Uses server-side SQL aggregation for efficient computation
+ * without any row limits - all matching logs are included in the stats.
  */
-export function useDashboardLogs(
+export function useDashboardStats(
   workspaceId: string | undefined,
-  filters: Omit<LogFilters, 'limit'>,
-  options?: UseDashboardLogsOptions
+  filters: DashboardFilters,
+  options?: UseDashboardStatsOptions
 ) {
   return useQuery({
-    queryKey: logKeys.dashboard(workspaceId, filters),
-    queryFn: () => fetchAllLogs(workspaceId as string, filters),
+    queryKey: logKeys.stats(workspaceId, filters),
+    queryFn: () => fetchDashboardStats(workspaceId as string, filters),
     enabled: Boolean(workspaceId) && (options?.enabled ?? true),
     refetchInterval: options?.refetchInterval ?? false,
     staleTime: 0,
   })
 }
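For reference, a hypothetical call site for the hook (the workspace id and the time-range literal are placeholders; TimeRange's actual union members live in '@/stores/logs/filters/types', and DashboardFilters is module-internal here):

const filters: DashboardFilters = {
  timeRange: 'past24h' as TimeRange, // placeholder literal
  level: 'all',
  workflowIds: [],
  folderIds: [],
  triggers: [],
  searchQuery: '',
  segmentCount: 72,
}

const { data: stats, isLoading, error } = useDashboardStats('ws_123', filters, {
  refetchInterval: 5000, // poll while live mode is on
})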