Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 07:27:57 -05:00)
feat(performance): add React Query hooks for workflow operations and logs, fix logs reloading, fix subscription UI (#2017)
* feat(performance): add React Query hooks for workflow operations and logs, fix logs reloading, fix subscription UI
* use useInfiniteQuery for logs fetching
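The commit replaces hand-rolled fetch calls and pagination state with React Query. The hooks the diff imports from @/hooks/queries/logs (useExecutionsMetrics, useGlobalDashboardLogs, useWorkflowDashboardLogs, useLogsList, useLogDetail) are defined outside this diff; what follows is a minimal sketch of how an offset-paginated logs hook could be built on useInfiniteQuery, with the response shape and option names assumed (TanStack Query v5 style) rather than taken from the repo:

// Hypothetical sketch; the real hooks live in @/hooks/queries/logs and may differ.
import { useInfiniteQuery } from '@tanstack/react-query'

interface LogsPage {
  logs: Array<{ id: string }>
}

export function useGlobalDashboardLogs(
  filters: { workspaceId: string; limit: number },
  options?: { enabled?: boolean }
) {
  return useInfiniteQuery({
    // The filter object is part of the key, so changing any filter refetches automatically.
    queryKey: ['logs', 'dashboard', filters],
    queryFn: async ({ pageParam }) => {
      const qp = new URLSearchParams({
        workspaceId: filters.workspaceId,
        limit: String(filters.limit),
        offset: String(pageParam),
      })
      const res = await fetch(`/api/logs?${qp.toString()}`)
      if (!res.ok) throw new Error('Failed to fetch logs')
      return (await res.json()) as LogsPage
    },
    initialPageParam: 0,
    // A full page suggests more rows exist; the next offset is the total fetched so far.
    getNextPageParam: (lastPage, allPages) =>
      lastPage.logs.length === filters.limit
        ? allPages.reduce((total, page) => total + page.logs.length, 0)
        : undefined,
    enabled: options?.enabled ?? true,
  })
}

Components then read logsQuery.data.pages, hasNextPage, and isFetchingNextPage instead of keeping their own offset/hasMore state, which is exactly the substitution the hunks below make.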
@@ -118,18 +118,18 @@ export async function POST(req: NextRequest) {

    logger.info(`[${requestId}] Creating workflow ${workflowId} for user ${session.user.id}`)

    // Track workflow creation
    try {
      const { trackPlatformEvent } = await import('@/lib/telemetry/tracer')
      trackPlatformEvent('platform.workflow.created', {
        'workflow.id': workflowId,
        'workflow.name': name,
        'workflow.has_workspace': !!workspaceId,
        'workflow.has_folder': !!folderId,
    import('@/lib/telemetry/tracer')
      .then(({ trackPlatformEvent }) => {
        trackPlatformEvent('platform.workflow.created', {
          'workflow.id': workflowId,
          'workflow.name': name,
          'workflow.has_workspace': !!workspaceId,
          'workflow.has_folder': !!folderId,
        })
      })
      .catch(() => {
        // Silently fail
      })
    } catch (_e) {
      // Silently fail
    }

    await db.insert(workflow).values({
      id: workflowId,
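In the workflow-creation route the telemetry call stops being awaited: the dynamic import plus .then/.catch chain runs in the background, so a slow or failing telemetry module can no longer delay the db.insert that follows. The same fire-and-forget pattern, pulled out into a helper for illustration (hypothetical; the commit inlines it):

// Hypothetical helper; the commit inlines this pattern rather than extracting it.
function trackInBackground(event: string, attrs: Record<string, unknown>): void {
  import('@/lib/telemetry/tracer')
    .then(({ trackPlatformEvent }) => trackPlatformEvent(event, attrs))
    .catch(() => {
      // Swallow telemetry failures: tracking must never break the request path.
    })
}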
@@ -23,6 +23,7 @@ import '@/components/emcn/components/code/code.css'

interface LogSidebarProps {
  log: WorkflowLog | null
  isOpen: boolean
  isLoadingDetails?: boolean
  onClose: () => void
  onNavigateNext?: () => void
  onNavigatePrev?: () => void

@@ -192,6 +193,7 @@ const BlockContentDisplay = ({

export function Sidebar({
  log,
  isOpen,
  isLoadingDetails = false,
  onClose,
  onNavigateNext,
  onNavigatePrev,

@@ -219,15 +221,6 @@ export function Sidebar({

    }
  }, [log?.id])

  const isLoadingDetails = useMemo(() => {
    if (!log) return false
    // Only show while we expect details to arrive (has executionId)
    if (!log.executionId) return false
    const hasEnhanced = !!log.executionData?.enhanced
    const hasAnyDetails = hasEnhanced || !!log.cost || Array.isArray(log.executionData?.traceSpans)
    return !hasAnyDetails
  }, [log])

  const formattedContent = useMemo(() => {
    if (!log) return null

@@ -3,7 +3,6 @@

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Loader2 } from 'lucide-react'
import { useParams, useRouter, useSearchParams } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { soehne } from '@/app/fonts/soehne/soehne'
import Controls from '@/app/workspace/[workspaceId]/logs/components/dashboard/controls'
import KPIs from '@/app/workspace/[workspaceId]/logs/components/dashboard/kpis'

@@ -11,12 +10,15 @@ import WorkflowDetails from '@/app/workspace/[workspaceId]/logs/components/dashb

import WorkflowsList from '@/app/workspace/[workspaceId]/logs/components/dashboard/workflows-list'
import Timeline from '@/app/workspace/[workspaceId]/logs/components/filters/components/timeline'
import { mapToExecutionLog, mapToExecutionLogAlt } from '@/app/workspace/[workspaceId]/logs/utils'
import {
  useExecutionsMetrics,
  useGlobalDashboardLogs,
  useWorkflowDashboardLogs,
} from '@/hooks/queries/logs'
import { formatCost } from '@/providers/utils'
import { useFilterStore } from '@/stores/logs/filters/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('Dashboard')

type TimeFilter = '30m' | '1h' | '6h' | '12h' | '24h' | '3d' | '7d' | '14d' | '30d'

interface WorkflowExecution {

@@ -59,15 +61,6 @@ interface ExecutionLog {

  workflowColor?: string
}

interface WorkflowDetailsDataLocal {
  errorRates: { timestamp: string; value: number }[]
  durations: { timestamp: string; value: number }[]
  executionCounts: { timestamp: string; value: number }[]
  logs: ExecutionLog[]
  allLogs: ExecutionLog[]
  __meta?: { offset: number; hasMore: boolean }
}

export default function Dashboard() {
  const params = useParams()
  const workspaceId = params.workspaceId as string

@@ -99,23 +92,7 @@ export default function Dashboard() {

    }
  }
  const [endTime, setEndTime] = useState<Date>(new Date())
  const [executions, setExecutions] = useState<WorkflowExecution[]>([])
  const [loading, setLoading] = useState(true)
  const [isRefetching, setIsRefetching] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [expandedWorkflowId, setExpandedWorkflowId] = useState<string | null>(null)
  const [workflowDetails, setWorkflowDetails] = useState<Record<string, WorkflowDetailsDataLocal>>(
    {}
  )
  const [globalDetails, setGlobalDetails] = useState<WorkflowDetailsDataLocal | null>(null)
  const [globalLogsMeta, setGlobalLogsMeta] = useState<{ offset: number; hasMore: boolean }>({
    offset: 0,
    hasMore: true,
  })
  const [globalLoadingMore, setGlobalLoadingMore] = useState(false)
  const [aggregateSegments, setAggregateSegments] = useState<
    { timestamp: string; totalExecutions: number; successfulExecutions: number }[]
  >([])
  const [selectedSegments, setSelectedSegments] = useState<Record<string, number[]>>({})
  const [lastAnchorIndices, setLastAnchorIndices] = useState<Record<string, number>>({})
  const [searchQuery, setSearchQuery] = useState('')

@@ -135,6 +112,134 @@ export default function Dashboard() {

  const timeFilter = getTimeFilterFromRange(sidebarTimeRange)

  const getStartTime = useCallback(() => {
    const start = new Date(endTime)

    switch (timeFilter) {
      case '30m':
        start.setMinutes(endTime.getMinutes() - 30)
        break
      case '1h':
        start.setHours(endTime.getHours() - 1)
        break
      case '6h':
        start.setHours(endTime.getHours() - 6)
        break
      case '12h':
        start.setHours(endTime.getHours() - 12)
        break
      case '24h':
        start.setHours(endTime.getHours() - 24)
        break
      case '3d':
        start.setDate(endTime.getDate() - 3)
        break
      case '7d':
        start.setDate(endTime.getDate() - 7)
        break
      case '14d':
        start.setDate(endTime.getDate() - 14)
        break
      case '30d':
        start.setDate(endTime.getDate() - 30)
        break
      default:
        start.setHours(endTime.getHours() - 24)
    }

    return start
  }, [endTime, timeFilter])

  const metricsFilters = useMemo(
    () => ({
      workspaceId,
      segments: segmentCount || DEFAULT_SEGMENTS,
      startTime: getStartTime().toISOString(),
      endTime: endTime.toISOString(),
      workflowIds: workflowIds.length > 0 ? workflowIds : undefined,
      folderIds: folderIds.length > 0 ? folderIds : undefined,
      triggers: triggers.length > 0 ? triggers : undefined,
    }),
    [workspaceId, segmentCount, getStartTime, endTime, workflowIds, folderIds, triggers]
  )

  const logsFilters = useMemo(
    () => ({
      workspaceId,
      startDate: getStartTime().toISOString(),
      endDate: endTime.toISOString(),
      workflowIds: workflowIds.length > 0 ? workflowIds : undefined,
      folderIds: folderIds.length > 0 ? folderIds : undefined,
      triggers: triggers.length > 0 ? triggers : undefined,
      limit: 50,
    }),
    [workspaceId, getStartTime, endTime, workflowIds, folderIds, triggers]
  )

  const metricsQuery = useExecutionsMetrics(metricsFilters, {
    enabled: Boolean(workspaceId),
  })

  const globalLogsQuery = useGlobalDashboardLogs(logsFilters, {
    enabled: Boolean(workspaceId),
  })

  const workflowLogsQuery = useWorkflowDashboardLogs(expandedWorkflowId ?? undefined, logsFilters, {
    enabled: Boolean(workspaceId) && Boolean(expandedWorkflowId),
  })
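All three hooks are gated with enabled, so nothing fires until workspaceId resolves, and the per-workflow query additionally waits for a workflow to be expanded. This is React Query's dependent-query pattern: a disabled query stays idle but keeps any cached data, and it starts (or restarts) on its own when its inputs become available, with no guard clauses inside effects. A generic illustration, not code from this repo:

import { useQuery } from '@tanstack/react-query'

// Generic dependent-query sketch: the request runs only once `userId` exists,
// and re-runs automatically whenever it changes.
declare function fetchUser(userId: string): Promise<{ name: string }>

function useUser(userId: string | undefined) {
  return useQuery({
    queryKey: ['user', userId],
    queryFn: () => fetchUser(userId as string),
    enabled: Boolean(userId),
  })
}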
  const executions = metricsQuery.data?.workflows ?? []
  const aggregateSegments = metricsQuery.data?.aggregateSegments ?? []
  const loading = metricsQuery.isLoading
  const isRefetching = metricsQuery.isFetching && !metricsQuery.isLoading
  const error = metricsQuery.error?.message ?? null

  const globalLogs = useMemo(() => {
    if (!globalLogsQuery.data?.pages) return []
    return globalLogsQuery.data.pages.flatMap((page) => page.logs).map(mapToExecutionLog)
  }, [globalLogsQuery.data?.pages])

  const workflowLogs = useMemo(() => {
    if (!workflowLogsQuery.data?.pages) return []
    return workflowLogsQuery.data.pages.flatMap((page) => page.logs).map(mapToExecutionLogAlt)
  }, [workflowLogsQuery.data?.pages])

  const globalDetails = useMemo(() => {
    if (!aggregateSegments.length) return null

    const errorRates = aggregateSegments.map((s) => ({
      timestamp: s.timestamp,
      value: s.totalExecutions > 0 ? (1 - s.successfulExecutions / s.totalExecutions) * 100 : 0,
    }))

    const executionCounts = aggregateSegments.map((s) => ({
      timestamp: s.timestamp,
      value: s.totalExecutions,
    }))

    return {
      errorRates,
      durations: [],
      executionCounts,
      logs: globalLogs,
      allLogs: globalLogs,
    }
  }, [aggregateSegments, globalLogs])

  const workflowDetails = useMemo(() => {
    if (!expandedWorkflowId || !workflowLogs.length) return {}

    return {
      [expandedWorkflowId]: {
        errorRates: [],
        durations: [],
        executionCounts: [],
        logs: workflowLogs,
        allLogs: workflowLogs,
      },
    }
  }, [expandedWorkflowId, workflowLogs])

  useEffect(() => {
    const urlView = searchParams.get('view')
    if (urlView === 'dashboard' || urlView === 'logs') {

@@ -190,362 +295,24 @@ export default function Dashboard() {

    }
  }, [executions])

  const getStartTime = useCallback(() => {
    const start = new Date(endTime)

    switch (timeFilter) {
      case '30m':
        start.setMinutes(endTime.getMinutes() - 30)
        break
      case '1h':
        start.setHours(endTime.getHours() - 1)
        break
      case '6h':
        start.setHours(endTime.getHours() - 6)
        break
      case '12h':
        start.setHours(endTime.getHours() - 12)
        break
      case '24h':
        start.setHours(endTime.getHours() - 24)
        break
      case '3d':
        start.setDate(endTime.getDate() - 3)
        break
      case '7d':
        start.setDate(endTime.getDate() - 7)
        break
      case '14d':
        start.setDate(endTime.getDate() - 14)
        break
      case '30d':
        start.setDate(endTime.getDate() - 30)
        break
      default:
        start.setHours(endTime.getHours() - 24)
    }

    return start
  }, [endTime, timeFilter])

  const fetchExecutions = useCallback(
    async (isInitialLoad = false) => {
      try {
        if (isInitialLoad) {
          setLoading(true)
        } else {
          setIsRefetching(true)
        }
        setError(null)

        const startTime = getStartTime()
        const params = new URLSearchParams({
          segments: String(segmentCount || DEFAULT_SEGMENTS),
          startTime: startTime.toISOString(),
          endTime: endTime.toISOString(),
        })

        if (workflowIds.length > 0) {
          params.set('workflowIds', workflowIds.join(','))
        }

        if (folderIds.length > 0) {
          params.set('folderIds', folderIds.join(','))
        }

        if (triggers.length > 0) {
          params.set('triggers', triggers.join(','))
        }

        const response = await fetch(
          `/api/workspaces/${workspaceId}/metrics/executions?${params.toString()}`
        )

        if (!response.ok) {
          throw new Error('Failed to fetch execution history')
        }

        const data = await response.json()
        const mapped: WorkflowExecution[] = (data.workflows || []).map((wf: any) => {
          const segments = (wf.segments || []).map((s: any) => {
            const total = s.totalExecutions || 0
            const success = s.successfulExecutions || 0
            const hasExecutions = total > 0
            const successRate = hasExecutions ? (success / total) * 100 : 100
            return {
              timestamp: s.timestamp,
              hasExecutions,
              totalExecutions: total,
              successfulExecutions: success,
              successRate,
              avgDurationMs: typeof s.avgDurationMs === 'number' ? s.avgDurationMs : 0,
              p50Ms: typeof s.p50Ms === 'number' ? s.p50Ms : 0,
              p90Ms: typeof s.p90Ms === 'number' ? s.p90Ms : 0,
              p99Ms: typeof s.p99Ms === 'number' ? s.p99Ms : 0,
            }
          })
          const totals = segments.reduce(
            (acc: { total: number; success: number }, seg: (typeof segments)[number]) => {
              acc.total += seg.totalExecutions
              acc.success += seg.successfulExecutions
              return acc
            },
            { total: 0, success: 0 }
          )
          const overallSuccessRate = totals.total > 0 ? (totals.success / totals.total) * 100 : 100
          return {
            workflowId: wf.workflowId,
            workflowName: wf.workflowName,
            segments,
            overallSuccessRate,
          } as WorkflowExecution
        })
        const sortedWorkflows = mapped.sort((a, b) => {
          const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
          const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
          return errB - errA
        })
        setExecutions(sortedWorkflows)

        const segmentsCount: number = Number(params.get('segments') || DEFAULT_SEGMENTS)
        const agg: { timestamp: string; totalExecutions: number; successfulExecutions: number }[] =
          Array.from({ length: segmentsCount }, (_, i) => {
            const base = startTime.getTime()
            const ts = new Date(base + Math.floor((i * (endTime.getTime() - base)) / segmentsCount))
            return {
              timestamp: ts.toISOString(),
              totalExecutions: 0,
              successfulExecutions: 0,
            }
          })
        for (const wf of data.workflows as any[]) {
          wf.segments.forEach((s: any, i: number) => {
            const index = Math.min(i, segmentsCount - 1)
            agg[index].totalExecutions += s.totalExecutions || 0
            agg[index].successfulExecutions += s.successfulExecutions || 0
          })
        }
        setAggregateSegments(agg)

        const errorRates = agg.map((s) => ({
          timestamp: s.timestamp,
          value: s.totalExecutions > 0 ? (1 - s.successfulExecutions / s.totalExecutions) * 100 : 0,
        }))
        const executionCounts = agg.map((s) => ({
          timestamp: s.timestamp,
          value: s.totalExecutions,
        }))

        const logsParams = new URLSearchParams({
          limit: '50',
          offset: '0',
          workspaceId,
          startDate: startTime.toISOString(),
          endDate: endTime.toISOString(),
          order: 'desc',
          details: 'full',
        })
        if (workflowIds.length > 0) logsParams.set('workflowIds', workflowIds.join(','))
        if (folderIds.length > 0) logsParams.set('folderIds', folderIds.join(','))
        if (triggers.length > 0) logsParams.set('triggers', triggers.join(','))

        const logsResponse = await fetch(`/api/logs?${logsParams.toString()}`)
        let mappedLogs: ExecutionLog[] = []
        if (logsResponse.ok) {
          const logsData = await logsResponse.json()
          mappedLogs = (logsData.data || []).map(mapToExecutionLog)
        }

        setGlobalDetails({
          errorRates,
          durations: [],
          executionCounts,
          logs: mappedLogs,
          allLogs: mappedLogs,
        })
        setGlobalLogsMeta({ offset: mappedLogs.length, hasMore: mappedLogs.length === 50 })
      } catch (err) {
        logger.error('Error fetching executions:', err)
        setError(err instanceof Error ? err.message : 'An error occurred')
      } finally {
        setLoading(false)
        setIsRefetching(false)
      }
    },
    [workspaceId, timeFilter, endTime, getStartTime, workflowIds, folderIds, triggers, segmentCount]
  )

  const fetchWorkflowDetails = useCallback(
    async (workflowId: string, silent = false) => {
      try {
        const startTime = getStartTime()
        const params = new URLSearchParams({
          startTime: startTime.toISOString(),
          endTime: endTime.toISOString(),
        })

        if (triggers.length > 0) {
          params.set('triggers', triggers.join(','))
        }

        const response = await fetch(
          `/api/logs?${new URLSearchParams({
            limit: '50',
            offset: '0',
            workspaceId,
            startDate: startTime.toISOString(),
            endDate: endTime.toISOString(),
            order: 'desc',
            details: 'full',
            workflowIds: workflowId,
            ...(triggers.length > 0 ? { triggers: triggers.join(',') } : {}),
          }).toString()}`
        )

        if (!response.ok) {
          throw new Error('Failed to fetch workflow details')
        }

        const data = await response.json()
        const mappedLogs: ExecutionLog[] = (data.data || []).map(mapToExecutionLogAlt)

        setWorkflowDetails((prev) => ({
          ...prev,
          [workflowId]: {
            errorRates: [],
            durations: [],
            executionCounts: [],
            logs: mappedLogs,
            allLogs: mappedLogs,
            __meta: { offset: mappedLogs.length, hasMore: (data.data || []).length === 50 },
          },
        }))
      } catch (err) {
        logger.error('Error fetching workflow details:', err)
      }
    },
    [workspaceId, endTime, getStartTime, triggers]
  )

  // Infinite scroll for details logs
  const loadMoreLogs = useCallback(
    async (workflowId: string) => {
      const details = (workflowDetails as any)[workflowId]
      if (!details) return
      if (details.__loading) return
      if (!details.__meta?.hasMore) return
      try {
        // mark loading to prevent duplicate fetches
        setWorkflowDetails((prev) => ({
          ...prev,
          [workflowId]: { ...(prev as any)[workflowId], __loading: true },
        }))
        const startTime = getStartTime()
        const offset = details.__meta.offset || 0
        const qp = new URLSearchParams({
          limit: '50',
          offset: String(offset),
          workspaceId,
          startDate: startTime.toISOString(),
          endDate: endTime.toISOString(),
          order: 'desc',
          details: 'full',
          workflowIds: workflowId,
        })
        if (triggers.length > 0) qp.set('triggers', triggers.join(','))
        const res = await fetch(`/api/logs?${qp.toString()}`)
        if (!res.ok) return
        const data = await res.json()
        const more: ExecutionLog[] = (data.data || []).map(mapToExecutionLogAlt)

        setWorkflowDetails((prev) => {
          const cur = prev[workflowId]
          const seen = new Set<string>()
          const dedup = [...(cur?.allLogs || []), ...more].filter((x) => {
            const id = x.id
            if (seen.has(id)) return false
            seen.add(id)
            return true
          })
          return {
            ...prev,
            [workflowId]: {
              ...cur,
              logs: dedup,
              allLogs: dedup,
              __meta: {
                offset: (cur?.__meta?.offset || 0) + more.length,
                hasMore: more.length === 50,
              },
              __loading: false,
            },
          }
        })
      } catch {
        setWorkflowDetails((prev) => ({
          ...prev,
          [workflowId]: { ...(prev as any)[workflowId], __loading: false },
        }))
    (workflowId: string) => {
      if (
        workflowId === expandedWorkflowId &&
        workflowLogsQuery.hasNextPage &&
        !workflowLogsQuery.isFetchingNextPage
      ) {
        workflowLogsQuery.fetchNextPage()
      }
    },
    [workspaceId, endTime, getStartTime, triggers, workflowDetails]
    [expandedWorkflowId, workflowLogsQuery]
  )

  const loadMoreGlobalLogs = useCallback(async () => {
    if (!globalDetails || !globalLogsMeta.hasMore) return
    if (globalLoadingMore) return
    try {
      setGlobalLoadingMore(true)
      const startTime = getStartTime()
      const qp = new URLSearchParams({
        limit: '50',
        offset: String(globalLogsMeta.offset || 0),
        workspaceId,
        startDate: startTime.toISOString(),
        endDate: endTime.toISOString(),
        order: 'desc',
        details: 'full',
      })
      if (workflowIds.length > 0) qp.set('workflowIds', workflowIds.join(','))
      if (folderIds.length > 0) qp.set('folderIds', folderIds.join(','))
      if (triggers.length > 0) qp.set('triggers', triggers.join(','))

      const res = await fetch(`/api/logs?${qp.toString()}`)
      if (!res.ok) return
      const data = await res.json()
      const more: ExecutionLog[] = (data.data || []).map(mapToExecutionLog)

      setGlobalDetails((prev) => {
        if (!prev) return prev
        const seen = new Set<string>()
        const dedup = [...prev.allLogs, ...more].filter((x) => {
          const id = x.id
          if (seen.has(id)) return false
          seen.add(id)
          return true
        })
        return { ...prev, logs: dedup, allLogs: dedup }
      })
      setGlobalLogsMeta((m) => ({
        offset: (m.offset || 0) + more.length,
        hasMore: more.length === 50,
      }))
    } catch {
      // ignore
    } finally {
      setGlobalLoadingMore(false)
  const loadMoreGlobalLogs = useCallback(() => {
    if (globalLogsQuery.hasNextPage && !globalLogsQuery.isFetchingNextPage) {
      globalLogsQuery.fetchNextPage()
    }
  }, [
    globalDetails,
    globalLogsMeta,
    globalLoadingMore,
    workspaceId,
    endTime,
    getStartTime,
    workflowIds,
    folderIds,
    triggers,
  ])
  }, [globalLogsQuery])
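With the infinite hooks in place, both load-more callbacks shrink to a guard plus fetchNextPage(); the offset/hasMore bookkeeping the deleted code maintained by hand presumably moves into the hooks' getNextPageParam. A sketch of that paging rule, reusing the `length === 50` heuristic the removed code used for hasMore (assumed; the hook bodies are not in this diff):

type LogsPage = { logs: Array<{ id: string }> }

// Assumed paging rule inside the infinite hooks: a full page of 50 rows implies
// another page may exist; the next offset is the row count fetched so far.
// Returning undefined makes hasNextPage false, which hides the load-more UI.
const getNextPageParam = (lastPage: LogsPage, allPages: LogsPage[]): number | undefined =>
  lastPage.logs.length === 50
    ? allPages.reduce((total, page) => total + page.logs.length, 0)
    : undefined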
  const toggleWorkflow = useCallback(
    (workflowId: string) => {

@@ -553,12 +320,9 @@ export default function Dashboard() {

        setExpandedWorkflowId(null)
      } else {
        setExpandedWorkflowId(workflowId)
        if (!workflowDetails[workflowId]) {
          fetchWorkflowDetails(workflowId)
        }
      }
    },
    [expandedWorkflowId, workflowDetails, fetchWorkflowDetails]
    [expandedWorkflowId]
  )

  const handleSegmentClick = useCallback(

@@ -568,13 +332,7 @@ export default function Dashboard() {

      _timestamp: string,
      mode: 'single' | 'toggle' | 'range'
    ) => {
      // Fetch workflow details if not already loaded
      if (!workflowDetails[workflowId]) {
        fetchWorkflowDetails(workflowId)
      }

      if (mode === 'toggle') {
        // Toggle mode: Add/remove segment from selection, allowing cross-workflow selection
        setSelectedSegments((prev) => {
          const currentSegments = prev[workflowId] || []
          const exists = currentSegments.includes(segmentIndex)

@@ -584,7 +342,6 @@ export default function Dashboard() {

          if (nextSegments.length === 0) {
            const { [workflowId]: _, ...rest } = prev
            // If this was the only workflow with selections, clear expanded
            if (Object.keys(rest).length === 0) {
              setExpandedWorkflowId(null)
            }

@@ -593,7 +350,6 @@ export default function Dashboard() {

          const newState = { ...prev, [workflowId]: nextSegments }

          // Set to multi-workflow mode if multiple workflows have selections
          const selectedWorkflowIds = Object.keys(newState)
          if (selectedWorkflowIds.length > 1) {
            setExpandedWorkflowId('__multi__')

@@ -606,27 +362,23 @@ export default function Dashboard() {

        setLastAnchorIndices((prev) => ({ ...prev, [workflowId]: segmentIndex }))
      } else if (mode === 'single') {
        // Single mode: Select this segment, or deselect if already selected
        setSelectedSegments((prev) => {
          const currentSegments = prev[workflowId] || []
          const isOnlySelectedSegment =
            currentSegments.length === 1 && currentSegments[0] === segmentIndex
          const isOnlyWorkflowSelected = Object.keys(prev).length === 1 && prev[workflowId]

          // If this is the only selected segment in the only selected workflow, deselect it
          if (isOnlySelectedSegment && isOnlyWorkflowSelected) {
            setExpandedWorkflowId(null)
            setLastAnchorIndices({})
            return {}
          }

          // Otherwise, select only this segment
          setExpandedWorkflowId(workflowId)
          setLastAnchorIndices({ [workflowId]: segmentIndex })
          return { [workflowId]: [segmentIndex] }
        })
      } else if (mode === 'range') {
        // Range mode: Expand selection within the current workflow
        if (expandedWorkflowId === workflowId) {
          setSelectedSegments((prev) => {
            const currentSegments = prev[workflowId] || []

@@ -638,31 +390,15 @@ export default function Dashboard() {

            return { ...prev, [workflowId]: Array.from(union).sort((a, b) => a - b) }
          })
        } else {
          // If clicking range on a different workflow, treat as single click
          setExpandedWorkflowId(workflowId)
          setSelectedSegments({ [workflowId]: [segmentIndex] })
          setLastAnchorIndices({ [workflowId]: segmentIndex })
        }
      }
    },
    [expandedWorkflowId, workflowDetails, fetchWorkflowDetails, lastAnchorIndices]
    [expandedWorkflowId, workflowDetails, lastAnchorIndices]
  )

  const isInitialMount = useRef(true)
  useEffect(() => {
    const isInitial = isInitialMount.current
    if (isInitial) {
      isInitialMount.current = false
    }
    fetchExecutions(isInitial)
  }, [workspaceId, timeFilter, endTime, workflowIds, folderIds, triggers, segmentCount])

  useEffect(() => {
    if (expandedWorkflowId) {
      fetchWorkflowDetails(expandedWorkflowId)
    }
  }, [expandedWorkflowId, timeFilter, endTime, workflowIds, folderIds, fetchWorkflowDetails])

  useEffect(() => {
    setSelectedSegments({})
    setLastAnchorIndices({})

@@ -692,68 +428,15 @@ export default function Dashboard() {

    }
  }, [])

  const getShiftLabel = () => {
    switch (sidebarTimeRange) {
      case 'Past 30 minutes':
        return '30 minutes'
      case 'Past hour':
        return 'hour'
      case 'Past 12 hours':
        return '12 hours'
      case 'Past 24 hours':
        return '24 hours'
      default:
        return 'period'
    }
  }

  const getDateRange = () => {
    const start = getStartTime()
    return `${start.toLocaleDateString('en-US', { month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit' })} - ${endTime.toLocaleDateString('en-US', { month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit', year: 'numeric' })}`
  }

  const shiftTimeWindow = (direction: 'back' | 'forward') => {
    let shift: number
    switch (timeFilter) {
      case '30m':
        shift = 30 * 60 * 1000
        break
      case '1h':
        shift = 60 * 60 * 1000
        break
      case '6h':
        shift = 6 * 60 * 60 * 1000
        break
      case '12h':
        shift = 12 * 60 * 60 * 1000
        break
      case '24h':
        shift = 24 * 60 * 60 * 1000
        break
      case '3d':
        shift = 3 * 24 * 60 * 60 * 1000
        break
      case '7d':
        shift = 7 * 24 * 60 * 60 * 1000
        break
      case '14d':
        shift = 14 * 24 * 60 * 60 * 1000
        break
      case '30d':
        shift = 30 * 24 * 60 * 60 * 1000
        break
      default:
        shift = 24 * 60 * 60 * 1000
    }

    setEndTime((prev) => new Date(prev.getTime() + (direction === 'forward' ? shift : -shift)))
  }

  const resetToNow = () => {
    setEndTime(new Date())
  }

  const isLive = endTime.getTime() > Date.now() - 60000 // Within last minute
  const [live, setLive] = useState(false)

  useEffect(() => {

@@ -768,8 +451,6 @@ export default function Dashboard() {

    }
  }, [live])

  // Infinite scroll is now handled inside WorkflowDetails

  return (
    <div className={`flex h-full min-w-0 flex-col pl-64 ${soehne.className}`}>
      <div className='flex min-w-0 flex-1 overflow-hidden'>

@@ -873,25 +554,21 @@ export default function Dashboard() {

        {/* Details section in its own scroll area */}
        <div className='min-h-0 flex-1 overflow-auto'>
          {(() => {
            // Handle multi-workflow selection view
            if (expandedWorkflowId === '__multi__') {
              const selectedWorkflowIds = Object.keys(selectedSegments)
              const totalMs = endTime.getTime() - getStartTime().getTime()
              const segMs = totalMs / Math.max(1, segmentCount)

              // Collect all unique segment indices across all workflows
              const allSegmentIndices = new Set<number>()
              for (const indices of Object.values(selectedSegments)) {
                indices.forEach((idx) => allSegmentIndices.add(idx))
              }
              const sortedIndices = Array.from(allSegmentIndices).sort((a, b) => a - b)

              // Aggregate logs from all selected workflows/segments
              const allLogs: any[] = []
              let totalExecutions = 0
              let totalSuccess = 0

              // Build aggregated chart series
              const aggregatedSegments: Array<{
                timestamp: string
                totalExecutions: number

@@ -900,9 +577,7 @@ export default function Dashboard() {

                durationCount: number
              }> = []

              // Initialize aggregated segments for each unique index
              for (const idx of sortedIndices) {
                // Get the timestamp from the first workflow that has this index
                let timestamp = ''
                for (const wfId of selectedWorkflowIds) {
                  const wf = executions.find((w) => w.workflowId === wfId)

@@ -921,7 +596,6 @@ export default function Dashboard() {

                })
              }

              // Aggregate data from all workflows
              for (const wfId of selectedWorkflowIds) {
                const wf = executions.find((w) => w.workflowId === wfId)
                const details = workflowDetails[wfId]

@@ -929,7 +603,6 @@ export default function Dashboard() {

                if (!wf || !details || indices.length === 0) continue

                // Calculate time windows for this workflow's selected segments
                const windows = indices
                  .map((idx) => wf.segments[idx])
                  .filter(Boolean)

@@ -944,7 +617,6 @@ export default function Dashboard() {

                const inAnyWindow = (t: number) =>
                  windows.some((w) => t >= w.start && t < w.end)

                // Filter logs for this workflow's selected segments
                const workflowLogs = details.allLogs
                  .filter((log) => inAnyWindow(new Date(log.startedAt).getTime()))
                  .map((log) => ({

@@ -956,7 +628,6 @@ export default function Dashboard() {

                allLogs.push(...workflowLogs)

                // Aggregate segment metrics
                indices.forEach((idx) => {
                  const segment = wf.segments[idx]
                  if (!segment) return

@@ -974,7 +645,6 @@ export default function Dashboard() {

                })
              }

              // Build chart series
              const errorRates = aggregatedSegments.map((seg) => ({
                timestamp: seg.timestamp,
                value:

@@ -993,7 +663,6 @@ export default function Dashboard() {

                value: seg.durationCount > 0 ? seg.avgDurationMs / seg.durationCount : 0,
              }))

              // Sort logs by time (most recent first)
              allLogs.sort(
                (a, b) => new Date(b.startedAt).getTime() - new Date(a.startedAt).getTime()
              )

@@ -1002,13 +671,11 @@ export default function Dashboard() {

              const totalRate =
                totalExecutions > 0 ? (totalSuccess / totalExecutions) * 100 : 100

              // Calculate overall time range across all selected workflows
              let multiWorkflowTimeRange: { start: Date; end: Date } | null = null
              if (sortedIndices.length > 0) {
                const firstIdx = sortedIndices[0]
                const lastIdx = sortedIndices[sortedIndices.length - 1]

                // Find earliest start time
                let earliestStart: Date | null = null
                for (const wfId of selectedWorkflowIds) {
                  const wf = executions.find((w) => w.workflowId === wfId)

@@ -1021,7 +688,6 @@ export default function Dashboard() {

                  }
                }

                // Find latest end time
                let latestEnd: Date | null = null
                for (const wfId of selectedWorkflowIds) {
                  const wf = executions.find((w) => w.workflowId === wfId)

@@ -1042,7 +708,6 @@ export default function Dashboard() {

                }
              }

              // Get workflow names
              const workflowNames = selectedWorkflowIds
                .map((id) => executions.find((w) => w.workflowId === id)?.workflowName)
                .filter(Boolean) as string[]

@@ -1179,33 +844,25 @@ export default function Dashboard() {

                ...log,
                workflowName: (log as any).workflowName || wf.workflowName,
              }))

              // Build series from selected segments indices
              const idxSet = new Set(workflowSelectedIndices)
              const selectedSegs = wf.segments.filter((_, i) => idxSet.has(i))
              ;(details as any).__filtered = buildSeriesFromSegments(selectedSegs as any)
            } else if (details) {
              // Clear filtered data when no segments are selected
              ;(details as any).__filtered = undefined
            }

            // Compute series data based on selected segments or all segments
            const segmentsToUse =
              workflowSelectedIndices.length > 0
                ? wf.segments.filter((_, i) => workflowSelectedIndices.includes(i))
                : wf.segments
            const series = buildSeriesFromSegments(segmentsToUse as any)

            const detailsWithFilteredLogs = details
              ? {
                  ...details,
                  logs: logsToDisplay,
                  ...(() => {
                    const series =
                      (details as any).__filtered ||
                      buildSeriesFromSegments(wf.segments as any)
                    return {
                      errorRates: series.errorRates,
                      durations: series.durations,
                      executionCounts: series.executionCounts,
                      durationP50: series.durationP50,
                      durationP90: series.durationP90,
                      durationP99: series.durationP99,
                    }
                  })(),
                  errorRates: series.errorRates,
                  durations: series.durations,
                  executionCounts: series.executionCounts,
                  durationP50: series.durationP50,
                  durationP90: series.durationP90,
                  durationP99: series.durationP99,
                }
              : undefined

@@ -1261,8 +918,8 @@ export default function Dashboard() {

                }}
                formatCost={formatCost}
                onLoadMore={() => loadMoreLogs(expandedWorkflowId)}
                hasMore={(workflowDetails as any)[expandedWorkflowId]?.__meta?.hasMore}
                isLoadingMore={(workflowDetails as any)[expandedWorkflowId]?.__loading}
                hasMore={workflowLogsQuery.hasNextPage ?? false}
                isLoadingMore={workflowLogsQuery.isFetchingNextPage}
              />
            )
          }

@@ -1297,8 +954,8 @@ export default function Dashboard() {

                }}
                formatCost={formatCost}
                onLoadMore={loadMoreGlobalLogs}
                hasMore={globalLogsMeta.hasMore}
                isLoadingMore={globalLoadingMore}
                hasMore={globalLogsQuery.hasNextPage ?? false}
                isLoadingMore={globalLogsQuery.isFetchingNextPage}
              />
            )
          })()}

@@ -1,10 +1,9 @@

'use client'

import { useCallback, useEffect, useRef, useState } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { AlertCircle, ArrowUpRight, Info, Loader2 } from 'lucide-react'
import Link from 'next/link'
import { useParams } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { cn } from '@/lib/utils'
import Controls from '@/app/workspace/[workspaceId]/logs/components/dashboard/controls'

@@ -13,12 +12,12 @@ import { Sidebar } from '@/app/workspace/[workspaceId]/logs/components/sidebar/s

import Dashboard from '@/app/workspace/[workspaceId]/logs/dashboard'
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils'
import { useFolders } from '@/hooks/queries/folders'
import { useLogDetail, useLogsList } from '@/hooks/queries/logs'
import { useDebounce } from '@/hooks/use-debounce'
import { useFolderStore } from '@/stores/folders/store'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { LogsResponse, WorkflowLog } from '@/stores/logs/filters/types'
import type { WorkflowLog } from '@/stores/logs/filters/types'

const logger = createLogger('Logs')
const LOGS_PER_PAGE = 50

/**

@@ -63,19 +62,7 @@ export default function Logs() {

  const workspaceId = params.workspaceId as string

  const {
    logs,
    loading,
    error,
    setLogs,
    setLoading,
    setError,
    setWorkspaceId,
    page,
    setPage,
    hasMore,
    setHasMore,
    isFetchingMore,
    setIsFetchingMore,
    initializeFromURL,
    timeRange,
    level,

@@ -95,10 +82,6 @@ export default function Logs() {

  const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
  const [selectedLogIndex, setSelectedLogIndex] = useState<number>(-1)
  const [isSidebarOpen, setIsSidebarOpen] = useState(false)
  const [isDetailsLoading, setIsDetailsLoading] = useState(false)
  const detailsCacheRef = useRef<Map<string, any>>(new Map())
  const detailsAbortRef = useRef<AbortController | null>(null)
  const currentDetailsIdRef = useRef<string | null>(null)
  const selectedRowRef = useRef<HTMLTableRowElement | null>(null)
  const loaderRef = useRef<HTMLDivElement>(null)
  const scrollContainerRef = useRef<HTMLDivElement>(null)

@@ -107,16 +90,37 @@ export default function Logs() {

  const [searchQuery, setSearchQuery] = useState(storeSearchQuery)
  const debouncedSearchQuery = useDebounce(searchQuery, 300)

  const [availableWorkflows, setAvailableWorkflows] = useState<string[]>([])
  const [availableFolders, setAvailableFolders] = useState<string[]>([])
  const [, setAvailableWorkflows] = useState<string[]>([])
  const [, setAvailableFolders] = useState<string[]>([])

  // Live and refresh state
  const [isLive, setIsLive] = useState(false)
  const [isRefreshing, setIsRefreshing] = useState(false)
  const liveIntervalRef = useRef<NodeJS.Timeout | null>(null)
  const isSearchOpenRef = useRef<boolean>(false)

  // Sync local search query with store search query
  const logFilters = useMemo(
    () => ({
      timeRange,
      level,
      workflowIds,
      folderIds,
      triggers,
      searchQuery: debouncedSearchQuery,
      limit: LOGS_PER_PAGE,
    }),
    [timeRange, level, workflowIds, folderIds, triggers, debouncedSearchQuery]
  )

  const logsQuery = useLogsList(workspaceId, logFilters, {
    enabled: Boolean(workspaceId) && isInitialized.current,
    refetchInterval: isLive ? 5000 : false,
  })

  const logDetailQuery = useLogDetail(selectedLog?.id)

  const logs = useMemo(() => {
    if (!logsQuery.data?.pages) return []
    return logsQuery.data.pages.flatMap((page) => page.logs)
  }, [logsQuery.data?.pages])

  useEffect(() => {
    setSearchQuery(storeSearchQuery)
  }, [storeSearchQuery])
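On the logs page the filter object itself becomes the query input, so the large filter-driven fetch effect further down can be deleted: when any filter or the debounced search changes, the memoized logFilters value changes and React Query refetches on its own. The search string still needs the parseQuery/queryToApiParams treatment the old code performed inline; a sketch of where that could live inside useLogsList (assumed; the hook is defined elsewhere):

import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'

// Hypothetical param builder inside useLogsList: the same merge of store filters
// and parsed-search params the removed fetchWithFilters effect performed inline.
// (The real code also unions trigger lists from both sources; omitted here.)
function buildLogParams(
  workspaceId: string,
  filters: { searchQuery: string; limit: number; triggers: string[] },
  offset: number
): URLSearchParams {
  const params = new URLSearchParams({
    details: 'basic',
    workspaceId,
    limit: String(filters.limit),
    offset: String(offset),
  })
  if (filters.triggers.length > 0) params.set('triggers', filters.triggers.join(','))
  const enhanced = queryToApiParams(parseQuery(filters.searchQuery))
  Object.entries(enhanced).forEach(([key, value]) => params.set(key, value))
  return params
}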
@@ -182,62 +186,6 @@ export default function Logs() {

    const index = logs.findIndex((l) => l.id === log.id)
    setSelectedLogIndex(index)
    setIsSidebarOpen(true)
    setIsDetailsLoading(true)

    const currentId = log.id
    const prevId = index > 0 ? logs[index - 1]?.id : undefined
    const nextId = index < logs.length - 1 ? logs[index + 1]?.id : undefined

    if (detailsAbortRef.current) {
      try {
        detailsAbortRef.current.abort()
      } catch {
        /* no-op */
      }
    }
    const controller = new AbortController()
    detailsAbortRef.current = controller
    currentDetailsIdRef.current = currentId

    const idsToFetch: Array<{ id: string; merge: boolean }> = []
    const cachedCurrent = currentId ? detailsCacheRef.current.get(currentId) : undefined
    if (currentId && !cachedCurrent) idsToFetch.push({ id: currentId, merge: true })
    if (prevId && !detailsCacheRef.current.has(prevId))
      idsToFetch.push({ id: prevId, merge: false })
    if (nextId && !detailsCacheRef.current.has(nextId))
      idsToFetch.push({ id: nextId, merge: false })

    if (cachedCurrent) {
      setSelectedLog((prev) =>
        prev && prev.id === currentId
          ? ({ ...(prev as any), ...(cachedCurrent as any) } as any)
          : prev
      )
      setIsDetailsLoading(false)
    }
    if (idsToFetch.length === 0) return

    Promise.all(
      idsToFetch.map(async ({ id, merge }) => {
        try {
          const res = await fetch(`/api/logs/${id}`, { signal: controller.signal })
          if (!res.ok) return
          const body = await res.json()
          const detailed = body?.data
          if (detailed) {
            detailsCacheRef.current.set(id, detailed)
            if (merge && id === currentId) {
              setSelectedLog((prev) =>
                prev && prev.id === id ? ({ ...(prev as any), ...(detailed as any) } as any) : prev
              )
              if (currentDetailsIdRef.current === id) setIsDetailsLoading(false)
            }
          }
        } catch (e: any) {
          if (e?.name === 'AbortError') return
        }
      })
    ).catch(() => {})
  }

  const handleNavigateNext = useCallback(() => {

@@ -246,54 +194,6 @@ export default function Logs() {

      setSelectedLogIndex(nextIndex)
      const nextLog = logs[nextIndex]
      setSelectedLog(nextLog)
      if (detailsAbortRef.current) {
        try {
          detailsAbortRef.current.abort()
        } catch {
          /* no-op */
        }
      }
      const controller = new AbortController()
      detailsAbortRef.current = controller

      const cached = detailsCacheRef.current.get(nextLog.id)
      if (cached) {
        setSelectedLog((prev) =>
          prev && prev.id === nextLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
        )
      } else {
        const prevId = nextIndex > 0 ? logs[nextIndex - 1]?.id : undefined
        const afterId = nextIndex < logs.length - 1 ? logs[nextIndex + 1]?.id : undefined
        const idsToFetch: Array<{ id: string; merge: boolean }> = []
        if (nextLog.id && !detailsCacheRef.current.has(nextLog.id))
          idsToFetch.push({ id: nextLog.id, merge: true })
        if (prevId && !detailsCacheRef.current.has(prevId))
          idsToFetch.push({ id: prevId, merge: false })
        if (afterId && !detailsCacheRef.current.has(afterId))
          idsToFetch.push({ id: afterId, merge: false })
        Promise.all(
          idsToFetch.map(async ({ id, merge }) => {
            try {
              const res = await fetch(`/api/logs/${id}`, { signal: controller.signal })
              if (!res.ok) return
              const body = await res.json()
              const detailed = body?.data
              if (detailed) {
                detailsCacheRef.current.set(id, detailed)
                if (merge && id === nextLog.id) {
                  setSelectedLog((prev) =>
                    prev && prev.id === id
                      ? ({ ...(prev as any), ...(detailed as any) } as any)
                      : prev
                  )
                }
              }
            } catch (e: any) {
              if (e?.name === 'AbortError') return
            }
          })
        ).catch(() => {})
      }
    }
  }, [selectedLogIndex, logs])

@@ -303,54 +203,6 @@ export default function Logs() {

      setSelectedLogIndex(prevIndex)
      const prevLog = logs[prevIndex]
      setSelectedLog(prevLog)
      if (detailsAbortRef.current) {
        try {
          detailsAbortRef.current.abort()
        } catch {
          /* no-op */
        }
      }
      const controller = new AbortController()
      detailsAbortRef.current = controller

      const cached = detailsCacheRef.current.get(prevLog.id)
      if (cached) {
        setSelectedLog((prev) =>
          prev && prev.id === prevLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
        )
      } else {
        const beforeId = prevIndex > 0 ? logs[prevIndex - 1]?.id : undefined
        const afterId = prevIndex < logs.length - 1 ? logs[prevIndex + 1]?.id : undefined
        const idsToFetch: Array<{ id: string; merge: boolean }> = []
        if (prevLog.id && !detailsCacheRef.current.has(prevLog.id))
          idsToFetch.push({ id: prevLog.id, merge: true })
        if (beforeId && !detailsCacheRef.current.has(beforeId))
          idsToFetch.push({ id: beforeId, merge: false })
        if (afterId && !detailsCacheRef.current.has(afterId))
          idsToFetch.push({ id: afterId, merge: false })
        Promise.all(
          idsToFetch.map(async ({ id, merge }) => {
            try {
              const res = await fetch(`/api/logs/${id}`, { signal: controller.signal })
              if (!res.ok) return
              const body = await res.json()
              const detailed = body?.data
              if (detailed) {
                detailsCacheRef.current.set(id, detailed)
                if (merge && id === prevLog.id) {
                  setSelectedLog((prev) =>
                    prev && prev.id === id
                      ? ({ ...(prev as any), ...(detailed as any) } as any)
                      : prev
                  )
                }
              }
            } catch (e: any) {
              if (e?.name === 'AbortError') return
            }
          })
        ).catch(() => {})
      }
    }
  }, [selectedLogIndex, logs])

@@ -369,106 +221,13 @@ export default function Logs() {

    }
  }, [selectedLogIndex])

  const fetchLogs = useCallback(async (pageNum: number, append = false) => {
    try {
      // Don't fetch if workspaceId is not set
      const { workspaceId: storeWorkspaceId } = useFilterStore.getState()
      if (!storeWorkspaceId) {
        return
      }

      if (pageNum === 1) {
        setLoading(true)
      } else {
        setIsFetchingMore(true)
      }

      const { buildQueryParams: getCurrentQueryParams } = useFilterStore.getState()
      const queryParams = getCurrentQueryParams(pageNum, LOGS_PER_PAGE)

      const { searchQuery: currentSearchQuery } = useFilterStore.getState()
      const parsedQuery = parseQuery(currentSearchQuery)
      const enhancedParams = queryToApiParams(parsedQuery)

      const allParams = new URLSearchParams(queryParams)
      Object.entries(enhancedParams).forEach(([key, value]) => {
        if (key === 'triggers' && allParams.has('triggers')) {
          const existingTriggers = allParams.get('triggers')?.split(',') || []
          const searchTriggers = value.split(',')
          const combined = [...new Set([...existingTriggers, ...searchTriggers])]
          allParams.set('triggers', combined.join(','))
        } else {
          allParams.set(key, value)
        }
      })

      allParams.set('details', 'basic')
      const response = await fetch(`/api/logs?${allParams.toString()}`)

      if (!response.ok) {
        throw new Error(`Error fetching logs: ${response.statusText}`)
      }

      const data: LogsResponse = await response.json()

      setHasMore(data.data.length === LOGS_PER_PAGE && data.page < data.totalPages)

      setLogs(data.data, append)

      setError(null)
    } catch (err) {
      logger.error('Failed to fetch logs:', { err })
      setError(err instanceof Error ? err.message : 'An unknown error occurred')
    } finally {
      if (pageNum === 1) {
        setLoading(false)
      } else {
        setIsFetchingMore(false)
      }
    }
  }, [])

  const handleRefresh = async () => {
    if (isRefreshing) return

    setIsRefreshing(true)

    try {
      await fetchLogs(1)
      setError(null)
    } catch (err) {
      setError(err instanceof Error ? err.message : 'An unknown error occurred')
    } finally {
      setIsRefreshing(false)
    await logsQuery.refetch()
    if (selectedLog?.id) {
      await logDetailQuery.refetch()
    }
  }
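handleRefresh now just asks React Query to refetch; the manual isRefreshing flag becomes redundant because logsQuery.isFetching already reports in-flight state, and that is what Controls receives below. An equivalent shape as a sketch:

// Sketch: refresh the list and, when a log is open in the sidebar, its detail
// record too; no local loading flag is needed since isFetching tracks this.
const handleRefresh = async (): Promise<void> => {
  await Promise.all([
    logsQuery.refetch(),
    selectedLog?.id ? logDetailQuery.refetch() : Promise.resolve(),
  ])
}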
  // Setup or clear the live refresh interval when isLive changes
  useEffect(() => {
    if (liveIntervalRef.current) {
      clearInterval(liveIntervalRef.current)
      liveIntervalRef.current = null
    }

    if (isLive) {
      handleRefresh()
      liveIntervalRef.current = setInterval(() => {
        handleRefresh()
      }, 5000)
    }

    return () => {
      if (liveIntervalRef.current) {
        clearInterval(liveIntervalRef.current)
        liveIntervalRef.current = null
      }
    }
  }, [isLive])

  const toggleLive = () => {
    setIsLive(!isLive)
  }

  const handleExport = async () => {
    const params = new URLSearchParams()
    params.set('workspaceId', workspaceId)

@@ -506,101 +265,14 @@ export default function Logs() {

    return () => window.removeEventListener('popstate', handlePopState)
  }, [initializeFromURL])

  useEffect(() => {
    if (!isInitialized.current) {
      return
    }

    // Don't fetch if workspaceId is not set yet
    if (!workspaceId) {
      return
    }

    setPage(1)
    setHasMore(true)

    const fetchWithFilters = async () => {
      try {
        setLoading(true)

        const params = new URLSearchParams()
        params.set('details', 'basic')
        params.set('limit', LOGS_PER_PAGE.toString())
        params.set('offset', '0') // Always start from page 1
        params.set('workspaceId', workspaceId)

        const parsedQuery = parseQuery(debouncedSearchQuery)
        const enhancedParams = queryToApiParams(parsedQuery)

        if (level !== 'all') params.set('level', level)
        if (triggers.length > 0) params.set('triggers', triggers.join(','))
        if (workflowIds.length > 0) params.set('workflowIds', workflowIds.join(','))
        if (folderIds.length > 0) params.set('folderIds', folderIds.join(','))

        Object.entries(enhancedParams).forEach(([key, value]) => {
          if (key === 'triggers' && params.has('triggers')) {
            const storeTriggers = params.get('triggers')?.split(',') || []
            const searchTriggers = value.split(',')
            const combined = [...new Set([...storeTriggers, ...searchTriggers])]
            params.set('triggers', combined.join(','))
          } else {
            params.set(key, value)
          }
        })

        if (timeRange !== 'All time') {
          const now = new Date()
          let startDate: Date
          switch (timeRange) {
            case 'Past 30 minutes':
              startDate = new Date(now.getTime() - 30 * 60 * 1000)
              break
            case 'Past hour':
              startDate = new Date(now.getTime() - 60 * 60 * 1000)
              break
            case 'Past 24 hours':
              startDate = new Date(now.getTime() - 24 * 60 * 60 * 1000)
              break
            default:
              startDate = new Date(0)
          }
          params.set('startDate', startDate.toISOString())
        }

        const response = await fetch(`/api/logs?${params.toString()}`)

        if (!response.ok) {
          throw new Error(`Error fetching logs: ${response.statusText}`)
        }

        const data: LogsResponse = await response.json()
        setHasMore(data.data.length === LOGS_PER_PAGE && data.page < data.totalPages)
        setLogs(data.data, false)
        setError(null)
      } catch (err) {
        logger.error('Failed to fetch logs:', { err })
        setError(err instanceof Error ? err.message : 'An unknown error occurred')
      } finally {
        setLoading(false)
      }
    }

    fetchWithFilters()
  }, [workspaceId, timeRange, level, workflowIds, folderIds, debouncedSearchQuery, triggers])

  const loadMoreLogs = useCallback(() => {
    if (!isFetchingMore && hasMore) {
      const nextPage = page + 1
      setPage(nextPage)
      setIsFetchingMore(true)
      setTimeout(() => {
        fetchLogs(nextPage, true)
      }, 50)
    if (!logsQuery.isFetching && logsQuery.hasNextPage) {
      logsQuery.fetchNextPage()
    }
  }, [fetchLogs, isFetchingMore, hasMore, page])
  }, [logsQuery])

  useEffect(() => {
    if (loading || !hasMore) return
    if (logsQuery.isLoading || !logsQuery.hasNextPage) return

    const scrollContainer = scrollContainerRef.current
    if (!scrollContainer) return

@@ -612,7 +284,7 @@ export default function Logs() {

      const scrollPercentage = (scrollTop / (scrollHeight - clientHeight)) * 100

      if (scrollPercentage > 60 && !isFetchingMore && hasMore) {
      if (scrollPercentage > 60 && !logsQuery.isFetchingNextPage && logsQuery.hasNextPage) {
        loadMoreLogs()
      }
    }

@@ -622,13 +294,14 @@ export default function Logs() {

    return () => {
      scrollContainer.removeEventListener('scroll', handleScroll)
    }
  }, [loading, hasMore, isFetchingMore, loadMoreLogs])
  }, [logsQuery.isLoading, logsQuery.hasNextPage, logsQuery.isFetchingNextPage, loadMoreLogs])

  useEffect(() => {
    const currentLoaderRef = loaderRef.current
    const scrollContainer = scrollContainerRef.current

    if (!currentLoaderRef || !scrollContainer || loading || !hasMore) return
    if (!currentLoaderRef || !scrollContainer || logsQuery.isLoading || !logsQuery.hasNextPage)
      return

    const observer = new IntersectionObserver(
      (entries) => {

@@ -636,7 +309,7 @@ export default function Logs() {

        if (!e?.isIntersecting) return
        const { scrollTop, scrollHeight, clientHeight } = scrollContainer
        const pct = (scrollTop / (scrollHeight - clientHeight)) * 100
        if (pct > 70 && !isFetchingMore) {
        if (pct > 70 && !logsQuery.isFetchingNextPage) {
          loadMoreLogs()
        }
      },

@@ -652,7 +325,7 @@ export default function Logs() {

    return () => {
      observer.unobserve(currentLoaderRef)
    }
  }, [loading, hasMore, isFetchingMore, loadMoreLogs])
  }, [logsQuery.isLoading, logsQuery.hasNextPage, logsQuery.isFetchingNextPage, loadMoreLogs])

  useEffect(() => {
    const handleKeyDown = (e: KeyboardEvent) => {

@@ -686,7 +359,6 @@ export default function Logs() {

    return () => window.removeEventListener('keydown', handleKeyDown)
  }, [logs, selectedLogIndex, isSidebarOpen, selectedLog, handleNavigateNext, handleNavigatePrev])

  // If in dashboard mode, show the dashboard
  if (viewMode === 'dashboard') {
    return <Dashboard />
  }

@@ -701,7 +373,7 @@ export default function Logs() {

      <div className='flex min-w-0 flex-1 overflow-hidden'>
        <div className='flex flex-1 flex-col p-[24px]'>
          <Controls
            isRefetching={isRefreshing}
            isRefetching={logsQuery.isFetching}
            resetToNow={handleRefresh}
            live={isLive}
            setLive={(fn) => setIsLive(fn)}

@@ -750,18 +422,20 @@ export default function Logs() {

          {/* Table body - scrollable */}
          <div className='flex-1 overflow-y-auto overflow-x-hidden' ref={scrollContainerRef}>
            {loading && page === 1 ? (
            {logsQuery.isLoading && !logsQuery.data ? (
              <div className='flex h-full items-center justify-center'>
                <div className='flex items-center gap-[8px] text-[var(--text-secondary)] dark:text-[var(--text-secondary)]'>
                  <Loader2 className='h-[16px] w-[16px] animate-spin' />
                  <span className='text-[13px]'>Loading logs...</span>
                </div>
              </div>
            ) : error ? (
            ) : logsQuery.isError ? (
              <div className='flex h-full items-center justify-center'>
                <div className='flex items-center gap-[8px] text-[var(--text-error)] dark:text-[var(--text-error)]'>
                  <AlertCircle className='h-[16px] w-[16px]' />
                  <span className='text-[13px]'>Error: {error}</span>
                  <span className='text-[13px]'>
                    Error: {logsQuery.error?.message || 'Failed to load logs'}
                  </span>
                </div>
              </div>
            ) : logs.length === 0 ? (

@@ -778,7 +452,6 @@ export default function Logs() {

              const isSelected = selectedLog?.id === log.id
              const baseLevel = (log.level || 'info').toLowerCase()
              const isError = baseLevel === 'error'
              // If it's an error, don't treat it as pending even if hasPendingPause is true
              const isPending = !isError && log.hasPendingPause === true
              const statusLabel = isPending
                ? 'Pending'

@@ -906,13 +579,13 @@ export default function Logs() {

              })}

              {/* Infinite scroll loader */}
              {hasMore && (
              {logsQuery.hasNextPage && (
                <div className='flex items-center justify-center py-[16px]'>
                  <div
                    ref={loaderRef}
                    className='flex items-center gap-[8px] text-[var(--text-secondary)] dark:text-[var(--text-secondary)]'
                  >
                    {isFetchingMore ? (
                    {logsQuery.isFetchingNextPage ? (
                      <>
                        <Loader2 className='h-[16px] w-[16px] animate-spin' />
                        <span className='text-[13px]'>Loading more...</span>

@@ -932,8 +605,9 @@ export default function Logs() {

      {/* Log Sidebar */}
      <Sidebar
        log={selectedLog}
        log={logDetailQuery.data || selectedLog}
        isOpen={isSidebarOpen}
        isLoadingDetails={logDetailQuery.isLoading}
        onClose={handleCloseSidebar}
        onNavigateNext={handleNavigateNext}
        onNavigatePrev={handleNavigatePrev}
|
||||
|
||||
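Note: the hunks above replace the hand-rolled pagination flags (loading, page, hasMore, isFetchingMore) with state that useInfiniteQuery already tracks. A minimal sketch of the consumption pattern, with illustrative names (the wrapper below is not code from this commit):

```ts
// Sketch only: how a component consumes the infinite logs query.
// `useLogsList` is the hook added in apps/sim/hooks/queries/logs.ts;
// the flattening helper here is an assumption for illustration.
import { useCallback, useMemo } from 'react'
import { useLogsList } from '@/hooks/queries/logs'

function useFlattenedLogs(workspaceId: string, filters: Parameters<typeof useLogsList>[1]) {
  const logsQuery = useLogsList(workspaceId, filters)

  // Each fetched page carries its own `logs` array; flatten for rendering.
  const logs = useMemo(
    () => logsQuery.data?.pages.flatMap((page) => page.logs) ?? [],
    [logsQuery.data]
  )

  // hasNextPage/isFetchingNextPage replace the old hasMore/isFetchingMore flags.
  const loadMoreLogs = useCallback(() => {
    if (logsQuery.hasNextPage && !logsQuery.isFetchingNextPage) {
      logsQuery.fetchNextPage()
    }
  }, [logsQuery])

  return { logs, loadMoreLogs, logsQuery }
}
```
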
@@ -1,7 +1,7 @@
'use client'

import { useCallback, useEffect, useRef, useState } from 'react'
import { Braces, Square } from 'lucide-react'
import { ArrowDown, Braces, Square } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import {
BubbleChatPreview,
@@ -22,12 +22,13 @@ import {
PopoverTrigger,
Trash,
} from '@/components/emcn'
import { VariableIcon } from '@/components/icons'
import { createLogger } from '@/lib/logs/console/logger'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { Variables } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/variables/variables'
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution'
import { useDeleteWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
import { useChatStore } from '@/stores/chat/store'
import { usePanelStore } from '@/stores/panel-new/store'
import type { PanelTab } from '@/stores/panel-new/types'
@@ -62,6 +63,7 @@ export function Panel() {
const workspaceId = params.workspaceId as string

const panelRef = useRef<HTMLElement>(null)
const fileInputRef = useRef<HTMLInputElement>(null)
const { activeTab, setActiveTab, panelWidth, _hasHydrated, setHasHydrated } = usePanelStore()
const copilotRef = useRef<{
createNewChat: () => void
@@ -77,6 +79,7 @@ export function Panel() {

// Hooks
const userPermissions = useUserPermissionsContext()
const { isImporting, handleFileChange } = useImportWorkflow({ workspaceId })
const {
workflows,
activeWorkflowId,
@@ -262,6 +265,14 @@ export function Panel() {
workspaceId,
])

/**
* Handles triggering file input for workflow import
*/
const handleImportWorkflow = useCallback(() => {
setIsMenuOpen(false)
fileInputRef.current?.click()
}, [])

// Compute run button state
const canRun = userPermissions.canRead // Running only requires read permissions
const isLoadingPermissions = userPermissions.isLoading
@@ -314,7 +325,7 @@ export function Panel() {
</PopoverItem>
{
<PopoverItem onClick={() => setVariablesOpen(!isVariablesOpen)}>
<Braces className='h-3 w-3' />
<VariableIcon className='h-3 w-3' />
<span>Variables</span>
</PopoverItem>
}
@@ -331,7 +342,14 @@ export function Panel() {
disabled={isExporting || !currentWorkflow}
>
<Braces className='h-3 w-3' />
<span>Export JSON</span>
<span>Export workflow</span>
</PopoverItem>
<PopoverItem
onClick={handleImportWorkflow}
disabled={isImporting || !userPermissions.canEdit}
>
<ArrowDown className='h-3 w-3' />
<span>Import workflow</span>
</PopoverItem>
<PopoverItem
onClick={handleDuplicateWorkflow}
@@ -499,6 +517,16 @@ export function Panel() {

{/* Floating Variables Modal */}
<Variables />

{/* Hidden file input for workflow import */}
<input
ref={fileInputRef}
type='file'
accept='.json,.zip'
multiple
style={{ display: 'none' }}
onChange={handleFileChange}
/>
</>
)
}

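The import flow above relies on a standard hidden-file-input trick: the menu item forwards its click to an invisible `<input type='file'>`, and useImportWorkflow's handleFileChange does the rest. A generic sketch of the pattern (names illustrative, not code from this commit):

```tsx
// Generic sketch of the hidden-file-input pattern used above.
import { useRef } from 'react'

export function ImportTrigger({ onFiles }: { onFiles: (files: FileList) => void }) {
  const fileInputRef = useRef<HTMLInputElement>(null)

  return (
    <>
      {/* The visible control only forwards its click to the hidden input */}
      <button type='button' onClick={() => fileInputRef.current?.click()}>
        Import workflow
      </button>
      <input
        ref={fileInputRef}
        type='file'
        accept='.json,.zip'
        multiple
        style={{ display: 'none' }}
        onChange={(e) => e.target.files && onFiles(e.target.files)}
      />
    </>
  )
}
```
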
@@ -116,8 +116,7 @@ const WorkflowContent = React.memo(() => {
// Get workspace ID from the params
const workspaceId = params.workspaceId as string

const { workflows, activeWorkflowId, isLoading, setActiveWorkflow, createWorkflow } =
useWorkflowRegistry()
const { workflows, activeWorkflowId, isLoading, setActiveWorkflow } = useWorkflowRegistry()

// Use the clean abstraction for current workflow state
const currentWorkflow = useCurrentWorkflow()

@@ -227,12 +227,8 @@ export function CancelSubscription({ subscription, subscriptionData }: CancelSub
onClick={() => setIsDialogOpen(true)}
disabled={isLoading}
className={cn(
'h-8 rounded-[8px] font-medium text-xs transition-all duration-200',
error
? 'border-red-500 text-red-500 dark:border-red-500 dark:text-red-500'
: isCancelAtPeriodEnd
? 'text-muted-foreground hover:border-green-500 hover:bg-green-500 hover:text-white dark:hover:border-green-500 dark:hover:bg-green-500'
: 'text-muted-foreground hover:border-red-500 hover:bg-red-500 hover:text-white dark:hover:border-red-500 dark:hover:bg-red-500'
'h-8 rounded-[8px] font-medium text-xs',
error && 'border-red-500 text-red-500 dark:border-red-500 dark:text-red-500'
)}
>
{error ? 'Error' : isCancelAtPeriodEnd ? 'Restore' : 'Manage'}

@@ -107,12 +107,11 @@ export function PlanCard({
<Button
onClick={onButtonClick}
className={cn(
'h-9 rounded-[8px] text-xs transition-colors',
'h-9 rounded-[8px] text-xs',
isHorizontal ? 'px-4' : 'w-full',
isError &&
'border-red-500 bg-transparent text-red-500 hover:bg-red-500 hover:text-white dark:border-red-500 dark:text-red-500 dark:hover:bg-red-500'
isError && 'border-red-500 text-red-500 dark:border-red-500 dark:text-red-500'
)}
variant={isError ? 'outline' : 'default'}
variant='outline'
aria-label={`${buttonText} ${name} plan`}
>
{isError ? 'Error' : buttonText}

@@ -14,8 +14,8 @@ import {
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useDeleteFolder, useDuplicateFolder } from '@/app/workspace/[workspaceId]/w/hooks'
import { useUpdateFolder } from '@/hooks/queries/folders'
import { useCreateWorkflow } from '@/hooks/queries/workflows'
import type { FolderTreeNode } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

interface FolderItemProps {
folder: FolderTreeNode
@@ -39,7 +39,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
const router = useRouter()
const workspaceId = params.workspaceId as string
const updateFolderMutation = useUpdateFolder()
const { createWorkflow } = useWorkflowRegistry()
const createWorkflowMutation = useCreateWorkflow()

// Delete modal state
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
@@ -58,18 +58,18 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
})

/**
* Handle create workflow in folder
* Handle create workflow in folder using React Query mutation
*/
const handleCreateWorkflowInFolder = useCallback(async () => {
const workflowId = await createWorkflow({
const result = await createWorkflowMutation.mutateAsync({
workspaceId,
folderId: folder.id,
})

if (workflowId) {
router.push(`/workspace/${workspaceId}/w/${workflowId}`)
if (result.id) {
router.push(`/workspace/${workspaceId}/w/${result.id}`)
}
}, [createWorkflow, workspaceId, folder.id, router])
}, [createWorkflowMutation, workspaceId, folder.id, router])

// Folder expand hook
const {

@@ -6,18 +6,18 @@ import { useParams, useRouter } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import { createLogger } from '@/lib/logs/console/logger'
import { generateFolderName } from '@/lib/naming'
import { cn } from '@/lib/utils'
import {
extractWorkflowName,
extractWorkflowsFromFiles,
extractWorkflowsFromZip,
} from '@/lib/workflows/import-export'
import { generateFolderName } from '@/lib/workspaces/naming'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useCreateFolder } from '@/hooks/queries/folders'
import { useCreateWorkflow } from '@/hooks/queries/workflows'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('CreateMenu')

@@ -44,7 +44,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
const router = useRouter()
const workspaceId = params.workspaceId as string
const createFolderMutation = useCreateFolder()
const { createWorkflow } = useWorkflowRegistry()
const createWorkflowMutation = useCreateWorkflow()
const userPermissions = useUserPermissionsContext()
const fileInputRef = useRef<HTMLInputElement>(null)

@@ -194,12 +194,13 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
const { clearDiff } = useWorkflowDiffStore.getState()
clearDiff()

const newWorkflowId = await createWorkflow({
const result = await createWorkflowMutation.mutateAsync({
name: workflowName,
description: 'Imported from workspace export',
workspaceId,
folderId: targetFolderId,
})
const newWorkflowId = result.id

const response = await fetch(`/api/workflows/${newWorkflowId}/state`, {
method: 'PUT',
@@ -255,11 +256,12 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
const { clearDiff } = useWorkflowDiffStore.getState()
clearDiff()

const newWorkflowId = await createWorkflow({
const result = await createWorkflowMutation.mutateAsync({
name: workflowName,
description: 'Imported from JSON',
workspaceId,
})
const newWorkflowId = result.id

const response = await fetch(`/api/workflows/${newWorkflowId}/state`, {
method: 'PUT',
@@ -299,8 +301,8 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
}
}

const { loadWorkflows } = useWorkflowRegistry.getState()
await loadWorkflows(workspaceId)
// Invalidate workflow queries to reload the list
// The useWorkflows hook in the sidebar will automatically refetch
} catch (error) {
logger.error('Failed to import workflows:', error)
} finally {
@@ -310,7 +312,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
}
}
},
[workspaceId, createWorkflow, createFolderMutation]
[workspaceId, createWorkflowMutation, createFolderMutation]
)

// Button event handlers

@@ -1,6 +1,6 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import { generateFolderName } from '@/lib/naming'
import { generateFolderName } from '@/lib/workspaces/naming'
import { useCreateFolder } from '@/hooks/queries/folders'

const logger = createLogger('useFolderOperations')

@@ -1,6 +1,7 @@
import { useCallback, useEffect, useState } from 'react'
import { useCallback, useState } from 'react'
import { useRouter } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { useCreateWorkflow, useWorkflows } from '@/hooks/queries/workflows'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

@@ -25,12 +26,9 @@ export function useWorkflowOperations({
onWorkspaceInvalid,
}: UseWorkflowOperationsProps) {
const router = useRouter()
const {
workflows,
isLoading: workflowsLoading,
loadWorkflows,
createWorkflow,
} = useWorkflowRegistry()
const { workflows } = useWorkflowRegistry()
const workflowsQuery = useWorkflows(workspaceId)
const createWorkflowMutation = useCreateWorkflow()
const [isCreatingWorkflow, setIsCreatingWorkflow] = useState(false)

/**
@@ -45,6 +43,7 @@ export function useWorkflowOperations({

/**
* Create workflow handler - creates workflow and navigates to it
* Now uses React Query mutation for better performance and caching
*/
const handleCreateWorkflow = useCallback(async (): Promise<string | null> => {
if (isCreatingWorkflow) {
@@ -59,14 +58,15 @@ export function useWorkflowOperations({
const { clearDiff } = useWorkflowDiffStore.getState()
clearDiff()

const workflowId = await createWorkflow({
workspaceId: workspaceId || undefined,
// Use React Query mutation for creation
const result = await createWorkflowMutation.mutateAsync({
workspaceId: workspaceId,
})

// Navigate to the newly created workflow
if (workflowId) {
router.push(`/workspace/${workspaceId}/w/${workflowId}`)
return workflowId
if (result.id) {
router.push(`/workspace/${workspaceId}/w/${result.id}`)
return result.id
}
return null
} catch (error) {
@@ -75,34 +75,16 @@ export function useWorkflowOperations({
} finally {
setIsCreatingWorkflow(false)
}
}, [isCreatingWorkflow, createWorkflow, workspaceId, router])

/**
* Load workflows for the current workspace when workspaceId changes
*/
useEffect(() => {
if (workspaceId) {
// Validate workspace exists before loading workflows
isWorkspaceValid(workspaceId).then((valid) => {
if (valid) {
loadWorkflows(workspaceId)
} else {
logger.warn(`Workspace ${workspaceId} no longer exists, triggering workspace refresh`)
onWorkspaceInvalid()
}
})
}
}, [workspaceId, loadWorkflows, isWorkspaceValid, onWorkspaceInvalid])
}, [isCreatingWorkflow, createWorkflowMutation, workspaceId, router])

return {
// State
workflows,
regularWorkflows,
workflowsLoading,
workflowsLoading: workflowsQuery.isLoading,
isCreatingWorkflow,

// Operations
handleCreateWorkflow,
loadWorkflows,
}
}

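The deleted effect above (validate the workspace, then call loadWorkflows) is the core of this hook's migration: with React Query the fetch is keyed on workspaceId, so switching workspaces refetches on its own. A sketch of the resulting shape, assuming useWorkflows is keyed as in apps/sim/hooks/queries/workflows.ts; the component is illustrative:

```tsx
// Sketch: query-key-driven fetching replaces the imperative useEffect.
// `useWorkflows` is the real hook from this commit; this component is not.
import { useWorkflows } from '@/hooks/queries/workflows'

function WorkflowCount({ workspaceId }: { workspaceId: string }) {
  // No effect needed: the query key includes workspaceId, so a workspace
  // switch refetches, and keepPreviousData avoids a loading flash.
  const { data, isLoading } = useWorkflows(workspaceId)
  return <span>{isLoading ? 'Loading…' : `${data?.length ?? 0} workflows`}</span>
}
```
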
@@ -1,7 +1,7 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import { usePathname, useRouter } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { generateWorkspaceName } from '@/lib/naming'
import { generateWorkspaceName } from '@/lib/workspaces/naming'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('useWorkspaceManagement')

@@ -7,8 +7,8 @@ import { ScrollArea } from '@/components/ui'
import { useSession } from '@/lib/auth-client'
import { getEnv, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { generateWorkspaceName } from '@/lib/naming'
import { canUpgrade, getBillingStatus } from '@/lib/subscription/helpers'
import { generateWorkspaceName } from '@/lib/workspaces/naming'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import {
CreateMenu,
@@ -28,7 +28,6 @@ import { InviteModal } from '@/app/workspace/[workspaceId]/w/components/sidebar/
import { useAutoScroll } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks/use-auto-scroll'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'

@@ -82,13 +81,7 @@ interface TemplateData {
export function Sidebar() {
// useGlobalShortcuts()

const {
workflows,
createWorkflow,
isLoading: workflowsLoading,
loadWorkflows,
switchToWorkspace,
} = useWorkflowRegistry()
const { workflows, isLoading: workflowsLoading, switchToWorkspace } = useWorkflowRegistry()
const { data: sessionData, isPending: sessionLoading } = useSession()
const userPermissions = useUserPermissionsContext()
const isLoading = workflowsLoading || sessionLoading
@@ -605,19 +598,20 @@ export function Sidebar() {
}

// Load workflows for the current workspace when workspaceId changes
useEffect(() => {
if (workspaceId) {
// Validate workspace exists before loading workflows
isWorkspaceValid(workspaceId).then((valid) => {
if (valid) {
loadWorkflows(workspaceId)
} else {
logger.warn(`Workspace ${workspaceId} no longer exists, triggering workspace refresh`)
fetchWorkspaces() // This will handle the redirect through the fallback logic
}
})
}
}, [workspaceId, loadWorkflows]) // Removed isWorkspaceValid and fetchWorkspaces dependencies
// NOTE: This useEffect is disabled - workflows now loaded via React Query in sidebar-new
// useEffect(() => {
// if (workspaceId) {
// // Validate workspace exists before loading workflows
// isWorkspaceValid(workspaceId).then((valid) => {
// if (valid) {
// loadWorkflows(workspaceId)
// } else {
// logger.warn(`Workspace ${workspaceId} no longer exists, triggering workspace refresh`)
// fetchWorkspaces() // This will handle the redirect through the fallback logic
// }
// })
// }
// }, [workspaceId, loadWorkflows]) // Removed isWorkspaceValid and fetchWorkspaces dependencies

// Initialize workspace data on mount (uses full validation with URL handling)
useEffect(() => {
@@ -736,30 +730,33 @@ export function Sidebar() {
}, [knowledgeBases, workspaceId, knowledgeBaseId])

// Create workflow handler
// NOTE: This is disabled - workflow creation now handled via React Query in sidebar-new
const handleCreateWorkflow = async (folderId?: string): Promise<string> => {
if (isCreatingWorkflow) {
logger.info('Workflow creation already in progress, ignoring request')
throw new Error('Workflow creation already in progress')
}
logger.warn('Old sidebar handleCreateWorkflow called - should use sidebar-new')
return ''
// if (isCreatingWorkflow) {
// logger.info('Workflow creation already in progress, ignoring request')
// throw new Error('Workflow creation already in progress')
// }

try {
setIsCreatingWorkflow(true)
// try {
// setIsCreatingWorkflow(true)

// Clear workflow diff store when creating a new workflow
const { clearDiff } = useWorkflowDiffStore.getState()
clearDiff()
// // Clear workflow diff store when creating a new workflow
// const { clearDiff } = useWorkflowDiffStore.getState()
// clearDiff()

const id = await createWorkflow({
workspaceId: workspaceId || undefined,
folderId: folderId || undefined,
})
return id
} catch (error) {
logger.error('Error creating workflow:', error)
throw error
} finally {
setIsCreatingWorkflow(false)
}
// const id = await createWorkflow({
// workspaceId: workspaceId || undefined,
// folderId: folderId || undefined,
// })
// return id
// } catch (error) {
// logger.error('Error creating workflow:', error)
// throw error
// } finally {
// setIsCreatingWorkflow(false)
// }
}

// Toggle workspace selector visibility

@@ -8,9 +8,9 @@ import {
extractWorkflowsFromZip,
} from '@/lib/workflows/import-export'
import { folderKeys, useCreateFolder } from '@/hooks/queries/folders'
import { useCreateWorkflow, workflowKeys } from '@/hooks/queries/workflows'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('useImportWorkflow')

@@ -30,7 +30,7 @@ interface UseImportWorkflowProps {
*/
export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const router = useRouter()
const { createWorkflow, loadWorkflows } = useWorkflowRegistry()
const createWorkflowMutation = useCreateWorkflow()
const queryClient = useQueryClient()
const createFolderMutation = useCreateFolder()
const [isImporting, setIsImporting] = useState(false)
@@ -55,12 +55,13 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const workflowColor =
parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'

const newWorkflowId = await createWorkflow({
const result = await createWorkflowMutation.mutateAsync({
name: workflowName,
description: workflowData.metadata?.description || 'Imported from JSON',
workspaceId,
folderId: folderId || undefined,
})
const newWorkflowId = result.id

// Update workflow color if we extracted one
if (workflowColor !== '#3972F6') {
@@ -98,7 +99,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
logger.info(`Imported workflow: ${workflowName}`)
return newWorkflowId
},
[createWorkflow, workspaceId]
[createWorkflowMutation, workspaceId]
)

/**
@@ -184,8 +185,8 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}

// Reload workflows to show newly imported ones
await loadWorkflows(workspaceId)
// Reload workflows and folders to show newly imported ones
await queryClient.invalidateQueries({ queryKey: workflowKeys.list(workspaceId) })
await queryClient.invalidateQueries({ queryKey: folderKeys.list(workspaceId) })

logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)
@@ -205,7 +206,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}
},
[importSingleWorkflow, workspaceId, loadWorkflows, router, createFolderMutation, queryClient]
[importSingleWorkflow, workspaceId, router, createFolderMutation, queryClient]
)

return {

@@ -1,42 +1,32 @@
'use client'

import { useEffect, useState } from 'react'
import { useEffect } from 'react'
import { Loader2 } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { useWorkflows } from '@/hooks/queries/workflows'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('WorkflowsPage')

export default function WorkflowsPage() {
const router = useRouter()
const { workflows, isLoading, loadWorkflows, setActiveWorkflow } = useWorkflowRegistry()
const [hasInitialized, setHasInitialized] = useState(false)

const { workflows, setActiveWorkflow } = useWorkflowRegistry()
const params = useParams()
const workspaceId = params.workspaceId as string

// Initialize workspace workflows
useEffect(() => {
const initializeWorkspace = async () => {
try {
await loadWorkflows(workspaceId)
setHasInitialized(true)
} catch (error) {
logger.error('Failed to load workflows for workspace:', error)
setHasInitialized(true) // Still mark as initialized to show error state
}
}

if (!hasInitialized) {
initializeWorkspace()
}
}, [workspaceId, loadWorkflows, hasInitialized])
// Fetch workflows using React Query
const { isLoading, isError } = useWorkflows(workspaceId)

// Handle redirection once workflows are loaded
useEffect(() => {
// Only proceed if we've initialized and workflows are not loading
if (!hasInitialized || isLoading) return
// Only proceed if workflows are done loading
if (isLoading) return

if (isError) {
logger.error('Failed to load workflows for workspace')
return
}

const workflowIds = Object.keys(workflows)

@@ -55,7 +45,7 @@ export default function WorkflowsPage() {
router.replace(`/workspace/${workspaceId}/w/${firstWorkflowId}`)
})
}
}, [hasInitialized, isLoading, workflows, workspaceId, router, setActiveWorkflow])
}, [isLoading, workflows, workspaceId, router, setActiveWorkflow, isError])

// Always show loading state until redirect happens
// There should always be a default workflow, so we never show "no workflows found"

@@ -1,8 +1,8 @@
import { useEffect } from 'react'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { createLogger } from '@/lib/logs/console/logger'
import { workflowKeys } from '@/hooks/queries/workflows'
import { useFolderStore, type WorkflowFolder } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('FolderQueries')

@@ -150,11 +150,8 @@ export function useDeleteFolderMutation() {
},
onSuccess: async (_data, variables) => {
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
try {
await useWorkflowRegistry.getState().loadWorkflows(variables.workspaceId)
} catch (error) {
logger.error('Failed to reload workflows after folder delete', { error })
}
// Invalidate workflow queries to reload workflows after folder changes
queryClient.invalidateQueries({ queryKey: workflowKeys.list(variables.workspaceId) })
},
})
}
@@ -184,11 +181,8 @@ export function useDuplicateFolderMutation() {
},
onSuccess: async (_data, variables) => {
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
try {
await useWorkflowRegistry.getState().loadWorkflows(variables.workspaceId)
} catch (error) {
logger.error('Failed to reload workflows after folder duplicate', { error })
}
// Invalidate workflow queries to reload workflows after folder changes
queryClient.invalidateQueries({ queryKey: workflowKeys.list(variables.workspaceId) })
},
})
}

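Both folder mutations now end with a cache invalidation instead of awaiting an imperative registry reload, which drops the store coupling and the extra error handling. A minimal sketch of the shared-key pattern; the key factory is a real export from this commit, the wrapper hook is illustrative:

```ts
// Minimal sketch of cross-entity invalidation via shared query-key factories
// (the pattern the two onSuccess handlers above switch to).
import { useQueryClient } from '@tanstack/react-query'
import { workflowKeys } from '@/hooks/queries/workflows'

function useInvalidateWorkflows() {
  const queryClient = useQueryClient()
  return (workspaceId: string) => {
    // Marks cached workflow lists stale; mounted consumers refetch on their own.
    queryClient.invalidateQueries({ queryKey: workflowKeys.list(workspaceId) })
  }
}
```
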
apps/sim/hooks/queries/logs.ts (new file, 421 lines)
@@ -0,0 +1,421 @@
import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query'
import type { LogsResponse, WorkflowLog } from '@/stores/logs/filters/types'

export const logKeys = {
all: ['logs'] as const,
lists: () => [...logKeys.all, 'list'] as const,
list: (workspaceId: string | undefined, filters: Omit<LogFilters, 'page'>) =>
[...logKeys.lists(), workspaceId ?? '', filters] as const,
details: () => [...logKeys.all, 'detail'] as const,
detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const,
metrics: () => [...logKeys.all, 'metrics'] as const,
executions: (workspaceId: string | undefined, filters: Record<string, any>) =>
[...logKeys.metrics(), 'executions', workspaceId ?? '', filters] as const,
workflowLogs: (
workspaceId: string | undefined,
workflowId: string | undefined,
filters: Record<string, any>
) => [...logKeys.all, 'workflow-logs', workspaceId ?? '', workflowId ?? '', filters] as const,
globalLogs: (workspaceId: string | undefined, filters: Record<string, any>) =>
[...logKeys.all, 'global-logs', workspaceId ?? '', filters] as const,
}

interface LogFilters {
timeRange: string
level: string
workflowIds: string[]
folderIds: string[]
triggers: string[]
searchQuery: string
limit: number
}

async function fetchLogsPage(
workspaceId: string,
filters: LogFilters,
page: number
): Promise<{ logs: WorkflowLog[]; hasMore: boolean; nextPage: number | undefined }> {
const queryParams = buildQueryParams(workspaceId, filters, page)
const response = await fetch(`/api/logs?${queryParams}`)

if (!response.ok) {
throw new Error('Failed to fetch logs')
}

const apiData: LogsResponse = await response.json()
const hasMore = apiData.data.length === filters.limit && apiData.page < apiData.totalPages

return {
logs: apiData.data || [],
hasMore,
nextPage: hasMore ? page + 1 : undefined,
}
}

async function fetchLogDetail(logId: string): Promise<WorkflowLog> {
const response = await fetch(`/api/logs/${logId}`)

if (!response.ok) {
throw new Error('Failed to fetch log details')
}

const { data } = await response.json()
return data
}

function buildQueryParams(workspaceId: string, filters: LogFilters, page: number): string {
const params = new URLSearchParams()

params.set('workspaceId', workspaceId)
params.set('limit', filters.limit.toString())
params.set('offset', ((page - 1) * filters.limit).toString())

if (filters.level !== 'all') {
params.set('level', filters.level)
}

if (filters.triggers.length > 0) {
params.set('triggers', filters.triggers.join(','))
}

if (filters.workflowIds.length > 0) {
params.set('workflowIds', filters.workflowIds.join(','))
}

if (filters.folderIds.length > 0) {
params.set('folderIds', filters.folderIds.join(','))
}

if (filters.timeRange !== 'All time') {
const now = new Date()
let startDate: Date

switch (filters.timeRange) {
case 'Past 30 minutes':
startDate = new Date(now.getTime() - 30 * 60 * 1000)
break
case 'Past hour':
startDate = new Date(now.getTime() - 60 * 60 * 1000)
break
case 'Past 6 hours':
startDate = new Date(now.getTime() - 6 * 60 * 60 * 1000)
break
case 'Past 12 hours':
startDate = new Date(now.getTime() - 12 * 60 * 60 * 1000)
break
case 'Past 24 hours':
startDate = new Date(now.getTime() - 24 * 60 * 60 * 1000)
break
case 'Past 3 days':
startDate = new Date(now.getTime() - 3 * 24 * 60 * 60 * 1000)
break
case 'Past 7 days':
startDate = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000)
break
case 'Past 14 days':
startDate = new Date(now.getTime() - 14 * 24 * 60 * 60 * 1000)
break
case 'Past 30 days':
startDate = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000)
break
default:
startDate = new Date(0)
}

params.set('startDate', startDate.toISOString())
}

if (filters.searchQuery.trim()) {
params.set('search', filters.searchQuery.trim())
}

return params.toString()
}

interface UseLogsListOptions {
enabled?: boolean
refetchInterval?: number | false
}

export function useLogsList(
workspaceId: string | undefined,
filters: LogFilters,
options?: UseLogsListOptions
) {
return useInfiniteQuery({
queryKey: logKeys.list(workspaceId, filters),
queryFn: ({ pageParam }) => fetchLogsPage(workspaceId as string, filters, pageParam),
enabled: Boolean(workspaceId) && (options?.enabled ?? true),
refetchInterval: options?.refetchInterval ?? false,
staleTime: 0, // Always consider stale for real-time logs
initialPageParam: 1,
getNextPageParam: (lastPage) => lastPage.nextPage,
})
}

export function useLogDetail(logId: string | undefined) {
return useQuery({
queryKey: logKeys.detail(logId),
queryFn: () => fetchLogDetail(logId as string),
enabled: Boolean(logId),
staleTime: 30 * 1000, // Details can be slightly stale (30 seconds)
placeholderData: keepPreviousData,
})
}

interface WorkflowSegment {
timestamp: string
hasExecutions: boolean
totalExecutions: number
successfulExecutions: number
successRate: number
avgDurationMs?: number
p50Ms?: number
p90Ms?: number
p99Ms?: number
}

interface WorkflowExecution {
workflowId: string
workflowName: string
segments: WorkflowSegment[]
overallSuccessRate: number
}

interface AggregateSegment {
timestamp: string
totalExecutions: number
successfulExecutions: number
}

interface ExecutionsMetricsResponse {
workflows: WorkflowExecution[]
aggregateSegments: AggregateSegment[]
}

interface DashboardMetricsFilters {
workspaceId: string
segments: number
startTime: string
endTime: string
workflowIds?: string[]
folderIds?: string[]
triggers?: string[]
}

async function fetchExecutionsMetrics(
filters: DashboardMetricsFilters
): Promise<ExecutionsMetricsResponse> {
const params = new URLSearchParams({
segments: String(filters.segments),
startTime: filters.startTime,
endTime: filters.endTime,
})

if (filters.workflowIds && filters.workflowIds.length > 0) {
params.set('workflowIds', filters.workflowIds.join(','))
}

if (filters.folderIds && filters.folderIds.length > 0) {
params.set('folderIds', filters.folderIds.join(','))
}

if (filters.triggers && filters.triggers.length > 0) {
params.set('triggers', filters.triggers.join(','))
}

const response = await fetch(
`/api/workspaces/${filters.workspaceId}/metrics/executions?${params.toString()}`
)

if (!response.ok) {
throw new Error('Failed to fetch execution metrics')
}

const data = await response.json()

const workflows: WorkflowExecution[] = (data.workflows || []).map((wf: any) => {
const segments = (wf.segments || []).map((s: any) => {
const total = s.totalExecutions || 0
const success = s.successfulExecutions || 0
const hasExecutions = total > 0
const successRate = hasExecutions ? (success / total) * 100 : 100
return {
timestamp: s.timestamp,
hasExecutions,
totalExecutions: total,
successfulExecutions: success,
successRate,
avgDurationMs: typeof s.avgDurationMs === 'number' ? s.avgDurationMs : 0,
p50Ms: typeof s.p50Ms === 'number' ? s.p50Ms : 0,
p90Ms: typeof s.p90Ms === 'number' ? s.p90Ms : 0,
p99Ms: typeof s.p99Ms === 'number' ? s.p99Ms : 0,
}
})

const totals = segments.reduce(
(acc: { total: number; success: number }, seg: WorkflowSegment) => {
acc.total += seg.totalExecutions
acc.success += seg.successfulExecutions
return acc
},
{ total: 0, success: 0 }
)

const overallSuccessRate = totals.total > 0 ? (totals.success / totals.total) * 100 : 100

return {
workflowId: wf.workflowId,
workflowName: wf.workflowName,
segments,
overallSuccessRate,
}
})

const sortedWorkflows = workflows.sort((a, b) => {
const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
return errB - errA
})

const segmentCount = filters.segments
const startTime = new Date(filters.startTime)
const endTime = new Date(filters.endTime)

const aggregateSegments: AggregateSegment[] = Array.from({ length: segmentCount }, (_, i) => {
const base = startTime.getTime()
const ts = new Date(base + Math.floor((i * (endTime.getTime() - base)) / segmentCount))
return {
timestamp: ts.toISOString(),
totalExecutions: 0,
successfulExecutions: 0,
}
})

for (const wf of data.workflows as any[]) {
wf.segments.forEach((s: any, i: number) => {
const index = Math.min(i, segmentCount - 1)
aggregateSegments[index].totalExecutions += s.totalExecutions || 0
aggregateSegments[index].successfulExecutions += s.successfulExecutions || 0
})
}

return {
workflows: sortedWorkflows,
aggregateSegments,
}
}

interface UseExecutionsMetricsOptions {
enabled?: boolean
refetchInterval?: number | false
}

export function useExecutionsMetrics(
filters: DashboardMetricsFilters,
options?: UseExecutionsMetricsOptions
) {
return useQuery({
queryKey: logKeys.executions(filters.workspaceId, filters),
queryFn: () => fetchExecutionsMetrics(filters),
enabled: Boolean(filters.workspaceId) && (options?.enabled ?? true),
refetchInterval: options?.refetchInterval ?? false,
staleTime: 10 * 1000, // Metrics can be slightly stale (10 seconds)
placeholderData: keepPreviousData,
})
}

interface DashboardLogsFilters {
workspaceId: string
startDate: string
endDate: string
workflowIds?: string[]
folderIds?: string[]
triggers?: string[]
limit: number
}

interface DashboardLogsPage {
logs: any[] // Will be mapped by the consumer
hasMore: boolean
nextPage: number | undefined
}

async function fetchDashboardLogsPage(
filters: DashboardLogsFilters,
page: number,
workflowId?: string
): Promise<DashboardLogsPage> {
const params = new URLSearchParams({
limit: filters.limit.toString(),
offset: ((page - 1) * filters.limit).toString(),
workspaceId: filters.workspaceId,
startDate: filters.startDate,
endDate: filters.endDate,
order: 'desc',
details: 'full',
})

if (workflowId) {
params.set('workflowIds', workflowId)
} else if (filters.workflowIds && filters.workflowIds.length > 0) {
params.set('workflowIds', filters.workflowIds.join(','))
}

if (filters.folderIds && filters.folderIds.length > 0) {
params.set('folderIds', filters.folderIds.join(','))
}

if (filters.triggers && filters.triggers.length > 0) {
params.set('triggers', filters.triggers.join(','))
}

const response = await fetch(`/api/logs?${params.toString()}`)

if (!response.ok) {
throw new Error('Failed to fetch dashboard logs')
}

const data = await response.json()
const logs = data.data || []
const hasMore = logs.length === filters.limit

return {
logs,
hasMore,
nextPage: hasMore ? page + 1 : undefined,
}
}

interface UseDashboardLogsOptions {
enabled?: boolean
}

export function useGlobalDashboardLogs(
filters: DashboardLogsFilters,
options?: UseDashboardLogsOptions
) {
return useInfiniteQuery({
queryKey: logKeys.globalLogs(filters.workspaceId, filters),
queryFn: ({ pageParam }) => fetchDashboardLogsPage(filters, pageParam),
enabled: Boolean(filters.workspaceId) && (options?.enabled ?? true),
staleTime: 10 * 1000, // Slightly stale (10 seconds)
initialPageParam: 1,
getNextPageParam: (lastPage) => lastPage.nextPage,
})
}

export function useWorkflowDashboardLogs(
workflowId: string | undefined,
filters: DashboardLogsFilters,
options?: UseDashboardLogsOptions
) {
return useInfiniteQuery({
queryKey: logKeys.workflowLogs(filters.workspaceId, workflowId, filters),
queryFn: ({ pageParam }) => fetchDashboardLogsPage(filters, pageParam, workflowId),
enabled: Boolean(filters.workspaceId) && Boolean(workflowId) && (options?.enabled ?? true),
staleTime: 10 * 1000, // Slightly stale (10 seconds)
initialPageParam: 1,
getNextPageParam: (lastPage) => lastPage.nextPage,
})
}

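Because useLogsList exposes refetchInterval, the Logs page's live mode can be expressed as plain polling rather than manual reload logic. A hedged sketch of the wiring (the 5-second interval is an assumption for illustration, not taken from this commit):

```ts
// Sketch: wiring a `live` toggle to polling through the hook's
// refetchInterval option; the interval value is illustrative.
import { useLogsList } from '@/hooks/queries/logs'

function useLiveLogs(
  workspaceId: string,
  filters: Parameters<typeof useLogsList>[1],
  live: boolean
) {
  return useLogsList(workspaceId, filters, {
    refetchInterval: live ? 5_000 : false, // poll only while live mode is on
  })
}
```
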
apps/sim/hooks/queries/workflows.ts (new file, 169 lines)
@@ -0,0 +1,169 @@
import { useEffect } from 'react'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { createLogger } from '@/lib/logs/console/logger'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
import {
generateCreativeWorkflowName,
getNextWorkflowColor,
} from '@/stores/workflows/registry/utils'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'

const logger = createLogger('WorkflowQueries')

export const workflowKeys = {
all: ['workflows'] as const,
lists: () => [...workflowKeys.all, 'list'] as const,
list: (workspaceId: string | undefined) => [...workflowKeys.lists(), workspaceId ?? ''] as const,
}

function mapWorkflow(workflow: any): WorkflowMetadata {
return {
id: workflow.id,
name: workflow.name,
description: workflow.description,
color: workflow.color,
workspaceId: workflow.workspaceId,
folderId: workflow.folderId,
createdAt: new Date(workflow.createdAt),
lastModified: new Date(workflow.updatedAt || workflow.createdAt),
}
}

async function fetchWorkflows(workspaceId: string): Promise<WorkflowMetadata[]> {
const response = await fetch(`/api/workflows?workspaceId=${workspaceId}`)

if (!response.ok) {
throw new Error('Failed to fetch workflows')
}

const { data }: { data: any[] } = await response.json()
return data.map(mapWorkflow)
}

export function useWorkflows(workspaceId?: string) {
const setWorkflows = useWorkflowRegistry((state) => state.setWorkflows)

const query = useQuery({
queryKey: workflowKeys.list(workspaceId),
queryFn: () => fetchWorkflows(workspaceId as string),
enabled: Boolean(workspaceId),
placeholderData: keepPreviousData,
staleTime: 60 * 1000,
})

useEffect(() => {
if (query.data) {
setWorkflows(query.data)
}
}, [query.data, setWorkflows])

return query
}

interface CreateWorkflowVariables {
workspaceId: string
name?: string
description?: string
color?: string
folderId?: string | null
}

export function useCreateWorkflow() {
const queryClient = useQueryClient()

return useMutation({
mutationFn: async (variables: CreateWorkflowVariables) => {
const { workspaceId, name, description, color, folderId } = variables

logger.info(`Creating new workflow in workspace: ${workspaceId}`)

const createResponse = await fetch('/api/workflows', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
name: name || generateCreativeWorkflowName(),
description: description || 'New workflow',
color: color || getNextWorkflowColor(),
workspaceId,
folderId: folderId || null,
}),
})

if (!createResponse.ok) {
const errorData = await createResponse.json()
throw new Error(
`Failed to create workflow: ${errorData.error || createResponse.statusText}`
)
}

const createdWorkflow = await createResponse.json()
const workflowId = createdWorkflow.id

logger.info(`Successfully created workflow ${workflowId}`)

const { workflowState } = buildDefaultWorkflowArtifacts()

fetch(`/api/workflows/${workflowId}/state`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(workflowState),
})
.then((response) => {
if (!response.ok) {
response.text().then((text) => {
logger.error('Failed to persist default Start block:', text)
})
} else {
logger.info('Successfully persisted default Start block')
}
})
.catch((error) => {
logger.error('Error persisting default Start block:', error)
})

return {
id: workflowId,
name: createdWorkflow.name,
description: createdWorkflow.description,
color: createdWorkflow.color,
workspaceId,
folderId: createdWorkflow.folderId,
}
},
onSuccess: (data, variables) => {
logger.info(`Workflow ${data.id} created successfully`)

const { subBlockValues } = buildDefaultWorkflowArtifacts()
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[data.id]: subBlockValues,
},
}))

useWorkflowRegistry.setState((state) => ({
workflows: {
...state.workflows,
[data.id]: {
id: data.id,
name: data.name,
lastModified: new Date(),
createdAt: new Date(),
description: data.description,
color: data.color,
workspaceId: data.workspaceId,
folderId: data.folderId,
},
},
error: null,
}))

queryClient.invalidateQueries({ queryKey: workflowKeys.list(variables.workspaceId) })
},
onError: (error: Error) => {
logger.error('Failed to create workflow:', error)
},
})
}

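Call sites elsewhere in this commit follow the same create-then-navigate shape. An illustrative composition of the mutation hook (not code from this commit):

```ts
// Illustrative sketch mirroring the updated call sites: create a workflow,
// then navigate using the returned id.
import { useRouter } from 'next/navigation'
import { useCreateWorkflow } from '@/hooks/queries/workflows'

function useCreateAndOpenWorkflow(workspaceId: string) {
  const router = useRouter()
  const createWorkflowMutation = useCreateWorkflow()

  return async (folderId?: string): Promise<string | null> => {
    const result = await createWorkflowMutation.mutateAsync({ workspaceId, folderId })
    if (result.id) {
      router.push(`/workspace/${workspaceId}/w/${result.id}`)
      return result.id
    }
    return null
  }
}
```
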
@@ -1,5 +1,5 @@
/**
* Utility functions for generating names for all entities (workspaces, folders, workflows)
* Utility functions for generating names for workspaces and folders
*/

import type { Workspace } from '@/lib/organization/types'
@@ -17,148 +17,6 @@ interface FoldersApiResponse {
folders: WorkflowFolder[]
}

const ADJECTIVES = [
'Blazing',
'Crystal',
'Golden',
'Silver',
'Mystic',
'Cosmic',
'Electric',
'Frozen',
'Burning',
'Shining',
'Dancing',
'Flying',
'Roaring',
'Whispering',
'Glowing',
'Sparkling',
'Thunder',
'Lightning',
'Storm',
'Ocean',
'Mountain',
'Forest',
'Desert',
'Arctic',
'Tropical',
'Midnight',
'Dawn',
'Sunset',
'Rainbow',
'Diamond',
'Ruby',
'Emerald',
'Sapphire',
'Pearl',
'Jade',
'Amber',
'Coral',
'Ivory',
'Obsidian',
'Marble',
'Velvet',
'Silk',
'Satin',
'Linen',
'Cotton',
'Wool',
'Cashmere',
'Denim',
'Neon',
'Pastel',
'Vibrant',
'Muted',
'Bold',
'Subtle',
'Bright',
'Dark',
]

const NOUNS = [
'Phoenix',
'Dragon',
'Eagle',
'Wolf',
'Lion',
'Tiger',
'Panther',
'Falcon',
'Hawk',
'Raven',
'Swan',
'Dove',
'Butterfly',
'Firefly',
'Dragonfly',
'Hummingbird',
'Galaxy',
'Nebula',
'Comet',
'Meteor',
'Star',
'Moon',
'Sun',
'Planet',
'Asteroid',
'Constellation',
'Aurora',
'Eclipse',
'Solstice',
'Equinox',
'Horizon',
'Zenith',
'Castle',
'Tower',
'Bridge',
'Garden',
'Fountain',
'Palace',
'Temple',
'Cathedral',
'Lighthouse',
'Windmill',
'Waterfall',
'Canyon',
'Valley',
'Peak',
'Ridge',
'Cliff',
'Ocean',
'River',
'Lake',
'Stream',
'Pond',
'Bay',
'Cove',
'Harbor',
'Island',
'Peninsula',
'Archipelago',
'Atoll',
'Reef',
'Lagoon',
'Fjord',
'Delta',
'Cake',
'Cookie',
'Muffin',
'Cupcake',
'Pie',
'Tart',
'Brownie',
'Donut',
'Pancake',
'Waffle',
'Croissant',
'Bagel',
'Pretzel',
'Biscuit',
'Scone',
'Crumpet',
]

/**
* Generates the next incremental name for entities following pattern: "{prefix} {number}"
*
@@ -226,13 +84,3 @@ export async function generateSubfolderName(

return generateIncrementalName(subfolders, 'Subfolder')
}

/**
* Generates a creative workflow name using random adjectives and nouns
* @returns A creative workflow name like "blazing-phoenix" or "crystal-dragon"
*/
export function generateCreativeWorkflowName(): string {
const adjective = ADJECTIVES[Math.floor(Math.random() * ADJECTIVES.length)]
const noun = NOUNS[Math.floor(Math.random() * NOUNS.length)]
return `${adjective.toLowerCase()}-${noun.toLowerCase()}`
}

@@ -1,7 +1,6 @@
import { create } from 'zustand'
import type { FilterState, LogLevel, TimeRange, TriggerType } from '@/stores/logs/filters/types'

// Helper functions for URL synchronization
const getSearchParams = () => {
  if (typeof window === 'undefined') return new URLSearchParams()
  return new URLSearchParams(window.location.search)
@@ -87,7 +86,6 @@ const timeRangeToURL = (timeRange: TimeRange): string => {
}

export const useFilterStore = create<FilterState>((set, get) => ({
  logs: [],
  workspaceId: '',
  viewMode: 'logs',
  timeRange: DEFAULT_TIME_RANGE,
@@ -96,30 +94,14 @@ export const useFilterStore = create<FilterState>((set, get) => ({
  folderIds: [],
  searchQuery: '',
  triggers: [],
  loading: true,
  error: null,
  page: 1,
  hasMore: true,
  isFetchingMore: false,
  _isInitializing: false, // Internal flag to prevent URL sync during initialization

  setLogs: (logs, append = false) => {
    if (append) {
      const currentLogs = [...get().logs]
      const newLogs = [...currentLogs, ...logs]
      set({ logs: newLogs })
    } else {
      set({ logs, loading: false })
    }
  },

  setWorkspaceId: (workspaceId) => set({ workspaceId }),

  setViewMode: (viewMode) => set({ viewMode }),

  setTimeRange: (timeRange) => {
    set({ timeRange })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -127,7 +109,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({

  setLevel: (level) => {
    set({ level })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -135,7 +116,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({

  setWorkflowIds: (workflowIds) => {
    set({ workflowIds })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -152,7 +132,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({
    }

    set({ workflowIds: currentWorkflowIds })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -160,7 +139,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({

  setFolderIds: (folderIds) => {
    set({ folderIds })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -177,7 +155,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({
    }

    set({ folderIds: currentFolderIds })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -185,7 +162,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({

  setSearchQuery: (searchQuery) => {
    set({ searchQuery })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -193,7 +169,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({

  setTriggers: (triggers: TriggerType[]) => {
    set({ triggers })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
@@ -210,27 +185,12 @@ export const useFilterStore = create<FilterState>((set, get) => ({
    }

    set({ triggers: currentTriggers })
    get().resetPagination()
    if (!get()._isInitializing) {
      get().syncWithURL()
    }
  },

  setLoading: (loading) => set({ loading }),

  setError: (error) => set({ error }),

  setPage: (page) => set({ page }),

  setHasMore: (hasMore) => set({ hasMore }),

  setIsFetchingMore: (isFetchingMore) => set({ isFetchingMore }),

  resetPagination: () => set({ page: 1, hasMore: true }),

  // URL synchronization methods
  initializeFromURL: () => {
    // Set initialization flag to prevent URL sync during init
    set({ _isInitializing: true })

    const params = getSearchParams()
@@ -252,7 +212,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({
      _isInitializing: false, // Clear the flag after initialization
    })

    // Ensure URL reflects the initialized state
    get().syncWithURL()
  },

@@ -260,7 +219,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({
    const { timeRange, level, workflowIds, folderIds, triggers, searchQuery } = get()
    const params = new URLSearchParams()

    // Only add non-default values to keep URL clean
    if (timeRange !== DEFAULT_TIME_RANGE) {
      params.set('timeRange', timeRangeToURL(timeRange))
    }
@@ -287,81 +245,4 @@ export const useFilterStore = create<FilterState>((set, get) => ({

    updateURL(params)
  },

  // Build query parameters for server-side filtering
  buildQueryParams: (page: number, limit: number) => {
    const { workspaceId, timeRange, level, workflowIds, folderIds, searchQuery, triggers } = get()
    const params = new URLSearchParams()
    params.set('limit', limit.toString())
    params.set('offset', ((page - 1) * limit).toString())

    params.set('workspaceId', workspaceId)

    // Add level filter
    if (level !== 'all') {
      params.set('level', level)
    }

    // Add trigger filter
    if (triggers.length > 0) {
      params.set('triggers', triggers.join(','))
    }

    // Add workflow filter
    if (workflowIds.length > 0) {
      params.set('workflowIds', workflowIds.join(','))
    }

    // Add folder filter
    if (folderIds.length > 0) {
      params.set('folderIds', folderIds.join(','))
    }

    // Add time range filter
    if (timeRange !== 'All time') {
      const now = new Date()
      let startDate: Date

      switch (timeRange) {
        case 'Past 30 minutes':
          startDate = new Date(now.getTime() - 30 * 60 * 1000)
          break
        case 'Past hour':
          startDate = new Date(now.getTime() - 60 * 60 * 1000)
          break
        case 'Past 6 hours':
          startDate = new Date(now.getTime() - 6 * 60 * 60 * 1000)
          break
        case 'Past 12 hours':
          startDate = new Date(now.getTime() - 12 * 60 * 60 * 1000)
          break
        case 'Past 24 hours':
          startDate = new Date(now.getTime() - 24 * 60 * 60 * 1000)
          break
        case 'Past 3 days':
          startDate = new Date(now.getTime() - 3 * 24 * 60 * 60 * 1000)
          break
        case 'Past 7 days':
          startDate = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000)
          break
        case 'Past 14 days':
          startDate = new Date(now.getTime() - 14 * 24 * 60 * 60 * 1000)
          break
        case 'Past 30 days':
          startDate = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000)
          break
        default:
          startDate = new Date(0)
      }

      params.set('startDate', startDate.toISOString())
    }

    // Add search filter
    if (searchQuery.trim()) {
      params.set('search', searchQuery.trim())
    }

    return params.toString()
  },
}))
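To show how the URL-sync actions above are meant to be driven, here is a minimal component sketch; the component, its wiring, and the store import path are assumptions, while the actions themselves come from the diff:

'use client'

// Hypothetical usage sketch: hydrate filter state from the query string once
// on mount; afterwards each setter re-syncs the URL via syncWithURL().
import { useEffect } from 'react'
import { useFilterStore } from '@/stores/logs/filters/store'

export function LogsFilterBar() {
  const initializeFromURL = useFilterStore((s) => s.initializeFromURL)
  const setLevel = useFilterStore((s) => s.setLevel)

  useEffect(() => {
    initializeFromURL()
  }, [initializeFromURL])

  return <button onClick={() => setLevel('error')}>Errors only</button>
}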
@@ -167,9 +167,6 @@ export type LogLevel = 'error' | 'info' | 'all'
export type TriggerType = 'chat' | 'api' | 'webhook' | 'manual' | 'schedule' | 'all'

export interface FilterState {
  // Original logs from API
  logs: WorkflowLog[]

  // Workspace context
  workspaceId: string

@@ -184,20 +181,10 @@ export interface FilterState {
  searchQuery: string
  triggers: TriggerType[]

  // Loading state
  loading: boolean
  error: string | null

  // Pagination state
  page: number
  hasMore: boolean
  isFetchingMore: boolean

  // Internal state
  _isInitializing: boolean

  // Actions
  setLogs: (logs: WorkflowLog[], append?: boolean) => void
  setWorkspaceId: (workspaceId: string) => void
  setViewMode: (viewMode: 'logs' | 'dashboard') => void
  setTimeRange: (timeRange: TimeRange) => void
@@ -209,17 +196,8 @@ export interface FilterState {
  setSearchQuery: (query: string) => void
  setTriggers: (triggers: TriggerType[]) => void
  toggleTrigger: (trigger: TriggerType) => void
  setLoading: (loading: boolean) => void
  setError: (error: string | null) => void
  setPage: (page: number) => void
  setHasMore: (hasMore: boolean) => void
  setIsFetchingMore: (isFetchingMore: boolean) => void
  resetPagination: () => void

  // URL synchronization methods
  initializeFromURL: () => void
  syncWithURL: () => void

  // Build query parameters for server-side filtering
  buildQueryParams: (page: number, limit: number) => string
}
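Since the commit message says logs fetching moved to useInfiniteQuery, a sketch of how buildQueryParams could feed such a hook may help; the hook name, the /api/logs endpoint, and the hasMore response field are assumptions, not taken from the diff:

// Hypothetical infinite-logs hook built on the store's buildQueryParams.
import { useInfiniteQuery } from '@tanstack/react-query'
import { useFilterStore } from '@/stores/logs/filters/store'

const PAGE_SIZE = 50

export function useLogsInfinite() {
  const buildQueryParams = useFilterStore((s) => s.buildQueryParams)

  return useInfiniteQuery({
    // Keying on the page-1 params string makes the query refetch whenever
    // any filter changes, since every filter is encoded in the params.
    queryKey: ['logs', buildQueryParams(1, PAGE_SIZE)],
    queryFn: async ({ pageParam }) => {
      const res = await fetch(`/api/logs?${buildQueryParams(pageParam, PAGE_SIZE)}`)
      if (!res.ok) throw new Error('Failed to fetch logs')
      return res.json() // assumed shape: { data: WorkflowLog[], hasMore: boolean }
    },
    initialPageParam: 1,
    getNextPageParam: (lastPage, allPages) =>
      lastPage.hasMore ? allPages.length + 1 : undefined,
  })
}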
@@ -1,7 +1,6 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { generateCreativeWorkflowName } from '@/lib/naming'
import { withOptimisticUpdate } from '@/lib/utils'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { API_ENDPOINTS } from '@/stores/constants'
@@ -249,9 +248,16 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
      set({ isLoading: loading })
    },

    // Simple method to load workflows (replaces sync system)
    loadWorkflows: async (workspaceId?: string) => {
      await fetchWorkflowsFromDB(workspaceId)
    setWorkflows: (workflows: WorkflowMetadata[]) => {
      set({
        workflows: workflows.reduce(
          (acc, w) => {
            acc[w.id] = w
            return acc
          },
          {} as Record<string, WorkflowMetadata>
        ),
      })
    },

    // Switch to workspace - just clear state, let sidebar handle workflow loading
@@ -497,113 +503,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
      logger.info(`Switched to workflow ${id}`)
    },

    /**
     * Creates a new workflow with appropriate metadata and initial blocks
     * @param options - Optional configuration for workflow creation
     * @returns The ID of the newly created workflow
     */
    createWorkflow: async (options = {}) => {
      // Use provided workspace ID (must be provided since we no longer track active workspace)
      const workspaceId = options.workspaceId

      if (!workspaceId) {
        logger.error('Cannot create workflow without workspaceId')
        set({ error: 'Workspace ID is required to create a workflow' })
        throw new Error('Workspace ID is required to create a workflow')
      }

      logger.info(`Creating new workflow in workspace: ${workspaceId || 'none'}`)

      // Create the workflow on the server first to get the server-generated ID
      try {
        const response = await fetch('/api/workflows', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            name: options.name || generateCreativeWorkflowName(),
            description: options.description || 'New workflow',
            color: getNextWorkflowColor(),
            workspaceId,
            folderId: options.folderId || null,
          }),
        })

        if (!response.ok) {
          const errorData = await response.json()
          throw new Error(`Failed to create workflow: ${errorData.error || response.statusText}`)
        }

        const createdWorkflow = await response.json()
        const serverWorkflowId = createdWorkflow.id

        logger.info(`Successfully created workflow ${serverWorkflowId} on server`)

        // Generate workflow metadata with server-generated ID
        const newWorkflow: WorkflowMetadata = {
          id: serverWorkflowId,
          name: createdWorkflow.name,
          lastModified: new Date(),
          createdAt: new Date(),
          description: createdWorkflow.description,
          color: createdWorkflow.color,
          workspaceId,
          folderId: createdWorkflow.folderId,
        }

        // Add workflow to registry with server-generated ID
        set((state) => ({
          workflows: {
            ...state.workflows,
            [serverWorkflowId]: newWorkflow,
          },
          error: null,
        }))

        // Initialize subblock values to ensure they're available for sync
        const { workflowState, subBlockValues } = buildDefaultWorkflowArtifacts()

        useSubBlockStore.setState((state) => ({
          workflowValues: {
            ...state.workflowValues,
            [serverWorkflowId]: subBlockValues,
          },
        }))

        try {
          logger.info(`Persisting default Start block for new workflow ${serverWorkflowId}`)
          const response = await fetch(`/api/workflows/${serverWorkflowId}/state`, {
            method: 'PUT',
            headers: {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify(workflowState),
          })

          if (!response.ok) {
            logger.error('Failed to persist default Start block:', await response.text())
          } else {
            logger.info('Successfully persisted default Start block')
          }
        } catch (error) {
          logger.error('Error persisting default Start block:', error)
        }

        // Don't set as active workflow here - let the navigation/URL change handle that
        // This prevents race conditions and flickering
        logger.info(
          `Created new workflow with ID ${serverWorkflowId} in workspace ${workspaceId || 'none'}`
        )

        return serverWorkflowId
      } catch (error) {
        logger.error(`Failed to create new workflow:`, error)
        set({
          error: `Failed to create workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
        })
        throw error
      }
    },

    /**
     * Duplicates an existing workflow
     */
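The commit message also mentions React Query hooks for workflow operations. A plausible sketch wraps createWorkflow in a mutation so callers get pending/error state plus cache invalidation; the hook name, store import path, and query key below are assumptions:

// Hypothetical mutation hook around the registry's createWorkflow action.
import { useMutation, useQueryClient } from '@tanstack/react-query'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

export function useCreateWorkflow(workspaceId: string) {
  const queryClient = useQueryClient()
  const createWorkflow = useWorkflowRegistry((s) => s.createWorkflow)

  return useMutation({
    mutationFn: (options?: { name?: string; folderId?: string | null }) =>
      createWorkflow({ workspaceId, ...options }),
    onSuccess: () => {
      // Refetch whichever query renders the workspace's workflow list.
      queryClient.invalidateQueries({ queryKey: ['workflows', workspaceId] })
    },
  })
}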
@@ -26,18 +26,11 @@ export interface WorkflowRegistryState {

export interface WorkflowRegistryActions {
  setLoading: (loading: boolean) => void
  setWorkflows: (workflows: WorkflowMetadata[]) => void
  setActiveWorkflow: (id: string) => Promise<void>
  switchToWorkspace: (id: string) => Promise<void>
  loadWorkflows: (workspaceId?: string) => Promise<void>
  removeWorkflow: (id: string) => Promise<void>
  updateWorkflow: (id: string, metadata: Partial<WorkflowMetadata>) => Promise<void>
  createWorkflow: (options?: {
    isInitial?: boolean
    name?: string
    description?: string
    workspaceId?: string
    folderId?: string | null
  }) => Promise<string>
  duplicateWorkflow: (sourceId: string) => Promise<string | null>
  getWorkflowDeploymentStatus: (workflowId: string | null) => DeploymentStatus | null
  setDeploymentStatus: (
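Given the new setWorkflows action above, a hypothetical glue hook could fetch the workflow list with React Query and mirror it into the registry; the query key, fetch URL, and import paths below are assumptions:

// Hypothetical sync hook: React Query owns fetching, zustand mirrors the result.
import { useEffect } from 'react'
import { useQuery } from '@tanstack/react-query'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'

export function useWorkflowsSync(workspaceId: string) {
  const setWorkflows = useWorkflowRegistry((s) => s.setWorkflows)

  const query = useQuery({
    queryKey: ['workflows', workspaceId],
    queryFn: async (): Promise<WorkflowMetadata[]> => {
      const res = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
      if (!res.ok) throw new Error('Failed to load workflows')
      return res.json()
    },
  })

  useEffect(() => {
    if (query.data) setWorkflows(query.data)
  }, [query.data, setWorkflows])

  return query
}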
@@ -76,3 +76,254 @@ export function getNextWorkflowColor(): string {
  // Simply return a random color from the available colors
  return WORKFLOW_COLORS[Math.floor(Math.random() * WORKFLOW_COLORS.length)]
}

// Adjectives and nouns for creative workflow names
const ADJECTIVES = [
  'Blazing',
  'Crystal',
  'Golden',
  'Silver',
  'Mystic',
  'Cosmic',
  'Electric',
  'Frozen',
  'Burning',
  'Shining',
  'Dancing',
  'Flying',
  'Roaring',
  'Whispering',
  'Glowing',
  'Sparkling',
  'Thunder',
  'Lightning',
  'Storm',
  'Ocean',
  'Mountain',
  'Forest',
  'Desert',
  'Arctic',
  'Tropical',
  'Midnight',
  'Dawn',
  'Sunset',
  'Rainbow',
  'Diamond',
  'Ruby',
  'Emerald',
  'Sapphire',
  'Pearl',
  'Jade',
  'Amber',
  'Coral',
  'Ivory',
  'Obsidian',
  'Marble',
  'Velvet',
  'Silk',
  'Satin',
  'Linen',
  'Cotton',
  'Wool',
  'Cashmere',
  'Denim',
  'Neon',
  'Pastel',
  'Vibrant',
  'Muted',
  'Bold',
  'Subtle',
  'Bright',
  'Dark',
  'Ancient',
  'Modern',
  'Eternal',
  'Swift',
  'Radiant',
  'Quantum',
  'Stellar',
  'Lunar',
  'Solar',
  'Celestial',
  'Ethereal',
  'Phantom',
  'Shadow',
  'Crimson',
  'Azure',
  'Violet',
  'Scarlet',
  'Magenta',
  'Turquoise',
  'Indigo',
  'Noble',
  'Regal',
  'Imperial',
  'Royal',
  'Supreme',
  'Prime',
  'Elite',
  'Ultra',
  'Mega',
  'Hyper',
  'Super',
  'Neo',
  'Cyber',
  'Digital',
  'Virtual',
  'Sonic',
  'Atomic',
  'Nuclear',
  'Laser',
  'Plasma',
  'Magnetic',
]

const NOUNS = [
  'Phoenix',
  'Dragon',
  'Eagle',
  'Wolf',
  'Lion',
  'Tiger',
  'Panther',
  'Falcon',
  'Hawk',
  'Raven',
  'Swan',
  'Dove',
  'Butterfly',
  'Firefly',
  'Dragonfly',
  'Hummingbird',
  'Galaxy',
  'Nebula',
  'Comet',
  'Meteor',
  'Star',
  'Moon',
  'Sun',
  'Planet',
  'Asteroid',
  'Constellation',
  'Aurora',
  'Eclipse',
  'Solstice',
  'Equinox',
  'Horizon',
  'Zenith',
  'Castle',
  'Tower',
  'Bridge',
  'Garden',
  'Fountain',
  'Palace',
  'Temple',
  'Cathedral',
  'Lighthouse',
  'Windmill',
  'Waterfall',
  'Canyon',
  'Valley',
  'Peak',
  'Ridge',
  'Cliff',
  'Ocean',
  'River',
  'Lake',
  'Stream',
  'Pond',
  'Bay',
  'Cove',
  'Harbor',
  'Island',
  'Peninsula',
  'Archipelago',
  'Atoll',
  'Reef',
  'Lagoon',
  'Fjord',
  'Delta',
  'Cake',
  'Cookie',
  'Muffin',
  'Cupcake',
  'Pie',
  'Tart',
  'Brownie',
  'Donut',
  'Pancake',
  'Waffle',
  'Croissant',
  'Bagel',
  'Pretzel',
  'Biscuit',
  'Scone',
  'Crumpet',
  'Thunder',
  'Blizzard',
  'Tornado',
  'Hurricane',
  'Tsunami',
  'Volcano',
  'Glacier',
  'Avalanche',
  'Vortex',
  'Tempest',
  'Maelstrom',
  'Whirlwind',
  'Cyclone',
  'Typhoon',
  'Monsoon',
  'Anvil',
  'Hammer',
  'Forge',
  'Blade',
  'Sword',
  'Shield',
  'Arrow',
  'Spear',
  'Crown',
  'Throne',
  'Scepter',
  'Orb',
  'Gem',
  'Crystal',
  'Prism',
  'Spectrum',
  'Beacon',
  'Signal',
  'Pulse',
  'Wave',
  'Surge',
  'Tide',
  'Current',
  'Flow',
  'Circuit',
  'Node',
  'Core',
  'Matrix',
  'Network',
  'System',
  'Engine',
  'Reactor',
  'Generator',
  'Dynamo',
  'Catalyst',
  'Nexus',
  'Portal',
  'Gateway',
  'Passage',
  'Conduit',
  'Channel',
]
/**
 * Generates a creative workflow name using random adjectives and nouns
 * @returns A creative workflow name like "blazing-phoenix" or "crystal-dragon"
 */
export function generateCreativeWorkflowName(): string {
  const adjective = ADJECTIVES[Math.floor(Math.random() * ADJECTIVES.length)]
  const noun = NOUNS[Math.floor(Math.random() * NOUNS.length)]
  return `${adjective.toLowerCase()}-${noun.toLowerCase()}`
}