Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-09 15:07:55 -05:00)
fix(logs): added indexes to speed up logs loading time, modified to only display logs for current workspace (#773)
* Added indexes to speed up logs loading time; the logs API now only displays logs for the current workspace.
* Removed the per-user check and rely on source-of-truth workspace permissions to decide which logs to display.
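For context on the indexes, the hot path on the logs page is fetching a page of logs for a set of workspace-visible workflows, newest first; the new composite index on (workflow_id, created_at) covers both the filter and the sort. A minimal drizzle sketch of that query shape follows (the db and workflowLogs import paths are assumptions, and this is not the exact route code):

import { desc, inArray } from 'drizzle-orm'
import { db } from '@/db' // assumed path
import { workflowLogs } from '@/db/schema' // assumed path

// Page of logs for the workflows visible in a workspace, newest first.
// The workflow_logs_workflow_created_idx (workflow_id, created_at) index lets
// Postgres satisfy both the filter and the ordering without scanning the table.
async function fetchRecentLogs(workflowIds: string[], limit = 50, offset = 0) {
  return db
    .select()
    .from(workflowLogs)
    .where(inArray(workflowLogs.workflowId, workflowIds))
    .orderBy(desc(workflowLogs.createdAt))
    .limit(limit)
    .offset(offset)
}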
@@ -56,6 +56,7 @@ const QueryParamsSchema = z.object({
   startDate: z.string().optional(),
   endDate: z.string().optional(),
   search: z.string().optional(),
+  workspaceId: z.string(),
 })
 
 export async function GET(request: NextRequest) {
@@ -74,7 +75,12 @@ export async function GET(request: NextRequest) {
     const { searchParams } = new URL(request.url)
     const params = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
 
+    // Get workflows that user can access through direct ownership OR workspace permissions
+    const workflowConditions = and(
+      eq(workflow.workspaceId, params.workspaceId),
+      eq(permissions.userId, userId),
+      eq(permissions.entityType, 'workspace')
+    )
 
     const userWorkflows = await db
       .select({ id: workflow.id, folderId: workflow.folderId })
       .from(workflow)
@@ -86,12 +92,7 @@ export async function GET(request: NextRequest) {
           eq(permissions.userId, userId)
         )
       )
-      .where(
-        or(
-          eq(workflow.userId, userId),
-          and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace'))
-        )
-      )
+      .where(workflowConditions)
 
     const userWorkflowIds = userWorkflows.map((w) => w.id)
 
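Since workspaceId is now a required query parameter, every caller has to pass it. A rough client-side sketch of a call to this endpoint (the /api/logs path and parameter names come from the tests below; the response handling is an assumption):

// Hypothetical caller: load the first page of logs for one workspace.
async function loadLogs(workspaceId: string) {
  const query = new URLSearchParams({
    workspaceId, // now required by QueryParamsSchema
    includeWorkflow: 'true',
    limit: '50',
    offset: '0',
  })

  const response = await fetch(`/api/logs?${query.toString()}`)
  if (!response.ok) {
    throw new Error(`Failed to load logs: ${response.status}`)
  }
  return response.json()
}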
@@ -4,7 +4,6 @@
  * @vitest-environment node
  */
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-import { createMockRequest } from '@/app/api/__test-utils__/utils'
 
 describe('Workflow Logs API Route', () => {
   const mockWorkflowLogs = [
@@ -54,6 +53,7 @@ describe('Workflow Logs API Route', () => {
     {
       id: 'workflow-1',
       userId: 'user-123',
+      workspaceId: 'workspace-123',
       folderId: 'folder-1',
       name: 'Test Workflow 1',
       color: '#3972F6',
@@ -65,6 +65,7 @@ describe('Workflow Logs API Route', () => {
     {
       id: 'workflow-2',
       userId: 'user-123',
+      workspaceId: 'workspace-123',
       folderId: 'folder-2',
       name: 'Test Workflow 2',
       color: '#FF6B6B',
@@ -76,6 +77,7 @@ describe('Workflow Logs API Route', () => {
     {
       id: 'workflow-3',
       userId: 'user-123',
+      workspaceId: 'workspace-123',
       folderId: null,
       name: 'Test Workflow 3',
       color: '#22C55E',
@@ -192,6 +194,7 @@ describe('Workflow Logs API Route', () => {
       workflow: {
         id: 'workflow.id',
         userId: 'workflow.userId',
+        workspaceId: 'workflow.workspaceId',
         name: 'workflow.name',
         color: 'workflow.color',
         description: 'workflow.description',
@@ -212,10 +215,11 @@ describe('Workflow Logs API Route', () => {
   it('should return logs successfully with default parameters', async () => {
     setupDatabaseMock()
 
-    const req = createMockRequest('GET')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123')
+    const req = new Request(url.toString())
 
     const { GET } = await import('./route')
-    const response = await GET(req)
+    const response = await GET(req as any)
     const data = await response.json()
 
     expect(response.status).toBe(200)
@@ -231,7 +235,9 @@ describe('Workflow Logs API Route', () => {
   it('should include workflow data when includeWorkflow=true', async () => {
     setupDatabaseMock()
 
-    const url = new URL('http://localhost:3000/api/logs?includeWorkflow=true')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&includeWorkflow=true'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -248,7 +254,7 @@ describe('Workflow Logs API Route', () => {
     const errorLogs = mockWorkflowLogs.filter((log) => log.level === 'error')
     setupDatabaseMock({ logs: errorLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?level=error')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123&level=error')
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -264,7 +270,9 @@ describe('Workflow Logs API Route', () => {
     const workflow1Logs = mockWorkflowLogs.filter((log) => log.workflowId === 'workflow-1')
     setupDatabaseMock({ logs: workflow1Logs })
 
-    const url = new URL('http://localhost:3000/api/logs?workflowIds=workflow-1')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&workflowIds=workflow-1'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -283,7 +291,9 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: filteredLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?workflowIds=workflow-1,workflow-2')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&workflowIds=workflow-1,workflow-2'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -301,7 +311,9 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: filteredLogs })
 
-    const url = new URL(`http://localhost:3000/api/logs?startDate=${startDate}`)
+    const url = new URL(
+      `http://localhost:3000/api/logs?workspaceId=workspace-123&startDate=${startDate}`
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -318,7 +330,7 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: searchLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?search=failed')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123&search=failed')
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -334,7 +346,9 @@ describe('Workflow Logs API Route', () => {
     const paginatedLogs = mockWorkflowLogs.slice(1, 3)
     setupDatabaseMock({ logs: paginatedLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?limit=2&offset=1')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&limit=2&offset=1'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -352,10 +366,11 @@ describe('Workflow Logs API Route', () => {
   it('should return empty array when user has no workflows', async () => {
     setupDatabaseMock({ userWorkflows: [], logs: [], workflows: [] })
 
-    const req = createMockRequest('GET')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123')
+    const req = new Request(url.toString())
 
     const { GET } = await import('./route')
-    const response = await GET(req)
+    const response = await GET(req as any)
     const data = await response.json()
 
     expect(response.status).toBe(200)
@@ -369,7 +384,9 @@ describe('Workflow Logs API Route', () => {
       userWorkflows: mockWorkflows.filter((w) => w.id !== 'unauthorized-workflow'),
     })
 
-    const url = new URL('http://localhost:3000/api/logs?workflowIds=unauthorized-workflow')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&workflowIds=unauthorized-workflow'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -388,10 +405,11 @@ describe('Workflow Logs API Route', () => {
 
     setupDatabaseMock()
 
-    const req = createMockRequest('GET')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123')
+    const req = new Request(url.toString())
 
     const { GET } = await import('./route')
-    const response = await GET(req)
+    const response = await GET(req as any)
     const data = await response.json()
 
     expect(response.status).toBe(401)
@@ -401,7 +419,7 @@ describe('Workflow Logs API Route', () => {
   it('should validate query parameters', async () => {
     setupDatabaseMock()
 
-    const url = new URL('http://localhost:3000/api/logs?limit=invalid')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123&limit=invalid')
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -416,10 +434,11 @@ describe('Workflow Logs API Route', () => {
   it('should handle database errors gracefully', async () => {
     setupDatabaseMock({ throwError: true })
 
-    const req = createMockRequest('GET')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123')
+    const req = new Request(url.toString())
 
     const { GET } = await import('./route')
-    const response = await GET(req)
+    const response = await GET(req as any)
     const data = await response.json()
 
     expect(response.status).toBe(500)
@@ -436,7 +455,7 @@ describe('Workflow Logs API Route', () => {
     setupDatabaseMock({ logs: filteredLogs })
 
     const url = new URL(
-      'http://localhost:3000/api/logs?level=info&workflowIds=workflow-1&search=started'
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&level=info&workflowIds=workflow-1&search=started'
     )
     const req = new Request(url.toString())
 
@@ -458,7 +477,9 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: filteredLogs })
 
-    const url = new URL(`http://localhost:3000/api/logs?endDate=${endDate}`)
+    const url = new URL(
+      `http://localhost:3000/api/logs?workspaceId=workspace-123&endDate=${endDate}`
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -472,7 +493,9 @@ describe('Workflow Logs API Route', () => {
   it('should handle large offset values', async () => {
     setupDatabaseMock({ logs: [] })
 
-    const url = new URL('http://localhost:3000/api/logs?limit=10&offset=1000')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&limit=10&offset=1000'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -489,7 +512,7 @@ describe('Workflow Logs API Route', () => {
     const searchLogs = mockWorkflowLogs.filter((log) => log.executionId?.includes('exec-1'))
     setupDatabaseMock({ logs: searchLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?search=exec-1')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123&search=exec-1')
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -505,7 +528,7 @@ describe('Workflow Logs API Route', () => {
     const apiLogs = mockWorkflowLogs.filter((log) => log.trigger === 'api')
     setupDatabaseMock({ logs: apiLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?triggers=api')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123&triggers=api')
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -523,7 +546,9 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: manualAndApiLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?triggers=manual,api')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&triggers=manual,api'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -542,7 +567,7 @@ describe('Workflow Logs API Route', () => {
     setupDatabaseMock({ logs: filteredLogs })
 
     const url = new URL(
-      'http://localhost:3000/api/logs?triggers=manual&level=info&workflowIds=workflow-1'
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&triggers=manual&level=info&workflowIds=workflow-1'
     )
     const req = new Request(url.toString())
 
@@ -561,7 +586,9 @@ describe('Workflow Logs API Route', () => {
     const folder1Logs = mockWorkflowLogs.filter((log) => log.workflowId === 'workflow-1')
     setupDatabaseMock({ logs: folder1Logs })
 
-    const url = new URL('http://localhost:3000/api/logs?folderIds=folder-1')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=folder-1'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -579,7 +606,9 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: folder1And2Logs })
 
-    const url = new URL('http://localhost:3000/api/logs?folderIds=folder-1,folder-2')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=folder-1,folder-2'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -597,7 +626,7 @@ describe('Workflow Logs API Route', () => {
     const rootLogs = mockWorkflowLogs.filter((log) => log.workflowId === 'workflow-3')
     setupDatabaseMock({ logs: rootLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?folderIds=root')
+    const url = new URL('http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=root')
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -616,7 +645,9 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: rootAndFolder1Logs })
 
-    const url = new URL('http://localhost:3000/api/logs?folderIds=root,folder-1')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=root,folder-1'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -636,7 +667,7 @@ describe('Workflow Logs API Route', () => {
     setupDatabaseMock({ logs: filteredLogs })
 
     const url = new URL(
-      'http://localhost:3000/api/logs?folderIds=folder-1&workflowIds=workflow-1'
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=folder-1&workflowIds=workflow-1'
    )
     const req = new Request(url.toString())
 
@@ -654,7 +685,7 @@ describe('Workflow Logs API Route', () => {
     setupDatabaseMock({ logs: [] })
 
     const url = new URL(
-      'http://localhost:3000/api/logs?folderIds=folder-1&workflowIds=workflow-2'
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=folder-1&workflowIds=workflow-2'
    )
     const req = new Request(url.toString())
 
@@ -673,7 +704,9 @@ describe('Workflow Logs API Route', () => {
     )
     setupDatabaseMock({ logs: filteredLogs })
 
-    const url = new URL('http://localhost:3000/api/logs?folderIds=folder-1&level=info')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=folder-1&level=info'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -689,7 +722,9 @@ describe('Workflow Logs API Route', () => {
   it('should return empty result when no workflows match folder filter', async () => {
     setupDatabaseMock({ logs: [] })
 
-    const url = new URL('http://localhost:3000/api/logs?folderIds=non-existent-folder')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=non-existent-folder'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
@@ -705,7 +740,9 @@ describe('Workflow Logs API Route', () => {
     const folder1Logs = mockWorkflowLogs.filter((log) => log.workflowId === 'workflow-1')
     setupDatabaseMock({ logs: folder1Logs })
 
-    const url = new URL('http://localhost:3000/api/logs?folderIds=folder-1&includeWorkflow=true')
+    const url = new URL(
+      'http://localhost:3000/api/logs?workspaceId=workspace-123&folderIds=folder-1&includeWorkflow=true'
+    )
     const req = new Request(url.toString())
 
     const { GET } = await import('./route')
 
@@ -22,6 +22,7 @@ const QueryParamsSchema = z.object({
   startDate: z.string().optional(),
   endDate: z.string().optional(),
   search: z.string().optional(),
+  workspaceId: z.string(),
 })
 
 // Used to retrieve and display workflow logs
@@ -41,10 +42,15 @@ export async function GET(request: NextRequest) {
     const { searchParams } = new URL(request.url)
     const params = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
 
+    const workflowConditions = and(
+      eq(workflow.workspaceId, params.workspaceId),
+      eq(workflow.userId, userId)
+    )
+
     const userWorkflows = await db
       .select({ id: workflow.id, folderId: workflow.folderId })
       .from(workflow)
-      .where(eq(workflow.userId, userId))
+      .where(workflowConditions)
 
     const userWorkflowIds = userWorkflows.map((w) => w.id)
 
@@ -35,7 +35,7 @@ export function ControlBar() {
 
   const fetchLogs = async () => {
     try {
-      const queryParams = buildQueryParams(1, 50) // Get first 50 logs for refresh
+      const queryParams = buildQueryParams(1, 50)
       const response = await fetch(`/api/logs/enhanced?${queryParams}`)
 
       if (!response.ok) {
@@ -2,6 +2,7 @@
 
 import { useCallback, useEffect, useRef, useState } from 'react'
 import { AlertCircle, Info, Loader2 } from 'lucide-react'
+import { useParams } from 'next/navigation'
 import { createLogger } from '@/lib/logs/console-logger'
 import { ControlBar } from './components/control-bar/control-bar'
 import { Filters } from './components/filters/filters'
@@ -26,6 +27,9 @@ const selectedRowAnimation = `
 `
 
 export default function Logs() {
+  const params = useParams()
+  const workspaceId = params.workspaceId as string
+
   const {
     logs,
     loading,
@@ -33,6 +37,7 @@ export default function Logs() {
     setLogs,
     setLoading,
     setError,
+    setWorkspaceId,
     page,
     setPage,
     hasMore,
@@ -48,6 +53,11 @@ export default function Logs() {
     triggers,
   } = useFilterStore()
 
+  // Set workspace ID in store when component mounts or workspaceId changes
+  useEffect(() => {
+    setWorkspaceId(workspaceId)
+  }, [workspaceId, setWorkspaceId])
+
   const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
   const [selectedLogIndex, setSelectedLogIndex] = useState<number>(-1)
   const [isSidebarOpen, setIsSidebarOpen] = useState(false)
@@ -3,6 +3,7 @@ import type { FilterState, TriggerType } from './types'
 
 export const useFilterStore = create<FilterState>((set, get) => ({
   logs: [],
+  workspaceId: '',
   timeRange: 'All time',
   level: 'all',
   workflowIds: [],
@@ -25,6 +26,8 @@ export const useFilterStore = create<FilterState>((set, get) => ({
     }
   },
 
+  setWorkspaceId: (workspaceId) => set({ workspaceId }),
+
   setTimeRange: (timeRange) => {
     set({ timeRange })
     get().resetPagination()
@@ -111,13 +114,15 @@ export const useFilterStore = create<FilterState>((set, get) => ({
 
   // Build query parameters for server-side filtering
   buildQueryParams: (page: number, limit: number) => {
-    const { timeRange, level, workflowIds, folderIds, searchQuery, triggers } = get()
+    const { workspaceId, timeRange, level, workflowIds, folderIds, searchQuery, triggers } = get()
     const params = new URLSearchParams()
 
     params.set('includeWorkflow', 'true')
     params.set('limit', limit.toString())
    params.set('offset', ((page - 1) * limit).toString())
 
+    params.set('workspaceId', workspaceId)
+
     // Add level filter
     if (level !== 'all') {
       params.set('level', level)
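For illustration, a standalone sketch of the query string buildQueryParams now produces for page 1 with a limit of 50, mirroring the params.set calls above (the workspace id is a hypothetical value):

// Mirrors the store logic above, outside the component tree.
const illustrationParams = new URLSearchParams()
illustrationParams.set('includeWorkflow', 'true')
illustrationParams.set('limit', '50')
illustrationParams.set('offset', '0')
illustrationParams.set('workspaceId', 'workspace-123') // hypothetical value

// The control bar then requests: /api/logs/enhanced?<query string>
console.log(`/api/logs/enhanced?${illustrationParams.toString()}`)
// -> includeWorkflow=true&limit=50&offset=0&workspaceId=workspace-123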
@@ -120,6 +120,9 @@ export interface FilterState {
   // Original logs from API
   logs: WorkflowLog[]
 
+  // Workspace context
+  workspaceId: string
+
   // Filter states
   timeRange: TimeRange
   level: LogLevel
@@ -139,6 +142,7 @@ export interface FilterState {
 
   // Actions
   setLogs: (logs: WorkflowLog[], append?: boolean) => void
+  setWorkspaceId: (workspaceId: string) => void
   setTimeRange: (timeRange: TimeRange) => void
   setLevel: (level: LogLevel) => void
   setWorkflowIds: (workflowIds: string[]) => void
apps/sim/db/migrations/0059_odd_may_parker.sql (new file, 5 lines)
@@ -0,0 +1,5 @@
+CREATE INDEX "workflow_user_id_idx" ON "workflow" USING btree ("user_id");--> statement-breakpoint
+CREATE INDEX "workflow_workspace_id_idx" ON "workflow" USING btree ("workspace_id");--> statement-breakpoint
+CREATE INDEX "workflow_user_workspace_idx" ON "workflow" USING btree ("user_id","workspace_id");--> statement-breakpoint
+CREATE INDEX "workflow_logs_workflow_id_idx" ON "workflow_logs" USING btree ("workflow_id");--> statement-breakpoint
+CREATE INDEX "workflow_logs_workflow_created_idx" ON "workflow_logs" USING btree ("workflow_id","created_at");
apps/sim/db/migrations/meta/0059_snapshot.json (new file, 5954 lines): file diff suppressed because it is too large
@@ -407,6 +407,13 @@
       "when": 1753211027120,
       "tag": "0058_clean_shiva",
       "breakpoints": true
-    }
+    },
+    {
+      "idx": 59,
+      "version": "7",
+      "when": 1753310161586,
+      "tag": "0059_odd_may_parker",
+      "breakpoints": true
+    }
   ]
 }
@@ -108,31 +108,39 @@ export const workflowFolder = pgTable(
   })
 )
 
-export const workflow = pgTable('workflow', {
-  id: text('id').primaryKey(),
-  userId: text('user_id')
-    .notNull()
-    .references(() => user.id, { onDelete: 'cascade' }),
-  workspaceId: text('workspace_id').references(() => workspace.id, { onDelete: 'cascade' }),
-  folderId: text('folder_id').references(() => workflowFolder.id, { onDelete: 'set null' }),
-  name: text('name').notNull(),
-  description: text('description'),
-  // DEPRECATED: Use normalized tables (workflow_blocks, workflow_edges, workflow_subflows) instead
-  state: json('state').notNull(),
-  color: text('color').notNull().default('#3972F6'),
-  lastSynced: timestamp('last_synced').notNull(),
-  createdAt: timestamp('created_at').notNull(),
-  updatedAt: timestamp('updated_at').notNull(),
-  isDeployed: boolean('is_deployed').notNull().default(false),
-  deployedState: json('deployed_state'),
-  deployedAt: timestamp('deployed_at'),
-  collaborators: json('collaborators').notNull().default('[]'),
-  runCount: integer('run_count').notNull().default(0),
-  lastRunAt: timestamp('last_run_at'),
-  variables: json('variables').default('{}'),
-  isPublished: boolean('is_published').notNull().default(false),
-  marketplaceData: json('marketplace_data'),
-})
+export const workflow = pgTable(
+  'workflow',
+  {
+    id: text('id').primaryKey(),
+    userId: text('user_id')
+      .notNull()
+      .references(() => user.id, { onDelete: 'cascade' }),
+    workspaceId: text('workspace_id').references(() => workspace.id, { onDelete: 'cascade' }),
+    folderId: text('folder_id').references(() => workflowFolder.id, { onDelete: 'set null' }),
+    name: text('name').notNull(),
+    description: text('description'),
+    // DEPRECATED: Use normalized tables (workflow_blocks, workflow_edges, workflow_subflows) instead
+    state: json('state').notNull(),
+    color: text('color').notNull().default('#3972F6'),
+    lastSynced: timestamp('last_synced').notNull(),
+    createdAt: timestamp('created_at').notNull(),
+    updatedAt: timestamp('updated_at').notNull(),
+    isDeployed: boolean('is_deployed').notNull().default(false),
+    deployedState: json('deployed_state'),
+    deployedAt: timestamp('deployed_at'),
+    collaborators: json('collaborators').notNull().default('[]'),
+    runCount: integer('run_count').notNull().default(0),
+    lastRunAt: timestamp('last_run_at'),
+    variables: json('variables').default('{}'),
+    isPublished: boolean('is_published').notNull().default(false),
+    marketplaceData: json('marketplace_data'),
+  },
+  (table) => ({
+    userIdIdx: index('workflow_user_id_idx').on(table.userId),
+    workspaceIdIdx: index('workflow_workspace_id_idx').on(table.workspaceId),
+    userWorkspaceIdx: index('workflow_user_workspace_idx').on(table.userId, table.workspaceId),
+  })
+)
 
 export const workflowBlocks = pgTable(
   'workflow_blocks',
@@ -237,19 +245,29 @@ export const waitlist = pgTable('waitlist', {
   updatedAt: timestamp('updated_at').notNull().defaultNow(),
 })
 
-export const workflowLogs = pgTable('workflow_logs', {
-  id: text('id').primaryKey(),
-  workflowId: text('workflow_id')
-    .notNull()
-    .references(() => workflow.id, { onDelete: 'cascade' }),
-  executionId: text('execution_id'),
-  level: text('level').notNull(), // "info", "error", etc.
-  message: text('message').notNull(),
-  duration: text('duration'), // Store as text to allow 'NA' for errors
-  trigger: text('trigger'), // "api", "schedule", "manual"
-  createdAt: timestamp('created_at').notNull().defaultNow(),
-  metadata: json('metadata'),
-})
+export const workflowLogs = pgTable(
+  'workflow_logs',
+  {
+    id: text('id').primaryKey(),
+    workflowId: text('workflow_id')
+      .notNull()
+      .references(() => workflow.id, { onDelete: 'cascade' }),
+    executionId: text('execution_id'),
+    level: text('level').notNull(), // "info", "error", etc.
+    message: text('message').notNull(),
+    duration: text('duration'), // Store as text to allow 'NA' for errors
+    trigger: text('trigger'), // "api", "schedule", "manual"
+    createdAt: timestamp('created_at').notNull().defaultNow(),
+    metadata: json('metadata'),
+  },
+  (table) => ({
+    workflowIdIdx: index('workflow_logs_workflow_id_idx').on(table.workflowId),
+    workflowCreatedIdx: index('workflow_logs_workflow_created_idx').on(
+      table.workflowId,
+      table.createdAt
+    ),
+  })
+)
 
 export const workflowExecutionSnapshots = pgTable(
   'workflow_execution_snapshots',
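Note that the index() helper used in these table definitions is exported from drizzle-orm/pg-core alongside pgTable; the matching import change is not visible in this excerpt, but it would look roughly like the following (the exact import list in schema.ts is an assumption):

// Assumed shape of the schema file's pg-core import, extended with `index`.
import { boolean, index, integer, json, pgTable, text, timestamp } from 'drizzle-orm/pg-core'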