v0.2.5: feat, improvement, fix (#595) (#603)

* feat(function): added more granular error logs for function execution for easier debugging (#593)

* added more granular error logs for function execution

* added tests

* fixed syntax error reporting

* feat(models): added temp controls for gpt-4.1 family of models (#594)

* improvement(knowledge-upload): create and upload document to KB (#579)

* improvement: added knowledge upload

* improvement: added greptile comments (#579)

* improvement: changed text to doc (#579)

* improvement: removed comment (#579)

* added input validation, tested persistence of KB selector

* update docs

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
Co-authored-by: Waleed Latif <walif6@gmail.com>

* fix(remove workflow.state usage): no more usage of deprecated state column in any routes (#586)

* fix(remove workflow.state usage): no more usage of deprecated state col in routes

* fix lint

* fix chat route to only use deployed state

* fix lint

* better typing

* remove useless logs

* fix lint

* restore workflow handler file

* removed all other usages of deprecated 'state' column from workflows table, updated tests

---------

Co-authored-by: Vikhyath Mondreti <vikhyathmondreti@Vikhyaths-MacBook-Air.local>
Co-authored-by: Waleed Latif <walif6@gmail.com>

* fix(doc-selector-kb): enable doc selector when kb is selected (#596)

Co-authored-by: Vikhyath Mondreti <vikhyathmondreti@vikhyaths-air.lan>

* fix(unload): remove beforeunload warning since we communicate via wss (#597)

* fix(executor): fix dependency resolution, allow blocks with multiple inputs to execute (#598)

* feat(billing): added migrations for usage-based billing (#601)

* feat(billing): added migrations for usage-based billing

* lint

* lint

* feat(logging): add new schemas + types for new logging system (#599)

* feat(logging): add new schemas + types for logging

* fix lint

* update migration

* fix lint

* Remove migration 48 to avoid conflict with staging

* fixed merge conflict

* fix lint

---------

Co-authored-by: Vikhyath Mondreti <vikhyathmondreti@Vikhyaths-Air.attlocal.net>

---------

Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com>
Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathmondreti@Vikhyaths-MacBook-Air.local>
Co-authored-by: Vikhyath Mondreti <vikhyathmondreti@vikhyaths-air.lan>
Co-authored-by: Vikhyath Mondreti <vikhyathmondreti@Vikhyaths-Air.attlocal.net>
This commit is contained in:
Waleed Latif
2025-07-02 08:40:41 -07:00
committed by GitHub
parent 1604ce4d7c
commit 3b982533d1
13 changed files with 9355 additions and 102 deletions

View File

@@ -1,6 +1,6 @@
'use client'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useCallback, useEffect, useState } from 'react'
import { Check, ChevronDown, FileText } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
@@ -13,7 +13,6 @@ import {
} from '@/components/ui/command'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import type { SubBlockConfig } from '@/blocks/types'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
interface DocumentData {
@@ -51,19 +50,16 @@ export function DocumentSelector({
isPreview = false,
previewValue,
}: DocumentSelectorProps) {
const { getValue } = useSubBlockStore()
const [documents, setDocuments] = useState<DocumentData[]>([])
const [error, setError] = useState<string | null>(null)
const [open, setOpen] = useState(false)
const [selectedDocument, setSelectedDocument] = useState<DocumentData | null>(null)
const [initialFetchDone, setInitialFetchDone] = useState(false)
// Use the proper hook to get the current value and setter
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
// Get the knowledge base ID from the same block's knowledgeBaseId subblock - memoize to prevent re-renders
const knowledgeBaseId = useMemo(() => getValue(blockId, 'knowledgeBaseId'), [getValue, blockId])
// Get the knowledge base ID from the same block's knowledgeBaseId subblock
const [knowledgeBaseId] = useSubBlockValue(blockId, 'knowledgeBaseId')
// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue
@@ -73,7 +69,6 @@ export function DocumentSelector({
if (!knowledgeBaseId) {
setDocuments([])
setError('No knowledge base selected')
setInitialFetchDone(true)
return
}
@@ -94,7 +89,6 @@ export function DocumentSelector({
const fetchedDocuments = result.data || []
setDocuments(fetchedDocuments)
setInitialFetchDone(true)
} catch (err) {
if ((err as Error).name === 'AbortError') return
setError((err as Error).message)
@@ -138,16 +132,15 @@ export function DocumentSelector({
useEffect(() => {
setDocuments([])
setSelectedDocument(null)
setInitialFetchDone(false)
setError(null)
}, [knowledgeBaseId])
// Fetch documents when knowledge base is available and we haven't fetched yet
// Fetch documents when knowledge base is available
useEffect(() => {
if (knowledgeBaseId && !initialFetchDone && !isPreview) {
if (knowledgeBaseId && !isPreview) {
fetchDocuments()
}
}, [knowledgeBaseId, initialFetchDone, isPreview, fetchDocuments])
}, [knowledgeBaseId, isPreview, fetchDocuments])
const formatDocumentName = (document: DocumentData) => {
return document.filename

View File

@@ -0,0 +1,9 @@
-- Migration: usage-based billing support ("feat(billing): added migrations for usage-based billing").
-- Adds per-user spend-limit and billing-period tracking columns to user_stats,
-- plus a lookup index and an enterprise-metadata integrity check on subscription.
-- Per the matching schema change, the '5' default is the $5 free-plan usage limit.
ALTER TABLE "user_stats" ADD COLUMN "current_usage_limit" numeric DEFAULT '5' NOT NULL;--> statement-breakpoint
-- Tracks who last changed the limit (per schema: user ID, for team-admin auditing) and when.
ALTER TABLE "user_stats" ADD COLUMN "usage_limit_set_by" text;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "usage_limit_updated_at" timestamp DEFAULT now();--> statement-breakpoint
-- Rolling-period accounting: cost accrued in the current period, period bounds,
-- and the previous period's total.
ALTER TABLE "user_stats" ADD COLUMN "current_period_cost" numeric DEFAULT '0' NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "billing_period_start" timestamp DEFAULT now();--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "billing_period_end" timestamp;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "last_period_cost" numeric DEFAULT '0';--> statement-breakpoint
-- Composite index for the common lookup pattern: subscriptions by owner + status.
CREATE INDEX "subscription_reference_status_idx" ON "subscription" USING btree ("reference_id","status");--> statement-breakpoint
-- Enterprise plans must carry either a per-seat or a total allowance in metadata;
-- all other plans are unconstrained.
ALTER TABLE "subscription" ADD CONSTRAINT "check_enterprise_metadata" CHECK (plan != 'enterprise' OR (metadata IS NOT NULL AND (metadata->>'perSeatAllowance' IS NOT NULL OR metadata->>'totalAllowance' IS NOT NULL)));

View File

@@ -0,0 +1,82 @@
-- Migration: new execution-logging schema ("feat(logging): add new schemas + types for new logging system").
-- Creates three tables — per-block execution records, one log row per workflow
-- execution, and deduplicated workflow-state snapshots — plus their foreign
-- keys and indexes. NOTE: FKs to workflow_execution_snapshots are added after
-- all tables exist, so statement order here is load-bearing.
-- One row per block executed in a run: timing, status ('success'/'error'/'skipped'
-- per the matching schema), error details, and token/cost accounting for model blocks.
CREATE TABLE "workflow_execution_blocks" (
"id" text PRIMARY KEY NOT NULL,
"execution_id" text NOT NULL,
"workflow_id" text NOT NULL,
"block_id" text NOT NULL,
"block_name" text,
"block_type" text NOT NULL,
"started_at" timestamp NOT NULL,
"ended_at" timestamp,
"duration_ms" integer,
"status" text NOT NULL,
"error_message" text,
"error_stack_trace" text,
"input_data" jsonb,
"output_data" jsonb,
"cost_input" numeric(10, 6),
"cost_output" numeric(10, 6),
"cost_total" numeric(10, 6),
"tokens_prompt" integer,
"tokens_completion" integer,
"tokens_total" integer,
"model_used" text,
"metadata" jsonb,
"created_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
-- One summary row per workflow execution (enforced by the unique execution_id
-- index below): level ('info'/'error'), trigger ('api'/'webhook'/'schedule'/
-- 'manual'/'chat' per the matching schema), block counts, and aggregate cost.
CREATE TABLE "workflow_execution_logs" (
"id" text PRIMARY KEY NOT NULL,
"workflow_id" text NOT NULL,
"execution_id" text NOT NULL,
"state_snapshot_id" text NOT NULL,
"level" text NOT NULL,
"message" text NOT NULL,
"trigger" text NOT NULL,
"started_at" timestamp NOT NULL,
"ended_at" timestamp,
"total_duration_ms" integer,
"block_count" integer DEFAULT 0 NOT NULL,
"success_count" integer DEFAULT 0 NOT NULL,
"error_count" integer DEFAULT 0 NOT NULL,
"skipped_count" integer DEFAULT 0 NOT NULL,
"total_cost" numeric(10, 6),
"total_input_cost" numeric(10, 6),
"total_output_cost" numeric(10, 6),
"total_tokens" integer,
"metadata" jsonb DEFAULT '{}' NOT NULL,
"created_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
-- Content-addressed workflow-state snapshots; the unique (workflow_id, state_hash)
-- index below deduplicates identical states across executions.
CREATE TABLE "workflow_execution_snapshots" (
"id" text PRIMARY KEY NOT NULL,
"workflow_id" text NOT NULL,
"state_hash" text NOT NULL,
"state_data" jsonb NOT NULL,
"created_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
-- Foreign keys: block/log/snapshot rows cascade-delete with their workflow;
-- the log -> snapshot reference does NOT cascade (snapshots may be shared).
ALTER TABLE "workflow_execution_blocks" ADD CONSTRAINT "workflow_execution_blocks_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "workflow_execution_logs" ADD CONSTRAINT "workflow_execution_logs_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "workflow_execution_logs" ADD CONSTRAINT "workflow_execution_logs_state_snapshot_id_workflow_execution_snapshots_id_fk" FOREIGN KEY ("state_snapshot_id") REFERENCES "public"."workflow_execution_snapshots"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "workflow_execution_snapshots" ADD CONSTRAINT "workflow_execution_snapshots_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
-- Indexes on workflow_execution_blocks: single-column lookups plus composite
-- (workflow, execution) and (execution, status) query paths.
CREATE INDEX "execution_blocks_execution_id_idx" ON "workflow_execution_blocks" USING btree ("execution_id");--> statement-breakpoint
CREATE INDEX "execution_blocks_workflow_id_idx" ON "workflow_execution_blocks" USING btree ("workflow_id");--> statement-breakpoint
CREATE INDEX "execution_blocks_block_id_idx" ON "workflow_execution_blocks" USING btree ("block_id");--> statement-breakpoint
CREATE INDEX "execution_blocks_status_idx" ON "workflow_execution_blocks" USING btree ("status");--> statement-breakpoint
CREATE INDEX "execution_blocks_duration_idx" ON "workflow_execution_blocks" USING btree ("duration_ms");--> statement-breakpoint
CREATE INDEX "execution_blocks_cost_idx" ON "workflow_execution_blocks" USING btree ("cost_total");--> statement-breakpoint
CREATE INDEX "execution_blocks_workflow_execution_idx" ON "workflow_execution_blocks" USING btree ("workflow_id","execution_id");--> statement-breakpoint
CREATE INDEX "execution_blocks_execution_status_idx" ON "workflow_execution_blocks" USING btree ("execution_id","status");--> statement-breakpoint
CREATE INDEX "execution_blocks_started_at_idx" ON "workflow_execution_blocks" USING btree ("started_at");--> statement-breakpoint
-- Indexes on workflow_execution_logs; the unique execution_id index enforces
-- the one-log-row-per-execution invariant.
CREATE INDEX "workflow_execution_logs_workflow_id_idx" ON "workflow_execution_logs" USING btree ("workflow_id");--> statement-breakpoint
CREATE INDEX "workflow_execution_logs_execution_id_idx" ON "workflow_execution_logs" USING btree ("execution_id");--> statement-breakpoint
CREATE INDEX "workflow_execution_logs_trigger_idx" ON "workflow_execution_logs" USING btree ("trigger");--> statement-breakpoint
CREATE INDEX "workflow_execution_logs_level_idx" ON "workflow_execution_logs" USING btree ("level");--> statement-breakpoint
CREATE INDEX "workflow_execution_logs_started_at_idx" ON "workflow_execution_logs" USING btree ("started_at");--> statement-breakpoint
CREATE INDEX "workflow_execution_logs_cost_idx" ON "workflow_execution_logs" USING btree ("total_cost");--> statement-breakpoint
CREATE INDEX "workflow_execution_logs_duration_idx" ON "workflow_execution_logs" USING btree ("total_duration_ms");--> statement-breakpoint
CREATE UNIQUE INDEX "workflow_execution_logs_execution_id_unique" ON "workflow_execution_logs" USING btree ("execution_id");--> statement-breakpoint
-- Indexes on workflow_execution_snapshots; the unique (workflow_id, state_hash)
-- index is the deduplication key.
CREATE INDEX "workflow_snapshots_workflow_id_idx" ON "workflow_execution_snapshots" USING btree ("workflow_id");--> statement-breakpoint
CREATE INDEX "workflow_snapshots_hash_idx" ON "workflow_execution_snapshots" USING btree ("state_hash");--> statement-breakpoint
CREATE UNIQUE INDEX "workflow_snapshots_workflow_hash_idx" ON "workflow_execution_snapshots" USING btree ("workflow_id","state_hash");--> statement-breakpoint
CREATE INDEX "workflow_snapshots_created_at_idx" ON "workflow_execution_snapshots" USING btree ("created_at");

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -330,6 +330,20 @@
"when": 1750794256278,
"tag": "0047_new_triathlon",
"breakpoints": true
},
{
"idx": 48,
"version": "7",
"when": 1751422991828,
"tag": "0048_flawless_ultron",
"breakpoints": true
},
{
"idx": 49,
"version": "7",
"when": 1751430703326,
"tag": "0049_fancy_cardiac",
"breakpoints": true
}
]
}

View File

@@ -133,58 +133,43 @@ export const workflow = pgTable('workflow', {
marketplaceData: json('marketplace_data'),
})
// New normalized workflow tables
export const workflowBlocks = pgTable(
'workflow_blocks',
{
// Primary identification
id: text('id').primaryKey(), // Block UUID from the current JSON structure
id: text('id').primaryKey(),
workflowId: text('workflow_id')
.notNull()
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
.references(() => workflow.id, { onDelete: 'cascade' }),
// Block properties (from current BlockState interface)
type: text('type').notNull(), // e.g., 'starter', 'agent', 'api', 'function'
name: text('name').notNull(), // Display name of the block
type: text('type').notNull(), // 'starter', 'agent', 'api', 'function'
name: text('name').notNull(),
// Position coordinates (from position.x, position.y)
positionX: decimal('position_x').notNull(), // X coordinate on canvas
positionY: decimal('position_y').notNull(), // Y coordinate on canvas
positionX: decimal('position_x').notNull(),
positionY: decimal('position_y').notNull(),
// Block behavior flags (from current BlockState)
enabled: boolean('enabled').notNull().default(true), // Whether block is active
horizontalHandles: boolean('horizontal_handles').notNull().default(true), // UI layout preference
isWide: boolean('is_wide').notNull().default(false), // Whether block uses wide layout
advancedMode: boolean('advanced_mode').notNull().default(false), // Whether block is in advanced mode
height: decimal('height').notNull().default('0'), // Custom height override
enabled: boolean('enabled').notNull().default(true),
horizontalHandles: boolean('horizontal_handles').notNull().default(true),
isWide: boolean('is_wide').notNull().default(false),
advancedMode: boolean('advanced_mode').notNull().default(false),
height: decimal('height').notNull().default('0'),
// Block data (keeping JSON for flexibility as current system does)
subBlocks: jsonb('sub_blocks').notNull().default('{}'), // All subblock configurations
outputs: jsonb('outputs').notNull().default('{}'), // Output type definitions
data: jsonb('data').default('{}'), // Additional block-specific data
subBlocks: jsonb('sub_blocks').notNull().default('{}'),
outputs: jsonb('outputs').notNull().default('{}'),
data: jsonb('data').default('{}'),
// Hierarchy support (for loop/parallel child blocks)
parentId: text('parent_id'), // Self-reference handled by foreign key constraint in migration
extent: text('extent'), // 'parent' or null - for ReactFlow parent constraint
parentId: text('parent_id'),
extent: text('extent'), // 'parent' or null
// Timestamps
createdAt: timestamp('created_at').notNull().defaultNow(),
updatedAt: timestamp('updated_at').notNull().defaultNow(),
},
(table) => ({
// Primary access pattern: get all blocks for a workflow
workflowIdIdx: index('workflow_blocks_workflow_id_idx').on(table.workflowId),
// For finding child blocks of a parent (loop/parallel containers)
parentIdIdx: index('workflow_blocks_parent_id_idx').on(table.parentId),
// Composite index for efficient parent-child queries
workflowParentIdx: index('workflow_blocks_workflow_parent_idx').on(
table.workflowId,
table.parentId
),
// For block type filtering/analytics
workflowTypeIdx: index('workflow_blocks_workflow_type_idx').on(table.workflowId, table.type),
})
)
@@ -192,36 +177,26 @@ export const workflowBlocks = pgTable(
export const workflowEdges = pgTable(
'workflow_edges',
{
// Primary identification
id: text('id').primaryKey(), // Edge UUID from ReactFlow
id: text('id').primaryKey(),
workflowId: text('workflow_id')
.notNull()
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
.references(() => workflow.id, { onDelete: 'cascade' }),
// Connection definition (from ReactFlow Edge interface)
sourceBlockId: text('source_block_id')
.notNull()
.references(() => workflowBlocks.id, { onDelete: 'cascade' }), // Source block ID
.references(() => workflowBlocks.id, { onDelete: 'cascade' }),
targetBlockId: text('target_block_id')
.notNull()
.references(() => workflowBlocks.id, { onDelete: 'cascade' }), // Target block ID
sourceHandle: text('source_handle'), // Specific output handle (optional)
targetHandle: text('target_handle'), // Specific input handle (optional)
.references(() => workflowBlocks.id, { onDelete: 'cascade' }),
sourceHandle: text('source_handle'),
targetHandle: text('target_handle'),
// Timestamps
createdAt: timestamp('created_at').notNull().defaultNow(),
},
(table) => ({
// Primary access pattern: get all edges for a workflow
workflowIdIdx: index('workflow_edges_workflow_id_idx').on(table.workflowId),
// For finding outgoing connections from a block
sourceBlockIdx: index('workflow_edges_source_block_idx').on(table.sourceBlockId),
// For finding incoming connections to a block
targetBlockIdx: index('workflow_edges_target_block_idx').on(table.targetBlockId),
// For comprehensive workflow topology queries
workflowSourceIdx: index('workflow_edges_workflow_source_idx').on(
table.workflowId,
table.sourceBlockId
@@ -236,25 +211,19 @@ export const workflowEdges = pgTable(
export const workflowSubflows = pgTable(
'workflow_subflows',
{
// Primary identification
id: text('id').primaryKey(), // Subflow UUID (currently loop/parallel ID)
id: text('id').primaryKey(),
workflowId: text('workflow_id')
.notNull()
.references(() => workflow.id, { onDelete: 'cascade' }), // Link to parent workflow
.references(() => workflow.id, { onDelete: 'cascade' }),
// Subflow type and configuration
type: text('type').notNull(), // 'loop' or 'parallel' (extensible for future types)
config: jsonb('config').notNull().default('{}'), // Type-specific configuration
type: text('type').notNull(), // 'loop' or 'parallel'
config: jsonb('config').notNull().default('{}'),
// Timestamps
createdAt: timestamp('created_at').notNull().defaultNow(),
updatedAt: timestamp('updated_at').notNull().defaultNow(),
},
(table) => ({
// Primary access pattern: get all subflows for a workflow
workflowIdIdx: index('workflow_subflows_workflow_id_idx').on(table.workflowId),
// For filtering by subflow type
workflowTypeIdx: index('workflow_subflows_workflow_type_idx').on(table.workflowId, table.type),
})
)
@@ -273,14 +242,136 @@ export const workflowLogs = pgTable('workflow_logs', {
.notNull()
.references(() => workflow.id, { onDelete: 'cascade' }),
executionId: text('execution_id'),
level: text('level').notNull(), // e.g. "info", "error", etc.
level: text('level').notNull(), // "info", "error", etc.
message: text('message').notNull(),
duration: text('duration'), // Store as text to allow 'NA' for errors
trigger: text('trigger'), // e.g. "api", "schedule", "manual"
trigger: text('trigger'), // "api", "schedule", "manual"
createdAt: timestamp('created_at').notNull().defaultNow(),
metadata: json('metadata'), // Optional JSON field for storing additional context like tool calls
metadata: json('metadata'),
})
export const workflowExecutionSnapshots = pgTable(
'workflow_execution_snapshots',
{
id: text('id').primaryKey(),
workflowId: text('workflow_id')
.notNull()
.references(() => workflow.id, { onDelete: 'cascade' }),
stateHash: text('state_hash').notNull(),
stateData: jsonb('state_data').notNull(),
createdAt: timestamp('created_at').notNull().defaultNow(),
},
(table) => ({
workflowIdIdx: index('workflow_snapshots_workflow_id_idx').on(table.workflowId),
stateHashIdx: index('workflow_snapshots_hash_idx').on(table.stateHash),
workflowHashUnique: uniqueIndex('workflow_snapshots_workflow_hash_idx').on(
table.workflowId,
table.stateHash
),
createdAtIdx: index('workflow_snapshots_created_at_idx').on(table.createdAt),
})
)
export const workflowExecutionLogs = pgTable(
'workflow_execution_logs',
{
id: text('id').primaryKey(),
workflowId: text('workflow_id')
.notNull()
.references(() => workflow.id, { onDelete: 'cascade' }),
executionId: text('execution_id').notNull(),
stateSnapshotId: text('state_snapshot_id')
.notNull()
.references(() => workflowExecutionSnapshots.id),
level: text('level').notNull(), // 'info', 'error'
message: text('message').notNull(),
trigger: text('trigger').notNull(), // 'api', 'webhook', 'schedule', 'manual', 'chat'
startedAt: timestamp('started_at').notNull(),
endedAt: timestamp('ended_at'),
totalDurationMs: integer('total_duration_ms'),
blockCount: integer('block_count').notNull().default(0),
successCount: integer('success_count').notNull().default(0),
errorCount: integer('error_count').notNull().default(0),
skippedCount: integer('skipped_count').notNull().default(0),
totalCost: decimal('total_cost', { precision: 10, scale: 6 }),
totalInputCost: decimal('total_input_cost', { precision: 10, scale: 6 }),
totalOutputCost: decimal('total_output_cost', { precision: 10, scale: 6 }),
totalTokens: integer('total_tokens'),
metadata: jsonb('metadata').notNull().default('{}'),
createdAt: timestamp('created_at').notNull().defaultNow(),
},
(table) => ({
workflowIdIdx: index('workflow_execution_logs_workflow_id_idx').on(table.workflowId),
executionIdIdx: index('workflow_execution_logs_execution_id_idx').on(table.executionId),
triggerIdx: index('workflow_execution_logs_trigger_idx').on(table.trigger),
levelIdx: index('workflow_execution_logs_level_idx').on(table.level),
startedAtIdx: index('workflow_execution_logs_started_at_idx').on(table.startedAt),
costIdx: index('workflow_execution_logs_cost_idx').on(table.totalCost),
durationIdx: index('workflow_execution_logs_duration_idx').on(table.totalDurationMs),
executionIdUnique: uniqueIndex('workflow_execution_logs_execution_id_unique').on(
table.executionId
),
})
)
export const workflowExecutionBlocks = pgTable(
'workflow_execution_blocks',
{
id: text('id').primaryKey(),
executionId: text('execution_id').notNull(),
workflowId: text('workflow_id')
.notNull()
.references(() => workflow.id, { onDelete: 'cascade' }),
blockId: text('block_id').notNull(),
blockName: text('block_name'),
blockType: text('block_type').notNull(),
startedAt: timestamp('started_at').notNull(),
endedAt: timestamp('ended_at'),
durationMs: integer('duration_ms'),
status: text('status').notNull(), // 'success', 'error', 'skipped'
errorMessage: text('error_message'),
errorStackTrace: text('error_stack_trace'),
inputData: jsonb('input_data'),
outputData: jsonb('output_data'),
costInput: decimal('cost_input', { precision: 10, scale: 6 }),
costOutput: decimal('cost_output', { precision: 10, scale: 6 }),
costTotal: decimal('cost_total', { precision: 10, scale: 6 }),
tokensPrompt: integer('tokens_prompt'),
tokensCompletion: integer('tokens_completion'),
tokensTotal: integer('tokens_total'),
modelUsed: text('model_used'),
metadata: jsonb('metadata'),
createdAt: timestamp('created_at').notNull().defaultNow(),
},
(table) => ({
executionIdIdx: index('execution_blocks_execution_id_idx').on(table.executionId),
workflowIdIdx: index('execution_blocks_workflow_id_idx').on(table.workflowId),
blockIdIdx: index('execution_blocks_block_id_idx').on(table.blockId),
statusIdx: index('execution_blocks_status_idx').on(table.status),
durationIdx: index('execution_blocks_duration_idx').on(table.durationMs),
costIdx: index('execution_blocks_cost_idx').on(table.costTotal),
workflowExecutionIdx: index('execution_blocks_workflow_execution_idx').on(
table.workflowId,
table.executionId
),
executionStatusIdx: index('execution_blocks_execution_status_idx').on(
table.executionId,
table.status
),
startedAtIdx: index('execution_blocks_started_at_idx').on(table.startedAt),
})
)
export const environment = pgTable('environment', {
id: text('id').primaryKey(), // Use the user id as the key
userId: text('user_id')
@@ -401,6 +492,14 @@ export const userStats = pgTable('user_stats', {
totalChatExecutions: integer('total_chat_executions').notNull().default(0),
totalTokensUsed: integer('total_tokens_used').notNull().default(0),
totalCost: decimal('total_cost').notNull().default('0'),
currentUsageLimit: decimal('current_usage_limit').notNull().default('5'), // Default $5 for free plan
usageLimitSetBy: text('usage_limit_set_by'), // User ID who set the limit (for team admin tracking)
usageLimitUpdatedAt: timestamp('usage_limit_updated_at').defaultNow(),
// Billing period tracking
currentPeriodCost: decimal('current_period_cost').notNull().default('0'), // Usage in current billing period
billingPeriodStart: timestamp('billing_period_start').defaultNow(), // When current billing period started
billingPeriodEnd: timestamp('billing_period_end'), // When current billing period ends
lastPeriodCost: decimal('last_period_cost').default('0'), // Usage from previous billing period
lastActive: timestamp('last_active').notNull().defaultNow(),
})
@@ -416,21 +515,34 @@ export const customTools = pgTable('custom_tools', {
updatedAt: timestamp('updated_at').notNull().defaultNow(),
})
export const subscription = pgTable('subscription', {
id: text('id').primaryKey(),
plan: text('plan').notNull(),
referenceId: text('reference_id').notNull(),
stripeCustomerId: text('stripe_customer_id'),
stripeSubscriptionId: text('stripe_subscription_id'),
status: text('status'),
periodStart: timestamp('period_start'),
periodEnd: timestamp('period_end'),
cancelAtPeriodEnd: boolean('cancel_at_period_end'),
seats: integer('seats'),
trialStart: timestamp('trial_start'),
trialEnd: timestamp('trial_end'),
metadata: json('metadata'),
})
export const subscription = pgTable(
'subscription',
{
id: text('id').primaryKey(),
plan: text('plan').notNull(),
referenceId: text('reference_id').notNull(),
stripeCustomerId: text('stripe_customer_id'),
stripeSubscriptionId: text('stripe_subscription_id'),
status: text('status'),
periodStart: timestamp('period_start'),
periodEnd: timestamp('period_end'),
cancelAtPeriodEnd: boolean('cancel_at_period_end'),
seats: integer('seats'),
trialStart: timestamp('trial_start'),
trialEnd: timestamp('trial_end'),
metadata: json('metadata'),
},
(table) => ({
referenceStatusIdx: index('subscription_reference_status_idx').on(
table.referenceId,
table.status
),
enterpriseMetadataCheck: check(
'check_enterprise_metadata',
sql`plan != 'enterprise' OR (metadata IS NOT NULL AND (metadata->>'perSeatAllowance' IS NOT NULL OR metadata->>'totalAllowance' IS NOT NULL))`
),
})
)
export const chat = pgTable(
'chat',
@@ -485,7 +597,7 @@ export const member = pgTable('member', {
organizationId: text('organization_id')
.notNull()
.references(() => organization.id, { onDelete: 'cascade' }),
role: text('role').notNull(),
role: text('role').notNull(), // 'admin' or 'member' - team-level permissions only
createdAt: timestamp('created_at').defaultNow().notNull(),
})

View File

@@ -668,4 +668,238 @@ describe('Executor', () => {
expect(createContextSpy).toHaveBeenCalled()
})
})
/**
* Dependency checking logic tests
*/
describe('dependency checking', () => {
test('should handle multi-input blocks with inactive sources correctly', () => {
// Create workflow with router -> multiple APIs -> single agent
const routerWorkflow = {
blocks: [
{
id: 'start',
metadata: { id: 'starter', name: 'Start' },
config: { params: {} },
enabled: true,
},
{
id: 'router',
metadata: { id: 'router', name: 'Router' },
config: { params: { prompt: 'test', model: 'gpt-4' } },
enabled: true,
},
{
id: 'api1',
metadata: { id: 'api', name: 'API 1' },
config: { params: { url: 'http://api1.com', method: 'GET' } },
enabled: true,
},
{
id: 'api2',
metadata: { id: 'api', name: 'API 2' },
config: { params: { url: 'http://api2.com', method: 'GET' } },
enabled: true,
},
{
id: 'agent',
metadata: { id: 'agent', name: 'Agent' },
config: { params: { model: 'gpt-4', userPrompt: 'test' } },
enabled: true,
},
],
connections: [
{ source: 'start', target: 'router' },
{ source: 'router', target: 'api1' },
{ source: 'router', target: 'api2' },
{ source: 'api1', target: 'agent' },
{ source: 'api2', target: 'agent' },
],
loops: {},
parallels: {},
}
const executor = new Executor(routerWorkflow)
const checkDependencies = (executor as any).checkDependencies.bind(executor)
// Mock context simulating: router selected api1, api1 executed, api2 not in active path
const mockContext = {
blockStates: new Map(),
decisions: {
router: new Map([['router', 'api1']]),
condition: new Map(),
},
activeExecutionPath: new Set(['start', 'router', 'api1', 'agent']),
workflow: routerWorkflow,
} as any
const executedBlocks = new Set(['start', 'router', 'api1'])
// Test agent's dependencies
const agentConnections = [
{ source: 'api1', target: 'agent', sourceHandle: 'source' },
{ source: 'api2', target: 'agent', sourceHandle: 'source' },
]
const dependenciesMet = checkDependencies(agentConnections, executedBlocks, mockContext)
// Both dependencies should be met:
// - api1: in active path AND executed = met
// - api2: NOT in active path = automatically met
expect(dependenciesMet).toBe(true)
})
test('should prioritize special connection types over active path check', () => {
const workflow = createMinimalWorkflow()
const executor = new Executor(workflow)
const checkDependencies = (executor as any).checkDependencies.bind(executor)
const mockContext = {
blockStates: new Map(),
decisions: { router: new Map(), condition: new Map() },
activeExecutionPath: new Set(['block1']), // block2 not in active path
completedLoops: new Set(),
workflow: workflow,
} as any
const executedBlocks = new Set(['block1'])
// Test error connection (should be handled before active path check)
const errorConnections = [{ source: 'block2', target: 'block3', sourceHandle: 'error' }]
// Mock block2 with error state
mockContext.blockStates.set('block2', {
output: { error: 'test error' },
})
// Even though block2 is not in active path, error connection should be handled specially
const errorDepsResult = checkDependencies(errorConnections, new Set(['block2']), mockContext)
expect(errorDepsResult).toBe(true) // source executed + has error = dependency met
// Test loop connection
const loopConnections = [
{ source: 'block2', target: 'block3', sourceHandle: 'loop-end-source' },
]
mockContext.completedLoops.add('block2')
const loopDepsResult = checkDependencies(loopConnections, new Set(['block2']), mockContext)
expect(loopDepsResult).toBe(true) // loop completed = dependency met
})
test('should handle router decisions correctly in dependency checking', () => {
const workflow = createMinimalWorkflow()
const executor = new Executor(workflow)
const checkDependencies = (executor as any).checkDependencies.bind(executor)
// Add router block to workflow
workflow.blocks.push({
id: 'router1',
metadata: { id: 'router', name: 'Router' },
config: { params: {} },
enabled: true,
})
const mockContext = {
blockStates: new Map(),
decisions: {
router: new Map([['router1', 'target1']]), // router selected target1
condition: new Map(),
},
activeExecutionPath: new Set(['router1', 'target1', 'target2']),
workflow: workflow,
} as any
const executedBlocks = new Set(['router1'])
// Test selected target
const selectedConnections = [{ source: 'router1', target: 'target1', sourceHandle: 'source' }]
const selectedResult = checkDependencies(selectedConnections, executedBlocks, mockContext)
expect(selectedResult).toBe(true) // router executed + target selected = dependency met
// Test non-selected target
const nonSelectedConnections = [
{ source: 'router1', target: 'target2', sourceHandle: 'source' },
]
const nonSelectedResult = checkDependencies(
nonSelectedConnections,
executedBlocks,
mockContext
)
expect(nonSelectedResult).toBe(true) // router executed + target NOT selected = dependency auto-met
})
test('should handle condition decisions correctly in dependency checking', () => {
  const conditionWorkflow = createWorkflowWithCondition()
  const executor = new Executor(conditionWorkflow)
  const checkDeps = (executor as any).checkDependencies.bind(executor)

  const ctx = {
    blockStates: new Map(),
    decisions: {
      router: new Map(),
      condition: new Map([['condition1', 'true']]), // condition selected true path
    },
    activeExecutionPath: new Set(['condition1', 'trueTarget']),
    workflow: conditionWorkflow,
  } as any
  const executed = new Set(['condition1'])

  // Chosen branch: dependency met once the condition block has executed.
  const chosen = checkDeps(
    [{ source: 'condition1', target: 'trueTarget', sourceHandle: 'condition-true' }],
    executed,
    ctx
  )
  expect(chosen).toBe(true)

  // Unchosen branch: auto-met so downstream blocks are not deadlocked.
  const unchosen = checkDeps(
    [{ source: 'condition1', target: 'falseTarget', sourceHandle: 'condition-false' }],
    executed,
    ctx
  )
  expect(unchosen).toBe(true)
})
test('should handle regular sequential dependencies correctly', () => {
  const workflow = createMinimalWorkflow()
  const executor = new Executor(workflow)
  const checkDeps = (executor as any).checkDependencies.bind(executor)

  const ctx = {
    blockStates: new Map(),
    decisions: { router: new Map(), condition: new Map() },
    activeExecutionPath: new Set(['block1', 'block2']),
    workflow,
  } as any
  const executed = new Set(['block1'])
  const connections = [{ source: 'block1', target: 'block2', sourceHandle: 'source' }]

  // Source executed without error -> regular dependency is met.
  expect(checkDeps(connections, executed, ctx)).toBe(true)

  // Source executed but errored -> a regular (non-error) connection is NOT met.
  ctx.blockStates.set('block1', { output: { error: 'test error' } })
  expect(checkDeps(connections, executed, ctx)).toBe(false)
})
test('should handle empty dependency list', () => {
  const workflow = createMinimalWorkflow()
  const executor = new Executor(workflow)
  const checkDeps = (executor as any).checkDependencies.bind(executor)

  // A block with no incoming connections is always ready to execute.
  expect(checkDeps([], new Set<string>(), createMockContext())).toBe(true)
})
})
})

View File

@@ -877,6 +877,9 @@ export class Executor {
insideParallel?: string,
iterationIndex?: number
): boolean {
if (incomingConnections.length === 0) {
return true
}
// Check if this is a loop block
const isLoopBlock = incomingConnections.some((conn) => {
const sourceBlock = this.actualWorkflow.blocks.find((b) => b.id === conn.source)
@@ -994,6 +997,12 @@ export class Executor {
return sourceExecuted && conn.target === selectedTarget
}
// If source is not in active path, consider this dependency met
// This allows blocks with multiple inputs to execute even if some inputs are from inactive paths
if (!context.activeExecutionPath.has(conn.source)) {
return true
}
// For error connections, check if the source had an error
if (conn.sourceHandle === 'error') {
return sourceExecuted && hasSourceError
@@ -1004,12 +1013,6 @@ export class Executor {
return sourceExecuted && !hasSourceError
}
// If source is not in active path, consider this dependency met
// This allows blocks with multiple inputs to execute even if some inputs are from inactive paths
if (!context.activeExecutionPath.has(conn.source)) {
return true
}
// For regular blocks, dependency is met if source is executed
return sourceExecuted
})

View File

@@ -408,4 +408,206 @@ describe('PathTracker', () => {
}).not.toThrow()
})
})
// Verifies that when a router picks a target, the PathTracker activates the whole
// chain of blocks reachable from that target — not just the immediate target.
describe('Router downstream path activation', () => {
  beforeEach(() => {
    // Create router workflow with downstream connections:
    // router1 -> (api1 | api2) -> agent1
    mockWorkflow = {
      version: '1.0',
      blocks: [
        {
          id: 'router1',
          metadata: { id: 'router', name: 'Router' },
          position: { x: 0, y: 0 },
          config: { tool: 'router', params: {} },
          inputs: {},
          outputs: {},
          enabled: true,
        },
        {
          id: 'api1',
          metadata: { id: 'api', name: 'API 1' },
          position: { x: 0, y: 0 },
          config: { tool: 'api', params: {} },
          inputs: {},
          outputs: {},
          enabled: true,
        },
        {
          id: 'api2',
          metadata: { id: 'api', name: 'API 2' },
          position: { x: 0, y: 0 },
          config: { tool: 'api', params: {} },
          inputs: {},
          outputs: {},
          enabled: true,
        },
        {
          id: 'agent1',
          metadata: { id: 'agent', name: 'Agent' },
          position: { x: 0, y: 0 },
          config: { tool: 'agent', params: {} },
          inputs: {},
          outputs: {},
          enabled: true,
        },
      ],
      connections: [
        { source: 'router1', target: 'api1' },
        { source: 'router1', target: 'api2' },
        { source: 'api1', target: 'agent1' },
        { source: 'api2', target: 'agent1' },
      ],
      loops: {},
      parallels: {},
    }
    pathTracker = new PathTracker(mockWorkflow)
    // Fresh execution context: no decisions made, no active paths yet.
    mockContext = {
      workflowId: 'test-router-workflow',
      blockStates: new Map(),
      blockLogs: [],
      metadata: { duration: 0 },
      environmentVariables: {},
      decisions: { router: new Map(), condition: new Map() },
      loopIterations: new Map(),
      loopItems: new Map(),
      completedLoops: new Set(),
      executedBlocks: new Set(),
      activeExecutionPath: new Set(),
      workflow: mockWorkflow,
    }
  })
  it('should activate downstream paths when router selects a target', () => {
    // Mock router output selecting api1
    mockContext.blockStates.set('router1', {
      output: {
        response: {
          selectedPath: {
            blockId: 'api1',
            blockType: 'api',
            blockTitle: 'API 1',
          },
        },
      },
      executed: true,
      executionTime: 100,
    })
    // Update paths for router
    pathTracker.updateExecutionPaths(['router1'], mockContext)
    // Both api1 and agent1 should be activated (agent1 is downstream from api1)
    expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
    expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
    // api2 should NOT be activated (not selected by router)
    expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
  })
  it('should handle multiple levels of downstream connections', () => {
    // Add another level to test deep activation: agent1 -> finalStep
    mockWorkflow.blocks.push({
      id: 'finalStep',
      metadata: { id: 'api', name: 'Final Step' },
      position: { x: 0, y: 0 },
      config: { tool: 'api', params: {} },
      inputs: {},
      outputs: {},
      enabled: true,
    })
    mockWorkflow.connections.push({ source: 'agent1', target: 'finalStep' })
    // Rebuild the tracker so it sees the extended graph.
    pathTracker = new PathTracker(mockWorkflow)
    // Mock router output selecting api1
    mockContext.blockStates.set('router1', {
      output: {
        response: {
          selectedPath: {
            blockId: 'api1',
            blockType: 'api',
            blockTitle: 'API 1',
          },
        },
      },
      executed: true,
      executionTime: 100,
    })
    pathTracker.updateExecutionPaths(['router1'], mockContext)
    // All downstream blocks should be activated, transitively
    expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
    expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
    expect(mockContext.activeExecutionPath.has('finalStep')).toBe(true)
    // Non-selected path should not be activated
    expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
  })
  it('should not create infinite loops in cyclic workflows', () => {
    // Add a cycle to test loop prevention: agent1 -> api1 closes the loop
    mockWorkflow.connections.push({ source: 'agent1', target: 'api1' })
    pathTracker = new PathTracker(mockWorkflow)
    mockContext.blockStates.set('router1', {
      output: {
        response: {
          selectedPath: {
            blockId: 'api1',
            blockType: 'api',
            blockTitle: 'API 1',
          },
        },
      },
      executed: true,
      executionTime: 100,
    })
    // This should not throw or cause infinite recursion
    expect(() => {
      pathTracker.updateExecutionPaths(['router1'], mockContext)
    }).not.toThrow()
    // Both api1 and agent1 should still be activated despite the cycle
    expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
    expect(mockContext.activeExecutionPath.has('agent1')).toBe(true)
  })
  it('should handle router with no downstream connections', () => {
    // Create isolated router: keep only the router's own outgoing edges
    const isolatedWorkflow = {
      ...mockWorkflow,
      connections: [
        { source: 'router1', target: 'api1' },
        { source: 'router1', target: 'api2' },
        // Remove downstream connections from api1/api2
      ],
    }
    pathTracker = new PathTracker(isolatedWorkflow)
    mockContext.blockStates.set('router1', {
      output: {
        response: {
          selectedPath: {
            blockId: 'api1',
            blockType: 'api',
            blockTitle: 'API 1',
          },
        },
      },
      executed: true,
      executionTime: 100,
    })
    pathTracker.updateExecutionPaths(['router1'], mockContext)
    // Only the selected target should be activated; nothing is downstream of it
    expect(mockContext.activeExecutionPath.has('api1')).toBe(true)
    expect(mockContext.activeExecutionPath.has('api2')).toBe(false)
    expect(mockContext.activeExecutionPath.has('agent1')).toBe(false)
  })
})
})

View File

@@ -165,10 +165,28 @@ export class PathTracker {
if (selectedPath) {
context.decisions.router.set(block.id, selectedPath)
context.activeExecutionPath.add(selectedPath)
this.activateDownstreamPaths(selectedPath, context)
logger.info(`Router ${block.id} selected path: ${selectedPath}`)
}
}
/**
* Recursively activate downstream paths from a block
*/
private activateDownstreamPaths(blockId: string, context: ExecutionContext): void {
const outgoingConnections = this.getOutgoingConnections(blockId)
for (const conn of outgoingConnections) {
if (!context.activeExecutionPath.has(conn.target)) {
context.activeExecutionPath.add(conn.target)
this.activateDownstreamPaths(conn.target, context)
}
}
}
/**
* Update paths for condition blocks
*/
@@ -219,9 +237,7 @@ export class PathTracker {
const isPartOfLoop = blockLoops.length > 0
for (const conn of outgoingConnections) {
if (
this.shouldActivateConnection(conn, block.id, hasError, isPartOfLoop, blockLoops, context)
) {
if (this.shouldActivateConnection(conn, hasError, isPartOfLoop, blockLoops, context)) {
context.activeExecutionPath.add(conn.target)
}
}
@@ -253,7 +269,6 @@ export class PathTracker {
*/
private shouldActivateConnection(
conn: SerializedConnection,
sourceBlockId: string,
hasError: boolean,
isPartOfLoop: boolean,
blockLoops: Array<{ id: string; loop: any }>,

380
apps/sim/lib/logs/types.ts Normal file
View File

@@ -0,0 +1,380 @@
import type { Edge } from 'reactflow'
import type { BlockLog, NormalizedBlockOutput } from '@/executor/types'
import type { DeploymentStatus } from '@/stores/workflows/registry/types'
import type { Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'
export type { WorkflowState, Loop, Parallel, DeploymentStatus }
export type WorkflowEdge = Edge
export type { NormalizedBlockOutput, BlockLog }
/**
 * Per-token pricing for a model. `cachedInput` is the discounted rate for
 * cached prompt tokens where a provider supports it.
 * NOTE(review): rate units (per token vs per million tokens) are not visible
 * here — confirm against the pricing source.
 */
export interface PricingInfo {
  input: number
  output: number
  cachedInput?: number
  updatedAt: string
}
/** Token counts for a model call (or an aggregate of calls). */
export interface TokenUsage {
  prompt: number
  completion: number
  total: number
}
/** Cost of a model invocation, split by direction, with the model and pricing used. */
export interface CostBreakdown {
  input: number
  output: number
  total: number
  tokens: TokenUsage
  model: string
  pricing: PricingInfo
}
/** One tool invocation made during a block execution: timing, status, and I/O payloads. */
export interface ToolCall {
  name: string
  duration: number
  startTime: string
  endTime: string
  status: 'success' | 'error'
  input: Record<string, unknown>
  output: Record<string, unknown>
  error?: string
}
// Raw input parameters passed to a block (shape varies per block type).
export type BlockInputData = Record<string, any>
// Normalized block output; null when no output is available.
export type BlockOutputData = NormalizedBlockOutput | null
/** Identifiers and variables describing where and for whom an execution ran. */
export interface ExecutionEnvironment {
  variables: Record<string, string>
  workflowId: string
  executionId: string
  userId: string
  workspaceId: string
}
/** What started an execution: trigger kind, its source, and an optional payload. */
export interface ExecutionTrigger {
  type: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
  source: string
  data?: Record<string, unknown>
  timestamp: string
}
/** Lifecycle state of an execution with start/end timestamps. */
export interface ExecutionStatus {
  status: 'running' | 'completed' | 'failed' | 'cancelled'
  startedAt: string
  endedAt?: string
  durationMs?: number
}
/**
 * Point-in-time copy of a workflow's state captured for an execution.
 * `stateHash` is a content hash of `stateData` (see SnapshotService.computeStateHash),
 * allowing identical states to be looked up and reused.
 */
export interface WorkflowExecutionSnapshot {
  id: string
  workflowId: string
  stateHash: string
  stateData: WorkflowState
  createdAt: string
}
// Insert shape — `createdAt` is omitted and assigned at write time.
export type WorkflowExecutionSnapshotInsert = Omit<WorkflowExecutionSnapshot, 'createdAt'>
export type WorkflowExecutionSnapshotSelect = WorkflowExecutionSnapshot
/**
 * Workflow-level execution log: overall timing, per-block outcome counts,
 * aggregated cost/token totals, and a reference (`stateSnapshotId`) to the
 * state snapshot the run executed against.
 */
export interface WorkflowExecutionLog {
  id: string
  workflowId: string
  executionId: string
  stateSnapshotId: string
  level: 'info' | 'error'
  message: string
  trigger: ExecutionTrigger['type']
  startedAt: string
  endedAt: string
  totalDurationMs: number
  // Aggregate block outcomes for the run.
  blockCount: number
  successCount: number
  errorCount: number
  skippedCount: number
  // Aggregate cost/token totals across the run.
  totalCost: number
  totalInputCost: number
  totalOutputCost: number
  totalTokens: number
  primaryModel: string
  metadata: {
    environment: ExecutionEnvironment
    trigger: ExecutionTrigger
    traceSpans?: TraceSpan[]
    // Set when the run failed at a specific block.
    errorDetails?: {
      blockId: string
      blockName: string
      error: string
      stackTrace?: string
    }
  }
  // NOTE(review): overlaps totalDurationMs — presumably a human-readable form; confirm.
  duration?: string
  createdAt: string
}
// Insert shape — `id` and `createdAt` are omitted and assigned at write time.
export type WorkflowExecutionLogInsert = Omit<WorkflowExecutionLog, 'id' | 'createdAt'>
export type WorkflowExecutionLogSelect = WorkflowExecutionLog
/**
 * Block-level execution log: one entry per block run within an execution,
 * including timing, status, full I/O payloads, and (nullable) cost.
 */
export interface BlockExecutionLog {
  id: string
  executionId: string
  workflowId: string
  blockId: string
  blockName: string
  blockType: string
  startedAt: string
  endedAt: string
  durationMs: number
  status: 'success' | 'error' | 'skipped'
  errorMessage?: string
  errorStackTrace?: string
  inputData: BlockInputData
  outputData: BlockOutputData
  cost: CostBreakdown | null
  metadata: {
    toolCalls?: ToolCall[]
    // NOTE(review): iterationIndex/virtualBlockId/parentBlockId look like
    // loop/parallel expansion bookkeeping — confirm against the executor.
    iterationIndex?: number
    virtualBlockId?: string
    parentBlockId?: string
    environmentSnapshot?: Record<string, string>
  }
  createdAt: string
}
// Insert shape — `id` and `createdAt` are omitted and assigned at write time.
export type BlockExecutionLogInsert = Omit<BlockExecutionLog, 'id' | 'createdAt'>
export type BlockExecutionLogSelect = BlockExecutionLog
/**
 * Node in a hierarchical execution trace. Spans nest via `children` and may
 * carry tool calls, token counts, and a start offset relative to the run.
 */
export interface TraceSpan {
  id: string
  name: string
  type: string
  duration: number
  startTime: string
  endTime: string
  children?: TraceSpan[]
  toolCalls?: ToolCall[]
  status?: 'success' | 'error'
  tokens?: number
  // Offset from the start of the whole execution, in milliseconds.
  relativeStartMs?: number
  blockId?: string
  input?: Record<string, unknown>
}
/**
 * Flattened summary of one workflow execution, returned in paginated lists
 * (see WorkflowExecutionsResponse). Full payloads live in WorkflowExecutionDetail.
 */
export interface WorkflowExecutionSummary {
  id: string
  workflowId: string
  workflowName: string
  executionId: string
  trigger: ExecutionTrigger['type']
  status: ExecutionStatus['status']
  startedAt: string
  endedAt: string
  durationMs: number
  // Per-block outcome counts for the run.
  blockStats: {
    total: number
    success: number
    error: number
    skipped: number
  }
  // Aggregated cost/token totals across all blocks.
  costSummary: {
    total: number
    inputCost: number
    outputCost: number
    tokens: number
    primaryModel: string
  }
  stateSnapshotId: string
  // Present only when the run failed.
  errorSummary?: {
    blockId: string
    blockName: string
    message: string
  }
}
/** Full execution record: summary plus environment, trigger payload, block logs, trace, and state. */
export interface WorkflowExecutionDetail extends WorkflowExecutionSummary {
  environment: ExecutionEnvironment
  triggerData: ExecutionTrigger
  blockExecutions: BlockExecutionSummary[]
  traceSpans: TraceSpan[]
  workflowState: WorkflowState
}
/** Lightweight per-block view: metadata plus size/shape hints instead of full payloads. */
export interface BlockExecutionSummary {
  id: string
  blockId: string
  blockName: string
  blockType: string
  startedAt: string
  endedAt: string
  durationMs: number
  status: BlockExecutionLog['status']
  errorMessage?: string
  cost?: CostBreakdown
  // Hints about the input payload without shipping it.
  inputSummary: {
    parameterCount: number
    hasComplexData: boolean
  }
  // Hints about the output payload without shipping it.
  outputSummary: {
    hasOutput: boolean
    outputType: string
    hasError: boolean
  }
}
/** Block summary plus the full input/output payloads, metadata, and tool calls. */
export interface BlockExecutionDetail extends BlockExecutionSummary {
  inputData: BlockInputData
  outputData: BlockOutputData
  metadata: BlockExecutionLog['metadata']
  toolCalls?: ToolCall[]
}
/** Generic page wrapper for list responses. */
export interface PaginatedResponse<T> {
  data: T[]
  pagination: {
    page: number
    pageSize: number
    total: number
    totalPages: number
    hasNext: boolean
    hasPrevious: boolean
  }
}
export type WorkflowExecutionsResponse = PaginatedResponse<WorkflowExecutionSummary>
export type BlockExecutionsResponse = PaginatedResponse<BlockExecutionSummary>
/** Filter set for querying execution logs; all fields are optional and combinable. */
export interface WorkflowExecutionFilters {
  workflowIds?: string[]
  folderIds?: string[]
  triggers?: ExecutionTrigger['type'][]
  status?: ExecutionStatus['status'][]
  startDate?: string
  endDate?: string
  search?: string
  // Duration bounds in the same unit as durationMs — presumably milliseconds; confirm.
  minDuration?: number
  maxDuration?: number
  minCost?: number
  maxCost?: number
  hasErrors?: boolean
}
/** Paging and sorting controls for log queries. */
export interface PaginationParams {
  page: number
  pageSize: number
  sortBy?: 'startedAt' | 'durationMs' | 'totalCost' | 'blockCount'
  sortOrder?: 'asc' | 'desc'
}
/** Combined query parameters: filters + paging + optional payload-expansion flags. */
export interface LogsQueryParams extends WorkflowExecutionFilters, PaginationParams {
  includeBlockSummary?: boolean
  includeWorkflowState?: boolean
}
/** Machine-readable error payload for the logs API. */
export interface LogsError {
  code: 'EXECUTION_NOT_FOUND' | 'SNAPSHOT_NOT_FOUND' | 'INVALID_WORKFLOW_STATE' | 'STORAGE_ERROR'
  message: string
  details?: Record<string, unknown>
}
/** A single field-level validation failure. */
export interface ValidationError {
  field: string
  message: string
  value: unknown
}
/**
 * Error thrown by the logs service layer. Carries a machine-readable `code`
 * (one of LogsError['code']) plus optional structured details for callers.
 */
export class LogsServiceError extends Error {
  constructor(
    message: string,
    public code: LogsError['code'],
    public details?: Record<string, unknown>
  ) {
    super(message)
    this.name = 'LogsServiceError'
  }
}
/** Result wrapper for a single DB operation: `data` on success, `error` on failure. */
export interface DatabaseOperationResult<T> {
  success: boolean
  data?: T
  error?: LogsServiceError
}
/** Outcome of a batch insert, separating inserted rows from per-item failures. */
export interface BatchInsertResult<T> {
  inserted: T[]
  failed: Array<{
    item: T
    error: string
  }>
  totalAttempted: number
  totalSucceeded: number
  totalFailed: number
}
/**
 * Contract for persisting and retrieving workflow-state snapshots.
 * Snapshots are content-addressed via computeStateHash, enabling lookup
 * (and deduplication) through getSnapshotByHash.
 */
export interface SnapshotService {
  createSnapshot(workflowId: string, state: WorkflowState): Promise<WorkflowExecutionSnapshot>
  getSnapshot(id: string): Promise<WorkflowExecutionSnapshot | null>
  getSnapshotByHash(workflowId: string, hash: string): Promise<WorkflowExecutionSnapshot | null>
  computeStateHash(state: WorkflowState): string
  // Returns a count — presumably how many snapshots were removed; confirm in the impl.
  cleanupOrphanedSnapshots(olderThanDays: number): Promise<number>
}
/** A snapshot plus whether it was freshly created (`isNew: false` means an existing one was reused). */
export interface SnapshotCreationResult {
  snapshot: WorkflowExecutionSnapshot
  isNew: boolean
}
/**
 * Contract for the execution logger: open a workflow-level log (capturing a
 * state snapshot), record each block execution, then finalize the run with
 * aggregate block stats and cost totals.
 */
export interface ExecutionLoggerService {
  startWorkflowExecution(params: {
    workflowId: string
    executionId: string
    trigger: ExecutionTrigger
    environment: ExecutionEnvironment
    workflowState: WorkflowState
  }): Promise<{
    workflowLog: WorkflowExecutionLog
    snapshot: WorkflowExecutionSnapshot
  }>
  logBlockExecution(params: {
    executionId: string
    workflowId: string
    blockId: string
    blockName: string
    blockType: string
    input: BlockInputData
    output: BlockOutputData
    timing: {
      startedAt: string
      endedAt: string
      durationMs: number
    }
    status: BlockExecutionLog['status']
    error?: {
      message: string
      stackTrace?: string
    }
    cost?: CostBreakdown
    metadata?: BlockExecutionLog['metadata']
  }): Promise<BlockExecutionLog>
  completeWorkflowExecution(params: {
    executionId: string
    endedAt: string
    totalDurationMs: number
    blockStats: {
      total: number
      success: number
      error: number
      skipped: number
    }
    costSummary: {
      totalCost: number
      totalInputCost: number
      totalOutputCost: number
      totalTokens: number
      primaryModel: string
    }
    finalOutput: BlockOutputData
    traceSpans?: TraceSpan[]
  }): Promise<WorkflowExecutionLog>
}

View File

@@ -43,9 +43,6 @@ async function initializeApplication(): Promise<void> {
// Mark data as initialized only after sync managers have loaded data from DB
dataInitialized = true
// Register cleanup
window.addEventListener('beforeunload', handleBeforeUnload)
// Log initialization timing information
const initDuration = Date.now() - initStartTime
logger.info(`Application initialization completed in ${initDuration}ms`)