Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-07 22:24:06 -05:00)
feat(tests): added testing package, overhauled tests (#2586)

* feat(tests): added testing package, overhauled tests
* fix build
packages/testing/package.json (new file, 47 lines)
@@ -0,0 +1,47 @@
{
  "name": "@sim/testing",
  "version": "0.1.0",
  "private": true,
  "type": "module",
  "license": "Apache-2.0",
  "engines": {
    "bun": ">=1.2.13",
    "node": ">=20.0.0"
  },
  "exports": {
    ".": {
      "types": "./src/index.ts",
      "default": "./src/index.ts"
    },
    "./factories": {
      "types": "./src/factories/index.ts",
      "default": "./src/factories/index.ts"
    },
    "./builders": {
      "types": "./src/builders/index.ts",
      "default": "./src/builders/index.ts"
    },
    "./mocks": {
      "types": "./src/mocks/index.ts",
      "default": "./src/mocks/index.ts"
    },
    "./assertions": {
      "types": "./src/assertions/index.ts",
      "default": "./src/assertions/index.ts"
    },
    "./setup": {
      "types": "./src/setup/vitest.setup.ts",
      "default": "./src/setup/vitest.setup.ts"
    }
  },
  "scripts": {
    "type-check": "tsc --noEmit"
  },
  "peerDependencies": {
    "vitest": "^3.0.0"
  },
  "devDependencies": {
    "typescript": "^5.7.3",
    "vitest": "^3.0.8"
  }
}
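The `exports` map is what lets consuming tests import each area of the package by subpath. A minimal sketch of the import style this enables (the consumer test file itself is hypothetical):

```ts
// Hypothetical consumer test file; the subpath imports resolve through the
// "exports" map declared in @sim/testing/package.json.
import { createBlock } from '@sim/testing/factories'
import { WorkflowBuilder } from '@sim/testing/builders'
import { expectBlockExists } from '@sim/testing/assertions'
```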
packages/testing/src/assertions/execution.assertions.ts (new file, 159 lines)
@@ -0,0 +1,159 @@
import { expect } from 'vitest'
import type { ExecutionContext } from '../types'

/**
 * Asserts that a block was executed.
 *
 * @example
 * ```ts
 * expectBlockExecuted(ctx, 'block-1')
 * ```
 */
export function expectBlockExecuted(ctx: ExecutionContext, blockId: string): void {
  expect(ctx.executedBlocks.has(blockId), `Block "${blockId}" should have been executed`).toBe(true)
}

/**
 * Asserts that a block was NOT executed.
 *
 * @example
 * ```ts
 * expectBlockNotExecuted(ctx, 'skipped-block')
 * ```
 */
export function expectBlockNotExecuted(ctx: ExecutionContext, blockId: string): void {
  expect(ctx.executedBlocks.has(blockId), `Block "${blockId}" should not have been executed`).toBe(
    false
  )
}

/**
 * Asserts that blocks were executed in a specific order.
 *
 * @example
 * ```ts
 * expectExecutionOrder(executionLog, ['start', 'step1', 'step2', 'end'])
 * ```
 */
export function expectExecutionOrder(executedBlocks: string[], expectedOrder: string[]): void {
  const actualOrder = executedBlocks.filter((id) => expectedOrder.includes(id))
  expect(actualOrder, 'Blocks should be executed in expected order').toEqual(expectedOrder)
}

/**
 * Asserts that a block has a specific output state.
 *
 * @example
 * ```ts
 * expectBlockOutput(ctx, 'agent-1', { response: 'Hello' })
 * ```
 */
export function expectBlockOutput(
  ctx: ExecutionContext,
  blockId: string,
  expectedOutput: Record<string, any>
): void {
  const state = ctx.blockStates.get(blockId)
  expect(state, `Block "${blockId}" should have state`).toBeDefined()
  expect(state).toMatchObject(expectedOutput)
}

/**
 * Asserts that execution has a specific number of logs.
 *
 * @example
 * ```ts
 * expectLogCount(ctx, 5)
 * ```
 */
export function expectLogCount(ctx: ExecutionContext, expectedCount: number): void {
  expect(ctx.blockLogs.length, `Should have ${expectedCount} logs`).toBe(expectedCount)
}

/**
 * Asserts that a condition decision was made.
 *
 * @example
 * ```ts
 * expectConditionDecision(ctx, 'condition-1', true)
 * ```
 */
export function expectConditionDecision(
  ctx: ExecutionContext,
  blockId: string,
  expectedResult: boolean
): void {
  const decision = ctx.decisions.condition.get(blockId)
  expect(decision, `Condition "${blockId}" should have a decision`).toBeDefined()
  expect(decision).toBe(expectedResult)
}

/**
 * Asserts that a loop was completed.
 *
 * @example
 * ```ts
 * expectLoopCompleted(ctx, 'loop-1')
 * ```
 */
export function expectLoopCompleted(ctx: ExecutionContext, loopId: string): void {
  expect(ctx.completedLoops.has(loopId), `Loop "${loopId}" should be completed`).toBe(true)
}

/**
 * Asserts that a block is in the active execution path.
 *
 * @example
 * ```ts
 * expectInActivePath(ctx, 'current-block')
 * ```
 */
export function expectInActivePath(ctx: ExecutionContext, blockId: string): void {
  expect(ctx.activeExecutionPath.has(blockId), `Block "${blockId}" should be in active path`).toBe(
    true
  )
}

/**
 * Asserts that execution was cancelled.
 *
 * @example
 * ```ts
 * expectExecutionCancelled(ctx)
 * ```
 */
export function expectExecutionCancelled(ctx: ExecutionContext): void {
  expect(ctx.abortSignal?.aborted, 'Execution should be cancelled').toBe(true)
}

/**
 * Asserts that execution was NOT cancelled.
 *
 * @example
 * ```ts
 * expectExecutionNotCancelled(ctx)
 * ```
 */
export function expectExecutionNotCancelled(ctx: ExecutionContext): void {
  expect(ctx.abortSignal?.aborted ?? false, 'Execution should not be cancelled').toBe(false)
}

/**
 * Asserts that execution has specific environment variables.
 *
 * @example
 * ```ts
 * expectEnvironmentVariables(ctx, { API_KEY: 'test', MODE: 'production' })
 * ```
 */
export function expectEnvironmentVariables(
  ctx: ExecutionContext,
  expectedVars: Record<string, string>
): void {
  Object.entries(expectedVars).forEach(([key, value]) => {
    expect(
      ctx.environmentVariables[key],
      `Environment variable "${key}" should be "${value}"`
    ).toBe(value)
  })
}
packages/testing/src/assertions/index.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
/**
 * Custom assertions for testing workflows and execution.
 *
 * These provide semantic, readable assertions for common test scenarios.
 *
 * @example
 * ```ts
 * import {
 *   expectBlockExists,
 *   expectEdgeConnects,
 *   expectExecutionOrder,
 * } from '@sim/testing/assertions'
 *
 * // Workflow assertions
 * expectBlockExists(workflow.blocks, 'agent-1', 'agent')
 * expectEdgeConnects(workflow.edges, 'start', 'agent-1')
 *
 * // Execution assertions
 * expectBlockExecuted(ctx, 'agent-1')
 * expectExecutionOrder(log, ['start', 'agent-1', 'end'])
 * ```
 */

// Execution assertions
export {
  expectBlockExecuted,
  expectBlockNotExecuted,
  expectBlockOutput,
  expectConditionDecision,
  expectEnvironmentVariables,
  expectExecutionCancelled,
  expectExecutionNotCancelled,
  expectExecutionOrder,
  expectInActivePath,
  expectLogCount,
  expectLoopCompleted,
} from './execution.assertions'
// Permission assertions
export {
  expectApiKeyInvalid,
  expectApiKeyValid,
  expectPermissionAllowed,
  expectPermissionDenied,
  expectRoleCannotPerform,
  expectRoleCanPerform,
  expectSocketAccessDenied,
  expectSocketAccessGranted,
  expectUserHasNoPermission,
  expectUserHasPermission,
  expectWorkflowAccessDenied,
  expectWorkflowAccessGranted,
} from './permission.assertions'
// Workflow assertions
export {
  expectBlockCount,
  expectBlockDisabled,
  expectBlockEnabled,
  expectBlockExists,
  expectBlockHasParent,
  expectBlockNotExists,
  expectBlockPosition,
  expectEdgeConnects,
  expectEdgeCount,
  expectEmptyWorkflow,
  expectLinearChain,
  expectLoopExists,
  expectNoEdgeBetween,
  expectParallelExists,
} from './workflow.assertions'
packages/testing/src/assertions/permission.assertions.ts (new file, 144 lines)
@@ -0,0 +1,144 @@
import { expect } from 'vitest'
import type { PermissionType } from '../factories/permission.factory'

/**
 * Asserts that a permission check result is allowed.
 */
export function expectPermissionAllowed(result: { allowed: boolean; reason?: string }): void {
  expect(result.allowed).toBe(true)
  expect(result.reason).toBeUndefined()
}

/**
 * Asserts that a permission check result is denied with a specific reason pattern.
 */
export function expectPermissionDenied(
  result: { allowed: boolean; reason?: string },
  reasonPattern?: string | RegExp
): void {
  expect(result.allowed).toBe(false)
  expect(result.reason).toBeDefined()
  if (reasonPattern) {
    if (typeof reasonPattern === 'string') {
      expect(result.reason).toContain(reasonPattern)
    } else {
      expect(result.reason).toMatch(reasonPattern)
    }
  }
}

/**
 * Asserts that a workflow validation result indicates success.
 */
export function expectWorkflowAccessGranted(result: {
  error: { message: string; status: number } | null
  session: unknown
  workflow: unknown
}): void {
  expect(result.error).toBeNull()
  expect(result.session).not.toBeNull()
  expect(result.workflow).not.toBeNull()
}

/**
 * Asserts that a workflow validation result indicates access denied.
 */
export function expectWorkflowAccessDenied(
  result: {
    error: { message: string; status: number } | null
    session: unknown
    workflow: unknown
  },
  expectedStatus: 401 | 403 | 404 = 403
): void {
  expect(result.error).not.toBeNull()
  expect(result.error?.status).toBe(expectedStatus)
  expect(result.session).toBeNull()
  expect(result.workflow).toBeNull()
}

/**
 * Asserts that a user has a specific permission level.
 */
export function expectUserHasPermission(
  permissions: Array<{ userId: string; permissionType: PermissionType }>,
  userId: string,
  expectedPermission: PermissionType
): void {
  const userPermission = permissions.find((p) => p.userId === userId)
  expect(userPermission).toBeDefined()
  expect(userPermission?.permissionType).toBe(expectedPermission)
}

/**
 * Asserts that a user has no permission.
 */
export function expectUserHasNoPermission(
  permissions: Array<{ userId: string; permissionType: PermissionType }>,
  userId: string
): void {
  const userPermission = permissions.find((p) => p.userId === userId)
  expect(userPermission).toBeUndefined()
}

/**
 * Asserts that a role can perform an operation.
 */
export function expectRoleCanPerform(
  checkFn: (role: string, operation: string) => { allowed: boolean },
  role: string,
  operation: string
): void {
  const result = checkFn(role, operation)
  expect(result.allowed).toBe(true)
}

/**
 * Asserts that a role cannot perform an operation.
 */
export function expectRoleCannotPerform(
  checkFn: (role: string, operation: string) => { allowed: boolean },
  role: string,
  operation: string
): void {
  const result = checkFn(role, operation)
  expect(result.allowed).toBe(false)
}

/**
 * Asserts socket workflow access is granted.
 */
export function expectSocketAccessGranted(result: {
  hasAccess: boolean
  role?: string
  workspaceId?: string
}): void {
  expect(result.hasAccess).toBe(true)
  expect(result.role).toBeDefined()
}

/**
 * Asserts socket workflow access is denied.
 */
export function expectSocketAccessDenied(result: {
  hasAccess: boolean
  role?: string
  workspaceId?: string
}): void {
  expect(result.hasAccess).toBe(false)
  expect(result.role).toBeUndefined()
}

/**
 * Asserts API key authentication succeeded.
 */
export function expectApiKeyValid(result: boolean): void {
  expect(result).toBe(true)
}

/**
 * Asserts API key authentication failed.
 */
export function expectApiKeyInvalid(result: boolean): void {
  expect(result).toBe(false)
}
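A minimal sketch of these permission assertions in a vitest test; `checkWorkspaceAccess` is a stand-in for whatever permission check is actually under test:

```ts
import { describe, it } from 'vitest'
import { expectPermissionAllowed, expectPermissionDenied } from '@sim/testing/assertions'

// Stand-in for the real permission check under test (hypothetical).
function checkWorkspaceAccess(userId: string): { allowed: boolean; reason?: string } {
  return userId === 'member-1' ? { allowed: true } : { allowed: false, reason: 'not a member' }
}

describe('workspace permissions', () => {
  it('allows members and denies outsiders', () => {
    expectPermissionAllowed(checkWorkspaceAccess('member-1'))
    expectPermissionDenied(checkWorkspaceAccess('stranger-1'), 'not a member')
  })
})
```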
packages/testing/src/assertions/workflow.assertions.ts (new file, 244 lines)
@@ -0,0 +1,244 @@
import { expect } from 'vitest'
import type { BlockState, Edge, WorkflowState } from '../types'

/**
 * Asserts that a block exists in the workflow.
 *
 * @example
 * ```ts
 * const workflow = createLinearWorkflow(3)
 * expectBlockExists(workflow.blocks, 'block-0')
 * expectBlockExists(workflow.blocks, 'block-0', 'starter')
 * ```
 */
export function expectBlockExists(
  blocks: Record<string, BlockState>,
  blockId: string,
  expectedType?: string
): void {
  expect(blocks[blockId], `Block "${blockId}" should exist`).toBeDefined()
  expect(blocks[blockId].id).toBe(blockId)
  if (expectedType) {
    expect(blocks[blockId].type, `Block "${blockId}" should be type "${expectedType}"`).toBe(
      expectedType
    )
  }
}

/**
 * Asserts that a block does NOT exist in the workflow.
 *
 * @example
 * ```ts
 * expectBlockNotExists(workflow.blocks, 'deleted-block')
 * ```
 */
export function expectBlockNotExists(blocks: Record<string, BlockState>, blockId: string): void {
  expect(blocks[blockId], `Block "${blockId}" should not exist`).toBeUndefined()
}

/**
 * Asserts that an edge connects two blocks.
 *
 * @example
 * ```ts
 * expectEdgeConnects(workflow.edges, 'block-0', 'block-1')
 * ```
 */
export function expectEdgeConnects(edges: Edge[], sourceId: string, targetId: string): void {
  const edge = edges.find((e) => e.source === sourceId && e.target === targetId)
  expect(edge, `Edge from "${sourceId}" to "${targetId}" should exist`).toBeDefined()
}

/**
 * Asserts that no edge connects two blocks.
 *
 * @example
 * ```ts
 * expectNoEdgeBetween(workflow.edges, 'block-1', 'block-0') // No reverse edge
 * ```
 */
export function expectNoEdgeBetween(edges: Edge[], sourceId: string, targetId: string): void {
  const edge = edges.find((e) => e.source === sourceId && e.target === targetId)
  expect(edge, `Edge from "${sourceId}" to "${targetId}" should not exist`).toBeUndefined()
}

/**
 * Asserts that a block has a specific parent.
 *
 * @example
 * ```ts
 * expectBlockHasParent(workflow.blocks, 'child-block', 'loop-1')
 * ```
 */
export function expectBlockHasParent(
  blocks: Record<string, BlockState>,
  childId: string,
  expectedParentId: string
): void {
  const block = blocks[childId]
  expect(block, `Child block "${childId}" should exist`).toBeDefined()
  expect(block.data?.parentId, `Block "${childId}" should have parent "${expectedParentId}"`).toBe(
    expectedParentId
  )
}

/**
 * Asserts that a workflow has a specific number of blocks.
 *
 * @example
 * ```ts
 * expectBlockCount(workflow, 5)
 * ```
 */
export function expectBlockCount(workflow: WorkflowState, expectedCount: number): void {
  const actualCount = Object.keys(workflow.blocks).length
  expect(actualCount, `Workflow should have ${expectedCount} blocks`).toBe(expectedCount)
}

/**
 * Asserts that a workflow has a specific number of edges.
 *
 * @example
 * ```ts
 * expectEdgeCount(workflow, 4)
 * ```
 */
export function expectEdgeCount(workflow: WorkflowState, expectedCount: number): void {
  expect(workflow.edges.length, `Workflow should have ${expectedCount} edges`).toBe(expectedCount)
}

/**
 * Asserts that a block is at a specific position.
 *
 * @example
 * ```ts
 * expectBlockPosition(workflow.blocks, 'block-1', { x: 200, y: 0 })
 * ```
 */
export function expectBlockPosition(
  blocks: Record<string, BlockState>,
  blockId: string,
  expectedPosition: { x: number; y: number }
): void {
  const block = blocks[blockId]
  expect(block, `Block "${blockId}" should exist`).toBeDefined()
  expect(block.position.x, `Block "${blockId}" x position`).toBeCloseTo(expectedPosition.x, 0)
  expect(block.position.y, `Block "${blockId}" y position`).toBeCloseTo(expectedPosition.y, 0)
}

/**
 * Asserts that a block is enabled.
 *
 * @example
 * ```ts
 * expectBlockEnabled(workflow.blocks, 'block-1')
 * ```
 */
export function expectBlockEnabled(blocks: Record<string, BlockState>, blockId: string): void {
  const block = blocks[blockId]
  expect(block, `Block "${blockId}" should exist`).toBeDefined()
  expect(block.enabled, `Block "${blockId}" should be enabled`).toBe(true)
}

/**
 * Asserts that a block is disabled.
 *
 * @example
 * ```ts
 * expectBlockDisabled(workflow.blocks, 'disabled-block')
 * ```
 */
export function expectBlockDisabled(blocks: Record<string, BlockState>, blockId: string): void {
  const block = blocks[blockId]
  expect(block, `Block "${blockId}" should exist`).toBeDefined()
  expect(block.enabled, `Block "${blockId}" should be disabled`).toBe(false)
}

/**
 * Asserts that a workflow has a loop with specific configuration.
 *
 * @example
 * ```ts
 * expectLoopExists(workflow, 'loop-1', { iterations: 5, loopType: 'for' })
 * ```
 */
export function expectLoopExists(
  workflow: WorkflowState,
  loopId: string,
  expectedConfig?: { iterations?: number; loopType?: string; nodes?: string[] }
): void {
  const loop = workflow.loops[loopId]
  expect(loop, `Loop "${loopId}" should exist`).toBeDefined()

  if (expectedConfig) {
    if (expectedConfig.iterations !== undefined) {
      expect(loop.iterations).toBe(expectedConfig.iterations)
    }
    if (expectedConfig.loopType !== undefined) {
      expect(loop.loopType).toBe(expectedConfig.loopType)
    }
    if (expectedConfig.nodes !== undefined) {
      expect(loop.nodes).toEqual(expectedConfig.nodes)
    }
  }
}

/**
 * Asserts that a workflow has a parallel block with specific configuration.
 *
 * @example
 * ```ts
 * expectParallelExists(workflow, 'parallel-1', { count: 3 })
 * ```
 */
export function expectParallelExists(
  workflow: WorkflowState,
  parallelId: string,
  expectedConfig?: { count?: number; parallelType?: string; nodes?: string[] }
): void {
  const parallel = workflow.parallels[parallelId]
  expect(parallel, `Parallel "${parallelId}" should exist`).toBeDefined()

  if (expectedConfig) {
    if (expectedConfig.count !== undefined) {
      expect(parallel.count).toBe(expectedConfig.count)
    }
    if (expectedConfig.parallelType !== undefined) {
      expect(parallel.parallelType).toBe(expectedConfig.parallelType)
    }
    if (expectedConfig.nodes !== undefined) {
      expect(parallel.nodes).toEqual(expectedConfig.nodes)
    }
  }
}

/**
 * Asserts that the workflow state is empty.
 *
 * @example
 * ```ts
 * const workflow = createWorkflowState()
 * expectEmptyWorkflow(workflow)
 * ```
 */
export function expectEmptyWorkflow(workflow: WorkflowState): void {
  expect(Object.keys(workflow.blocks).length, 'Workflow should have no blocks').toBe(0)
  expect(workflow.edges.length, 'Workflow should have no edges').toBe(0)
  expect(Object.keys(workflow.loops).length, 'Workflow should have no loops').toBe(0)
  expect(Object.keys(workflow.parallels).length, 'Workflow should have no parallels').toBe(0)
}

/**
 * Asserts that blocks are connected in a linear chain.
 *
 * @example
 * ```ts
 * expectLinearChain(workflow.edges, ['start', 'step1', 'step2', 'end'])
 * ```
 */
export function expectLinearChain(edges: Edge[], blockIds: string[]): void {
  for (let i = 0; i < blockIds.length - 1; i++) {
    expectEdgeConnects(edges, blockIds[i], blockIds[i + 1])
  }
}
packages/testing/src/builders/execution.builder.ts (new file, 223 lines)
@@ -0,0 +1,223 @@
import type { ExecutionContext } from '../types'

/**
 * Fluent builder for creating execution contexts.
 *
 * Use this for complex execution scenarios where you need
 * fine-grained control over the context state.
 *
 * @example
 * ```ts
 * const ctx = new ExecutionContextBuilder()
 *   .forWorkflow('my-workflow')
 *   .withBlockState('block-1', { output: 'hello' })
 *   .markExecuted('block-1')
 *   .withEnvironment({ API_KEY: 'test' })
 *   .build()
 * ```
 */
export class ExecutionContextBuilder {
  private workflowId = 'test-workflow'
  private executionId = `exec-${Math.random().toString(36).substring(2, 10)}`
  private blockStates = new Map<string, any>()
  private executedBlocks = new Set<string>()
  private blockLogs: any[] = []
  private metadata: { duration: number; startTime?: string; endTime?: string } = { duration: 0 }
  private environmentVariables: Record<string, string> = {}
  private workflowVariables: Record<string, any> = {}
  private routerDecisions = new Map<string, any>()
  private conditionDecisions = new Map<string, any>()
  private loopExecutions = new Map<string, any>()
  private completedLoops = new Set<string>()
  private activeExecutionPath = new Set<string>()
  private abortSignal?: AbortSignal

  /**
   * Sets the workflow ID.
   */
  forWorkflow(workflowId: string): this {
    this.workflowId = workflowId
    return this
  }

  /**
   * Sets a custom execution ID.
   */
  withExecutionId(executionId: string): this {
    this.executionId = executionId
    return this
  }

  /**
   * Adds a block state.
   */
  withBlockState(blockId: string, state: any): this {
    this.blockStates.set(blockId, state)
    return this
  }

  /**
   * Adds multiple block states at once.
   */
  withBlockStates(states: Record<string, any>): this {
    Object.entries(states).forEach(([id, state]) => {
      this.blockStates.set(id, state)
    })
    return this
  }

  /**
   * Marks a block as executed.
   */
  markExecuted(blockId: string): this {
    this.executedBlocks.add(blockId)
    return this
  }

  /**
   * Marks multiple blocks as executed.
   */
  markAllExecuted(...blockIds: string[]): this {
    blockIds.forEach((id) => this.executedBlocks.add(id))
    return this
  }

  /**
   * Adds a log entry.
   */
  addLog(log: any): this {
    this.blockLogs.push(log)
    return this
  }

  /**
   * Sets execution metadata.
   */
  withMetadata(metadata: { duration?: number; startTime?: string; endTime?: string }): this {
    if (metadata.duration !== undefined) this.metadata.duration = metadata.duration
    if (metadata.startTime) this.metadata.startTime = metadata.startTime
    if (metadata.endTime) this.metadata.endTime = metadata.endTime
    return this
  }

  /**
   * Adds environment variables.
   */
  withEnvironment(vars: Record<string, string>): this {
    this.environmentVariables = { ...this.environmentVariables, ...vars }
    return this
  }

  /**
   * Adds workflow variables.
   */
  withVariables(vars: Record<string, any>): this {
    this.workflowVariables = { ...this.workflowVariables, ...vars }
    return this
  }

  /**
   * Sets a router decision.
   */
  withRouterDecision(blockId: string, decision: any): this {
    this.routerDecisions.set(blockId, decision)
    return this
  }

  /**
   * Sets a condition decision.
   */
  withConditionDecision(blockId: string, decision: boolean): this {
    this.conditionDecisions.set(blockId, decision)
    return this
  }

  /**
   * Marks a loop as completed.
   */
  completeLoop(loopId: string): this {
    this.completedLoops.add(loopId)
    return this
  }

  /**
   * Adds a block to the active execution path.
   */
  activatePath(blockId: string): this {
    this.activeExecutionPath.add(blockId)
    return this
  }

  /**
   * Sets an abort signal (for cancellation testing).
   */
  withAbortSignal(signal: AbortSignal): this {
    this.abortSignal = signal
    return this
  }

  /**
   * Creates a context that is already cancelled.
   */
  cancelled(): this {
    this.abortSignal = AbortSignal.abort()
    return this
  }

  /**
   * Creates a context with a timeout.
   */
  withTimeout(ms: number): this {
    this.abortSignal = AbortSignal.timeout(ms)
    return this
  }

  /**
   * Builds and returns the execution context.
   */
  build(): ExecutionContext {
    return {
      workflowId: this.workflowId,
      executionId: this.executionId,
      blockStates: this.blockStates,
      executedBlocks: this.executedBlocks,
      blockLogs: this.blockLogs,
      metadata: this.metadata,
      environmentVariables: this.environmentVariables,
      workflowVariables: this.workflowVariables,
      decisions: {
        router: this.routerDecisions,
        condition: this.conditionDecisions,
      },
      loopExecutions: this.loopExecutions,
      completedLoops: this.completedLoops,
      activeExecutionPath: this.activeExecutionPath,
      abortSignal: this.abortSignal,
    }
  }

  /**
   * Creates a fresh context builder for a workflow.
   */
  static createForWorkflow(workflowId: string): ExecutionContextBuilder {
    return new ExecutionContextBuilder().forWorkflow(workflowId)
  }

  /**
   * Creates a cancelled context.
   */
  static createCancelled(workflowId?: string): ExecutionContext {
    const builder = new ExecutionContextBuilder()
    if (workflowId) builder.forWorkflow(workflowId)
    return builder.cancelled().build()
  }

  /**
   * Creates a context with a timeout.
   */
  static createWithTimeout(ms: number, workflowId?: string): ExecutionContext {
    const builder = new ExecutionContextBuilder()
    if (workflowId) builder.forWorkflow(workflowId)
    return builder.withTimeout(ms).build()
  }
}
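A minimal sketch of the builder combined with the execution assertions from this commit; the workflow and block IDs are illustrative:

```ts
import { describe, it } from 'vitest'
import { ExecutionContextBuilder } from '@sim/testing/builders'
import {
  expectBlockExecuted,
  expectBlockNotExecuted,
  expectExecutionCancelled,
} from '@sim/testing/assertions'

describe('ExecutionContextBuilder', () => {
  it('tracks executed blocks and cancellation', () => {
    const ctx = new ExecutionContextBuilder()
      .forWorkflow('wf-1')
      .markAllExecuted('start', 'agent-1')
      .cancelled()
      .build()

    expectBlockExecuted(ctx, 'agent-1')
    expectBlockNotExecuted(ctx, 'end')
    expectExecutionCancelled(ctx)
  })
})
```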
packages/testing/src/builders/index.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
/**
 * Builder classes for fluent test data construction.
 *
 * Use builders when you need fine-grained control over complex objects.
 *
 * @example
 * ```ts
 * import { WorkflowBuilder, ExecutionContextBuilder } from '@sim/testing/builders'
 *
 * // Build a workflow
 * const workflow = WorkflowBuilder.linear(3).build()
 *
 * // Build an execution context
 * const ctx = ExecutionContextBuilder.createForWorkflow('my-wf')
 *   .withBlockState('block-1', { output: 'hello' })
 *   .build()
 * ```
 */

export { ExecutionContextBuilder } from './execution.builder'
export { WorkflowBuilder } from './workflow.builder'
packages/testing/src/builders/workflow.builder.ts (new file, 356 lines)
@@ -0,0 +1,356 @@
import {
  createAgentBlock,
  createBlock,
  createFunctionBlock,
  createStarterBlock,
} from '../factories/block.factory'
import type { BlockState, Edge, Loop, Parallel, Position, WorkflowState } from '../types'

/**
 * Fluent builder for creating complex workflow states.
 *
 * Use this when you need fine-grained control over workflow construction,
 * especially for testing edge cases or complex scenarios.
 *
 * @example
 * ```ts
 * // Simple linear workflow
 * const workflow = new WorkflowBuilder()
 *   .addStarter('start')
 *   .addAgent('agent', { x: 200, y: 0 })
 *   .addFunction('end', { x: 400, y: 0 })
 *   .connect('start', 'agent')
 *   .connect('agent', 'end')
 *   .build()
 *
 * // Using static presets
 * const workflow = WorkflowBuilder.linear(5).build()
 * const workflow = WorkflowBuilder.branching().build()
 * ```
 */
export class WorkflowBuilder {
  private blocks: Record<string, BlockState> = {}
  private edges: Edge[] = []
  private loops: Record<string, Loop> = {}
  private parallels: Record<string, Parallel> = {}
  private variables: WorkflowState['variables'] = []
  private isDeployed = false

  /**
   * Adds a generic block to the workflow.
   */
  addBlock(id: string, type: string, position?: Position, name?: string): this {
    this.blocks[id] = createBlock({
      id,
      type,
      name: name ?? id,
      position: position ?? { x: 0, y: 0 },
    })
    return this
  }

  /**
   * Adds a starter block (workflow entry point).
   */
  addStarter(id = 'start', position?: Position): this {
    this.blocks[id] = createStarterBlock({
      id,
      position: position ?? { x: 0, y: 0 },
    })
    return this
  }

  /**
   * Adds a function block.
   */
  addFunction(id: string, position?: Position, name?: string): this {
    this.blocks[id] = createFunctionBlock({
      id,
      name: name ?? id,
      position: position ?? { x: 0, y: 0 },
    })
    return this
  }

  /**
   * Adds an agent block.
   */
  addAgent(id: string, position?: Position, name?: string): this {
    this.blocks[id] = createAgentBlock({
      id,
      name: name ?? id,
      position: position ?? { x: 0, y: 0 },
    })
    return this
  }

  /**
   * Adds a condition block.
   */
  addCondition(id: string, position?: Position, name?: string): this {
    this.blocks[id] = createBlock({
      id,
      type: 'condition',
      name: name ?? id,
      position: position ?? { x: 0, y: 0 },
    })
    return this
  }

  /**
   * Adds a loop container block.
   */
  addLoop(
    id: string,
    position?: Position,
    config?: {
      iterations?: number
      loopType?: 'for' | 'forEach' | 'while' | 'doWhile'
    }
  ): this {
    this.blocks[id] = createBlock({
      id,
      type: 'loop',
      name: 'Loop',
      position: position ?? { x: 0, y: 0 },
      data: {
        loopType: config?.loopType ?? 'for',
        count: config?.iterations ?? 3,
        type: 'loop',
      },
    })
    this.loops[id] = {
      id,
      nodes: [],
      iterations: config?.iterations ?? 3,
      loopType: config?.loopType ?? 'for',
    }
    return this
  }

  /**
   * Adds a block as a child of a loop container.
   */
  addLoopChild(loopId: string, childId: string, type = 'function', position?: Position): this {
    if (!this.loops[loopId]) {
      throw new Error(`Loop ${loopId} does not exist. Call addLoop first.`)
    }

    this.blocks[childId] = createBlock({
      id: childId,
      type,
      name: childId,
      position: position ?? { x: 50, y: 50 },
      parentId: loopId,
    })

    this.loops[loopId].nodes.push(childId)
    return this
  }

  /**
   * Adds a parallel container block.
   */
  addParallel(
    id: string,
    position?: Position,
    config?: {
      count?: number
      parallelType?: 'count' | 'collection'
    }
  ): this {
    this.blocks[id] = createBlock({
      id,
      type: 'parallel',
      name: 'Parallel',
      position: position ?? { x: 0, y: 0 },
      data: {
        parallelType: config?.parallelType ?? 'count',
        count: config?.count ?? 2,
        type: 'parallel',
      },
    })
    this.parallels[id] = {
      id,
      nodes: [],
      count: config?.count ?? 2,
      parallelType: config?.parallelType ?? 'count',
    }
    return this
  }

  /**
   * Adds a block as a child of a parallel container.
   */
  addParallelChild(
    parallelId: string,
    childId: string,
    type = 'function',
    position?: Position
  ): this {
    if (!this.parallels[parallelId]) {
      throw new Error(`Parallel ${parallelId} does not exist. Call addParallel first.`)
    }

    this.blocks[childId] = createBlock({
      id: childId,
      type,
      name: childId,
      position: position ?? { x: 50, y: 50 },
      parentId: parallelId,
    })

    this.parallels[parallelId].nodes.push(childId)
    return this
  }

  /**
   * Creates an edge connecting two blocks.
   */
  connect(sourceId: string, targetId: string, sourceHandle?: string, targetHandle?: string): this {
    this.edges.push({
      id: `${sourceId}-${targetId}`,
      source: sourceId,
      target: targetId,
      sourceHandle,
      targetHandle,
    })
    return this
  }

  /**
   * Adds a workflow variable.
   */
  addVariable(
    name: string,
    type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain',
    value: any
  ): this {
    this.variables?.push({
      id: `var-${Math.random().toString(36).substring(2, 8)}`,
      name,
      type,
      value,
    })
    return this
  }

  /**
   * Sets the workflow as deployed.
   */
  setDeployed(deployed = true): this {
    this.isDeployed = deployed
    return this
  }

  /**
   * Builds and returns the workflow state.
   */
  build(): WorkflowState {
    return {
      blocks: this.blocks,
      edges: this.edges,
      loops: this.loops,
      parallels: this.parallels,
      lastSaved: Date.now(),
      isDeployed: this.isDeployed,
      variables: this.variables?.length ? this.variables : undefined,
    }
  }

  /**
   * Creates a workflow with the specified blocks and connects them linearly.
   */
  static chain(...blockConfigs: Array<{ id: string; type: string }>): WorkflowBuilder {
    const builder = new WorkflowBuilder()
    let x = 0
    const spacing = 200

    blockConfigs.forEach((config, index) => {
      builder.addBlock(config.id, config.type, { x, y: 0 })
      x += spacing

      if (index > 0) {
        builder.connect(blockConfigs[index - 1].id, config.id)
      }
    })

    return builder
  }

  /**
   * Creates a linear workflow with N blocks.
   * First block is a starter, rest are function blocks.
   */
  static linear(blockCount: number): WorkflowBuilder {
    const builder = new WorkflowBuilder()
    const spacing = 200

    for (let i = 0; i < blockCount; i++) {
      const id = `block-${i}`
      const position = { x: i * spacing, y: 0 }

      if (i === 0) {
        builder.addStarter(id, position)
      } else {
        builder.addFunction(id, position, `Step ${i}`)
      }

      if (i > 0) {
        builder.connect(`block-${i - 1}`, id)
      }
    }

    return builder
  }

  /**
   * Creates a branching workflow with a condition.
   *
   * Structure:
   * ```
   *                  ┌─→ true ──┐
   * start ─→ cond ─→ ┤          ├─→ end
   *                  └─→ false ─┘
   * ```
   */
  static branching(): WorkflowBuilder {
    return new WorkflowBuilder()
      .addStarter('start', { x: 0, y: 0 })
      .addCondition('condition', { x: 200, y: 0 })
      .addFunction('true-branch', { x: 400, y: -100 }, 'If True')
      .addFunction('false-branch', { x: 400, y: 100 }, 'If False')
      .addFunction('end', { x: 600, y: 0 }, 'End')
      .connect('start', 'condition')
      .connect('condition', 'true-branch', 'condition-if')
      .connect('condition', 'false-branch', 'condition-else')
      .connect('true-branch', 'end')
      .connect('false-branch', 'end')
  }

  /**
   * Creates a workflow with a loop.
   */
  static withLoop(iterations = 3): WorkflowBuilder {
    return new WorkflowBuilder()
      .addStarter('start', { x: 0, y: 0 })
      .addLoop('loop', { x: 200, y: 0 }, { iterations })
      .addLoopChild('loop', 'loop-body', 'function', { x: 50, y: 50 })
      .addFunction('end', { x: 500, y: 0 })
      .connect('start', 'loop')
      .connect('loop', 'end')
  }

  /**
   * Creates a workflow with parallel execution.
   */
  static withParallel(count = 2): WorkflowBuilder {
    return new WorkflowBuilder()
      .addStarter('start', { x: 0, y: 0 })
      .addParallel('parallel', { x: 200, y: 0 }, { count })
      .addParallelChild('parallel', 'parallel-task', 'function', { x: 50, y: 50 })
      .addFunction('end', { x: 500, y: 0 })
      .connect('start', 'parallel')
      .connect('parallel', 'end')
  }
}
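A minimal sketch of the static presets combined with the workflow assertions; block IDs follow the `block-N` naming the presets generate:

```ts
import { describe, it } from 'vitest'
import { WorkflowBuilder } from '@sim/testing/builders'
import {
  expectBlockCount,
  expectBlockExists,
  expectLinearChain,
  expectLoopExists,
} from '@sim/testing/assertions'

describe('WorkflowBuilder presets', () => {
  it('builds a linear workflow with a starter followed by function blocks', () => {
    const workflow = WorkflowBuilder.linear(3).build()

    expectBlockCount(workflow, 3)
    expectBlockExists(workflow.blocks, 'block-0', 'starter')
    expectLinearChain(workflow.edges, ['block-0', 'block-1', 'block-2'])
  })

  it('registers loop containers in workflow.loops', () => {
    const workflow = WorkflowBuilder.withLoop(5).build()

    expectLoopExists(workflow, 'loop', { iterations: 5, loopType: 'for', nodes: ['loop-body'] })
  })
})
```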
packages/testing/src/factories/block.factory.ts (new file, 217 lines)
@@ -0,0 +1,217 @@
import type { BlockData, BlockOutput, BlockState, Position, SubBlockState } from '../types'

/**
 * Options for creating a mock block.
 * All fields are optional - sensible defaults are provided.
 */
export interface BlockFactoryOptions {
  id?: string
  type?: string
  name?: string
  position?: Position
  subBlocks?: Record<string, SubBlockState>
  outputs?: Record<string, BlockOutput>
  enabled?: boolean
  horizontalHandles?: boolean
  height?: number
  advancedMode?: boolean
  triggerMode?: boolean
  data?: BlockData
  parentId?: string
}

/**
 * Generates a unique block ID.
 */
function generateBlockId(prefix = 'block'): string {
  return `${prefix}-${Math.random().toString(36).substring(2, 10)}`
}

/**
 * Creates a mock block with sensible defaults.
 * Override any property as needed.
 *
 * @example
 * ```ts
 * // Basic block
 * const block = createBlock({ type: 'agent' })
 *
 * // Block with specific position
 * const block = createBlock({ type: 'function', position: { x: 100, y: 200 } })
 *
 * // Block with parent (for loops/parallels)
 * const block = createBlock({ type: 'function', parentId: 'loop-1' })
 * ```
 */
export function createBlock(options: BlockFactoryOptions = {}): BlockState {
  const id = options.id ?? generateBlockId(options.type ?? 'block')

  const data: BlockData = options.data ?? {}
  if (options.parentId) {
    data.parentId = options.parentId
    data.extent = 'parent'
  }

  return {
    id,
    type: options.type ?? 'function',
    name: options.name ?? `Block ${id.substring(0, 8)}`,
    position: options.position ?? { x: 0, y: 0 },
    subBlocks: options.subBlocks ?? {},
    outputs: options.outputs ?? {},
    enabled: options.enabled ?? true,
    horizontalHandles: options.horizontalHandles ?? true,
    height: options.height ?? 0,
    advancedMode: options.advancedMode ?? false,
    triggerMode: options.triggerMode ?? false,
    data: Object.keys(data).length > 0 ? data : undefined,
    layout: {},
  }
}

/**
 * Creates a starter block (workflow entry point).
 */
export function createStarterBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'starter',
    name: options.name ?? 'Start',
  })
}

/**
 * Creates an agent block (AI agent execution).
 */
export function createAgentBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'agent',
    name: options.name ?? 'Agent',
  })
}

/**
 * Creates a function block (code execution).
 */
export function createFunctionBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'function',
    name: options.name ?? 'Function',
  })
}

/**
 * Creates a condition block (branching logic).
 */
export function createConditionBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'condition',
    name: options.name ?? 'Condition',
  })
}

/**
 * Creates a loop block (iteration container).
 */
export function createLoopBlock(
  options: Omit<BlockFactoryOptions, 'type'> & {
    loopType?: 'for' | 'forEach' | 'while' | 'doWhile'
    count?: number
  } = {}
): BlockState {
  const data: BlockData = {
    ...options.data,
    loopType: options.loopType ?? 'for',
    count: options.count ?? 3,
    type: 'loop',
  }

  return createBlock({
    ...options,
    type: 'loop',
    name: options.name ?? 'Loop',
    data,
  })
}

/**
 * Creates a parallel block (concurrent execution container).
 */
export function createParallelBlock(
  options: Omit<BlockFactoryOptions, 'type'> & {
    parallelType?: 'count' | 'collection'
    count?: number
  } = {}
): BlockState {
  const data: BlockData = {
    ...options.data,
    parallelType: options.parallelType ?? 'count',
    count: options.count ?? 2,
    type: 'parallel',
  }

  return createBlock({
    ...options,
    type: 'parallel',
    name: options.name ?? 'Parallel',
    data,
  })
}

/**
 * Creates a router block (output routing).
 */
export function createRouterBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'router',
    name: options.name ?? 'Router',
  })
}

/**
 * Creates an API block (HTTP requests).
 */
export function createApiBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'api',
    name: options.name ?? 'API',
  })
}

/**
 * Creates a response block (workflow output).
 */
export function createResponseBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'response',
    name: options.name ?? 'Response',
  })
}

/**
 * Creates a webhook trigger block.
 */
export function createWebhookBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'webhook',
    name: options.name ?? 'Webhook',
  })
}

/**
 * Creates a knowledge block (vector search).
 */
export function createKnowledgeBlock(options: Omit<BlockFactoryOptions, 'type'> = {}): BlockState {
  return createBlock({
    ...options,
    type: 'knowledge',
    name: options.name ?? 'Knowledge',
  })
}
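A minimal sketch of the block factory defaults in a vitest test, assuming the factories barrel (`@sim/testing/factories`) re-exports these helpers:

```ts
import { describe, expect, it } from 'vitest'
import { createBlock, createLoopBlock } from '@sim/testing/factories'

describe('block factory', () => {
  it('attaches parent metadata when parentId is given', () => {
    const child = createBlock({ type: 'function', parentId: 'loop-1' })
    expect(child.data).toMatchObject({ parentId: 'loop-1', extent: 'parent' })
  })

  it('defaults loop blocks to a for loop with 3 iterations', () => {
    const loop = createLoopBlock()
    expect(loop.data).toMatchObject({ type: 'loop', loopType: 'for', count: 3 })
  })
})
```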
packages/testing/src/factories/dag.factory.ts (new file, 191 lines)
@@ -0,0 +1,191 @@
/**
 * Factory functions for creating DAG (Directed Acyclic Graph) test fixtures.
 * These are used in executor tests for DAG construction and edge management.
 */

import { createSerializedBlock, type SerializedBlock } from './serialized-block.factory'

/**
 * DAG edge structure.
 */
export interface DAGEdge {
  target: string
  sourceHandle?: string
  targetHandle?: string
}

/**
 * DAG node structure.
 */
export interface DAGNode {
  id: string
  block: SerializedBlock
  outgoingEdges: Map<string, DAGEdge>
  incomingEdges: Set<string>
  metadata: Record<string, any>
}

/**
 * DAG structure.
 */
export interface DAG {
  nodes: Map<string, DAGNode>
  loopConfigs: Map<string, any>
  parallelConfigs: Map<string, any>
}

/**
 * Options for creating a DAG node.
 */
export interface DAGNodeFactoryOptions {
  id?: string
  type?: string
  block?: SerializedBlock
  outgoingEdges?: DAGEdge[]
  incomingEdges?: string[]
  metadata?: Record<string, any>
  params?: Record<string, any>
}

/**
 * Creates a DAG node with sensible defaults.
 *
 * @example
 * ```ts
 * const node = createDAGNode({ id: 'block-1' })
 *
 * // With outgoing edges
 * const node = createDAGNode({
 *   id: 'start',
 *   outgoingEdges: [{ target: 'end' }]
 * })
 * ```
 */
export function createDAGNode(options: DAGNodeFactoryOptions = {}): DAGNode {
  const id = options.id ?? `node-${Math.random().toString(36).substring(2, 8)}`
  const block =
    options.block ??
    createSerializedBlock({
      id,
      type: options.type ?? 'function',
      params: options.params,
    })

  const outgoingEdges = new Map<string, DAGEdge>()
  if (options.outgoingEdges) {
    options.outgoingEdges.forEach((edge, i) => {
      outgoingEdges.set(`edge-${i}`, edge)
    })
  }

  return {
    id,
    block,
    outgoingEdges,
    incomingEdges: new Set(options.incomingEdges ?? []),
    metadata: options.metadata ?? {},
  }
}

/**
 * Creates a DAG structure from a list of node IDs.
 *
 * @example
 * ```ts
 * const dag = createDAG(['block-1', 'block-2', 'block-3'])
 * ```
 */
export function createDAG(nodeIds: string[]): DAG {
  const nodes = new Map<string, DAGNode>()
  for (const id of nodeIds) {
    nodes.set(id, createDAGNode({ id }))
  }
  return {
    nodes,
    loopConfigs: new Map(),
    parallelConfigs: new Map(),
  }
}

/**
 * Creates a DAG from a node configuration array.
 *
 * @example
 * ```ts
 * const dag = createDAGFromNodes([
 *   { id: 'start', outgoingEdges: [{ target: 'middle' }] },
 *   { id: 'middle', outgoingEdges: [{ target: 'end' }], incomingEdges: ['start'] },
 *   { id: 'end', incomingEdges: ['middle'] }
 * ])
 * ```
 */
export function createDAGFromNodes(nodeConfigs: DAGNodeFactoryOptions[]): DAG {
  const nodes = new Map<string, DAGNode>()
  for (const config of nodeConfigs) {
    const node = createDAGNode(config)
    nodes.set(node.id, node)
  }
  return {
    nodes,
    loopConfigs: new Map(),
    parallelConfigs: new Map(),
  }
}

/**
 * Creates a linear DAG where each node connects to the next.
 *
 * @example
 * ```ts
 * // Creates A -> B -> C
 * const dag = createLinearDAG(['A', 'B', 'C'])
 * ```
 */
export function createLinearDAG(nodeIds: string[]): DAG {
  const nodes = new Map<string, DAGNode>()

  for (let i = 0; i < nodeIds.length; i++) {
    const id = nodeIds[i]
    const outgoingEdges: DAGEdge[] = i < nodeIds.length - 1 ? [{ target: nodeIds[i + 1] }] : []
    const incomingEdges = i > 0 ? [nodeIds[i - 1]] : []

    nodes.set(id, createDAGNode({ id, outgoingEdges, incomingEdges }))
  }

  return {
    nodes,
    loopConfigs: new Map(),
    parallelConfigs: new Map(),
  }
}

/**
 * Adds a node to an existing DAG.
 */
export function addNodeToDAG(dag: DAG, node: DAGNode): DAG {
  dag.nodes.set(node.id, node)
  return dag
}

/**
 * Connects two nodes in a DAG with an edge.
 */
export function connectDAGNodes(
  dag: DAG,
  sourceId: string,
  targetId: string,
  sourceHandle?: string
): DAG {
  const sourceNode = dag.nodes.get(sourceId)
  const targetNode = dag.nodes.get(targetId)

  if (sourceNode && targetNode) {
    const edgeId = sourceHandle
      ? `${sourceId}→${targetId}-${sourceHandle}`
      : `${sourceId}→${targetId}`
    sourceNode.outgoingEdges.set(edgeId, { target: targetId, sourceHandle })
    targetNode.incomingEdges.add(sourceId)
  }

  return dag
}
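A minimal sketch of the DAG factories in a vitest test, again assuming the factories barrel re-exports them:

```ts
import { describe, expect, it } from 'vitest'
import { connectDAGNodes, createLinearDAG } from '@sim/testing/factories'

describe('DAG factories', () => {
  it('wires incoming and outgoing edges for a linear DAG', () => {
    const dag = createLinearDAG(['A', 'B', 'C'])

    // A's only outgoing edge targets B; C records B as an incoming edge.
    expect([...dag.nodes.get('A')!.outgoingEdges.values()]).toEqual([{ target: 'B' }])
    expect(dag.nodes.get('C')!.incomingEdges.has('B')).toBe(true)
  })

  it('adds edges between existing nodes', () => {
    const dag = createLinearDAG(['A', 'B', 'C'])
    connectDAGNodes(dag, 'A', 'C')

    expect(dag.nodes.get('A')!.outgoingEdges.get('A→C')).toEqual({ target: 'C' })
    expect(dag.nodes.get('C')!.incomingEdges.has('A')).toBe(true)
  })
})
```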
packages/testing/src/factories/edge.factory.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
import type { Edge } from '../types'

/**
 * Options for creating a mock edge.
 */
export interface EdgeFactoryOptions {
  id?: string
  source: string
  target: string
  sourceHandle?: string
  targetHandle?: string
  type?: string
  data?: Record<string, any>
}

/**
 * Generates an edge ID from source and target.
 */
function generateEdgeId(source: string, target: string): string {
  return `${source}-${target}-${Math.random().toString(36).substring(2, 6)}`
}

/**
 * Creates a mock edge connecting two blocks.
 *
 * @example
 * ```ts
 * // Simple edge
 * const edge = createEdge({ source: 'block-1', target: 'block-2' })
 *
 * // Edge with specific handles
 * const edge = createEdge({
 *   source: 'condition-1',
 *   target: 'block-2',
 *   sourceHandle: 'condition-if'
 * })
 * ```
 */
export function createEdge(options: EdgeFactoryOptions): Edge {
  return {
    id: options.id ?? generateEdgeId(options.source, options.target),
    source: options.source,
    target: options.target,
    sourceHandle: options.sourceHandle,
    targetHandle: options.targetHandle,
    type: options.type ?? 'default',
    data: options.data,
  }
}

/**
 * Creates multiple edges from a connection specification.
 *
 * @example
 * ```ts
 * const edges = createEdges([
 *   { source: 'start', target: 'agent' },
 *   { source: 'agent', target: 'end' },
 * ])
 * ```
 */
export function createEdges(
  connections: Array<{
    source: string
    target: string
    sourceHandle?: string
    targetHandle?: string
  }>
): Edge[] {
  return connections.map((conn) => createEdge(conn))
}

/**
 * Creates a linear chain of edges connecting blocks in order.
 *
 * @example
 * ```ts
 * // Creates edges: a->b, b->c, c->d
 * const edges = createLinearEdges(['a', 'b', 'c', 'd'])
 * ```
 */
export function createLinearEdges(blockIds: string[]): Edge[] {
  const edges: Edge[] = []
  for (let i = 0; i < blockIds.length - 1; i++) {
    edges.push(createEdge({ source: blockIds[i], target: blockIds[i + 1] }))
  }
  return edges
}
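A minimal sketch pairing the edge factory with the workflow assertions:

```ts
import { describe, it } from 'vitest'
import { createLinearEdges } from '@sim/testing/factories'
import { expectLinearChain, expectNoEdgeBetween } from '@sim/testing/assertions'

describe('edge factory', () => {
  it('chains blocks in order without reverse edges', () => {
    const edges = createLinearEdges(['a', 'b', 'c'])

    expectLinearChain(edges, ['a', 'b', 'c'])
    expectNoEdgeBetween(edges, 'b', 'a')
  })
})
```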
packages/testing/src/factories/execution.factory.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
import type { ExecutionContext } from '../types'
|
||||
|
||||
/**
|
||||
* Options for creating a mock execution context.
|
||||
*/
|
||||
export interface ExecutionContextFactoryOptions {
|
||||
workflowId?: string
|
||||
executionId?: string
|
||||
blockStates?: Map<string, any>
|
||||
executedBlocks?: Set<string>
|
||||
blockLogs?: any[]
|
||||
metadata?: {
|
||||
duration?: number
|
||||
startTime?: string
|
||||
endTime?: string
|
||||
}
|
||||
environmentVariables?: Record<string, string>
|
||||
workflowVariables?: Record<string, any>
|
||||
abortSignal?: AbortSignal
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock execution context for testing workflow execution.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const ctx = createExecutionContext({ workflowId: 'test-wf' })
|
||||
*
|
||||
* // With abort signal
|
||||
* const ctx = createExecutionContext({
|
||||
* workflowId: 'test-wf',
|
||||
* abortSignal: AbortSignal.abort(),
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
export function createExecutionContext(
|
||||
options: ExecutionContextFactoryOptions = {}
|
||||
): ExecutionContext {
|
||||
return {
|
||||
workflowId: options.workflowId ?? 'test-workflow',
|
||||
executionId: options.executionId ?? `exec-${Math.random().toString(36).substring(2, 10)}`,
|
||||
blockStates: options.blockStates ?? new Map(),
|
||||
executedBlocks: options.executedBlocks ?? new Set(),
|
||||
blockLogs: options.blockLogs ?? [],
|
||||
metadata: {
|
||||
duration: options.metadata?.duration ?? 0,
|
||||
startTime: options.metadata?.startTime ?? new Date().toISOString(),
|
||||
endTime: options.metadata?.endTime,
|
||||
},
|
||||
environmentVariables: options.environmentVariables ?? {},
|
||||
workflowVariables: options.workflowVariables ?? {},
|
||||
decisions: {
|
||||
router: new Map(),
|
||||
condition: new Map(),
|
||||
},
|
||||
loopExecutions: new Map(),
|
||||
completedLoops: new Set(),
|
||||
activeExecutionPath: new Set(),
|
||||
abortSignal: options.abortSignal,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an execution context with pre-populated block states.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const ctx = createExecutionContextWithStates({
|
||||
* 'block-1': { output: 'hello' },
|
||||
* 'block-2': { output: 'world' },
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
export function createExecutionContextWithStates(
|
||||
blockStates: Record<string, any>,
|
||||
options: Omit<ExecutionContextFactoryOptions, 'blockStates'> = {}
|
||||
): ExecutionContext {
|
||||
const stateMap = new Map(Object.entries(blockStates))
|
||||
return createExecutionContext({
|
||||
...options,
|
||||
blockStates: stateMap,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an execution context that is already cancelled.
|
||||
*/
|
||||
export function createCancelledExecutionContext(
|
||||
options: Omit<ExecutionContextFactoryOptions, 'abortSignal'> = {}
|
||||
): ExecutionContext {
|
||||
return createExecutionContext({
|
||||
...options,
|
||||
abortSignal: AbortSignal.abort(),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an execution context with a timeout.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const ctx = createTimedExecutionContext(5000) // 5 second timeout
|
||||
* ```
|
||||
*/
|
||||
export function createTimedExecutionContext(
|
||||
timeoutMs: number,
|
||||
options: Omit<ExecutionContextFactoryOptions, 'abortSignal'> = {}
|
||||
): ExecutionContext {
|
||||
return createExecutionContext({
|
||||
...options,
|
||||
abortSignal: AbortSignal.timeout(timeoutMs),
|
||||
})
|
||||
}
|
||||
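A short sketch of the abort-aware helpers above; it relies only on standard `AbortSignal` semantics (`AbortSignal.abort()` returns an already-aborted signal, `AbortSignal.timeout()` starts out live):

```ts
import { expect, it } from 'vitest'
import {
  createCancelledExecutionContext,
  createTimedExecutionContext,
} from '@sim/testing/factories'

it('creates contexts that carry abort state', () => {
  const cancelled = createCancelledExecutionContext()
  expect(cancelled.abortSignal?.aborted).toBe(true)

  // A timed context aborts only after the timeout elapses.
  const timed = createTimedExecutionContext(5000)
  expect(timed.abortSignal?.aborted).toBe(false)
})
```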
205
packages/testing/src/factories/executor-context.factory.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
/**
|
||||
* Factory functions for creating ExecutionContext test fixtures for executor tests.
|
||||
* This is the executor-specific context, different from the generic testing context.
|
||||
*/
|
||||
|
||||
import type {
|
||||
SerializedBlock,
|
||||
SerializedConnection,
|
||||
SerializedWorkflow,
|
||||
} from './serialized-block.factory'
|
||||
|
||||
/**
|
||||
* Block state in execution context.
|
||||
*/
|
||||
export interface ExecutorBlockState {
|
||||
output: Record<string, any>
|
||||
executed: boolean
|
||||
executionTime: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Execution context for executor tests.
|
||||
*/
|
||||
export interface ExecutorContext {
|
||||
workflowId: string
|
||||
workspaceId?: string
|
||||
executionId?: string
|
||||
userId?: string
|
||||
blockStates: Map<string, ExecutorBlockState>
|
||||
executedBlocks: Set<string>
|
||||
blockLogs: any[]
|
||||
metadata: {
|
||||
duration: number
|
||||
startTime?: string
|
||||
endTime?: string
|
||||
}
|
||||
environmentVariables: Record<string, string>
|
||||
workflowVariables?: Record<string, any>
|
||||
decisions: {
|
||||
router: Map<string, string>
|
||||
condition: Map<string, string>
|
||||
}
|
||||
loopExecutions: Map<string, any>
|
||||
completedLoops: Set<string>
|
||||
activeExecutionPath: Set<string>
|
||||
workflow?: SerializedWorkflow
|
||||
currentVirtualBlockId?: string
|
||||
abortSignal?: AbortSignal
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating an executor context.
|
||||
*/
|
||||
export interface ExecutorContextFactoryOptions {
|
||||
workflowId?: string
|
||||
workspaceId?: string
|
||||
executionId?: string
|
||||
userId?: string
|
||||
blockStates?: Map<string, ExecutorBlockState> | Record<string, ExecutorBlockState>
|
||||
executedBlocks?: Set<string> | string[]
|
||||
blockLogs?: any[]
|
||||
metadata?: {
|
||||
duration?: number
|
||||
startTime?: string
|
||||
endTime?: string
|
||||
}
|
||||
environmentVariables?: Record<string, string>
|
||||
workflowVariables?: Record<string, any>
|
||||
workflow?: SerializedWorkflow
|
||||
currentVirtualBlockId?: string
|
||||
abortSignal?: AbortSignal
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an executor context with sensible defaults.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const ctx = createExecutorContext({ workflowId: 'test-wf' })
|
||||
*
|
||||
* // With pre-populated block states
|
||||
* const ctx = createExecutorContext({
|
||||
* blockStates: {
|
||||
* 'block-1': { output: { value: 10 }, executed: true, executionTime: 100 }
|
||||
* }
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
export function createExecutorContext(
|
||||
options: ExecutorContextFactoryOptions = {}
|
||||
): ExecutorContext {
|
||||
let blockStates: Map<string, ExecutorBlockState>
|
||||
if (options.blockStates instanceof Map) {
|
||||
blockStates = options.blockStates
|
||||
} else if (options.blockStates) {
|
||||
blockStates = new Map(Object.entries(options.blockStates))
|
||||
} else {
|
||||
blockStates = new Map()
|
||||
}
|
||||
|
||||
let executedBlocks: Set<string>
|
||||
if (options.executedBlocks instanceof Set) {
|
||||
executedBlocks = options.executedBlocks
|
||||
} else if (Array.isArray(options.executedBlocks)) {
|
||||
executedBlocks = new Set(options.executedBlocks)
|
||||
} else {
|
||||
executedBlocks = new Set()
|
||||
}
|
||||
|
||||
return {
|
||||
workflowId: options.workflowId ?? 'test-workflow-id',
|
||||
workspaceId: options.workspaceId ?? 'test-workspace-id',
|
||||
executionId: options.executionId,
|
||||
userId: options.userId,
|
||||
blockStates,
|
||||
executedBlocks,
|
||||
blockLogs: options.blockLogs ?? [],
|
||||
metadata: {
|
||||
duration: options.metadata?.duration ?? 0,
|
||||
startTime: options.metadata?.startTime,
|
||||
endTime: options.metadata?.endTime,
|
||||
},
|
||||
environmentVariables: options.environmentVariables ?? {},
|
||||
workflowVariables: options.workflowVariables,
|
||||
decisions: {
|
||||
router: new Map(),
|
||||
condition: new Map(),
|
||||
},
|
||||
loopExecutions: new Map(),
|
||||
completedLoops: new Set(),
|
||||
activeExecutionPath: new Set(),
|
||||
workflow: options.workflow,
|
||||
currentVirtualBlockId: options.currentVirtualBlockId,
|
||||
abortSignal: options.abortSignal,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an executor context with pre-executed blocks.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const ctx = createExecutorContextWithBlocks({
|
||||
* 'source-block': { value: 10, text: 'hello' },
|
||||
* 'other-block': { result: true }
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
export function createExecutorContextWithBlocks(
|
||||
blockOutputs: Record<string, Record<string, any>>,
|
||||
options: Omit<ExecutorContextFactoryOptions, 'blockStates' | 'executedBlocks'> = {}
|
||||
): ExecutorContext {
|
||||
const blockStates = new Map<string, ExecutorBlockState>()
|
||||
const executedBlocks = new Set<string>()
|
||||
|
||||
for (const [blockId, output] of Object.entries(blockOutputs)) {
|
||||
blockStates.set(blockId, {
|
||||
output,
|
||||
executed: true,
|
||||
executionTime: 100,
|
||||
})
|
||||
executedBlocks.add(blockId)
|
||||
}
|
||||
|
||||
return createExecutorContext({
|
||||
...options,
|
||||
blockStates,
|
||||
executedBlocks,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a block state to an existing context.
|
||||
* Returns the context for chaining.
|
||||
*/
|
||||
export function addBlockState(
|
||||
ctx: ExecutorContext,
|
||||
blockId: string,
|
||||
output: Record<string, any>,
|
||||
executionTime = 100
|
||||
): ExecutorContext {
|
||||
;(ctx.blockStates as Map<string, ExecutorBlockState>).set(blockId, {
|
||||
output,
|
||||
executed: true,
|
||||
executionTime,
|
||||
})
|
||||
;(ctx.executedBlocks as Set<string>).add(blockId)
|
||||
return ctx
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a minimal workflow for context.
|
||||
*/
|
||||
export function createMinimalWorkflow(
|
||||
blocks: SerializedBlock[],
|
||||
connections: SerializedConnection[] = []
|
||||
): SerializedWorkflow {
|
||||
return {
|
||||
version: '1.0',
|
||||
blocks,
|
||||
connections,
|
||||
loops: {},
|
||||
parallels: {},
|
||||
}
|
||||
}
|
||||
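A minimal sketch of how the executor-context helpers above combine, assuming the `./factories` subpath export:

```ts
import { expect, it } from 'vitest'
import { addBlockState, createExecutorContextWithBlocks } from '@sim/testing/factories'

it('pre-populates and extends block states', () => {
  const ctx = createExecutorContextWithBlocks({ 'source-block': { value: 10 } })
  expect(ctx.executedBlocks.has('source-block')).toBe(true)

  // addBlockState mutates the context and returns it for chaining.
  addBlockState(ctx, 'other-block', { result: true })
  expect(ctx.blockStates.get('other-block')?.output).toEqual({ result: true })
})
```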
160
packages/testing/src/factories/index.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
/**
|
||||
* Factory functions for creating test fixtures.
|
||||
*
|
||||
* Use these to create mock data with sensible defaults.
|
||||
* All functions allow overriding any field.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import {
|
||||
* createBlock,
|
||||
* createStarterBlock,
|
||||
* createAgentBlock,
|
||||
* createLinearWorkflow,
|
||||
* createExecutionContext,
|
||||
* } from '@sim/testing/factories'
|
||||
*
|
||||
* // Create a simple workflow
|
||||
* const workflow = createLinearWorkflow(3)
|
||||
*
|
||||
* // Create a specific block
|
||||
* const agent = createAgentBlock({ id: 'my-agent', position: { x: 100, y: 200 } })
|
||||
*
|
||||
* // Create execution context
|
||||
* const ctx = createExecutionContext({ workflowId: 'test' })
|
||||
* ```
|
||||
*/
|
||||
|
||||
// Block factories
|
||||
export {
|
||||
type BlockFactoryOptions,
|
||||
createAgentBlock,
|
||||
createApiBlock,
|
||||
createBlock,
|
||||
createConditionBlock,
|
||||
createFunctionBlock,
|
||||
createKnowledgeBlock,
|
||||
createLoopBlock,
|
||||
createParallelBlock,
|
||||
createResponseBlock,
|
||||
createRouterBlock,
|
||||
createStarterBlock,
|
||||
createWebhookBlock,
|
||||
} from './block.factory'
|
||||
// DAG factories (for executor DAG tests)
|
||||
export {
|
||||
addNodeToDAG,
|
||||
connectDAGNodes,
|
||||
createDAG,
|
||||
createDAGFromNodes,
|
||||
createDAGNode,
|
||||
createLinearDAG,
|
||||
type DAG,
|
||||
type DAGEdge,
|
||||
type DAGNode,
|
||||
type DAGNodeFactoryOptions,
|
||||
} from './dag.factory'
|
||||
// Edge factories
|
||||
export { createEdge, createEdges, createLinearEdges, type EdgeFactoryOptions } from './edge.factory'
|
||||
// Execution factories
|
||||
export {
|
||||
createCancelledExecutionContext,
|
||||
createExecutionContext,
|
||||
createExecutionContextWithStates,
|
||||
createTimedExecutionContext,
|
||||
type ExecutionContextFactoryOptions,
|
||||
} from './execution.factory'
|
||||
// Executor context factories (for executor tests)
|
||||
export {
|
||||
addBlockState,
|
||||
createExecutorContext,
|
||||
createExecutorContextWithBlocks,
|
||||
createMinimalWorkflow,
|
||||
type ExecutorBlockState,
|
||||
type ExecutorContext,
|
||||
type ExecutorContextFactoryOptions,
|
||||
} from './executor-context.factory'
|
||||
// Permission factories
|
||||
export {
|
||||
createAdminPermission,
|
||||
createEncryptedApiKey,
|
||||
createLegacyApiKey,
|
||||
createPermission,
|
||||
createReadPermission,
|
||||
createSession,
|
||||
createWorkflowAccessContext,
|
||||
createWorkflowRecord,
|
||||
createWorkspaceRecord,
|
||||
createWritePermission,
|
||||
type EntityType,
|
||||
type MockSession,
|
||||
type Permission,
|
||||
type PermissionFactoryOptions,
|
||||
type PermissionType,
|
||||
ROLE_ALLOWED_OPERATIONS,
|
||||
type SessionFactoryOptions,
|
||||
SOCKET_OPERATIONS,
|
||||
type SocketOperation,
|
||||
type WorkflowAccessContext,
|
||||
type WorkflowRecord,
|
||||
type WorkflowRecordFactoryOptions,
|
||||
type WorkspaceRecord,
|
||||
type WorkspaceRecordFactoryOptions,
|
||||
} from './permission.factory'
|
||||
// Serialized block factories (for executor tests)
|
||||
export {
|
||||
createSerializedAgentBlock,
|
||||
createSerializedBlock,
|
||||
createSerializedConditionBlock,
|
||||
createSerializedConnection,
|
||||
createSerializedEvaluatorBlock,
|
||||
createSerializedFunctionBlock,
|
||||
createSerializedRouterBlock,
|
||||
createSerializedStarterBlock,
|
||||
createSerializedWorkflow,
|
||||
resetSerializedBlockCounter,
|
||||
type SerializedBlock,
|
||||
type SerializedBlockFactoryOptions,
|
||||
type SerializedConnection,
|
||||
type SerializedWorkflow,
|
||||
} from './serialized-block.factory'
|
||||
// Undo/redo operation factories
|
||||
export {
|
||||
type AddBlockOperation,
|
||||
type AddEdgeOperation,
|
||||
type BaseOperation,
|
||||
createAddBlockEntry,
|
||||
createAddEdgeEntry,
|
||||
createDuplicateBlockEntry,
|
||||
createMoveBlockEntry,
|
||||
createRemoveBlockEntry,
|
||||
createRemoveEdgeEntry,
|
||||
createUpdateParentEntry,
|
||||
type DuplicateBlockOperation,
|
||||
type MoveBlockOperation,
|
||||
type Operation,
|
||||
type OperationEntry,
|
||||
type OperationType,
|
||||
type RemoveBlockOperation,
|
||||
type RemoveEdgeOperation,
|
||||
type UpdateParentOperation,
|
||||
} from './undo-redo.factory'
|
||||
// User/workspace factories
|
||||
export {
|
||||
createUser,
|
||||
createUserWithWorkspace,
|
||||
createWorkflow,
|
||||
createWorkspace,
|
||||
type UserFactoryOptions,
|
||||
type WorkflowObjectFactoryOptions,
|
||||
type WorkspaceFactoryOptions,
|
||||
} from './user.factory'
|
||||
// Workflow factories
|
||||
export {
|
||||
createBranchingWorkflow,
|
||||
createLinearWorkflow,
|
||||
createLoopWorkflow,
|
||||
createParallelWorkflow,
|
||||
createWorkflowState,
|
||||
type WorkflowFactoryOptions,
|
||||
} from './workflow.factory'
|
||||
313
packages/testing/src/factories/permission.factory.ts
Normal file
@@ -0,0 +1,313 @@
|
||||
import { nanoid } from 'nanoid'
|
||||
|
||||
/**
|
||||
* Permission types in order of access level (highest to lowest).
|
||||
*/
|
||||
export type PermissionType = 'admin' | 'write' | 'read'
|
||||
|
||||
/**
|
||||
* Entity types that can have permissions.
|
||||
*/
|
||||
export type EntityType = 'workspace' | 'workflow' | 'organization'
|
||||
|
||||
/**
|
||||
* Permission record as stored in the database.
|
||||
*/
|
||||
export interface Permission {
|
||||
id: string
|
||||
userId: string
|
||||
entityType: EntityType
|
||||
entityId: string
|
||||
permissionType: PermissionType
|
||||
createdAt: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a permission.
|
||||
*/
|
||||
export interface PermissionFactoryOptions {
|
||||
id?: string
|
||||
userId?: string
|
||||
entityType?: EntityType
|
||||
entityId?: string
|
||||
permissionType?: PermissionType
|
||||
createdAt?: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock permission record.
|
||||
*/
|
||||
export function createPermission(options: PermissionFactoryOptions = {}): Permission {
|
||||
return {
|
||||
id: options.id ?? nanoid(8),
|
||||
userId: options.userId ?? `user-${nanoid(6)}`,
|
||||
entityType: options.entityType ?? 'workspace',
|
||||
entityId: options.entityId ?? `ws-${nanoid(6)}`,
|
||||
permissionType: options.permissionType ?? 'read',
|
||||
createdAt: options.createdAt ?? new Date(),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a workspace admin permission.
|
||||
*/
|
||||
export function createAdminPermission(
|
||||
userId: string,
|
||||
workspaceId: string,
|
||||
options: Partial<PermissionFactoryOptions> = {}
|
||||
): Permission {
|
||||
return createPermission({
|
||||
userId,
|
||||
entityType: 'workspace',
|
||||
entityId: workspaceId,
|
||||
permissionType: 'admin',
|
||||
...options,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a workspace write permission.
|
||||
*/
|
||||
export function createWritePermission(
|
||||
userId: string,
|
||||
workspaceId: string,
|
||||
options: Partial<PermissionFactoryOptions> = {}
|
||||
): Permission {
|
||||
return createPermission({
|
||||
userId,
|
||||
entityType: 'workspace',
|
||||
entityId: workspaceId,
|
||||
permissionType: 'write',
|
||||
...options,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a workspace read permission.
|
||||
*/
|
||||
export function createReadPermission(
|
||||
userId: string,
|
||||
workspaceId: string,
|
||||
options: Partial<PermissionFactoryOptions> = {}
|
||||
): Permission {
|
||||
return createPermission({
|
||||
userId,
|
||||
entityType: 'workspace',
|
||||
entityId: workspaceId,
|
||||
permissionType: 'read',
|
||||
...options,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Workspace record for testing.
|
||||
*/
|
||||
export interface WorkspaceRecord {
|
||||
id: string
|
||||
name: string
|
||||
ownerId: string
|
||||
billedAccountUserId?: string
|
||||
createdAt: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a workspace.
|
||||
*/
|
||||
export interface WorkspaceRecordFactoryOptions {
|
||||
id?: string
|
||||
name?: string
|
||||
ownerId?: string
|
||||
billedAccountUserId?: string
|
||||
createdAt?: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock workspace record.
|
||||
*/
|
||||
export function createWorkspaceRecord(
|
||||
options: WorkspaceRecordFactoryOptions = {}
|
||||
): WorkspaceRecord {
|
||||
const id = options.id ?? `ws-${nanoid(6)}`
|
||||
const ownerId = options.ownerId ?? `user-${nanoid(6)}`
|
||||
return {
|
||||
id,
|
||||
name: options.name ?? `Workspace ${id}`,
|
||||
ownerId,
|
||||
billedAccountUserId: options.billedAccountUserId ?? ownerId,
|
||||
createdAt: options.createdAt ?? new Date(),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Workflow record for testing.
|
||||
*/
|
||||
export interface WorkflowRecord {
|
||||
id: string
|
||||
name: string
|
||||
userId: string
|
||||
workspaceId: string | null
|
||||
state: string
|
||||
isDeployed: boolean
|
||||
runCount: number
|
||||
createdAt: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a workflow record.
|
||||
*/
|
||||
export interface WorkflowRecordFactoryOptions {
|
||||
id?: string
|
||||
name?: string
|
||||
userId?: string
|
||||
workspaceId?: string | null
|
||||
state?: string
|
||||
isDeployed?: boolean
|
||||
runCount?: number
|
||||
createdAt?: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock workflow database record.
|
||||
*/
|
||||
export function createWorkflowRecord(options: WorkflowRecordFactoryOptions = {}): WorkflowRecord {
|
||||
const id = options.id ?? `wf-${nanoid(6)}`
|
||||
return {
|
||||
id,
|
||||
name: options.name ?? `Workflow ${id}`,
|
||||
userId: options.userId ?? `user-${nanoid(6)}`,
|
||||
workspaceId: options.workspaceId ?? null,
|
||||
state: options.state ?? '{}',
|
||||
isDeployed: options.isDeployed ?? false,
|
||||
runCount: options.runCount ?? 0,
|
||||
createdAt: options.createdAt ?? new Date(),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Session object for testing.
|
||||
*/
|
||||
export interface MockSession {
|
||||
user: {
|
||||
id: string
|
||||
email: string
|
||||
name?: string
|
||||
}
|
||||
expiresAt: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a session.
|
||||
*/
|
||||
export interface SessionFactoryOptions {
|
||||
userId?: string
|
||||
email?: string
|
||||
name?: string
|
||||
expiresAt?: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock session object.
|
||||
*/
|
||||
export function createSession(options: SessionFactoryOptions = {}): MockSession {
|
||||
const userId = options.userId ?? `user-${nanoid(6)}`
|
||||
return {
|
||||
user: {
|
||||
id: userId,
|
||||
email: options.email ?? `${userId}@test.com`,
|
||||
name: options.name,
|
||||
},
|
||||
expiresAt: options.expiresAt ?? new Date(Date.now() + 24 * 60 * 60 * 1000),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Workflow access context for testing.
|
||||
*/
|
||||
export interface WorkflowAccessContext {
|
||||
workflow: WorkflowRecord
|
||||
workspaceOwnerId: string | null
|
||||
workspacePermission: PermissionType | null
|
||||
isOwner: boolean
|
||||
isWorkspaceOwner: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock workflow access context.
|
||||
*/
|
||||
export function createWorkflowAccessContext(options: {
|
||||
workflow: WorkflowRecord
|
||||
workspaceOwnerId?: string | null
|
||||
workspacePermission?: PermissionType | null
|
||||
userId?: string
|
||||
}): WorkflowAccessContext {
|
||||
const { workflow, workspaceOwnerId = null, workspacePermission = null, userId } = options
|
||||
|
||||
return {
|
||||
workflow,
|
||||
workspaceOwnerId,
|
||||
workspacePermission,
|
||||
isOwner: userId ? workflow.userId === userId : false,
|
||||
isWorkspaceOwner: userId && workspaceOwnerId ? workspaceOwnerId === userId : false,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* All socket operations that can be performed.
|
||||
*/
|
||||
export const SOCKET_OPERATIONS = [
|
||||
'add',
|
||||
'remove',
|
||||
'update',
|
||||
'update-position',
|
||||
'update-name',
|
||||
'toggle-enabled',
|
||||
'update-parent',
|
||||
'update-wide',
|
||||
'update-advanced-mode',
|
||||
'update-trigger-mode',
|
||||
'toggle-handles',
|
||||
'duplicate',
|
||||
'replace-state',
|
||||
] as const
|
||||
|
||||
export type SocketOperation = (typeof SOCKET_OPERATIONS)[number]
|
||||
|
||||
/**
|
||||
* Operations allowed for each role.
|
||||
*/
|
||||
export const ROLE_ALLOWED_OPERATIONS: Record<PermissionType, SocketOperation[]> = {
|
||||
admin: [...SOCKET_OPERATIONS],
|
||||
write: [...SOCKET_OPERATIONS],
|
||||
read: ['update-position'],
|
||||
}
|
||||
|
||||
/**
|
||||
* API key formats for testing.
|
||||
*/
|
||||
export interface ApiKeyTestData {
|
||||
plainKey: string
|
||||
encryptedStorage: string
|
||||
last4: string
|
||||
}
|
||||
|
||||
/**
|
||||
 * Creates a legacy-format (`sim_` prefixed) test API key.
|
||||
*/
|
||||
export function createLegacyApiKey(): { key: string; prefix: string } {
|
||||
const random = nanoid(24)
|
||||
return {
|
||||
key: `sim_${random}`,
|
||||
prefix: 'sim_',
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
 * Creates an encrypted-format (`sk-sim-` prefixed) test API key.
|
||||
*/
|
||||
export function createEncryptedApiKey(): { key: string; prefix: string } {
|
||||
const random = nanoid(24)
|
||||
return {
|
||||
key: `sk-sim-${random}`,
|
||||
prefix: 'sk-sim-',
|
||||
}
|
||||
}
|
||||
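A brief sketch pairing the permission factory with the role/operation table above:

```ts
import { expect, it } from 'vitest'
import { createReadPermission, ROLE_ALLOWED_OPERATIONS } from '@sim/testing/factories'

it('restricts read-only collaborators to position updates', () => {
  const permission = createReadPermission('user-1', 'ws-1')
  expect(ROLE_ALLOWED_OPERATIONS[permission.permissionType]).toEqual(['update-position'])
})
```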
229
packages/testing/src/factories/serialized-block.factory.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
/**
|
||||
* Factory functions for creating SerializedBlock test fixtures.
|
||||
* These are used in executor tests where blocks are in their serialized form.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Serialized block structure used in executor tests.
|
||||
*/
|
||||
export interface SerializedBlock {
|
||||
id: string
|
||||
position: { x: number; y: number }
|
||||
config: {
|
||||
tool: string
|
||||
params: Record<string, any>
|
||||
}
|
||||
inputs: Record<string, any>
|
||||
outputs: Record<string, any>
|
||||
metadata?: {
|
||||
id: string
|
||||
name?: string
|
||||
description?: string
|
||||
category?: string
|
||||
icon?: string
|
||||
color?: string
|
||||
}
|
||||
enabled: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialized connection structure.
|
||||
*/
|
||||
export interface SerializedConnection {
|
||||
source: string
|
||||
target: string
|
||||
sourceHandle?: string
|
||||
targetHandle?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialized workflow structure.
|
||||
*/
|
||||
export interface SerializedWorkflow {
|
||||
version: string
|
||||
blocks: SerializedBlock[]
|
||||
connections: SerializedConnection[]
|
||||
loops: Record<string, any>
|
||||
parallels?: Record<string, any>
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a serialized block.
|
||||
*/
|
||||
export interface SerializedBlockFactoryOptions {
|
||||
id?: string
|
||||
type?: string
|
||||
name?: string
|
||||
description?: string
|
||||
position?: { x: number; y: number }
|
||||
tool?: string
|
||||
params?: Record<string, any>
|
||||
inputs?: Record<string, any>
|
||||
outputs?: Record<string, any>
|
||||
enabled?: boolean
|
||||
}
|
||||
|
||||
let blockCounter = 0
|
||||
|
||||
/**
|
||||
* Generates a unique block ID.
|
||||
*/
|
||||
function generateBlockId(prefix = 'block'): string {
|
||||
return `${prefix}-${++blockCounter}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets the block counter (useful for deterministic tests).
|
||||
*/
|
||||
export function resetSerializedBlockCounter(): void {
|
||||
blockCounter = 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized block with sensible defaults.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const block = createSerializedBlock({ type: 'agent', name: 'My Agent' })
|
||||
* ```
|
||||
*/
|
||||
export function createSerializedBlock(
|
||||
options: SerializedBlockFactoryOptions = {}
|
||||
): SerializedBlock {
|
||||
const type = options.type ?? 'function'
|
||||
const id = options.id ?? generateBlockId(type)
|
||||
|
||||
return {
|
||||
id,
|
||||
position: options.position ?? { x: 0, y: 0 },
|
||||
config: {
|
||||
tool: options.tool ?? type,
|
||||
params: options.params ?? {},
|
||||
},
|
||||
inputs: options.inputs ?? {},
|
||||
outputs: options.outputs ?? {},
|
||||
metadata: {
|
||||
id: type,
|
||||
name: options.name ?? `Block ${id}`,
|
||||
description: options.description,
|
||||
},
|
||||
enabled: options.enabled ?? true,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized condition block.
|
||||
*/
|
||||
export function createSerializedConditionBlock(
|
||||
options: Omit<SerializedBlockFactoryOptions, 'type'> = {}
|
||||
): SerializedBlock {
|
||||
return createSerializedBlock({
|
||||
...options,
|
||||
type: 'condition',
|
||||
name: options.name ?? 'Condition',
|
||||
inputs: options.inputs ?? { conditions: 'json' },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized router block.
|
||||
*/
|
||||
export function createSerializedRouterBlock(
|
||||
options: Omit<SerializedBlockFactoryOptions, 'type'> = {}
|
||||
): SerializedBlock {
|
||||
return createSerializedBlock({
|
||||
...options,
|
||||
type: 'router',
|
||||
name: options.name ?? 'Router',
|
||||
inputs: options.inputs ?? { prompt: 'string', model: 'string' },
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized evaluator block.
|
||||
*/
|
||||
export function createSerializedEvaluatorBlock(
|
||||
options: Omit<SerializedBlockFactoryOptions, 'type'> = {}
|
||||
): SerializedBlock {
|
||||
return createSerializedBlock({
|
||||
...options,
|
||||
type: 'evaluator',
|
||||
name: options.name ?? 'Evaluator',
|
||||
inputs: options.inputs ?? {
|
||||
content: 'string',
|
||||
metrics: 'json',
|
||||
model: 'string',
|
||||
temperature: 'number',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized agent block.
|
||||
*/
|
||||
export function createSerializedAgentBlock(
|
||||
options: Omit<SerializedBlockFactoryOptions, 'type'> = {}
|
||||
): SerializedBlock {
|
||||
return createSerializedBlock({
|
||||
...options,
|
||||
type: 'agent',
|
||||
name: options.name ?? 'Agent',
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized function block.
|
||||
*/
|
||||
export function createSerializedFunctionBlock(
|
||||
options: Omit<SerializedBlockFactoryOptions, 'type'> = {}
|
||||
): SerializedBlock {
|
||||
return createSerializedBlock({
|
||||
...options,
|
||||
type: 'function',
|
||||
name: options.name ?? 'Function',
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized starter block.
|
||||
*/
|
||||
export function createSerializedStarterBlock(
|
||||
options: Omit<SerializedBlockFactoryOptions, 'type'> = {}
|
||||
): SerializedBlock {
|
||||
return createSerializedBlock({
|
||||
...options,
|
||||
type: 'starter',
|
||||
name: options.name ?? 'Start',
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a simple serialized connection.
|
||||
*/
|
||||
export function createSerializedConnection(
|
||||
source: string,
|
||||
target: string,
|
||||
sourceHandle?: string
|
||||
): SerializedConnection {
|
||||
return {
|
||||
source,
|
||||
target,
|
||||
sourceHandle,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a serialized workflow with the given blocks and connections.
|
||||
*/
|
||||
export function createSerializedWorkflow(
|
||||
blocks: SerializedBlock[],
|
||||
connections: SerializedConnection[] = []
|
||||
): SerializedWorkflow {
|
||||
return {
|
||||
version: '1.0',
|
||||
blocks,
|
||||
connections,
|
||||
loops: {},
|
||||
parallels: {},
|
||||
}
|
||||
}
|
||||
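A small sketch of the serialized-workflow helpers above; `resetSerializedBlockCounter` keeps the generated ids deterministic between tests:

```ts
import { beforeEach, expect, it } from 'vitest'
import {
  createSerializedAgentBlock,
  createSerializedConnection,
  createSerializedStarterBlock,
  createSerializedWorkflow,
  resetSerializedBlockCounter,
} from '@sim/testing/factories'

beforeEach(() => {
  resetSerializedBlockCounter()
})

it('builds a starter -> agent workflow', () => {
  const start = createSerializedStarterBlock()
  const agent = createSerializedAgentBlock()
  const workflow = createSerializedWorkflow(
    [start, agent],
    [createSerializedConnection(start.id, agent.id)]
  )
  expect(workflow.blocks).toHaveLength(2)
  expect(workflow.connections[0]).toMatchObject({ source: start.id, target: agent.id })
})
```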
385
packages/testing/src/factories/undo-redo.factory.ts
Normal file
@@ -0,0 +1,385 @@
|
||||
import { nanoid } from 'nanoid'
|
||||
import type { BlockState, Edge } from '../types'
|
||||
|
||||
/**
|
||||
* Operation types supported by the undo/redo store.
|
||||
*/
|
||||
export type OperationType =
|
||||
| 'add-block'
|
||||
| 'remove-block'
|
||||
| 'add-edge'
|
||||
| 'remove-edge'
|
||||
| 'move-block'
|
||||
| 'duplicate-block'
|
||||
| 'update-parent'
|
||||
|
||||
/**
|
||||
* Base operation interface.
|
||||
*/
|
||||
export interface BaseOperation {
|
||||
id: string
|
||||
type: OperationType
|
||||
timestamp: number
|
||||
workflowId: string
|
||||
userId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Move block operation data.
|
||||
*/
|
||||
export interface MoveBlockOperation extends BaseOperation {
|
||||
type: 'move-block'
|
||||
data: {
|
||||
blockId: string
|
||||
before: { x: number; y: number; parentId?: string }
|
||||
after: { x: number; y: number; parentId?: string }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add block operation data.
|
||||
*/
|
||||
export interface AddBlockOperation extends BaseOperation {
|
||||
type: 'add-block'
|
||||
data: { blockId: string }
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove block operation data.
|
||||
*/
|
||||
export interface RemoveBlockOperation extends BaseOperation {
|
||||
type: 'remove-block'
|
||||
data: {
|
||||
blockId: string
|
||||
blockSnapshot: BlockState | null
|
||||
edgeSnapshots?: Edge[]
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add edge operation data.
|
||||
*/
|
||||
export interface AddEdgeOperation extends BaseOperation {
|
||||
type: 'add-edge'
|
||||
data: { edgeId: string }
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove edge operation data.
|
||||
*/
|
||||
export interface RemoveEdgeOperation extends BaseOperation {
|
||||
type: 'remove-edge'
|
||||
data: { edgeId: string; edgeSnapshot: Edge | null }
|
||||
}
|
||||
|
||||
/**
|
||||
* Duplicate block operation data.
|
||||
*/
|
||||
export interface DuplicateBlockOperation extends BaseOperation {
|
||||
type: 'duplicate-block'
|
||||
data: {
|
||||
sourceBlockId: string
|
||||
duplicatedBlockId: string
|
||||
duplicatedBlockSnapshot: BlockState
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update parent operation data.
|
||||
*/
|
||||
export interface UpdateParentOperation extends BaseOperation {
|
||||
type: 'update-parent'
|
||||
data: {
|
||||
blockId: string
|
||||
oldParentId?: string
|
||||
newParentId?: string
|
||||
oldPosition: { x: number; y: number }
|
||||
newPosition: { x: number; y: number }
|
||||
}
|
||||
}
|
||||
|
||||
export type Operation =
|
||||
| AddBlockOperation
|
||||
| RemoveBlockOperation
|
||||
| AddEdgeOperation
|
||||
| RemoveEdgeOperation
|
||||
| MoveBlockOperation
|
||||
| DuplicateBlockOperation
|
||||
| UpdateParentOperation
|
||||
|
||||
/**
|
||||
* Operation entry with forward and inverse operations.
|
||||
*/
|
||||
export interface OperationEntry {
|
||||
id: string
|
||||
operation: Operation
|
||||
inverse: Operation
|
||||
createdAt: number
|
||||
}
|
||||
|
||||
interface OperationEntryOptions {
|
||||
id?: string
|
||||
workflowId?: string
|
||||
userId?: string
|
||||
createdAt?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock add-block operation entry.
|
||||
*/
|
||||
export function createAddBlockEntry(
|
||||
blockId: string,
|
||||
options: OperationEntryOptions = {}
|
||||
): OperationEntry {
|
||||
const { id = nanoid(8), workflowId = 'wf-1', userId = 'user-1', createdAt = Date.now() } = options
|
||||
const timestamp = Date.now()
|
||||
|
||||
return {
|
||||
id,
|
||||
createdAt,
|
||||
operation: {
|
||||
id: nanoid(8),
|
||||
type: 'add-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId },
|
||||
},
|
||||
inverse: {
|
||||
id: nanoid(8),
|
||||
type: 'remove-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId, blockSnapshot: null },
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock remove-block operation entry.
|
||||
*/
|
||||
export function createRemoveBlockEntry(
|
||||
blockId: string,
|
||||
blockSnapshot: BlockState | null = null,
|
||||
options: OperationEntryOptions = {}
|
||||
): OperationEntry {
|
||||
const { id = nanoid(8), workflowId = 'wf-1', userId = 'user-1', createdAt = Date.now() } = options
|
||||
const timestamp = Date.now()
|
||||
|
||||
return {
|
||||
id,
|
||||
createdAt,
|
||||
operation: {
|
||||
id: nanoid(8),
|
||||
type: 'remove-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId, blockSnapshot },
|
||||
},
|
||||
inverse: {
|
||||
id: nanoid(8),
|
||||
type: 'add-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId },
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock add-edge operation entry.
|
||||
*/
|
||||
export function createAddEdgeEntry(
|
||||
edgeId: string,
|
||||
options: OperationEntryOptions = {}
|
||||
): OperationEntry {
|
||||
const { id = nanoid(8), workflowId = 'wf-1', userId = 'user-1', createdAt = Date.now() } = options
|
||||
const timestamp = Date.now()
|
||||
|
||||
return {
|
||||
id,
|
||||
createdAt,
|
||||
operation: {
|
||||
id: nanoid(8),
|
||||
type: 'add-edge',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { edgeId },
|
||||
},
|
||||
inverse: {
|
||||
id: nanoid(8),
|
||||
type: 'remove-edge',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { edgeId, edgeSnapshot: null },
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock remove-edge operation entry.
|
||||
*/
|
||||
export function createRemoveEdgeEntry(
|
||||
edgeId: string,
|
||||
edgeSnapshot: Edge | null = null,
|
||||
options: OperationEntryOptions = {}
|
||||
): OperationEntry {
|
||||
const { id = nanoid(8), workflowId = 'wf-1', userId = 'user-1', createdAt = Date.now() } = options
|
||||
const timestamp = Date.now()
|
||||
|
||||
return {
|
||||
id,
|
||||
createdAt,
|
||||
operation: {
|
||||
id: nanoid(8),
|
||||
type: 'remove-edge',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { edgeId, edgeSnapshot },
|
||||
},
|
||||
inverse: {
|
||||
id: nanoid(8),
|
||||
type: 'add-edge',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { edgeId },
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
interface MoveBlockOptions extends OperationEntryOptions {
|
||||
before?: { x: number; y: number; parentId?: string }
|
||||
after?: { x: number; y: number; parentId?: string }
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock move-block operation entry.
|
||||
*/
|
||||
export function createMoveBlockEntry(
|
||||
blockId: string,
|
||||
options: MoveBlockOptions = {}
|
||||
): OperationEntry {
|
||||
const {
|
||||
id = nanoid(8),
|
||||
workflowId = 'wf-1',
|
||||
userId = 'user-1',
|
||||
createdAt = Date.now(),
|
||||
before = { x: 0, y: 0 },
|
||||
after = { x: 100, y: 100 },
|
||||
} = options
|
||||
const timestamp = Date.now()
|
||||
|
||||
return {
|
||||
id,
|
||||
createdAt,
|
||||
operation: {
|
||||
id: nanoid(8),
|
||||
type: 'move-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId, before, after },
|
||||
},
|
||||
inverse: {
|
||||
id: nanoid(8),
|
||||
type: 'move-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId, before: after, after: before },
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock duplicate-block operation entry.
|
||||
*/
|
||||
export function createDuplicateBlockEntry(
|
||||
sourceBlockId: string,
|
||||
duplicatedBlockId: string,
|
||||
duplicatedBlockSnapshot: BlockState,
|
||||
options: OperationEntryOptions = {}
|
||||
): OperationEntry {
|
||||
const { id = nanoid(8), workflowId = 'wf-1', userId = 'user-1', createdAt = Date.now() } = options
|
||||
const timestamp = Date.now()
|
||||
|
||||
return {
|
||||
id,
|
||||
createdAt,
|
||||
operation: {
|
||||
id: nanoid(8),
|
||||
type: 'duplicate-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { sourceBlockId, duplicatedBlockId, duplicatedBlockSnapshot },
|
||||
},
|
||||
inverse: {
|
||||
id: nanoid(8),
|
||||
type: 'remove-block',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId: duplicatedBlockId, blockSnapshot: duplicatedBlockSnapshot },
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock update-parent operation entry.
|
||||
*/
|
||||
export function createUpdateParentEntry(
|
||||
blockId: string,
|
||||
options: OperationEntryOptions & {
|
||||
oldParentId?: string
|
||||
newParentId?: string
|
||||
oldPosition?: { x: number; y: number }
|
||||
newPosition?: { x: number; y: number }
|
||||
} = {}
|
||||
): OperationEntry {
|
||||
const {
|
||||
id = nanoid(8),
|
||||
workflowId = 'wf-1',
|
||||
userId = 'user-1',
|
||||
createdAt = Date.now(),
|
||||
oldParentId,
|
||||
newParentId,
|
||||
oldPosition = { x: 0, y: 0 },
|
||||
newPosition = { x: 50, y: 50 },
|
||||
} = options
|
||||
const timestamp = Date.now()
|
||||
|
||||
return {
|
||||
id,
|
||||
createdAt,
|
||||
operation: {
|
||||
id: nanoid(8),
|
||||
type: 'update-parent',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: { blockId, oldParentId, newParentId, oldPosition, newPosition },
|
||||
},
|
||||
inverse: {
|
||||
id: nanoid(8),
|
||||
type: 'update-parent',
|
||||
timestamp,
|
||||
workflowId,
|
||||
userId,
|
||||
data: {
|
||||
blockId,
|
||||
oldParentId: newParentId,
|
||||
newParentId: oldParentId,
|
||||
oldPosition: newPosition,
|
||||
newPosition: oldPosition,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
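A sketch of the symmetry the move-block factory above encodes: undoing a move swaps the before/after positions.

```ts
import { expect, it } from 'vitest'
import { createMoveBlockEntry } from '@sim/testing/factories'

it('inverts a move by swapping before and after', () => {
  const entry = createMoveBlockEntry('block-1', {
    before: { x: 0, y: 0 },
    after: { x: 100, y: 100 },
  })
  expect(entry.inverse.type).toBe('move-block')
  expect(entry.inverse.data).toMatchObject({
    before: { x: 100, y: 100 },
    after: { x: 0, y: 0 },
  })
})
```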
114
packages/testing/src/factories/user.factory.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import type { User, Workflow, WorkflowState, Workspace } from '../types'
|
||||
import { createWorkflowState } from './workflow.factory'
|
||||
|
||||
/**
|
||||
* Options for creating a mock user.
|
||||
*/
|
||||
export interface UserFactoryOptions {
|
||||
id?: string
|
||||
email?: string
|
||||
name?: string
|
||||
image?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock user.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const user = createUser({ email: 'test@example.com' })
|
||||
* ```
|
||||
*/
|
||||
export function createUser(options: UserFactoryOptions = {}): User {
|
||||
const id = options.id ?? `user-${Math.random().toString(36).substring(2, 10)}`
|
||||
return {
|
||||
id,
|
||||
email: options.email ?? `${id}@test.example.com`,
|
||||
name: options.name ?? `Test User ${id.substring(0, 4)}`,
|
||||
image: options.image,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a mock workspace.
|
||||
*/
|
||||
export interface WorkspaceFactoryOptions {
|
||||
id?: string
|
||||
name?: string
|
||||
ownerId?: string
|
||||
createdAt?: Date
|
||||
updatedAt?: Date
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock workspace.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const workspace = createWorkspace({ name: 'My Workspace' })
|
||||
* ```
|
||||
*/
|
||||
export function createWorkspace(options: WorkspaceFactoryOptions = {}): Workspace {
|
||||
const now = new Date()
|
||||
return {
|
||||
id: options.id ?? `ws-${Math.random().toString(36).substring(2, 10)}`,
|
||||
name: options.name ?? 'Test Workspace',
|
||||
ownerId: options.ownerId ?? `user-${Math.random().toString(36).substring(2, 10)}`,
|
||||
createdAt: options.createdAt ?? now,
|
||||
updatedAt: options.updatedAt ?? now,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating a mock workflow.
|
||||
*/
|
||||
export interface WorkflowObjectFactoryOptions {
|
||||
id?: string
|
||||
name?: string
|
||||
workspaceId?: string
|
||||
state?: WorkflowState
|
||||
createdAt?: Date
|
||||
updatedAt?: Date
|
||||
isDeployed?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock workflow object (not just state).
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const workflow = createWorkflow({ name: 'My Workflow' })
|
||||
* ```
|
||||
*/
|
||||
export function createWorkflow(options: WorkflowObjectFactoryOptions = {}): Workflow {
|
||||
const now = new Date()
|
||||
return {
|
||||
id: options.id ?? `wf-${Math.random().toString(36).substring(2, 10)}`,
|
||||
name: options.name ?? 'Test Workflow',
|
||||
workspaceId: options.workspaceId ?? `ws-${Math.random().toString(36).substring(2, 10)}`,
|
||||
state: options.state ?? createWorkflowState(),
|
||||
createdAt: options.createdAt ?? now,
|
||||
updatedAt: options.updatedAt ?? now,
|
||||
isDeployed: options.isDeployed ?? false,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a user with an associated workspace.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const { user, workspace } = createUserWithWorkspace()
|
||||
* ```
|
||||
*/
|
||||
export function createUserWithWorkspace(
|
||||
userOptions: UserFactoryOptions = {},
|
||||
workspaceOptions: Omit<WorkspaceFactoryOptions, 'ownerId'> = {}
|
||||
): { user: User; workspace: Workspace } {
|
||||
const user = createUser(userOptions)
|
||||
const workspace = createWorkspace({
|
||||
...workspaceOptions,
|
||||
ownerId: user.id,
|
||||
})
|
||||
return { user, workspace }
|
||||
}
|
||||
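A one-assertion sketch of the combined user/workspace factory above:

```ts
import { expect, it } from 'vitest'
import { createUserWithWorkspace } from '@sim/testing/factories'

it('marks the created user as the workspace owner', () => {
  const { user, workspace } = createUserWithWorkspace({ email: 'owner@example.com' })
  expect(workspace.ownerId).toBe(user.id)
})
```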
209
packages/testing/src/factories/workflow.factory.ts
Normal file
@@ -0,0 +1,209 @@
|
||||
import type { BlockState, Edge, Loop, Parallel, WorkflowState } from '../types'
|
||||
import { createBlock, createFunctionBlock, createStarterBlock } from './block.factory'
|
||||
import { createLinearEdges } from './edge.factory'
|
||||
|
||||
/**
|
||||
* Options for creating a mock workflow state.
|
||||
*/
|
||||
export interface WorkflowFactoryOptions {
|
||||
blocks?: Record<string, BlockState>
|
||||
edges?: Edge[]
|
||||
loops?: Record<string, Loop>
|
||||
parallels?: Record<string, Parallel>
|
||||
lastSaved?: number
|
||||
isDeployed?: boolean
|
||||
variables?: WorkflowState['variables']
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an empty workflow state with defaults.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const workflow = createWorkflowState()
|
||||
* ```
|
||||
*/
|
||||
export function createWorkflowState(options: WorkflowFactoryOptions = {}): WorkflowState {
|
||||
return {
|
||||
blocks: options.blocks ?? {},
|
||||
edges: options.edges ?? [],
|
||||
loops: options.loops ?? {},
|
||||
parallels: options.parallels ?? {},
|
||||
lastSaved: options.lastSaved ?? Date.now(),
|
||||
isDeployed: options.isDeployed ?? false,
|
||||
variables: options.variables,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a simple linear workflow with the specified number of blocks.
|
||||
* First block is always a starter, rest are function blocks.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Creates: starter -> function -> function
|
||||
* const workflow = createLinearWorkflow(3)
|
||||
* ```
|
||||
*/
|
||||
export function createLinearWorkflow(blockCount: number, spacing = 200): WorkflowState {
|
||||
if (blockCount < 1) {
|
||||
return createWorkflowState()
|
||||
}
|
||||
|
||||
const blocks: Record<string, BlockState> = {}
|
||||
const blockIds: string[] = []
|
||||
|
||||
for (let i = 0; i < blockCount; i++) {
|
||||
const id = `block-${i}`
|
||||
blockIds.push(id)
|
||||
|
||||
if (i === 0) {
|
||||
blocks[id] = createStarterBlock({
|
||||
id,
|
||||
position: { x: i * spacing, y: 0 },
|
||||
})
|
||||
} else {
|
||||
blocks[id] = createFunctionBlock({
|
||||
id,
|
||||
name: `Step ${i}`,
|
||||
position: { x: i * spacing, y: 0 },
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return createWorkflowState({
|
||||
blocks,
|
||||
edges: createLinearEdges(blockIds),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a workflow with a branching condition.
|
||||
*
|
||||
* Structure:
|
||||
* ```
|
||||
* ┌─→ true-branch ─┐
|
||||
* start ─→ condition ├─→ end
|
||||
* └─→ false-branch ┘
|
||||
* ```
|
||||
*/
|
||||
export function createBranchingWorkflow(): WorkflowState {
|
||||
const blocks: Record<string, BlockState> = {
|
||||
start: createStarterBlock({ id: 'start', position: { x: 0, y: 0 } }),
|
||||
condition: createBlock({
|
||||
id: 'condition',
|
||||
type: 'condition',
|
||||
name: 'Check',
|
||||
position: { x: 200, y: 0 },
|
||||
}),
|
||||
'true-branch': createFunctionBlock({
|
||||
id: 'true-branch',
|
||||
name: 'If True',
|
||||
position: { x: 400, y: -100 },
|
||||
}),
|
||||
'false-branch': createFunctionBlock({
|
||||
id: 'false-branch',
|
||||
name: 'If False',
|
||||
position: { x: 400, y: 100 },
|
||||
}),
|
||||
end: createFunctionBlock({ id: 'end', name: 'End', position: { x: 600, y: 0 } }),
|
||||
}
|
||||
|
||||
const edges: Edge[] = [
|
||||
{ id: 'e1', source: 'start', target: 'condition' },
|
||||
{ id: 'e2', source: 'condition', target: 'true-branch', sourceHandle: 'condition-if' },
|
||||
{ id: 'e3', source: 'condition', target: 'false-branch', sourceHandle: 'condition-else' },
|
||||
{ id: 'e4', source: 'true-branch', target: 'end' },
|
||||
{ id: 'e5', source: 'false-branch', target: 'end' },
|
||||
]
|
||||
|
||||
return createWorkflowState({ blocks, edges })
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a workflow with a loop container.
|
||||
*
|
||||
* Structure:
|
||||
* ```
|
||||
* start ─→ loop[loop-body] ─→ end
|
||||
* ```
|
||||
*/
|
||||
export function createLoopWorkflow(iterations = 3): WorkflowState {
|
||||
const blocks: Record<string, BlockState> = {
|
||||
start: createStarterBlock({ id: 'start', position: { x: 0, y: 0 } }),
|
||||
loop: createBlock({
|
||||
id: 'loop',
|
||||
type: 'loop',
|
||||
name: 'Loop',
|
||||
position: { x: 200, y: 0 },
|
||||
data: { loopType: 'for', count: iterations, type: 'loop' },
|
||||
}),
|
||||
'loop-body': createFunctionBlock({
|
||||
id: 'loop-body',
|
||||
name: 'Loop Body',
|
||||
position: { x: 50, y: 50 },
|
||||
parentId: 'loop',
|
||||
}),
|
||||
end: createFunctionBlock({ id: 'end', name: 'End', position: { x: 500, y: 0 } }),
|
||||
}
|
||||
|
||||
const edges: Edge[] = [
|
||||
{ id: 'e1', source: 'start', target: 'loop' },
|
||||
{ id: 'e2', source: 'loop', target: 'end' },
|
||||
]
|
||||
|
||||
const loops: Record<string, Loop> = {
|
||||
loop: {
|
||||
id: 'loop',
|
||||
nodes: ['loop-body'],
|
||||
iterations,
|
||||
loopType: 'for',
|
||||
},
|
||||
}
|
||||
|
||||
return createWorkflowState({ blocks, edges, loops })
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a workflow with a parallel container.
|
||||
*
|
||||
* Structure:
|
||||
* ```
|
||||
* start ─→ parallel[parallel-task] ─→ end
|
||||
* ```
|
||||
*/
|
||||
export function createParallelWorkflow(count = 2): WorkflowState {
|
||||
const blocks: Record<string, BlockState> = {
|
||||
start: createStarterBlock({ id: 'start', position: { x: 0, y: 0 } }),
|
||||
parallel: createBlock({
|
||||
id: 'parallel',
|
||||
type: 'parallel',
|
||||
name: 'Parallel',
|
||||
position: { x: 200, y: 0 },
|
||||
data: { parallelType: 'count', count, type: 'parallel' },
|
||||
}),
|
||||
'parallel-task': createFunctionBlock({
|
||||
id: 'parallel-task',
|
||||
name: 'Parallel Task',
|
||||
position: { x: 50, y: 50 },
|
||||
parentId: 'parallel',
|
||||
}),
|
||||
end: createFunctionBlock({ id: 'end', name: 'End', position: { x: 500, y: 0 } }),
|
||||
}
|
||||
|
||||
const edges: Edge[] = [
|
||||
{ id: 'e1', source: 'start', target: 'parallel' },
|
||||
{ id: 'e2', source: 'parallel', target: 'end' },
|
||||
]
|
||||
|
||||
const parallels: Record<string, Parallel> = {
|
||||
parallel: {
|
||||
id: 'parallel',
|
||||
nodes: ['parallel-task'],
|
||||
count,
|
||||
parallelType: 'count',
|
||||
},
|
||||
}
|
||||
|
||||
return createWorkflowState({ blocks, edges, parallels })
|
||||
}
|
||||
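A sketch of the workflow shapes the factories above produce, assuming the `./factories` subpath export:

```ts
import { expect, it } from 'vitest'
import { createBranchingWorkflow, createLinearWorkflow } from '@sim/testing/factories'

it('produces the documented shapes', () => {
  // starter -> function -> function, connected by two edges.
  const linear = createLinearWorkflow(3)
  expect(Object.keys(linear.blocks)).toHaveLength(3)
  expect(linear.edges).toHaveLength(2)

  const branching = createBranchingWorkflow()
  expect(Object.keys(branching.blocks)).toEqual(
    expect.arrayContaining(['start', 'condition', 'true-branch', 'false-branch', 'end'])
  )
})
```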
61
packages/testing/src/index.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* @sim/testing - Shared testing utilities for Sim
|
||||
*
|
||||
* This package provides a comprehensive set of tools for writing tests:
|
||||
* - Factories: Create mock data with sensible defaults
|
||||
* - Builders: Fluent APIs for complex test scenarios
|
||||
* - Mocks: Reusable mock implementations
|
||||
* - Assertions: Semantic test assertions
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import {
|
||||
* // Factories
|
||||
* createBlock,
|
||||
* createStarterBlock,
|
||||
* createLinearWorkflow,
|
||||
* createExecutionContext,
|
||||
*
|
||||
* // Builders
|
||||
* WorkflowBuilder,
|
||||
* ExecutionContextBuilder,
|
||||
*
|
||||
* // Assertions
|
||||
* expectBlockExists,
|
||||
* expectEdgeConnects,
|
||||
* expectBlockExecuted,
|
||||
* } from '@sim/testing'
|
||||
*
|
||||
* describe('MyFeature', () => {
|
||||
* it('should work with a linear workflow', () => {
|
||||
* const workflow = createLinearWorkflow(3)
|
||||
* expectBlockExists(workflow.blocks, 'block-0', 'starter')
|
||||
* expectEdgeConnects(workflow.edges, 'block-0', 'block-1')
|
||||
* })
|
||||
*
|
||||
* it('should work with a complex workflow', () => {
|
||||
* const workflow = WorkflowBuilder.branching().build()
|
||||
* expectBlockCount(workflow, 5)
|
||||
* })
|
||||
* })
|
||||
* ```
|
||||
*/
|
||||
|
||||
export * from './assertions'
|
||||
export * from './builders'
|
||||
export * from './factories'
|
||||
export {
|
||||
createMockDb,
|
||||
createMockFetch,
|
||||
createMockLogger,
|
||||
createMockResponse,
|
||||
createMockSocket,
|
||||
createMockStorage,
|
||||
databaseMock,
|
||||
drizzleOrmMock,
|
||||
loggerMock,
|
||||
type MockFetchResponse,
|
||||
setupGlobalFetchMock,
|
||||
setupGlobalStorageMocks,
|
||||
} from './mocks'
|
||||
export * from './types'
|
||||
113
packages/testing/src/mocks/database.mock.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { vi } from 'vitest'
|
||||
|
||||
/**
|
||||
 * Creates a mock SQL template literal function.
|
||||
* Mimics drizzle-orm's sql tagged template.
|
||||
*/
|
||||
export function createMockSql() {
|
||||
return (strings: TemplateStringsArray, ...values: any[]) => ({
|
||||
strings,
|
||||
values,
|
||||
toSQL: () => ({ sql: strings.join('?'), params: values }),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates mock SQL operators (eq, and, or, etc.).
|
||||
*/
|
||||
export function createMockSqlOperators() {
|
||||
return {
|
||||
eq: vi.fn((a, b) => ({ type: 'eq', left: a, right: b })),
|
||||
ne: vi.fn((a, b) => ({ type: 'ne', left: a, right: b })),
|
||||
gt: vi.fn((a, b) => ({ type: 'gt', left: a, right: b })),
|
||||
gte: vi.fn((a, b) => ({ type: 'gte', left: a, right: b })),
|
||||
lt: vi.fn((a, b) => ({ type: 'lt', left: a, right: b })),
|
||||
lte: vi.fn((a, b) => ({ type: 'lte', left: a, right: b })),
|
||||
and: vi.fn((...conditions) => ({ type: 'and', conditions })),
|
||||
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
|
||||
not: vi.fn((condition) => ({ type: 'not', condition })),
|
||||
isNull: vi.fn((column) => ({ type: 'isNull', column })),
|
||||
isNotNull: vi.fn((column) => ({ type: 'isNotNull', column })),
|
||||
inArray: vi.fn((column, values) => ({ type: 'inArray', column, values })),
|
||||
notInArray: vi.fn((column, values) => ({ type: 'notInArray', column, values })),
|
||||
like: vi.fn((column, pattern) => ({ type: 'like', column, pattern })),
|
||||
ilike: vi.fn((column, pattern) => ({ type: 'ilike', column, pattern })),
|
||||
desc: vi.fn((column) => ({ type: 'desc', column })),
|
||||
asc: vi.fn((column) => ({ type: 'asc', column })),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock database connection.
|
||||
*/
|
||||
export function createMockDb() {
|
||||
return {
|
||||
select: vi.fn(() => ({
|
||||
from: vi.fn(() => ({
|
||||
where: vi.fn(() => ({
|
||||
limit: vi.fn(() => Promise.resolve([])),
|
||||
orderBy: vi.fn(() => Promise.resolve([])),
|
||||
})),
|
||||
leftJoin: vi.fn(() => ({
|
||||
where: vi.fn(() => Promise.resolve([])),
|
||||
})),
|
||||
innerJoin: vi.fn(() => ({
|
||||
where: vi.fn(() => Promise.resolve([])),
|
||||
})),
|
||||
})),
|
||||
})),
|
||||
insert: vi.fn(() => ({
|
||||
values: vi.fn(() => ({
|
||||
returning: vi.fn(() => Promise.resolve([])),
|
||||
onConflictDoUpdate: vi.fn(() => ({
|
||||
returning: vi.fn(() => Promise.resolve([])),
|
||||
})),
|
||||
onConflictDoNothing: vi.fn(() => ({
|
||||
returning: vi.fn(() => Promise.resolve([])),
|
||||
})),
|
||||
})),
|
||||
})),
|
||||
update: vi.fn(() => ({
|
||||
set: vi.fn(() => ({
|
||||
where: vi.fn(() => ({
|
||||
returning: vi.fn(() => Promise.resolve([])),
|
||||
})),
|
||||
})),
|
||||
})),
|
||||
delete: vi.fn(() => ({
|
||||
where: vi.fn(() => ({
|
||||
returning: vi.fn(() => Promise.resolve([])),
|
||||
})),
|
||||
})),
|
||||
transaction: vi.fn(async (callback) => callback(createMockDb())),
|
||||
query: vi.fn(() => Promise.resolve([])),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock module for @sim/db.
|
||||
* Use with vi.mock() to replace the real database.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* vi.mock('@sim/db', () => databaseMock)
|
||||
* ```
|
||||
*/
|
||||
export const databaseMock = {
|
||||
db: createMockDb(),
|
||||
sql: createMockSql(),
|
||||
...createMockSqlOperators(),
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock for drizzle-orm module.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* vi.mock('drizzle-orm', () => drizzleOrmMock)
|
||||
* ```
|
||||
*/
|
||||
export const drizzleOrmMock = {
|
||||
sql: createMockSql(),
|
||||
...createMockSqlOperators(),
|
||||
}
|
||||
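A sketch of the default behaviour of the chainable database mock above; argument values are ignored and every query chain resolves to an empty result set unless overridden:

```ts
import { expect, it } from 'vitest'
import { createMockDb } from '@sim/testing/mocks'

it('resolves select chains to empty arrays by default', async () => {
  const db = createMockDb()
  // The mocked chain ignores its arguments and returns [].
  const rows = await db.select().from().where().limit()
  expect(rows).toEqual([])
})
```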
135
packages/testing/src/mocks/fetch.mock.ts
Normal file
@@ -0,0 +1,135 @@
import { vi } from 'vitest'

/**
 * Type for mock fetch response configuration.
 */
export interface MockFetchResponse {
  status?: number
  statusText?: string
  ok?: boolean
  headers?: Record<string, string>
  json?: any
  text?: string
  body?: any
}

/**
 * Creates a mock fetch function that returns configured responses.
 *
 * @example
 * ```ts
 * const mockFetch = createMockFetch({
 *   json: { data: 'test' },
 *   status: 200
 * })
 * global.fetch = mockFetch
 * ```
 */
export function createMockFetch(defaultResponse: MockFetchResponse = {}) {
  const mockFn = vi.fn(async (_url: string | URL | Request, _init?: RequestInit) => {
    return createMockResponse(defaultResponse)
  })

  return mockFn
}

/**
 * Creates a mock Response object.
 */
export function createMockResponse(config: MockFetchResponse = {}): Response {
  const status = config.status ?? 200
  const ok = config.ok ?? (status >= 200 && status < 300)

  return {
    status,
    statusText: config.statusText ?? (ok ? 'OK' : 'Error'),
    ok,
    headers: new Headers(config.headers ?? {}),
    json: vi.fn(async () => config.json ?? {}),
    text: vi.fn(async () => config.text ?? JSON.stringify(config.json ?? {})),
    body: config.body ?? null,
    bodyUsed: false,
    arrayBuffer: vi.fn(async () => new ArrayBuffer(0)),
    blob: vi.fn(async () => new Blob()),
    formData: vi.fn(async () => new FormData()),
    clone: vi.fn(function (this: Response) {
      return createMockResponse(config)
    }),
    redirected: false,
    type: 'basic' as ResponseType,
    url: '',
    bytes: vi.fn(async () => new Uint8Array()),
  } as Response
}

/**
 * Creates a mock fetch that handles multiple URLs with different responses.
 *
 * @example
 * ```ts
 * const mockFetch = createMultiMockFetch({
 *   '/api/users': { json: [{ id: 1 }] },
 *   '/api/error': { status: 500, json: { error: 'Server Error' } },
 * })
 * global.fetch = mockFetch
 * ```
 */
export function createMultiMockFetch(
  routes: Record<string, MockFetchResponse>,
  defaultResponse?: MockFetchResponse
) {
  return vi.fn(async (url: string | URL | Request, _init?: RequestInit) => {
    const urlString = url instanceof Request ? url.url : url.toString()

    // Find matching route (exact or partial match)
    const matchedRoute = Object.keys(routes).find(
      (route) => urlString === route || urlString.includes(route)
    )

    if (matchedRoute) {
      return createMockResponse(routes[matchedRoute])
    }

    if (defaultResponse) {
      return createMockResponse(defaultResponse)
    }

    return createMockResponse({ status: 404, json: { error: 'Not Found' } })
  })
}

/**
 * Sets up global fetch mock.
 *
 * @example
 * ```ts
 * const mockFetch = setupGlobalFetchMock({ json: { success: true } })
 * // Later...
 * expect(mockFetch).toHaveBeenCalledWith('/api/test', expect.anything())
 * ```
 */
export function setupGlobalFetchMock(defaultResponse?: MockFetchResponse) {
  const mockFetch = createMockFetch(defaultResponse)
  vi.stubGlobal('fetch', mockFetch)
  return mockFetch
}

/**
 * Configures fetch to return a specific response for the next call.
 */
export function mockNextFetchResponse(response: MockFetchResponse) {
  const currentFetch = globalThis.fetch
  if (vi.isMockFunction(currentFetch)) {
    currentFetch.mockResolvedValueOnce(createMockResponse(response))
  }
}

/**
 * Configures fetch to reject with an error.
 */
export function mockFetchError(error: Error | string) {
  const currentFetch = globalThis.fetch
  if (vi.isMockFunction(currentFetch)) {
    currentFetch.mockRejectedValueOnce(error instanceof Error ? error : new Error(error))
  }
}
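A short usage sketch combining the fetch helpers above in a Vitest test; the URLs and payloads are illustrative, not part of the package.

```ts
import { describe, expect, it, vi } from 'vitest'
import { createMultiMockFetch, mockFetchError, setupGlobalFetchMock } from '@sim/testing/mocks'

describe('fetch mocks', () => {
  it('routes requests to configured responses', async () => {
    const mockFetch = createMultiMockFetch({
      '/api/users': { json: [{ id: 1 }] },
    })
    vi.stubGlobal('fetch', mockFetch)

    const res = await fetch('/api/users')
    expect(res.ok).toBe(true)
    expect(await res.json()).toEqual([{ id: 1 }])

    // Unmatched URLs fall through to the built-in 404 default
    const missing = await fetch('/api/unknown')
    expect(missing.status).toBe(404)
  })

  it('rejects the next call when an error is queued', async () => {
    setupGlobalFetchMock({ json: {} })
    mockFetchError('network down')

    await expect(fetch('/api/anything')).rejects.toThrow('network down')
  })
})
```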
47
packages/testing/src/mocks/index.ts
Normal file
@@ -0,0 +1,47 @@
/**
 * Mock implementations for common dependencies.
 *
 * @example
 * ```ts
 * import { createMockLogger, setupGlobalFetchMock, databaseMock } from '@sim/testing/mocks'
 *
 * // Mock the logger
 * vi.mock('@/lib/logs/console/logger', () => ({ createLogger: () => createMockLogger() }))
 *
 * // Mock fetch globally
 * setupGlobalFetchMock({ json: { success: true } })
 *
 * // Mock database
 * vi.mock('@sim/db', () => databaseMock)
 * ```
 */

// Database mocks
export {
  createMockDb,
  createMockSql,
  createMockSqlOperators,
  databaseMock,
  drizzleOrmMock,
} from './database.mock'
// Fetch mocks
export {
  createMockFetch,
  createMockResponse,
  createMultiMockFetch,
  type MockFetchResponse,
  mockFetchError,
  mockNextFetchResponse,
  setupGlobalFetchMock,
} from './fetch.mock'
// Logger mocks
export { clearLoggerMocks, createMockLogger, getLoggerCalls, loggerMock } from './logger.mock'
// Socket mocks
export {
  createMockSocket,
  createMockSocketServer,
  type MockSocket,
  type MockSocketServer,
} from './socket.mock'
// Storage mocks
export { clearStorageMocks, createMockStorage, setupGlobalStorageMocks } from './storage.mock'
63
packages/testing/src/mocks/logger.mock.ts
Normal file
@@ -0,0 +1,63 @@
import { vi } from 'vitest'

/**
 * Creates a mock logger that captures all log calls.
 *
 * @example
 * ```ts
 * const logger = createMockLogger()
 * // Use in your code
 * logger.info('test message')
 * // Assert
 * expect(logger.info).toHaveBeenCalledWith('test message')
 * ```
 */
export function createMockLogger() {
  return {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
    trace: vi.fn(),
    fatal: vi.fn(),
    child: vi.fn(() => createMockLogger()),
  }
}

/**
 * Mock module for @/lib/logs/console/logger.
 * Use with vi.mock() to replace the real logger.
 *
 * @example
 * ```ts
 * vi.mock('@/lib/logs/console/logger', () => loggerMock)
 * ```
 */
export const loggerMock = {
  createLogger: vi.fn(() => createMockLogger()),
  logger: createMockLogger(),
}

/**
 * Returns the mock logger calls for assertion.
 */
export function getLoggerCalls(logger: ReturnType<typeof createMockLogger>) {
  return {
    info: logger.info.mock.calls,
    warn: logger.warn.mock.calls,
    error: logger.error.mock.calls,
    debug: logger.debug.mock.calls,
  }
}

/**
 * Clears all logger mock calls.
 */
export function clearLoggerMocks(logger: ReturnType<typeof createMockLogger>) {
  logger.info.mockClear()
  logger.warn.mockClear()
  logger.error.mockClear()
  logger.debug.mockClear()
  logger.trace.mockClear()
  logger.fatal.mockClear()
}
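A usage sketch for the logger helpers above; the log messages and payloads are illustrative.

```ts
import { describe, expect, it } from 'vitest'
import { clearLoggerMocks, createMockLogger, getLoggerCalls } from '@sim/testing/mocks'

describe('logger mock', () => {
  it('records and exposes log calls', () => {
    const logger = createMockLogger()

    logger.info('workflow started', { workflowId: 'wf-1' })
    logger.error('step failed')

    const calls = getLoggerCalls(logger)
    expect(calls.info[0]).toEqual(['workflow started', { workflowId: 'wf-1' }])
    expect(calls.error).toHaveLength(1)

    // Clearing wipes the recorded calls so the next test starts clean
    clearLoggerMocks(logger)
    expect(getLoggerCalls(logger).info).toHaveLength(0)
  })
})
```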
179
packages/testing/src/mocks/socket.mock.ts
Normal file
@@ -0,0 +1,179 @@
import { type Mock, vi } from 'vitest'

/**
 * Mock socket interface for type safety.
 */
export interface IMockSocket {
  id: string
  connected: boolean
  disconnected: boolean
  emit: Mock
  on: Mock
  once: Mock
  off: Mock
  connect: Mock
  disconnect: Mock
  join: Mock
  leave: Mock
  _handlers: Record<string, ((...args: any[]) => any)[]>
  _trigger: (event: string, ...args: any[]) => void
  _reset: () => void
}

/**
 * Creates a mock Socket.IO client socket.
 *
 * @example
 * ```ts
 * const socket = createMockSocket()
 * socket.emit('test', { data: 'value' })
 * expect(socket.emit).toHaveBeenCalledWith('test', { data: 'value' })
 * ```
 */
export function createMockSocket(): IMockSocket {
  const eventHandlers: Record<string, ((...args: any[]) => any)[]> = {}

  const socket = {
    id: `socket-${Math.random().toString(36).substring(2, 10)}`,
    connected: true,
    disconnected: false,

    // Core methods
    emit: vi.fn((event: string, ..._args: any[]) => {
      return socket
    }),

    on: vi.fn((event: string, handler: (...args: any[]) => any) => {
      if (!eventHandlers[event]) {
        eventHandlers[event] = []
      }
      eventHandlers[event].push(handler)
      return socket
    }),

    once: vi.fn((event: string, handler: (...args: any[]) => any) => {
      if (!eventHandlers[event]) {
        eventHandlers[event] = []
      }
      eventHandlers[event].push(handler)
      return socket
    }),

    off: vi.fn((event: string, handler?: (...args: any[]) => any) => {
      if (handler && eventHandlers[event]) {
        eventHandlers[event] = eventHandlers[event].filter((h) => h !== handler)
      } else {
        delete eventHandlers[event]
      }
      return socket
    }),

    connect: vi.fn(() => {
      socket.connected = true
      socket.disconnected = false
      return socket
    }),

    disconnect: vi.fn(() => {
      socket.connected = false
      socket.disconnected = true
      return socket
    }),

    // Room methods
    join: vi.fn((_room: string) => socket),
    leave: vi.fn((_room: string) => socket),

    // Utility methods for testing
    _handlers: eventHandlers,

    _trigger: (event: string, ...args: any[]) => {
      const handlers = eventHandlers[event] || []
      handlers.forEach((handler) => handler(...args))
    },

    _reset: () => {
      Object.keys(eventHandlers).forEach((key) => delete eventHandlers[key])
      socket.emit.mockClear()
      socket.on.mockClear()
      socket.once.mockClear()
      socket.off.mockClear()
    },
  }

  return socket
}

/**
 * Mock socket server interface.
 */
export interface IMockSocketServer {
  sockets: Map<string, IMockSocket>
  rooms: Map<string, Set<string>>
  emit: Mock
  to: Mock
  in: Mock
  _addSocket: (socket: IMockSocket) => void
  _joinRoom: (socketId: string, room: string) => void
  _leaveRoom: (socketId: string, room: string) => void
}

/**
 * Creates a mock Socket.IO server.
 */
export function createMockSocketServer(): IMockSocketServer {
  const sockets = new Map<string, IMockSocket>()
  const rooms = new Map<string, Set<string>>()

  return {
    sockets,
    rooms,

    emit: vi.fn((_event: string, ..._args: any[]) => {}),

    to: vi.fn((room: string) => ({
      emit: vi.fn((event: string, ...args: any[]) => {
        const socketIds = rooms.get(room) || new Set()
        socketIds.forEach((id) => {
          const socket = sockets.get(id)
          if (socket) {
            socket._trigger(event, ...args)
          }
        })
      }),
    })),

    in: vi.fn((room: string) => ({
      emit: vi.fn((event: string, ...args: any[]) => {
        const socketIds = rooms.get(room) || new Set()
        socketIds.forEach((id) => {
          const socket = sockets.get(id)
          if (socket) {
            socket._trigger(event, ...args)
          }
        })
      }),
    })),

    _addSocket: (socket: ReturnType<typeof createMockSocket>) => {
      sockets.set(socket.id, socket)
    },

    _joinRoom: (socketId: string, room: string) => {
      if (!rooms.has(room)) {
        rooms.set(room, new Set())
      }
      rooms.get(room)?.add(socketId)
    },

    _leaveRoom: (socketId: string, room: string) => {
      rooms.get(room)?.delete(socketId)
    },
  }
}

/**
 * Type aliases for convenience.
 */
export type MockSocket = IMockSocket
export type MockSocketServer = IMockSocketServer
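A sketch of exercising room broadcasts with the socket mocks above; the event name and room id are illustrative.

```ts
import { describe, expect, it, vi } from 'vitest'
import { createMockSocket, createMockSocketServer } from '@sim/testing/mocks'

describe('socket mocks', () => {
  it('delivers room broadcasts to registered handlers', () => {
    const server = createMockSocketServer()
    const socket = createMockSocket()
    const onUpdate = vi.fn()

    // Register a handler, then place the socket in a room on the mock server
    socket.on('workflow:update', onUpdate)
    server._addSocket(socket)
    server._joinRoom(socket.id, 'workflow-1')

    // Broadcasting to the room triggers the socket's registered handlers
    server.to('workflow-1').emit('workflow:update', { blockId: 'block-1' })

    expect(onUpdate).toHaveBeenCalledWith({ blockId: 'block-1' })
  })
})
```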
76
packages/testing/src/mocks/storage.mock.ts
Normal file
@@ -0,0 +1,76 @@
import { vi } from 'vitest'

/**
 * Creates a mock storage implementation (localStorage/sessionStorage).
 *
 * @example
 * ```ts
 * const storage = createMockStorage()
 * storage.setItem('key', 'value')
 * expect(storage.getItem('key')).toBe('value')
 * ```
 */
export function createMockStorage(): Storage {
  const store: Record<string, string> = {}

  return {
    getItem: vi.fn((key: string) => store[key] ?? null),
    setItem: vi.fn((key: string, value: string) => {
      store[key] = value
    }),
    removeItem: vi.fn((key: string) => {
      delete store[key]
    }),
    clear: vi.fn(() => {
      Object.keys(store).forEach((key) => delete store[key])
    }),
    key: vi.fn((index: number) => Object.keys(store)[index] ?? null),
    get length() {
      return Object.keys(store).length
    },
  }
}

/**
 * Sets up global localStorage and sessionStorage mocks.
 *
 * @example
 * ```ts
 * // In vitest.setup.ts
 * setupGlobalStorageMocks()
 * ```
 */
export function setupGlobalStorageMocks() {
  const localStorageMock = createMockStorage()
  const sessionStorageMock = createMockStorage()

  Object.defineProperty(globalThis, 'localStorage', {
    value: localStorageMock,
    writable: true,
  })

  Object.defineProperty(globalThis, 'sessionStorage', {
    value: sessionStorageMock,
    writable: true,
  })

  return { localStorage: localStorageMock, sessionStorage: sessionStorageMock }
}

/**
 * Clears all storage mock data and calls.
 */
export function clearStorageMocks() {
  if (typeof localStorage !== 'undefined') {
    localStorage.clear()
    vi.mocked(localStorage.getItem).mockClear()
    vi.mocked(localStorage.setItem).mockClear()
    vi.mocked(localStorage.removeItem).mockClear()
  }
  if (typeof sessionStorage !== 'undefined') {
    sessionStorage.clear()
    vi.mocked(sessionStorage.getItem).mockClear()
    vi.mocked(sessionStorage.setItem).mockClear()
    vi.mocked(sessionStorage.removeItem).mockClear()
  }
}
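A sketch of a test relying on the storage mocks above; the storage key is illustrative.

```ts
import { describe, expect, it } from 'vitest'
import { clearStorageMocks, setupGlobalStorageMocks } from '@sim/testing/mocks'

describe('storage mocks', () => {
  it('persists values until cleared', () => {
    // Install the globals and keep direct references for assertions
    const { localStorage } = setupGlobalStorageMocks()

    localStorage.setItem('sidebar-collapsed', 'true')
    expect(localStorage.getItem('sidebar-collapsed')).toBe('true')
    expect(localStorage.length).toBe(1)

    // Clearing wipes both the stored values and the recorded mock calls
    clearStorageMocks()
    expect(localStorage.getItem('sidebar-collapsed')).toBeNull()
  })
})
```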
74
packages/testing/src/setup/global.setup.ts
Normal file
@@ -0,0 +1,74 @@
/**
 * Global setup utilities that run once before all tests.
 *
 * Use this for expensive setup that should only happen once.
 */

import { vi } from 'vitest'

/**
 * Suppresses specific console warnings/errors during tests.
 */
export function suppressConsoleWarnings(patterns: RegExp[]): void {
  const originalWarn = console.warn
  const originalError = console.error

  console.warn = (...args: any[]) => {
    const message = args.join(' ')
    if (patterns.some((pattern) => pattern.test(message))) {
      return
    }
    originalWarn.apply(console, args)
  }

  console.error = (...args: any[]) => {
    const message = args.join(' ')
    if (patterns.some((pattern) => pattern.test(message))) {
      return
    }
    originalError.apply(console, args)
  }
}

/**
 * Common patterns to suppress in tests.
 */
export const COMMON_SUPPRESS_PATTERNS = [
  /Zustand.*persist middleware/i,
  /React does not recognize the.*prop/,
  /Warning: Invalid DOM property/,
  /act\(\) warning/,
]

/**
 * Sets up global mocks for Node.js environment.
 */
export function setupNodeEnvironment(): void {
  // Mock window if not present
  if (typeof window === 'undefined') {
    vi.stubGlobal('window', {
      location: { href: 'http://localhost:3000' },
      addEventListener: vi.fn(),
      removeEventListener: vi.fn(),
    })
  }

  // Mock document if not present
  if (typeof document === 'undefined') {
    vi.stubGlobal('document', {
      createElement: vi.fn(() => ({
        style: {},
        setAttribute: vi.fn(),
        appendChild: vi.fn(),
      })),
      body: { appendChild: vi.fn() },
    })
  }
}

/**
 * Cleans up global mocks after tests.
 */
export function cleanupGlobalMocks(): void {
  vi.unstubAllGlobals()
}
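A sketch of how a consuming project's setup file might wire these utilities together. It assumes the package root re-exports them; if not, import from the source paths instead.

```ts
// Setup-file sketch for a consuming project (loaded via test.setupFiles).
// The '@sim/testing' root import is an assumption about the package's index exports.
import { afterAll } from 'vitest'
import {
  cleanupGlobalMocks,
  COMMON_SUPPRESS_PATTERNS,
  setupNodeEnvironment,
  suppressConsoleWarnings,
} from '@sim/testing'

// Silence known-noisy warnings and stub browser globals once, before the suite runs
suppressConsoleWarnings(COMMON_SUPPRESS_PATTERNS)
setupNodeEnvironment()

// Undo the global stubs when the suite finishes
afterAll(() => {
  cleanupGlobalMocks()
})
```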
40
packages/testing/src/setup/vitest.setup.ts
Normal file
@@ -0,0 +1,40 @@
/**
 * Shared Vitest setup file for the testing package.
 *
 * Import this in your vitest.config.ts to get common mocks and setup.
 *
 * @example
 * ```ts
 * // vitest.config.ts
 * export default defineConfig({
 *   test: {
 *     setupFiles: ['@sim/testing/setup'],
 *   },
 * })
 * ```
 */

import { afterEach, beforeEach, vi } from 'vitest'
import { setupGlobalFetchMock } from '../mocks/fetch.mock'
import { createMockLogger } from '../mocks/logger.mock'
import { clearStorageMocks, setupGlobalStorageMocks } from '../mocks/storage.mock'

// Setup global storage mocks
setupGlobalStorageMocks()

// Setup global fetch mock with empty JSON response by default
setupGlobalFetchMock({ json: {} })

// Clear mocks between tests
beforeEach(() => {
  vi.clearAllMocks()
})

afterEach(() => {
  clearStorageMocks()
})

// Export utilities for use in tests
export { createMockLogger }
export { setupGlobalStorageMocks, clearStorageMocks }
export { mockFetchError, mockNextFetchResponse, setupGlobalFetchMock } from '../mocks/fetch.mock'
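With the shared setup loaded via `setupFiles`, individual tests can override the default empty-JSON fetch response for a single call. A sketch; the endpoint and payload are illustrative.

```ts
import { describe, expect, it } from 'vitest'
import { mockNextFetchResponse } from '@sim/testing/setup'

describe('with shared setup', () => {
  it('overrides the default fetch response for one call', async () => {
    // Queue a one-off response on the globally stubbed fetch
    mockNextFetchResponse({ status: 201, json: { id: 'wf-1' } })

    const res = await fetch('/api/workflows', { method: 'POST' })
    expect(res.status).toBe(201)
    expect(await res.json()).toEqual({ id: 'wf-1' })

    // Subsequent calls fall back to the default empty JSON body
    const next = await fetch('/api/workflows')
    expect(await next.json()).toEqual({})
  })
})
```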
192
packages/testing/src/types/index.ts
Normal file
@@ -0,0 +1,192 @@
/**
 * Core types for the testing package.
 * These are simplified versions of the actual types used in apps/sim,
 * designed for test scenarios without requiring all dependencies.
 */

export interface Position {
  x: number
  y: number
}

export interface BlockData {
  parentId?: string
  extent?: 'parent'
  width?: number
  height?: number
  count?: number
  loopType?: 'for' | 'forEach' | 'while' | 'doWhile'
  parallelType?: 'count' | 'collection'
  collection?: any
  whileCondition?: string
  doWhileCondition?: string
  type?: string
}

/**
 * SubBlockType union for testing.
 * Matches the SubBlockType values from the app (apps/sim/blocks/types.ts).
 */
export type SubBlockType =
  | 'short-input'
  | 'long-input'
  | 'dropdown'
  | 'combobox'
  | 'slider'
  | 'table'
  | 'code'
  | 'switch'
  | 'tool-input'
  | 'checkbox-list'
  | 'grouped-checkbox-list'
  | 'condition-input'
  | 'eval-input'
  | 'time-input'
  | 'oauth-input'
  | 'webhook-config'
  | 'schedule-info'
  | 'file-selector'
  | 'project-selector'
  | 'channel-selector'
  | 'user-selector'
  | 'folder-selector'
  | 'knowledge-base-selector'
  | 'knowledge-tag-filters'
  | 'document-selector'
  | 'document-tag-entry'
  | 'mcp-server-selector'
  | 'mcp-tool-selector'
  | 'mcp-dynamic-args'
  | 'input-format'

export interface SubBlockState {
  id: string
  type: SubBlockType
  value: string | number | string[][] | null
}

/**
 * Primitive value types for block outputs.
 */
export type PrimitiveValueType = 'string' | 'number' | 'boolean'

/**
 * BlockOutput type matching the app's structure.
 * Can be a primitive type or an object with string keys.
 */
export type BlockOutput =
  | PrimitiveValueType
  | { [key: string]: PrimitiveValueType | Record<string, any> }

export interface BlockState {
  id: string
  type: string
  name: string
  position: Position
  subBlocks: Record<string, SubBlockState>
  outputs: Record<string, BlockOutput>
  enabled: boolean
  horizontalHandles?: boolean
  height?: number
  advancedMode?: boolean
  triggerMode?: boolean
  data?: BlockData
  layout?: {
    measuredWidth?: number
    measuredHeight?: number
  }
}

export interface Edge {
  id: string
  source: string
  target: string
  sourceHandle?: string
  targetHandle?: string
  type?: string
  data?: Record<string, any>
}

export interface Loop {
  id: string
  nodes: string[]
  iterations: number
  loopType: 'for' | 'forEach' | 'while' | 'doWhile'
  forEachItems?: any[] | Record<string, any> | string
  whileCondition?: string
  doWhileCondition?: string
}

export interface Parallel {
  id: string
  nodes: string[]
  distribution?: any[] | Record<string, any> | string
  count?: number
  parallelType?: 'count' | 'collection'
}

export interface WorkflowState {
  blocks: Record<string, BlockState>
  edges: Edge[]
  loops: Record<string, Loop>
  parallels: Record<string, Parallel>
  lastSaved?: number
  lastUpdate?: number
  isDeployed?: boolean
  deployedAt?: Date
  needsRedeployment?: boolean
  variables?: Array<{
    id: string
    name: string
    type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
    value: any
  }>
}

export interface ExecutionContext {
  workflowId: string
  executionId?: string
  blockStates: Map<string, any>
  executedBlocks: Set<string>
  blockLogs: any[]
  metadata: {
    duration: number
    startTime?: string
    endTime?: string
  }
  environmentVariables: Record<string, string>
  workflowVariables?: Record<string, any>
  decisions: {
    router: Map<string, any>
    condition: Map<string, any>
  }
  loopExecutions: Map<string, any>
  completedLoops: Set<string>
  activeExecutionPath: Set<string>
  abortSignal?: AbortSignal
}

export interface User {
  id: string
  email: string
  name?: string
  image?: string
}

export interface Workspace {
  id: string
  name: string
  ownerId: string
  createdAt: Date
  updatedAt: Date
}

export interface Workflow {
  id: string
  name: string
  workspaceId: string
  state: WorkflowState
  createdAt: Date
  updatedAt: Date
  isDeployed?: boolean
}
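A minimal sketch of hand-building an `ExecutionContext` for a unit test, assuming the package's root entry re-exports these types; the ids and values are illustrative.

```ts
import type { ExecutionContext } from '@sim/testing'

// Only the required fields of the interface are filled in; optional ones are omitted.
const ctx: ExecutionContext = {
  workflowId: 'workflow-1',
  blockStates: new Map([['block-1', { output: { response: 'ok' } }]]),
  executedBlocks: new Set(['block-1']),
  blockLogs: [],
  metadata: { duration: 0 },
  environmentVariables: {},
  decisions: { router: new Map(), condition: new Map() },
  loopExecutions: new Map(),
  completedLoops: new Set(),
  activeExecutionPath: new Set(['block-1']),
}
```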
20
packages/testing/tsconfig.json
Normal file
@@ -0,0 +1,20 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "ESNext",
    "moduleResolution": "bundler",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "declaration": true,
    "declarationMap": true,
    "outDir": "./dist",
    "rootDir": "./src",
    "baseUrl": ".",
    "paths": {
      "@sim/testing/*": ["./src/*"]
    }
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules", "dist"]
}