improvement(tables): ops and experience

This commit is contained in:
waleed
2026-04-03 20:03:03 -07:00
parent 855c892f55
commit eba424c8a3
36 changed files with 1306 additions and 575 deletions

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import type { NextRequest } from 'next/server'
import { createFeatureFlagsMock, createMockRequest } from '@sim/testing'
import { drizzleOrmMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const {
mockVerifyCronAuth,
mockExecuteScheduleJob,
mockExecuteJobInline,
mockFeatureFlags,
mockDbReturning,
mockDbUpdate,
mockEnqueue,
@@ -33,12 +33,6 @@ const {
mockVerifyCronAuth: vi.fn().mockReturnValue(null),
mockExecuteScheduleJob: vi.fn().mockResolvedValue(undefined),
mockExecuteJobInline: vi.fn().mockResolvedValue(undefined),
mockFeatureFlags: {
isTriggerDevEnabled: false,
isHosted: false,
isProd: false,
isDev: true,
},
mockDbReturning,
mockDbUpdate,
mockEnqueue,
@@ -49,6 +43,13 @@ const {
}
})
const mockFeatureFlags = createFeatureFlagsMock({
isTriggerDevEnabled: false,
isHosted: false,
isProd: false,
isDev: true,
})
vi.mock('@/lib/auth/internal', () => ({
verifyCronAuth: mockVerifyCronAuth,
}))
@@ -91,17 +92,7 @@ vi.mock('@/lib/workflows/utils', () => ({
}),
}))
vi.mock('drizzle-orm', () => ({
and: vi.fn((...conditions: unknown[]) => ({ type: 'and', conditions })),
eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })),
ne: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'ne' })),
lte: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lte' })),
lt: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lt' })),
not: vi.fn((condition: unknown) => ({ type: 'not', condition })),
isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })),
or: vi.fn((...conditions: unknown[]) => ({ type: 'or', conditions })),
sql: vi.fn((strings: unknown, ...values: unknown[]) => ({ type: 'sql', strings, values })),
}))
vi.mock('drizzle-orm', () => drizzleOrmMock)
vi.mock('@sim/db', () => ({
db: {
@@ -177,18 +168,13 @@ const SINGLE_JOB = [
},
]
function createMockRequest(): NextRequest {
const mockHeaders = new Map([
['authorization', 'Bearer test-cron-secret'],
['content-type', 'application/json'],
])
return {
headers: {
get: (key: string) => mockHeaders.get(key.toLowerCase()) || null,
},
url: 'http://localhost:3000/api/schedules/execute',
} as NextRequest
/**
 * Builds a mock cron-authenticated GET request against the schedule
 * execution endpoint, for use in the route-handler tests below.
 */
function createCronRequest() {
  const cronHeaders = { Authorization: 'Bearer test-cron-secret' }
  const endpoint = 'http://localhost:3000/api/schedules/execute'
  return createMockRequest('GET', undefined, cronHeaders, endpoint)
}
describe('Scheduled Workflow Execution API Route', () => {
@@ -204,7 +190,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should execute scheduled workflows with Trigger.dev disabled', async () => {
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as any)
expect(response).toBeDefined()
expect(response.status).toBe(200)
@@ -217,7 +203,7 @@ describe('Scheduled Workflow Execution API Route', () => {
mockFeatureFlags.isTriggerDevEnabled = true
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as any)
expect(response).toBeDefined()
expect(response.status).toBe(200)
@@ -228,7 +214,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should handle case with no due schedules', async () => {
mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as any)
expect(response.status).toBe(200)
const data = await response.json()
@@ -239,7 +225,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should execute multiple schedules in parallel', async () => {
mockDbReturning.mockReturnValueOnce(MULTIPLE_SCHEDULES).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as any)
expect(response.status).toBe(200)
const data = await response.json()
@@ -249,7 +235,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should queue mothership jobs to BullMQ when available', async () => {
mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce(SINGLE_JOB)
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as any)
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(
@@ -274,7 +260,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should enqueue preassigned correlation metadata for schedules', async () => {
mockDbReturning.mockReturnValue(SINGLE_SCHEDULE)
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as any)
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(

View File

@@ -6,7 +6,7 @@ import {
DropdownMenuTrigger,
} from '@/components/emcn'
import { ArrowDown, ArrowUp, Duplicate, Pencil, Trash } from '@/components/emcn/icons'
import type { ContextMenuState } from '../../types'
import type { ContextMenuState } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
interface ContextMenuProps {
contextMenu: ContextMenuState

View File

@@ -17,13 +17,16 @@ import {
Textarea,
} from '@/components/emcn'
import type { ColumnDefinition, TableInfo, TableRow } from '@/lib/table'
import {
cleanCellValue,
formatValueForInput,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/utils'
import {
useCreateTableRow,
useDeleteTableRow,
useDeleteTableRows,
useUpdateTableRow,
} from '@/hooks/queries/tables'
import { cleanCellValue, formatValueForInput } from '../../utils'
const logger = createLogger('RowModal')

View File

@@ -1 +1,2 @@
export type { TableFilterHandle } from './table-filter'
export { TableFilter } from './table-filter'

View File

@@ -1,6 +1,14 @@
'use client'
import { memo, useCallback, useMemo, useRef, useState } from 'react'
import {
forwardRef,
memo,
useCallback,
useImperativeHandle,
useMemo,
useRef,
useState,
} from 'react'
import { X } from 'lucide-react'
import { nanoid } from 'nanoid'
import {
@@ -19,22 +27,42 @@ const OPERATOR_LABELS = Object.fromEntries(
COMPARISON_OPERATORS.map((op) => [op.value, op.label])
) as Record<string, string>
export interface TableFilterHandle {
addColumnRule: (columnName: string) => void
}
interface TableFilterProps {
columns: Array<{ name: string; type: string }>
filter: Filter | null
onApply: (filter: Filter | null) => void
onClose: () => void
initialColumn?: string | null
}
export function TableFilter({ columns, filter, onApply, onClose }: TableFilterProps) {
export const TableFilter = forwardRef<TableFilterHandle, TableFilterProps>(function TableFilter(
{ columns, filter, onApply, onClose, initialColumn },
ref
) {
const [rules, setRules] = useState<FilterRule[]>(() => {
const fromFilter = filterToRules(filter)
return fromFilter.length > 0 ? fromFilter : [createRule(columns)]
if (fromFilter.length > 0) return fromFilter
const rule = createRule(columns)
return [initialColumn ? { ...rule, column: initialColumn } : rule]
})
const rulesRef = useRef(rules)
rulesRef.current = rules
useImperativeHandle(
ref,
() => ({
addColumnRule: (columnName: string) => {
setRules((prev) => [...prev, { ...createRule(columns), column: columnName }])
},
}),
[columns]
)
const columnOptions = useMemo(
() => columns.map((col) => ({ value: col.name, label: col.name })),
[columns]
@@ -125,7 +153,7 @@ export function TableFilter({ columns, filter, onApply, onClose }: TableFilterPr
</div>
</div>
)
}
})
interface FilterRuleRowProps {
rule: FilterRule

View File

@@ -0,0 +1,39 @@
import { createTableColumn, createTableRow } from '@sim/testing'
import { describe, expect, it } from 'vitest'
import { buildTableCsv, formatTableExportValue } from './export'
describe('table export utils', () => {
  // formatTableExportValue: per-column-type conversion of a cell value into
  // the string written to the exported file.
  it('formats exported values using table display conventions', () => {
    // Date columns are converted from ISO storage form to MM/DD/YYYY display form.
    expect(formatTableExportValue('2026-04-03', { name: 'date', type: 'date' })).toBe('04/03/2026')
    // JSON columns are serialized compactly.
    expect(formatTableExportValue({ nested: true }, { name: 'payload', type: 'json' })).toBe(
      '{"nested":true}'
    )
    // Null (and undefined) render as empty cells.
    expect(formatTableExportValue(null, { name: 'empty', type: 'string' })).toBe('')
  })

  // buildTableCsv: header row from column names, CRLF line endings, and
  // RFC 4180-style quoting/doubling for embedded quotes and commas.
  it('builds CSV using visible columns and escaped values', () => {
    const columns = [
      createTableColumn({ name: 'name', type: 'string' }),
      createTableColumn({ name: 'date', type: 'date' }),
      createTableColumn({ name: 'notes', type: 'json' }),
    ]
    const rows = [
      createTableRow({
        id: 'row_1',
        position: 0,
        createdAt: '2026-04-03T00:00:00.000Z',
        updatedAt: '2026-04-03T00:00:00.000Z',
        data: {
          // Embedded quotes must be doubled and the cell wrapped in quotes.
          name: 'Ada "Lovelace"',
          date: '2026-04-03',
          // JSON.stringify escapes the newline, so the cell contains a
          // literal backslash-n rather than a line break.
          notes: { text: 'line 1\nline 2' },
        },
      }),
    ]
    expect(buildTableCsv(columns, rows)).toBe(
      'name,date,notes\r\n"Ada ""Lovelace""",04/03/2026,"{""text"":""line 1\\nline 2""}"'
    )
  })
})

View File

@@ -0,0 +1,38 @@
import type { ColumnDefinition, TableRow } from '@/lib/table'
import { storageToDisplay } from './utils'
/**
 * Serializes a value to JSON, falling back to String() when serialization
 * is impossible.
 *
 * JSON.stringify throws for circular structures and BigInt, but it returns
 * `undefined` (without throwing) for `undefined`, functions, and symbols —
 * which would silently violate the declared `string` return type. Both
 * failure modes fall back to String() so callers always receive a string.
 */
function safeJsonStringify(value: unknown): string {
  try {
    const serialized = JSON.stringify(value)
    return serialized === undefined ? String(value) : serialized
  } catch {
    return String(value)
  }
}
/**
 * Converts a single cell value into the string written to an export,
 * matching how the table UI displays values of the column's type.
 * Null and undefined become empty cells.
 */
export function formatTableExportValue(value: unknown, column: ColumnDefinition): string {
  if (value == null) {
    return ''
  }

  if (column.type === 'date') {
    return storageToDisplay(String(value))
  }

  if (column.type === 'json') {
    if (typeof value === 'string') {
      return value
    }
    return safeJsonStringify(value)
  }

  return String(value)
}
/**
 * Quotes a CSV cell when it contains a comma, quote, or line break,
 * doubling any embedded quotes per RFC 4180. Safe cells pass through
 * unchanged.
 */
export function escapeCsvCell(value: string): string {
  const needsQuoting = /[",\n\r]/.test(value)
  if (!needsQuoting) {
    return value
  }
  const doubledQuotes = value.replace(/"/g, '""')
  return `"${doubledQuotes}"`
}
/**
 * Renders rows as a CRLF-delimited CSV document. The header row and cell
 * order both follow the provided column definitions; each cell is formatted
 * via formatTableExportValue and escaped via escapeCsvCell.
 */
export function buildTableCsv(columns: ColumnDefinition[], rows: TableRow[]): string {
  const lines: string[] = []
  lines.push(columns.map((column) => escapeCsvCell(column.name)).join(','))

  for (const row of rows) {
    const cells = columns.map((column) =>
      escapeCsvCell(formatTableExportValue(row.data[column.name], column))
    )
    lines.push(cells.join(','))
  }

  return lines.join('\r\n')
}

View File

@@ -1,2 +1,3 @@
export * from './use-context-menu'
export * from './use-export-table'
export * from './use-table-data'

View File

@@ -1,6 +1,6 @@
import { useCallback, useState } from 'react'
import type { TableRow } from '@/lib/table'
import type { ContextMenuState } from '../types'
import type { ContextMenuState } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
interface UseContextMenuReturn {
contextMenu: ContextMenuState

View File

@@ -0,0 +1,84 @@
'use client'
import { useCallback, useState } from 'react'
import { usePostHog } from 'posthog-js/react'
import { toast } from '@/components/emcn'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import { captureEvent } from '@/lib/posthog/client'
import type { ColumnDefinition } from '@/lib/table'
import { buildTableCsv } from '@/app/workspace/[workspaceId]/tables/[tableId]/export'
import type { QueryOptions } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import { fetchAllTableRows } from '@/hooks/queries/tables'
interface UseExportTableParams {
workspaceId: string
tableId: string
tableName?: string | null
columns: ColumnDefinition[]
queryOptions: QueryOptions
canExport: boolean
}
/**
 * Hook that exports the current table view as a CSV download.
 *
 * Fetches every row matching the active filter/sort (not just the rendered
 * page), builds a CSV from the visible columns, triggers a browser download,
 * and records a `table_exported` analytics event. Errors surface as a toast.
 *
 * @returns `isExporting` — true while an export is in flight — and
 *          `handleExportTable`, the callback to wire to the export action.
 */
export function useExportTable({
  workspaceId,
  tableId,
  tableName,
  columns,
  queryOptions,
  canExport,
}: UseExportTableParams) {
  const posthog = usePostHog()
  // Drives the loading state and prevents overlapping exports.
  const [isExporting, setIsExporting] = useState(false)

  const handleExportTable = useCallback(async () => {
    // No-op when the caller lacks permission, required ids are missing,
    // or an export is already running.
    if (!canExport || !workspaceId || !tableId || isExporting) {
      return
    }

    setIsExporting(true)
    try {
      // Page through ALL rows for the current filter/sort so the export is
      // complete, not limited to what the grid currently shows.
      const { rows } = await fetchAllTableRows({
        workspaceId,
        tableId,
        filter: queryOptions.filter,
        sort: queryOptions.sort,
      })
      // Fall back to 'table' when the table has no usable name; sanitize so
      // the result is a safe filename segment.
      const filename = `${sanitizePathSegment(tableName?.trim() || 'table')}.csv`
      const csvContent = buildTableCsv(columns, rows)
      downloadFile(csvContent, filename, 'text/csv;charset=utf-8;')
      captureEvent(posthog, 'table_exported', {
        workspace_id: workspaceId,
        table_id: tableId,
        row_count: rows.length,
        column_count: columns.length,
        has_filter: Boolean(queryOptions.filter),
        has_sort: Boolean(queryOptions.sort),
      })
    } catch (error) {
      toast.error(error instanceof Error ? error.message : 'Failed to export table', {
        duration: 5000,
      })
    } finally {
      setIsExporting(false)
    }
  }, [
    canExport,
    columns,
    isExporting,
    posthog,
    queryOptions.filter,
    queryOptions.sort,
    tableId,
    tableName,
    workspaceId,
  ])

  return {
    isExporting,
    handleExportTable,
  }
}

View File

@@ -1,6 +1,7 @@
import type { TableDefinition, TableRow } from '@/lib/table'
import { TABLE_LIMITS } from '@/lib/table/constants'
import type { QueryOptions } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import { useTable, useTableRows } from '@/hooks/queries/tables'
import type { QueryOptions } from '../types'
interface UseTableDataParams {
workspaceId: string
@@ -30,7 +31,7 @@ export function useTableData({
} = useTableRows({
workspaceId,
tableId,
limit: 1000,
limit: TABLE_LIMITS.MAX_QUERY_LIMIT,
offset: 0,
filter: queryOptions.filter,
sort: queryOptions.sort,

View File

@@ -51,6 +51,13 @@ import { Button } from '../button/button'
const ANIMATION_CLASSES =
'data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:animate-out data-[state=open]:animate-in motion-reduce:animate-none'
/**
* Modal content animation classes.
* We keep only the slide animations (no zoom) to stabilize positioning while avoiding scale effects.
*/
const CONTENT_ANIMATION_CLASSES =
'data-[state=closed]:slide-out-to-top-[50%] data-[state=open]:slide-in-from-top-[50%] motion-reduce:animate-none'
/**
* Root modal component. Manages open state.
*/
@@ -159,8 +166,8 @@ const ModalContent = React.forwardRef<
)}
style={{
left: isWorkflowPage
? // --panel-width is always the rendered panel width on /w/ routes (panel is never hidden/collapsed)
'calc(50% + (var(--sidebar-width) - var(--panel-width)) / 2)'
<<<<<<< HEAD
? 'calc(50% + (var(--sidebar-width) - var(--panel-width)) / 2)'
: 'calc(var(--sidebar-width) / 2 + 50%)',
...style,
}}

View File

@@ -6,6 +6,7 @@ import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { toast } from '@/components/emcn'
import type { Filter, RowData, Sort, TableDefinition, TableMetadata, TableRow } from '@/lib/table'
import { TABLE_LIMITS } from '@/lib/table/constants'
const logger = createLogger('TableQueries')
@@ -23,7 +24,7 @@ export const tableKeys = {
[...tableKeys.rowsRoot(tableId), paramsKey] as const,
}
interface TableRowsParams {
export interface TableRowsParams {
workspaceId: string
tableId: string
limit: number
@@ -32,7 +33,7 @@ interface TableRowsParams {
sort?: Sort | null
}
interface TableRowsResponse {
export interface TableRowsResponse {
rows: TableRow[]
totalCount: number
}
@@ -83,7 +84,7 @@ async function fetchTable(
return (data as { table: TableDefinition }).table
}
async function fetchTableRows({
export async function fetchTableRows({
workspaceId,
tableId,
limit,
@@ -125,6 +126,48 @@ async function fetchTableRows({
}
}
/**
 * Fetches every row of a table by paging through fetchTableRows until the
 * server-reported total is reached (or an empty page signals the end).
 *
 * @param pageSize - Rows requested per round trip; defaults to the maximum
 *                   query limit.
 * @param signal - Optional abort signal forwarded to each page request.
 * @returns All accumulated rows plus the final reported total count.
 */
export async function fetchAllTableRows({
  workspaceId,
  tableId,
  filter,
  sort,
  pageSize = TABLE_LIMITS.MAX_QUERY_LIMIT,
  signal,
}: Pick<TableRowsParams, 'workspaceId' | 'tableId' | 'filter' | 'sort'> & {
  pageSize?: number
  signal?: AbortSignal
}): Promise<TableRowsResponse> {
  const collected: TableRow[] = []
  // Unknown until the first response arrives; Infinity guarantees the loop
  // runs at least once.
  let reportedTotal = Number.POSITIVE_INFINITY

  while (collected.length < reportedTotal) {
    const page = await fetchTableRows({
      workspaceId,
      tableId,
      limit: pageSize,
      // The next offset is exactly how many rows we've accumulated so far.
      offset: collected.length,
      filter,
      sort,
      signal,
    })
    reportedTotal = page.totalCount
    // An empty page means the server has nothing more, even if the reported
    // total claims otherwise — stop rather than loop forever.
    if (page.rows.length === 0) {
      break
    }
    collected.push(...page.rows)
  }

  return {
    rows: collected,
    // If we broke out before any response updated the total, fall back to
    // the number of rows actually fetched.
    totalCount: Number.isFinite(reportedTotal) ? reportedTotal : collected.length,
  }
}
function invalidateRowData(queryClient: ReturnType<typeof useQueryClient>, tableId: string) {
queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(tableId) })
}

View File

@@ -1,30 +1,11 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import { createBlockFromParams } from './builders'
const agentBlockConfig = {
type: 'agent',
name: 'Agent',
outputs: {
content: { type: 'string', description: 'Default content output' },
},
subBlocks: [{ id: 'responseFormat', type: 'response-format' }],
}
const conditionBlockConfig = {
type: 'condition',
name: 'Condition',
outputs: {},
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
vi.mock('@/blocks/registry', () => ({
getAllBlocks: () => [agentBlockConfig, conditionBlockConfig],
getBlock: (type: string) =>
type === 'agent' ? agentBlockConfig : type === 'condition' ? conditionBlockConfig : undefined,
}))
vi.mock('@/blocks/registry', () => createEditWorkflowRegistryMock(['agent', 'condition']))
describe('createBlockFromParams', () => {
it('derives agent outputs from responseFormat when outputs are not provided', () => {

View File

@@ -1,69 +1,16 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { describe, expect, it, vi } from 'vitest'
import { applyOperationsToWorkflowState } from './engine'
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/blocks/registry', () => ({
getAllBlocks: () => [
{
type: 'condition',
name: 'Condition',
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
},
{
type: 'agent',
name: 'Agent',
subBlocks: [
{ id: 'systemPrompt', type: 'long-input' },
{ id: 'model', type: 'combobox' },
],
},
{
type: 'function',
name: 'Function',
subBlocks: [
{ id: 'code', type: 'code' },
{ id: 'language', type: 'dropdown' },
],
},
],
getBlock: (type: string) => {
const blocks: Record<string, any> = {
condition: {
type: 'condition',
name: 'Condition',
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
},
agent: {
type: 'agent',
name: 'Agent',
subBlocks: [
{ id: 'systemPrompt', type: 'long-input' },
{ id: 'model', type: 'combobox' },
],
},
function: {
type: 'function',
name: 'Function',
subBlocks: [
{ id: 'code', type: 'code' },
{ id: 'language', type: 'dropdown' },
],
},
}
return blocks[type] || undefined
},
}))
vi.mock('@/blocks/registry', () =>
createEditWorkflowRegistryMock(['condition', 'agent', 'function'])
)
function makeLoopWorkflow() {
return {

View File

@@ -1,32 +1,12 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import { normalizeConditionRouterIds } from './builders'
import { validateInputsForBlock } from './validation'
const conditionBlockConfig = {
type: 'condition',
name: 'Condition',
outputs: {},
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
const routerBlockConfig = {
type: 'router_v2',
name: 'Router',
outputs: {},
subBlocks: [{ id: 'routes', type: 'router-input' }],
}
vi.mock('@/blocks/registry', () => ({
getBlock: (type: string) =>
type === 'condition'
? conditionBlockConfig
: type === 'router_v2'
? routerBlockConfig
: undefined,
}))
vi.mock('@/blocks/registry', () => createEditWorkflowRegistryMock(['condition', 'router_v2']))
describe('validateInputsForBlock', () => {
it('accepts condition-input arrays with arbitrary item ids', () => {

View File

@@ -1,11 +1,11 @@
import { loggerMock } from '@sim/testing'
import { createFeatureFlagsMock, loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { RateLimiter } from './rate-limiter'
import type { ConsumeResult, RateLimitStorageAdapter, TokenStatus } from './storage'
import { MANUAL_EXECUTION_LIMIT, RATE_LIMITS, RateLimitError } from './types'
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/core/config/feature-flags', () => ({ isBillingEnabled: true }))
vi.mock('@/lib/core/config/feature-flags', () => createFeatureFlagsMock({ isBillingEnabled: true }))
interface MockAdapter {
consumeTokens: Mock

View File

@@ -0,0 +1,36 @@
import { createLogger } from '@sim/logger'
const logger = createLogger('FileDownload')
/**
 * Sanitizes a string for use as a file or path segment in exported assets.
 * Every character outside [A-Za-z0-9-_] is replaced with a hyphen.
 */
export function sanitizePathSegment(name: string): string {
  return name
    .split('')
    .map((char) => (/[a-z0-9-_]/i.test(char) ? char : '-'))
    .join('')
}
/**
 * Downloads a file to the user's device.
 * Throws if the browser cannot create or trigger the download.
 *
 * Wraps string content in a Blob, creates an object URL for it, clicks a
 * temporary anchor element to start the download, then removes the anchor
 * and revokes the URL so the blob can be garbage-collected.
 *
 * @param content - File contents; strings are wrapped in a Blob, Blobs are used as-is.
 * @param filename - Suggested name for the saved file.
 * @param mimeType - MIME type applied when wrapping a string (ignored when content is already a Blob).
 */
export function downloadFile(
  content: Blob | string,
  filename: string,
  mimeType = 'application/json'
): void {
  try {
    const blob = content instanceof Blob ? content : new Blob([content], { type: mimeType })
    const url = URL.createObjectURL(blob)
    const link = document.createElement('a')
    link.href = url
    link.download = filename
    // Anchor is attached to the DOM so click() reliably starts the download,
    // then removed immediately to avoid leaking elements.
    document.body.appendChild(link)
    link.click()
    document.body.removeChild(link)
    // Release the object URL right away; the click has already initiated the download.
    URL.revokeObjectURL(url)
  } catch (error) {
    logger.error('Failed to download file:', error)
    throw error
  }
}

View File

@@ -1,7 +1,7 @@
/**
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { createFeatureFlagsMock, loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
interface MockMcpClient {
@@ -38,7 +38,7 @@ const { MockMcpClientConstructor, mockOnToolsChanged, mockPublishToolsChanged }
)
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/core/config/feature-flags', () => ({ isTest: false }))
vi.mock('@/lib/core/config/feature-flags', () => createFeatureFlagsMock({ isTest: false }))
vi.mock('@/lib/mcp/pubsub', () => ({
mcpPubSub: {
onToolsChanged: mockOnToolsChanged,

View File

@@ -317,6 +317,15 @@ export interface PostHogEventMap {
workspace_id: string
}
table_exported: {
workspace_id: string
table_id: string
row_count: number
column_count: number
has_filter: boolean
has_sort: boolean
}
custom_tool_saved: {
tool_id: string
workspace_id: string

View File

@@ -1,10 +1,10 @@
/**
* @vitest-environment node
*/
import { createTableColumn } from '@sim/testing'
import { describe, expect, it } from 'vitest'
import { TABLE_LIMITS } from '../constants'
import {
type ColumnDefinition,
getUniqueColumns,
type TableSchema,
validateColumnDefinition,
@@ -66,12 +66,12 @@ describe('Validation', () => {
describe('validateColumnDefinition', () => {
it('should accept valid column definition', () => {
const column: ColumnDefinition = {
const column = createTableColumn({
name: 'email',
type: 'string',
required: true,
unique: true,
}
})
const result = validateColumnDefinition(column)
expect(result.valid).toBe(true)
})
@@ -80,19 +80,20 @@ describe('Validation', () => {
const types = ['string', 'number', 'boolean', 'date', 'json'] as const
for (const type of types) {
const result = validateColumnDefinition({ name: 'test', type })
const result = validateColumnDefinition(createTableColumn({ name: 'test', type }))
expect(result.valid).toBe(true)
}
})
it('should reject empty column name', () => {
const result = validateColumnDefinition({ name: '', type: 'string' })
const result = validateColumnDefinition(createTableColumn({ name: '', type: 'string' }))
expect(result.valid).toBe(false)
expect(result.errors).toContain('Column name is required')
})
it('should reject invalid column type', () => {
const result = validateColumnDefinition({
...createTableColumn({ name: 'test' }),
name: 'test',
type: 'invalid' as any,
})
@@ -102,7 +103,7 @@ describe('Validation', () => {
it('should reject column name exceeding max length', () => {
const longName = 'a'.repeat(TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH + 1)
const result = validateColumnDefinition({ name: longName, type: 'string' })
const result = validateColumnDefinition(createTableColumn({ name: longName, type: 'string' }))
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum length')
})
@@ -112,9 +113,9 @@ describe('Validation', () => {
it('should accept valid schema', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string', required: true, unique: true },
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
createTableColumn({ name: 'id', type: 'string', required: true, unique: true }),
createTableColumn({ name: 'name', type: 'string', required: true }),
createTableColumn({ name: 'age', type: 'number' }),
],
}
const result = validateTableSchema(schema)
@@ -131,8 +132,8 @@ describe('Validation', () => {
it('should reject duplicate column names', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string' },
{ name: 'ID', type: 'number' },
createTableColumn({ name: 'id', type: 'string' }),
createTableColumn({ name: 'ID', type: 'number' }),
],
}
const result = validateTableSchema(schema)
@@ -153,10 +154,9 @@ describe('Validation', () => {
})
it('should reject schema exceeding max columns', () => {
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) => ({
name: `col_${i}`,
type: 'string' as const,
}))
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) =>
createTableColumn({ name: `col_${i}`, type: 'string' })
)
const result = validateTableSchema({ columns })
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum columns')
@@ -182,11 +182,11 @@ describe('Validation', () => {
describe('validateRowAgainstSchema', () => {
const schema: TableSchema = {
columns: [
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
{ name: 'active', type: 'boolean' },
{ name: 'created', type: 'date' },
{ name: 'metadata', type: 'json' },
createTableColumn({ name: 'name', type: 'string', required: true }),
createTableColumn({ name: 'age', type: 'number' }),
createTableColumn({ name: 'active', type: 'boolean' }),
createTableColumn({ name: 'created', type: 'date' }),
createTableColumn({ name: 'metadata', type: 'json' }),
],
}
@@ -281,10 +281,10 @@ describe('Validation', () => {
it('should return only columns with unique=true', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string', unique: true },
{ name: 'email', type: 'string', unique: true },
{ name: 'name', type: 'string' },
{ name: 'count', type: 'number', unique: false },
createTableColumn({ name: 'id', type: 'string', unique: true }),
createTableColumn({ name: 'email', type: 'string', unique: true }),
createTableColumn({ name: 'name', type: 'string' }),
createTableColumn({ name: 'count', type: 'number', unique: false }),
],
}
const result = getUniqueColumns(schema)
@@ -295,8 +295,8 @@ describe('Validation', () => {
it('should return empty array when no unique columns', () => {
const schema: TableSchema = {
columns: [
{ name: 'name', type: 'string' },
{ name: 'value', type: 'number' },
createTableColumn({ name: 'name', type: 'string' }),
createTableColumn({ name: 'value', type: 'number' }),
],
}
const result = getUniqueColumns(schema)
@@ -307,9 +307,9 @@ describe('Validation', () => {
describe('validateUniqueConstraints', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string', unique: true },
{ name: 'email', type: 'string', unique: true },
{ name: 'name', type: 'string' },
createTableColumn({ name: 'id', type: 'string', unique: true }),
createTableColumn({ name: 'email', type: 'string', unique: true }),
createTableColumn({ name: 'name', type: 'string' }),
],
}

View File

@@ -4,6 +4,7 @@
import {
createBlock as createTestBlock,
createWorkflowState as createTestWorkflowState,
createWorkflowVariablesMap,
} from '@sim/testing'
import { describe, expect, it } from 'vitest'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -46,6 +47,10 @@ function createBlock(id: string, overrides: Record<string, any> = {}): any {
})
}
function createVariablesMap(...variables: Parameters<typeof createWorkflowVariablesMap>[0]): any {
return createWorkflowVariablesMap(variables)
}
describe('hasWorkflowChanged', () => {
describe('Basic Cases', () => {
it.concurrent('should return true when deployedState is null', () => {
@@ -2181,9 +2186,12 @@ describe('hasWorkflowChanged', () => {
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2192,9 +2200,12 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect removed variables', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
}
const currentState = {
@@ -2208,16 +2219,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable value changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'world' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'world',
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2226,16 +2243,12 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable type changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: '123' },
},
variables: createVariablesMap({ id: 'var1', name: 'myVar', type: 'string', value: '123' }),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'number', value: 123 },
},
variables: createVariablesMap({ id: 'var1', name: 'myVar', type: 'number', value: 123 }),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2244,16 +2257,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable name changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'oldName', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'oldName',
type: 'string',
value: 'hello',
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'newName', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'newName',
type: 'string',
value: 'hello',
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2262,18 +2281,18 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should not detect change for identical variables', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(false)
@@ -2310,16 +2329,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should handle complex variable values (objects)', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'config', type: 'object', value: { key: 'value1' } },
},
variables: createVariablesMap({
id: 'var1',
name: 'config',
type: 'object',
value: { key: 'value1' },
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'config', type: 'object', value: { key: 'value2' } },
},
variables: createVariablesMap({
id: 'var1',
name: 'config',
type: 'object',
value: { key: 'value2' },
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2328,16 +2353,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should handle complex variable values (arrays)', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'items', type: 'array', value: [1, 2, 3] },
},
variables: createVariablesMap({
id: 'var1',
name: 'items',
type: 'array',
value: [1, 2, 3],
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'items', type: 'array', value: [1, 2, 4] },
},
variables: createVariablesMap({
id: 'var1',
name: 'items',
type: 'array',
value: [1, 2, 4],
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2346,18 +2377,18 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should not detect change when variable key order differs', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
}
const currentState = {
...createWorkflowState({}),
variables: {
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap(
{ id: 'var2', name: 'count', type: 'number', value: 42 },
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' }
),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(false)
@@ -2844,31 +2875,27 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
},
}
;(deployedState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
validationError: undefined,
},
}
;(currentState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
validationError: undefined,
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
@@ -2879,31 +2906,27 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
},
}
;(deployedState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
validationError: 'Not a valid number',
},
}
;(currentState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
validationError: 'Not a valid number',
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
@@ -2914,31 +2937,27 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'old value',
},
}
;(deployedState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'old value',
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'new value',
validationError: undefined,
},
}
;(currentState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'new value',
validationError: undefined,
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
@@ -2956,15 +2975,13 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
},
}
;(currentState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
@@ -2975,15 +2992,13 @@ describe('hasWorkflowChanged', () => {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
},
}
;(deployedState as any).variables = createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
})
const currentState = createWorkflowState({
blocks: {
@@ -3151,7 +3166,7 @@ describe('generateWorkflowDiffSummary', () => {
})
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'hello' } },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'hello' }),
})
const result = generateWorkflowDiffSummary(currentState, previousState)
expect(result.hasChanges).toBe(true)
@@ -3161,11 +3176,11 @@ describe('generateWorkflowDiffSummary', () => {
it.concurrent('should detect modified variables', () => {
const previousState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'hello' } },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'hello' }),
})
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'world' } },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'world' }),
})
const result = generateWorkflowDiffSummary(currentState, previousState)
expect(result.hasChanges).toBe(true)

View File

@@ -1,6 +1,8 @@
/**
* @vitest-environment node
*/
import { createMockSelectChain, createMockUpdateChain } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const {
@@ -35,13 +37,7 @@ vi.mock('@sim/db/schema', () => ({
workflowSchedule: { archivedAt: 'workflow_schedule_archived_at' },
}))
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/workflows/utils', () => ({
getWorkflowById: (...args: unknown[]) => mockGetWorkflowById(...args),
@@ -66,24 +62,6 @@ vi.mock('@/lib/core/telemetry', () => ({
import { archiveWorkflow } from '@/lib/workflows/lifecycle'
function createSelectChain<T>(result: T) {
const chain = {
from: vi.fn().mockReturnThis(),
innerJoin: vi.fn().mockReturnThis(),
where: vi.fn().mockResolvedValue(result),
}
return chain
}
function createUpdateChain() {
return {
set: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([]),
}),
}
}
describe('workflow lifecycle', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -107,10 +85,10 @@ describe('workflow lifecycle', () => {
archivedAt: new Date(),
})
mockSelect.mockReturnValue(createSelectChain([]))
mockSelect.mockReturnValue(createMockSelectChain([]))
const tx = {
update: vi.fn().mockImplementation(() => createUpdateChain()),
update: vi.fn().mockImplementation(() => createMockUpdateChain()),
}
mockTransaction.mockImplementation(async (callback: (trx: typeof tx) => Promise<void>) =>
callback(tx)

View File

@@ -1,4 +1,5 @@
import { createLogger } from '@sim/logger'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import {
type ExportWorkflowState,
sanitizeForExport,
@@ -8,6 +9,8 @@ import type { Variable, WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowImportExport')
export { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
async function getJSZip() {
const { default: JSZip } = await import('jszip')
return JSZip
@@ -43,36 +46,6 @@ export interface WorkspaceExportStructure {
folders: FolderExportData[]
}
/**
* Sanitizes a string for use as a path segment in a ZIP file.
*/
export function sanitizePathSegment(name: string): string {
return name.replace(/[^a-z0-9-_]/gi, '-')
}
/**
* Downloads a file to the user's device.
*/
export function downloadFile(
content: Blob | string,
filename: string,
mimeType = 'application/json'
): void {
try {
const blob = content instanceof Blob ? content : new Blob([content], { type: mimeType })
const url = URL.createObjectURL(blob)
const a = document.createElement('a')
a.href = url
a.download = filename
document.body.appendChild(a)
a.click()
document.body.removeChild(a)
URL.revokeObjectURL(url)
} catch (error) {
logger.error('Failed to download file:', error)
}
}
/**
* Fetches a workflow's state and variables for export.
* Returns null if the workflow cannot be fetched.

View File

@@ -1,6 +1,8 @@
/**
* @vitest-environment node
*/
import { createMockDeleteChain, createMockSelectChain, createMockUpdateChain } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const { mockSelect, mockTransaction, mockArchiveWorkflowsForWorkspace, mockGetWorkspaceWithOwner } =
@@ -33,13 +35,7 @@ vi.mock('@sim/db/schema', () => ({
workspaceNotificationSubscription: { active: 'workspace_notification_active' },
}))
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/workflows/lifecycle', () => ({
archiveWorkflowsForWorkspace: (...args: unknown[]) => mockArchiveWorkflowsForWorkspace(...args),
@@ -51,14 +47,6 @@ vi.mock('@/lib/workspaces/permissions/utils', () => ({
import { archiveWorkspace } from './lifecycle'
function createUpdateChain() {
return {
set: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([]),
}),
}
}
describe('workspace lifecycle', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -72,22 +60,12 @@ describe('workspace lifecycle', () => {
archivedAt: null,
})
mockArchiveWorkflowsForWorkspace.mockResolvedValue(2)
mockSelect.mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([{ id: 'server-1' }]),
}),
})
mockSelect.mockReturnValue(createMockSelectChain([{ id: 'server-1' }]))
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([{ id: 'kb-1' }]),
}),
}),
update: vi.fn().mockImplementation(() => createUpdateChain()),
delete: vi.fn().mockImplementation(() => ({
where: vi.fn().mockResolvedValue([]),
})),
select: vi.fn().mockReturnValue(createMockSelectChain([{ id: 'kb-1' }])),
update: vi.fn().mockImplementation(() => createMockUpdateChain()),
delete: vi.fn().mockImplementation(() => createMockDeleteChain()),
}
mockTransaction.mockImplementation(async (callback: (trx: typeof tx) => Promise<void>) =>
callback(tx)

View File

@@ -118,6 +118,15 @@ export {
type SerializedConnection,
type SerializedWorkflow,
} from './serialized-block.factory'
export {
createTableColumn,
createTableRow,
type TableColumnFactoryOptions,
type TableColumnFixture,
type TableColumnType,
type TableRowFactoryOptions,
type TableRowFixture,
} from './table.factory'
// Tool mock responses
export {
mockDriveResponses,
@@ -178,3 +187,10 @@ export {
type WorkflowFactoryOptions,
type WorkflowStateFixture,
} from './workflow.factory'
export {
createWorkflowVariable,
createWorkflowVariablesMap,
type WorkflowVariableFactoryOptions,
type WorkflowVariableFixture,
type WorkflowVariableType,
} from './workflow-variable.factory'

View File

@@ -0,0 +1,12 @@
import { describe, expect, it } from 'vitest'
import { createTableColumn } from './table.factory'
describe('table factory', () => {
  it('generates default column names that match table naming rules', () => {
    // Sample many generated names so a rare invalid character in the random
    // suffix alphabet would still be caught.
    const namePattern = /^[a-z_][a-z0-9_]*$/
    for (let i = 0; i < 100; i++) {
      expect(createTableColumn().name).toMatch(namePattern)
    }
  })
})

View File

@@ -0,0 +1,62 @@
import { customAlphabet, nanoid } from 'nanoid'
/** Column data types supported by table fixtures. */
export type TableColumnType = 'string' | 'number' | 'boolean' | 'date' | 'json'

/** A fully-resolved table column definition as used in tests. */
export interface TableColumnFixture {
  // Column identifier; factory defaults generate rule-compliant names.
  name: string
  type: TableColumnType
  // Optional constraints; left undefined unless a test sets them explicitly.
  required?: boolean
  unique?: boolean
}

/** A fully-resolved table row as used in tests. */
export interface TableRowFixture {
  id: string
  // Cell values keyed by column name; shape depends on the table under test.
  data: Record<string, unknown>
  position: number
  // ISO-8601 strings (createTableRow defaults both to the same timestamp).
  createdAt: string
  updatedAt: string
}

/** Partial input accepted by createTableColumn; omitted fields get defaults. */
export interface TableColumnFactoryOptions {
  name?: string
  type?: TableColumnType
  required?: boolean
  unique?: boolean
}

/** Partial input accepted by createTableRow; omitted fields get defaults. */
export interface TableRowFactoryOptions {
  id?: string
  data?: Record<string, unknown>
  position?: number
  createdAt?: string
  updatedAt?: string
}
// Suffix alphabet is restricted to characters valid inside column names.
const createTableColumnSuffix = customAlphabet('abcdefghijklmnopqrstuvwxyz0123456789_', 6)

/**
 * Creates a table column fixture with sensible defaults.
 *
 * Defaults to a `string` column with a randomized, rule-compliant name;
 * `required`/`unique` stay undefined unless explicitly provided.
 */
export function createTableColumn(options: TableColumnFactoryOptions = {}): TableColumnFixture {
  const { name, type, required, unique } = options
  return {
    name: name ?? `column_${createTableColumnSuffix()}`,
    type: type ?? 'string',
    required,
    unique,
  }
}
/**
 * Creates a table row fixture with sensible defaults.
 *
 * Both timestamps default to the same "now" value so a fresh row reads as
 * never having been modified after creation.
 */
export function createTableRow(options: TableRowFactoryOptions = {}): TableRowFixture {
  const now = new Date().toISOString()
  return {
    id: options.id ?? `row_${nanoid(8)}`,
    data: options.data ?? {},
    position: options.position ?? 0,
    createdAt: options.createdAt ?? now,
    updatedAt: options.updatedAt ?? now,
  }
}

View File

@@ -0,0 +1,53 @@
import { nanoid } from 'nanoid'
/** Variable types supported by workflow variable fixtures. */
export type WorkflowVariableType = 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'

/** A fully-resolved workflow variable as used in tests. */
export interface WorkflowVariableFixture {
  id: string
  name: string
  type: WorkflowVariableType
  // Payload is intentionally untyped; its runtime shape depends on `type`.
  value: unknown
  // Owning workflow id; only set when a test needs to assert on it.
  workflowId?: string
  // Present when a test models a variable that failed validation.
  validationError?: string
}

/** Partial input accepted by the factory; omitted fields get defaults. */
export interface WorkflowVariableFactoryOptions {
  id?: string
  name?: string
  type?: WorkflowVariableType
  value?: unknown
  workflowId?: string
  validationError?: string
}
/**
 * Creates a workflow variable fixture with sensible defaults.
 *
 * When no name is supplied, one is derived from the variable id. The suffix
 * is taken from the END of the id: generated ids all share the `var_` prefix,
 * so a prefix-based suffix would make every default name the identical
 * `variable_var_`.
 */
export function createWorkflowVariable(
  options: WorkflowVariableFactoryOptions = {}
): WorkflowVariableFixture {
  const id = options.id ?? `var_${nanoid(8)}`
  return {
    id,
    // Trailing characters of the id keep default names distinct per variable.
    name: options.name ?? `variable_${id.slice(-4)}`,
    type: options.type ?? 'string',
    value: options.value ?? '',
    workflowId: options.workflowId,
    validationError: options.validationError,
  }
}

/**
 * Creates a variables map keyed by variable id.
 *
 * Each entry is run through {@link createWorkflowVariable}, so partial
 * options gain the same defaults as individually-created fixtures.
 */
export function createWorkflowVariablesMap(
  variables: WorkflowVariableFactoryOptions[] = []
): Record<string, WorkflowVariableFixture> {
  return Object.fromEntries(
    variables.map((variable) => {
      const fixture = createWorkflowVariable(variable)
      return [fixture.id, fixture]
    })
  )
}

View File

@@ -46,10 +46,14 @@ export * from './builders'
export * from './factories'
export {
AuthTypeMock,
asyncRouteParams,
auditMock,
clearRedisMocks,
createEditWorkflowRegistryMock,
createEnvMock,
createFeatureFlagsMock,
createMockDb,
createMockDeleteChain,
createMockFetch,
createMockFormDataRequest,
createMockGetEnv,
@@ -57,15 +61,19 @@ export {
createMockRedis,
createMockRequest,
createMockResponse,
createMockSelectChain,
createMockSocket,
createMockStorage,
createMockUpdateChain,
databaseMock,
defaultMockEnv,
defaultMockUser,
drizzleOrmMock,
envMock,
featureFlagsMock,
loggerMock,
type MockAuthResult,
type MockFeatureFlags,
type MockFetchResponse,
type MockHybridAuthResult,
type MockRedis,

View File

@@ -103,6 +103,38 @@ export function createMockDb() {
}
}
/**
 * Creates a select chain that resolves from `where()`.
 *
 * `from`/`innerJoin`/`leftJoin` are fluent no-ops that return the chain, so
 * any drizzle-style `select().from(...).innerJoin(...).where(...)` sequence
 * ends in a promise of `result`.
 */
export function createMockSelectChain<T>(result: T) {
  const fluent = () => vi.fn().mockReturnThis()
  return {
    from: fluent(),
    innerJoin: fluent(),
    leftJoin: fluent(),
    where: vi.fn().mockResolvedValue(result),
  }
}
/**
 * Creates an update chain that resolves from `where()`.
 *
 * Mirrors drizzle's `update().set(...).where(...)`; `where` resolves with
 * `result` (an empty array unless specified).
 */
export function createMockUpdateChain<T>(result: T = [] as T) {
  const terminal = { where: vi.fn().mockResolvedValue(result) }
  return {
    set: vi.fn().mockReturnValue(terminal),
  }
}
/**
 * Creates a delete chain that resolves from `where()`.
 *
 * Mirrors drizzle's `delete().where(...)`; `where` resolves with `result`
 * (an empty array unless specified).
 */
export function createMockDeleteChain<T>(result: T = [] as T) {
  const where = vi.fn().mockResolvedValue(result)
  return { where }
}
/**
* Mock module for @sim/db.
* Use with vi.mock() to replace the real database.

View File

@@ -0,0 +1,47 @@
/** Shape of a single block config served by the edit-workflow registry mock. */
interface EditWorkflowBlockConfig {
  type: string
  name: string
  outputs: Record<string, { type: string; description: string }>
  subBlocks: Array<{ id: string; type: string }>
}

/**
 * Canonical block configs the mock registry can serve, keyed by block type.
 * `satisfies` keeps literal inference while validating every entry's shape
 * (the previous `Record<string, any>` checked nothing).
 */
const editWorkflowBlockConfigs = {
  condition: {
    type: 'condition',
    name: 'Condition',
    outputs: {},
    subBlocks: [{ id: 'conditions', type: 'condition-input' }],
  },
  agent: {
    type: 'agent',
    name: 'Agent',
    outputs: {
      content: { type: 'string', description: 'Default content output' },
    },
    subBlocks: [
      { id: 'systemPrompt', type: 'long-input' },
      { id: 'model', type: 'combobox' },
      { id: 'responseFormat', type: 'response-format' },
    ],
  },
  function: {
    type: 'function',
    name: 'Function',
    outputs: {},
    subBlocks: [
      { id: 'code', type: 'code' },
      { id: 'language', type: 'dropdown' },
    ],
  },
  router_v2: {
    type: 'router_v2',
    name: 'Router',
    outputs: {},
    subBlocks: [{ id: 'routes', type: 'router-input' }],
  },
} satisfies Record<string, EditWorkflowBlockConfig>

/**
 * Creates a minimal block-registry mock for edit-workflow tests.
 *
 * @param types - Block types to expose; defaults to every known config.
 *                Unknown entries are silently ignored.
 */
export function createEditWorkflowRegistryMock(types?: string[]) {
  const enabledTypes = new Set(types ?? Object.keys(editWorkflowBlockConfigs))
  const blocks: Record<string, EditWorkflowBlockConfig> = Object.fromEntries(
    Object.entries(editWorkflowBlockConfigs).filter(([type]) => enabledTypes.has(type))
  )
  return {
    getAllBlocks: () => Object.values(blocks),
    // Missing types yield undefined at runtime; the signature now says so.
    getBlock: (type: string): EditWorkflowBlockConfig | undefined => blocks[type],
  }
}

View File

@@ -0,0 +1,65 @@
/**
 * Flag surface mirrored by the feature-flags mock.
 *
 * Boolean members toggle individual features; the trailing functions mirror
 * accessor-style exports (allow-lists and cost multiplier).
 */
export interface MockFeatureFlags {
  isProd: boolean
  isDev: boolean
  isTest: boolean
  isHosted: boolean
  isBillingEnabled: boolean
  isEmailVerificationEnabled: boolean
  isAuthDisabled: boolean
  isRegistrationDisabled: boolean
  isEmailPasswordEnabled: boolean
  isSignupEmailValidationEnabled: boolean
  isTriggerDevEnabled: boolean
  isSsoEnabled: boolean
  isCredentialSetsEnabled: boolean
  isAccessControlEnabled: boolean
  isOrganizationsEnabled: boolean
  isInboxEnabled: boolean
  isE2bEnabled: boolean
  isAzureConfigured: boolean
  isInvitationsDisabled: boolean
  isPublicApiDisabled: boolean
  isReactGrabEnabled: boolean
  isReactScanEnabled: boolean
  // Accessors; mock defaults return null / null / 1 (see createFeatureFlagsMock).
  getAllowedIntegrationsFromEnv: () => string[] | null
  getAllowedMcpDomainsFromEnv: () => string[] | null
  getCostMultiplier: () => number
}
/**
 * Creates a mutable mock for the feature flags module.
 *
 * Defaults model a bare self-hosted test environment: only `isTest` and
 * `isEmailPasswordEnabled` are on, and the accessor functions return their
 * "unrestricted" values (null allow-lists, 1x cost multiplier). Pass
 * `overrides` to flip individual flags for a specific scenario.
 */
export function createFeatureFlagsMock(
  overrides: Partial<MockFeatureFlags> = {}
): MockFeatureFlags {
  const defaults: MockFeatureFlags = {
    isProd: false,
    isDev: false,
    isTest: true,
    isHosted: false,
    isBillingEnabled: false,
    isEmailVerificationEnabled: false,
    isAuthDisabled: false,
    isRegistrationDisabled: false,
    isEmailPasswordEnabled: true,
    isSignupEmailValidationEnabled: false,
    isTriggerDevEnabled: false,
    isSsoEnabled: false,
    isCredentialSetsEnabled: false,
    isAccessControlEnabled: false,
    isOrganizationsEnabled: false,
    isInboxEnabled: false,
    isE2bEnabled: false,
    isAzureConfigured: false,
    isInvitationsDisabled: false,
    isPublicApiDisabled: false,
    isReactGrabEnabled: false,
    isReactScanEnabled: false,
    getAllowedIntegrationsFromEnv: () => null,
    getAllowedMcpDomainsFromEnv: () => null,
    getCostMultiplier: () => 1,
  }
  return { ...defaults, ...overrides }
}

/** Shared instance with pure defaults, for suites that never override flags. */
export const featureFlagsMock = createFeatureFlagsMock()

View File

@@ -16,7 +16,6 @@
* ```
*/
// API mocks
export {
mockCommonSchemas,
mockConsoleLogger,
@@ -24,16 +23,13 @@ export {
mockKnowledgeSchemas,
setupCommonApiMocks,
} from './api.mock'
// Audit mocks
export { auditMock } from './audit.mock'
// Auth mocks
export {
defaultMockUser,
type MockAuthResult,
type MockUser,
mockAuth,
} from './auth.mock'
// Blocks mocks
export {
blocksMock,
createMockGetBlock,
@@ -42,18 +38,23 @@ export {
mockToolConfigs,
toolsUtilsMock,
} from './blocks.mock'
// Database mocks
export {
createMockDb,
createMockDeleteChain,
createMockSelectChain,
createMockSql,
createMockSqlOperators,
createMockUpdateChain,
databaseMock,
drizzleOrmMock,
} from './database.mock'
// Env mocks
export { createEditWorkflowRegistryMock } from './edit-workflow.mock'
export { createEnvMock, createMockGetEnv, defaultMockEnv, envMock } from './env.mock'
// Executor mocks - use side-effect import: import '@sim/testing/mocks/executor'
// Fetch mocks
export {
createFeatureFlagsMock,
featureFlagsMock,
type MockFeatureFlags,
} from './feature-flags.mock'
export {
createMockFetch,
createMockResponse,
@@ -63,24 +64,21 @@ export {
mockNextFetchResponse,
setupGlobalFetchMock,
} from './fetch.mock'
// Hybrid auth mocks
export { AuthTypeMock, type MockHybridAuthResult, mockHybridAuth } from './hybrid-auth.mock'
// Logger mocks
export { clearLoggerMocks, createMockLogger, getLoggerCalls, loggerMock } from './logger.mock'
// Redis mocks
export { clearRedisMocks, createMockRedis, type MockRedis } from './redis.mock'
// Request mocks
export { createMockFormDataRequest, createMockRequest, requestUtilsMock } from './request.mock'
// Socket mocks
export {
asyncRouteParams,
createMockFormDataRequest,
createMockRequest,
requestUtilsMock,
} from './request.mock'
export {
createMockSocket,
createMockSocketServer,
type MockSocket,
type MockSocketServer,
} from './socket.mock'
// Storage mocks
export { clearStorageMocks, createMockStorage, setupGlobalStorageMocks } from './storage.mock'
// Telemetry mocks
export { telemetryMock } from './telemetry.mock'
// UUID mocks
export { mockCryptoUuid, mockUuid } from './uuid.mock'

View File

@@ -59,6 +59,13 @@ export function createMockFormDataRequest(
})
}
/**
 * Creates the async `params` object used by App Router route handlers.
 *
 * Next.js hands route params to handlers as a promise; wrapping the plain
 * object keeps test fixtures shaped like production input.
 */
export async function asyncRouteParams<T extends Record<string, unknown>>(params: T): Promise<T> {
  return params
}
/**
* Pre-configured mock for @/lib/core/utils/request module.
*