v0.6.48: import csv into tables, subflow fixes, CSP updates

Waleed
2026-04-16 14:08:49 -07:00
committed by GitHub
41 changed files with 3226 additions and 600 deletions

View File

@@ -16,17 +16,34 @@ User arguments: $ARGUMENTS
Read before analyzing:
1. https://react.dev/reference/react/useCallback — official docs on when useCallback is actually needed
## The one rule that matters
`useCallback` is only useful when **something observes the reference**. Ask: does anything care if this function gets a new identity on re-render?
Observers that care about reference stability:
- A `useEffect` that lists the function in its deps array
- A `useMemo` that lists the function in its deps array
- Another `useCallback` that lists the function in its deps array
- A child component wrapped in `React.memo` that receives the function as a prop
If none of those apply — if the function is only called inline, or passed to a non-memoized child, or assigned to a native element event — the reference is unobserved and `useCallback` adds overhead with zero benefit.
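A minimal sketch of the rule, using hypothetical names — the first callback is observed (it sits in an effect's dep array), the second would be unobserved:

```tsx
import { useCallback, useEffect, useState } from 'react'

// Hypothetical component, for illustration only.
function SearchBox({ onResults }: { onResults: (hits: string[]) => void }) {
  const [query, setQuery] = useState('')

  // Observed: this reference sits in the effect's dep array below, so its
  // identity decides when the effect re-runs. useCallback earns its keep.
  const runSearch = useCallback(async () => {
    const res = await fetch(`/api/search?q=${encodeURIComponent(query)}`)
    onResults(await res.json())
  }, [query, onResults])

  useEffect(() => {
    void runSearch()
  }, [runSearch])

  // Unobserved: React never compares handler identity on native elements,
  // so wrapping this handler in useCallback would be pure overhead.
  return <input value={query} onChange={(e) => setQuery(e.target.value)} />
}
```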
## Anti-patterns to detect
1. **useCallback on functions not passed as props or deps**: No benefit if only called within the same component.
2. **useCallback with deps that change every render**: Memoization is wasted.
3. **useCallback on handlers passed to native elements**: `<button onClick={fn}>` doesn't benefit from stable references.
4. **useCallback wrapping functions that return new objects/arrays**: Memoization at the wrong level.
5. **useCallback with empty deps when deps are needed**: Stale closures.
6. **Pairing useCallback + React.memo unnecessarily**: Only optimize when you've measured a problem.
7. **useCallback in hooks that don't need stable references**: Not every hook return needs memoization.
1. **No observer tracks the reference**: The function is only called inline in the same component, or passed to a non-memoized child, or used as a native element handler (`<button onClick={fn}>`). Nothing re-runs or bails out based on reference identity. Remove `useCallback`.
2. **useCallback with deps that change every render**: If a dep is a plain object/array created inline, or state that changes on every interaction, memoization buys nothing — the function gets a new identity anyway.
3. **useCallback on handlers passed only to native elements**: `<button onClick={fn}>` — React never does reference equality on native element props. No benefit.
4. **useCallback wrapping functions that return new objects/arrays**: Stable function identity, unstable return value — memoization is at the wrong level. Use `useMemo` on the return value instead, or restructure (see the sketch after this list).
5. **useCallback with empty deps when deps are needed**: Stale closure — reads initial values forever. This is a correctness bug, not just a performance issue.
6. **Pairing useCallback + React.memo on trivially cheap renders**: If the child renders in < 1ms and re-renders rarely, the memo infrastructure costs more than it saves.
Note: This codebase uses a ref pattern for stable callbacks (`useRef` + empty deps). That pattern is correct — don't flag it.
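For anti-pattern 4, a sketch of the wrong-level memoization and its fix (hook and names are hypothetical):

```tsx
import { useCallback, useMemo } from 'react'

// Hypothetical hook illustrating anti-pattern 4.
function useVisibleIds(ids: string[], hidden: Set<string>) {
  // Wrong level: the function's identity is stable, but each call returns a
  // fresh array, so anything memoizing on the result re-renders anyway.
  const getVisible = useCallback(() => ids.filter((id) => !hidden.has(id)), [ids, hidden])

  // Right level: memoize the value consumers actually observe.
  const visible = useMemo(() => ids.filter((id) => !hidden.has(id)), [ids, hidden])

  return { getVisible, visible }
}
```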
## Patterns that ARE correct — do not flag
- `useCallback` whose result is in a `useEffect` dep array — prevents the effect from re-running on every render
- `useCallback` whose result is in a `useMemo` dep array — prevents the memo from recomputing on every render
- `useCallback` whose result is a dep of another `useCallback` — stabilizes a callback chain
- `useCallback` passed to a `React.memo`-wrapped child — the whole point of the pattern
- This codebase's ref pattern: `useRef` + callback with empty deps that reads the ref inside — correct, do not flag
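A minimal sketch of that ref pattern, under the assumption the codebase's helper looks roughly like this (the name `useStableCallback` is illustrative, not the actual helper):

```tsx
import { useCallback, useEffect, useRef } from 'react'

// The ref always holds the latest callback; the returned wrapper keeps a
// single identity for the component's whole lifetime.
function useStableCallback<Args extends unknown[], R>(fn: (...args: Args) => R) {
  const fnRef = useRef(fn)

  useEffect(() => {
    fnRef.current = fn
  })

  // Empty deps are safe: the body reads the ref at call time, so it never
  // closes over a stale `fn`.
  return useCallback((...args: Args) => fnRef.current(...args), [])
}
```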
## Steps

View File

@@ -0,0 +1,295 @@
/**
* @vitest-environment node
*/
import { NextRequest } from 'next/server'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import type { TableDefinition } from '@/lib/table'
const {
mockCheckSessionOrInternalAuth,
mockCheckAccess,
mockBatchInsertRows,
mockReplaceTableRows,
} = vi.hoisted(() => ({
mockCheckSessionOrInternalAuth: vi.fn(),
mockCheckAccess: vi.fn(),
mockBatchInsertRows: vi.fn(),
mockReplaceTableRows: vi.fn(),
}))
vi.mock('@/lib/auth/hybrid', () => ({
AuthType: { SESSION: 'session', API_KEY: 'api_key', INTERNAL_JWT: 'internal_jwt' },
checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
}))
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('req-test-123'),
}))
vi.mock('@/lib/core/utils/uuid', () => ({
generateId: vi.fn().mockReturnValue('deadbeefcafef00d'),
generateShortId: vi.fn().mockReturnValue('short-id'),
}))
vi.mock('@/app/api/table/utils', async () => {
const { NextResponse } = await import('next/server')
return {
checkAccess: mockCheckAccess,
accessError: (result: { status: number }) => {
const message = result.status === 404 ? 'Table not found' : 'Access denied'
return NextResponse.json({ error: message }, { status: result.status })
},
}
})
/**
* The route imports `batchInsertRows` and `replaceTableRows` from the barrel,
* which forwards them from `./service`. Mocking the service module replaces
* both without having to touch the other real helpers (`parseCsvBuffer`,
* `coerceRowsForTable`, etc.) exported through the barrel.
*/
vi.mock('@/lib/table/service', () => ({
batchInsertRows: mockBatchInsertRows,
replaceTableRows: mockReplaceTableRows,
}))
import { POST } from '@/app/api/table/[tableId]/import-csv/route'
function createCsvFile(contents: string, name = 'data.csv', type = 'text/csv'): File {
return new File([contents], name, { type })
}
function createFormData(
file: File,
options?: {
workspaceId?: string | null
mode?: string | null
mapping?: unknown
}
): FormData {
const form = new FormData()
form.append('file', file)
if (options?.workspaceId !== null) {
form.append('workspaceId', options?.workspaceId ?? 'workspace-1')
}
if (options?.mode !== null) {
form.append('mode', options?.mode ?? 'append')
}
if (options?.mapping !== undefined) {
form.append(
'mapping',
typeof options.mapping === 'string' ? options.mapping : JSON.stringify(options.mapping)
)
}
return form
}
function buildTable(overrides: Partial<TableDefinition> = {}): TableDefinition {
return {
id: 'tbl_1',
name: 'People',
description: null,
schema: {
columns: [
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
],
},
metadata: null,
rowCount: 0,
maxRows: 100,
workspaceId: 'workspace-1',
createdBy: 'user-1',
archivedAt: null,
createdAt: new Date('2024-01-01'),
updatedAt: new Date('2024-01-01'),
...overrides,
}
}
async function callPost(form: FormData, { tableId }: { tableId: string } = { tableId: 'tbl_1' }) {
const req = new NextRequest(`http://localhost:3000/api/table/${tableId}/import-csv`, {
method: 'POST',
body: form,
})
return POST(req, { params: Promise.resolve({ tableId }) })
}
describe('POST /api/table/[tableId]/import-csv', () => {
beforeEach(() => {
vi.clearAllMocks()
mockCheckSessionOrInternalAuth.mockResolvedValue({
success: true,
userId: 'user-1',
authType: 'session',
})
mockCheckAccess.mockResolvedValue({ ok: true, table: buildTable() })
mockBatchInsertRows.mockImplementation(async (data: { rows: unknown[] }) =>
data.rows.map((_, i) => ({ id: `row_${i}` }))
)
mockReplaceTableRows.mockResolvedValue({ deletedCount: 0, insertedCount: 0 })
})
it('returns 401 when the user is not authenticated', async () => {
mockCheckSessionOrInternalAuth.mockResolvedValueOnce({
success: false,
error: 'Authentication required',
})
const response = await callPost(createFormData(createCsvFile('name,age\nAlice,30')))
expect(response.status).toBe(401)
})
it('returns 400 when the mode is invalid', async () => {
const response = await callPost(
createFormData(createCsvFile('name,age\nAlice,30'), { mode: 'bogus' })
)
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/Invalid mode/)
})
it('returns 403 when the user lacks workspace write access', async () => {
mockCheckAccess.mockResolvedValueOnce({ ok: false, status: 403 })
const response = await callPost(createFormData(createCsvFile('name,age\nAlice,30')))
expect(response.status).toBe(403)
})
it('returns 400 when the target table is archived', async () => {
mockCheckAccess.mockResolvedValueOnce({
ok: true,
table: buildTable({ archivedAt: new Date('2024-01-02') }),
})
const response = await callPost(createFormData(createCsvFile('name,age\nAlice,30')))
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/archived/i)
})
it('returns 400 when the CSV is missing a required column', async () => {
const response = await callPost(createFormData(createCsvFile('age\n30')))
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/missing required columns/i)
expect(data.details?.missingRequired).toEqual(['name'])
expect(mockBatchInsertRows).not.toHaveBeenCalled()
})
it('appends rows via batchInsertRows', async () => {
const response = await callPost(
createFormData(createCsvFile('name,age\nAlice,30\nBob,40'), { mode: 'append' })
)
expect(response.status).toBe(200)
const data = await response.json()
expect(data.data.mode).toBe('append')
expect(data.data.insertedCount).toBe(2)
expect(mockBatchInsertRows).toHaveBeenCalledTimes(1)
const callArgs = mockBatchInsertRows.mock.calls[0][0] as { rows: unknown[] }
expect(callArgs.rows).toEqual([
{ name: 'Alice', age: 30 },
{ name: 'Bob', age: 40 },
])
expect(mockReplaceTableRows).not.toHaveBeenCalled()
})
it('rejects append when it would exceed maxRows', async () => {
mockCheckAccess.mockResolvedValueOnce({
ok: true,
table: buildTable({ rowCount: 99, maxRows: 100 }),
})
const response = await callPost(
createFormData(createCsvFile('name,age\nAlice,30\nBob,40'), { mode: 'append' })
)
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/exceed table row limit/)
expect(mockBatchInsertRows).not.toHaveBeenCalled()
})
it('replaces rows via replaceTableRows', async () => {
mockReplaceTableRows.mockResolvedValueOnce({ deletedCount: 5, insertedCount: 2 })
const response = await callPost(
createFormData(createCsvFile('name,age\nAlice,30\nBob,40'), { mode: 'replace' })
)
expect(response.status).toBe(200)
const data = await response.json()
expect(data.data.mode).toBe('replace')
expect(data.data.deletedCount).toBe(5)
expect(data.data.insertedCount).toBe(2)
expect(mockReplaceTableRows).toHaveBeenCalledTimes(1)
expect(mockBatchInsertRows).not.toHaveBeenCalled()
})
it('uses an explicit mapping when provided', async () => {
const response = await callPost(
createFormData(createCsvFile('First Name,Years\nAlice,30\nBob,40', 'people.csv'), {
mode: 'append',
mapping: { 'First Name': 'name', Years: 'age' },
})
)
expect(response.status).toBe(200)
const data = await response.json()
expect(data.data.mappedColumns).toEqual(['First Name', 'Years'])
const callArgs = mockBatchInsertRows.mock.calls[0][0] as { rows: unknown[] }
expect(callArgs.rows).toEqual([
{ name: 'Alice', age: 30 },
{ name: 'Bob', age: 40 },
])
})
it('returns 400 when the mapping targets a non-existent column', async () => {
const response = await callPost(
createFormData(createCsvFile('a\nAlice'), {
mode: 'append',
mapping: { a: 'nonexistent' },
})
)
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/do not exist on the table/)
})
it('returns 400 when a mapping value is not a string or null', async () => {
const response = await callPost(
createFormData(createCsvFile('name,age\nAlice,30'), {
mode: 'append',
mapping: { name: 42 },
})
)
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/Mapping values must be/)
})
it('surfaces unique violations from batchInsertRows as 400', async () => {
mockBatchInsertRows.mockRejectedValueOnce(
new Error('Row 1: Column "name" must be unique. Value "Alice" already exists in row row_xxx')
)
const response = await callPost(
createFormData(createCsvFile('name,age\nAlice,30'), { mode: 'append' })
)
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/must be unique/)
expect(data.data?.insertedCount).toBe(0)
})
it('accepts TSV files', async () => {
const response = await callPost(
createFormData(
createCsvFile('name\tage\nAlice\t30', 'data.tsv', 'text/tab-separated-values'),
{ mode: 'append' }
)
)
expect(response.status).toBe(200)
expect(mockBatchInsertRows).toHaveBeenCalledTimes(1)
})
it('returns 400 for unsupported file extensions', async () => {
const response = await callPost(
createFormData(createCsvFile('name,age', 'data.json', 'application/json'))
)
expect(response.status).toBe(400)
const data = await response.json()
expect(data.error).toMatch(/CSV and TSV/)
})
})

View File

@@ -0,0 +1,267 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { generateId } from '@/lib/core/utils/uuid'
import {
batchInsertRows,
buildAutoMapping,
CSV_MAX_BATCH_SIZE,
CSV_MAX_FILE_SIZE_BYTES,
type CsvHeaderMapping,
CsvImportValidationError,
coerceRowsForTable,
parseCsvBuffer,
replaceTableRows,
validateMapping,
} from '@/lib/table'
import { accessError, checkAccess } from '@/app/api/table/utils'
const logger = createLogger('TableImportCSVExisting')
const IMPORT_MODES = new Set(['append', 'replace'])
interface RouteParams {
params: Promise<{ tableId: string }>
}
export async function POST(request: NextRequest, { params }: RouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const formData = await request.formData()
const file = formData.get('file')
const workspaceId = formData.get('workspaceId') as string | null
const rawMode = (formData.get('mode') as string | null) ?? 'append'
const rawMapping = formData.get('mapping') as string | null
if (!file || !(file instanceof File)) {
return NextResponse.json({ error: 'CSV file is required' }, { status: 400 })
}
if (file.size > CSV_MAX_FILE_SIZE_BYTES) {
return NextResponse.json(
{
error: `File exceeds maximum allowed size of ${CSV_MAX_FILE_SIZE_BYTES / (1024 * 1024)} MB`,
},
{ status: 400 }
)
}
if (!workspaceId) {
return NextResponse.json({ error: 'Workspace ID is required' }, { status: 400 })
}
if (!IMPORT_MODES.has(rawMode)) {
return NextResponse.json(
{ error: `Invalid mode "${rawMode}". Must be "append" or "replace".` },
{ status: 400 }
)
}
const mode = rawMode as 'append' | 'replace'
const ext = file.name.split('.').pop()?.toLowerCase()
if (ext !== 'csv' && ext !== 'tsv') {
return NextResponse.json({ error: 'Only CSV and TSV files are supported' }, { status: 400 })
}
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
const { table } = accessResult
if (table.workspaceId !== workspaceId) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
if (table.archivedAt) {
return NextResponse.json({ error: 'Cannot import into an archived table' }, { status: 400 })
}
let mapping: CsvHeaderMapping | undefined
if (rawMapping) {
try {
const parsed = JSON.parse(rawMapping)
if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) {
return NextResponse.json(
{ error: 'mapping must be a JSON object mapping CSV headers to column names' },
{ status: 400 }
)
}
mapping = parsed as CsvHeaderMapping
} catch {
return NextResponse.json({ error: 'mapping must be valid JSON' }, { status: 400 })
}
}
const buffer = Buffer.from(await file.arrayBuffer())
const delimiter = ext === 'tsv' ? '\t' : ','
const { headers, rows } = await parseCsvBuffer(buffer, delimiter)
const effectiveMapping = mapping ?? buildAutoMapping(headers, table.schema)
let validation: ReturnType<typeof validateMapping>
try {
validation = validateMapping({
csvHeaders: headers,
mapping: effectiveMapping,
tableSchema: table.schema,
})
} catch (err) {
if (err instanceof CsvImportValidationError) {
return NextResponse.json({ error: err.message, details: err.details }, { status: 400 })
}
throw err
}
if (validation.mappedHeaders.length === 0) {
return NextResponse.json(
{
error: `No CSV headers map to columns on the table. CSV headers: ${headers.join(', ')}. Table columns: ${table.schema.columns.map((c) => c.name).join(', ')}`,
},
{ status: 400 }
)
}
const coerced = coerceRowsForTable(rows, table.schema, validation.effectiveMap)
if (mode === 'append') {
if (table.rowCount + coerced.length > table.maxRows) {
const deficit = table.rowCount + coerced.length - table.maxRows
return NextResponse.json(
{
error: `Append would exceed table row limit (${table.maxRows}). Currently ${table.rowCount} rows, ${coerced.length} new rows, ${deficit} over.`,
},
{ status: 400 }
)
}
let inserted = 0
try {
for (let i = 0; i < coerced.length; i += CSV_MAX_BATCH_SIZE) {
const batch = coerced.slice(i, i + CSV_MAX_BATCH_SIZE)
const batchRequestId = generateId().slice(0, 8)
const result = await batchInsertRows(
{
tableId: table.id,
rows: batch,
workspaceId,
userId: authResult.userId,
},
table,
batchRequestId
)
inserted += result.length
}
} catch (err) {
const message = err instanceof Error ? err.message : String(err)
logger.warn(`[${requestId}] Append failed mid-import for table ${tableId}`, {
inserted,
total: coerced.length,
error: message,
})
const isClientError =
message.includes('row limit') ||
message.includes('Insufficient capacity') ||
message.includes('Schema validation') ||
message.includes('must be unique') ||
message.includes('Row size exceeds') ||
/^Row \d+:/.test(message)
return NextResponse.json(
{
error: isClientError ? message : 'Failed to import CSV',
data: { insertedCount: inserted },
},
{ status: isClientError ? 400 : 500 }
)
}
logger.info(`[${requestId}] Append CSV imported`, {
tableId: table.id,
fileName: file.name,
mode,
inserted,
mappedColumns: validation.mappedHeaders.length,
skippedHeaders: validation.skippedHeaders.length,
})
return NextResponse.json({
success: true,
data: {
tableId: table.id,
mode,
insertedCount: inserted,
mappedColumns: validation.mappedHeaders,
skippedHeaders: validation.skippedHeaders,
unmappedColumns: validation.unmappedColumns,
sourceFile: file.name,
},
})
}
try {
const result = await replaceTableRows(
{ tableId: table.id, rows: coerced, workspaceId, userId: authResult.userId },
table,
requestId
)
logger.info(`[${requestId}] Replace CSV imported`, {
tableId: table.id,
fileName: file.name,
mode,
deleted: result.deletedCount,
inserted: result.insertedCount,
mappedColumns: validation.mappedHeaders.length,
})
return NextResponse.json({
success: true,
data: {
tableId: table.id,
mode,
deletedCount: result.deletedCount,
insertedCount: result.insertedCount,
mappedColumns: validation.mappedHeaders,
skippedHeaders: validation.skippedHeaders,
unmappedColumns: validation.unmappedColumns,
sourceFile: file.name,
},
})
} catch (err) {
const message = err instanceof Error ? err.message : String(err)
const isClientError =
message.includes('row limit') ||
message.includes('Schema validation') ||
message.includes('must be unique') ||
message.includes('Row size exceeds') ||
/^Row \d+:/.test(message)
if (isClientError) {
return NextResponse.json({ error: message }, { status: 400 })
}
throw err
}
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
logger.error(`[${requestId}] CSV import into existing table failed:`, error)
const isClientError =
message.includes('CSV file has no') ||
message.includes('already exists') ||
message.includes('Invalid column name')
return NextResponse.json(
{ error: isClientError ? message : 'Failed to import CSV' },
{ status: isClientError ? 400 : 500 }
)
}
}

View File

@@ -5,157 +5,22 @@ import { generateRequestId } from '@/lib/core/utils/request'
import { generateId } from '@/lib/core/utils/uuid'
import {
batchInsertRows,
CSV_MAX_BATCH_SIZE,
CSV_MAX_FILE_SIZE_BYTES,
coerceRowsForTable,
createTable,
deleteTable,
getWorkspaceTableLimits,
inferSchemaFromCsv,
parseCsvBuffer,
sanitizeName,
type TableSchema,
} from '@/lib/table'
import type { ColumnDefinition, RowData } from '@/lib/table/types'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { normalizeColumn } from '@/app/api/table/utils'
const logger = createLogger('TableImportCSV')
const MAX_CSV_FILE_SIZE = 50 * 1024 * 1024
const MAX_BATCH_SIZE = 1000
const SCHEMA_SAMPLE_SIZE = 100
type ColumnType = 'string' | 'number' | 'boolean' | 'date'
async function parseCsvBuffer(
buffer: Buffer,
delimiter = ','
): Promise<{ headers: string[]; rows: Record<string, unknown>[] }> {
const { parse } = await import('csv-parse/sync')
const parsed = parse(buffer.toString('utf-8'), {
columns: true,
skip_empty_lines: true,
trim: true,
relax_column_count: true,
relax_quotes: true,
skip_records_with_error: true,
cast: false,
delimiter,
}) as Record<string, unknown>[]
if (parsed.length === 0) {
throw new Error('CSV file has no data rows')
}
const headers = Object.keys(parsed[0])
if (headers.length === 0) {
throw new Error('CSV file has no headers')
}
return { headers, rows: parsed }
}
function inferColumnType(values: unknown[]): ColumnType {
const nonEmpty = values.filter((v) => v !== null && v !== undefined && v !== '')
if (nonEmpty.length === 0) return 'string'
const allNumber = nonEmpty.every((v) => {
const n = Number(v)
return !Number.isNaN(n) && String(v).trim() !== ''
})
if (allNumber) return 'number'
const allBoolean = nonEmpty.every((v) => {
const s = String(v).toLowerCase()
return s === 'true' || s === 'false'
})
if (allBoolean) return 'boolean'
const isoDatePattern = /^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}(:\d{2})?)?/
const allDate = nonEmpty.every((v) => {
const s = String(v)
return isoDatePattern.test(s) && !Number.isNaN(Date.parse(s))
})
if (allDate) return 'date'
return 'string'
}
function inferSchema(headers: string[], rows: Record<string, unknown>[]): ColumnDefinition[] {
const sample = rows.slice(0, SCHEMA_SAMPLE_SIZE)
const seen = new Set<string>()
return headers.map((name) => {
let colName = sanitizeName(name)
let suffix = 2
while (seen.has(colName.toLowerCase())) {
colName = `${sanitizeName(name)}_${suffix}`
suffix++
}
seen.add(colName.toLowerCase())
return {
name: colName,
type: inferColumnType(sample.map((r) => r[name])),
}
})
}
/**
* Strips non-alphanumeric characters (except underscore), collapses runs of
* underscores, and ensures the name starts with a letter or underscore.
* Used for both table names and column names to satisfy NAME_PATTERN.
*/
function sanitizeName(raw: string, fallbackPrefix = 'col'): string {
let name = raw
.trim()
.replace(/[^a-zA-Z0-9_]/g, '_')
.replace(/_+/g, '_')
.replace(/^_+|_+$/g, '')
if (!name || /^\d/.test(name)) {
name = `${fallbackPrefix}_${name}`
}
return name
}
function coerceValue(value: unknown, colType: ColumnType): string | number | boolean | null {
if (value === null || value === undefined || value === '') return null
switch (colType) {
case 'number': {
const n = Number(value)
return Number.isNaN(n) ? null : n
}
case 'boolean': {
const s = String(value).toLowerCase()
if (s === 'true') return true
if (s === 'false') return false
return null
}
case 'date': {
const d = new Date(String(value))
return Number.isNaN(d.getTime()) ? String(value) : d.toISOString()
}
default:
return String(value)
}
}
function coerceRows(
rows: Record<string, unknown>[],
columns: ColumnDefinition[],
headerToColumn: Map<string, string>
): RowData[] {
const colTypeMap = new Map(columns.map((c) => [c.name, c.type as ColumnType]))
return rows.map((row) => {
const coerced: RowData = {}
for (const [header, value] of Object.entries(row)) {
const colName = headerToColumn.get(header)
if (colName) {
coerced[colName] = coerceValue(value, colTypeMap.get(colName) ?? 'string')
}
}
return coerced
})
}
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
@@ -173,9 +38,11 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'CSV file is required' }, { status: 400 })
}
if (file.size > MAX_CSV_FILE_SIZE) {
if (file.size > CSV_MAX_FILE_SIZE_BYTES) {
return NextResponse.json(
{ error: `File exceeds maximum allowed size of ${MAX_CSV_FILE_SIZE / (1024 * 1024)} MB` },
{
error: `File exceeds maximum allowed size of ${CSV_MAX_FILE_SIZE_BYTES / (1024 * 1024)} MB`,
},
{ status: 400 }
)
}
@@ -198,9 +65,7 @@ export async function POST(request: NextRequest) {
const delimiter = ext === 'tsv' ? '\t' : ','
const { headers, rows } = await parseCsvBuffer(buffer, delimiter)
const columns = inferSchema(headers, rows)
const headerToColumn = new Map(headers.map((h, i) => [h, columns[i].name]))
const { columns, headerToColumn } = inferSchemaFromCsv(headers, rows)
const tableName = sanitizeName(file.name.replace(/\.[^.]+$/, ''), 'imported_table')
const planLimits = await getWorkspaceTableLimits(workspaceId)
@@ -222,10 +87,10 @@ export async function POST(request: NextRequest) {
)
try {
const coerced = coerceRows(rows, columns, headerToColumn)
const coerced = coerceRowsForTable(rows, normalizedSchema, headerToColumn)
let inserted = 0
for (let i = 0; i < coerced.length; i += MAX_BATCH_SIZE) {
const batch = coerced.slice(i, i + MAX_BATCH_SIZE)
for (let i = 0; i < coerced.length; i += CSV_MAX_BATCH_SIZE) {
const batch = coerced.slice(i, i + CSV_MAX_BATCH_SIZE)
const batchRequestId = generateId().slice(0, 8)
const result = await batchInsertRows(
{ tableId: table.id, rows: batch, workspaceId, userId: authResult.userId },

View File

@@ -25,6 +25,7 @@ export function InlineRenameInput({ value, onChange, onSubmit, onCancel }: Inlin
ref={inputRef}
type='text'
value={value}
size={Math.max(value.length + 2, 5)}
onChange={(e) => onChange(e.target.value)}
onKeyDown={(e) => {
if (e.key === 'Enter') onSubmit()
@@ -32,7 +33,7 @@ export function InlineRenameInput({ value, onChange, onSubmit, onCancel }: Inlin
}}
onBlur={onSubmit}
onClick={(e) => e.stopPropagation()}
className='min-w-0 flex-1 truncate border-0 bg-transparent p-0 font-medium text-[var(--text-body)] text-sm outline-none focus:outline-none focus:ring-0'
className='min-w-0 border-0 bg-transparent p-0 font-medium text-[var(--text-body)] text-sm outline-none focus:outline-none focus:ring-0'
/>
)
}

View File

@@ -1,6 +1,6 @@
'use client'
import { useCallback, useEffect, useRef, useState } from 'react'
import { memo, useEffect, useRef, useState } from 'react'
import {
Button,
Check,
@@ -50,7 +50,12 @@ interface MessageActionsProps {
requestId?: string
}
export function MessageActions({ content, chatId, userQuery, requestId }: MessageActionsProps) {
export const MessageActions = memo(function MessageActions({
content,
chatId,
userQuery,
requestId,
}: MessageActionsProps) {
const [copied, setCopied] = useState(false)
const [copiedRequestId, setCopiedRequestId] = useState(false)
const [pendingFeedback, setPendingFeedback] = useState<'up' | 'down' | null>(null)
@@ -70,7 +75,7 @@ export function MessageActions({ content, chatId, userQuery, requestId }: Messag
}
}, [])
const copyToClipboard = useCallback(async () => {
const copyToClipboard = async () => {
if (!content) return
const text = toPlainText(content)
if (!text) return
@@ -84,9 +89,9 @@ export function MessageActions({ content, chatId, userQuery, requestId }: Messag
} catch {
/* clipboard unavailable */
}
}, [content])
}
const copyRequestId = useCallback(async () => {
const copyRequestId = async () => {
if (!requestId) return
try {
await navigator.clipboard.writeText(requestId)
@@ -98,20 +103,17 @@ export function MessageActions({ content, chatId, userQuery, requestId }: Messag
} catch {
/* clipboard unavailable */
}
}, [requestId])
}
const handleFeedbackClick = useCallback(
(type: 'up' | 'down') => {
if (chatId && userQuery) {
setPendingFeedback(type)
setFeedbackText('')
setCopiedRequestId(false)
}
},
[chatId, userQuery]
)
const handleFeedbackClick = (type: 'up' | 'down') => {
if (chatId && userQuery) {
setPendingFeedback(type)
setFeedbackText('')
setCopiedRequestId(false)
}
}
const handleSubmitFeedback = useCallback(() => {
const handleSubmitFeedback = () => {
if (!pendingFeedback || !chatId || !userQuery) return
const text = feedbackText.trim()
if (!text) {
@@ -128,15 +130,15 @@ export function MessageActions({ content, chatId, userQuery, requestId }: Messag
})
setPendingFeedback(null)
setFeedbackText('')
}, [pendingFeedback, chatId, userQuery, content, feedbackText])
}
const handleModalClose = useCallback((open: boolean) => {
const handleModalClose = (open: boolean) => {
if (!open) {
setPendingFeedback(null)
setFeedbackText('')
setCopiedRequestId(false)
}
}, [])
}
if (!content) return null
@@ -224,4 +226,4 @@ export function MessageActions({ content, chatId, userQuery, requestId }: Messag
</Modal>
</>
)
}
})

View File

@@ -40,6 +40,7 @@ export interface HeaderAction {
icon?: React.ElementType
onClick: () => void
disabled?: boolean
active?: boolean
}
export interface CreateAction {
@@ -103,7 +104,13 @@ export const ResourceHeader = memo(function ResourceHeader({
onClick={action.onClick}
disabled={action.disabled}
variant='subtle'
className='px-2 py-1 text-caption'
className={cn(
'px-2 py-1 text-caption',
action.active !== undefined && 'rounded-lg',
action.active === true &&
'bg-[var(--surface-active)] hover-hover:bg-[var(--surface-active)]',
action.active === false && 'hover-hover:bg-[var(--surface-hover)]'
)}
>
{ActionIcon && (
<ActionIcon

View File

@@ -222,7 +222,7 @@ const SortDropdown = memo(function SortDropdown({ config }: { config: SortConfig
Sort
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align='end'>
<DropdownMenuContent align='start'>
{options.map((option) => {
const isActive = active?.column === option.id
const Icon = option.icon

View File

@@ -1,6 +1,9 @@
import {
type ComponentProps,
type Dispatch,
memo,
type ReactNode,
type SetStateAction,
useCallback,
useEffect,
useMemo,
@@ -134,6 +137,116 @@ function useResourceNameLookup(workspaceId: string): Map<string, string> {
}, [workflows, tables, files, knowledgeBases, folders])
}
interface ResourceTabItemProps {
resource: MothershipResource
idx: number
isActive: boolean
isHovered: boolean
isDragging: boolean
isSelected: boolean
showGapBefore: boolean
showGapAfter: boolean
displayName: string
chatId?: string
onDragStart: (e: React.DragEvent, idx: number) => void
onDragOver: (e: React.DragEvent, idx: number) => void
onDragLeave: () => void
onDragEnd: () => void
onTabClick: (e: React.MouseEvent, idx: number) => void
setHoveredTabId: Dispatch<SetStateAction<string | null>>
onRemove: (e: React.MouseEvent, resource: MothershipResource) => void
}
const ResourceTabItem = memo(function ResourceTabItem({
resource,
idx,
isActive,
isHovered,
isDragging,
isSelected,
showGapBefore,
showGapAfter,
displayName,
chatId,
onDragStart,
onDragOver,
onDragLeave,
onDragEnd,
onTabClick,
setHoveredTabId,
onRemove,
}: ResourceTabItemProps) {
const config = getResourceConfig(resource.type)
return (
<div className='relative flex shrink-0 items-center'>
{showGapBefore && (
<div className='-translate-x-1/2 -translate-y-1/2 pointer-events-none absolute top-1/2 left-0 z-10 h-[16px] w-[2px] rounded-full bg-[var(--text-subtle)]' />
)}
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='subtle'
draggable
data-resource-tab-id={resource.id}
onDragStart={(e) => onDragStart(e, idx)}
onDragOver={(e) => onDragOver(e, idx)}
onDragLeave={onDragLeave}
onDragEnd={onDragEnd}
onMouseDown={(e) => {
if (e.button === 1) {
e.preventDefault()
if (chatId) onRemove(e, resource)
}
}}
onClick={(e) => onTabClick(e, idx)}
onMouseEnter={() => setHoveredTabId(resource.id)}
onMouseLeave={() => setHoveredTabId(null)}
className={cn(
'group relative shrink-0 bg-transparent px-2 py-1 pr-[22px] text-caption transition-opacity duration-150',
isActive && 'bg-[var(--surface-4)]',
isSelected && !isActive && 'bg-[var(--surface-3)]',
isDragging && 'opacity-30'
)}
>
{config.renderTabIcon(resource, 'mr-1.5 h-[14px] w-[14px]')}
{displayName}
{(isHovered || isActive) && chatId && (
<span
role='button'
tabIndex={-1}
onClick={(e) => onRemove(e, resource)}
onKeyDown={(e) => {
if (e.key === 'Enter') onRemove(e as unknown as React.MouseEvent, resource)
}}
className='-translate-y-1/2 absolute top-1/2 right-[4px] flex items-center justify-center rounded-sm p-[1px] hover-hover:bg-[var(--surface-5)]'
aria-label={`Close ${displayName}`}
>
<svg
className='h-[10px] w-[10px] text-[var(--text-icon)]'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='2.5'
strokeLinecap='round'
strokeLinejoin='round'
>
<path d='M18 6 6 18M6 6l12 12' />
</svg>
</span>
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='bottom'>
<p>{displayName}</p>
</Tooltip.Content>
</Tooltip.Root>
{showGapAfter && (
<div className='-translate-y-1/2 pointer-events-none absolute top-1/2 right-0 z-10 h-[16px] w-[2px] translate-x-1/2 rounded-full bg-[var(--text-subtle)]' />
)}
</div>
)
})
interface ResourceTabsProps {
workspaceId: string
chatId?: string
@@ -476,7 +589,6 @@ export function ResourceTabs({
onDrop={handleDrop}
>
{resources.map((resource, idx) => {
const config = getResourceConfig(resource.type)
const displayName = nameLookup.get(`${resource.type}:${resource.id}`) ?? resource.title
const isActive = activeId === resource.id
const isHovered = hoveredTabId === resource.id
@@ -494,73 +606,26 @@ export function ResourceTabs({
draggedIdx !== idx
return (
<div key={resource.id} className='relative flex shrink-0 items-center'>
{showGapBefore && (
<div className='-translate-x-1/2 -translate-y-1/2 pointer-events-none absolute top-1/2 left-0 z-10 h-[16px] w-[2px] rounded-full bg-[var(--text-subtle)]' />
)}
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='subtle'
draggable
data-resource-tab-id={resource.id}
onDragStart={(e) => handleDragStart(e, idx)}
onDragOver={(e) => handleDragOver(e, idx)}
onDragLeave={handleDragLeave}
onDragEnd={handleDragEnd}
onMouseDown={(e) => {
if (e.button === 1) {
e.preventDefault()
if (chatId) handleRemove(e, resource)
}
}}
onClick={(e) => handleTabClick(e, idx)}
onMouseEnter={() => setHoveredTabId(resource.id)}
onMouseLeave={() => setHoveredTabId(null)}
className={cn(
'group relative shrink-0 bg-transparent px-2 py-1 pr-[22px] text-caption transition-opacity duration-150',
isActive && 'bg-[var(--surface-4)]',
isSelected && !isActive && 'bg-[var(--surface-3)]',
isDragging && 'opacity-30'
)}
>
{config.renderTabIcon(resource, 'mr-1.5 h-[14px] w-[14px]')}
{displayName}
{(isHovered || isActive) && chatId && (
<span
role='button'
tabIndex={-1}
onClick={(e) => handleRemove(e, resource)}
onKeyDown={(e) => {
if (e.key === 'Enter')
handleRemove(e as unknown as React.MouseEvent, resource)
}}
className='-translate-y-1/2 absolute top-1/2 right-[4px] flex items-center justify-center rounded-sm p-[1px] hover-hover:bg-[var(--surface-5)]'
aria-label={`Close ${displayName}`}
>
<svg
className='h-[10px] w-[10px] text-[var(--text-icon)]'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='2.5'
strokeLinecap='round'
strokeLinejoin='round'
>
<path d='M18 6 6 18M6 6l12 12' />
</svg>
</span>
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='bottom'>
<p>{displayName}</p>
</Tooltip.Content>
</Tooltip.Root>
{showGapAfter && (
<div className='-translate-y-1/2 pointer-events-none absolute top-1/2 right-0 z-10 h-[16px] w-[2px] translate-x-1/2 rounded-full bg-[var(--text-subtle)]' />
)}
</div>
<ResourceTabItem
key={resource.id}
resource={resource}
idx={idx}
isActive={isActive}
isHovered={isHovered}
isDragging={isDragging}
isSelected={isSelected}
showGapBefore={showGapBefore}
showGapAfter={showGapAfter}
displayName={displayName}
chatId={chatId}
onDragStart={handleDragStart}
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onDragEnd={handleDragEnd}
onTabClick={handleTabClick}
setHoveredTabId={setHoveredTabId}
onRemove={handleRemove}
/>
)
})}
</div>

View File

@@ -1,6 +1,6 @@
'use client'
import { forwardRef, memo, useCallback, useEffect, useState } from 'react'
import { forwardRef, memo, useState } from 'react'
import type { FilePreviewSession } from '@/lib/copilot/request/session'
import { cn } from '@/lib/core/utils/cn'
import { getFileExtension } from '@/lib/uploads/utils/file-utils'
@@ -80,15 +80,13 @@ export const MothershipView = memo(
: undefined
const [previewMode, setPreviewMode] = useState<PreviewMode>('preview')
const [prevActiveId, setPrevActiveId] = useState<string | null | undefined>(active?.id)
const handleCyclePreview = useCallback(() => setPreviewMode((m) => PREVIEW_CYCLE[m]), [])
const handleCyclePreview = () => setPreviewMode((m) => PREVIEW_CYCLE[m])
useEffect(() => {
if (active?.id !== prevActiveId) {
setPrevActiveId(active?.id)
setPreviewMode('preview')
}
}, [active?.id, prevActiveId])
const [prevActiveId, setPrevActiveId] = useState(active?.id)
if (prevActiveId !== active?.id) {
setPrevActiveId(active?.id)
setPreviewMode('preview')
}
const isActivePreviewable =
canEdit &&

View File

@@ -1,6 +1,6 @@
'use client'
import React from 'react'
import { memo } from 'react'
import {
AudioIcon,
CsvIcon,
@@ -25,7 +25,7 @@ const DROP_OVERLAY_ICONS = [
VideoIcon,
] as const
export const DropOverlay = React.memo(function DropOverlay() {
export const DropOverlay = memo(function DropOverlay() {
return (
<div className='pointer-events-none absolute inset-[6px] z-10 flex items-center justify-center rounded-[14px] border-[1.5px] border-[var(--border-1)] border-dashed bg-[var(--white)] dark:bg-[var(--surface-4)]'>
<div className='flex flex-col items-center gap-2'>

View File

@@ -14,7 +14,7 @@ import {
DropdownMenuContent,
DropdownMenuTrigger,
Library,
Loader,
RefreshCw,
} from '@/components/emcn'
import { DatePicker } from '@/components/emcn/components/date-picker/date-picker'
import { dollarsToCredits } from '@/lib/billing/credits/conversion'
@@ -1086,9 +1086,9 @@ export default function Logs() {
)
const refreshIcon = useMemo(() => {
if (!isVisuallyRefreshing) return Loader
const Spinning = (props: React.SVGProps<SVGSVGElement>) => <Loader {...props} animate />
Spinning.displayName = 'SpinningLoader'
if (!isVisuallyRefreshing) return RefreshCw
const Spinning = (props: React.SVGProps<SVGSVGElement>) => <RefreshCw {...props} animate />
Spinning.displayName = 'SpinningRefresh'
return Spinning
}, [isVisuallyRefreshing])
@@ -1106,7 +1106,7 @@ export default function Logs() {
onClick: handleOpenNotificationSettings,
},
{
label: '',
label: 'Refresh',
icon: refreshIcon,
onClick: handleRefresh,
disabled: isVisuallyRefreshing,
@@ -1114,12 +1114,12 @@ export default function Logs() {
{
label: 'Logs',
onClick: () => setViewMode('logs'),
disabled: !isDashboardView,
active: !isDashboardView,
},
{
label: 'Dashboard',
onClick: () => setViewMode('dashboard'),
disabled: isDashboardView,
active: isDashboardView,
},
],
[

View File

@@ -22,6 +22,7 @@ import {
ModalFooter,
ModalHeader,
Skeleton,
Upload,
} from '@/components/emcn'
import {
ArrowLeft,
@@ -45,6 +46,7 @@ import type { ColumnDefinition, Filter, SortDirection, TableRow as TableRowType
import type { ColumnOption, SortConfig } from '@/app/workspace/[workspaceId]/components'
import { ResourceHeader, ResourceOptionsBar } from '@/app/workspace/[workspaceId]/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ImportCsvDialog } from '@/app/workspace/[workspaceId]/tables/components/import-csv-dialog'
import {
useAddTableColumn,
useBatchCreateTableRows,
@@ -200,6 +202,7 @@ export function Table({
const lastCheckboxRowRef = useRef<number | null>(null)
const [showDeleteTableConfirm, setShowDeleteTableConfirm] = useState(false)
const [deletingColumn, setDeletingColumn] = useState<string | null>(null)
const [isImportCsvOpen, setIsImportCsvOpen] = useState(false)
const [columnWidths, setColumnWidths] = useState<Record<string, number>>({})
const columnWidthsRef = useRef(columnWidths)
@@ -1555,6 +1558,21 @@ export function Table({
[handleAddColumn, addColumnMutation.isPending]
)
const headerActions = useMemo(
() =>
tableData
? [
{
label: 'Import CSV',
icon: Upload,
onClick: () => setIsImportCsvOpen(true),
disabled: userPermissions.canEdit !== true,
},
]
: undefined,
[tableData, userPermissions.canEdit]
)
const activeSortState = useMemo(() => {
if (!queryOptions.sort) return null
const entries = Object.entries(queryOptions.sort)
@@ -1619,7 +1637,12 @@ export function Table({
<div ref={containerRef} className='flex h-full flex-col overflow-hidden'>
{!embedded && (
<>
<ResourceHeader icon={TableIcon} breadcrumbs={breadcrumbs} create={createAction} />
<ResourceHeader
icon={TableIcon}
breadcrumbs={breadcrumbs}
create={createAction}
actions={headerActions}
/>
<ResourceOptionsBar
sort={sortConfig}
@@ -1875,6 +1898,15 @@ export function Table({
</Modal>
)}
{tableData && (
<ImportCsvDialog
open={isImportCsvOpen}
onOpenChange={setIsImportCsvOpen}
workspaceId={workspaceId}
table={tableData}
/>
)}
<Modal
open={deletingColumn !== null}
onOpenChange={(open) => {

View File

@@ -0,0 +1,513 @@
'use client'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import {
Button,
ButtonGroup,
ButtonGroupItem,
Combobox,
type ComboboxOption,
Label,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
toast,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { buildAutoMapping, parseCsvBuffer } from '@/lib/table/csv-import'
import type { TableDefinition } from '@/lib/table/types'
import { type CsvImportMode, useImportCsvIntoTable } from '@/hooks/queries/tables'
const logger = createLogger('ImportCsvDialog')
const MAX_SAMPLE_ROWS = 5
const MAX_EXAMPLES_IN_ERROR = 3
/**
* Sentinel value for the "Do not import" option in the mapping combobox. The
* whitespace is intentional: valid column names must match `NAME_PATTERN`
* (`/^[a-z_][a-z0-9_]*$/i`), so no real column can share this value.
*/
const SKIP_VALUE = '__ skip __'
/**
* Converts the verbose backend error messages into a short, human-friendly
* summary suitable for the modal footer. Specifically collapses repeated
* `Row N: Column "X" must be unique. Value "Y" already exists in row row_abc`
* segments into a single concise summary without internal row IDs.
*/
function summarizeImportError(message: string): string {
const uniqueMatches = [
...message.matchAll(/Column\s+"([^"]+)"\s+must be unique\.\s+Value\s+"([^"]+)"/g),
]
if (uniqueMatches.length > 0) {
const column = uniqueMatches[0][1]
const values = Array.from(new Set(uniqueMatches.map((m) => m[2])))
const preview = values
.slice(0, MAX_EXAMPLES_IN_ERROR)
.map((v) => `"${v}"`)
.join(', ')
const extra = values.length - MAX_EXAMPLES_IN_ERROR
const suffix = extra > 0 ? `, +${extra} more` : ''
return `${values.length} row${values.length === 1 ? '' : 's'} conflict on unique column "${column}" (${preview}${suffix})`
}
const requiredMatch = message.match(/missing required columns?:\s*(.+)/i)
if (requiredMatch) {
return `Missing required column(s): ${requiredMatch[1].replace(/[.;]+$/, '')}`
}
const rowLimitMatch = message.match(/row limit[^.;]*/i)
if (rowLimitMatch) {
return rowLimitMatch[0].trim()
}
const stripped = message.replace(/\s+in row\s+row_[a-f0-9]+/gi, '').trim()
if (stripped.length > 180) return `${stripped.slice(0, 177)}...`
return stripped
}
interface ImportCsvDialogProps {
open: boolean
onOpenChange: (open: boolean) => void
workspaceId: string
table: TableDefinition
onImported?: (result: { insertedCount?: number; deletedCount?: number }) => void
}
interface ParsedCsv {
file: File
headers: string[]
sampleRows: Record<string, unknown>[]
totalRows: number
}
export function ImportCsvDialog({
open,
onOpenChange,
workspaceId,
table,
onImported,
}: ImportCsvDialogProps) {
const [parsed, setParsed] = useState<ParsedCsv | null>(null)
const [parseError, setParseError] = useState<string | null>(null)
const [submitError, setSubmitError] = useState<string | null>(null)
const [parsing, setParsing] = useState(false)
const [mapping, setMapping] = useState<Record<string, string | null>>({})
const [mode, setMode] = useState<CsvImportMode>('append')
const [isDragging, setIsDragging] = useState(false)
const fileInputRef = useRef<HTMLInputElement>(null)
const importMutation = useImportCsvIntoTable()
const resetState = useCallback(() => {
setParsed(null)
setParseError(null)
setSubmitError(null)
setMapping({})
setMode('append')
setIsDragging(false)
setParsing(false)
if (fileInputRef.current) fileInputRef.current.value = ''
}, [])
useEffect(() => {
if (!open) resetState()
}, [open, resetState])
useEffect(() => {
resetState()
}, [table.id, resetState])
const columnOptions: ComboboxOption[] = useMemo(() => {
const options: ComboboxOption[] = [{ label: 'Do not import', value: SKIP_VALUE }]
for (const col of table.schema.columns) {
options.push({
label: col.required ? `${col.name} (required)` : col.name,
value: col.name,
})
}
return options
}, [table.schema.columns])
const handleFileSelected = useCallback(
async (file: File) => {
const ext = file.name.split('.').pop()?.toLowerCase()
if (ext !== 'csv' && ext !== 'tsv') {
setParseError('Only CSV and TSV files are supported')
return
}
setParsing(true)
setParseError(null)
try {
const arrayBuffer = await file.arrayBuffer()
const delimiter = ext === 'tsv' ? '\t' : ','
const { headers, rows } = await parseCsvBuffer(new Uint8Array(arrayBuffer), delimiter)
const autoMapping = buildAutoMapping(headers, table.schema)
setParsed({
file,
headers,
sampleRows: rows.slice(0, MAX_SAMPLE_ROWS),
totalRows: rows.length,
})
setMapping(autoMapping)
} catch (err) {
const message = err instanceof Error ? err.message : 'Failed to parse CSV'
logger.error('CSV parse failed', err)
setParseError(message)
} finally {
setParsing(false)
}
},
[table.schema]
)
const handleFileInputChange = useCallback(
(e: React.ChangeEvent<HTMLInputElement>) => {
const file = e.target.files?.[0]
if (file) void handleFileSelected(file)
},
[handleFileSelected]
)
const handleDragEnter = useCallback((e: React.DragEvent<HTMLButtonElement>) => {
e.preventDefault()
setIsDragging(true)
}, [])
const handleDragOver = useCallback((e: React.DragEvent<HTMLButtonElement>) => {
e.preventDefault()
}, [])
const handleDragLeave = useCallback((e: React.DragEvent<HTMLButtonElement>) => {
e.preventDefault()
setIsDragging(false)
}, [])
const handleDrop = useCallback(
(e: React.DragEvent<HTMLButtonElement>) => {
e.preventDefault()
setIsDragging(false)
const file = e.dataTransfer.files?.[0]
if (file) void handleFileSelected(file)
},
[handleFileSelected]
)
const handleMappingChange = useCallback((header: string, value: string) => {
setSubmitError(null)
setMapping((prev) => ({
...prev,
[header]: value === SKIP_VALUE ? null : value,
}))
}, [])
const handleModeChange = useCallback((value: string) => {
setSubmitError(null)
setMode(value as CsvImportMode)
}, [])
const { missingRequired, duplicateTargets, mappedCount, skipCount } = useMemo(() => {
const mappedTargets = new Map<string, string[]>()
let mapped = 0
let skipped = 0
for (const header of parsed?.headers ?? []) {
const target = mapping[header]
if (!target) {
skipped++
continue
}
mapped++
const existing = mappedTargets.get(target) ?? []
existing.push(header)
mappedTargets.set(target, existing)
}
const dupes = [...mappedTargets.entries()]
.filter(([, headers]) => headers.length > 1)
.map(([col]) => col)
const mappedSet = new Set(mappedTargets.keys())
const missing = table.schema.columns
.filter((c) => c.required && !mappedSet.has(c.name))
.map((c) => c.name)
return {
missingRequired: missing,
duplicateTargets: dupes,
mappedCount: mapped,
skipCount: skipped,
}
}, [mapping, parsed?.headers, table.schema.columns])
const appendCapacityDeficit = useMemo(() => {
if (!parsed || mode !== 'append') return 0
const projected = table.rowCount + parsed.totalRows
return projected > table.maxRows ? projected - table.maxRows : 0
}, [mode, parsed, table.maxRows, table.rowCount])
const replaceCapacityDeficit = useMemo(() => {
if (!parsed || mode !== 'replace') return 0
return parsed.totalRows > table.maxRows ? parsed.totalRows - table.maxRows : 0
}, [mode, parsed, table.maxRows])
const canSubmit =
parsed !== null &&
!importMutation.isPending &&
missingRequired.length === 0 &&
duplicateTargets.length === 0 &&
mappedCount > 0 &&
appendCapacityDeficit === 0 &&
replaceCapacityDeficit === 0
const importCsv = importMutation.mutateAsync
const handleSubmit = useCallback(async () => {
if (!parsed || !canSubmit) return
setSubmitError(null)
try {
const result = await importCsv({
workspaceId,
tableId: table.id,
file: parsed.file,
mode,
mapping,
})
const data = result.data
if (mode === 'append') {
toast.success(`Imported ${data?.insertedCount ?? 0} rows into "${table.name}"`)
} else {
toast.success(
`Replaced rows in "${table.name}": deleted ${data?.deletedCount ?? 0}, inserted ${data?.insertedCount ?? 0}`
)
}
onImported?.({
insertedCount: data?.insertedCount,
deletedCount: data?.deletedCount,
})
onOpenChange(false)
} catch (err) {
const message = err instanceof Error ? err.message : 'Failed to import CSV'
setSubmitError(summarizeImportError(message))
logger.error('CSV import into existing table failed', err)
}
}, [
canSubmit,
importCsv,
mapping,
mode,
onImported,
onOpenChange,
parsed,
table.id,
table.name,
workspaceId,
])
const hasWarning =
missingRequired.length > 0 ||
duplicateTargets.length > 0 ||
appendCapacityDeficit > 0 ||
replaceCapacityDeficit > 0
return (
<Modal open={open} onOpenChange={onOpenChange}>
<ModalContent size='lg'>
<ModalHeader>Import CSV into {table.name}</ModalHeader>
<ModalBody>
{!parsed ? (
<div className='flex flex-col gap-2'>
<Label>Upload CSV</Label>
<Button
type='button'
variant='default'
onClick={() => fileInputRef.current?.click()}
onDragEnter={handleDragEnter}
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onDrop={handleDrop}
disabled={parsing}
className={cn(
'!bg-[var(--surface-1)] hover-hover:!bg-[var(--surface-4)] w-full justify-center border border-[var(--border-1)] border-dashed py-2.5',
isDragging && 'border-[var(--surface-7)]'
)}
>
<input
ref={fileInputRef}
type='file'
accept='.csv,.tsv'
onChange={handleFileInputChange}
className='hidden'
/>
<div className='flex flex-col gap-0.5 text-center'>
<span className='text-[var(--text-primary)]'>
{parsing
? 'Parsing...'
: isDragging
? 'Drop file here'
: 'Drop CSV or TSV here or click to browse'}
</span>
<span className='text-[var(--text-tertiary)] text-xs'>
Map columns to append or replace rows in this table
</span>
</div>
</Button>
{parseError && (
<p className='text-[var(--text-error)] text-caption leading-tight'>{parseError}</p>
)}
</div>
) : (
<div className='flex flex-col gap-4'>
<div className='flex items-center justify-between gap-3 rounded-sm border border-[var(--border)] p-2'>
<div className='flex min-w-0 flex-col'>
<span className='truncate text-caption text-[var(--text-primary)]'>
{parsed.file.name}
</span>
<span className='text-[var(--text-tertiary)] text-xs'>
{parsed.totalRows.toLocaleString()} rows · {parsed.headers.length} columns
</span>
</div>
<Button variant='ghost' size='sm' onClick={resetState}>
Change file
</Button>
</div>
<div className='flex flex-col gap-2'>
<Label>Mode</Label>
<ButtonGroup value={mode} onValueChange={handleModeChange}>
<ButtonGroupItem value='append'>Append</ButtonGroupItem>
<ButtonGroupItem value='replace'>Replace all rows</ButtonGroupItem>
</ButtonGroup>
</div>
<div className='flex flex-col gap-2'>
<Label>Column mapping</Label>
<div className='overflow-hidden rounded-sm border border-[var(--border)]'>
<div className='max-h-[320px] overflow-auto'>
<Table>
<TableHeader>
<TableRow>
<TableHead>CSV column</TableHead>
<TableHead>Target column</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{parsed.headers.map((header) => {
const sample = parsed.sampleRows
.map((r) =>
r[header] === '' || r[header] == null ? '' : String(r[header])
)
.filter(Boolean)
.slice(0, 2)
.join(', ')
return (
<TableRow key={header}>
<TableCell>
<div className='flex min-w-0 flex-col'>
<span className='truncate text-[var(--text-primary)]'>
{header}
</span>
{sample && (
<span className='truncate text-[var(--text-tertiary)] text-xs'>
{sample}
</span>
)}
</div>
</TableCell>
<TableCell>
<Combobox
options={columnOptions}
value={mapping[header] ?? SKIP_VALUE}
onChange={(value) => handleMappingChange(header, value)}
size='sm'
className='w-full'
/>
</TableCell>
</TableRow>
)
})}
</TableBody>
</Table>
</div>
</div>
<span className='text-[var(--text-tertiary)] text-xs'>
{mappedCount} mapped · {skipCount} skipped
</span>
</div>
{hasWarning && (
<div className='flex flex-col gap-1'>
{missingRequired.length > 0 && (
<p className='text-[var(--text-error)] text-caption leading-tight'>
Missing required column(s): {missingRequired.join(', ')}
</p>
)}
{duplicateTargets.length > 0 && (
<p className='text-[var(--text-error)] text-caption leading-tight'>
Multiple CSV columns target: {duplicateTargets.join(', ')} (pick one)
</p>
)}
{appendCapacityDeficit > 0 && (
<p className='text-[var(--text-error)] text-caption leading-tight'>
Append would exceed the row limit ({table.maxRows.toLocaleString()}) by{' '}
{appendCapacityDeficit.toLocaleString()} row(s). Remove rows or switch to
Replace.
</p>
)}
{replaceCapacityDeficit > 0 && (
<p className='text-[var(--text-error)] text-caption leading-tight'>
CSV has {parsed.totalRows.toLocaleString()} rows, which exceeds the table
limit of {table.maxRows.toLocaleString()} by{' '}
{replaceCapacityDeficit.toLocaleString()}.
</p>
)}
</div>
)}
{mode === 'replace' && !hasWarning && (
<p className='text-[var(--text-error)] text-caption leading-tight'>
Replace will permanently delete the {table.rowCount.toLocaleString()} existing
row(s) before inserting the new rows.
</p>
)}
{submitError && (
<p
className='text-[var(--text-error)] text-caption leading-tight'
title={submitError}
>
{submitError}
</p>
)}
</div>
)}
</ModalBody>
<ModalFooter>
<Button
variant='default'
onClick={() => onOpenChange(false)}
disabled={importMutation.isPending}
>
Cancel
</Button>
<Button
variant={mode === 'replace' ? 'destructive' : 'primary'}
onClick={handleSubmit}
disabled={!canSubmit}
>
{importMutation.isPending
? mode === 'replace'
? 'Replacing...'
: 'Importing...'
: mode === 'replace'
? 'Replace rows'
: 'Append rows'}
</Button>
</ModalFooter>
</ModalContent>
</Modal>
)
}

View File

@@ -0,0 +1 @@
export * from './import-csv-dialog'

View File

@@ -1,2 +1,3 @@
export * from './import-csv-dialog'
export * from './table-context-menu'
export * from './tables-list-context-menu'

View File

@@ -6,6 +6,7 @@ import {
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
Upload,
} from '@/components/emcn'
import { Copy, Database, Pencil, Trash } from '@/components/emcn/icons'
@@ -17,8 +18,10 @@ interface TableContextMenuProps {
onDelete?: () => void
onViewSchema?: () => void
onRename?: () => void
onImportCsv?: () => void
disableDelete?: boolean
disableRename?: boolean
disableImport?: boolean
menuRef?: React.RefObject<HTMLDivElement | null>
}
@@ -30,8 +33,10 @@ export function TableContextMenu({
onDelete,
onViewSchema,
onRename,
onImportCsv,
disableDelete = false,
disableRename = false,
disableImport = false,
}: TableContextMenuProps) {
return (
<DropdownMenu open={isOpen} onOpenChange={(open) => !open && onClose()} modal={false}>
@@ -67,7 +72,15 @@ export function TableContextMenu({
Rename
</DropdownMenuItem>
)}
{(onViewSchema || onRename) && (onCopyId || onDelete) && <DropdownMenuSeparator />}
{onImportCsv && (
<DropdownMenuItem disabled={disableImport} onSelect={onImportCsv}>
<Upload />
Import CSV...
</DropdownMenuItem>
)}
{(onViewSchema || onRename || onImportCsv) && (onCopyId || onDelete) && (
<DropdownMenuSeparator />
)}
{onCopyId && (
<DropdownMenuItem onSelect={onCopyId}>
<Copy />

View File

@@ -27,7 +27,10 @@ import type {
} from '@/app/workspace/[workspaceId]/components'
import { ownerCell, Resource, timeCell } from '@/app/workspace/[workspaceId]/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { TablesListContextMenu } from '@/app/workspace/[workspaceId]/tables/components'
import {
ImportCsvDialog,
TablesListContextMenu,
} from '@/app/workspace/[workspaceId]/tables/components'
import { TableContextMenu } from '@/app/workspace/[workspaceId]/tables/components/table-context-menu'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import {
@@ -76,6 +79,7 @@ export function Tables() {
const uploadCsv = useUploadCsvToTable()
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
const [isImportDialogOpen, setIsImportDialogOpen] = useState(false)
const [activeTable, setActiveTable] = useState<TableDefinition | null>(null)
const [searchTerm, setSearchTerm] = useState('')
const debouncedSearchTerm = useDebounce(searchTerm, 300)
@@ -525,10 +529,24 @@ export function Tables() {
if (activeTable) navigator.clipboard.writeText(activeTable.id)
}}
onDelete={() => setIsDeleteDialogOpen(true)}
onImportCsv={() => setIsImportDialogOpen(true)}
disableDelete={userPermissions.canEdit !== true}
disableRename={userPermissions.canEdit !== true}
disableImport={userPermissions.canEdit !== true}
/>
{activeTable && (
<ImportCsvDialog
open={isImportDialogOpen}
onOpenChange={(open) => {
setIsImportDialogOpen(open)
if (!open) setActiveTable(null)
}}
workspaceId={workspaceId}
table={activeTable}
/>
)}
<Modal open={isDeleteDialogOpen} onOpenChange={setIsDeleteDialogOpen}>
<ModalContent size='sm'>
<ModalHeader>Delete Table</ModalHeader>

View File

@@ -7,20 +7,20 @@ import type { SVGProps } from 'react'
export function Bell(props: SVGProps<SVGSVGElement>) {
return (
<svg
xmlns='http://www.w3.org/2000/svg'
width='24'
height='24'
viewBox='-1 -2 24 24'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='1.75'
strokeWidth='2'
strokeLinecap='round'
strokeLinejoin='round'
xmlns='http://www.w3.org/2000/svg'
aria-hidden='true'
{...props}
>
<path d='M15.25 6.75C15.25 5.35761 14.6969 4.02226 13.7123 3.03769C12.7277 2.05312 11.3924 1.5 10 1.5C8.60761 1.5 7.27226 2.05312 6.28769 3.03769C5.30312 4.02226 4.75 5.35761 4.75 6.75C4.75 12.75 2.25 14.5 2.25 14.5H17.75C17.75 14.5 15.25 12.75 15.25 6.75Z' />
<path d='M11.4425 17.75C11.2655 18.0547 11.0133 18.3088 10.7101 18.4882C10.4068 18.6676 10.0627 18.7662 9.71 18.7749C9.35735 18.7836 9.00888 18.7022 8.69728 18.5381C8.38568 18.374 8.12138 18.1327 7.92999 17.8375' />
<path d='M6 8a6 6 0 0 1 12 0c0 7 3 9 3 9H3s3-2 3-9' />
<path d='M10.3 21a1.94 1.94 0 0 0 3.4 0' />
</svg>
)
}

View File

@@ -60,6 +60,7 @@ export { PillsRing } from './pills-ring'
export { Play, PlayOutline } from './play'
export { Plus } from './plus'
export { Redo } from './redo'
export { RefreshCw } from './refresh-cw'
export { Rocket } from './rocket'
export { Rows3 } from './rows3'
export { Search } from './search'

View File

@@ -0,0 +1,31 @@
import type { SVGProps } from 'react'
import styles from '@/components/emcn/icons/animate/loader.module.css'
import { cn } from '@/lib/core/utils/cn'
export interface RefreshCwProps extends SVGProps<SVGSVGElement> {
animate?: boolean
}
export function RefreshCw({ animate = false, className, ...props }: RefreshCwProps) {
return (
<svg
xmlns='http://www.w3.org/2000/svg'
width='24'
height='24'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='2'
strokeLinecap='round'
strokeLinejoin='round'
className={cn(animate && styles['animated-loader-svg'], className)}
aria-hidden='true'
{...props}
>
<path d='M3 12a9 9 0 0 1 9-9 9.75 9.75 0 0 1 6.74 2.74L21 8' />
<path d='M21 3v5h-5' />
<path d='M21 12a9 9 0 0 1-9 9 9.75 9.75 0 0 1-6.74-2.74L3 16' />
<path d='M8 16H3v5' />
</svg>
)
}
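
For orientation, a minimal usage sketch of the new `animate` prop. Everything except the `RefreshCw` import is illustrative, not part of this commit:

```tsx
import { RefreshCw } from '@/components/emcn/icons'

// Hypothetical consumer: spin the icon while a refetch is in flight.
export function RefreshButton(props: { isFetching: boolean; onRefresh: () => void }) {
  return (
    <button type='button' aria-label='Refresh' onClick={props.onRefresh}>
      <RefreshCw animate={props.isFetching} />
    </button>
  )
}
```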

View File

@@ -600,6 +600,171 @@ describe('EdgeManager', () => {
})
expect(readyNodes).toContain(function1Id)
})
/**
* Regression for the substring-match bug in clearDeactivatedEdgesForNodes.
*
* Reproduces the real workflow pattern where an empty upstream loop (e.g. KG)
* cascade-deactivates its `loop_exit` edge into the next loop's sentinel-start (e.g. SBJ). When
* SBJ iterates and resets its state between iterations, the old buggy `includes(\`-${nodeId}-\`)`
* check matched edge keys where the sentinel was the TARGET (not the source), wrongly
* reactivating that external edge. countActiveIncomingEdges then saw a phantom pending
* upstream, so SBJ's sentinel-start stopped reporting ready and the loop stalled after iteration 1.
*/
it('should not re-activate external cascade-deactivated edges pointing INTO a loop node', () => {
const externalNodeId = 'external-node'
const sbjSentinelStartId = 'loop-sbj-sentinel-start'
const sbjSentinelEndId = 'loop-sbj-sentinel-end'
const bodyNodeId = 'body-node'
const externalNode = createMockNode(externalNodeId, [
{ target: sbjSentinelStartId, sourceHandle: 'condition-if' },
])
const sbjSentinelStartNode = createMockNode(
sbjSentinelStartId,
[{ target: bodyNodeId }],
[externalNodeId]
)
const bodyNode = createMockNode(
bodyNodeId,
[{ target: sbjSentinelEndId }],
[sbjSentinelStartId]
)
const sbjSentinelEndNode = createMockNode(sbjSentinelEndId, [], [bodyNodeId])
const nodes = new Map<string, DAGNode>([
[externalNodeId, externalNode],
[sbjSentinelStartId, sbjSentinelStartNode],
[bodyNodeId, bodyNode],
[sbjSentinelEndId, sbjSentinelEndNode],
])
const dag = createMockDAG(nodes)
const edgeManager = new EdgeManager(dag)
edgeManager.processOutgoingEdges(externalNode, { selectedOption: 'else' })
expect(edgeManager.isNodeReady(sbjSentinelStartNode)).toBe(true)
edgeManager.clearDeactivatedEdgesForNodes(
new Set([sbjSentinelStartId, sbjSentinelEndId, bodyNodeId])
)
expect(edgeManager.isNodeReady(sbjSentinelStartNode)).toBe(true)
})
/**
* End-to-end regression: after a loop reset while an external edge is cascade-deactivated,
* the backwards `loop_continue` edge from sentinel-end must still mark sentinel-start as
* ready. The old code removed the external edge's deactivation entry, leaving a phantom
* active incoming edge and producing the exact "loop stops after 1 iteration" symptom the user
* hit on the Group A workflow.
*/
it('should leave sbjSentinelStart ready after loop reset when external edge is cascade-deactivated', () => {
const externalNodeId = 'external-node'
const sbjSentinelStartId = 'loop-sbj-sentinel-start'
const sbjSentinelEndId = 'loop-sbj-sentinel-end'
const bodyNodeId = 'body-node'
const externalNode = createMockNode(externalNodeId, [
{ target: sbjSentinelStartId, sourceHandle: 'condition-if' },
])
const sbjSentinelStartNode = createMockNode(
sbjSentinelStartId,
[{ target: bodyNodeId }],
[externalNodeId]
)
const bodyNode = createMockNode(
bodyNodeId,
[{ target: sbjSentinelEndId }],
[sbjSentinelStartId]
)
const sbjSentinelEndNode = createMockNode(
sbjSentinelEndId,
[{ target: sbjSentinelStartId, sourceHandle: 'loop_continue' }],
[bodyNodeId]
)
const nodes = new Map<string, DAGNode>([
[externalNodeId, externalNode],
[sbjSentinelStartId, sbjSentinelStartNode],
[bodyNodeId, bodyNode],
[sbjSentinelEndId, sbjSentinelEndNode],
])
const dag = createMockDAG(nodes)
const edgeManager = new EdgeManager(dag)
edgeManager.processOutgoingEdges(externalNode, { selectedOption: 'else' })
edgeManager.clearDeactivatedEdgesForNodes(
new Set([sbjSentinelStartId, sbjSentinelEndId, bodyNodeId])
)
const readyNodes = edgeManager.processOutgoingEdges(sbjSentinelEndNode, {
selectedRoute: 'loop_continue',
})
expect(readyNodes).toContain(sbjSentinelStartId)
})
/**
* Guard against an overly narrow fix: edges whose SOURCE is inside the loop (e.g. a body
* node that deactivated its outgoing edge during the previous iteration) must still be
* cleared on reset so the next iteration can traverse them.
*/
it('should re-activate internal loop edges (source inside loop) when resetting loop state', () => {
const sbjSentinelStartId = 'loop-sbj-sentinel-start'
const sbjSentinelEndId = 'loop-sbj-sentinel-end'
const conditionInLoopId = 'condition-in-loop'
const thenBranchId = 'then-branch'
const sbjSentinelStartNode = createMockNode(sbjSentinelStartId, [
{ target: conditionInLoopId },
])
const conditionInLoopNode = createMockNode(
conditionInLoopId,
[
{ target: thenBranchId, sourceHandle: 'condition-if' },
{ target: sbjSentinelEndId, sourceHandle: 'condition-else' },
],
[sbjSentinelStartId]
)
const thenBranchNode = createMockNode(
thenBranchId,
[{ target: sbjSentinelEndId }],
[conditionInLoopId]
)
const sbjSentinelEndNode = createMockNode(
sbjSentinelEndId,
[],
[conditionInLoopId, thenBranchId]
)
const nodes = new Map<string, DAGNode>([
[sbjSentinelStartId, sbjSentinelStartNode],
[conditionInLoopId, conditionInLoopNode],
[thenBranchId, thenBranchNode],
[sbjSentinelEndId, sbjSentinelEndNode],
])
const dag = createMockDAG(nodes)
const edgeManager = new EdgeManager(dag)
edgeManager.processOutgoingEdges(conditionInLoopNode, { selectedOption: 'else' })
edgeManager.clearDeactivatedEdgesForNodes(
new Set([sbjSentinelStartId, sbjSentinelEndId, conditionInLoopId, thenBranchId])
)
thenBranchNode.incomingEdges.add(conditionInLoopId)
const readyNodes = edgeManager.processOutgoingEdges(conditionInLoopNode, {
selectedOption: 'if',
})
expect(readyNodes).toContain(thenBranchId)
})
})
describe('restoreIncomingEdge', () => {

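The specs above lean on `createMockNode`/`createMockDAG` helpers defined earlier in this file. A hypothetical reconstruction, for orientation only (the real helpers and the `DAGNode` shape may differ):

```ts
// Hypothetical shape inferred from usage above: node id, outgoing edge
// descriptors, and a Set of incoming source ids (the tests call
// `thenBranchNode.incomingEdges.add(...)`, so a Set is assumed).
function createMockNode(
  id: string,
  outgoing: Array<{ target: string; sourceHandle?: string }> = [],
  incoming: string[] = []
): DAGNode {
  return {
    id,
    outgoingEdges: outgoing,
    incomingEdges: new Set(incoming),
  } as unknown as DAGNode
}

function createMockDAG(nodes: Map<string, DAGNode>): DAG {
  return { nodes } as unknown as DAG
}
```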
View File

@@ -128,12 +128,21 @@ export class EdgeManager {
/**
* Clear deactivated edges for a set of nodes (used when restoring loop state for next iteration).
*
* Only clears edges whose SOURCE is in the provided set. Edges pointing INTO a node in the set
* whose source lives outside (e.g. an external branch whose path was cascade-deactivated) must
* remain deactivated — otherwise `countActiveIncomingEdges` would count a source that will never
* fire again, stalling the loop on its next iteration.
*
* Edge-key format is `${sourceId}-${targetId}-${handle}`, so `startsWith("${nodeId}-")` matches
* only keys where the node is the source (assuming no node id plus a trailing `-` is a prefix
* of another node id). An `includes("-${nodeId}-")` check would also match "node is target"
* and is unsafe for the reset semantics.
*/
clearDeactivatedEdgesForNodes(nodeIds: Set<string>): void {
const edgesToRemove: string[] = []
for (const edgeKey of this.deactivatedEdges) {
for (const nodeId of nodeIds) {
if (edgeKey.startsWith(`${nodeId}-`) || edgeKey.includes(`-${nodeId}-`)) {
if (edgeKey.startsWith(`${nodeId}-`)) {
edgesToRemove.push(edgeKey)
break
}
@@ -142,7 +151,6 @@ export class EdgeManager {
for (const edgeKey of edgesToRemove) {
this.deactivatedEdges.delete(edgeKey)
}
// Also clear activated edge tracking for these nodes
for (const nodeId of nodeIds) {
this.nodesWithActivatedEdge.delete(nodeId)
}
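
A self-contained sketch of the key-matching difference, using the ids from the regression tests above:

```ts
// Edge keys follow `${sourceId}-${targetId}-${handle}`.
const edgeKey = 'external-node-loop-sbj-sentinel-start-condition-if'
const nodeId = 'loop-sbj-sentinel-start' // node being reset; here it is the TARGET

// Old check: true, because the substring also matches the target position,
// so the reset wrongly re-activated the external edge pointing into the loop.
edgeKey.includes(`-${nodeId}-`) // true

// New check: false, because only a SOURCE id can start the key,
// so the cascade-deactivated external edge stays deactivated.
edgeKey.startsWith(`${nodeId}-`) // false
```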

View File

@@ -7,13 +7,8 @@ import type { DAG } from '@/executor/dag/builder'
import type { EdgeManager } from '@/executor/execution/edge-manager'
import type { LoopScope } from '@/executor/execution/state'
import type { BlockStateController, ContextExtensions } from '@/executor/execution/types'
import {
type ExecutionContext,
getNextExecutionOrder,
type NormalizedBlockOutput,
} from '@/executor/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { LoopConfigWithNodes } from '@/executor/types/loop'
import { buildContainerIterationContext } from '@/executor/utils/iteration-context'
import { replaceValidReferences } from '@/executor/utils/reference-validation'
import {
addSubflowErrorLog,
@@ -22,6 +17,7 @@ import {
buildSentinelEndId,
buildSentinelStartId,
emitEmptySubflowEvents,
emitSubflowSuccessEvents,
extractBaseBlockId,
resolveArrayInput,
validateMaxCount,
@@ -319,31 +315,7 @@ export class LoopOrchestrator {
const output = { results }
this.state.setBlockOutput(loopId, output, DEFAULTS.EXECUTION_TIME)
if (this.contextExtensions?.onBlockComplete) {
const now = new Date().toISOString()
const iterationContext = buildContainerIterationContext(ctx, loopId)
try {
await this.contextExtensions.onBlockComplete(
loopId,
'Loop',
'loop',
{
output,
executionTime: DEFAULTS.EXECUTION_TIME,
startedAt: now,
executionOrder: getNextExecutionOrder(ctx),
endedAt: now,
},
iterationContext
)
} catch (error) {
logger.warn('Loop completion callback failed', {
loopId,
error: error instanceof Error ? error.message : String(error),
})
}
}
await emitSubflowSuccessEvents(ctx, loopId, 'loop', output, this.contextExtensions)
return {
shouldContinue: false,

View File

@@ -3,17 +3,13 @@ import { DEFAULTS } from '@/executor/constants'
import type { DAG } from '@/executor/dag/builder'
import type { ParallelScope } from '@/executor/execution/state'
import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types'
import {
type ExecutionContext,
getNextExecutionOrder,
type NormalizedBlockOutput,
} from '@/executor/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { ParallelConfigWithNodes } from '@/executor/types/parallel'
import { buildContainerIterationContext } from '@/executor/utils/iteration-context'
import { ParallelExpander } from '@/executor/utils/parallel-expansion'
import {
addSubflowErrorLog,
emitEmptySubflowEvents,
emitSubflowSuccessEvents,
extractBranchIndex,
resolveArrayInput,
validateMaxCount,
@@ -318,34 +314,7 @@ export class ParallelOrchestrator {
const output = { results }
this.state.setBlockOutput(parallelId, output)
// Emit onBlockComplete for the parallel container so the UI can track it.
// When this parallel is nested inside a parent subflow (parallel or loop), emit
// iteration context so the terminal can group this event under the parent container.
if (this.contextExtensions?.onBlockComplete) {
const now = new Date().toISOString()
const iterationContext = buildContainerIterationContext(ctx, parallelId)
try {
await this.contextExtensions.onBlockComplete(
parallelId,
'Parallel',
'parallel',
{
output,
executionTime: 0,
startedAt: now,
executionOrder: getNextExecutionOrder(ctx),
endedAt: now,
},
iterationContext
)
} catch (error) {
logger.warn('Parallel completion callback failed', {
parallelId,
error: error instanceof Error ? error.message : String(error),
})
}
}
await emitSubflowSuccessEvents(ctx, parallelId, 'parallel', output, this.contextExtensions)
return {
allBranchesComplete: true,

View File

@@ -396,3 +396,60 @@ export async function emitEmptySubflowEvents(
}
}
}
/**
* Emits the BlockLog + onBlockComplete callback for a loop/parallel container that
* finished successfully with at least one iteration. Without this, successful container
* runs produce no top-level BlockLog, which forces the trace-span builder to fall back
* to generic counter-based names ("Loop 1", "Parallel 1") instead of the user-configured
* block name.
*/
export async function emitSubflowSuccessEvents(
ctx: ExecutionContext,
blockId: string,
blockType: 'loop' | 'parallel',
output: { results: any[] },
contextExtensions: ContextExtensions | null
): Promise<void> {
const now = new Date().toISOString()
const executionOrder = getNextExecutionOrder(ctx)
const block = ctx.workflow?.blocks.find((b) => b.id === blockId)
const blockName = block?.metadata?.name ?? blockType
const iterationContext = buildContainerIterationContext(ctx, blockId)
ctx.blockLogs.push({
blockId,
blockName,
blockType,
startedAt: now,
endedAt: now,
durationMs: DEFAULTS.EXECUTION_TIME,
success: true,
output,
executionOrder,
})
if (contextExtensions?.onBlockComplete) {
try {
await contextExtensions.onBlockComplete(
blockId,
blockName,
blockType,
{
output,
executionTime: DEFAULTS.EXECUTION_TIME,
startedAt: now,
executionOrder,
endedAt: now,
},
iterationContext
)
} catch (error) {
logger.warn('Subflow success completion callback failed', {
blockId,
blockType,
error: error instanceof Error ? error.message : String(error),
})
}
}
}
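
For a loop the user renamed to, say, "Enrich rows", the helper pushes a BlockLog entry of roughly this shape. Values are illustrative; the timestamps match because containers complete instantaneously from the log's perspective:

```ts
// Illustrative entry appended to ctx.blockLogs by emitSubflowSuccessEvents.
const exampleEntry = {
  blockId: 'loop-1',
  blockName: 'Enrich rows', // block?.metadata?.name, falling back to 'loop'
  blockType: 'loop' as const,
  startedAt: '2026-04-16T21:00:00.000Z',
  endedAt: '2026-04-16T21:00:00.000Z',
  durationMs: 0, // DEFAULTS.EXECUTION_TIME (assumed 0 here)
  success: true,
  output: { results: [{ row: 1 }, { row: 2 }] },
  executionOrder: 7,
}
```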

View File

@@ -780,6 +780,77 @@ export function useUploadCsvToTable() {
})
}
export type CsvHeaderMapping = Record<string, string | null>
export type CsvImportMode = 'append' | 'replace'
interface ImportCsvIntoTableParams {
workspaceId: string
tableId: string
file: File
mode: CsvImportMode
mapping?: CsvHeaderMapping
}
interface ImportCsvIntoTableResponse {
success: boolean
data?: {
tableId: string
mode: CsvImportMode
insertedCount?: number
deletedCount?: number
mappedColumns?: string[]
skippedHeaders?: string[]
unmappedColumns?: string[]
sourceFile?: string
}
}
/**
* Upload a CSV file to an existing table in append or replace mode. Supports
* an optional explicit header-to-column mapping; when omitted the server
* auto-maps headers by sanitized name.
*/
export function useImportCsvIntoTable() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({
workspaceId,
tableId,
file,
mode,
mapping,
}: ImportCsvIntoTableParams): Promise<ImportCsvIntoTableResponse> => {
const formData = new FormData()
formData.append('file', file)
formData.append('workspaceId', workspaceId)
formData.append('mode', mode)
if (mapping) {
formData.append('mapping', JSON.stringify(mapping))
}
const response = await fetch(`/api/table/${tableId}/import-csv`, {
method: 'POST',
body: formData,
})
if (!response.ok) {
const data = await response.json().catch(() => ({}))
throw new Error(data.error || 'CSV import failed')
}
return response.json()
},
onSettled: (_data, _error, variables) => {
if (!variables) return
invalidateRowCount(queryClient, variables.workspaceId, variables.tableId)
},
onError: (error) => {
logger.error('Failed to import CSV into table:', error)
},
})
}
export function useDeleteColumn({ workspaceId, tableId }: RowMutationContext) {
const queryClient = useQueryClient()

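A minimal consumer sketch for the new hook. Everything other than the hook's own API (the file state, the dialog close handler, the table object) is illustrative:

```ts
const importCsv = useImportCsvIntoTable()

// mapping maps CSV header -> table column; null skips that header entirely.
importCsv.mutate(
  {
    workspaceId,
    tableId: table.id,
    file: selectedFile, // a File from an <input type='file'>, illustrative
    mode: 'replace',
    mapping: { 'Full Name': 'name', Notes: null },
  },
  { onSuccess: () => onOpenChange(false) }
)
```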
View File

@@ -2830,6 +2830,22 @@ export const UserTable: ToolCatalogEntry = {
type: 'number',
description: 'Maximum rows to return or affect (optional, default 100)',
},
mapping: {
type: 'object',
description:
'Optional explicit CSV-header → table-column mapping for import_file, as { "csvHeader": "columnName" | null }. When omitted, headers are auto-matched by sanitized name (case-insensitive fallback). Use null to skip a CSV column.',
additionalProperties: {
type: 'string',
description: 'Target table column name, or null to skip this CSV header.',
},
},
mode: {
type: 'string',
description:
"Import mode for import_file. 'append' (default) adds rows; 'replace' truncates existing rows in a transaction before inserting the new rows.",
enum: ['append', 'replace'],
},
name: { type: 'string', description: "Table name (required for 'create')" },
newName: { type: 'string', description: 'New column name (required for rename_column)' },
newType: {

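An illustrative `import_file` invocation using the two new parameters; the call envelope mirrors the server-tool tests later in this diff:

```ts
// Hypothetical user_table call: replace all rows from a workspace CSV file,
// mapping two headers explicitly and skipping a third.
await userTableServerTool.execute(
  {
    operation: 'import_file',
    args: {
      tableId: 'tbl_1',
      fileId: 'file-1',
      mode: 'replace', // truncates existing rows in a transaction, then inserts
      mapping: { 'Full Name': 'name', Years: 'age', Comment: null },
    },
  },
  { userId: 'user-1', workspaceId: 'workspace-1' }
)
```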
View File

@@ -10,7 +10,7 @@ export interface ToolRuntimeSchemaEntry {
}
export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
agent: {
['agent']: {
parameters: {
properties: {
request: {
@@ -23,7 +23,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
auth: {
['auth']: {
parameters: {
properties: {
request: {
@@ -36,7 +36,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
check_deployment_status: {
['check_deployment_status']: {
parameters: {
type: 'object',
properties: {
@@ -48,7 +48,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
complete_job: {
['complete_job']: {
parameters: {
type: 'object',
properties: {
@@ -61,7 +61,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
context_write: {
['context_write']: {
parameters: {
type: 'object',
properties: {
@@ -78,7 +78,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
crawl_website: {
['crawl_website']: {
parameters: {
type: 'object',
properties: {
@@ -113,7 +113,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
create_file: {
['create_file']: {
parameters: {
type: 'object',
properties: {
@@ -149,7 +149,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
required: ['success', 'message'],
},
},
create_folder: {
['create_folder']: {
parameters: {
type: 'object',
properties: {
@@ -170,7 +170,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
create_job: {
['create_job']: {
parameters: {
type: 'object',
properties: {
@@ -220,7 +220,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
create_workflow: {
['create_workflow']: {
parameters: {
type: 'object',
properties: {
@@ -245,7 +245,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
create_workspace_mcp_server: {
['create_workspace_mcp_server']: {
parameters: {
type: 'object',
properties: {
@@ -266,7 +266,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
debug: {
['debug']: {
parameters: {
properties: {
context: {
@@ -285,7 +285,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
delete_file: {
['delete_file']: {
parameters: {
type: 'object',
properties: {
@@ -314,7 +314,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
required: ['success', 'message'],
},
},
delete_folder: {
['delete_folder']: {
parameters: {
type: 'object',
properties: {
@@ -330,7 +330,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
delete_workflow: {
['delete_workflow']: {
parameters: {
type: 'object',
properties: {
@@ -346,7 +346,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
delete_workspace_mcp_server: {
['delete_workspace_mcp_server']: {
parameters: {
type: 'object',
properties: {
@@ -359,7 +359,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
deploy: {
['deploy']: {
parameters: {
properties: {
request: {
@@ -373,7 +373,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
deploy_api: {
['deploy_api']: {
parameters: {
type: 'object',
properties: {
@@ -447,7 +447,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
],
},
},
deploy_chat: {
['deploy_chat']: {
parameters: {
type: 'object',
properties: {
@@ -595,7 +595,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
],
},
},
deploy_mcp: {
['deploy_mcp']: {
parameters: {
type: 'object',
properties: {
@@ -711,7 +711,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
required: ['deploymentType', 'deploymentStatus'],
},
},
download_to_workspace_file: {
['download_to_workspace_file']: {
parameters: {
type: 'object',
properties: {
@@ -730,7 +730,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
edit_content: {
['edit_content']: {
parameters: {
type: 'object',
properties: {
@@ -762,7 +762,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
required: ['success', 'message'],
},
},
edit_workflow: {
['edit_workflow']: {
parameters: {
type: 'object',
properties: {
@@ -801,13 +801,13 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
file: {
['file']: {
parameters: {
type: 'object',
},
resultSchema: undefined,
},
function_execute: {
['function_execute']: {
parameters: {
type: 'object',
properties: {
@@ -868,7 +868,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
generate_api_key: {
['generate_api_key']: {
parameters: {
type: 'object',
properties: {
@@ -886,7 +886,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
generate_image: {
['generate_image']: {
parameters: {
type: 'object',
properties: {
@@ -923,7 +923,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
generate_visualization: {
['generate_visualization']: {
parameters: {
type: 'object',
properties: {
@@ -963,7 +963,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_block_outputs: {
['get_block_outputs']: {
parameters: {
type: 'object',
properties: {
@@ -984,7 +984,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_block_upstream_references: {
['get_block_upstream_references']: {
parameters: {
type: 'object',
properties: {
@@ -1006,7 +1006,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_deployed_workflow_state: {
['get_deployed_workflow_state']: {
parameters: {
type: 'object',
properties: {
@@ -1019,7 +1019,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_deployment_version: {
['get_deployment_version']: {
parameters: {
type: 'object',
properties: {
@@ -1036,7 +1036,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_execution_summary: {
['get_execution_summary']: {
parameters: {
type: 'object',
properties: {
@@ -1063,7 +1063,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_job_logs: {
['get_job_logs']: {
parameters: {
type: 'object',
properties: {
@@ -1088,7 +1088,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_page_contents: {
['get_page_contents']: {
parameters: {
type: 'object',
properties: {
@@ -1116,14 +1116,14 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_platform_actions: {
['get_platform_actions']: {
parameters: {
type: 'object',
properties: {},
},
resultSchema: undefined,
},
get_workflow_data: {
['get_workflow_data']: {
parameters: {
type: 'object',
properties: {
@@ -1142,7 +1142,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
get_workflow_logs: {
['get_workflow_logs']: {
parameters: {
type: 'object',
properties: {
@@ -1168,7 +1168,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
glob: {
['glob']: {
parameters: {
type: 'object',
properties: {
@@ -1187,7 +1187,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
grep: {
['grep']: {
parameters: {
type: 'object',
properties: {
@@ -1234,7 +1234,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
job: {
['job']: {
parameters: {
properties: {
request: {
@@ -1247,7 +1247,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
knowledge: {
['knowledge']: {
parameters: {
properties: {
request: {
@@ -1260,7 +1260,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
knowledge_base: {
['knowledge_base']: {
parameters: {
type: 'object',
properties: {
@@ -1452,7 +1452,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
required: ['success', 'message'],
},
},
list_folders: {
['list_folders']: {
parameters: {
type: 'object',
properties: {
@@ -1464,14 +1464,14 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
list_user_workspaces: {
['list_user_workspaces']: {
parameters: {
type: 'object',
properties: {},
},
resultSchema: undefined,
},
list_workspace_mcp_servers: {
['list_workspace_mcp_servers']: {
parameters: {
type: 'object',
properties: {
@@ -1483,7 +1483,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
manage_credential: {
['manage_credential']: {
parameters: {
type: 'object',
properties: {
@@ -1512,7 +1512,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
manage_custom_tool: {
['manage_custom_tool']: {
parameters: {
type: 'object',
properties: {
@@ -1591,7 +1591,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
manage_job: {
['manage_job']: {
parameters: {
type: 'object',
properties: {
@@ -1661,7 +1661,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
manage_mcp_tool: {
['manage_mcp_tool']: {
parameters: {
type: 'object',
properties: {
@@ -1712,7 +1712,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
manage_skill: {
['manage_skill']: {
parameters: {
type: 'object',
properties: {
@@ -1744,7 +1744,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
materialize_file: {
['materialize_file']: {
parameters: {
type: 'object',
properties: {
@@ -1778,7 +1778,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
move_folder: {
['move_folder']: {
parameters: {
type: 'object',
properties: {
@@ -1796,7 +1796,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
move_workflow: {
['move_workflow']: {
parameters: {
type: 'object',
properties: {
@@ -1816,7 +1816,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
oauth_get_auth_link: {
['oauth_get_auth_link']: {
parameters: {
type: 'object',
properties: {
@@ -1830,7 +1830,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
oauth_request_access: {
['oauth_request_access']: {
parameters: {
type: 'object',
properties: {
@@ -1844,7 +1844,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
open_resource: {
['open_resource']: {
parameters: {
type: 'object',
properties: {
@@ -1872,7 +1872,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
read: {
['read']: {
parameters: {
type: 'object',
properties: {
@@ -1899,7 +1899,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
redeploy: {
['redeploy']: {
parameters: {
type: 'object',
properties: {
@@ -1967,7 +1967,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
],
},
},
rename_file: {
['rename_file']: {
parameters: {
type: 'object',
properties: {
@@ -2002,7 +2002,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
required: ['success', 'message'],
},
},
rename_workflow: {
['rename_workflow']: {
parameters: {
type: 'object',
properties: {
@@ -2019,7 +2019,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
research: {
['research']: {
parameters: {
properties: {
topic: {
@@ -2032,7 +2032,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
respond: {
['respond']: {
parameters: {
additionalProperties: true,
properties: {
@@ -2055,7 +2055,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
restore_resource: {
['restore_resource']: {
parameters: {
type: 'object',
properties: {
@@ -2073,7 +2073,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
revert_to_version: {
['revert_to_version']: {
parameters: {
type: 'object',
properties: {
@@ -2090,7 +2090,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
run: {
['run']: {
parameters: {
properties: {
context: {
@@ -2107,7 +2107,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
run_block: {
['run_block']: {
parameters: {
type: 'object',
properties: {
@@ -2139,7 +2139,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
run_from_block: {
['run_from_block']: {
parameters: {
type: 'object',
properties: {
@@ -2171,7 +2171,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
run_workflow: {
['run_workflow']: {
parameters: {
type: 'object',
properties: {
@@ -2199,7 +2199,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
run_workflow_until_block: {
['run_workflow_until_block']: {
parameters: {
type: 'object',
properties: {
@@ -2231,7 +2231,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
scrape_page: {
['scrape_page']: {
parameters: {
type: 'object',
properties: {
@@ -2252,7 +2252,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
search_documentation: {
['search_documentation']: {
parameters: {
type: 'object',
properties: {
@@ -2269,7 +2269,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
search_library_docs: {
['search_library_docs']: {
parameters: {
type: 'object',
properties: {
@@ -2290,7 +2290,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
search_online: {
['search_online']: {
parameters: {
type: 'object',
properties: {
@@ -2331,7 +2331,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
search_patterns: {
['search_patterns']: {
parameters: {
type: 'object',
properties: {
@@ -2353,7 +2353,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
set_block_enabled: {
['set_block_enabled']: {
parameters: {
type: 'object',
properties: {
@@ -2375,7 +2375,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
set_environment_variables: {
['set_environment_variables']: {
parameters: {
type: 'object',
properties: {
@@ -2409,7 +2409,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
set_global_workflow_variables: {
['set_global_workflow_variables']: {
parameters: {
type: 'object',
properties: {
@@ -2447,7 +2447,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
superagent: {
['superagent']: {
parameters: {
properties: {
task: {
@@ -2461,7 +2461,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
table: {
['table']: {
parameters: {
properties: {
request: {
@@ -2474,7 +2474,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
tool_search_tool_regex: {
['tool_search_tool_regex']: {
parameters: {
properties: {
case_insensitive: {
@@ -2495,7 +2495,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
update_job_history: {
['update_job_history']: {
parameters: {
type: 'object',
properties: {
@@ -2513,7 +2513,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
update_workspace_mcp_server: {
['update_workspace_mcp_server']: {
parameters: {
type: 'object',
properties: {
@@ -2538,7 +2538,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
user_memory: {
['user_memory']: {
parameters: {
type: 'object',
properties: {
@@ -2586,7 +2586,7 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
user_table: {
['user_table']: {
parameters: {
type: 'object',
properties: {
@@ -2635,6 +2635,22 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
type: 'number',
description: 'Maximum rows to return or affect (optional, default 100)',
},
mapping: {
type: 'object',
description:
'Optional explicit CSV-header → table-column mapping for import_file, as { "csvHeader": "columnName" | null }. When omitted, headers are auto-matched by sanitized name (case-insensitive fallback). Use null to skip a CSV column.',
additionalProperties: {
type: 'string',
description: 'Target table column name, or null to skip this CSV header.',
},
},
mode: {
type: 'string',
description:
"Import mode for import_file. 'append' (default) adds rows; 'replace' truncates existing rows in a transaction before inserting the new rows.",
enum: ['append', 'replace'],
},
name: {
type: 'string',
description: "Table name (required for 'create')",
@@ -2761,13 +2777,13 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
required: ['success', 'message'],
},
},
workflow: {
['workflow']: {
parameters: {
type: 'object',
},
resultSchema: undefined,
},
workspace_file: {
['workspace_file']: {
parameters: {
type: 'object',
properties: {

View File

@@ -0,0 +1,104 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
import {
extractTabularData,
serializeOutputForFile,
unwrapFunctionExecuteOutput,
} from '@/lib/copilot/request/tools/files'
describe('unwrapFunctionExecuteOutput', () => {
it('unwraps the function_execute envelope { result, stdout }', () => {
expect(unwrapFunctionExecuteOutput({ result: 'name,age\nAlice,30', stdout: '' })).toBe(
'name,age\nAlice,30'
)
})
it('passes through objects that do not have both result + stdout', () => {
const output = { data: { rows: [], totalCount: 0 } }
expect(unwrapFunctionExecuteOutput(output)).toBe(output)
})
it('passes through strings and arrays untouched', () => {
expect(unwrapFunctionExecuteOutput('hello')).toBe('hello')
const arr: unknown[] = [{ a: 1 }]
expect(unwrapFunctionExecuteOutput(arr)).toBe(arr)
})
})
describe('serializeOutputForFile (csv)', () => {
it('returns raw CSV text when function_execute result is already a CSV string', () => {
const output = {
result: 'name,age\nAlice,30\nBob,40',
stdout: '(2 rows)',
}
expect(serializeOutputForFile(output, 'csv')).toBe('name,age\nAlice,30\nBob,40')
})
it('converts a result array of objects into CSV', () => {
const output = {
result: [
{ name: 'Alice', age: 30 },
{ name: 'Bob', age: 40 },
],
stdout: '',
}
expect(serializeOutputForFile(output, 'csv')).toBe('name,age\nAlice,30\nBob,40')
})
it('returns the raw string when the non-envelope output is already a CSV string', () => {
expect(serializeOutputForFile('a,b\n1,2', 'csv')).toBe('a,b\n1,2')
})
it('falls back to JSON.stringify when the payload is not tabular and not a string', () => {
const output = { result: { foo: 'bar' }, stdout: '' }
expect(serializeOutputForFile(output, 'csv')).toBe('{\n  "foo": "bar"\n}')
})
})
describe('serializeOutputForFile (json / txt / md)', () => {
it('unwraps the envelope for json format so the file contains only result', () => {
const output = { result: { hello: 'world' }, stdout: 'log' }
expect(serializeOutputForFile(output, 'json')).toBe('{\n  "hello": "world"\n}')
})
it('returns the string payload as-is for txt/md/html formats', () => {
const output = { result: '# Report\n\nHello', stdout: '' }
expect(serializeOutputForFile(output, 'md')).toBe('# Report\n\nHello')
expect(serializeOutputForFile(output, 'txt')).toBe('# Report\n\nHello')
expect(serializeOutputForFile(output, 'html')).toBe('# Report\n\nHello')
})
})
describe('extractTabularData', () => {
it('extracts rows directly from an array input', () => {
expect(extractTabularData([{ a: 1 }, { a: 2 }])).toEqual([{ a: 1 }, { a: 2 }])
})
it('does NOT unwrap function_execute envelopes on its own (callers must pre-unwrap)', () => {
// Caller is responsible for unwrapping { result, stdout } envelopes first.
// Keeping that concern out of this function prevents a double unwrap when
// the user's payload itself happens to have matching keys.
expect(extractTabularData({ result: [{ a: 1 }], stdout: '' })).toBeNull()
})
it('extracts rows from the user_table query_rows shape', () => {
const rows = extractTabularData({
data: {
rows: [
{ id: 'row_1', data: { name: 'Alice' } },
{ id: 'row_2', data: { name: 'Bob' } },
],
totalCount: 2,
},
})
expect(rows).toEqual([{ name: 'Alice' }, { name: 'Bob' }])
})
it('returns null for non-tabular inputs', () => {
expect(extractTabularData('plain string')).toBeNull()
expect(extractTabularData(null)).toBeNull()
expect(extractTabularData({ foo: 'bar' })).toBeNull()
})
})

View File

@@ -26,8 +26,29 @@ export const FORMAT_TO_CONTENT_TYPE: Record<OutputFormat, string> = {
}
/**
* Try to pull a flat array of row-objects out of the various shapes that
* `function_execute` and `user_table` can return.
* Unwraps the `function_execute` response envelope `{ result, stdout }` so the
* rest of the serialization code works on the user's actual payload (a string,
* array, object, etc.) instead of JSON-stringifying the envelope itself.
*
* Only unwraps when both keys are present — that's the unique shape of
* `function_execute` (see `apps/sim/tools/function/types.ts` `CodeExecutionOutput`).
* `user_table` returns `{ data, message, success }` which is left alone.
*/
export function unwrapFunctionExecuteOutput(output: unknown): unknown {
if (!output || typeof output !== 'object' || Array.isArray(output)) return output
const obj = output as Record<string, unknown>
if ('result' in obj && 'stdout' in obj) {
return obj.result
}
return output
}
/**
* Try to pull a flat array of row-objects out of an already-unwrapped tool
* payload. Callers are responsible for stripping any `function_execute`
* envelope first (via {@link unwrapFunctionExecuteOutput}) — this function
* does not re-unwrap, so a user payload that coincidentally has `result` and
* `stdout` keys is not mistaken for another envelope.
*/
export function extractTabularData(output: unknown): Record<string, unknown>[] | null {
if (!output || typeof output !== 'object') return null
@@ -41,14 +62,6 @@ export function extractTabularData(output: unknown): Record<string, unknown>[] |
const obj = output as Record<string, unknown>
// function_execute shape: { result: [...], stdout: "..." }
if (Array.isArray(obj.result)) {
const rows = obj.result
if (rows.length > 0 && typeof rows[0] === 'object' && rows[0] !== null) {
return rows as Record<string, unknown>[]
}
}
// user_table query_rows shape: { data: { rows: [{ data: {...} }], totalCount } }
if (obj.data && typeof obj.data === 'object' && !Array.isArray(obj.data)) {
const data = obj.data as Record<string, unknown>
@@ -112,16 +125,18 @@ export function resolveOutputFormat(fileName: string, explicit?: string): Output
}
export function serializeOutputForFile(output: unknown, format: OutputFormat): string {
if (typeof output === 'string') return output
const unwrapped = unwrapFunctionExecuteOutput(output)
if (typeof unwrapped === 'string') return unwrapped
if (format === 'csv') {
const rows = extractTabularData(output)
const rows = extractTabularData(unwrapped)
if (rows && rows.length > 0) {
return convertRowsToCsv(rows)
}
}
return JSON.stringify(output, null, 2)
return JSON.stringify(unwrapped, null, 2)
}
export async function maybeWriteOutputToFile(

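Putting the two helpers together, the CSV path now behaves like this sketch (shapes taken from the tests above):

```ts
// function_execute envelope: unwrap first, then look for tabular rows.
const output = { result: [{ name: 'Alice', age: 30 }], stdout: '(1 row)' }

const unwrapped = unwrapFunctionExecuteOutput(output) // [{ name: 'Alice', age: 30 }]
const rows = extractTabularData(unwrapped) // same array: it is already tabular
// serializeOutputForFile(output, 'csv') therefore yields 'name,age\nAlice,30'

// extractTabularData no longer unwraps on its own, so a user payload that
// merely resembles the envelope is not mistaken for one:
extractTabularData({ result: [{ a: 1 }], stdout: '' }) // null
```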
View File

@@ -0,0 +1,220 @@
/**
* @vitest-environment node
*/
import { beforeEach, describe, expect, it, vi } from 'vitest'
import type { TableDefinition } from '@/lib/table'
const {
mockResolveWorkspaceFileReference,
mockDownloadWorkspaceFile,
mockGetTableById,
mockBatchInsertRows,
mockReplaceTableRows,
} = vi.hoisted(() => ({
mockResolveWorkspaceFileReference: vi.fn(),
mockDownloadWorkspaceFile: vi.fn(),
mockGetTableById: vi.fn(),
mockBatchInsertRows: vi.fn(),
mockReplaceTableRows: vi.fn(),
}))
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.mock('@/lib/core/utils/uuid', () => ({
generateId: vi.fn().mockReturnValue('deadbeefcafef00d'),
generateShortId: vi.fn().mockReturnValue('short-id'),
}))
vi.mock('@/lib/uploads/contexts/workspace/workspace-file-manager', () => ({
resolveWorkspaceFileReference: mockResolveWorkspaceFileReference,
downloadWorkspaceFile: mockDownloadWorkspaceFile,
}))
vi.mock('@/lib/table/service', () => ({
addTableColumn: vi.fn(),
batchInsertRows: mockBatchInsertRows,
batchUpdateRows: vi.fn(),
createTable: vi.fn(),
deleteColumn: vi.fn(),
deleteColumns: vi.fn(),
deleteRow: vi.fn(),
deleteRowsByFilter: vi.fn(),
deleteRowsByIds: vi.fn(),
deleteTable: vi.fn(),
getRowById: vi.fn(),
getTableById: mockGetTableById,
insertRow: vi.fn(),
queryRows: vi.fn(),
renameColumn: vi.fn(),
renameTable: vi.fn(),
replaceTableRows: mockReplaceTableRows,
updateColumnConstraints: vi.fn(),
updateColumnType: vi.fn(),
updateRow: vi.fn(),
updateRowsByFilter: vi.fn(),
}))
import { userTableServerTool } from '@/lib/copilot/tools/server/table/user-table'
function buildTable(overrides: Partial<TableDefinition> = {}): TableDefinition {
return {
id: 'tbl_1',
name: 'People',
description: null,
schema: {
columns: [
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
],
},
metadata: null,
rowCount: 0,
maxRows: 100,
workspaceId: 'workspace-1',
createdBy: 'user-1',
archivedAt: null,
createdAt: new Date('2024-01-01'),
updatedAt: new Date('2024-01-01'),
...overrides,
}
}
describe('userTableServerTool.import_file', () => {
beforeEach(() => {
vi.clearAllMocks()
mockResolveWorkspaceFileReference.mockResolvedValue({
name: 'people.csv',
type: 'text/csv',
})
mockDownloadWorkspaceFile.mockResolvedValue(Buffer.from('name,age\nAlice,30\nBob,40'))
mockGetTableById.mockResolvedValue(buildTable())
mockBatchInsertRows.mockImplementation(async (data: { rows: unknown[] }) =>
data.rows.map((_, i) => ({ id: `row_${i}` }))
)
mockReplaceTableRows.mockResolvedValue({ deletedCount: 0, insertedCount: 0 })
})
it('appends rows using auto-mapping by default', async () => {
const result = await userTableServerTool.execute(
{
operation: 'import_file',
args: { tableId: 'tbl_1', fileId: 'file-1' },
},
{ userId: 'user-1', workspaceId: 'workspace-1' }
)
expect(result.success).toBe(true)
expect(result.data?.mode).toBe('append')
expect(result.data?.rowCount).toBe(2)
expect(mockBatchInsertRows).toHaveBeenCalledTimes(1)
expect(mockReplaceTableRows).not.toHaveBeenCalled()
const call = mockBatchInsertRows.mock.calls[0][0] as { rows: unknown[] }
expect(call.rows).toEqual([
{ name: 'Alice', age: 30 },
{ name: 'Bob', age: 40 },
])
})
it('replaces rows in replace mode', async () => {
mockReplaceTableRows.mockResolvedValueOnce({ deletedCount: 3, insertedCount: 2 })
const result = await userTableServerTool.execute(
{
operation: 'import_file',
args: { tableId: 'tbl_1', fileId: 'file-1', mode: 'replace' },
},
{ userId: 'user-1', workspaceId: 'workspace-1' }
)
expect(result.success).toBe(true)
expect(result.data?.mode).toBe('replace')
expect(result.data?.deletedCount).toBe(3)
expect(result.data?.insertedCount).toBe(2)
expect(mockReplaceTableRows).toHaveBeenCalledTimes(1)
expect(mockBatchInsertRows).not.toHaveBeenCalled()
})
it('uses the caller-provided mapping', async () => {
mockDownloadWorkspaceFile.mockResolvedValueOnce(
Buffer.from('Full Name,Years\nAlice,30\nBob,40')
)
const result = await userTableServerTool.execute(
{
operation: 'import_file',
args: {
tableId: 'tbl_1',
fileId: 'file-1',
mapping: { 'Full Name': 'name', Years: 'age' },
},
},
{ userId: 'user-1', workspaceId: 'workspace-1' }
)
expect(result.success).toBe(true)
const call = mockBatchInsertRows.mock.calls[0][0] as { rows: unknown[] }
expect(call.rows).toEqual([
{ name: 'Alice', age: 30 },
{ name: 'Bob', age: 40 },
])
})
it('rejects unknown modes', async () => {
const result = await userTableServerTool.execute(
{
operation: 'import_file',
args: { tableId: 'tbl_1', fileId: 'file-1', mode: 'merge' },
},
{ userId: 'user-1', workspaceId: 'workspace-1' }
)
expect(result.success).toBe(false)
expect(result.message).toMatch(/Invalid mode/)
expect(mockBatchInsertRows).not.toHaveBeenCalled()
})
it('refuses to import into an archived table', async () => {
mockGetTableById.mockResolvedValueOnce(buildTable({ archivedAt: new Date('2024-02-01') }))
const result = await userTableServerTool.execute(
{
operation: 'import_file',
args: { tableId: 'tbl_1', fileId: 'file-1' },
},
{ userId: 'user-1', workspaceId: 'workspace-1' }
)
expect(result.success).toBe(false)
expect(result.message).toMatch(/archived/i)
})
it('refuses to import when the table belongs to a different workspace', async () => {
mockGetTableById.mockResolvedValueOnce(buildTable({ workspaceId: 'workspace-other' }))
const result = await userTableServerTool.execute(
{
operation: 'import_file',
args: { tableId: 'tbl_1', fileId: 'file-1' },
},
{ userId: 'user-1', workspaceId: 'workspace-1' }
)
expect(result.success).toBe(false)
expect(result.message).toMatch(/not found/i)
expect(mockBatchInsertRows).not.toHaveBeenCalled()
})
it('reports missing required columns instead of inserting', async () => {
mockDownloadWorkspaceFile.mockResolvedValueOnce(Buffer.from('age\n30'))
const result = await userTableServerTool.execute(
{
operation: 'import_file',
args: { tableId: 'tbl_1', fileId: 'file-1' },
},
{ userId: 'user-1', workspaceId: 'workspace-1' }
)
expect(result.success).toBe(false)
expect(result.message).toMatch(/missing required columns/i)
expect(mockBatchInsertRows).not.toHaveBeenCalled()
})
})

View File

@@ -7,6 +7,17 @@ import {
} from '@/lib/copilot/tools/server/base-tool'
import { generateId } from '@/lib/core/utils/uuid'
import { COLUMN_TYPES } from '@/lib/table/constants'
import {
buildAutoMapping,
CSV_MAX_BATCH_SIZE,
type CsvHeaderMapping,
CsvImportValidationError,
coerceRowsForTable,
inferSchemaFromCsv,
parseCsvBuffer,
sanitizeName,
validateMapping,
} from '@/lib/table/csv-import'
import {
addTableColumn,
batchInsertRows,
@@ -24,12 +35,13 @@ import {
queryRows,
renameColumn,
renameTable,
replaceTableRows,
updateColumnConstraints,
updateColumnType,
updateRow,
updateRowsByFilter,
} from '@/lib/table/service'
import type { ColumnDefinition, RowData, TableDefinition } from '@/lib/table/types'
import type { RowData, TableDefinition } from '@/lib/table/types'
import {
downloadWorkspaceFile,
resolveWorkspaceFileReference,
@@ -48,22 +60,27 @@ type UserTableResult = {
data?: any
}
const MAX_BATCH_SIZE = 1000
const SCHEMA_SAMPLE_SIZE = 100
const MAX_BATCH_SIZE = CSV_MAX_BATCH_SIZE
type ColumnType = 'string' | 'number' | 'boolean' | 'date' | 'json'
function sanitizeColumnName(raw: string): string {
let name = raw
.trim()
.replace(/[^a-zA-Z0-9_]/g, '_')
.replace(/_+/g, '_')
.replace(/^_|_$/g, '')
if (!name || /^\d/.test(name)) name = `col_${name}`
return name
async function resolveWorkspaceFile(
fileReference: string,
workspaceId: string
): Promise<{ buffer: Buffer; name: string; type: string }> {
const record = await resolveWorkspaceFileReference(workspaceId, fileReference)
if (!record) {
throw new Error(
`File not found: "${fileReference}". Use glob("files/by-id/*/meta.json") to list canonical file IDs.`
)
}
const buffer = await downloadWorkspaceFile(record)
return { buffer, name: record.name, type: record.type }
}
function sanitizeHeaders(
/**
* Sanitizes raw JSON headers/rows so they conform to the same rules as CSV
* imports (so `inferSchemaFromCsv` and friends can be reused).
*/
function sanitizeJsonHeaders(
headers: string[],
rows: Record<string, unknown>[]
): { headers: string[]; rows: Record<string, unknown>[] } {
@@ -71,7 +88,7 @@ function sanitizeHeaders(
const seen = new Set<string>()
for (const raw of headers) {
let safe = sanitizeColumnName(raw)
let safe = sanitizeName(raw)
while (seen.has(safe)) safe = `${safe}_`
seen.add(safe)
renamed.set(raw, safe)
@@ -92,35 +109,6 @@ function sanitizeHeaders(
}
}
async function resolveWorkspaceFile(
fileReference: string,
workspaceId: string
): Promise<{ buffer: Buffer; name: string; type: string }> {
const record = await resolveWorkspaceFileReference(workspaceId, fileReference)
if (!record) {
throw new Error(
`File not found: "${fileReference}". Use glob("files/by-id/*/meta.json") to list canonical file IDs.`
)
}
const buffer = await downloadWorkspaceFile(record)
return { buffer, name: record.name, type: record.type }
}
function parseFileRows(
buffer: Buffer,
fileName: string,
contentType: string
): Promise<{ headers: string[]; rows: Record<string, unknown>[] }> {
const ext = fileName.split('.').pop()?.toLowerCase()
if (ext === 'json' || contentType === 'application/json') {
return parseJsonRows(buffer)
}
if (ext === 'csv' || ext === 'tsv' || contentType === 'text/csv') {
return parseCsvRows(buffer)
}
throw new Error(`Unsupported file format: "${ext}". Supported: csv, tsv, json`)
}
async function parseJsonRows(
buffer: Buffer
): Promise<{ headers: string[]; rows: Record<string, unknown>[] }> {
@@ -138,98 +126,23 @@ async function parseJsonRows(
}
for (const key of Object.keys(row)) headerSet.add(key)
}
return sanitizeHeaders([...headerSet], parsed)
return sanitizeJsonHeaders([...headerSet], parsed)
}
async function parseCsvRows(
buffer: Buffer
async function parseFileRows(
buffer: Buffer,
fileName: string,
contentType: string
): Promise<{ headers: string[]; rows: Record<string, unknown>[] }> {
const { parse } = await import('csv-parse/sync')
const parsed = parse(buffer.toString('utf-8'), {
columns: true,
skip_empty_lines: true,
trim: true,
relax_column_count: true,
relax_quotes: true,
skip_records_with_error: true,
cast: false,
}) as Record<string, unknown>[]
if (parsed.length === 0) {
throw new Error('CSV file has no data rows')
const ext = fileName.split('.').pop()?.toLowerCase()
if (ext === 'json' || contentType === 'application/json') {
return parseJsonRows(buffer)
}
const headers = Object.keys(parsed[0])
if (headers.length === 0) {
throw new Error('CSV file has no headers')
if (ext === 'csv' || ext === 'tsv' || contentType === 'text/csv') {
const delimiter = ext === 'tsv' ? '\t' : ','
return parseCsvBuffer(buffer, delimiter)
}
return sanitizeHeaders(headers, parsed)
}
function inferColumnType(values: unknown[]): ColumnType {
const nonEmpty = values.filter((v) => v !== null && v !== undefined && v !== '')
if (nonEmpty.length === 0) return 'string'
const allNumber = nonEmpty.every((v) => {
const n = Number(v)
return !Number.isNaN(n) && String(v).trim() !== ''
})
if (allNumber) return 'number'
const allBoolean = nonEmpty.every((v) => {
const s = String(v).toLowerCase()
return s === 'true' || s === 'false'
})
if (allBoolean) return 'boolean'
const isoDatePattern = /^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}(:\d{2})?)?/
const allDate = nonEmpty.every((v) => {
const s = String(v)
return isoDatePattern.test(s) && !Number.isNaN(Date.parse(s))
})
if (allDate) return 'date'
return 'string'
}
function inferSchema(headers: string[], rows: Record<string, unknown>[]): ColumnDefinition[] {
const sample = rows.slice(0, SCHEMA_SAMPLE_SIZE)
return headers.map((name) => ({
name,
type: inferColumnType(sample.map((r) => r[name])),
}))
}
function coerceValue(value: unknown, colType: ColumnType): string | number | boolean | null {
if (value === null || value === undefined || value === '') return null
switch (colType) {
case 'number': {
const n = Number(value)
return Number.isNaN(n) ? null : n
}
case 'boolean': {
const s = String(value).toLowerCase()
return s === 'true'
}
case 'date':
return new Date(String(value)).toISOString()
default:
return String(value)
}
}
function coerceRows(
rows: Record<string, unknown>[],
columns: ColumnDefinition[],
columnMap: Map<string, ColumnDefinition>
): RowData[] {
return rows.map((row) => {
const coerced: RowData = {}
for (const col of columns) {
if (row[col.name] !== undefined) {
coerced[col.name] = coerceValue(row[col.name], col.type as ColumnType)
}
}
return coerced
})
throw new Error(`Unsupported file format: "${ext}". Supported: csv, tsv, json`)
}
async function batchInsertAll(
@@ -240,11 +153,16 @@ async function batchInsertAll(
context?: ServerToolContext
): Promise<number> {
let inserted = 0
const userId = context?.userId
for (let i = 0; i < rows.length; i += MAX_BATCH_SIZE) {
assertServerToolNotAborted(context, 'Request aborted before table mutation could be applied.')
const batch = rows.slice(i, i + MAX_BATCH_SIZE)
const requestId = generateId().slice(0, 8)
const result = await batchInsertRows({ tableId, rows: batch, workspaceId }, table, requestId)
const result = await batchInsertRows(
{ tableId, rows: batch, workspaceId, userId },
table,
requestId
)
inserted += result.length
}
return inserted
@@ -384,7 +302,7 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
const requestId = generateId().slice(0, 8)
assertNotAborted()
const row = await insertRow(
{ tableId: args.tableId, data: args.data, workspaceId },
{ tableId: args.tableId, data: args.data, workspaceId, userId: context.userId },
table,
requestId
)
@@ -415,7 +333,7 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
const requestId = generateId().slice(0, 8)
assertNotAborted()
const rows = await batchInsertRows(
{ tableId: args.tableId, rows: args.rows, workspaceId },
{ tableId: args.tableId, rows: args.rows, workspaceId, userId: context.userId },
table,
requestId
)
@@ -724,7 +642,7 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
return { success: false, message: 'File contains no data rows' }
}
const columns = inferSchema(headers, rows)
const { columns, headerToColumn } = inferSchemaFromCsv(headers, rows)
const tableName = args.name || file.name.replace(/\.[^.]+$/, '')
const requestId = generateId().slice(0, 8)
assertNotAborted()
@@ -739,8 +657,7 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
requestId
)
const columnMap = new Map(columns.map((c) => [c.name, c]))
const coerced = coerceRows(rows, columns, columnMap)
const coerced = coerceRowsForTable(rows, { columns }, headerToColumn)
const inserted = await batchInsertAll(table.id, coerced, table, workspaceId, context)
logger.info('Table created from file', {
@@ -769,6 +686,10 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
const filePath = (args as Record<string, unknown>).filePath as string | undefined
const tableId = (args as Record<string, unknown>).tableId as string | undefined
const fileReference = fileId || filePath
const rawMode = (args as Record<string, unknown>).mode as string | undefined
const rawMapping = (args as Record<string, unknown>).mapping as
| CsvHeaderMapping
| undefined
if (!fileReference) {
return {
success: false,
@@ -782,11 +703,21 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
if (!workspaceId) {
return { success: false, message: 'Workspace ID is required' }
}
if (rawMode && rawMode !== 'append' && rawMode !== 'replace') {
return {
success: false,
message: `Invalid mode "${rawMode}". Must be "append" or "replace".`,
}
}
const mode: 'append' | 'replace' = rawMode === 'replace' ? 'replace' : 'append'
const table = await getTableById(tableId)
if (!table) {
if (!table || table.workspaceId !== workspaceId) {
return { success: false, message: `Table not found: ${tableId}` }
}
if (table.archivedAt) {
return { success: false, message: `Table is archived: ${tableId}` }
}
const file = await resolveWorkspaceFile(fileReference, workspaceId)
const { headers, rows } = await parseFileRows(file.buffer, file.name, file.type)
@@ -794,47 +725,86 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
return { success: false, message: 'File contains no data rows' }
}
      const tableColumns = table.schema.columns as ColumnDefinition[]
      const tableColNames = new Set(tableColumns.map((c) => c.name))
      const mappedHeaders = headers.filter((h) => tableColNames.has(h))
      if (mappedHeaders.length === 0) {
        return {
          success: false,
          message: `No matching columns between file (${headers.join(', ')}) and table (${tableColumns.map((c) => c.name).join(', ')})`,
        }
      }
      const requiredMissing = tableColumns
        .filter((c) => c.required && !headers.includes(c.name))
        .map((c) => c.name)
      if (requiredMissing.length > 0) {
        return {
          success: false,
          message: `File is missing required columns: ${requiredMissing.join(', ')}`,
        }
      }
      const columnMap = new Map(tableColumns.map((c) => [c.name, c]))
      const matchedColumns = tableColumns.filter((c) => headers.includes(c.name))
      const coerced = coerceRows(rows, matchedColumns, columnMap)
      const mapping: CsvHeaderMapping = rawMapping ?? buildAutoMapping(headers, table.schema)
      let validation: ReturnType<typeof validateMapping>
      try {
        validation = validateMapping({
          csvHeaders: headers,
          mapping,
          tableSchema: table.schema,
        })
      } catch (err) {
        if (err instanceof CsvImportValidationError) {
          return { success: false, message: err.message }
        }
        throw err
      }
      if (validation.mappedHeaders.length === 0) {
        return {
          success: false,
          message: `No matching columns between file (${headers.join(', ')}) and table (${table.schema.columns.map((c) => c.name).join(', ')})`,
        }
      }
      const coerced = coerceRowsForTable(rows, table.schema, validation.effectiveMap)
      if (mode === 'replace') {
        assertNotAborted()
        const requestId = generateId().slice(0, 8)
        const result = await replaceTableRows(
          { tableId: table.id, rows: coerced, workspaceId, userId: context.userId },
          table,
          requestId
        )
        logger.info('Rows replaced from file', {
          tableId: table.id,
          fileName: file.name,
          mode,
          matchedColumns: validation.mappedHeaders.length,
          deleted: result.deletedCount,
          inserted: result.insertedCount,
          userId: context.userId,
        })
        return {
          success: true,
          message: `Replaced rows in "${table.name}" from "${file.name}": deleted ${result.deletedCount}, inserted ${result.insertedCount}`,
          data: {
            tableId: table.id,
            tableName: table.name,
            mode,
            matchedColumns: validation.mappedHeaders,
            skippedColumns: validation.skippedHeaders,
            deletedCount: result.deletedCount,
            insertedCount: result.insertedCount,
            sourceFile: file.name,
          },
        }
      }
const inserted = await batchInsertAll(table.id, coerced, table, workspaceId, context)
logger.info('Rows imported from file', {
tableId: table.id,
fileName: file.name,
matchedColumns: mappedHeaders.length,
mode,
matchedColumns: validation.mappedHeaders.length,
rows: inserted,
userId: context.userId,
})
return {
success: true,
message: `Imported ${inserted} rows into "${table.name}" from "${file.name}" (${mappedHeaders.length} columns matched)`,
message: `Imported ${inserted} rows into "${table.name}" from "${file.name}" (${validation.mappedHeaders.length} columns matched)`,
data: {
tableId: table.id,
tableName: table.name,
matchedColumns: mappedHeaders,
skippedColumns: headers.filter((h) => !tableColNames.has(h)),
mode,
matchedColumns: validation.mappedHeaders,
skippedColumns: validation.skippedHeaders,
rowCount: inserted,
sourceFile: file.name,
},

View File

@@ -22,6 +22,7 @@ export interface CSPDirectives {
'media-src'?: string[]
'font-src'?: string[]
'connect-src'?: string[]
'worker-src'?: string[]
'frame-src'?: string[]
'frame-ancestors'?: string[]
'form-action'?: string[]
@@ -83,6 +84,8 @@ const STATIC_CONNECT_SRC = [
'https://api.github.com',
'https://github.com/*',
'https://challenges.cloudflare.com',
...(isReactGrabEnabled ? ['https://www.react-grab.com'] : []),
...(isDev ? ['ws://localhost:4722'] : []),
...(isHosted
? [
'https://www.googletagmanager.com',
@@ -90,6 +93,7 @@ const STATIC_CONNECT_SRC = [
'https://*.analytics.google.com',
'https://analytics.google.com',
'https://www.google.com',
'https://analytics.ahrefs.com',
]
: []),
] as const
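// Hedged sketch (not from this commit): directives of this shape typically
// serialize into a single header value. `renderCSP` is a hypothetical helper
// shown for illustration only, not part of this module's API.
//
//   function renderCSP(directives: CSPDirectives): string {
//     return Object.entries(directives)
//       .filter(([, sources]) => Array.isArray(sources) && sources.length > 0)
//       .map(([name, sources]) => `${name} ${(sources as string[]).join(' ')}`)
//       .join('; ')
//   }
//
//   // e.g. "worker-src 'self' blob:; font-src 'self' https://fonts.gstatic.com"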
@@ -146,6 +150,7 @@ export const buildTimeCSPDirectives: CSPDirectives = {
],
'media-src': ["'self'", 'blob:'],
'worker-src': ["'self'", 'blob:'],
'font-src': ["'self'", 'https://fonts.gstatic.com'],
'connect-src': [

View File

@@ -1970,4 +1970,116 @@ describe('nested subflow grouping via parentIterations', () => {
expect(nestedP2).toBeDefined()
expect(nestedP2!.children).toHaveLength(1)
})
it.concurrent(
'uses the user-configured loop name for the container span when a success BlockLog is present',
() => {
const result: ExecutionResult = {
success: true,
output: { content: 'done' },
metadata: { duration: 3000, startTime: '2024-01-01T10:00:00.000Z' },
logs: [
{
blockId: 'loop-sbj',
blockName: 'LoopGroupA (SBJ)',
blockType: 'loop',
startedAt: '2024-01-01T10:00:00.000Z',
endedAt: '2024-01-01T10:00:03.000Z',
durationMs: 3000,
success: true,
output: { results: [[{ value: 1 }], [{ value: 2 }]] },
executionOrder: 10,
},
{
blockId: 'api-1',
blockName: 'Send (iteration 0)',
blockType: 'api',
startedAt: '2024-01-01T10:00:00.000Z',
endedAt: '2024-01-01T10:00:01.000Z',
durationMs: 1000,
success: true,
loopId: 'loop-sbj',
iterationIndex: 0,
executionOrder: 1,
},
{
blockId: 'api-1',
blockName: 'Send (iteration 1)',
blockType: 'api',
startedAt: '2024-01-01T10:00:01.000Z',
endedAt: '2024-01-01T10:00:02.000Z',
durationMs: 1000,
success: true,
loopId: 'loop-sbj',
iterationIndex: 1,
executionOrder: 2,
},
],
}
const { traceSpans } = buildTraceSpans(result)
const workflow = traceSpans[0]
const loop = workflow.children!.find((s) => s.type === 'loop')
expect(loop).toBeDefined()
expect(loop!.name).toBe('LoopGroupA (SBJ)')
expect(loop!.children).toHaveLength(2)
}
)
it.concurrent(
'uses the user-configured parallel name for the container span when a success BlockLog is present',
() => {
const result: ExecutionResult = {
success: true,
output: { content: 'done' },
metadata: { duration: 2000, startTime: '2024-01-01T10:00:00.000Z' },
logs: [
{
blockId: 'parallel-a',
blockName: 'FanOutCalls',
blockType: 'parallel',
startedAt: '2024-01-01T10:00:00.000Z',
endedAt: '2024-01-01T10:00:02.000Z',
durationMs: 2000,
success: true,
output: { results: [[{ v: 1 }], [{ v: 2 }]] },
executionOrder: 10,
},
{
blockId: 'api-1',
blockName: 'Call (iteration 0)',
blockType: 'api',
startedAt: '2024-01-01T10:00:00.000Z',
endedAt: '2024-01-01T10:00:01.000Z',
durationMs: 1000,
success: true,
parallelId: 'parallel-a',
iterationIndex: 0,
executionOrder: 1,
},
{
blockId: 'api-1',
blockName: 'Call (iteration 1)',
blockType: 'api',
startedAt: '2024-01-01T10:00:01.000Z',
endedAt: '2024-01-01T10:00:02.000Z',
durationMs: 1000,
success: true,
parallelId: 'parallel-a',
iterationIndex: 1,
executionOrder: 2,
},
],
}
const { traceSpans } = buildTraceSpans(result)
const workflow = traceSpans[0]
const parallel = workflow.children!.find((s) => s.type === 'parallel')
expect(parallel).toBeDefined()
expect(parallel!.name).toBe('FanOutCalls')
expect(parallel!.children).toHaveLength(2)
}
)
})

View File

@@ -744,8 +744,10 @@ function buildContainerChildren(
* chain determines the top-level container. Iteration spans are peeled one level at a
* time and recursed.
*
* Sentinel blocks (parallel/loop containers) do NOT produce BlockLogs, so there are no
* sentinel spans to anchor grouping. Containers are synthesized from the iteration data.
* Container BlockLogs (parallel/loop) are produced on skip (empty collection), error, and
* successful completion. When present, they supply the user-configured container name via
* `resolveContainerName`; otherwise the container is synthesized from iteration data with a
* counter-based fallback name.
*/
function groupIterationBlocksRecursive(
spans: TraceSpan[],

View File

@@ -0,0 +1,277 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
import {
buildAutoMapping,
CsvImportValidationError,
coerceRowsForTable,
coerceValue,
inferColumnType,
inferSchemaFromCsv,
parseCsvBuffer,
sanitizeName,
validateMapping,
} from '@/lib/table/csv-import'
import type { TableSchema } from '@/lib/table/types'
describe('csv-import', () => {
describe('parseCsvBuffer', () => {
it('parses a CSV string and extracts headers', async () => {
const { headers, rows } = await parseCsvBuffer('a,b\n1,2\n3,4')
expect(headers).toEqual(['a', 'b'])
expect(rows).toEqual([
{ a: '1', b: '2' },
{ a: '3', b: '4' },
])
})
it('strips a UTF-8 BOM from the first header', async () => {
const text = `\uFEFFname,age\nAlice,30`
const { headers } = await parseCsvBuffer(text)
expect(headers).toEqual(['name', 'age'])
})
it('parses a Uint8Array input in browser-like environments', async () => {
const bytes = new TextEncoder().encode('a,b\n1,2')
const { headers, rows } = await parseCsvBuffer(bytes)
expect(headers).toEqual(['a', 'b'])
expect(rows).toHaveLength(1)
})
it('parses TSV when delimiter is tab', async () => {
const { headers, rows } = await parseCsvBuffer('a\tb\n1\t2', '\t')
expect(headers).toEqual(['a', 'b'])
expect(rows).toEqual([{ a: '1', b: '2' }])
})
it('throws when the file has no data rows', async () => {
await expect(parseCsvBuffer('a,b')).rejects.toThrow(/no data rows/i)
})
})
describe('inferColumnType', () => {
it('returns "string" for empty samples', () => {
expect(inferColumnType([])).toBe('string')
expect(inferColumnType([null, undefined, ''])).toBe('string')
})
it('detects numeric columns', () => {
expect(inferColumnType(['1', '2', '3.14'])).toBe('number')
})
it('detects boolean columns (case-insensitive)', () => {
expect(inferColumnType(['true', 'FALSE', 'True'])).toBe('boolean')
})
it('detects ISO date columns', () => {
expect(inferColumnType(['2024-01-01', '2024-02-01T12:00:00'])).toBe('date')
})
it('falls back to "string"', () => {
expect(inferColumnType(['abc', 'def'])).toBe('string')
expect(inferColumnType(['1', 'abc'])).toBe('string')
})
})
describe('sanitizeName', () => {
it('strips unsupported chars and collapses underscores', () => {
expect(sanitizeName('Hello World!')).toBe('Hello_World')
expect(sanitizeName(' foo-bar ')).toBe('foo_bar')
})
it('prefixes names that start with a digit', () => {
expect(sanitizeName('123abc')).toBe('col_123abc')
})
it('fills in an empty name with the prefix', () => {
expect(sanitizeName('$$$')).toBe('col_')
})
})
describe('inferSchemaFromCsv', () => {
it('produces sanitized column names and inferred types', () => {
const { columns, headerToColumn } = inferSchemaFromCsv(
['First Name', 'Age', 'Active'],
[
{ 'First Name': 'Alice', Age: '30', Active: 'true' },
{ 'First Name': 'Bob', Age: '40', Active: 'false' },
]
)
expect(columns).toEqual([
{ name: 'First_Name', type: 'string' },
{ name: 'Age', type: 'number' },
{ name: 'Active', type: 'boolean' },
])
expect(headerToColumn.get('First Name')).toBe('First_Name')
expect(headerToColumn.get('Age')).toBe('Age')
})
it('disambiguates duplicate sanitized headers', () => {
const { columns } = inferSchemaFromCsv(
['a b', 'a-b', 'a.b'],
[{ 'a b': '1', 'a-b': '2', 'a.b': '3' }]
)
expect(columns.map((c) => c.name)).toEqual(['a_b', 'a_b_2', 'a_b_3'])
})
})
describe('coerceValue', () => {
it('returns null for empty values', () => {
expect(coerceValue(null, 'string')).toBeNull()
expect(coerceValue(undefined, 'number')).toBeNull()
expect(coerceValue('', 'boolean')).toBeNull()
})
it('coerces numbers', () => {
expect(coerceValue('42', 'number')).toBe(42)
expect(coerceValue('not a number', 'number')).toBeNull()
})
it('coerces booleans strictly', () => {
expect(coerceValue('true', 'boolean')).toBe(true)
expect(coerceValue('FALSE', 'boolean')).toBe(false)
expect(coerceValue('yes', 'boolean')).toBeNull()
})
it('coerces dates to ISO strings and falls back to the original string', () => {
expect(coerceValue('2024-01-01', 'date')).toBe(new Date('2024-01-01').toISOString())
expect(coerceValue('not-a-date', 'date')).toBe('not-a-date')
})
})
describe('buildAutoMapping', () => {
const schema: TableSchema = {
columns: [
{ name: 'First_Name', type: 'string' },
{ name: 'age', type: 'number' },
],
}
it('maps by exact sanitized name', () => {
const mapping = buildAutoMapping(['First_Name', 'age'], schema)
expect(mapping).toEqual({ First_Name: 'First_Name', age: 'age' })
})
it('falls back to a case/punctuation-insensitive match', () => {
const mapping = buildAutoMapping(['first name', 'AGE'], schema)
expect(mapping).toEqual({ 'first name': 'First_Name', AGE: 'age' })
})
it('returns null for headers without a match', () => {
const mapping = buildAutoMapping(['unmatched'], schema)
expect(mapping).toEqual({ unmatched: null })
})
})
describe('validateMapping', () => {
const schema: TableSchema = {
columns: [
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
],
}
it('accepts a valid mapping and lists skipped/unmapped', () => {
const result = validateMapping({
csvHeaders: ['name', 'age', 'extra'],
mapping: { name: 'name', age: 'age', extra: null },
tableSchema: schema,
})
expect(result.mappedHeaders).toEqual(['name', 'age'])
expect(result.skippedHeaders).toEqual(['extra'])
expect(result.unmappedColumns).toEqual([])
expect(result.effectiveMap.get('name')).toBe('name')
expect(result.effectiveMap.has('extra')).toBe(false)
})
it('throws when a required column is missing', () => {
expect(() =>
validateMapping({
csvHeaders: ['age'],
mapping: { age: 'age' },
tableSchema: schema,
})
).toThrow(CsvImportValidationError)
})
it('throws when a mapping targets a non-existent column', () => {
expect(() =>
validateMapping({
csvHeaders: ['name'],
mapping: { name: 'nonexistent' },
tableSchema: schema,
})
).toThrow(/do not exist on the table/)
})
it('throws when multiple headers map to the same column', () => {
expect(() =>
validateMapping({
csvHeaders: ['a', 'b'],
mapping: { a: 'name', b: 'name' },
tableSchema: schema,
})
).toThrow(/same column/)
})
it('throws when mapping references an unknown CSV header', () => {
expect(() =>
validateMapping({
csvHeaders: ['name'],
mapping: { name: 'name', bogus: 'age' },
tableSchema: schema,
})
).toThrow(/unknown CSV headers/)
})
it('throws when a mapping value is neither a string nor null', () => {
expect(() =>
validateMapping({
csvHeaders: ['name'],
mapping: { name: 42 as unknown as string },
tableSchema: schema,
})
).toThrow(/Mapping values must be/)
})
})
describe('coerceRowsForTable', () => {
const schema: TableSchema = {
columns: [
{ name: 'name', type: 'string' },
{ name: 'age', type: 'number' },
{ name: 'active', type: 'boolean' },
],
}
it('applies the table column type using the effective mapping', () => {
const rows = coerceRowsForTable(
[
{ Name: 'Alice', Age: '30', Active: 'true' },
{ Name: 'Bob', Age: 'oops', Active: 'false' },
],
schema,
new Map([
['Name', 'name'],
['Age', 'age'],
['Active', 'active'],
])
)
expect(rows).toEqual([
{ name: 'Alice', age: 30, active: true },
{ name: 'Bob', age: null, active: false },
])
})
it('drops CSV headers absent from the mapping', () => {
const rows = coerceRowsForTable(
[{ name: 'Alice', extra: 'keep me out' }],
schema,
new Map([['name', 'name']])
)
expect(rows).toEqual([{ name: 'Alice' }])
})
})
})

View File

@@ -0,0 +1,391 @@
/**
* Shared CSV import helpers for user-defined tables.
*
* Used by:
* - `POST /api/table/import-csv` (create new table from CSV)
* - `POST /api/table/[tableId]/import-csv` (append/replace into existing table)
* - Copilot `user-table` tool (`create_from_file`, `import_file`)
*
* Keeping a single implementation avoids drift between HTTP and agent code paths.
*/
import type { ColumnDefinition, RowData, TableSchema } from '@/lib/table/types'
/** Narrower union than `COLUMN_TYPES`, used internally for coercion. */
export type CsvColumnType = 'string' | 'number' | 'boolean' | 'date' | 'json'
/** Number of CSV rows sampled when inferring column types for a new table. */
export const CSV_SCHEMA_SAMPLE_SIZE = 100
/** Maximum rows inserted per `batchInsertRows` call during import. */
export const CSV_MAX_BATCH_SIZE = 1000
/** Maximum CSV/TSV file size accepted by import routes (50 MB). */
export const CSV_MAX_FILE_SIZE_BYTES = 50 * 1024 * 1024
/**
* Error thrown when the user-supplied mapping or CSV does not line up with the
* target table. Callers should translate this into a 400 response.
*/
export class CsvImportValidationError extends Error {
readonly code = 'CSV_IMPORT_VALIDATION' as const
readonly details: {
missingRequired?: string[]
duplicateTargets?: string[]
unknownColumns?: string[]
unknownHeaders?: string[]
}
constructor(
message: string,
details: {
missingRequired?: string[]
duplicateTargets?: string[]
unknownColumns?: string[]
unknownHeaders?: string[]
} = {}
) {
super(message)
this.name = 'CsvImportValidationError'
this.details = details
}
}
/**
* Parses a CSV/TSV payload using `csv-parse/sync`. Accepts a Node `Buffer`,
* browser-friendly `Uint8Array`, or already-decoded string. Strips a leading
* UTF-8 BOM so headers are not silently prefixed with `\uFEFF`.
*/
export async function parseCsvBuffer(
input: Buffer | Uint8Array | string,
delimiter = ','
): Promise<{ headers: string[]; rows: Record<string, unknown>[] }> {
const { parse } = await import('csv-parse/sync')
let text: string
if (typeof input === 'string') {
text = input
} else if (typeof Buffer !== 'undefined' && Buffer.isBuffer(input)) {
text = input.toString('utf-8')
} else {
text = new TextDecoder('utf-8').decode(input as Uint8Array)
}
text = text.replace(/^\uFEFF/, '')
const parsed = parse(text, {
columns: true,
skip_empty_lines: true,
trim: true,
relax_column_count: true,
relax_quotes: true,
skip_records_with_error: true,
cast: false,
delimiter,
}) as Record<string, unknown>[]
if (parsed.length === 0) {
throw new Error('CSV file has no data rows')
}
const headers = Object.keys(parsed[0])
if (headers.length === 0) {
throw new Error('CSV file has no headers')
}
return { headers, rows: parsed }
}
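// Usage sketch (illustrative, not part of this commit): the three accepted
// input shapes normalize to the same decoded text before parsing.
//
//   await parseCsvBuffer('a,b\n1,2')                               // string
//   await parseCsvBuffer(Buffer.from('a\tb\n1\t2', 'utf-8'), '\t') // Buffer, TSV
//   await parseCsvBuffer(new TextEncoder().encode('a,b\n1,2'))     // Uint8Array
//
// Each resolves to { headers, rows } with string cell values (cast: false),
// or rejects when the payload has no data rows or no headers.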
/**
 * Infers a column type from a sample of non-empty values. Checks run in a
 * fixed order (number, then boolean, then ISO date), falling back to string
 * when no single type covers every sampled value. JSON is never inferred
 * automatically.
 */
export function inferColumnType(values: unknown[]): Exclude<CsvColumnType, 'json'> {
const nonEmpty = values.filter((v) => v !== null && v !== undefined && v !== '')
if (nonEmpty.length === 0) return 'string'
const allNumber = nonEmpty.every((v) => {
const n = Number(v)
return !Number.isNaN(n) && String(v).trim() !== ''
})
if (allNumber) return 'number'
const allBoolean = nonEmpty.every((v) => {
const s = String(v).toLowerCase()
return s === 'true' || s === 'false'
})
if (allBoolean) return 'boolean'
const isoDatePattern = /^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}(:\d{2})?)?/
const allDate = nonEmpty.every((v) => {
const s = String(v)
return isoDatePattern.test(s) && !Number.isNaN(Date.parse(s))
})
if (allDate) return 'date'
return 'string'
}
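// Inference sketch (illustrative samples): each line shows the type inferred
// for a column whose non-empty sample values are given on the left.
//
//   ['1', '2.5']                 -> 'number'
//   ['true', 'FALSE']            -> 'boolean'
//   ['2024-01-01', '2024-02-01'] -> 'date'
//   ['1', 'abc']                 -> 'string'  (mixed samples fall back)
//   []                           -> 'string'  (empty samples fall back)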
/**
* Sanitizes a raw header into a valid column/table name. Strips disallowed
* characters, collapses runs of underscores, and ensures the first character
* is a letter or underscore (prefixing with `fallbackPrefix` otherwise).
*/
export function sanitizeName(raw: string, fallbackPrefix = 'col'): string {
let name = raw
.trim()
.replace(/[^a-zA-Z0-9_]/g, '_')
.replace(/_+/g, '_')
.replace(/^_+|_+$/g, '')
if (!name || /^\d/.test(name)) {
name = `${fallbackPrefix}_${name}`
}
return name
}
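// Sanitization sketch (illustrative inputs):
//
//   sanitizeName('Order #')     -> 'Order'           (symbols stripped, trailing _ trimmed)
//   sanitizeName('2024 totals') -> 'col_2024_totals' (leading digit gets the prefix)
//   sanitizeName('%%%', 'tbl')  -> 'tbl_'            (nothing survives; prefix remains)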
/**
* Returns column definitions inferred from CSV headers + sample rows. Duplicate
* sanitized names are suffixed with `_2`, `_3`, etc. Also returns the header ->
* column-name mapping used when coercing row values.
*/
export function inferSchemaFromCsv(
headers: string[],
rows: Record<string, unknown>[]
): { columns: ColumnDefinition[]; headerToColumn: Map<string, string> } {
const sample = rows.slice(0, CSV_SCHEMA_SAMPLE_SIZE)
const seen = new Set<string>()
const headerToColumn = new Map<string, string>()
const columns = headers.map((header) => {
const base = sanitizeName(header)
let colName = base
let suffix = 2
while (seen.has(colName.toLowerCase())) {
colName = `${base}_${suffix}`
suffix++
}
seen.add(colName.toLowerCase())
headerToColumn.set(header, colName)
return {
name: colName,
type: inferColumnType(sample.map((r) => r[header])),
} satisfies ColumnDefinition
})
return { columns, headerToColumn }
}
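// Collision sketch (illustrative): different raw headers can sanitize to the
// same name; the case-insensitive `seen` set drives the numeric suffixes.
//
//   inferSchemaFromCsv(['a b', 'A.B'], [{ 'a b': '1', 'A.B': '2' }])
//     -> columns:        [{ name: 'a_b', type: 'number' }, { name: 'A_B_2', type: 'number' }]
//     -> headerToColumn: Map { 'a b' => 'a_b', 'A.B' => 'A_B_2' }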
/**
* Coerces a single value to the requested column type. Returns `null` for
* empty inputs or values that cannot be parsed (numbers/booleans). Dates fall
* back to the original string when unparseable so that schema validation can
* reject it with context rather than silently inserting `null`.
*/
export function coerceValue(
value: unknown,
colType: CsvColumnType
): string | number | boolean | null | Record<string, unknown> | unknown[] {
if (value === null || value === undefined || value === '') return null
switch (colType) {
case 'number': {
const n = Number(value)
return Number.isNaN(n) ? null : n
}
case 'boolean': {
const s = String(value).toLowerCase()
if (s === 'true') return true
if (s === 'false') return false
return null
}
case 'date': {
const d = new Date(String(value))
return Number.isNaN(d.getTime()) ? String(value) : d.toISOString()
}
case 'json': {
if (typeof value === 'object') return value as Record<string, unknown> | unknown[]
try {
return JSON.parse(String(value))
} catch {
return String(value)
}
}
default:
return String(value)
}
}
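// Coercion sketch (illustrative inputs):
//
//   coerceValue('42', 'number')       -> 42
//   coerceValue('oops', 'number')     -> null           (unparseable number)
//   coerceValue('YES', 'boolean')     -> null           (only 'true'/'false' coerce)
//   coerceValue('2024-01-01', 'date') -> '2024-01-01T00:00:00.000Z'
//   coerceValue('not-a-date', 'date') -> 'not-a-date'   (kept so validation can reject)
//   coerceValue('{"a":1}', 'json')    -> { a: 1 }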
/**
* Mapping from raw CSV header to target column name, with `null` indicating
* "do not import this column".
*/
export type CsvHeaderMapping = Record<string, string | null>
export interface CsvMappingValidationResult {
/** CSV headers that landed on a real table column. */
mappedHeaders: string[]
/** CSV headers skipped because their mapping target is null or absent. */
skippedHeaders: string[]
/** Target column names that ended up unmapped (resolved from the mapping). */
unmappedColumns: string[]
/** Effective header -> column map (after dropping unknown / null targets). */
effectiveMap: Map<string, string>
}
/**
* Validates a user-supplied mapping against the target table schema. Rejects
* unknown target columns, duplicate targets, and required table columns that
* are not covered by the CSV. Returns the normalized header -> column map.
*/
export function validateMapping(params: {
csvHeaders: string[]
mapping: CsvHeaderMapping
tableSchema: TableSchema
}): CsvMappingValidationResult {
const { csvHeaders, mapping, tableSchema } = params
const columnByName = new Map(tableSchema.columns.map((c) => [c.name, c]))
const unknownHeaders = Object.keys(mapping).filter((h) => !csvHeaders.includes(h))
if (unknownHeaders.length > 0) {
throw new CsvImportValidationError(
`Mapping references unknown CSV headers: ${unknownHeaders.join(', ')}`,
{ unknownHeaders }
)
}
const invalidTargets = Object.entries(mapping).filter(
([, target]) => target !== null && typeof target !== 'string'
)
if (invalidTargets.length > 0) {
throw new CsvImportValidationError(
`Mapping values must be a column name (string) or null, got: ${invalidTargets
.map(([header]) => header)
.join(', ')}`
)
}
const targetsSeen = new Map<string, string[]>()
const unknownColumns: string[] = []
const effectiveMap = new Map<string, string>()
const skippedHeaders: string[] = []
for (const header of csvHeaders) {
const target = header in mapping ? mapping[header] : undefined
if (target === null || target === undefined) {
skippedHeaders.push(header)
continue
}
if (!columnByName.has(target)) {
unknownColumns.push(target)
continue
}
const existing = targetsSeen.get(target) ?? []
existing.push(header)
targetsSeen.set(target, existing)
effectiveMap.set(header, target)
}
if (unknownColumns.length > 0) {
throw new CsvImportValidationError(
`Mapping references columns that do not exist on the table: ${unknownColumns.join(', ')}`,
{ unknownColumns }
)
}
const duplicateTargets = [...targetsSeen.entries()]
.filter(([, headers]) => headers.length > 1)
.map(([col]) => col)
if (duplicateTargets.length > 0) {
throw new CsvImportValidationError(
`Multiple CSV headers map to the same column(s): ${duplicateTargets.join(', ')}`,
{ duplicateTargets }
)
}
const mappedTargets = new Set(effectiveMap.values())
const unmappedColumns = tableSchema.columns
.filter((c) => !mappedTargets.has(c.name))
.map((c) => c.name)
const missingRequired = tableSchema.columns
.filter((c) => c.required && !mappedTargets.has(c.name))
.map((c) => c.name)
if (missingRequired.length > 0) {
throw new CsvImportValidationError(
`CSV is missing required columns: ${missingRequired.join(', ')}`,
{ missingRequired }
)
}
return {
mappedHeaders: [...effectiveMap.keys()],
skippedHeaders,
unmappedColumns,
effectiveMap,
}
}
/**
* Builds an auto-mapping from CSV headers to table columns: prefers exact
* sanitized-name matches and falls back to a case- and punctuation-insensitive
* comparison. Unmapped headers are set to `null`.
*/
export function buildAutoMapping(csvHeaders: string[], tableSchema: TableSchema): CsvHeaderMapping {
const mapping: CsvHeaderMapping = {}
const columns = tableSchema.columns
const exactByName = new Map(columns.map((c) => [c.name, c.name]))
const loose = new Map<string, string>()
for (const col of columns) {
loose.set(col.name.toLowerCase().replace(/[^a-z0-9]/g, ''), col.name)
}
const usedTargets = new Set<string>()
for (const header of csvHeaders) {
const sanitized = sanitizeName(header)
const exact = exactByName.get(sanitized)
if (exact && !usedTargets.has(exact)) {
mapping[header] = exact
usedTargets.add(exact)
continue
}
const key = header.toLowerCase().replace(/[^a-z0-9]/g, '')
const fuzzy = loose.get(key)
if (fuzzy && !usedTargets.has(fuzzy)) {
mapping[header] = fuzzy
usedTargets.add(fuzzy)
continue
}
mapping[header] = null
}
return mapping
}
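// Composition sketch (assumptions: the schema and headers below are
// illustrative only; `sketchAutoImportMapping` is not exported by this module):
function sketchAutoImportMapping() {
  const tableSchema: TableSchema = {
    columns: [
      { name: 'name', type: 'string', required: true },
      { name: 'age', type: 'number' },
    ],
  }
  const csvHeaders = ['Name', 'AGE', 'notes']
  const mapping = buildAutoMapping(csvHeaders, tableSchema)
  // -> { Name: 'name', AGE: 'age', notes: null } via the fuzzy fallback
  return validateMapping({ csvHeaders, mapping, tableSchema })
  // -> mappedHeaders ['Name', 'AGE'], skippedHeaders ['notes'], unmappedColumns []
}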
/**
* Coerces parsed CSV rows into `RowData` objects keyed by target column name,
* applying the column types declared in `tableSchema`. Headers not present in
* `headerToColumn` are dropped. Missing table columns remain unset (schema
* validation decides whether that's acceptable).
*/
export function coerceRowsForTable(
rows: Record<string, unknown>[],
tableSchema: TableSchema,
headerToColumn: Map<string, string>
): RowData[] {
const typeByName = new Map(tableSchema.columns.map((c) => [c.name, c.type as CsvColumnType]))
return rows.map((row) => {
const coerced: RowData = {}
for (const [header, value] of Object.entries(row)) {
const colName = headerToColumn.get(header)
if (!colName) continue
const colType = typeByName.get(colName) ?? 'string'
coerced[colName] = coerceValue(value, colType) as RowData[string]
}
return coerced
})
}
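// End-to-end sketch (assumption: this mirrors how the import routes compose
// the helpers above; `sketchImportRows` is illustrative, not exported):
async function sketchImportRows(csv: string, tableSchema: TableSchema): Promise<RowData[]> {
  const { headers, rows } = await parseCsvBuffer(csv)
  const mapping = buildAutoMapping(headers, tableSchema)
  const { effectiveMap } = validateMapping({ csvHeaders: headers, mapping, tableSchema })
  return coerceRowsForTable(rows, tableSchema, effectiveMap)
}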

View File

@@ -7,6 +7,7 @@
export * from './billing'
export * from './constants'
export * from './csv-import'
export * from './llm'
export * from './query-builder'
export * from './service'

View File

@@ -30,6 +30,8 @@ import type {
QueryOptions,
QueryResult,
RenameColumnData,
ReplaceRowsData,
ReplaceRowsResult,
RowData,
TableDefinition,
TableMetadata,
@@ -776,6 +778,120 @@ export async function batchInsertRows(
}))
}
/**
* Replaces all rows in a table with a new set of rows. Deletes existing rows
* and inserts the provided rows inside a single transaction so the table is
* never observed in an empty intermediate state by other readers.
*
* Validates each row against the schema, enforces unique constraints within the
* new rows (existing rows are deleted, so DB-side checks are unnecessary), and
* enforces `maxRows` before the replace executes.
*
 * @param data - Replace payload (table and workspace IDs plus the rows to insert)
* @param table - Table definition
* @param requestId - Request ID for logging
* @returns Count of rows deleted and inserted
* @throws Error if validation fails or capacity exceeded
*/
export async function replaceTableRows(
data: ReplaceRowsData,
table: TableDefinition,
requestId: string
): Promise<ReplaceRowsResult> {
if (data.tableId !== table.id) {
throw new Error(`Table ID mismatch: ${data.tableId} vs ${table.id}`)
}
if (data.workspaceId !== table.workspaceId) {
throw new Error(`Workspace ID mismatch: ${data.workspaceId} does not own table ${data.tableId}`)
}
if (data.rows.length > table.maxRows) {
throw new Error(
`Cannot replace: ${data.rows.length} rows exceeds table row limit (${table.maxRows})`
)
}
for (let i = 0; i < data.rows.length; i++) {
const row = data.rows[i]
const sizeValidation = validateRowSize(row)
if (!sizeValidation.valid) {
throw new Error(`Row ${i + 1}: ${sizeValidation.errors.join(', ')}`)
}
const schemaValidation = validateRowAgainstSchema(row, table.schema)
if (!schemaValidation.valid) {
throw new Error(`Row ${i + 1}: ${schemaValidation.errors.join(', ')}`)
}
}
const uniqueColumns = getUniqueColumns(table.schema)
if (uniqueColumns.length > 0 && data.rows.length > 0) {
const seen = new Map<string, Map<string, number>>()
for (const col of uniqueColumns) {
seen.set(col.name, new Map())
}
for (let i = 0; i < data.rows.length; i++) {
const row = data.rows[i]
for (const col of uniqueColumns) {
const value = row[col.name]
if (value === null || value === undefined) continue
const normalized = typeof value === 'string' ? value.toLowerCase() : JSON.stringify(value)
const map = seen.get(col.name)!
if (map.has(normalized)) {
throw new Error(
`Row ${i + 1}: Column "${col.name}" must be unique. Value "${String(value)}" duplicates row ${map.get(normalized)! + 1} in batch`
)
}
map.set(normalized, i)
}
}
}
const now = new Date()
const result = await db.transaction(async (trx) => {
await trx.execute(
sql`SELECT 1 FROM user_table_definitions WHERE id = ${data.tableId} FOR UPDATE`
)
const deletedRows = await trx
.delete(userTableRows)
.where(eq(userTableRows.tableId, data.tableId))
.returning({ id: userTableRows.id })
let insertedCount = 0
if (data.rows.length > 0) {
const rowsToInsert = data.rows.map((rowData, i) => ({
id: `row_${generateId().replace(/-/g, '')}`,
tableId: data.tableId,
workspaceId: data.workspaceId,
data: rowData,
position: i,
createdAt: now,
updatedAt: now,
...(data.userId ? { createdBy: data.userId } : {}),
}))
const batchSize = TABLE_LIMITS.MAX_BATCH_INSERT_SIZE
for (let i = 0; i < rowsToInsert.length; i += batchSize) {
const chunk = rowsToInsert.slice(i, i + batchSize)
const inserted = await trx.insert(userTableRows).values(chunk).returning({
id: userTableRows.id,
})
insertedCount += inserted.length
}
}
return { deletedCount: deletedRows.length, insertedCount }
})
logger.info(
`[${requestId}] Replaced rows in table ${data.tableId}: deleted ${result.deletedCount}, inserted ${result.insertedCount}`
)
return result
}
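// Call-site sketch (assumption: mirrors how the copilot tool invokes this;
// the surrounding lookups are illustrative):
//
//   const table = await getTableById(tableId)
//   const result = await replaceTableRows(
//     { tableId: table.id, rows: coercedRows, workspaceId, userId },
//     table,
//     requestId
//   )
//   // result: { deletedCount, insertedCount }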
/**
* Upserts a row: updates an existing row if a match is found on the conflict target
* column, otherwise inserts a new row.

View File

@@ -241,6 +241,18 @@ export interface BulkDeleteByIdsResult {
missingRowIds: string[]
}
export interface ReplaceRowsData {
tableId: string
rows: RowData[]
workspaceId: string
userId?: string
}
export interface ReplaceRowsResult {
deletedCount: number
insertedCount: number
}
export interface RenameColumnData {
tableId: string
oldName: string