Compare commits

..

2 Commits

Author SHA1 Message Date
Theodore Li
498504c35b Fix import ordering 2026-04-04 16:49:09 -07:00
Theodore Li
c367da81fb feat(block): Add cloudwatch block (#3911)
* feat(block): add cloudwatch integration

* Fix bun lock

* Add logger, use execution timeout

* Switch metric dimensions to map style input

* Fix attribute names for dimension map

* Fix import styling

---------

Co-authored-by: Theodore Li <theo@sim.ai>
2026-04-04 16:35:36 -07:00
79 changed files with 733 additions and 16473 deletions

View File

@@ -5,7 +5,6 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { captureServerEvent } from '@/lib/posthog/server'
import { performDeleteFolder } from '@/lib/workflows/orchestration'
import { checkForCircularReference } from '@/lib/workflows/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -157,13 +156,6 @@ export async function DELETE(
return NextResponse.json({ error: result.error }, { status })
}
captureServerEvent(
session.user.id,
'folder_deleted',
{ workspace_id: existingFolder.workspaceId },
{ groups: { workspace: existingFolder.workspaceId } }
)
return NextResponse.json({
success: true,
deletedItems: result.deletedItems,

View File

@@ -6,7 +6,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { captureServerEvent } from '@/lib/posthog/server'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('FoldersAPI')
@@ -146,13 +145,6 @@ export async function POST(request: NextRequest) {
logger.info('Created new folder:', { id, name, workspaceId, parentId })
captureServerEvent(
session.user.id,
'folder_created',
{ workspace_id: workspaceId },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: session.user.id,

View File

@@ -13,7 +13,6 @@ import { z } from 'zod'
import { decryptApiKey } from '@/lib/api-key/crypto'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { hasLiveSyncAccess } from '@/lib/billing/core/subscription'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteDocumentStorageFiles } from '@/lib/knowledge/documents/service'
import { cleanupUnusedTagDefinitions } from '@/lib/knowledge/tags/service'
@@ -117,20 +116,6 @@ export async function PATCH(request: NextRequest, { params }: RouteParams) {
)
}
if (
parsed.data.syncIntervalMinutes !== undefined &&
parsed.data.syncIntervalMinutes > 0 &&
parsed.data.syncIntervalMinutes < 60
) {
const canUseLiveSync = await hasLiveSyncAccess(auth.userId)
if (!canUseLiveSync) {
return NextResponse.json(
{ error: 'Live sync requires a Max or Enterprise plan' },
{ status: 403 }
)
}
}
if (parsed.data.sourceConfig !== undefined) {
const existingRows = await db
.select()

View File

@@ -7,7 +7,6 @@ import { z } from 'zod'
import { encryptApiKey } from '@/lib/api-key/crypto'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { hasLiveSyncAccess } from '@/lib/billing/core/subscription'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { allocateTagSlots } from '@/lib/knowledge/constants'
@@ -98,16 +97,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const { connectorType, credentialId, apiKey, sourceConfig, syncIntervalMinutes } = parsed.data
if (syncIntervalMinutes > 0 && syncIntervalMinutes < 60) {
const canUseLiveSync = await hasLiveSyncAccess(auth.userId)
if (!canUseLiveSync) {
return NextResponse.json(
{ error: 'Live sync requires a Max or Enterprise plan' },
{ status: 403 }
)
}
}
const connectorConfig = CONNECTOR_REGISTRY[connectorType]
if (!connectorConfig) {
return NextResponse.json(
@@ -162,39 +151,19 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
const tagSlotMapping: Record<string, string> = {}
let newTagSlots: Record<string, string> = {}
if (connectorConfig.tagDefinitions?.length) {
const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? [])
const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id))
const existingDefs = await db
.select({
tagSlot: knowledgeBaseTagDefinitions.tagSlot,
displayName: knowledgeBaseTagDefinitions.displayName,
fieldType: knowledgeBaseTagDefinitions.fieldType,
})
.select({ tagSlot: knowledgeBaseTagDefinitions.tagSlot })
.from(knowledgeBaseTagDefinitions)
.where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId))
const usedSlots = new Set<string>(existingDefs.map((d) => d.tagSlot))
const existingByName = new Map(
existingDefs.map((d) => [d.displayName, { tagSlot: d.tagSlot, fieldType: d.fieldType }])
)
const defsNeedingSlots: typeof enabledDefs = []
for (const td of enabledDefs) {
const existing = existingByName.get(td.displayName)
if (existing && existing.fieldType === td.fieldType) {
tagSlotMapping[td.id] = existing.tagSlot
} else {
defsNeedingSlots.push(td)
}
}
const { mapping, skipped: skippedTags } = allocateTagSlots(defsNeedingSlots, usedSlots)
const { mapping, skipped: skippedTags } = allocateTagSlots(enabledDefs, usedSlots)
Object.assign(tagSlotMapping, mapping)
newTagSlots = mapping
for (const name of skippedTags) {
logger.warn(`[${requestId}] No available slots for "${name}"`)
@@ -228,7 +197,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
throw new Error('Knowledge base not found')
}
for (const [semanticId, slot] of Object.entries(newTagSlots)) {
for (const [semanticId, slot] of Object.entries(tagSlotMapping)) {
const td = connectorConfig.tagDefinitions!.find((d) => d.id === semanticId)!
await createTagDefinition(
{

View File

@@ -10,7 +10,6 @@ import {
retryDocumentProcessing,
updateDocument,
} from '@/lib/knowledge/documents/service'
import { captureServerEvent } from '@/lib/posthog/server'
import { checkDocumentAccess, checkDocumentWriteAccess } from '@/app/api/knowledge/utils'
const logger = createLogger('DocumentByIdAPI')
@@ -286,14 +285,6 @@ export async function DELETE(
request: req,
})
const kbWorkspaceId = accessCheck.knowledgeBase?.workspaceId ?? ''
captureServerEvent(
userId,
'knowledge_base_document_deleted',
{ knowledge_base_id: knowledgeBaseId, workspace_id: kbWorkspaceId },
kbWorkspaceId ? { groups: { workspace: kbWorkspaceId } } : undefined
)
return NextResponse.json({
success: true,
data: result,

View File

@@ -5,7 +5,6 @@
* @vitest-environment node
*/
import { createEnvMock, databaseMock, loggerMock } from '@sim/testing'
import { mockNextFetchResponse } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('drizzle-orm')
@@ -15,6 +14,16 @@ vi.mock('@/lib/knowledge/documents/utils', () => ({
retryWithExponentialBackoff: (fn: any) => fn(),
}))
vi.stubGlobal(
'fetch',
vi.fn().mockResolvedValue({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
})
)
vi.mock('@/lib/core/config/env', () => createEnvMock())
import {
@@ -169,16 +178,17 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
mockNextFetchResponse({
json: {
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
}),
} as any)
const result = await generateSearchEmbedding('test query')
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect(fetchSpy).toHaveBeenCalledWith(
'https://test.openai.azure.com/openai/deployments/text-embedding-ada-002/embeddings?api-version=2024-12-01-preview',
expect.objectContaining({
headers: expect.objectContaining({
@@ -199,16 +209,17 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
mockNextFetchResponse({
json: {
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
}),
} as any)
const result = await generateSearchEmbedding('test query')
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect(fetchSpy).toHaveBeenCalledWith(
'https://api.openai.com/v1/embeddings',
expect.objectContaining({
headers: expect.objectContaining({
@@ -232,16 +243,17 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
mockNextFetchResponse({
json: {
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
}),
} as any)
await generateSearchEmbedding('test query')
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect(fetchSpy).toHaveBeenCalledWith(
expect.stringContaining('api-version='),
expect.any(Object)
)
@@ -261,16 +273,17 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
mockNextFetchResponse({
json: {
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
}),
} as any)
await generateSearchEmbedding('test query', 'text-embedding-3-small')
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect(fetchSpy).toHaveBeenCalledWith(
'https://test.openai.azure.com/openai/deployments/custom-embedding-model/embeddings?api-version=2024-12-01-preview',
expect.any(Object)
)
@@ -298,12 +311,13 @@ describe('Knowledge Search Utils', () => {
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
})
mockNextFetchResponse({
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: false,
status: 404,
statusText: 'Not Found',
text: 'Deployment not found',
})
text: async () => 'Deployment not found',
} as any)
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
@@ -318,12 +332,13 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
mockNextFetchResponse({
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: false,
status: 429,
statusText: 'Too Many Requests',
text: 'Rate limit exceeded',
})
text: async () => 'Rate limit exceeded',
} as any)
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
@@ -341,16 +356,17 @@ describe('Knowledge Search Utils', () => {
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
})
mockNextFetchResponse({
json: {
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
}),
} as any)
await generateSearchEmbedding('test query')
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect(fetchSpy).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
body: JSON.stringify({
@@ -371,16 +387,17 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
mockNextFetchResponse({
json: {
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
}),
} as any)
await generateSearchEmbedding('test query', 'text-embedding-3-small')
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect(fetchSpy).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
body: JSON.stringify({

View File

@@ -77,7 +77,6 @@ vi.stubGlobal(
{ embedding: [0.1, 0.2], index: 0 },
{ embedding: [0.3, 0.4], index: 1 },
],
usage: { prompt_tokens: 2, total_tokens: 2 },
}),
})
)
@@ -295,7 +294,7 @@ describe('Knowledge Utils', () => {
it.concurrent('should return same length as input', async () => {
const result = await generateEmbeddings(['a', 'b'])
expect(result.embeddings.length).toBe(2)
expect(result.length).toBe(2)
})
it('should use Azure OpenAI when Azure config is provided', async () => {
@@ -314,7 +313,6 @@ describe('Knowledge Utils', () => {
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2], index: 0 }],
usage: { prompt_tokens: 1, total_tokens: 1 },
}),
} as any)
@@ -344,7 +342,6 @@ describe('Knowledge Utils', () => {
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2], index: 0 }],
usage: { prompt_tokens: 1, total_tokens: 1 },
}),
} as any)

View File

@@ -159,7 +159,16 @@ export async function PATCH(
}
)
}
if (isUnread === true) {
if (isUnread === false) {
captureServerEvent(
userId,
'task_marked_read',
{ workspace_id: updatedChat.workspaceId },
{
groups: { workspace: updatedChat.workspaceId },
}
)
} else if (isUnread === true) {
captureServerEvent(
userId,
'task_marked_unread',

View File

@@ -7,7 +7,6 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
@@ -299,13 +298,6 @@ export async function DELETE(
request,
})
captureServerEvent(
session.user.id,
'scheduled_task_deleted',
{ workspace_id: workspaceId ?? '' },
workspaceId ? { groups: { workspace: workspaceId } } : undefined
)
return NextResponse.json({ message: 'Schedule deleted successfully' })
} catch (error) {
logger.error(`[${requestId}] Error deleting schedule`, error)

View File

@@ -3,9 +3,6 @@
*
* @vitest-environment node
*/
import { createFeatureFlagsMock, createMockRequest } from '@sim/testing'
import { drizzleOrmMock } from '@sim/testing/mocks'
import type { NextRequest } from 'next/server'
import { beforeEach, describe, expect, it, vi } from 'vitest'
@@ -13,6 +10,7 @@ const {
mockVerifyCronAuth,
mockExecuteScheduleJob,
mockExecuteJobInline,
mockFeatureFlags,
mockDbReturning,
mockDbUpdate,
mockEnqueue,
@@ -35,6 +33,12 @@ const {
mockVerifyCronAuth: vi.fn().mockReturnValue(null),
mockExecuteScheduleJob: vi.fn().mockResolvedValue(undefined),
mockExecuteJobInline: vi.fn().mockResolvedValue(undefined),
mockFeatureFlags: {
isTriggerDevEnabled: false,
isHosted: false,
isProd: false,
isDev: true,
},
mockDbReturning,
mockDbUpdate,
mockEnqueue,
@@ -45,13 +49,6 @@ const {
}
})
const mockFeatureFlags = createFeatureFlagsMock({
isTriggerDevEnabled: false,
isHosted: false,
isProd: false,
isDev: true,
})
vi.mock('@/lib/auth/internal', () => ({
verifyCronAuth: mockVerifyCronAuth,
}))
@@ -94,7 +91,17 @@ vi.mock('@/lib/workflows/utils', () => ({
}),
}))
vi.mock('drizzle-orm', () => drizzleOrmMock)
vi.mock('drizzle-orm', () => ({
and: vi.fn((...conditions: unknown[]) => ({ type: 'and', conditions })),
eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })),
ne: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'ne' })),
lte: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lte' })),
lt: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lt' })),
not: vi.fn((condition: unknown) => ({ type: 'not', condition })),
isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })),
or: vi.fn((...conditions: unknown[]) => ({ type: 'or', conditions })),
sql: vi.fn((strings: unknown, ...values: unknown[]) => ({ type: 'sql', strings, values })),
}))
vi.mock('@sim/db', () => ({
db: {
@@ -170,13 +177,18 @@ const SINGLE_JOB = [
},
]
function createCronRequest() {
return createMockRequest(
'GET',
undefined,
{ Authorization: 'Bearer test-cron-secret' },
'http://localhost:3000/api/schedules/execute'
)
function createMockRequest(): NextRequest {
const mockHeaders = new Map([
['authorization', 'Bearer test-cron-secret'],
['content-type', 'application/json'],
])
return {
headers: {
get: (key: string) => mockHeaders.get(key.toLowerCase()) || null,
},
url: 'http://localhost:3000/api/schedules/execute',
} as NextRequest
}
describe('Scheduled Workflow Execution API Route', () => {
@@ -192,7 +204,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should execute scheduled workflows with Trigger.dev disabled', async () => {
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([])
const response = await GET(createCronRequest() as unknown as NextRequest)
const response = await GET(createMockRequest())
expect(response).toBeDefined()
expect(response.status).toBe(200)
@@ -205,7 +217,7 @@ describe('Scheduled Workflow Execution API Route', () => {
mockFeatureFlags.isTriggerDevEnabled = true
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([])
const response = await GET(createCronRequest() as unknown as NextRequest)
const response = await GET(createMockRequest())
expect(response).toBeDefined()
expect(response.status).toBe(200)
@@ -216,7 +228,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should handle case with no due schedules', async () => {
mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce([])
const response = await GET(createCronRequest() as unknown as NextRequest)
const response = await GET(createMockRequest())
expect(response.status).toBe(200)
const data = await response.json()
@@ -227,7 +239,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should execute multiple schedules in parallel', async () => {
mockDbReturning.mockReturnValueOnce(MULTIPLE_SCHEDULES).mockReturnValueOnce([])
const response = await GET(createCronRequest() as unknown as NextRequest)
const response = await GET(createMockRequest())
expect(response.status).toBe(200)
const data = await response.json()
@@ -237,7 +249,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should queue mothership jobs to BullMQ when available', async () => {
mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce(SINGLE_JOB)
const response = await GET(createCronRequest() as unknown as NextRequest)
const response = await GET(createMockRequest())
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(
@@ -262,7 +274,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should enqueue preassigned correlation metadata for schedules', async () => {
mockDbReturning.mockReturnValue(SINGLE_SCHEDULE)
const response = await GET(createCronRequest() as unknown as NextRequest)
const response = await GET(createMockRequest())
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(

View File

@@ -5,7 +5,6 @@ import { and, eq, isNull, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
@@ -278,13 +277,6 @@ export async function POST(req: NextRequest) {
lifecycle,
})
captureServerEvent(
session.user.id,
'scheduled_task_created',
{ workspace_id: workspaceId },
{ groups: { workspace: workspaceId } }
)
return NextResponse.json(
{ schedule: { id, status: 'active', cronExpression, nextRunAt } },
{ status: 201 }

View File

@@ -16,8 +16,7 @@ import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { exportFolderToZip } from '@/lib/workflows/operations/import-export'
import { exportFolderToZip, sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {

View File

@@ -20,7 +20,7 @@ import { createLogger } from '@sim/logger'
import { inArray } from 'drizzle-orm'
import JSZip from 'jszip'
import { NextResponse } from 'next/server'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {

View File

@@ -16,8 +16,7 @@ import { workflow, workflowFolder, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { exportWorkspaceToZip } from '@/lib/workflows/operations/import-export'
import { exportWorkspaceToZip, sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {

View File

@@ -228,7 +228,6 @@ export function Home({ chatId }: HomeProps = {}) {
if (!trimmed && !(fileAttachments && fileAttachments.length > 0)) return
captureEvent(posthogRef.current, 'task_message_sent', {
workspace_id: workspaceId,
has_attachments: !!(fileAttachments && fileAttachments.length > 0),
has_contexts: !!(contexts && contexts.length > 0),
is_new_task: !chatId,
@@ -240,7 +239,7 @@ export function Home({ chatId }: HomeProps = {}) {
sendMessage(trimmed || 'Analyze the attached file(s).', fileAttachments, contexts)
},
[sendMessage, workspaceId, chatId]
[sendMessage]
)
useEffect(() => {

View File

@@ -19,23 +19,26 @@ import {
ModalHeader,
Tooltip,
} from '@/components/emcn'
import { getSubscriptionAccessState } from '@/lib/billing/client'
import { consumeOAuthReturnContext } from '@/lib/credentials/client-state'
import { getProviderIdFromServiceId, type OAuthProvider } from '@/lib/oauth'
import { OAuthModal } from '@/app/workspace/[workspaceId]/components/oauth-modal'
import { ConnectorSelectorField } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/add-connector-modal/components/connector-selector-field'
import { SYNC_INTERVALS } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/consts'
import { MaxBadge } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/max-badge'
import { isBillingEnabled } from '@/app/workspace/[workspaceId]/settings/navigation'
import { getDependsOnFields } from '@/blocks/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig, ConnectorConfigField } from '@/connectors/types'
import { useCreateConnector } from '@/hooks/queries/kb/connectors'
import { useOAuthCredentials } from '@/hooks/queries/oauth/oauth-credentials'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import type { SelectorKey } from '@/hooks/selectors/types'
import { useCredentialRefreshTriggers } from '@/hooks/use-credential-refresh-triggers'
const SYNC_INTERVALS = [
{ label: 'Every hour', value: 60 },
{ label: 'Every 6 hours', value: 360 },
{ label: 'Daily', value: 1440 },
{ label: 'Weekly', value: 10080 },
{ label: 'Manual only', value: 0 },
] as const
const CONNECTOR_ENTRIES = Object.entries(CONNECTOR_REGISTRY)
interface AddConnectorModalProps {
@@ -72,10 +75,6 @@ export function AddConnectorModal({
const { workspaceId } = useParams<{ workspaceId: string }>()
const { mutate: createConnector, isPending: isCreating } = useCreateConnector()
const { data: subscriptionResponse } = useSubscriptionData({ enabled: isBillingEnabled })
const subscriptionAccess = getSubscriptionAccessState(subscriptionResponse?.data)
const hasMaxAccess = !isBillingEnabled || subscriptionAccess.hasUsableMaxAccess
const connectorConfig = selectedType ? CONNECTOR_REGISTRY[selectedType] : null
const isApiKeyMode = connectorConfig?.auth.mode === 'apiKey'
const connectorProviderId = useMemo(
@@ -529,13 +528,8 @@ export function AddConnectorModal({
onValueChange={(val) => setSyncInterval(Number(val))}
>
{SYNC_INTERVALS.map((interval) => (
<ButtonGroupItem
key={interval.value}
value={String(interval.value)}
disabled={interval.requiresMax && !hasMaxAccess}
>
<ButtonGroupItem key={interval.value} value={String(interval.value)}>
{interval.label}
{interval.requiresMax && !hasMaxAccess && <MaxBadge />}
</ButtonGroupItem>
))}
</ButtonGroup>

View File

@@ -79,8 +79,6 @@ export function ConnectorSelectorField({
options={comboboxOptions}
value={value || undefined}
onChange={onChange}
searchable
searchPlaceholder={`Search ${field.title.toLowerCase()}...`}
placeholder={
!credentialId
? 'Connect an account first'

View File

@@ -1,8 +0,0 @@
export const SYNC_INTERVALS = [
{ label: 'Live', value: 5, requiresMax: true },
{ label: 'Every hour', value: 60, requiresMax: false },
{ label: 'Every 6 hours', value: 360, requiresMax: false },
{ label: 'Daily', value: 1440, requiresMax: false },
{ label: 'Weekly', value: 10080, requiresMax: false },
{ label: 'Manual only', value: 0, requiresMax: false },
] as const

View File

@@ -21,10 +21,6 @@ import {
ModalTabsTrigger,
Skeleton,
} from '@/components/emcn'
import { getSubscriptionAccessState } from '@/lib/billing/client'
import { SYNC_INTERVALS } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/consts'
import { MaxBadge } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/max-badge'
import { isBillingEnabled } from '@/app/workspace/[workspaceId]/settings/navigation'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig } from '@/connectors/types'
import type { ConnectorData } from '@/hooks/queries/kb/connectors'
@@ -34,10 +30,17 @@ import {
useRestoreConnectorDocument,
useUpdateConnector,
} from '@/hooks/queries/kb/connectors'
import { useSubscriptionData } from '@/hooks/queries/subscription'
const logger = createLogger('EditConnectorModal')
const SYNC_INTERVALS = [
{ label: 'Every hour', value: 60 },
{ label: 'Every 6 hours', value: 360 },
{ label: 'Daily', value: 1440 },
{ label: 'Weekly', value: 10080 },
{ label: 'Manual only', value: 0 },
] as const
/** Keys injected by the sync engine — not user-editable */
const INTERNAL_CONFIG_KEYS = new Set(['tagSlotMapping', 'disabledTagIds'])
@@ -73,10 +76,6 @@ export function EditConnectorModal({
const { mutate: updateConnector, isPending: isSaving } = useUpdateConnector()
const { data: subscriptionResponse } = useSubscriptionData({ enabled: isBillingEnabled })
const subscriptionAccess = getSubscriptionAccessState(subscriptionResponse?.data)
const hasMaxAccess = !isBillingEnabled || subscriptionAccess.hasUsableMaxAccess
const hasChanges = useMemo(() => {
if (syncInterval !== connector.syncIntervalMinutes) return true
for (const [key, value] of Object.entries(sourceConfig)) {
@@ -147,7 +146,6 @@ export function EditConnectorModal({
setSourceConfig={setSourceConfig}
syncInterval={syncInterval}
setSyncInterval={setSyncInterval}
hasMaxAccess={hasMaxAccess}
error={error}
/>
</ModalTabsContent>
@@ -186,7 +184,6 @@ interface SettingsTabProps {
setSourceConfig: React.Dispatch<React.SetStateAction<Record<string, string>>>
syncInterval: number
setSyncInterval: (v: number) => void
hasMaxAccess: boolean
error: string | null
}
@@ -196,7 +193,6 @@ function SettingsTab({
setSourceConfig,
syncInterval,
setSyncInterval,
hasMaxAccess,
error,
}: SettingsTabProps) {
return (
@@ -238,13 +234,8 @@ function SettingsTab({
onValueChange={(val) => setSyncInterval(Number(val))}
>
{SYNC_INTERVALS.map((interval) => (
<ButtonGroupItem
key={interval.value}
value={String(interval.value)}
disabled={interval.requiresMax && !hasMaxAccess}
>
<ButtonGroupItem key={interval.value} value={String(interval.value)}>
{interval.label}
{interval.requiresMax && !hasMaxAccess && <MaxBadge />}
</ButtonGroupItem>
))}
</ButtonGroup>

View File

@@ -1,7 +0,0 @@
export function MaxBadge() {
return (
<span className='ml-1 shrink-0 rounded-[3px] bg-[var(--surface-5)] px-1 py-[1px] font-medium text-[9px] text-[var(--text-icon)] uppercase tracking-wide'>
Max
</span>
)
}

View File

@@ -1,9 +1,8 @@
'use client'
import { memo, useCallback, useMemo, useRef, useState } from 'react'
import { memo, useCallback, useMemo, useState } from 'react'
import { ArrowUp, Bell, Library, MoreHorizontal, RefreshCw } from 'lucide-react'
import { useParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { useShallow } from 'zustand/react/shallow'
import {
Button,
@@ -19,7 +18,6 @@ import { DatePicker } from '@/components/emcn/components/date-picker/date-picker
import { cn } from '@/lib/core/utils/cn'
import { hasActiveFilters } from '@/lib/logs/filters'
import { getTriggerOptions } from '@/lib/logs/get-trigger-options'
import { captureEvent } from '@/lib/posthog/client'
import { type LogStatus, STATUS_CONFIG } from '@/app/workspace/[workspaceId]/logs/utils'
import { getBlock } from '@/blocks/registry'
import { useFolderMap } from '@/hooks/queries/folders'
@@ -181,9 +179,6 @@ export const LogsToolbar = memo(function LogsToolbar({
}: LogsToolbarProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const posthog = usePostHog()
const posthogRef = useRef(posthog)
posthogRef.current = posthog
const {
level,
@@ -263,45 +258,8 @@ export const LogsToolbar = memo(function LogsToolbar({
} else {
setLevel(values.join(','))
}
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'status',
workspace_id: workspaceId,
})
},
[setLevel, workspaceId]
)
const handleWorkflowFilterChange = useCallback(
(values: string[]) => {
setWorkflowIds(values)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'workflow',
workspace_id: workspaceId,
})
},
[setWorkflowIds, workspaceId]
)
const handleFolderFilterChange = useCallback(
(values: string[]) => {
setFolderIds(values)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'folder',
workspace_id: workspaceId,
})
},
[setFolderIds, workspaceId]
)
const handleTriggerFilterChange = useCallback(
(values: string[]) => {
setTriggers(values)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'trigger',
workspace_id: workspaceId,
})
},
[setTriggers, workspaceId]
[setLevel]
)
const statusDisplayLabel = useMemo(() => {
@@ -390,13 +348,9 @@ export const LogsToolbar = memo(function LogsToolbar({
} else {
clearDateRange()
setTimeRange(val as typeof timeRange)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'time',
workspace_id: workspaceId,
})
}
},
[timeRange, setTimeRange, clearDateRange, workspaceId]
[timeRange, setTimeRange, clearDateRange]
)
/**
@@ -406,12 +360,8 @@ export const LogsToolbar = memo(function LogsToolbar({
(start: string, end: string) => {
setDateRange(start, end)
setDatePickerOpen(false)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'time',
workspace_id: workspaceId,
})
},
[setDateRange, workspaceId]
[setDateRange]
)
/**
@@ -595,7 +545,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={workflowOptions}
multiSelect
multiSelectValues={workflowIds}
onMultiSelectChange={handleWorkflowFilterChange}
onMultiSelectChange={setWorkflowIds}
placeholder='All workflows'
overlayContent={
<span className='flex items-center gap-1.5 truncate text-[var(--text-primary)]'>
@@ -630,7 +580,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={folderOptions}
multiSelect
multiSelectValues={folderIds}
onMultiSelectChange={handleFolderFilterChange}
onMultiSelectChange={setFolderIds}
placeholder='All folders'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>
@@ -655,7 +605,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={triggerOptions}
multiSelect
multiSelectValues={triggers}
onMultiSelectChange={handleTriggerFilterChange}
onMultiSelectChange={setTriggers}
placeholder='All triggers'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>
@@ -726,7 +676,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={workflowOptions}
multiSelect
multiSelectValues={workflowIds}
onMultiSelectChange={handleWorkflowFilterChange}
onMultiSelectChange={setWorkflowIds}
placeholder='Workflow'
overlayContent={
<span className='flex items-center gap-1.5 truncate text-[var(--text-primary)]'>
@@ -757,7 +707,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={folderOptions}
multiSelect
multiSelectValues={folderIds}
onMultiSelectChange={handleFolderFilterChange}
onMultiSelectChange={setFolderIds}
placeholder='Folder'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{folderDisplayLabel}</span>
@@ -776,7 +726,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={triggerOptions}
multiSelect
multiSelectValues={triggers}
onMultiSelectChange={handleTriggerFilterChange}
onMultiSelectChange={setTriggers}
placeholder='Trigger'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{triggerDisplayLabel}</span>

View File

@@ -62,8 +62,6 @@ const roleOptions = [
{ value: 'admin', label: 'Admin' },
] as const
const roleComboOptions = roleOptions.map((option) => ({ value: option.value, label: option.label }))
export function IntegrationsManager() {
const params = useParams()
const workspaceId = (params?.workspaceId as string) || ''
@@ -1317,32 +1315,42 @@ export function IntegrationsManager() {
</div>
</div>
<Combobox
options={roleComboOptions}
value={
roleOptions.find((option) => option.value === member.role)?.label || ''
}
selectedValue={member.role}
onChange={(value) =>
handleChangeMemberRole(member.userId, value as WorkspaceCredentialRole)
}
placeholder='Role'
disabled={
!isSelectedAdmin || (member.role === 'admin' && adminMemberCount <= 1)
}
size='sm'
/>
{isSelectedAdmin ? (
<Button
variant='ghost'
onClick={() => handleRemoveMember(member.userId)}
disabled={member.role === 'admin' && adminMemberCount <= 1}
className='w-full justify-end'
>
Remove
</Button>
<>
<Combobox
options={roleOptions.map((option) => ({
value: option.value,
label: option.label,
}))}
value={
roleOptions.find((option) => option.value === member.role)?.label ||
''
}
selectedValue={member.role}
onChange={(value) =>
handleChangeMemberRole(
member.userId,
value as WorkspaceCredentialRole
)
}
placeholder='Role'
disabled={member.role === 'admin' && adminMemberCount <= 1}
size='sm'
/>
<Button
variant='ghost'
onClick={() => handleRemoveMember(member.userId)}
disabled={member.role === 'admin' && adminMemberCount <= 1}
className='w-full justify-end'
>
Remove
</Button>
</>
) : (
<div />
<>
<Badge variant='gray-secondary'>{member.role}</Badge>
<div />
</>
)}
</div>
))}
@@ -1362,7 +1370,10 @@ export function IntegrationsManager() {
size='sm'
/>
<Combobox
options={roleComboOptions}
options={roleOptions.map((option) => ({
value: option.value,
label: option.label,
}))}
value={
roleOptions.find((option) => option.value === memberRole)?.label || ''
}

View File

@@ -6,7 +6,7 @@ import {
DropdownMenuTrigger,
} from '@/components/emcn'
import { ArrowDown, ArrowUp, Duplicate, Pencil, Trash } from '@/components/emcn/icons'
import type { ContextMenuState } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import type { ContextMenuState } from '../../types'
interface ContextMenuProps {
contextMenu: ContextMenuState

View File

@@ -17,17 +17,13 @@ import {
Textarea,
} from '@/components/emcn'
import type { ColumnDefinition, TableInfo, TableRow } from '@/lib/table'
import {
cleanCellValue,
formatValueForInput,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/utils'
import {
useCreateTableRow,
useDeleteTableRow,
useDeleteTableRows,
useUpdateTableRow,
} from '@/hooks/queries/tables'
import { useTableUndoStore } from '@/stores/table/store'
import { cleanCellValue, formatValueForInput } from '../../utils'
const logger = createLogger('RowModal')
@@ -43,9 +39,13 @@ export interface RowModalProps {
function createInitialRowData(columns: ColumnDefinition[]): Record<string, unknown> {
const initial: Record<string, unknown> = {}
for (const col of columns) {
initial[col.name] = col.type === 'boolean' ? false : ''
}
columns.forEach((col) => {
if (col.type === 'boolean') {
initial[col.name] = false
} else {
initial[col.name] = ''
}
})
return initial
}
@@ -54,13 +54,16 @@ function cleanRowData(
rowData: Record<string, unknown>
): Record<string, unknown> {
const cleanData: Record<string, unknown> = {}
for (const col of columns) {
columns.forEach((col) => {
const value = rowData[col.name]
try {
cleanData[col.name] = cleanCellValue(rowData[col.name], col)
cleanData[col.name] = cleanCellValue(value, col)
} catch {
throw new Error(`Invalid JSON for field: ${col.name}`)
}
}
})
return cleanData
}
@@ -83,7 +86,8 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const workspaceId = params.workspaceId as string
const tableId = table.id
const columns = table.schema?.columns || []
const schema = table?.schema
const columns = schema?.columns || []
const [rowData, setRowData] = useState<Record<string, unknown>>(() =>
getInitialRowData(mode, columns, row)
@@ -93,7 +97,6 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const updateRowMutation = useUpdateTableRow({ workspaceId, tableId })
const deleteRowMutation = useDeleteTableRow({ workspaceId, tableId })
const deleteRowsMutation = useDeleteTableRows({ workspaceId, tableId })
const pushToUndoStack = useTableUndoStore((s) => s.push)
const isSubmitting =
createRowMutation.isPending ||
updateRowMutation.isPending ||
@@ -108,24 +111,9 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const cleanData = cleanRowData(columns, rowData)
if (mode === 'add') {
const response = await createRowMutation.mutateAsync({ data: cleanData })
const createdRow = (response as { data?: { row?: { id?: string; position?: number } } })
?.data?.row
if (createdRow?.id) {
pushToUndoStack(tableId, {
type: 'create-row',
rowId: createdRow.id,
position: createdRow.position ?? 0,
data: cleanData,
})
}
await createRowMutation.mutateAsync({ data: cleanData })
} else if (mode === 'edit' && row) {
const oldData = row.data as Record<string, unknown>
await updateRowMutation.mutateAsync({ rowId: row.id, data: cleanData })
pushToUndoStack(tableId, {
type: 'update-cells',
cells: [{ rowId: row.id, oldData, newData: cleanData }],
})
}
onSuccess()
@@ -141,14 +129,8 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const idsToDelete = rowIds ?? (row ? [row.id] : [])
try {
if (idsToDelete.length === 1 && row) {
if (idsToDelete.length === 1) {
await deleteRowMutation.mutateAsync(idsToDelete[0])
pushToUndoStack(tableId, {
type: 'delete-rows',
rows: [
{ rowId: row.id, data: row.data as Record<string, unknown>, position: row.position },
],
})
} else {
await deleteRowsMutation.mutateAsync(idsToDelete)
}

View File

@@ -1,2 +1 @@
export type { TableFilterHandle } from './table-filter'
export { TableFilter } from './table-filter'

View File

@@ -1,14 +1,6 @@
'use client'
import {
forwardRef,
memo,
useCallback,
useImperativeHandle,
useMemo,
useRef,
useState,
} from 'react'
import { memo, useCallback, useMemo, useRef, useState } from 'react'
import { X } from 'lucide-react'
import { nanoid } from 'nanoid'
import {
@@ -27,42 +19,22 @@ const OPERATOR_LABELS = Object.fromEntries(
COMPARISON_OPERATORS.map((op) => [op.value, op.label])
) as Record<string, string>
export interface TableFilterHandle {
addColumnRule: (columnName: string) => void
}
interface TableFilterProps {
columns: Array<{ name: string; type: string }>
filter: Filter | null
onApply: (filter: Filter | null) => void
onClose: () => void
initialColumn?: string | null
}
export const TableFilter = forwardRef<TableFilterHandle, TableFilterProps>(function TableFilter(
{ columns, filter, onApply, onClose, initialColumn },
ref
) {
export function TableFilter({ columns, filter, onApply, onClose }: TableFilterProps) {
const [rules, setRules] = useState<FilterRule[]>(() => {
const fromFilter = filterToRules(filter)
if (fromFilter.length > 0) return fromFilter
const rule = createRule(columns)
return [initialColumn ? { ...rule, column: initialColumn } : rule]
return fromFilter.length > 0 ? fromFilter : [createRule(columns)]
})
const rulesRef = useRef(rules)
rulesRef.current = rules
useImperativeHandle(
ref,
() => ({
addColumnRule: (columnName: string) => {
setRules((prev) => [...prev, { ...createRule(columns), column: columnName }])
},
}),
[columns]
)
const columnOptions = useMemo(
() => columns.map((col) => ({ value: col.name, label: col.name })),
[columns]
@@ -153,7 +125,7 @@ export const TableFilter = forwardRef<TableFilterHandle, TableFilterProps>(funct
</div>
</div>
)
})
}
interface FilterRuleRowProps {
rule: FilterRule

View File

@@ -24,15 +24,11 @@ import {
Skeleton,
} from '@/components/emcn'
import {
ArrowDown,
ArrowLeft,
ArrowRight,
ArrowUp,
Calendar as CalendarIcon,
ChevronDown,
Download,
Fingerprint,
ListFilter,
Pencil,
Plus,
Table as TableIcon,
@@ -49,26 +45,6 @@ import type { ColumnDefinition, Filter, SortDirection, TableRow as TableRowType
import type { ColumnOption, SortConfig } from '@/app/workspace/[workspaceId]/components'
import { ResourceHeader, ResourceOptionsBar } from '@/app/workspace/[workspaceId]/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ContextMenu } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu'
import { RowModal } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/row-modal'
import type { TableFilterHandle } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter'
import { TableFilter } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter'
import {
useContextMenu,
useExportTable,
useTableData,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/hooks'
import type {
EditingCell,
QueryOptions,
SaveReason,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import {
cleanCellValue,
displayToStorage,
formatValueForInput,
storageToDisplay,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/utils'
import {
useAddTableColumn,
useBatchCreateTableRows,
@@ -84,6 +60,17 @@ import {
import { useInlineRename } from '@/hooks/use-inline-rename'
import { extractCreatedRowId, useTableUndo } from '@/hooks/use-table-undo'
import type { DeletedRowSnapshot } from '@/stores/table/types'
import { useContextMenu, useTableData } from '../../hooks'
import type { EditingCell, QueryOptions, SaveReason } from '../../types'
import {
cleanCellValue,
displayToStorage,
formatValueForInput,
storageToDisplay,
} from '../../utils'
import { ContextMenu } from '../context-menu'
import { RowModal } from '../row-modal'
import { TableFilter } from '../table-filter'
interface CellCoord {
rowIndex: number
@@ -101,7 +88,6 @@ interface NormalizedSelection {
const EMPTY_COLUMNS: never[] = []
const EMPTY_CHECKED_ROWS = new Set<number>()
const clearCheckedRows = (prev: Set<number>) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS)
const COL_WIDTH = 160
const COL_WIDTH_MIN = 80
const CHECKBOX_COL_WIDTH = 40
@@ -210,7 +196,6 @@ export function Table({
const [initialCharacter, setInitialCharacter] = useState<string | null>(null)
const [selectionAnchor, setSelectionAnchor] = useState<CellCoord | null>(null)
const [selectionFocus, setSelectionFocus] = useState<CellCoord | null>(null)
const [isColumnSelection, setIsColumnSelection] = useState(false)
const [checkedRows, setCheckedRows] = useState(EMPTY_CHECKED_ROWS)
const lastCheckboxRowRef = useRef<number | null>(null)
const [showDeleteTableConfirm, setShowDeleteTableConfirm] = useState(false)
@@ -235,7 +220,6 @@ export function Table({
const metadataSeededRef = useRef(false)
const containerRef = useRef<HTMLDivElement>(null)
const scrollRef = useRef<HTMLDivElement>(null)
const tableFilterRef = useRef<TableFilterHandle>(null)
const isDraggingRef = useRef(false)
const { tableData, isLoadingTable, rows, isLoadingRows } = useTableData({
@@ -307,11 +291,10 @@ export function Table({
const positionMapRef = useRef(positionMap)
positionMapRef.current = positionMap
const normalizedSelection = useMemo(() => {
const raw = computeNormalizedSelection(selectionAnchor, selectionFocus)
if (!raw || !isColumnSelection) return raw
return { ...raw, startRow: 0, endRow: Math.max(maxPosition, 0) }
}, [selectionAnchor, selectionFocus, isColumnSelection, maxPosition])
const normalizedSelection = useMemo(
() => computeNormalizedSelection(selectionAnchor, selectionFocus),
[selectionAnchor, selectionFocus]
)
const displayColCount = isLoadingTable ? SKELETON_COL_COUNT : displayColumns.length
const tableWidth = useMemo(() => {
@@ -332,18 +315,7 @@ export function Table({
}, [resizingColumn, displayColumns, columnWidths])
const dropIndicatorLeft = useMemo(() => {
if (!dropTargetColumnName || !dragColumnName) return null
const dragIdx = displayColumns.findIndex((c) => c.name === dragColumnName)
const targetIdx = displayColumns.findIndex((c) => c.name === dropTargetColumnName)
if (dragIdx !== -1 && targetIdx !== -1) {
// Suppress when drop would be a no-op (same effective position)
if (targetIdx === dragIdx) return null
if (dropSide === 'right' && targetIdx === dragIdx - 1) return null
if (dropSide === 'left' && targetIdx === dragIdx + 1) return null
}
if (!dropTargetColumnName) return null
let left = CHECKBOX_COL_WIDTH
for (const col of displayColumns) {
if (dropSide === 'left' && col.name === dropTargetColumnName) return left
@@ -351,7 +323,7 @@ export function Table({
if (dropSide === 'right' && col.name === dropTargetColumnName) return left
}
return null
}, [dropTargetColumnName, dropSide, displayColumns, columnWidths, dragColumnName])
}, [dropTargetColumnName, dropSide, displayColumns, columnWidths])
const isAllRowsSelected = useMemo(() => {
if (checkedRows.size > 0 && rows.length > 0 && checkedRows.size >= rows.length) {
@@ -378,7 +350,6 @@ export function Table({
const rowsRef = useRef(rows)
const selectionAnchorRef = useRef(selectionAnchor)
const selectionFocusRef = useRef(selectionFocus)
const normalizedSelectionRef = useRef(normalizedSelection)
const checkedRowsRef = useRef(checkedRows)
checkedRowsRef.current = checkedRows
@@ -388,7 +359,6 @@ export function Table({
rowsRef.current = rows
selectionAnchorRef.current = selectionAnchor
selectionFocusRef.current = selectionFocus
normalizedSelectionRef.current = normalizedSelection
const deleteTableMutation = useDeleteTable(workspaceId)
const renameTableMutation = useRenameTable(workspaceId)
@@ -604,8 +574,7 @@ export function Table({
const handleCellMouseDown = useCallback(
(rowIndex: number, colIndex: number, shiftKey: boolean) => {
setCheckedRows(clearCheckedRows)
setIsColumnSelection(false)
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
lastCheckboxRowRef.current = null
if (shiftKey && selectionAnchorRef.current) {
setSelectionFocus({ rowIndex, colIndex })
@@ -628,7 +597,6 @@ export function Table({
setEditingCell(null)
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
if (shiftKey && lastCheckboxRowRef.current !== null) {
const from = Math.min(lastCheckboxRowRef.current, rowIndex)
@@ -659,8 +627,7 @@ export function Table({
const handleClearSelection = useCallback(() => {
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
setCheckedRows(clearCheckedRows)
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
lastCheckboxRowRef.current = null
}, [])
@@ -670,7 +637,6 @@ export function Table({
setEditingCell(null)
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
const all = new Set<number>()
for (const row of rws) {
all.add(row.position)
@@ -716,22 +682,21 @@ export function Table({
const target = dropTargetColumnNameRef.current
const side = dropSideRef.current
if (target && dragged !== target) {
const currentOrder = columnOrderRef.current ?? columnsRef.current.map((c) => c.name)
const newOrder = currentOrder.filter((n) => n !== dragged)
const targetIndex = newOrder.indexOf(target)
if (targetIndex === -1) {
setDragColumnName(null)
setDropTargetColumnName(null)
setDropSide('left')
return
const cols = columnsRef.current
const currentOrder = columnOrderRef.current ?? cols.map((c) => c.name)
const fromIndex = currentOrder.indexOf(dragged)
const toIndex = currentOrder.indexOf(target)
if (fromIndex !== -1 && toIndex !== -1) {
const newOrder = currentOrder.filter((n) => n !== dragged)
let insertIndex = newOrder.indexOf(target)
if (side === 'right') insertIndex += 1
newOrder.splice(insertIndex, 0, dragged)
setColumnOrder(newOrder)
updateMetadataRef.current({
columnWidths: columnWidthsRef.current,
columnOrder: newOrder,
})
}
const insertIndex = side === 'right' ? targetIndex + 1 : targetIndex
newOrder.splice(insertIndex, 0, dragged)
setColumnOrder(newOrder)
updateMetadataRef.current({
columnWidths: columnWidthsRef.current,
columnOrder: newOrder,
})
}
setDragColumnName(null)
setDropTargetColumnName(null)
@@ -817,9 +782,6 @@ export function Table({
const updateMetadataRef = useRef(updateMetadataMutation.mutate)
updateMetadataRef.current = updateMetadataMutation.mutate
const addColumnAsyncRef = useRef(addColumnMutation.mutateAsync)
addColumnAsyncRef.current = addColumnMutation.mutateAsync
const toggleBooleanCellRef = useRef(toggleBooleanCell)
toggleBooleanCellRef.current = toggleBooleanCell
@@ -832,21 +794,7 @@ export function Table({
const handleKeyDown = (e: KeyboardEvent) => {
const tag = (e.target as HTMLElement).tagName
if (tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT') {
if (e.key === 'Escape') setIsColumnSelection(false)
return
}
if (e.key === 'Escape') {
e.preventDefault()
isDraggingRef.current = false
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
setCheckedRows(clearCheckedRows)
lastCheckboxRowRef.current = null
return
}
if (tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT') return
if ((e.metaKey || e.ctrlKey) && (e.key === 'z' || e.key === 'y')) {
e.preventDefault()
@@ -858,6 +806,15 @@ export function Table({
return
}
if (e.key === 'Escape') {
e.preventDefault()
setSelectionAnchor(null)
setSelectionFocus(null)
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
lastCheckboxRowRef.current = null
return
}
if ((e.metaKey || e.ctrlKey) && e.key === 'a') {
e.preventDefault()
const rws = rowsRef.current
@@ -865,7 +822,6 @@ export function Table({
setEditingCell(null)
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
const all = new Set<number>()
for (const row of rws) {
all.add(row.position)
@@ -879,7 +835,6 @@ export function Table({
const a = selectionAnchorRef.current
if (!a || editingCellRef.current) return
e.preventDefault()
setIsColumnSelection(false)
setSelectionFocus(null)
setCheckedRows((prev) => {
const next = new Set(prev)
@@ -932,7 +887,6 @@ export function Table({
const row = positionMapRef.current.get(anchor.rowIndex)
if (!row) return
e.preventDefault()
setIsColumnSelection(false)
const position = row.position + 1
const colIndex = anchor.colIndex
createRef.current(
@@ -954,12 +908,12 @@ export function Table({
if (e.key === 'Enter' || e.key === 'F2') {
if (!canEditRef.current) return
e.preventDefault()
setIsColumnSelection(false)
const col = cols[anchor.colIndex]
if (!col) return
const row = positionMapRef.current.get(anchor.rowIndex)
if (!row) return
if (col.type === 'boolean') {
toggleBooleanCellRef.current(row.id, col.name, row.data[col.name])
return
@@ -981,8 +935,7 @@ export function Table({
if (e.key === 'Tab') {
e.preventDefault()
setCheckedRows(clearCheckedRows)
setIsColumnSelection(false)
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
lastCheckboxRowRef.current = null
setSelectionAnchor(moveCell(anchor, cols.length, totalRows, e.shiftKey ? -1 : 1))
setSelectionFocus(null)
@@ -991,8 +944,7 @@ export function Table({
if (['ArrowUp', 'ArrowDown', 'ArrowLeft', 'ArrowRight'].includes(e.key)) {
e.preventDefault()
setCheckedRows(clearCheckedRows)
setIsColumnSelection(false)
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
lastCheckboxRowRef.current = null
const focus = selectionFocusRef.current ?? anchor
const origin = e.shiftKey ? focus : anchor
@@ -1027,7 +979,7 @@ export function Table({
if (e.key === 'Delete' || e.key === 'Backspace') {
if (!canEditRef.current) return
e.preventDefault()
const sel = normalizedSelectionRef.current
const sel = computeNormalizedSelection(anchor, selectionFocusRef.current)
if (!sel) return
const pMap = positionMapRef.current
const undoCells: Array<{ rowId: string; data: Record<string, unknown> }> = []
@@ -1059,7 +1011,6 @@ export function Table({
if (col.type === 'number' && !/[\d.-]/.test(e.key)) return
if (col.type === 'date' && !/[\d\-/]/.test(e.key)) return
e.preventDefault()
setIsColumnSelection(false)
const row = positionMapRef.current.get(anchor.rowIndex)
if (!row) return
@@ -1096,7 +1047,10 @@ export function Table({
return
}
const sel = normalizedSelectionRef.current
const anchor = selectionAnchorRef.current
if (!anchor) return
const sel = computeNormalizedSelection(anchor, selectionFocusRef.current)
if (!sel) return
e.preventDefault()
@@ -1152,7 +1106,10 @@ export function Table({
}
e.clipboardData?.setData('text/plain', lines.join('\n'))
} else {
const sel = normalizedSelectionRef.current
const anchor = selectionAnchorRef.current
if (!anchor) return
const sel = computeNormalizedSelection(anchor, selectionFocusRef.current)
if (!sel) return
e.preventDefault()
@@ -1188,7 +1145,7 @@ export function Table({
}
}
const handlePaste = async (e: ClipboardEvent) => {
const handlePaste = (e: ClipboardEvent) => {
const tag = (e.target as HTMLElement).tagName
if (tag === 'INPUT' || tag === 'TEXTAREA') return
if (!canEditRef.current) return
@@ -1207,48 +1164,8 @@ export function Table({
if (pasteRows.length === 0) return
let currentCols = columnsRef.current
const currentCols = columnsRef.current
const pMap = positionMapRef.current
const maxPasteCols = Math.max(...pasteRows.map((pr) => pr.length))
const neededExtraCols = Math.max(
0,
currentAnchor.colIndex + maxPasteCols - currentCols.length
)
if (neededExtraCols > 0) {
// Generate unique names for the new columns without colliding with each other
const existingNames = new Set(currentCols.map((c) => c.name.toLowerCase()))
const newColNames: string[] = []
for (let i = 0; i < neededExtraCols; i++) {
let name = 'untitled'
let n = 2
while (existingNames.has(name.toLowerCase())) {
name = `untitled_${n}`
n++
}
existingNames.add(name.toLowerCase())
newColNames.push(name)
}
// Create columns sequentially so each invalidation completes before the next
const createdColNames: string[] = []
try {
for (const name of newColNames) {
await addColumnAsyncRef.current({ name, type: 'string' })
createdColNames.push(name)
}
} catch {
// If column creation fails partway, paste into whatever columns were created
}
// Build updated column list locally — React Query cache may not have refreshed yet
if (createdColNames.length > 0) {
currentCols = [
...currentCols,
...createdColNames.map((name) => ({ name, type: 'string' as const })),
]
}
}
const undoCells: Array<{ rowId: string; data: Record<string, unknown> }> = []
const updateBatch: Array<{ rowId: string; data: Record<string, unknown> }> = []
@@ -1328,6 +1245,7 @@ export function Table({
)
}
const maxPasteCols = Math.max(...pasteRows.map((pr) => pr.length))
setSelectionFocus({
rowIndex: currentAnchor.rowIndex + pasteRows.length - 1,
colIndex: Math.min(currentAnchor.colIndex + maxPasteCols - 1, currentCols.length - 1),
@@ -1403,10 +1321,10 @@ export function Table({
}, [])
const generateColumnName = useCallback(() => {
const existing = new Set(schemaColumnsRef.current.map((c) => c.name.toLowerCase()))
const existing = schemaColumnsRef.current.map((c) => c.name.toLowerCase())
let name = 'untitled'
let i = 2
while (existing.has(name)) {
while (existing.includes(name.toLowerCase())) {
name = `untitled_${i}`
i++
}
@@ -1511,10 +1429,7 @@ export function Table({
}, [])
const handleRenameColumn = useCallback(
(name: string) => {
isDraggingRef.current = false
columnRename.startRename(name, name)
},
(name: string) => columnRename.startRename(name, name),
[columnRename.startRename]
)
@@ -1525,22 +1440,10 @@ export function Table({
const handleDeleteColumnConfirm = useCallback(() => {
if (!deletingColumn) return
const columnToDelete = deletingColumn
const column = schemaColumnsRef.current.find((c) => c.name === columnToDelete)
const position = schemaColumnsRef.current.findIndex((c) => c.name === columnToDelete)
const orderAtDelete = columnOrderRef.current
setDeletingColumn(null)
deleteColumnMutation.mutate(columnToDelete, {
onSuccess: () => {
if (column && position !== -1) {
pushUndoRef.current({
type: 'delete-column',
columnName: columnToDelete,
columnType: column.type,
position,
unique: !!column.unique,
required: !!column.required,
})
}
if (!orderAtDelete) return
const newOrder = orderAtDelete.filter((n) => n !== columnToDelete)
setColumnOrder(newOrder)
@@ -1565,28 +1468,13 @@ export function Table({
}, [])
const [filterOpen, setFilterOpen] = useState(false)
const [initialFilterColumn, setInitialFilterColumn] = useState<string | null>(null)
const handleFilterToggle = useCallback(() => {
setInitialFilterColumn(null)
setFilterOpen((prev) => !prev)
}, [])
const handleFilterClose = useCallback(() => {
setFilterOpen(false)
setInitialFilterColumn(null)
}, [])
const filterOpenRef = useRef(filterOpen)
filterOpenRef.current = filterOpen
const handleFilterByColumn = useCallback((columnName: string) => {
if (filterOpenRef.current && tableFilterRef.current) {
tableFilterRef.current.addColumnRule(columnName)
} else {
setInitialFilterColumn(columnName)
setFilterOpen(true)
}
}, [])
const columnOptions = useMemo<ColumnOption[]>(
@@ -1667,27 +1555,6 @@ export function Table({
[handleAddColumn, addColumnMutation.isPending]
)
const { handleExportTable, isExporting } = useExportTable({
workspaceId,
tableId,
tableName: tableData?.name,
columns: displayColumns,
queryOptions,
canExport: userPermissions.canEdit,
})
const headerActions = useMemo(
() => [
{
label: isExporting ? 'Exporting...' : 'Export CSV',
icon: Download,
onClick: () => void handleExportTable(),
disabled: !userPermissions.canEdit || !hasTableData || isLoadingTable || isExporting,
},
],
[handleExportTable, hasTableData, isExporting, isLoadingTable, userPermissions.canEdit]
)
const activeSortState = useMemo(() => {
if (!queryOptions.sort) return null
const entries = Object.entries(queryOptions.sort)
@@ -1696,32 +1563,6 @@ export function Table({
return { column, direction }
}, [queryOptions.sort])
const selectedColumnRange = useMemo(() => {
if (!isColumnSelection || !normalizedSelection) return null
return { start: normalizedSelection.startCol, end: normalizedSelection.endCol }
}, [isColumnSelection, normalizedSelection])
const draggingColIndex = useMemo(
() => (dragColumnName ? displayColumns.findIndex((c) => c.name === dragColumnName) : null),
[dragColumnName, displayColumns]
)
const handleColumnSelect = useCallback((colIndex: number) => {
setSelectionAnchor({ rowIndex: 0, colIndex })
setSelectionFocus({ rowIndex: 0, colIndex })
setIsColumnSelection(true)
}, [])
const handleSortAsc = useCallback(
(columnName: string) => handleSortChange(columnName, 'asc'),
[handleSortChange]
)
const handleSortDesc = useCallback(
(columnName: string) => handleSortChange(columnName, 'desc'),
[handleSortChange]
)
const sortConfig = useMemo<SortConfig>(
() => ({
options: columnOptions,
@@ -1778,12 +1619,7 @@ export function Table({
<div ref={containerRef} className='flex h-full flex-col overflow-hidden'>
{!embedded && (
<>
<ResourceHeader
icon={TableIcon}
breadcrumbs={breadcrumbs}
actions={headerActions}
create={createAction}
/>
<ResourceHeader icon={TableIcon} breadcrumbs={breadcrumbs} create={createAction} />
<ResourceOptionsBar
sort={sortConfig}
@@ -1792,12 +1628,10 @@ export function Table({
/>
{filterOpen && (
<TableFilter
ref={tableFilterRef}
columns={displayColumns}
filter={queryOptions.filter}
onApply={handleFilterApply}
onClose={handleFilterClose}
initialColumn={initialFilterColumn}
/>
)}
</>
@@ -1857,11 +1691,10 @@ export function Table({
checked={isAllRowsSelected}
onCheckedChange={handleSelectAllToggle}
/>
{displayColumns.map((column, colIndex) => (
{displayColumns.map((column) => (
<ColumnHeaderMenu
key={column.name}
column={column}
colIndex={colIndex}
readOnly={!userPermissions.canEdit}
isRenaming={columnRename.editingId === column.name}
renameValue={
@@ -1880,20 +1713,10 @@ export function Table({
onResize={handleColumnResize}
onResizeEnd={handleColumnResizeEnd}
isDragging={dragColumnName === column.name}
isDropTarget={
dropTargetColumnName === column.name && dropIndicatorLeft !== null
}
onDragStart={handleColumnDragStart}
onDragOver={handleColumnDragOver}
onDragEnd={handleColumnDragEnd}
onDragLeave={handleColumnDragLeave}
sortDirection={
activeSortState?.column === column.name ? activeSortState.direction : null
}
onSortAsc={handleSortAsc}
onSortDesc={handleSortDesc}
onFilterColumn={handleFilterByColumn}
onColumnSelect={handleColumnSelect}
/>
))}
{userPermissions.canEdit && (
@@ -1921,7 +1744,6 @@ export function Table({
startPosition={prevPosition + 1}
columns={displayColumns}
normalizedSelection={normalizedSelection}
draggingColIndex={draggingColIndex}
checkedRows={checkedRows}
firstRowUnderHeader={prevPosition === -1}
onCellMouseDown={handleCellMouseDown}
@@ -1944,7 +1766,6 @@ export function Table({
: null
}
normalizedSelection={normalizedSelection}
draggingColIndex={draggingColIndex}
onClick={handleCellClick}
onDoubleClick={handleCellDoubleClick}
onSave={handleInlineSave}
@@ -2096,7 +1917,6 @@ interface PositionGapRowsProps {
startPosition: number
columns: ColumnDefinition[]
normalizedSelection: NormalizedSelection | null
draggingColIndex: number | null
checkedRows: Set<number>
firstRowUnderHeader?: boolean
onCellMouseDown: (rowIndex: number, colIndex: number, shiftKey: boolean) => void
@@ -2110,7 +1930,6 @@ const PositionGapRows = React.memo(
startPosition,
columns,
normalizedSelection,
draggingColIndex,
checkedRows,
firstRowUnderHeader = false,
onCellMouseDown,
@@ -2176,11 +1995,7 @@ const PositionGapRows = React.memo(
key={col.name}
data-row={position}
data-col={colIndex}
className={cn(
CELL,
(isHighlighted || isAnchor) && 'relative',
draggingColIndex === colIndex && 'opacity-40'
)}
className={cn(CELL, (isHighlighted || isAnchor) && 'relative')}
onMouseDown={(e) => {
if (e.button !== 0) return
onCellMouseDown(position, colIndex, e.shiftKey)
@@ -2225,7 +2040,6 @@ const PositionGapRows = React.memo(
prev.startPosition !== next.startPosition ||
prev.columns !== next.columns ||
prev.normalizedSelection !== next.normalizedSelection ||
prev.draggingColIndex !== next.draggingColIndex ||
prev.firstRowUnderHeader !== next.firstRowUnderHeader ||
prev.onCellMouseDown !== next.onCellMouseDown ||
prev.onCellMouseEnter !== next.onCellMouseEnter ||
@@ -2268,7 +2082,6 @@ interface DataRowProps {
initialCharacter: string | null
pendingCellValue: Record<string, unknown> | null
normalizedSelection: NormalizedSelection | null
draggingColIndex: number | null
onClick: (rowId: string, columnName: string) => void
onDoubleClick: (rowId: string, columnName: string) => void
onSave: (rowId: string, columnName: string, value: unknown, reason: SaveReason) => void
@@ -2319,7 +2132,6 @@ function dataRowPropsAreEqual(prev: DataRowProps, next: DataRowProps): boolean {
prev.isFirstRow !== next.isFirstRow ||
prev.editingColumnName !== next.editingColumnName ||
prev.pendingCellValue !== next.pendingCellValue ||
prev.draggingColIndex !== next.draggingColIndex ||
prev.onClick !== next.onClick ||
prev.onDoubleClick !== next.onDoubleClick ||
prev.onSave !== next.onSave ||
@@ -2356,7 +2168,6 @@ const DataRow = React.memo(function DataRow({
initialCharacter,
pendingCellValue,
normalizedSelection,
draggingColIndex,
isRowChecked,
onClick,
onDoubleClick,
@@ -2424,11 +2235,7 @@ const DataRow = React.memo(function DataRow({
key={column.name}
data-row={rowIndex}
data-col={colIndex}
className={cn(
CELL,
(isHighlighted || isAnchor || isEditing) && 'relative',
draggingColIndex === colIndex && 'opacity-40'
)}
className={cn(CELL, (isHighlighted || isAnchor || isEditing) && 'relative')}
onMouseDown={(e) => {
if (e.button !== 0 || isEditing) return
onCellMouseDown(rowIndex, colIndex, e.shiftKey)
@@ -2798,7 +2605,6 @@ const COLUMN_TYPE_OPTIONS: { type: string; label: string; icon: React.ElementTyp
const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
column,
colIndex,
readOnly,
isRenaming,
renameValue,
@@ -2815,19 +2621,12 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
onResize,
onResizeEnd,
isDragging,
isDropTarget,
onDragStart,
onDragOver,
onDragEnd,
onDragLeave,
sortDirection,
onSortAsc,
onSortDesc,
onFilterColumn,
onColumnSelect,
}: {
column: ColumnDefinition
colIndex: number
readOnly?: boolean
isRenaming: boolean
renameValue: string
@@ -2844,16 +2643,10 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
onResize: (columnName: string, width: number) => void
onResizeEnd: () => void
isDragging?: boolean
isDropTarget?: boolean
onDragStart?: (columnName: string) => void
onDragOver?: (columnName: string, side: 'left' | 'right') => void
onDragEnd?: () => void
onDragLeave?: () => void
sortDirection?: SortDirection | null
onSortAsc?: (columnName: string) => void
onSortDesc?: (columnName: string) => void
onFilterColumn?: (columnName: string) => void
onColumnSelect?: (colIndex: number) => void
}) {
const renameInputRef = useRef<HTMLInputElement>(null)
@@ -2942,8 +2735,7 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
<th
className={cn(
'group relative border-[var(--border)] border-r border-b bg-[var(--bg)] p-0 text-left align-middle',
isDragging && 'opacity-40',
isDropTarget && 'bg-[var(--selection)]/10'
isDragging && 'opacity-40'
)}
onDragOver={handleDragOver}
onDrop={handleDrop}
@@ -2968,7 +2760,7 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
) : readOnly ? (
<div className='flex h-full w-full min-w-0 items-center px-2 py-[7px]'>
<ColumnTypeIcon type={column.type} />
<span className='ml-1.5 min-w-0 overflow-clip text-ellipsis whitespace-nowrap font-medium text-[var(--text-primary)] text-small'>
<span className='ml-1.5 min-w-0 overflow-clip text-ellipsis whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
{column.name}
</span>
</div>
@@ -2979,34 +2771,15 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
<button
type='button'
className='flex min-w-0 flex-1 cursor-pointer items-center px-2 py-[7px] outline-none'
onClick={() => onColumnSelect?.(colIndex)}
>
<ColumnTypeIcon type={column.type} />
<span className='ml-1.5 min-w-0 overflow-clip text-ellipsis whitespace-nowrap font-medium text-[var(--text-primary)] text-small'>
{column.name}
</span>
{sortDirection && (
<span className='ml-1 shrink-0'>
<SortDirectionIndicator direction={sortDirection} />
</span>
)}
<ChevronDown className='ml-1.5 h-[7px] w-[9px] shrink-0 text-[var(--text-muted)]' />
</button>
</DropdownMenuTrigger>
<DropdownMenuContent align='start'>
<DropdownMenuItem onSelect={() => onSortAsc?.(column.name)}>
<ArrowUp />
Sort ascending
</DropdownMenuItem>
<DropdownMenuItem onSelect={() => onSortDesc?.(column.name)}>
<ArrowDown />
Sort descending
</DropdownMenuItem>
<DropdownMenuItem onSelect={() => onFilterColumn?.(column.name)}>
<ListFilter />
Filter by this column
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuItem onSelect={() => onRenameColumn(column.name)}>
<Pencil />
Rename column
@@ -3127,11 +2900,3 @@ function ColumnTypeIcon({ type }: { type: string }) {
const Icon = COLUMN_TYPE_ICONS[type] ?? TypeText
return <Icon className='h-3 w-3 shrink-0 text-[var(--text-icon)]' />
}
// Small sort-direction glyph rendered beside a sorted column header:
// up arrow for ascending, down arrow for descending. Both variants share
// the same 10px muted-text styling so the header height is unaffected.
function SortDirectionIndicator({ direction }: { direction: SortDirection }) {
  return direction === 'asc' ? (
    <ArrowUp className='h-[10px] w-[10px] text-[var(--text-muted)]' />
  ) : (
    <ArrowDown className='h-[10px] w-[10px] text-[var(--text-muted)]' />
  )
}

View File

@@ -1,39 +0,0 @@
import { createTableColumn, createTableRow } from '@sim/testing'
import { describe, expect, it } from 'vitest'
import { buildTableCsv, formatTableExportValue } from './export'
// Unit coverage for the table CSV export helpers (formatTableExportValue
// and buildTableCsv).
describe('table export utils', () => {
  // Per-column-type display conversion: dates render as MM/DD/YYYY,
  // JSON values are stringified, and null becomes an empty cell.
  it('formats exported values using table display conventions', () => {
    expect(formatTableExportValue('2026-04-03', { name: 'date', type: 'date' })).toBe('04/03/2026')
    expect(formatTableExportValue({ nested: true }, { name: 'payload', type: 'json' })).toBe(
      '{"nested":true}'
    )
    expect(formatTableExportValue(null, { name: 'empty', type: 'string' })).toBe('')
  })

  // CSV assembly: header row from column names, CRLF line endings, and
  // RFC 4180-style quote doubling for embedded double quotes.
  it('builds CSV using visible columns and escaped values', () => {
    const columns = [
      createTableColumn({ name: 'name', type: 'string' }),
      createTableColumn({ name: 'date', type: 'date' }),
      createTableColumn({ name: 'notes', type: 'json' }),
    ]
    const rows = [
      createTableRow({
        id: 'row_1',
        position: 0,
        createdAt: '2026-04-03T00:00:00.000Z',
        updatedAt: '2026-04-03T00:00:00.000Z',
        data: {
          name: 'Ada "Lovelace"',
          date: '2026-04-03',
          notes: { text: 'line 1\nline 2' },
        },
      }),
    ]
    expect(buildTableCsv(columns, rows)).toBe(
      'name,date,notes\r\n"Ada ""Lovelace""",04/03/2026,"{""text"":""line 1\\nline 2""}"'
    )
  })
})

View File

@@ -1,38 +0,0 @@
import type { ColumnDefinition, TableRow } from '@/lib/table'
import { storageToDisplay } from './utils'
/**
 * Serializes a value to JSON, falling back to String() for values that
 * JSON.stringify rejects (e.g. circular references or BigInt).
 */
function safeJsonStringify(value: unknown): string {
  let serialized: string
  try {
    serialized = JSON.stringify(value)
  } catch {
    serialized = String(value)
  }
  return serialized
}
/**
 * Formats a single cell value for CSV export according to the column's
 * declared type:
 * - null/undefined render as an empty string
 * - 'date' values go through storageToDisplay for the UI date format
 * - 'json' values are passed through if already a string, otherwise
 *   safely stringified
 * - everything else is coerced with String()
 */
export function formatTableExportValue(value: unknown, column: ColumnDefinition): string {
  // == null deliberately matches both null and undefined.
  if (value == null) return ''
  if (column.type === 'date') {
    return storageToDisplay(String(value))
  }
  if (column.type === 'json') {
    if (typeof value === 'string') return value
    return safeJsonStringify(value)
  }
  return String(value)
}
/**
 * Quotes a CSV cell when it contains characters that would break row
 * structure (double quote, comma, LF, or CR), doubling any embedded
 * quotes per RFC 4180. Values without special characters pass through
 * unchanged.
 */
export function escapeCsvCell(value: string): string {
  const needsQuoting = /[",\n\r]/.test(value)
  if (!needsQuoting) return value
  const doubledQuotes = value.replace(/"/g, '""')
  return `"${doubledQuotes}"`
}
/**
 * Builds a CSV string (CRLF line endings, RFC 4180 escaping) from the
 * given column definitions and rows. The first line is a header of
 * column names; each subsequent line formats the row's cell values via
 * formatTableExportValue, in column order.
 */
export function buildTableCsv(columns: ColumnDefinition[], rows: TableRow[]): string {
  // Start with the header line, then append one line per row.
  const lines: string[] = [columns.map((col) => escapeCsvCell(col.name)).join(',')]
  for (const row of rows) {
    const cells = columns.map((col) =>
      escapeCsvCell(formatTableExportValue(row.data[col.name], col))
    )
    lines.push(cells.join(','))
  }
  return lines.join('\r\n')
}

View File

@@ -1,3 +1,2 @@
export * from './use-context-menu'
export * from './use-export-table'
export * from './use-table-data'

View File

@@ -1,6 +1,6 @@
import { useCallback, useState } from 'react'
import type { TableRow } from '@/lib/table'
import type { ContextMenuState } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import type { ContextMenuState } from '../types'
interface UseContextMenuReturn {
contextMenu: ContextMenuState

View File

@@ -1,84 +0,0 @@
'use client'
import { useCallback, useRef, useState } from 'react'
import { usePostHog } from 'posthog-js/react'
import { toast } from '@/components/emcn'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import { captureEvent } from '@/lib/posthog/client'
import type { ColumnDefinition } from '@/lib/table'
import { buildTableCsv } from '@/app/workspace/[workspaceId]/tables/[tableId]/export'
import type { QueryOptions } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import { fetchAllTableRows } from '@/hooks/queries/tables'
interface UseExportTableParams {
workspaceId: string
tableId: string
tableName?: string | null
columns: ColumnDefinition[]
queryOptions: QueryOptions
canExport: boolean
}
/**
 * React hook that exports the current table view as a CSV download.
 *
 * Fetches every row matching the active filter/sort via fetchAllTableRows,
 * builds a CSV with buildTableCsv, and triggers a client-side file download
 * named after the table. Emits a 'table_exported' analytics event on success
 * and a toast on failure.
 *
 * Returns { isExporting, handleExportTable }.
 */
export function useExportTable({
  workspaceId,
  tableId,
  tableName,
  columns,
  queryOptions,
  canExport,
}: UseExportTableParams) {
  const posthog = usePostHog()
  // State drives the UI (e.g. a spinner); the ref guards against
  // re-entrant invocations without waiting for a re-render.
  const [isExporting, setIsExporting] = useState(false)
  const isExportingRef = useRef(false)

  const handleExportTable = useCallback(async () => {
    // Bail out when export is disallowed, identifiers are missing, or an
    // export is already in flight.
    if (!canExport || !workspaceId || !tableId || isExportingRef.current) return

    isExportingRef.current = true
    setIsExporting(true)

    try {
      // Pull all rows for the current filter/sort (paginated internally).
      const { rows } = await fetchAllTableRows({
        workspaceId,
        tableId,
        filter: queryOptions.filter,
        sort: queryOptions.sort,
      })

      // Fall back to 'table' when the table has no usable name.
      const filename = `${sanitizePathSegment(tableName?.trim() || 'table')}.csv`
      const csvContent = buildTableCsv(columns, rows)
      downloadFile(csvContent, filename, 'text/csv;charset=utf-8;')

      captureEvent(posthog, 'table_exported', {
        workspace_id: workspaceId,
        table_id: tableId,
        row_count: rows.length,
        column_count: columns.length,
        has_filter: Boolean(queryOptions.filter),
        has_sort: Boolean(queryOptions.sort),
      })
    } catch (error) {
      toast.error(error instanceof Error ? error.message : 'Failed to export table', {
        duration: 5000,
      })
    } finally {
      // Always clear both guards, even after a failure.
      isExportingRef.current = false
      setIsExporting(false)
    }
  }, [
    canExport,
    columns,
    posthog,
    queryOptions.filter,
    queryOptions.sort,
    tableId,
    tableName,
    workspaceId,
  ])

  return {
    isExporting,
    handleExportTable,
  }
}

View File

@@ -1,7 +1,6 @@
import type { TableDefinition, TableRow } from '@/lib/table'
import { TABLE_LIMITS } from '@/lib/table/constants'
import type { QueryOptions } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import { useTable, useTableRows } from '@/hooks/queries/tables'
import type { QueryOptions } from '../types'
interface UseTableDataParams {
workspaceId: string
@@ -31,7 +30,7 @@ export function useTableData({
} = useTableRows({
workspaceId,
tableId,
limit: TABLE_LIMITS.MAX_QUERY_LIMIT,
limit: 1000,
offset: 0,
filter: queryOptions.filter,
sort: queryOptions.sort,

View File

@@ -68,8 +68,9 @@ export function Tables() {
const { data: tables = [], isLoading, error } = useTablesList(workspaceId)
const { data: members } = useWorkspaceMembersQuery(workspaceId)
if (error) logger.error('Failed to load tables:', error)
if (error) {
logger.error('Failed to load tables:', error)
}
const deleteTable = useDeleteTable(workspaceId)
const createTable = useCreateTable(workspaceId)
const uploadCsv = useUploadCsvToTable()

View File

@@ -3,13 +3,11 @@
import { useCallback, useDeferredValue, useEffect, useMemo, useRef, useState } from 'react'
import { Command } from 'cmdk'
import { useParams, useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { createPortal } from 'react-dom'
import { Library } from '@/components/emcn'
import { Calendar, Database, File, HelpCircle, Settings, Table } from '@/components/emcn/icons'
import { Search } from '@/components/emcn/icons/search'
import { cn } from '@/lib/core/utils/cn'
import { captureEvent } from '@/lib/posthog/client'
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
import { SIDEBAR_SCROLL_EVENT } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'
import { usePermissionConfig } from '@/hooks/use-permission-config'
@@ -57,14 +55,11 @@ export function SearchModal({
const [mounted, setMounted] = useState(false)
const { navigateToSettings } = useSettingsNavigation()
const { config: permissionConfig } = usePermissionConfig()
const posthog = usePostHog()
const routerRef = useRef(router)
routerRef.current = router
const onOpenChangeRef = useRef(onOpenChange)
onOpenChangeRef.current = onOpenChange
const posthogRef = useRef(posthog)
posthogRef.current = posthog
useEffect(() => {
setMounted(true)
@@ -159,8 +154,6 @@ export function SearchModal({
}, [open])
const deferredSearch = useDeferredValue(search)
const deferredSearchRef = useRef(deferredSearch)
deferredSearchRef.current = deferredSearch
const handleSearchChange = useCallback((value: string) => {
setSearch(value)
@@ -195,151 +188,59 @@ export function SearchModal({
detail: { type: block.type, enableTriggerMode },
})
)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: type,
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
[]
)
const handleToolOperationSelect = useCallback(
(op: SearchToolOperationItem) => {
const handleToolOperationSelect = useCallback((op: SearchToolOperationItem) => {
window.dispatchEvent(
new CustomEvent('add-block-from-toolbar', {
detail: { type: op.blockType, presetOperation: op.operationId },
})
)
onOpenChangeRef.current(false)
}, [])
const handleWorkflowSelect = useCallback((workflow: WorkflowItem) => {
if (!workflow.isCurrent && workflow.href) {
routerRef.current.push(workflow.href)
window.dispatchEvent(
new CustomEvent('add-block-from-toolbar', {
detail: { type: op.blockType, presetOperation: op.operationId },
})
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: workflow.id } })
)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'tool_operation',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
}
onOpenChangeRef.current(false)
}, [])
const handleWorkflowSelect = useCallback(
(workflow: WorkflowItem) => {
if (!workflow.isCurrent && workflow.href) {
routerRef.current.push(workflow.href)
window.dispatchEvent(
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: workflow.id } })
)
const handleWorkspaceSelect = useCallback((workspace: WorkspaceItem) => {
if (!workspace.isCurrent && workspace.href) {
routerRef.current.push(workspace.href)
}
onOpenChangeRef.current(false)
}, [])
const handleTaskSelect = useCallback((task: TaskItem) => {
routerRef.current.push(task.href)
onOpenChangeRef.current(false)
}, [])
const handlePageSelect = useCallback((page: PageItem) => {
if (page.onClick) {
page.onClick()
} else if (page.href) {
if (page.href.startsWith('http')) {
window.open(page.href, '_blank', 'noopener,noreferrer')
} else {
routerRef.current.push(page.href)
}
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'workflow',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
}
onOpenChangeRef.current(false)
}, [])
const handleWorkspaceSelect = useCallback(
(workspace: WorkspaceItem) => {
if (!workspace.isCurrent && workspace.href) {
routerRef.current.push(workspace.href)
}
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'workspace',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleTaskSelect = useCallback(
(task: TaskItem) => {
routerRef.current.push(task.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'task',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleTableSelect = useCallback(
(item: TaskItem) => {
routerRef.current.push(item.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'table',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleFileSelect = useCallback(
(item: TaskItem) => {
routerRef.current.push(item.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'file',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleKbSelect = useCallback(
(item: TaskItem) => {
routerRef.current.push(item.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'knowledge_base',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handlePageSelect = useCallback(
(page: PageItem) => {
if (page.onClick) {
page.onClick()
} else if (page.href) {
if (page.href.startsWith('http')) {
window.open(page.href, '_blank', 'noopener,noreferrer')
} else {
routerRef.current.push(page.href)
}
}
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'page',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleDocSelect = useCallback(
(doc: SearchDocItem) => {
window.open(doc.href, '_blank', 'noopener,noreferrer')
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'docs',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleDocSelect = useCallback((doc: SearchDocItem) => {
window.open(doc.href, '_blank', 'noopener,noreferrer')
onOpenChangeRef.current(false)
}, [])
const handleBlockSelectAsBlock = useCallback(
(block: SearchBlockItem) => handleBlockSelect(block, 'block'),
@@ -469,9 +370,9 @@ export function SearchModal({
<TriggersGroup items={filteredTriggers} onSelect={handleBlockSelectAsTrigger} />
<WorkflowsGroup items={filteredWorkflows} onSelect={handleWorkflowSelect} />
<TasksGroup items={filteredTasks} onSelect={handleTaskSelect} />
<TablesGroup items={filteredTables} onSelect={handleTableSelect} />
<FilesGroup items={filteredFiles} onSelect={handleFileSelect} />
<KnowledgeBasesGroup items={filteredKnowledgeBases} onSelect={handleKbSelect} />
<TablesGroup items={filteredTables} onSelect={handleTaskSelect} />
<FilesGroup items={filteredFiles} onSelect={handleTaskSelect} />
<KnowledgeBasesGroup items={filteredKnowledgeBases} onSelect={handleTaskSelect} />
<ToolOpsGroup items={filteredToolOps} onSelect={handleToolOperationSelect} />
<WorkspacesGroup items={filteredWorkspaces} onSelect={handleWorkspaceSelect} />
<DocsGroup items={filteredDocs} onSelect={handleDocSelect} />

View File

@@ -1,12 +1,13 @@
import { useCallback, useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import { getFolderById } from '@/lib/folders/tree'
import {
downloadFile,
exportFolderToZip,
type FolderExportData,
fetchWorkflowForExport,
sanitizePathSegment,
type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'
import { useFolderMap } from '@/hooks/queries/folders'

View File

@@ -1,8 +1,8 @@
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { downloadFile } from '@/lib/core/utils/file-download'
import {
downloadFile,
exportWorkflowsToZip,
type FolderExportData,
fetchWorkflowForExport,

View File

@@ -1,13 +1,12 @@
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import { captureEvent } from '@/lib/posthog/client'
import {
downloadFile,
exportWorkflowsToZip,
exportWorkflowToJson,
fetchWorkflowForExport,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { getWorkflows } from '@/hooks/queries/utils/workflow-cache'
import { useFolderStore } from '@/stores/folders/store'
@@ -28,7 +27,6 @@ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) {
const [isExporting, setIsExporting] = useState(false)
const params = useParams()
const workspaceId = params.workspaceId as string | undefined
const posthog = usePostHog()
const onSuccessRef = useRef(onSuccess)
onSuccessRef.current = onSuccess
@@ -36,9 +34,6 @@ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) {
const workspaceIdRef = useRef(workspaceId)
workspaceIdRef.current = workspaceId
const posthogRef = useRef(posthog)
posthogRef.current = posthog
/**
* Export the workflow(s) to JSON or ZIP
* - Single workflow: exports as JSON file
@@ -105,12 +100,6 @@ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) {
const { clearSelection } = useFolderStore.getState()
clearSelection()
captureEvent(posthogRef.current, 'workflow_exported', {
workspace_id: workspaceIdRef.current ?? '',
workflow_count: exportedWorkflows.length,
format: exportedWorkflows.length === 1 ? 'json' : 'zip',
})
logger.info('Workflow(s) exported successfully', {
workflowIds: workflowIdsToExport,
count: exportedWorkflows.length,

View File

@@ -1,10 +1,11 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import {
downloadFile,
exportWorkspaceToZip,
type FolderExportData,
fetchWorkflowForExport,
sanitizePathSegment,
type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'

View File

@@ -1,14 +1,12 @@
import { useCallback, useRef, useState } from 'react'
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { captureEvent } from '@/lib/posthog/client'
import {
extractWorkflowsFromFiles,
extractWorkflowsFromZip,
persistImportedWorkflow,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { useCreateFolder } from '@/hooks/queries/folders'
import { folderKeys } from '@/hooks/queries/utils/folder-keys'
@@ -38,9 +36,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const queryClient = useQueryClient()
const createFolderMutation = useCreateFolder()
const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)
const posthog = usePostHog()
const posthogRef = useRef(posthog)
posthogRef.current = posthog
const [isImporting, setIsImporting] = useState(false)
/**
@@ -209,11 +204,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)
if (importedWorkflowIds.length > 0) {
captureEvent(posthogRef.current, 'workflow_imported', {
workspace_id: workspaceId,
workflow_count: importedWorkflowIds.length,
format: hasZip && fileArray.length === 1 ? 'zip' : 'json',
})
router.push(
`/workspace/${workspaceId}/w/${importedWorkflowIds[importedWorkflowIds.length - 1]}`
)

View File

@@ -1,11 +1,11 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useRouter } from 'next/navigation'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import {
extractWorkflowName,
extractWorkflowsFromZip,
parseWorkflowJson,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { useCreateFolder } from '@/hooks/queries/folders'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'

View File

@@ -51,13 +51,6 @@ import { Button } from '../button/button'
const ANIMATION_CLASSES =
'data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:animate-out data-[state=open]:animate-in motion-reduce:animate-none'
/**
* Modal content animation classes.
* We keep only the slide animations (no zoom) to stabilize positioning while avoiding scale effects.
*/
const CONTENT_ANIMATION_CLASSES =
'data-[state=closed]:slide-out-to-top-[50%] data-[state=open]:slide-in-from-top-[50%] motion-reduce:animate-none'
/**
* Root modal component. Manages open state.
*/
@@ -166,7 +159,8 @@ const ModalContent = React.forwardRef<
)}
style={{
left: isWorkflowPage
? 'calc(50% + (var(--sidebar-width) - var(--panel-width)) / 2)'
? // --panel-width is always the rendered panel width on /w/ routes (panel is never hidden/collapsed)
'calc(50% + (var(--sidebar-width) - var(--panel-width)) / 2)'
: 'calc(var(--sidebar-width) / 2 + 50%)',
...style,
}}

View File

@@ -119,7 +119,7 @@ const ISSUE_FIELDS = `
`
const ISSUE_BY_ID_QUERY = `
query GetIssue($id: ID!) {
query GetIssue($id: String!) {
issue(id: $id) {
${ISSUE_FIELDS}
}
@@ -147,13 +147,13 @@ function buildIssuesQuery(sourceConfig: Record<string, unknown>): {
const variables: Record<string, unknown> = {}
if (teamId) {
varDefs.push('$teamId: ID!')
varDefs.push('$teamId: String!')
filterClauses.push('team: { id: { eq: $teamId } }')
variables.teamId = teamId
}
if (projectId) {
varDefs.push('$projectId: ID!')
varDefs.push('$projectId: String!')
filterClauses.push('project: { id: { eq: $projectId } }')
variables.projectId = projectId
}

View File

@@ -6,7 +6,6 @@ import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { toast } from '@/components/emcn'
import type { Filter, RowData, Sort, TableDefinition, TableMetadata, TableRow } from '@/lib/table'
import { TABLE_LIMITS } from '@/lib/table/constants'
const logger = createLogger('TableQueries')
@@ -24,7 +23,7 @@ export const tableKeys = {
[...tableKeys.rowsRoot(tableId), paramsKey] as const,
}
export interface TableRowsParams {
interface TableRowsParams {
workspaceId: string
tableId: string
limit: number
@@ -33,7 +32,7 @@ export interface TableRowsParams {
sort?: Sort | null
}
export interface TableRowsResponse {
interface TableRowsResponse {
rows: TableRow[]
totalCount: number
}
@@ -84,7 +83,7 @@ async function fetchTable(
return (data as { table: TableDefinition }).table
}
export async function fetchTableRows({
async function fetchTableRows({
workspaceId,
tableId,
limit,
@@ -126,48 +125,6 @@ export async function fetchTableRows({
}
}
/**
 * Fetches every row of a table by paging through fetchTableRows until the
 * server-reported total count is reached (or a page comes back empty,
 * which guards against count drift between requests).
 *
 * Accepts the same filter/sort as fetchTableRows plus an optional page
 * size and AbortSignal. Returns all rows and the final reported total.
 */
export async function fetchAllTableRows({
  workspaceId,
  tableId,
  filter,
  sort,
  pageSize = TABLE_LIMITS.MAX_QUERY_LIMIT,
  signal,
}: Pick<TableRowsParams, 'workspaceId' | 'tableId' | 'filter' | 'sort'> & {
  pageSize?: number
  signal?: AbortSignal
}): Promise<TableRowsResponse> {
  const collected: TableRow[] = []
  // Unknown until the first page response reports it.
  let reportedTotal = Number.POSITIVE_INFINITY
  let cursor = 0

  for (;;) {
    const page = await fetchTableRows({
      workspaceId,
      tableId,
      limit: pageSize,
      offset: cursor,
      filter,
      sort,
      signal,
    })
    collected.push(...page.rows)
    reportedTotal = page.totalCount
    // Stop on an empty page, or once we hold everything the server
    // says exists.
    if (page.rows.length === 0 || collected.length >= reportedTotal) break
    cursor += page.rows.length
  }

  return {
    rows: collected,
    // If the loop exited before any finite count arrived, fall back to
    // the number of rows actually fetched.
    totalCount: Number.isFinite(reportedTotal) ? reportedTotal : collected.length,
  }
}
function invalidateRowData(queryClient: ReturnType<typeof useQueryClient>, tableId: string) {
queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(tableId) })
}

View File

@@ -191,21 +191,6 @@ export function useTableUndo({ workspaceId, tableId }: UseTableUndoProps) {
break
}
case 'delete-column': {
if (direction === 'undo') {
addColumnMutation.mutate({
name: action.columnName,
type: action.columnType,
position: action.position,
unique: action.unique,
required: action.required,
})
} else {
deleteColumnMutation.mutate(action.columnName)
}
break
}
case 'rename-column': {
if (direction === 'undo') {
updateColumnMutation.mutate({

View File

@@ -448,11 +448,9 @@ export async function hasInboxAccess(userId: string): Promise<boolean> {
if (!isProd) {
return true
}
const [sub, billingStatus] = await Promise.all([
getHighestPrioritySubscription(userId),
getEffectiveBillingStatus(userId),
])
const sub = await getHighestPrioritySubscription(userId)
if (!sub) return false
const billingStatus = await getEffectiveBillingStatus(userId)
if (!hasUsableSubscriptionAccess(sub.status, billingStatus.billingBlocked)) return false
return getPlanTierCredits(sub.plan) >= 25000 || checkEnterprisePlan(sub)
} catch (error) {
@@ -461,30 +459,6 @@ export async function hasInboxAccess(userId: string): Promise<boolean> {
}
}
/**
 * Check if user has access to live sync (every 5 minutes) for KB connectors
 * Returns true if:
 * - Self-hosted deployment, OR
 * - User has a Max plan (credits >= 25000) or enterprise plan
 *
 * Fails closed (returns false) on any unexpected error.
 */
export async function hasLiveSyncAccess(userId: string): Promise<boolean> {
  try {
    // Self-hosted deployments get live sync unconditionally.
    if (!isHosted) {
      return true
    }
    // Subscription and billing status are independent lookups, so fetch
    // them concurrently.
    const [sub, billingStatus] = await Promise.all([
      getHighestPrioritySubscription(userId),
      getEffectiveBillingStatus(userId),
    ])
    if (!sub) return false
    // Blocked billing or an unusable subscription status disables access.
    if (!hasUsableSubscriptionAccess(sub.status, billingStatus.billingBlocked)) return false
    // Max-tier (>= 25000 plan credits) or enterprise plans qualify.
    return getPlanTierCredits(sub.plan) >= 25000 || checkEnterprisePlan(sub)
  } catch (error) {
    // Fail closed, but log so access issues are diagnosable.
    logger.error('Error checking live sync access', { error, userId })
    return false
  }
}
/**
* Check if user has exceeded their cost limit based on current period usage
*/

View File

@@ -21,7 +21,6 @@ export type UsageLogSource =
| 'workspace-chat'
| 'mcp_copilot'
| 'mothership_block'
| 'knowledge-base'
/**
* Metadata for 'model' category charges

View File

@@ -81,8 +81,7 @@ export class DocsChunker {
const textChunks = await this.splitContent(markdownContent)
logger.info(`Generating embeddings for ${textChunks.length} chunks in ${relativePath}`)
const embeddings: number[][] =
textChunks.length > 0 ? (await generateEmbeddings(textChunks)).embeddings : []
const embeddings = textChunks.length > 0 ? await generateEmbeddings(textChunks) : []
const embeddingModel = 'text-embedding-3-small'
const chunks: DocChunk[] = []

View File

@@ -1,11 +1,30 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import { createBlockFromParams } from './builders'
vi.mock('@/blocks/registry', () => createEditWorkflowRegistryMock(['agent', 'condition']))
const agentBlockConfig = {
type: 'agent',
name: 'Agent',
outputs: {
content: { type: 'string', description: 'Default content output' },
},
subBlocks: [{ id: 'responseFormat', type: 'response-format' }],
}
const conditionBlockConfig = {
type: 'condition',
name: 'Condition',
outputs: {},
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
vi.mock('@/blocks/registry', () => ({
getAllBlocks: () => [agentBlockConfig, conditionBlockConfig],
getBlock: (type: string) =>
type === 'agent' ? agentBlockConfig : type === 'condition' ? conditionBlockConfig : undefined,
}))
describe('createBlockFromParams', () => {
it('derives agent outputs from responseFormat when outputs are not provided', () => {

View File

@@ -1,16 +1,69 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { describe, expect, it, vi } from 'vitest'
import { applyOperationsToWorkflowState } from './engine'
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
vi.mock('@/blocks/registry', () =>
createEditWorkflowRegistryMock(['condition', 'agent', 'function'])
)
vi.mock('@/blocks/registry', () => ({
getAllBlocks: () => [
{
type: 'condition',
name: 'Condition',
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
},
{
type: 'agent',
name: 'Agent',
subBlocks: [
{ id: 'systemPrompt', type: 'long-input' },
{ id: 'model', type: 'combobox' },
],
},
{
type: 'function',
name: 'Function',
subBlocks: [
{ id: 'code', type: 'code' },
{ id: 'language', type: 'dropdown' },
],
},
],
getBlock: (type: string) => {
const blocks: Record<string, any> = {
condition: {
type: 'condition',
name: 'Condition',
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
},
agent: {
type: 'agent',
name: 'Agent',
subBlocks: [
{ id: 'systemPrompt', type: 'long-input' },
{ id: 'model', type: 'combobox' },
],
},
function: {
type: 'function',
name: 'Function',
subBlocks: [
{ id: 'code', type: 'code' },
{ id: 'language', type: 'dropdown' },
],
},
}
return blocks[type] || undefined
},
}))
function makeLoopWorkflow() {
return {

View File

@@ -1,12 +1,32 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import { normalizeConditionRouterIds } from './builders'
import { validateInputsForBlock } from './validation'
vi.mock('@/blocks/registry', () => createEditWorkflowRegistryMock(['condition', 'router_v2']))
const conditionBlockConfig = {
type: 'condition',
name: 'Condition',
outputs: {},
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
const routerBlockConfig = {
type: 'router_v2',
name: 'Router',
outputs: {},
subBlocks: [{ id: 'routes', type: 'router-input' }],
}
vi.mock('@/blocks/registry', () => ({
getBlock: (type: string) =>
type === 'condition'
? conditionBlockConfig
: type === 'router_v2'
? routerBlockConfig
: undefined,
}))
describe('validateInputsForBlock', () => {
it('accepts condition-input arrays with arbitrary item ids', () => {

View File

@@ -1,11 +1,11 @@
import { createFeatureFlagsMock, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { RateLimiter } from './rate-limiter'
import type { ConsumeResult, RateLimitStorageAdapter, TokenStatus } from './storage'
import { MANUAL_EXECUTION_LIMIT, RATE_LIMITS, RateLimitError } from './types'
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/core/config/feature-flags', () => createFeatureFlagsMock({ isBillingEnabled: true }))
vi.mock('@/lib/core/config/feature-flags', () => ({ isBillingEnabled: true }))
interface MockAdapter {
consumeTokens: Mock

View File

@@ -1,36 +0,0 @@
import { createLogger } from '@sim/logger'
const logger = createLogger('FileDownload')
/**
 * Sanitizes a string for use as a file or path segment in exported assets.
 * Any character outside [a-z0-9-_] (case-insensitive) is replaced with '-'.
 */
export function sanitizePathSegment(name: string): string {
  const disallowed = /[^a-z0-9-_]/gi
  return name.replace(disallowed, '-')
}
/**
 * Triggers a browser download of the given content under the given filename.
 * Throws if the browser cannot create or trigger the download.
 */
export function downloadFile(
  content: Blob | string,
  filename: string,
  mimeType = 'application/json'
): void {
  try {
    const payload = content instanceof Blob ? content : new Blob([content], { type: mimeType })
    const objectUrl = URL.createObjectURL(payload)
    const anchor = document.createElement('a')
    anchor.href = objectUrl
    anchor.download = filename
    // The anchor must be attached to the document for click() to work reliably.
    document.body.appendChild(anchor)
    anchor.click()
    document.body.removeChild(anchor)
    URL.revokeObjectURL(objectUrl)
  } catch (error) {
    logger.error('Failed to download file:', error)
    throw error
  }
}

View File

@@ -110,7 +110,7 @@ export async function createChunk(
workspaceId?: string | null
): Promise<ChunkData> {
logger.info(`[${requestId}] Generating embedding for manual chunk`)
const { embeddings } = await generateEmbeddings([chunkData.content], undefined, workspaceId)
const embeddings = await generateEmbeddings([chunkData.content], undefined, workspaceId)
// Calculate accurate token count
const tokenCount = estimateTokenCount(chunkData.content, 'openai')
@@ -359,7 +359,7 @@ export async function updateChunk(
if (content !== currentChunk[0].content) {
logger.info(`[${requestId}] Content changed, regenerating embedding for chunk ${chunkId}`)
const { embeddings } = await generateEmbeddings([content], undefined, workspaceId)
const embeddings = await generateEmbeddings([content], undefined, workspaceId)
// Calculate accurate token count
const tokenCount = estimateTokenCount(content, 'openai')

View File

@@ -25,11 +25,9 @@ import {
type SQL,
sql,
} from 'drizzle-orm'
import { recordUsage } from '@/lib/billing/core/usage-log'
import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { createBullMQJobData, isBullMQEnabled } from '@/lib/core/bullmq'
import { env } from '@/lib/core/config/env'
import { getCostMultiplier, isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { enqueueWorkspaceDispatch } from '@/lib/core/workspace-dispatch'
import { processDocument } from '@/lib/knowledge/documents/document-processor'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
@@ -45,7 +43,6 @@ import type { ProcessedDocumentTags } from '@/lib/knowledge/types'
import { deleteFile } from '@/lib/uploads/core/storage-service'
import { extractStorageKey } from '@/lib/uploads/utils/file-utils'
import type { DocumentProcessingPayload } from '@/background/knowledge-processing'
import { calculateCost } from '@/providers/utils'
const logger = createLogger('DocumentService')
@@ -463,10 +460,6 @@ export async function processDocumentAsync(
overlap: rawConfig?.overlap ?? 200,
}
let totalEmbeddingTokens = 0
let embeddingIsBYOK = false
let embeddingModelName = 'text-embedding-3-small'
await withTimeout(
(async () => {
const processed = await processDocument(
@@ -507,20 +500,10 @@ export async function processDocumentAsync(
const batchNum = Math.floor(i / batchSize) + 1
logger.info(`[${documentId}] Processing embedding batch ${batchNum}/${totalBatches}`)
const {
embeddings: batchEmbeddings,
totalTokens: batchTokens,
isBYOK,
modelName,
} = await generateEmbeddings(batch, undefined, kb[0].workspaceId)
const batchEmbeddings = await generateEmbeddings(batch, undefined, kb[0].workspaceId)
for (const emb of batchEmbeddings) {
embeddings.push(emb)
}
totalEmbeddingTokens += batchTokens
if (i === 0) {
embeddingIsBYOK = isBYOK
embeddingModelName = modelName
}
}
}
@@ -655,45 +638,6 @@ export async function processDocumentAsync(
const processingTime = Date.now() - startTime
logger.info(`[${documentId}] Successfully processed document in ${processingTime}ms`)
if (!embeddingIsBYOK && totalEmbeddingTokens > 0 && kb[0].userId) {
try {
const costMultiplier = getCostMultiplier()
const { total: cost } = calculateCost(
embeddingModelName,
totalEmbeddingTokens,
0,
false,
costMultiplier
)
if (cost > 0) {
await recordUsage({
userId: kb[0].userId,
workspaceId: kb[0].workspaceId ?? undefined,
entries: [
{
category: 'model',
source: 'knowledge-base',
description: embeddingModelName,
cost,
metadata: { inputTokens: totalEmbeddingTokens, outputTokens: 0 },
},
],
additionalStats: {
totalTokensUsed: sql`total_tokens_used + ${totalEmbeddingTokens}`,
},
})
await checkAndBillOverageThreshold(kb[0].userId)
} else {
logger.warn(
`[${documentId}] Embedding model "${embeddingModelName}" has no pricing entry — billing skipped`,
{ totalEmbeddingTokens, embeddingModelName }
)
}
} catch (billingError) {
logger.error(`[${documentId}] Failed to record embedding usage`, { error: billingError })
}
}
} catch (error) {
const processingTime = Date.now() - startTime
const errorMessage = error instanceof Error ? error.message : 'Unknown error'

View File

@@ -35,7 +35,6 @@ interface EmbeddingConfig {
apiUrl: string
headers: Record<string, string>
modelName: string
isBYOK: boolean
}
interface EmbeddingResponseItem {
@@ -72,19 +71,16 @@ async function getEmbeddingConfig(
'Content-Type': 'application/json',
},
modelName: kbModelName,
isBYOK: false,
}
}
let openaiApiKey = env.OPENAI_API_KEY
let isBYOK = false
if (workspaceId) {
const byokResult = await getBYOKKey(workspaceId, 'openai')
if (byokResult) {
logger.info('Using workspace BYOK key for OpenAI embeddings')
openaiApiKey = byokResult.apiKey
isBYOK = true
}
}
@@ -102,16 +98,12 @@ async function getEmbeddingConfig(
'Content-Type': 'application/json',
},
modelName: embeddingModel,
isBYOK,
}
}
const EMBEDDING_REQUEST_TIMEOUT_MS = 60_000
async function callEmbeddingAPI(
inputs: string[],
config: EmbeddingConfig
): Promise<{ embeddings: number[][]; totalTokens: number }> {
async function callEmbeddingAPI(inputs: string[], config: EmbeddingConfig): Promise<number[][]> {
return retryWithExponentialBackoff(
async () => {
const useDimensions = supportsCustomDimensions(config.modelName)
@@ -148,10 +140,7 @@ async function callEmbeddingAPI(
}
const data: EmbeddingAPIResponse = await response.json()
return {
embeddings: data.data.map((item) => item.embedding),
totalTokens: data.usage.total_tokens,
}
return data.data.map((item) => item.embedding)
},
{
maxRetries: 3,
@@ -189,23 +178,14 @@ async function processWithConcurrency<T, R>(
return results
}
export interface GenerateEmbeddingsResult {
embeddings: number[][]
totalTokens: number
isBYOK: boolean
modelName: string
}
/**
* Generate embeddings for multiple texts with token-aware batching and parallel processing.
* Returns embeddings alongside actual token count, model name, and whether a workspace BYOK key
* was used (vs. the platform's shared key) — enabling callers to make correct billing decisions.
* Generate embeddings for multiple texts with token-aware batching and parallel processing
*/
export async function generateEmbeddings(
texts: string[],
embeddingModel = 'text-embedding-3-small',
workspaceId?: string | null
): Promise<GenerateEmbeddingsResult> {
): Promise<number[][]> {
const config = await getEmbeddingConfig(embeddingModel, workspaceId)
const batches = batchByTokenLimit(texts, MAX_TOKENS_PER_REQUEST, embeddingModel)
@@ -224,20 +204,13 @@ export async function generateEmbeddings(
)
const allEmbeddings: number[][] = []
let totalTokens = 0
for (const batch of batchResults) {
for (const emb of batch.embeddings) {
for (const emb of batch) {
allEmbeddings.push(emb)
}
totalTokens += batch.totalTokens
}
return {
embeddings: allEmbeddings,
totalTokens,
isBYOK: config.isBYOK,
modelName: config.modelName,
}
return allEmbeddings
}
/**
@@ -254,6 +227,6 @@ export async function generateSearchEmbedding(
`Using ${config.useAzure ? 'Azure OpenAI' : 'OpenAI'} for search embedding generation`
)
const { embeddings } = await callEmbeddingAPI([query], config)
const embeddings = await callEmbeddingAPI([query], config)
return embeddings[0]
}

View File

@@ -1,7 +1,7 @@
/**
* @vitest-environment node
*/
import { createFeatureFlagsMock, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
interface MockMcpClient {
@@ -38,7 +38,7 @@ const { MockMcpClientConstructor, mockOnToolsChanged, mockPublishToolsChanged }
)
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/core/config/feature-flags', () => createFeatureFlagsMock({ isTest: false }))
vi.mock('@/lib/core/config/feature-flags', () => ({ isTest: false }))
vi.mock('@/lib/mcp/pubsub', () => ({
mcpPubSub: {
onToolsChanged: mockOnToolsChanged,

View File

@@ -317,15 +317,6 @@ export interface PostHogEventMap {
workspace_id: string
}
table_exported: {
workspace_id: string
table_id: string
row_count: number
column_count: number
has_filter: boolean
has_sort: boolean
}
custom_tool_saved: {
tool_id: string
workspace_id: string
@@ -367,12 +358,15 @@ export interface PostHogEventMap {
workspace_id: string
}
task_marked_read: {
workspace_id: string
}
task_marked_unread: {
workspace_id: string
}
task_message_sent: {
workspace_id: string
has_attachments: boolean
has_contexts: boolean
is_new_task: boolean
@@ -395,62 +389,6 @@ export interface PostHogEventMap {
source: 'help_menu' | 'editor_button' | 'toolbar_context_menu'
block_type?: string
}
search_result_selected: {
result_type:
| 'block'
| 'tool'
| 'trigger'
| 'tool_operation'
| 'workflow'
| 'workspace'
| 'task'
| 'table'
| 'file'
| 'knowledge_base'
| 'page'
| 'docs'
query_length: number
workspace_id: string
}
workflow_imported: {
workspace_id: string
workflow_count: number
format: 'json' | 'zip'
}
workflow_exported: {
workspace_id: string
workflow_count: number
format: 'json' | 'zip'
}
folder_created: {
workspace_id: string
}
folder_deleted: {
workspace_id: string
}
logs_filter_applied: {
filter_type: 'status' | 'workflow' | 'folder' | 'trigger' | 'time'
workspace_id: string
}
knowledge_base_document_deleted: {
knowledge_base_id: string
workspace_id: string
}
scheduled_task_created: {
workspace_id: string
}
scheduled_task_deleted: {
workspace_id: string
}
}
export type PostHogEventName = keyof PostHogEventMap

View File

@@ -1,10 +1,10 @@
/**
* @vitest-environment node
*/
import { createTableColumn } from '@sim/testing'
import { describe, expect, it } from 'vitest'
import { TABLE_LIMITS } from '../constants'
import {
type ColumnDefinition,
getUniqueColumns,
type TableSchema,
validateColumnDefinition,
@@ -66,12 +66,12 @@ describe('Validation', () => {
describe('validateColumnDefinition', () => {
it('should accept valid column definition', () => {
const column = createTableColumn({
const column: ColumnDefinition = {
name: 'email',
type: 'string',
required: true,
unique: true,
})
}
const result = validateColumnDefinition(column)
expect(result.valid).toBe(true)
})
@@ -80,20 +80,19 @@ describe('Validation', () => {
const types = ['string', 'number', 'boolean', 'date', 'json'] as const
for (const type of types) {
const result = validateColumnDefinition(createTableColumn({ name: 'test', type }))
const result = validateColumnDefinition({ name: 'test', type })
expect(result.valid).toBe(true)
}
})
it('should reject empty column name', () => {
const result = validateColumnDefinition(createTableColumn({ name: '', type: 'string' }))
const result = validateColumnDefinition({ name: '', type: 'string' })
expect(result.valid).toBe(false)
expect(result.errors).toContain('Column name is required')
})
it('should reject invalid column type', () => {
const result = validateColumnDefinition({
...createTableColumn({ name: 'test' }),
name: 'test',
type: 'invalid' as any,
})
@@ -103,7 +102,7 @@ describe('Validation', () => {
it('should reject column name exceeding max length', () => {
const longName = 'a'.repeat(TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH + 1)
const result = validateColumnDefinition(createTableColumn({ name: longName, type: 'string' }))
const result = validateColumnDefinition({ name: longName, type: 'string' })
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum length')
})
@@ -113,9 +112,9 @@ describe('Validation', () => {
it('should accept valid schema', () => {
const schema: TableSchema = {
columns: [
createTableColumn({ name: 'id', type: 'string', required: true, unique: true }),
createTableColumn({ name: 'name', type: 'string', required: true }),
createTableColumn({ name: 'age', type: 'number' }),
{ name: 'id', type: 'string', required: true, unique: true },
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
],
}
const result = validateTableSchema(schema)
@@ -132,8 +131,8 @@ describe('Validation', () => {
it('should reject duplicate column names', () => {
const schema: TableSchema = {
columns: [
createTableColumn({ name: 'id', type: 'string' }),
createTableColumn({ name: 'ID', type: 'number' }),
{ name: 'id', type: 'string' },
{ name: 'ID', type: 'number' },
],
}
const result = validateTableSchema(schema)
@@ -154,9 +153,10 @@ describe('Validation', () => {
})
it('should reject schema exceeding max columns', () => {
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) =>
createTableColumn({ name: `col_${i}`, type: 'string' })
)
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) => ({
name: `col_${i}`,
type: 'string' as const,
}))
const result = validateTableSchema({ columns })
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum columns')
@@ -182,11 +182,11 @@ describe('Validation', () => {
describe('validateRowAgainstSchema', () => {
const schema: TableSchema = {
columns: [
createTableColumn({ name: 'name', type: 'string', required: true }),
createTableColumn({ name: 'age', type: 'number' }),
createTableColumn({ name: 'active', type: 'boolean' }),
createTableColumn({ name: 'created', type: 'date' }),
createTableColumn({ name: 'metadata', type: 'json' }),
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
{ name: 'active', type: 'boolean' },
{ name: 'created', type: 'date' },
{ name: 'metadata', type: 'json' },
],
}
@@ -281,10 +281,10 @@ describe('Validation', () => {
it('should return only columns with unique=true', () => {
const schema: TableSchema = {
columns: [
createTableColumn({ name: 'id', type: 'string', unique: true }),
createTableColumn({ name: 'email', type: 'string', unique: true }),
createTableColumn({ name: 'name', type: 'string' }),
createTableColumn({ name: 'count', type: 'number', unique: false }),
{ name: 'id', type: 'string', unique: true },
{ name: 'email', type: 'string', unique: true },
{ name: 'name', type: 'string' },
{ name: 'count', type: 'number', unique: false },
],
}
const result = getUniqueColumns(schema)
@@ -295,8 +295,8 @@ describe('Validation', () => {
it('should return empty array when no unique columns', () => {
const schema: TableSchema = {
columns: [
createTableColumn({ name: 'name', type: 'string' }),
createTableColumn({ name: 'value', type: 'number' }),
{ name: 'name', type: 'string' },
{ name: 'value', type: 'number' },
],
}
const result = getUniqueColumns(schema)
@@ -307,9 +307,9 @@ describe('Validation', () => {
describe('validateUniqueConstraints', () => {
const schema: TableSchema = {
columns: [
createTableColumn({ name: 'id', type: 'string', unique: true }),
createTableColumn({ name: 'email', type: 'string', unique: true }),
createTableColumn({ name: 'name', type: 'string' }),
{ name: 'id', type: 'string', unique: true },
{ name: 'email', type: 'string', unique: true },
{ name: 'name', type: 'string' },
],
}

View File

@@ -1,12 +1,9 @@
/**
* Tests for workflow change detection comparison logic
*/
import type { WorkflowVariableFixture } from '@sim/testing'
import {
createBlock as createTestBlock,
createWorkflowState as createTestWorkflowState,
createWorkflowVariablesMap,
} from '@sim/testing'
import { describe, expect, it } from 'vitest'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -49,12 +46,6 @@ function createBlock(id: string, overrides: Record<string, any> = {}): any {
})
}
function createVariablesMap(
...variables: Parameters<typeof createWorkflowVariablesMap>[0]
): Record<string, WorkflowVariableFixture> {
return createWorkflowVariablesMap(variables)
}
describe('hasWorkflowChanged', () => {
describe('Basic Cases', () => {
it.concurrent('should return true when deployedState is null', () => {
@@ -2190,12 +2181,9 @@ describe('hasWorkflowChanged', () => {
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2204,12 +2192,9 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect removed variables', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
}
const currentState = {
@@ -2223,22 +2208,16 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable value changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
}
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'world',
}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'world' },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2247,12 +2226,16 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable type changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap({ id: 'var1', name: 'myVar', type: 'string', value: '123' }),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: '123' },
},
}
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap({ id: 'var1', name: 'myVar', type: 'number', value: 123 }),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'number', value: 123 },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2261,22 +2244,16 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable name changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'oldName',
type: 'string',
value: 'hello',
}),
variables: {
var1: { id: 'var1', name: 'oldName', type: 'string', value: 'hello' },
},
}
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'newName',
type: 'string',
value: 'hello',
}),
variables: {
var1: { id: 'var1', name: 'newName', type: 'string', value: 'hello' },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2285,18 +2262,18 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should not detect change for identical variables', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
}
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(false)
@@ -2333,22 +2310,16 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should handle complex variable values (objects)', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'config',
type: 'object',
value: { key: 'value1' },
}),
variables: {
var1: { id: 'var1', name: 'config', type: 'object', value: { key: 'value1' } },
},
}
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'config',
type: 'object',
value: { key: 'value2' },
}),
variables: {
var1: { id: 'var1', name: 'config', type: 'object', value: { key: 'value2' } },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2357,22 +2328,16 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should handle complex variable values (arrays)', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'items',
type: 'array',
value: [1, 2, 3],
}),
variables: {
var1: { id: 'var1', name: 'items', type: 'array', value: [1, 2, 3] },
},
}
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap({
id: 'var1',
name: 'items',
type: 'array',
value: [1, 2, 4],
}),
variables: {
var1: { id: 'var1', name: 'items', type: 'array', value: [1, 2, 4] },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2381,18 +2346,18 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should not detect change when variable key order differs', () => {
const deployedState = {
...createWorkflowState({}),
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
}
const currentState = {
...createWorkflowState({}),
variables: createVariablesMap(
{ id: 'var2', name: 'count', type: 'number', value: 42 },
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' }
),
variables: {
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(false)
@@ -2875,135 +2840,175 @@ describe('hasWorkflowChanged', () => {
describe('Variables (UI-only fields should not trigger change)', () => {
it.concurrent('should not detect change when validationError differs', () => {
const deployedState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
}),
})
},
}
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
validationError: undefined,
}),
})
},
}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent('should not detect change when validationError has value vs missing', () => {
const deployedState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
}),
})
},
}
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
validationError: 'Not a valid number',
}),
})
},
}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent('should detect change when variable value differs', () => {
const deployedState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'old value',
}),
})
},
}
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'new value',
}),
})
validationError: undefined,
},
}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
it.concurrent('should detect change when variable is added', () => {
const deployedState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: {},
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {}
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
}),
})
},
}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
it.concurrent('should detect change when variable is removed', () => {
const deployedState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
}),
})
},
}
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: {},
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
it.concurrent('should not detect change when empty array vs empty object', () => {
const deployedState = createWorkflowState({
blocks: { block1: createBlock('block1') },
blocks: {
block1: createBlock('block1'),
},
})
// Intentional type violation to test robustness with malformed data
;(deployedState as unknown as Record<string, unknown>).variables = []
;(deployedState as any).variables = []
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: {},
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
@@ -3146,7 +3151,7 @@ describe('generateWorkflowDiffSummary', () => {
})
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'hello' }),
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'hello' } },
})
const result = generateWorkflowDiffSummary(currentState, previousState)
expect(result.hasChanges).toBe(true)
@@ -3156,11 +3161,11 @@ describe('generateWorkflowDiffSummary', () => {
it.concurrent('should detect modified variables', () => {
const previousState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'hello' }),
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'hello' } },
})
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'world' }),
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'world' } },
})
const result = generateWorkflowDiffSummary(currentState, previousState)
expect(result.hasChanges).toBe(true)

View File

@@ -1,8 +1,6 @@
/**
* @vitest-environment node
*/
import { createMockSelectChain, createMockUpdateChain } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const {
@@ -37,7 +35,13 @@ vi.mock('@sim/db/schema', () => ({
workflowSchedule: { archivedAt: 'workflow_schedule_archived_at' },
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.mock('@/lib/workflows/utils', () => ({
getWorkflowById: (...args: unknown[]) => mockGetWorkflowById(...args),
@@ -62,6 +66,24 @@ vi.mock('@/lib/core/telemetry', () => ({
import { archiveWorkflow } from '@/lib/workflows/lifecycle'
// Builds a drizzle-style select chain mock; `.from()`/`.innerJoin()` are
// chainable and `.where()` resolves to the supplied result.
function createSelectChain<T>(result: T) {
  return {
    from: vi.fn().mockReturnThis(),
    innerJoin: vi.fn().mockReturnThis(),
    where: vi.fn().mockResolvedValue(result),
  }
}
// Mocks a drizzle update chain: `.set(...).where(...)` resolves to an empty array.
function createUpdateChain() {
  const whereMock = vi.fn().mockResolvedValue([])
  return {
    set: vi.fn().mockReturnValue({ where: whereMock }),
  }
}
describe('workflow lifecycle', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -85,10 +107,10 @@ describe('workflow lifecycle', () => {
archivedAt: new Date(),
})
mockSelect.mockReturnValue(createMockSelectChain([]))
mockSelect.mockReturnValue(createSelectChain([]))
const tx = {
update: vi.fn().mockImplementation(() => createMockUpdateChain()),
update: vi.fn().mockImplementation(() => createUpdateChain()),
}
mockTransaction.mockImplementation(async (callback: (trx: typeof tx) => Promise<void>) =>
callback(tx)

View File

@@ -1,5 +1,4 @@
import { createLogger } from '@sim/logger'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import {
type ExportWorkflowState,
sanitizeForExport,
@@ -44,6 +43,36 @@ export interface WorkspaceExportStructure {
folders: FolderExportData[]
}
/**
 * Sanitizes a string for use as a path segment in a ZIP file.
 * Every character that is not alphanumeric, '-' or '_' becomes '-'.
 */
export function sanitizePathSegment(name: string): string {
  return name.replace(/[^a-z0-9_-]/gi, '-')
}
/**
 * Triggers a browser download of the given content.
 *
 * String content is wrapped in a Blob with the given MIME type; a temporary
 * object URL is attached to a transient anchor element, which is clicked and
 * then removed, and the URL is revoked. Failures are logged, not thrown.
 */
export function downloadFile(
  content: Blob | string,
  filename: string,
  mimeType = 'application/json'
): void {
  try {
    let payload: Blob
    if (content instanceof Blob) {
      payload = content
    } else {
      payload = new Blob([content], { type: mimeType })
    }
    const objectUrl = URL.createObjectURL(payload)
    const anchor = document.createElement('a')
    anchor.href = objectUrl
    anchor.download = filename
    document.body.appendChild(anchor)
    anchor.click()
    document.body.removeChild(anchor)
    // Release the object URL so the blob can be garbage-collected.
    URL.revokeObjectURL(objectUrl)
  } catch (error) {
    logger.error('Failed to download file:', error)
  }
}
/**
* Fetches a workflow's state and variables for export.
* Returns null if the workflow cannot be fetched.

View File

@@ -1,8 +1,6 @@
/**
* @vitest-environment node
*/
import { createMockDeleteChain, createMockSelectChain, createMockUpdateChain } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const { mockSelect, mockTransaction, mockArchiveWorkflowsForWorkspace, mockGetWorkspaceWithOwner } =
@@ -35,7 +33,13 @@ vi.mock('@sim/db/schema', () => ({
workspaceNotificationSubscription: { active: 'workspace_notification_active' },
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.mock('@/lib/workflows/lifecycle', () => ({
archiveWorkflowsForWorkspace: (...args: unknown[]) => mockArchiveWorkflowsForWorkspace(...args),
@@ -47,6 +51,14 @@ vi.mock('@/lib/workspaces/permissions/utils', () => ({
import { archiveWorkspace } from './lifecycle'
/** Builds an update chain mock where `set(...).where(...)` resolves to an empty array. */
function createUpdateChain() {
  const resolveEmpty = vi.fn().mockResolvedValue([])
  return {
    set: vi.fn().mockReturnValue({ where: resolveEmpty }),
  }
}
describe('workspace lifecycle', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -60,12 +72,22 @@ describe('workspace lifecycle', () => {
archivedAt: null,
})
mockArchiveWorkflowsForWorkspace.mockResolvedValue(2)
mockSelect.mockReturnValue(createMockSelectChain([{ id: 'server-1' }]))
mockSelect.mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([{ id: 'server-1' }]),
}),
})
const tx = {
select: vi.fn().mockReturnValue(createMockSelectChain([{ id: 'kb-1' }])),
update: vi.fn().mockImplementation(() => createMockUpdateChain()),
delete: vi.fn().mockImplementation(() => createMockDeleteChain()),
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([{ id: 'kb-1' }]),
}),
}),
update: vi.fn().mockImplementation(() => createUpdateChain()),
delete: vi.fn().mockImplementation(() => ({
where: vi.fn().mockResolvedValue([]),
})),
}
mockTransaction.mockImplementation(async (callback: (trx: typeof tx) => Promise<void>) =>
callback(tx)

View File

@@ -114,12 +114,6 @@ export const useTableUndoStore = create<TableUndoState>()(
if (action.type === 'create-row' && action.rowId === oldRowId) {
return { ...entry, action: { ...action, rowId: newRowId } }
}
if (action.type === 'create-rows') {
const patchedRows = action.rows.map((r) =>
r.rowId === oldRowId ? { ...r, rowId: newRowId } : r
)
return { ...entry, action: { ...action, rows: patchedRows } }
}
return entry
})

View File

@@ -32,14 +32,6 @@ export type TableUndoAction =
}
| { type: 'delete-rows'; rows: DeletedRowSnapshot[] }
| { type: 'create-column'; columnName: string; position: number }
| {
type: 'delete-column'
columnName: string
columnType: string
position: number
unique: boolean
required: boolean
}
| { type: 'rename-column'; oldName: string; newName: string }
| { type: 'update-column-type'; columnName: string; previousType: string; newType: string }
| {

View File

@@ -1 +0,0 @@
ALTER TYPE "public"."usage_log_source" ADD VALUE 'knowledge-base';

File diff suppressed because it is too large Load Diff

View File

@@ -1289,13 +1289,6 @@
"when": 1775149654511,
"tag": "0184_hard_thaddeus_ross",
"breakpoints": true
},
{
"idx": 185,
"version": "7",
"when": 1775247973312,
"tag": "0185_new_gravity",
"breakpoints": true
}
]
}

View File

@@ -2273,7 +2273,6 @@ export const usageLogSourceEnum = pgEnum('usage_log_source', [
'workspace-chat',
'mcp_copilot',
'mothership_block',
'knowledge-base',
])
export const usageLog = pgTable(

View File

@@ -118,15 +118,6 @@ export {
type SerializedConnection,
type SerializedWorkflow,
} from './serialized-block.factory'
export {
createTableColumn,
createTableRow,
type TableColumnFactoryOptions,
type TableColumnFixture,
type TableColumnType,
type TableRowFactoryOptions,
type TableRowFixture,
} from './table.factory'
// Tool mock responses
export {
mockDriveResponses,
@@ -187,10 +178,3 @@ export {
type WorkflowFactoryOptions,
type WorkflowStateFixture,
} from './workflow.factory'
export {
createWorkflowVariable,
createWorkflowVariablesMap,
type WorkflowVariableFactoryOptions,
type WorkflowVariableFixture,
type WorkflowVariableType,
} from './workflow-variable.factory'

View File

@@ -1,12 +0,0 @@
import { describe, expect, it } from 'vitest'
import { createTableColumn } from './table.factory'
// Verifies the column factory's randomly-suffixed default names stay identifier-safe.
describe('table factory', () => {
  it('generates default column names that match table naming rules', () => {
    // Sample many columns because the default name carries a random suffix.
    const generatedNames = Array.from({ length: 100 }, () => createTableColumn().name)
    for (const name of generatedNames) {
      // Naming rule: leading lowercase letter or underscore, then lowercase
      // alphanumerics/underscores only.
      expect(name).toMatch(/^[a-z_][a-z0-9_]*$/)
    }
  })
})

View File

@@ -1,62 +0,0 @@
import { customAlphabet, nanoid } from 'nanoid'
/** Value types a table column fixture can declare. */
export type TableColumnType = 'string' | 'number' | 'boolean' | 'date' | 'json'
/** A table column definition used as test fixture data. */
export interface TableColumnFixture {
  name: string
  type: TableColumnType
  required?: boolean
  unique?: boolean
}
/** A table row used as test fixture data; timestamps are ISO-8601 strings. */
export interface TableRowFixture {
  id: string
  data: Record<string, unknown>
  position: number
  createdAt: string
  updatedAt: string
}
/** Optional overrides accepted by `createTableColumn`. */
export interface TableColumnFactoryOptions {
  name?: string
  type?: TableColumnType
  required?: boolean
  unique?: boolean
}
/** Optional overrides accepted by `createTableRow`. */
export interface TableRowFactoryOptions {
  id?: string
  data?: Record<string, unknown>
  position?: number
  createdAt?: string
  updatedAt?: string
}
// Random 6-character suffix generator for default column names, restricted to
// an identifier-safe alphabet (lowercase letters, digits, underscore).
const createTableColumnSuffix = customAlphabet('abcdefghijklmnopqrstuvwxyz0123456789_', 6)
/**
 * Creates a table column fixture with sensible defaults.
 *
 * Omitted fields fall back to a random `column_*` name and the `'string'`
 * type; `required`/`unique` pass through (possibly undefined) unchanged.
 */
export function createTableColumn(options: TableColumnFactoryOptions = {}): TableColumnFixture {
  const { name, type, required, unique } = options
  return {
    name: name ?? `column_${createTableColumnSuffix()}`,
    type: type ?? 'string',
    required,
    unique,
  }
}
/**
 * Creates a table row fixture with sensible defaults.
 *
 * When timestamps are not supplied, `createdAt` and `updatedAt` share a
 * single "now" value so a fresh row reads as never modified.
 */
export function createTableRow(options: TableRowFactoryOptions = {}): TableRowFixture {
  const now = new Date().toISOString()
  const { id, data, position, createdAt, updatedAt } = options
  return {
    id: id ?? `row_${nanoid(8)}`,
    data: data ?? {},
    position: position ?? 0,
    createdAt: createdAt ?? now,
    updatedAt: updatedAt ?? now,
  }
}

View File

@@ -1,53 +0,0 @@
import { nanoid } from 'nanoid'
/** Value types a workflow variable fixture can declare. */
export type WorkflowVariableType = 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
/** A workflow variable used as test fixture data. */
export interface WorkflowVariableFixture {
  id: string
  name: string
  type: WorkflowVariableType
  value: unknown
  workflowId?: string
  validationError?: string
}
/** Optional overrides accepted by `createWorkflowVariable`. */
export interface WorkflowVariableFactoryOptions {
  id?: string
  name?: string
  type?: WorkflowVariableType
  value?: unknown
  workflowId?: string
  validationError?: string
}
/**
 * Creates a workflow variable fixture with sensible defaults.
 *
 * NOTE(review): the default name uses the first 4 characters of the id, which
 * for a generated id is always the literal `var_` prefix — possibly the random
 * suffix was intended; confirm before relying on name uniqueness.
 */
export function createWorkflowVariable(
  options: WorkflowVariableFactoryOptions = {}
): WorkflowVariableFixture {
  const variableId = options.id ?? `var_${nanoid(8)}`
  return {
    id: variableId,
    name: options.name ?? `variable_${variableId.slice(0, 4)}`,
    type: options.type ?? 'string',
    value: options.value ?? '',
    workflowId: options.workflowId,
    validationError: options.validationError,
  }
}
/**
 * Creates a variables map keyed by variable id.
 *
 * Each entry is run through `createWorkflowVariable`, so partial options get
 * the usual defaults; later duplicates of an id overwrite earlier ones.
 */
export function createWorkflowVariablesMap(
  variables: WorkflowVariableFactoryOptions[] = []
): Record<string, WorkflowVariableFixture> {
  const byId: Record<string, WorkflowVariableFixture> = {}
  for (const options of variables) {
    const fixture = createWorkflowVariable(options)
    byId[fixture.id] = fixture
  }
  return byId
}

View File

@@ -46,14 +46,10 @@ export * from './builders'
export * from './factories'
export {
AuthTypeMock,
asyncRouteParams,
auditMock,
clearRedisMocks,
createEditWorkflowRegistryMock,
createEnvMock,
createFeatureFlagsMock,
createMockDb,
createMockDeleteChain,
createMockFetch,
createMockFormDataRequest,
createMockGetEnv,
@@ -61,19 +57,15 @@ export {
createMockRedis,
createMockRequest,
createMockResponse,
createMockSelectChain,
createMockSocket,
createMockStorage,
createMockUpdateChain,
databaseMock,
defaultMockEnv,
defaultMockUser,
drizzleOrmMock,
envMock,
featureFlagsMock,
loggerMock,
type MockAuthResult,
type MockFeatureFlags,
type MockFetchResponse,
type MockHybridAuthResult,
type MockRedis,

View File

@@ -103,38 +103,6 @@ export function createMockDb() {
}
}
/**
 * Creates a select chain that resolves from `where()`.
 *
 * `from`/`innerJoin`/`leftJoin` each return the chain itself so calls can be
 * strung together; `where()` resolves to the supplied `result`.
 */
export function createMockSelectChain<T>(result: T) {
  const chainable = () => vi.fn().mockReturnThis()
  return {
    from: chainable(),
    innerJoin: chainable(),
    leftJoin: chainable(),
    where: vi.fn().mockResolvedValue(result),
  }
}
/**
 * Creates an update chain that resolves from `where()`.
 *
 * `set()` returns a terminal stage whose `where()` resolves to `result`
 * (an empty array by default).
 */
export function createMockUpdateChain<T>(result: T = [] as T) {
  const terminal = { where: vi.fn().mockResolvedValue(result) }
  return { set: vi.fn().mockReturnValue(terminal) }
}
/**
 * Creates a delete chain that resolves from `where()`.
 *
 * `where()` resolves to `result` (an empty array by default).
 */
export function createMockDeleteChain<T>(result: T = [] as T) {
  const where = vi.fn().mockResolvedValue(result)
  return { where }
}
/**
* Mock module for @sim/db.
* Use with vi.mock() to replace the real database.

View File

@@ -1,55 +0,0 @@
// Canned block configurations backing the edit-workflow registry mock.
const editWorkflowBlockConfigs: Record<
  string,
  {
    type: string
    name: string
    outputs: Record<string, unknown>
    subBlocks: { id: string; type: string }[]
  }
> = {
  condition: {
    type: 'condition',
    name: 'Condition',
    outputs: {},
    subBlocks: [{ id: 'conditions', type: 'condition-input' }],
  },
  agent: {
    type: 'agent',
    name: 'Agent',
    outputs: {
      content: { type: 'string', description: 'Default content output' },
    },
    subBlocks: [
      { id: 'systemPrompt', type: 'long-input' },
      { id: 'model', type: 'combobox' },
      { id: 'responseFormat', type: 'response-format' },
    ],
  },
  function: {
    type: 'function',
    name: 'Function',
    outputs: {},
    subBlocks: [
      { id: 'code', type: 'code' },
      { id: 'language', type: 'dropdown' },
    ],
  },
  router_v2: {
    type: 'router_v2',
    name: 'Router',
    outputs: {},
    subBlocks: [{ id: 'routes', type: 'router-input' }],
  },
}
/**
 * Builds a block-registry mock exposing `getAllBlocks`/`getBlock`.
 *
 * @param types - Block types to include; omitting it enables every known type.
 */
export function createEditWorkflowRegistryMock(types?: string[]) {
  const wanted = new Set(types ?? Object.keys(editWorkflowBlockConfigs))
  const blocks: typeof editWorkflowBlockConfigs = {}
  for (const [type, config] of Object.entries(editWorkflowBlockConfigs)) {
    if (wanted.has(type)) {
      blocks[type] = config
    }
  }
  return {
    getAllBlocks: () => Object.values(blocks),
    getBlock: (type: string) => blocks[type],
  }
}

View File

@@ -1,65 +0,0 @@
/** Shape of the feature-flags module as seen by tests. */
export interface MockFeatureFlags {
  isProd: boolean
  isDev: boolean
  isTest: boolean
  isHosted: boolean
  isBillingEnabled: boolean
  isEmailVerificationEnabled: boolean
  isAuthDisabled: boolean
  isRegistrationDisabled: boolean
  isEmailPasswordEnabled: boolean
  isSignupEmailValidationEnabled: boolean
  isTriggerDevEnabled: boolean
  isSsoEnabled: boolean
  isCredentialSetsEnabled: boolean
  isAccessControlEnabled: boolean
  isOrganizationsEnabled: boolean
  isInboxEnabled: boolean
  isE2bEnabled: boolean
  isAzureConfigured: boolean
  isInvitationsDisabled: boolean
  isPublicApiDisabled: boolean
  isReactGrabEnabled: boolean
  isReactScanEnabled: boolean
  getAllowedIntegrationsFromEnv: () => string[] | null
  getAllowedMcpDomainsFromEnv: () => string[] | null
  getCostMultiplier: () => number
}
/**
 * Creates a mutable mock for the feature flags module.
 *
 * Defaults describe a plain test environment: `isTest` and
 * `isEmailPasswordEnabled` are true, every other flag is off, and the env
 * accessors return null / a cost multiplier of 1. Pass `overrides` to flip
 * individual flags for a specific test.
 */
export function createFeatureFlagsMock(
  overrides: Partial<MockFeatureFlags> = {}
): MockFeatureFlags {
  const defaults: MockFeatureFlags = {
    isProd: false,
    isDev: false,
    isTest: true,
    isHosted: false,
    isBillingEnabled: false,
    isEmailVerificationEnabled: false,
    isAuthDisabled: false,
    isRegistrationDisabled: false,
    isEmailPasswordEnabled: true,
    isSignupEmailValidationEnabled: false,
    isTriggerDevEnabled: false,
    isSsoEnabled: false,
    isCredentialSetsEnabled: false,
    isAccessControlEnabled: false,
    isOrganizationsEnabled: false,
    isInboxEnabled: false,
    isE2bEnabled: false,
    isAzureConfigured: false,
    isInvitationsDisabled: false,
    isPublicApiDisabled: false,
    isReactGrabEnabled: false,
    isReactScanEnabled: false,
    getAllowedIntegrationsFromEnv: () => null,
    getAllowedMcpDomainsFromEnv: () => null,
    getCostMultiplier: () => 1,
  }
  return Object.assign(defaults, overrides)
}
/** Shared default instance for tests that need no per-test overrides. */
export const featureFlagsMock = createFeatureFlagsMock()

View File

@@ -16,6 +16,7 @@
* ```
*/
// API mocks
export {
mockCommonSchemas,
mockConsoleLogger,
@@ -23,13 +24,16 @@ export {
mockKnowledgeSchemas,
setupCommonApiMocks,
} from './api.mock'
// Audit mocks
export { auditMock } from './audit.mock'
// Auth mocks
export {
defaultMockUser,
type MockAuthResult,
type MockUser,
mockAuth,
} from './auth.mock'
// Blocks mocks
export {
blocksMock,
createMockGetBlock,
@@ -38,23 +42,18 @@ export {
mockToolConfigs,
toolsUtilsMock,
} from './blocks.mock'
// Database mocks
export {
createMockDb,
createMockDeleteChain,
createMockSelectChain,
createMockSql,
createMockSqlOperators,
createMockUpdateChain,
databaseMock,
drizzleOrmMock,
} from './database.mock'
export { createEditWorkflowRegistryMock } from './edit-workflow.mock'
// Env mocks
export { createEnvMock, createMockGetEnv, defaultMockEnv, envMock } from './env.mock'
export {
createFeatureFlagsMock,
featureFlagsMock,
type MockFeatureFlags,
} from './feature-flags.mock'
// Executor mocks - use side-effect import: import '@sim/testing/mocks/executor'
// Fetch mocks
export {
createMockFetch,
createMockResponse,
@@ -64,21 +63,24 @@ export {
mockNextFetchResponse,
setupGlobalFetchMock,
} from './fetch.mock'
// Hybrid auth mocks
export { AuthTypeMock, type MockHybridAuthResult, mockHybridAuth } from './hybrid-auth.mock'
// Logger mocks
export { clearLoggerMocks, createMockLogger, getLoggerCalls, loggerMock } from './logger.mock'
// Redis mocks
export { clearRedisMocks, createMockRedis, type MockRedis } from './redis.mock'
export {
asyncRouteParams,
createMockFormDataRequest,
createMockRequest,
requestUtilsMock,
} from './request.mock'
// Request mocks
export { createMockFormDataRequest, createMockRequest, requestUtilsMock } from './request.mock'
// Socket mocks
export {
createMockSocket,
createMockSocketServer,
type MockSocket,
type MockSocketServer,
} from './socket.mock'
// Storage mocks
export { clearStorageMocks, createMockStorage, setupGlobalStorageMocks } from './storage.mock'
// Telemetry mocks
export { telemetryMock } from './telemetry.mock'
// UUID mocks
export { mockCryptoUuid, mockUuid } from './uuid.mock'

View File

@@ -59,13 +59,6 @@ export function createMockFormDataRequest(
})
}
/**
 * Creates the async `params` object used by App Router route handlers.
 *
 * Route handlers receive their params wrapped in a promise; this wraps a
 * plain object the same way for tests.
 */
export async function asyncRouteParams<T extends Record<string, unknown>>(
  params: T
): Promise<T> {
  return params
}
/**
* Pre-configured mock for @/lib/core/utils/request module.
*