Compare commits

...

25 Commits

Author SHA1 Message Date
waleed
aef5b54b01 improvement(tables): restore column drop target background highlight 2026-04-04 17:22:15 -07:00
waleed
5fe71484e3 fix(tables): remove leftover AbortController from use-export-table 2026-04-04 17:22:15 -07:00
waleed
5933877023 fix: direct import for sanitizePathSegment in use-import-workspace 2026-04-04 17:22:15 -07:00
waleed
4f7459250c fix(tables): direct imports for downloadFile/sanitizePathSegment, fix greptile comments 2026-04-04 17:22:15 -07:00
waleed
194a2d38e4 improvement(tables): suppress drag indicator when drop would be no-op 2026-04-04 17:21:48 -07:00
waleed
8b9367e217 fix(tables): remove any types, clean up test variable assignments 2026-04-04 17:21:48 -07:00
waleed
12c527d7ea fix(tables): isColumnSelection dead code, paste column failure, drag indexOf guard 2026-04-04 17:21:48 -07:00
waleed
f7d7bc1a43 fix(tables): undo/redo gaps, escape regression, conflict marker
- Add delete-column undo/redo support
- Add undo tracking to RowModal (create/edit/delete)
- Fix patchUndoRowId to also patch create-rows actions
- Extract actual row position from API response (not -1)
- Fix Escape key to preserve cell selection when editing
- Remove stray conflict marker from modal.tsx
2026-04-04 17:21:48 -07:00
waleed
9e0fc2cd85 fixes 2026-04-04 17:21:48 -07:00
waleed
f588b36914 fix 2026-04-04 17:21:48 -07:00
waleed
eba424c8a3 improvement(tables): ops and experience 2026-04-04 17:21:48 -07:00
Theodore Li
855c892f55 feat(block): Add cloudwatch block (#3953)
* feat(block): Add cloudwatch block (#3911)

* feat(block): add cloudwatch integration

* Fix bun lock

* Add logger, use execution timeout

* Switch metric dimensions to map style input

* Fix attribute names for dimension map

* Fix import styling

---------

Co-authored-by: Theodore Li <theo@sim.ai>

* Fix import ordering

---------

Co-authored-by: Theodore Li <theo@sim.ai>
2026-04-04 19:54:12 -04:00
Waleed
8ae4b88d80 fix(integrations): show disabled role combobox for readonly members (#3962) 2026-04-04 16:50:11 -07:00
Waleed
a70ccddef5 fix(kb): fix Linear connector GraphQL type errors and tag slot reuse (#3961)
* fix(kb): fix Linear connector GraphQL type errors and tag slot reuse

* fix(kb): simplify tag slot reuse, revert Linear GraphQL types to String

Clean up newTagSlotMapping into direct assignment, remove unnecessary
comment, and revert ID! back to String! to match Linear SDK types.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(kb): use ID! type for Linear GraphQL filter variables

* fix(kb): verify field type when reusing existing tag slots

Add fieldType check to the tag slot reuse logic so a connector with
a matching displayName but different fieldType falls through to fresh
slot allocation instead of silently reusing an incompatible slot.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(kb): enable search on connector selector dropdowns

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-04 16:50:04 -07:00
Waleed
b4d9b8c396 feat(analytics): posthog audit — remove noise, add 10 new events (#3960)
* feat(analytics): posthog audit — remove noise, add 10 new events

Remove task_marked_read (fires automatically on every task view).

Add workspace_id to task_message_sent for group analytics.

New events:
- search_result_selected: block/tool/trigger/workflow/table/file/
  knowledge_base/workspace/task/page/docs with query_length
- workflow_imported: count + format (json/zip)
- workflow_exported: count + format (json/zip)
- folder_created / folder_deleted
- logs_filter_applied: status/workflow/folder/trigger/time
- knowledge_base_document_deleted
- scheduled_task_created / scheduled_task_deleted

* fix(analytics): use usePostHog + captureEvent in hooks, track custom date range

* fix(analytics): always fire scheduled_task_deleted regardless of workspaceId

* fix(analytics): correct format field logic and add missing useCallback deps
2026-04-04 16:49:52 -07:00
Waleed
ce53275e9d feat(knowledge): add Live sync option to KB connectors + fix embedding billing (#3959)
* feat(knowledge): add Live sync option to KB connector modal for Max/Enterprise users

Adds a "Live" (every 5 min) sync frequency option gated to Max and Enterprise plan users.
Includes client-side badge + disabled state, shared sync intervals constant, and server-side
plan validation on both POST and PATCH connector routes.

* fix(knowledge): record embedding usage cost for KB document processing

Adds billing tracking to the KB embedding pipeline, which was previously
generating OpenAI API calls with no cost recorded. Token counts are now
captured from the actual API response and recorded via recordUsage after
successful embedding insertion. BYOK workspaces are excluded from billing.
Applies to all execution paths: direct, BullMQ, and Trigger.dev.

* fix(knowledge): simplify embedding billing — use calculateCost, return modelName

- Use calculateCost() from @/providers/utils instead of inline formula, consistent
  with how LLM billing works throughout the platform
- Return modelName from GenerateEmbeddingsResult so billing uses the actual model
  (handles custom Azure deployments) instead of a hardcoded fallback string
- Fix docs-chunker.ts empty-path fallback to satisfy full GenerateEmbeddingsResult type

* fix(knowledge): remove dev bypass from hasLiveSyncAccess

* chore(knowledge): rename sync-intervals to consts, fix stale TSDoc comment

* improvement(knowledge): extract MaxBadge component, capture billing config once per document

* fix(knowledge): add knowledge-base to usage_log_source enum, fix docs-chunker type

* fix(knowledge): generate migration for knowledge-base usage_log_source enum value

* fix(knowledge): add knowledge-base to usage_log_source enum via drizzle-kit

* fix(knowledge): fix search embedding test mocks, parallelize billing lookups

* fix(knowledge): warn when embedding model has no pricing entry

* fix(knowledge): call checkAndBillOverageThreshold after embedding usage
2026-04-04 16:49:42 -07:00
abhinavDhulipala
7971a64e63 fix(setup): db migrate hard fail and correct ini env (#3946) 2026-04-04 16:22:19 -07:00
abhinavDhulipala
f39b4c74dc fix(setup): bun run prepare explicitly (#3947) 2026-04-04 16:13:53 -07:00
Waleed
0ba8ab1ec7 fix(posthog): upgrade SDKs and fix serverless event flushing (#3951)
* fix(posthog): upgrade SDKs and fix serverless event flushing

* fix(posthog): revert flushAt to 20 for long-running ECS container
2026-04-04 16:11:35 -07:00
Waleed
039e57541e fix(csp): allow Cloudflare Turnstile domains for script, frame, and connect (#3948) 2026-04-04 15:54:14 -07:00
Theodore Li
75f8c6ad7e fix(ui): persist active resource tab in url, fix internal markdown links (#3925)
* fix(ui): handle markdown internal links

* Fix lint

* Reference correct scroll container

* Add resource tab to url state, scroll correctly on new tab

* Handle delete all resource by clearing url

---------

Co-authored-by: Theodore Li <theo@sim.ai>
2026-04-04 18:25:35 -04:00
Waleed
c2b12cf21f fix(captcha): use getResponsePromise for Turnstile execute-on-submit flow (#3943) 2026-04-04 12:34:53 -07:00
Waleed
4a9439e952 improvement(models): tighten model metadata and crawl discovery (#3942)
* improvement(models): tighten model metadata and crawl discovery

Made-with: Cursor

* revert hardcoded FF

* fix(models): narrow structured output ranking signal

Made-with: Cursor

* fix(models): remove generic best-for copy

Made-with: Cursor

* fix(models): restore best-for with stricter criteria

Made-with: Cursor

* fix

* models
2026-04-04 11:53:54 -07:00
Waleed
893e322a49 fix(envvars): restore workflowUserId fallback for scheduled execution env var resolution (#3941)
* fix(envvars): restore workflowUserId fallback for scheduled execution env var resolution

* test(envvars): add coverage for env var user resolution branches
2026-04-04 11:22:52 -07:00
Emir Karabeg
b0cb95be2f feat: mothership/copilot feedback (#3940)
* feat: mothership/copilot feedback

* fix(feedback): remove mutation object from useCallback deps
2026-04-04 10:46:49 -07:00
135 changed files with 19817 additions and 1229 deletions

View File

@@ -90,6 +90,7 @@ Sim also supports local models via [Ollama](https://ollama.ai) and [vLLM](https:
git clone https://github.com/simstudioai/sim.git
cd sim
bun install
bun run prepare # Set up pre-commit hooks
```
2. Set up PostgreSQL with pgvector:
@@ -104,6 +105,11 @@ Or install manually via the [pgvector guide](https://github.com/pgvector/pgvecto
```bash
cp apps/sim/.env.example apps/sim/.env
# Create your secrets
perl -i -pe "s/your_encryption_key/$(openssl rand -hex 32)/" apps/sim/.env
perl -i -pe "s/your_internal_api_secret/$(openssl rand -hex 32)/" apps/sim/.env
perl -i -pe "s/your_api_encryption_key/$(openssl rand -hex 32)/" apps/sim/.env
# DB configs for migration
cp packages/db/.env.example packages/db/.env
# Edit both .env files to set DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
```
@@ -111,7 +117,7 @@ cp packages/db/.env.example packages/db/.env
4. Run migrations:
```bash
cd packages/db && bunx drizzle-kit migrate --config=./drizzle.config.ts
cd packages/db && bun run db:migrate
```
5. Start development servers:

View File

@@ -1,8 +1,5 @@
# Database (Required)
DATABASE_URL="postgresql://postgres:password@localhost:5432/postgres"
# PostgreSQL Port (Optional) - defaults to 5432 if not specified
# POSTGRES_PORT=5432
DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
# Authentication (Required unless DISABLE_AUTH=true)
BETTER_AUTH_SECRET=your_secret_key # Use `openssl rand -hex 32` to generate, or visit https://www.better-auth.com/docs/installation

View File

@@ -99,8 +99,6 @@ function SignupFormContent({
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
const [formError, setFormError] = useState<string | null>(null)
const turnstileRef = useRef<TurnstileInstance>(null)
const captchaResolveRef = useRef<((token: string) => void) | null>(null)
const captchaRejectRef = useRef<((reason: Error) => void) | null>(null)
const turnstileSiteKey = useMemo(() => getEnv('NEXT_PUBLIC_TURNSTILE_SITE_KEY'), [])
const redirectUrl = useMemo(
() => searchParams.get('redirect') || searchParams.get('callbackUrl') || '',
@@ -258,27 +256,14 @@ function SignupFormContent({
let token: string | undefined
const widget = turnstileRef.current
if (turnstileSiteKey && widget) {
let timeoutId: ReturnType<typeof setTimeout> | undefined
try {
widget.reset()
token = await Promise.race([
new Promise<string>((resolve, reject) => {
captchaResolveRef.current = resolve
captchaRejectRef.current = reject
widget.execute()
}),
new Promise<string>((_, reject) => {
timeoutId = setTimeout(() => reject(new Error('Captcha timed out')), 15_000)
}),
])
widget.execute()
token = await widget.getResponsePromise()
} catch {
setFormError('Captcha verification failed. Please try again.')
setIsLoading(false)
return
} finally {
clearTimeout(timeoutId)
captchaResolveRef.current = null
captchaRejectRef.current = null
}
}
@@ -535,10 +520,7 @@ function SignupFormContent({
<Turnstile
ref={turnstileRef}
siteKey={turnstileSiteKey}
onSuccess={(token) => captchaResolveRef.current?.(token)}
onError={() => captchaRejectRef.current?.(new Error('Captcha verification failed'))}
onExpire={() => captchaRejectRef.current?.(new Error('Captcha token expired'))}
options={{ execution: 'execute' }}
options={{ execution: 'execute', appearance: 'execute' }}
/>
)}

View File

@@ -18,6 +18,7 @@ import {
formatPrice,
formatTokenCount,
formatUpdatedAt,
getEffectiveMaxOutputTokens,
getModelBySlug,
getPricingBounds,
getProviderBySlug,
@@ -198,7 +199,8 @@ export default async function ModelPage({
</div>
<p className='max-w-[820px] text-[17px] text-[var(--landing-text-muted)] leading-relaxed'>
{model.summary} {model.bestFor}
{model.summary}
{model.bestFor ? ` ${model.bestFor}` : ''}
</p>
<div className='mt-8 flex flex-wrap gap-3'>
@@ -229,13 +231,11 @@ export default async function ModelPage({
? `${formatPrice(model.pricing.cachedInput)}/1M`
: 'N/A'
}
compact
/>
<StatCard label='Output price' value={`${formatPrice(model.pricing.output)}/1M`} />
<StatCard
label='Context window'
value={model.contextWindow ? formatTokenCount(model.contextWindow) : 'Unknown'}
compact
/>
</section>
@@ -280,12 +280,12 @@ export default async function ModelPage({
label='Max output'
value={
model.capabilities.maxOutputTokens
? `${formatTokenCount(model.capabilities.maxOutputTokens)} tokens`
: 'Standard defaults'
? `${formatTokenCount(getEffectiveMaxOutputTokens(model.capabilities))} tokens`
: 'Not published'
}
/>
<DetailItem label='Provider' value={provider.name} />
<DetailItem label='Best for' value={model.bestFor} />
{model.bestFor ? <DetailItem label='Best for' value={model.bestFor} /> : null}
</div>
</section>

View File

@@ -0,0 +1,49 @@
import { describe, expect, it } from 'vitest'
import { buildModelCapabilityFacts, getEffectiveMaxOutputTokens, getModelBySlug } from './utils'
describe('model catalog capability facts', () => {
it.concurrent(
'shows structured outputs support and published max output tokens for gpt-4o',
() => {
const model = getModelBySlug('openai', 'gpt-4o')
expect(model).not.toBeNull()
expect(model).toBeDefined()
const capabilityFacts = buildModelCapabilityFacts(model!)
const structuredOutputs = capabilityFacts.find((fact) => fact.label === 'Structured outputs')
const maxOutputTokens = capabilityFacts.find((fact) => fact.label === 'Max output tokens')
expect(getEffectiveMaxOutputTokens(model!.capabilities)).toBe(16384)
expect(structuredOutputs?.value).toBe('Supported')
expect(maxOutputTokens?.value).toBe('16k')
}
)
it.concurrent('preserves native structured outputs labeling for claude models', () => {
const model = getModelBySlug('anthropic', 'claude-sonnet-4-6')
expect(model).not.toBeNull()
expect(model).toBeDefined()
const capabilityFacts = buildModelCapabilityFacts(model!)
const structuredOutputs = capabilityFacts.find((fact) => fact.label === 'Structured outputs')
expect(structuredOutputs?.value).toBe('Supported (native)')
})
it.concurrent('does not invent a max output token limit when one is not published', () => {
expect(getEffectiveMaxOutputTokens({})).toBeNull()
})
it.concurrent('keeps best-for copy for clearly differentiated models only', () => {
const researchModel = getModelBySlug('google', 'deep-research-pro-preview-12-2025')
const generalModel = getModelBySlug('xai', 'grok-4-latest')
expect(researchModel).not.toBeNull()
expect(generalModel).not.toBeNull()
expect(researchModel?.bestFor).toContain('research workflows')
expect(generalModel?.bestFor).toBeUndefined()
})
})

View File

@@ -112,7 +112,7 @@ export interface CatalogModel {
capabilities: ModelCapabilities
capabilityTags: string[]
summary: string
bestFor: string
bestFor?: string
searchText: string
}
@@ -190,6 +190,14 @@ export function formatCapabilityBoolean(
return value ? positive : negative
}
function supportsCatalogStructuredOutputs(capabilities: ModelCapabilities): boolean {
return !capabilities.deepResearch
}
export function getEffectiveMaxOutputTokens(capabilities: ModelCapabilities): number | null {
return capabilities.maxOutputTokens ?? null
}
function trimTrailingZeros(value: string): string {
return value.replace(/\.0+$/, '').replace(/(\.\d*?)0+$/, '$1')
}
@@ -326,7 +334,7 @@ function buildCapabilityTags(capabilities: ModelCapabilities): string[] {
tags.push('Tool choice')
}
if (capabilities.nativeStructuredOutputs) {
if (supportsCatalogStructuredOutputs(capabilities)) {
tags.push('Structured outputs')
}
@@ -365,7 +373,7 @@ function buildBestForLine(model: {
pricing: PricingInfo
capabilities: ModelCapabilities
contextWindow: number | null
}): string {
}): string | null {
const { pricing, capabilities, contextWindow } = model
if (capabilities.deepResearch) {
@@ -376,10 +384,6 @@ function buildBestForLine(model: {
return 'Best for reasoning-heavy tasks that need more deliberate model control.'
}
if (pricing.input <= 0.2 && pricing.output <= 1.25) {
return 'Best for cost-sensitive automations, background tasks, and high-volume workloads.'
}
if (contextWindow && contextWindow >= 1000000) {
return 'Best for long-context retrieval, large documents, and high-memory workflows.'
}
@@ -388,7 +392,11 @@ function buildBestForLine(model: {
return 'Best for production workflows that need reliable typed outputs.'
}
return 'Best for general-purpose AI workflows inside Sim.'
if (pricing.input <= 0.2 && pricing.output <= 1.25) {
return 'Best for cost-sensitive automations, background tasks, and high-volume workloads.'
}
return null
}
function buildModelSummary(
@@ -437,6 +445,11 @@ const rawProviders = Object.values(PROVIDER_DEFINITIONS).map((provider) => {
const shortId = stripProviderPrefix(provider.id, model.id)
const mergedCapabilities = { ...provider.capabilities, ...model.capabilities }
const capabilityTags = buildCapabilityTags(mergedCapabilities)
const bestFor = buildBestForLine({
pricing: model.pricing,
capabilities: mergedCapabilities,
contextWindow: model.contextWindow ?? null,
})
const displayName = formatModelDisplayName(provider.id, model.id)
const modelSlug = slugify(shortId)
const href = `/models/${providerSlug}/${modelSlug}`
@@ -461,11 +474,7 @@ const rawProviders = Object.values(PROVIDER_DEFINITIONS).map((provider) => {
model.contextWindow ?? null,
capabilityTags
),
bestFor: buildBestForLine({
pricing: model.pricing,
capabilities: mergedCapabilities,
contextWindow: model.contextWindow ?? null,
}),
...(bestFor ? { bestFor } : {}),
searchText: [
provider.name,
providerDisplayName,
@@ -683,6 +692,7 @@ export function buildModelFaqs(provider: CatalogProvider, model: CatalogModel):
export function buildModelCapabilityFacts(model: CatalogModel): CapabilityFact[] {
const { capabilities } = model
const supportsStructuredOutputs = supportsCatalogStructuredOutputs(capabilities)
return [
{
@@ -711,7 +721,11 @@ export function buildModelCapabilityFacts(model: CatalogModel): CapabilityFact[]
},
{
label: 'Structured outputs',
value: formatCapabilityBoolean(capabilities.nativeStructuredOutputs),
value: supportsStructuredOutputs
? capabilities.nativeStructuredOutputs
? 'Supported (native)'
: 'Supported'
: 'Not supported',
},
{
label: 'Tool choice',
@@ -732,8 +746,8 @@ export function buildModelCapabilityFacts(model: CatalogModel): CapabilityFact[]
{
label: 'Max output tokens',
value: capabilities.maxOutputTokens
? formatTokenCount(capabilities.maxOutputTokens)
: 'Standard defaults',
? formatTokenCount(getEffectiveMaxOutputTokens(capabilities))
: 'Not published',
},
]
}
@@ -752,8 +766,8 @@ export function getProviderCapabilitySummary(provider: CatalogProvider): Capabil
const reasoningCount = provider.models.filter(
(model) => model.capabilities.reasoningEffort || model.capabilities.thinking
).length
const structuredCount = provider.models.filter(
(model) => model.capabilities.nativeStructuredOutputs
const structuredCount = provider.models.filter((model) =>
supportsCatalogStructuredOutputs(model.capabilities)
).length
const deepResearchCount = provider.models.filter(
(model) => model.capabilities.deepResearch

View File

@@ -5,6 +5,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { captureServerEvent } from '@/lib/posthog/server'
import { performDeleteFolder } from '@/lib/workflows/orchestration'
import { checkForCircularReference } from '@/lib/workflows/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -156,6 +157,13 @@ export async function DELETE(
return NextResponse.json({ error: result.error }, { status })
}
captureServerEvent(
session.user.id,
'folder_deleted',
{ workspace_id: existingFolder.workspaceId },
{ groups: { workspace: existingFolder.workspaceId } }
)
return NextResponse.json({
success: true,
deletedItems: result.deletedItems,

View File

@@ -6,6 +6,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { captureServerEvent } from '@/lib/posthog/server'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('FoldersAPI')
@@ -145,6 +146,13 @@ export async function POST(request: NextRequest) {
logger.info('Created new folder:', { id, name, workspaceId, parentId })
captureServerEvent(
session.user.id,
'folder_created',
{ workspace_id: workspaceId },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: session.user.id,

View File

@@ -13,6 +13,7 @@ import { z } from 'zod'
import { decryptApiKey } from '@/lib/api-key/crypto'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { hasLiveSyncAccess } from '@/lib/billing/core/subscription'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteDocumentStorageFiles } from '@/lib/knowledge/documents/service'
import { cleanupUnusedTagDefinitions } from '@/lib/knowledge/tags/service'
@@ -116,6 +117,20 @@ export async function PATCH(request: NextRequest, { params }: RouteParams) {
)
}
if (
parsed.data.syncIntervalMinutes !== undefined &&
parsed.data.syncIntervalMinutes > 0 &&
parsed.data.syncIntervalMinutes < 60
) {
const canUseLiveSync = await hasLiveSyncAccess(auth.userId)
if (!canUseLiveSync) {
return NextResponse.json(
{ error: 'Live sync requires a Max or Enterprise plan' },
{ status: 403 }
)
}
}
if (parsed.data.sourceConfig !== undefined) {
const existingRows = await db
.select()

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { encryptApiKey } from '@/lib/api-key/crypto'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { hasLiveSyncAccess } from '@/lib/billing/core/subscription'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { allocateTagSlots } from '@/lib/knowledge/constants'
@@ -97,6 +98,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const { connectorType, credentialId, apiKey, sourceConfig, syncIntervalMinutes } = parsed.data
if (syncIntervalMinutes > 0 && syncIntervalMinutes < 60) {
const canUseLiveSync = await hasLiveSyncAccess(auth.userId)
if (!canUseLiveSync) {
return NextResponse.json(
{ error: 'Live sync requires a Max or Enterprise plan' },
{ status: 403 }
)
}
}
const connectorConfig = CONNECTOR_REGISTRY[connectorType]
if (!connectorConfig) {
return NextResponse.json(
@@ -151,19 +162,39 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
const tagSlotMapping: Record<string, string> = {}
let newTagSlots: Record<string, string> = {}
if (connectorConfig.tagDefinitions?.length) {
const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? [])
const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id))
const existingDefs = await db
.select({ tagSlot: knowledgeBaseTagDefinitions.tagSlot })
.select({
tagSlot: knowledgeBaseTagDefinitions.tagSlot,
displayName: knowledgeBaseTagDefinitions.displayName,
fieldType: knowledgeBaseTagDefinitions.fieldType,
})
.from(knowledgeBaseTagDefinitions)
.where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId))
const usedSlots = new Set<string>(existingDefs.map((d) => d.tagSlot))
const { mapping, skipped: skippedTags } = allocateTagSlots(enabledDefs, usedSlots)
const existingByName = new Map(
existingDefs.map((d) => [d.displayName, { tagSlot: d.tagSlot, fieldType: d.fieldType }])
)
const defsNeedingSlots: typeof enabledDefs = []
for (const td of enabledDefs) {
const existing = existingByName.get(td.displayName)
if (existing && existing.fieldType === td.fieldType) {
tagSlotMapping[td.id] = existing.tagSlot
} else {
defsNeedingSlots.push(td)
}
}
const { mapping, skipped: skippedTags } = allocateTagSlots(defsNeedingSlots, usedSlots)
Object.assign(tagSlotMapping, mapping)
newTagSlots = mapping
for (const name of skippedTags) {
logger.warn(`[${requestId}] No available slots for "${name}"`)
@@ -197,7 +228,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
throw new Error('Knowledge base not found')
}
for (const [semanticId, slot] of Object.entries(tagSlotMapping)) {
for (const [semanticId, slot] of Object.entries(newTagSlots)) {
const td = connectorConfig.tagDefinitions!.find((d) => d.id === semanticId)!
await createTagDefinition(
{

View File

@@ -10,6 +10,7 @@ import {
retryDocumentProcessing,
updateDocument,
} from '@/lib/knowledge/documents/service'
import { captureServerEvent } from '@/lib/posthog/server'
import { checkDocumentAccess, checkDocumentWriteAccess } from '@/app/api/knowledge/utils'
const logger = createLogger('DocumentByIdAPI')
@@ -285,6 +286,14 @@ export async function DELETE(
request: req,
})
const kbWorkspaceId = accessCheck.knowledgeBase?.workspaceId ?? ''
captureServerEvent(
userId,
'knowledge_base_document_deleted',
{ knowledge_base_id: knowledgeBaseId, workspace_id: kbWorkspaceId },
kbWorkspaceId ? { groups: { workspace: kbWorkspaceId } } : undefined
)
return NextResponse.json({
success: true,
data: result,

View File

@@ -5,6 +5,7 @@
* @vitest-environment node
*/
import { createEnvMock, databaseMock, loggerMock } from '@sim/testing'
import { mockNextFetchResponse } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('drizzle-orm')
@@ -14,16 +15,6 @@ vi.mock('@/lib/knowledge/documents/utils', () => ({
retryWithExponentialBackoff: (fn: any) => fn(),
}))
vi.stubGlobal(
'fetch',
vi.fn().mockResolvedValue({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
})
)
vi.mock('@/lib/core/config/env', () => createEnvMock())
import {
@@ -178,17 +169,16 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
mockNextFetchResponse({
json: {
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
const result = await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
'https://test.openai.azure.com/openai/deployments/text-embedding-ada-002/embeddings?api-version=2024-12-01-preview',
expect.objectContaining({
headers: expect.objectContaining({
@@ -209,17 +199,16 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
mockNextFetchResponse({
json: {
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
const result = await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
'https://api.openai.com/v1/embeddings',
expect.objectContaining({
headers: expect.objectContaining({
@@ -243,17 +232,16 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
mockNextFetchResponse({
json: {
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect.stringContaining('api-version='),
expect.any(Object)
)
@@ -273,17 +261,16 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
mockNextFetchResponse({
json: {
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
await generateSearchEmbedding('test query', 'text-embedding-3-small')
expect(fetchSpy).toHaveBeenCalledWith(
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
'https://test.openai.azure.com/openai/deployments/custom-embedding-model/embeddings?api-version=2024-12-01-preview',
expect.any(Object)
)
@@ -311,13 +298,12 @@ describe('Knowledge Search Utils', () => {
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
mockNextFetchResponse({
ok: false,
status: 404,
statusText: 'Not Found',
text: async () => 'Deployment not found',
} as any)
text: 'Deployment not found',
})
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
@@ -332,13 +318,12 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
mockNextFetchResponse({
ok: false,
status: 429,
statusText: 'Too Many Requests',
text: async () => 'Rate limit exceeded',
} as any)
text: 'Rate limit exceeded',
})
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
@@ -356,17 +341,16 @@ describe('Knowledge Search Utils', () => {
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
mockNextFetchResponse({
json: {
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
body: JSON.stringify({
@@ -387,17 +371,16 @@ describe('Knowledge Search Utils', () => {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
mockNextFetchResponse({
json: {
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
usage: { prompt_tokens: 1, total_tokens: 1 },
},
})
await generateSearchEmbedding('test query', 'text-embedding-3-small')
expect(fetchSpy).toHaveBeenCalledWith(
expect(vi.mocked(fetch)).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
body: JSON.stringify({

View File

@@ -77,6 +77,7 @@ vi.stubGlobal(
{ embedding: [0.1, 0.2], index: 0 },
{ embedding: [0.3, 0.4], index: 1 },
],
usage: { prompt_tokens: 2, total_tokens: 2 },
}),
})
)
@@ -294,7 +295,7 @@ describe('Knowledge Utils', () => {
it.concurrent('should return same length as input', async () => {
const result = await generateEmbeddings(['a', 'b'])
expect(result.length).toBe(2)
expect(result.embeddings.length).toBe(2)
})
it('should use Azure OpenAI when Azure config is provided', async () => {
@@ -313,6 +314,7 @@ describe('Knowledge Utils', () => {
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2], index: 0 }],
usage: { prompt_tokens: 1, total_tokens: 1 },
}),
} as any)
@@ -342,6 +344,7 @@ describe('Knowledge Utils', () => {
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2], index: 0 }],
usage: { prompt_tokens: 1, total_tokens: 1 },
}),
} as any)

View File

@@ -159,16 +159,7 @@ export async function PATCH(
}
)
}
if (isUnread === false) {
captureServerEvent(
userId,
'task_marked_read',
{ workspace_id: updatedChat.workspaceId },
{
groups: { workspace: updatedChat.workspaceId },
}
)
} else if (isUnread === true) {
if (isUnread === true) {
captureServerEvent(
userId,
'task_marked_unread',

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
@@ -298,6 +299,13 @@ export async function DELETE(
request,
})
captureServerEvent(
session.user.id,
'scheduled_task_deleted',
{ workspace_id: workspaceId ?? '' },
workspaceId ? { groups: { workspace: workspaceId } } : undefined
)
return NextResponse.json({ message: 'Schedule deleted successfully' })
} catch (error) {
logger.error(`[${requestId}] Error deleting schedule`, error)

View File

@@ -3,6 +3,9 @@
*
* @vitest-environment node
*/
import { createFeatureFlagsMock, createMockRequest } from '@sim/testing'
import { drizzleOrmMock } from '@sim/testing/mocks'
import type { NextRequest } from 'next/server'
import { beforeEach, describe, expect, it, vi } from 'vitest'
@@ -10,7 +13,6 @@ const {
mockVerifyCronAuth,
mockExecuteScheduleJob,
mockExecuteJobInline,
mockFeatureFlags,
mockDbReturning,
mockDbUpdate,
mockEnqueue,
@@ -33,12 +35,6 @@ const {
mockVerifyCronAuth: vi.fn().mockReturnValue(null),
mockExecuteScheduleJob: vi.fn().mockResolvedValue(undefined),
mockExecuteJobInline: vi.fn().mockResolvedValue(undefined),
mockFeatureFlags: {
isTriggerDevEnabled: false,
isHosted: false,
isProd: false,
isDev: true,
},
mockDbReturning,
mockDbUpdate,
mockEnqueue,
@@ -49,6 +45,13 @@ const {
}
})
const mockFeatureFlags = createFeatureFlagsMock({
isTriggerDevEnabled: false,
isHosted: false,
isProd: false,
isDev: true,
})
vi.mock('@/lib/auth/internal', () => ({
verifyCronAuth: mockVerifyCronAuth,
}))
@@ -91,17 +94,7 @@ vi.mock('@/lib/workflows/utils', () => ({
}),
}))
vi.mock('drizzle-orm', () => ({
and: vi.fn((...conditions: unknown[]) => ({ type: 'and', conditions })),
eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })),
ne: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'ne' })),
lte: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lte' })),
lt: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lt' })),
not: vi.fn((condition: unknown) => ({ type: 'not', condition })),
isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })),
or: vi.fn((...conditions: unknown[]) => ({ type: 'or', conditions })),
sql: vi.fn((strings: unknown, ...values: unknown[]) => ({ type: 'sql', strings, values })),
}))
vi.mock('drizzle-orm', () => drizzleOrmMock)
vi.mock('@sim/db', () => ({
db: {
@@ -177,18 +170,13 @@ const SINGLE_JOB = [
},
]
function createMockRequest(): NextRequest {
const mockHeaders = new Map([
['authorization', 'Bearer test-cron-secret'],
['content-type', 'application/json'],
])
return {
headers: {
get: (key: string) => mockHeaders.get(key.toLowerCase()) || null,
},
url: 'http://localhost:3000/api/schedules/execute',
} as NextRequest
function createCronRequest() {
return createMockRequest(
'GET',
undefined,
{ Authorization: 'Bearer test-cron-secret' },
'http://localhost:3000/api/schedules/execute'
)
}
describe('Scheduled Workflow Execution API Route', () => {
@@ -204,7 +192,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should execute scheduled workflows with Trigger.dev disabled', async () => {
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as unknown as NextRequest)
expect(response).toBeDefined()
expect(response.status).toBe(200)
@@ -217,7 +205,7 @@ describe('Scheduled Workflow Execution API Route', () => {
mockFeatureFlags.isTriggerDevEnabled = true
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as unknown as NextRequest)
expect(response).toBeDefined()
expect(response.status).toBe(200)
@@ -228,7 +216,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should handle case with no due schedules', async () => {
mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as unknown as NextRequest)
expect(response.status).toBe(200)
const data = await response.json()
@@ -239,7 +227,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should execute multiple schedules in parallel', async () => {
mockDbReturning.mockReturnValueOnce(MULTIPLE_SCHEDULES).mockReturnValueOnce([])
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as unknown as NextRequest)
expect(response.status).toBe(200)
const data = await response.json()
@@ -249,7 +237,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should queue mothership jobs to BullMQ when available', async () => {
mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce(SINGLE_JOB)
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as unknown as NextRequest)
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(
@@ -274,7 +262,7 @@ describe('Scheduled Workflow Execution API Route', () => {
it('should enqueue preassigned correlation metadata for schedules', async () => {
mockDbReturning.mockReturnValue(SINGLE_SCHEDULE)
const response = await GET(createMockRequest())
const response = await GET(createCronRequest() as unknown as NextRequest)
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(

View File

@@ -5,6 +5,7 @@ import { and, eq, isNull, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { validateCronExpression } from '@/lib/workflows/schedules/utils'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
@@ -277,6 +278,13 @@ export async function POST(req: NextRequest) {
lifecycle,
})
captureServerEvent(
session.user.id,
'scheduled_task_created',
{ workspace_id: workspaceId },
{ groups: { workspace: workspaceId } }
)
return NextResponse.json(
{ schedule: { id, status: 'active', cronExpression, nextRunAt } },
{ status: 201 }

View File

@@ -0,0 +1,96 @@
import {
type AlarmType,
CloudWatchClient,
DescribeAlarmsCommand,
type StateValue,
} from '@aws-sdk/client-cloudwatch'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
const logger = createLogger('CloudWatchDescribeAlarms')

/**
 * Request payload for the describe-alarms tool endpoint.
 *
 * Required: AWS region plus static credentials supplied by the caller.
 * Optional filters: alarm name prefix, alarm state, alarm type, and a
 * max-record limit. Empty strings are preprocessed to undefined so blank
 * form inputs don't trip the optional enum/number validators.
 */
const DescribeAlarmsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  alarmNamePrefix: z.string().optional(),
  // '' -> undefined so an unselected dropdown doesn't fail the enum check
  stateValue: z.preprocess(
    (v) => (v === '' ? undefined : v),
    z.enum(['OK', 'ALARM', 'INSUFFICIENT_DATA']).optional()
  ),
  alarmType: z.preprocess(
    (v) => (v === '' ? undefined : v),
    z.enum(['MetricAlarm', 'CompositeAlarm']).optional()
  ),
  // Accepts numeric strings; ''/null/undefined all mean "no limit"
  limit: z.preprocess(
    (v) => (v === '' || v === undefined || v === null ? undefined : v),
    z.number({ coerce: true }).int().positive().optional()
  ),
})
/**
 * POST /api/tools/cloudwatch/describe-alarms
 *
 * Lists CloudWatch metric and composite alarms using caller-supplied AWS
 * credentials, applying any optional name/state/type/limit filters.
 * Composite alarms are normalized to the same output shape as metric
 * alarms, with metric-only fields set to undefined.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = DescribeAlarmsSchema.parse(await request.json())

    const client = new CloudWatchClient({
      region: params.region,
      credentials: {
        accessKeyId: params.accessKeyId,
        secretAccessKey: params.secretAccessKey,
      },
    })

    // Forward only the filters the caller actually supplied.
    const awsResponse = await client.send(
      new DescribeAlarmsCommand({
        ...(params.alarmNamePrefix && { AlarmNamePrefix: params.alarmNamePrefix }),
        ...(params.stateValue && { StateValue: params.stateValue as StateValue }),
        ...(params.alarmType && { AlarmTypes: [params.alarmType as AlarmType] }),
        ...(params.limit !== undefined && { MaxRecords: params.limit }),
      })
    )

    // Metric alarms first, then composite alarms mapped onto the same shape.
    const alarms = [
      ...(awsResponse.MetricAlarms ?? []).map((alarm) => ({
        alarmName: alarm.AlarmName ?? '',
        alarmArn: alarm.AlarmArn ?? '',
        stateValue: alarm.StateValue ?? 'UNKNOWN',
        stateReason: alarm.StateReason ?? '',
        metricName: alarm.MetricName,
        namespace: alarm.Namespace,
        comparisonOperator: alarm.ComparisonOperator,
        threshold: alarm.Threshold,
        evaluationPeriods: alarm.EvaluationPeriods,
        stateUpdatedTimestamp: alarm.StateUpdatedTimestamp?.getTime(),
      })),
      ...(awsResponse.CompositeAlarms ?? []).map((alarm) => ({
        alarmName: alarm.AlarmName ?? '',
        alarmArn: alarm.AlarmArn ?? '',
        stateValue: alarm.StateValue ?? 'UNKNOWN',
        stateReason: alarm.StateReason ?? '',
        // Composite alarms have no single underlying metric.
        metricName: undefined,
        namespace: undefined,
        comparisonOperator: undefined,
        threshold: undefined,
        evaluationPeriods: undefined,
        stateUpdatedTimestamp: alarm.StateUpdatedTimestamp?.getTime(),
      })),
    ]

    return NextResponse.json({
      success: true,
      output: { alarms },
    })
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to describe CloudWatch alarms'
    logger.error('DescribeAlarms failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,62 @@
import { DescribeLogGroupsCommand } from '@aws-sdk/client-cloudwatch-logs'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { createCloudWatchLogsClient } from '@/app/api/tools/cloudwatch/utils'
const logger = createLogger('CloudWatchDescribeLogGroups')

/**
 * Request payload for the describe-log-groups tool endpoint.
 * Caller supplies AWS region + static credentials; optionally a log group
 * name prefix and a max result count.
 */
const DescribeLogGroupsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  prefix: z.string().optional(),
  // Accepts numeric strings; ''/null/undefined all mean "no limit"
  limit: z.preprocess(
    (v) => (v === '' || v === undefined || v === null ? undefined : v),
    z.number({ coerce: true }).int().positive().optional()
  ),
})
/**
 * POST /api/tools/cloudwatch/describe-log-groups
 *
 * Lists CloudWatch log groups for the supplied credentials, optionally
 * filtered by name prefix and capped by a result limit.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkSessionOrInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = DescribeLogGroupsSchema.parse(await request.json())
    const client = createCloudWatchLogsClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    // Forward only the filters the caller provided.
    const awsResponse = await client.send(
      new DescribeLogGroupsCommand({
        ...(params.prefix && { logGroupNamePrefix: params.prefix }),
        ...(params.limit !== undefined && { limit: params.limit }),
      })
    )

    const logGroups = (awsResponse.logGroups ?? []).map((group) => ({
      logGroupName: group.logGroupName ?? '',
      arn: group.arn ?? '',
      storedBytes: group.storedBytes ?? 0,
      retentionInDays: group.retentionInDays,
      creationTime: group.creationTime,
    }))

    return NextResponse.json({
      success: true,
      output: { logGroups },
    })
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to describe CloudWatch log groups'
    logger.error('DescribeLogGroups failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,52 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { createCloudWatchLogsClient, describeLogStreams } from '@/app/api/tools/cloudwatch/utils'
const logger = createLogger('CloudWatchDescribeLogStreams')

/**
 * Request payload for the describe-log-streams tool endpoint.
 * Caller supplies AWS credentials, a target log group, and optionally a
 * stream name prefix and a result limit.
 */
const DescribeLogStreamsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  logGroupName: z.string().min(1, 'Log group name is required'),
  prefix: z.string().optional(),
  // Accepts numeric strings; ''/null/undefined all mean "no limit"
  limit: z.preprocess(
    (v) => (v === '' || v === undefined || v === null ? undefined : v),
    z.number({ coerce: true }).int().positive().optional()
  ),
})
/**
 * POST /api/tools/cloudwatch/describe-log-streams
 *
 * Lists streams within a log group via the shared describeLogStreams
 * helper, optionally filtered by name prefix and capped by a limit.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkSessionOrInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = DescribeLogStreamsSchema.parse(await request.json())
    const client = createCloudWatchLogsClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    const { logStreams } = await describeLogStreams(client, params.logGroupName, {
      prefix: params.prefix,
      limit: params.limit,
    })

    return NextResponse.json({
      success: true,
      output: { logStreams },
    })
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to describe CloudWatch log streams'
    logger.error('DescribeLogStreams failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,60 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createCloudWatchLogsClient, getLogEvents } from '@/app/api/tools/cloudwatch/utils'
const logger = createLogger('CloudWatchGetLogEvents')

/**
 * Request payload for the get-log-events tool endpoint.
 * Requires credentials plus the log group and stream to read; optional
 * start/end bounds and a result limit.
 */
const GetLogEventsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  logGroupName: z.string().min(1, 'Log group name is required'),
  logStreamName: z.string().min(1, 'Log stream name is required'),
  // Treated as epoch seconds downstream (getLogEvents multiplies by 1000)
  startTime: z.number({ coerce: true }).int().optional(),
  endTime: z.number({ coerce: true }).int().optional(),
  // Accepts numeric strings; ''/null/undefined all mean "no limit"
  limit: z.preprocess(
    (v) => (v === '' || v === undefined || v === null ? undefined : v),
    z.number({ coerce: true }).int().positive().optional()
  ),
})
/**
 * POST /api/tools/cloudwatch/get-log-events
 *
 * Reads events from a single log stream via the shared getLogEvents
 * helper, optionally bounded by start/end times and a result limit.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = GetLogEventsSchema.parse(await request.json())
    const client = createCloudWatchLogsClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    const { events } = await getLogEvents(client, params.logGroupName, params.logStreamName, {
      startTime: params.startTime,
      endTime: params.endTime,
      limit: params.limit,
    })

    return NextResponse.json({
      success: true,
      output: { events },
    })
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to get CloudWatch log events'
    logger.error('GetLogEvents failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,97 @@
import { CloudWatchClient, GetMetricStatisticsCommand } from '@aws-sdk/client-cloudwatch'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
const logger = createLogger('CloudWatchGetMetricStatistics')

/**
 * Request payload for the get-metric-statistics tool endpoint.
 * startTime/endTime are treated as epoch seconds (converted to Date in the
 * handler); `dimensions` is a JSON string in either array form
 * ([{ "name": ..., "value": ... }, ...]) or map form ({ name: value }).
 */
const GetMetricStatisticsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  namespace: z.string().min(1, 'Namespace is required'),
  metricName: z.string().min(1, 'Metric name is required'),
  startTime: z.number({ coerce: true }).int(),
  endTime: z.number({ coerce: true }).int(),
  // Aggregation period, in seconds
  period: z.number({ coerce: true }).int().min(1),
  statistics: z.array(z.enum(['Average', 'Sum', 'Minimum', 'Maximum', 'SampleCount'])).min(1),
  dimensions: z.string().optional(),
})
/**
 * Parses the user-supplied `dimensions` JSON string into CloudWatch
 * Dimension objects.
 *
 * Accepts either:
 *  - array form: [{ "name": "InstanceId", "value": "i-123" }, ...]
 *  - map form:   { "InstanceId": "i-123", ... }
 *
 * @throws Error('Invalid dimensions JSON') when the payload is not valid
 *   JSON, or parses to something that is neither an array nor an object
 *   (e.g. null or a bare scalar). Previously a null payload only failed via
 *   an incidental `Object.entries(null)` TypeError and a scalar payload was
 *   silently ignored; both are now rejected explicitly.
 */
function parseDimensions(raw: string): { Name: string; Value: string }[] {
  let dims: unknown
  try {
    dims = JSON.parse(raw)
  } catch {
    throw new Error('Invalid dimensions JSON')
  }
  if (Array.isArray(dims)) {
    return dims.map((d) => {
      const entry = (d ?? {}) as Record<string, unknown>
      // Coerce to strings so a malformed entry never sends Name/Value:
      // undefined to AWS (which would produce an opaque validation error).
      return { Name: String(entry.name ?? ''), Value: String(entry.value ?? '') }
    })
  }
  if (dims !== null && typeof dims === 'object') {
    return Object.entries(dims).map(([name, value]) => ({
      Name: name,
      Value: String(value),
    }))
  }
  throw new Error('Invalid dimensions JSON')
}

/**
 * POST /api/tools/cloudwatch/get-metric-statistics
 *
 * Fetches datapoints for a single CloudWatch metric over a time window,
 * using caller-supplied AWS credentials. Timestamps are accepted and
 * returned as epoch seconds; datapoints are sorted oldest-first.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }
    const body = await request.json()
    const validatedData = GetMetricStatisticsSchema.parse(body)
    const client = new CloudWatchClient({
      region: validatedData.region,
      credentials: {
        accessKeyId: validatedData.accessKeyId,
        secretAccessKey: validatedData.secretAccessKey,
      },
    })

    const parsedDimensions = validatedData.dimensions
      ? parseDimensions(validatedData.dimensions)
      : undefined

    const command = new GetMetricStatisticsCommand({
      Namespace: validatedData.namespace,
      MetricName: validatedData.metricName,
      // Epoch seconds -> Date objects expected by the SDK
      StartTime: new Date(validatedData.startTime * 1000),
      EndTime: new Date(validatedData.endTime * 1000),
      Period: validatedData.period,
      Statistics: validatedData.statistics,
      ...(parsedDimensions && { Dimensions: parsedDimensions }),
    })
    const response = await client.send(command)

    // Sort a copy (don't mutate the SDK response) oldest-first, and convert
    // timestamps back to epoch seconds for the caller.
    const datapoints = [...(response.Datapoints ?? [])]
      .sort((a, b) => (a.Timestamp?.getTime() ?? 0) - (b.Timestamp?.getTime() ?? 0))
      .map((dp) => ({
        timestamp: dp.Timestamp ? Math.floor(dp.Timestamp.getTime() / 1000) : 0,
        average: dp.Average,
        sum: dp.Sum,
        minimum: dp.Minimum,
        maximum: dp.Maximum,
        sampleCount: dp.SampleCount,
        unit: dp.Unit,
      }))

    return NextResponse.json({
      success: true,
      output: {
        label: response.Label ?? validatedData.metricName,
        datapoints,
      },
    })
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to get CloudWatch metric statistics'
    logger.error('GetMetricStatistics failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,67 @@
import { CloudWatchClient, ListMetricsCommand } from '@aws-sdk/client-cloudwatch'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
const logger = createLogger('CloudWatchListMetrics')

/**
 * Request payload for the list-metrics tool endpoint.
 * All filters are optional; `recentlyActive` restricts results to recently
 * active metrics, and `limit` caps the returned list client-side.
 */
const ListMetricsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  namespace: z.string().optional(),
  metricName: z.string().optional(),
  recentlyActive: z.boolean().optional(),
  // Accepts numeric strings; ''/null/undefined all mean "no limit"
  limit: z.preprocess(
    (v) => (v === '' || v === undefined || v === null ? undefined : v),
    z.number({ coerce: true }).int().positive().optional()
  ),
})
/**
 * POST /api/tools/cloudwatch/list-metrics
 *
 * Lists CloudWatch metrics, optionally filtered by namespace, metric name,
 * and recent activity. The limit is applied client-side to the first page
 * of results (at most 500 entries are returned).
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = ListMetricsSchema.parse(await request.json())
    const client = new CloudWatchClient({
      region: params.region,
      credentials: {
        accessKeyId: params.accessKeyId,
        secretAccessKey: params.secretAccessKey,
      },
    })

    // 'PT3H' is the RecentlyActive window accepted by the ListMetrics API.
    const awsResponse = await client.send(
      new ListMetricsCommand({
        ...(params.namespace && { Namespace: params.namespace }),
        ...(params.metricName && { MetricName: params.metricName }),
        ...(params.recentlyActive && { RecentlyActive: 'PT3H' }),
      })
    )

    const cap = params.limit ?? 500
    const metrics = (awsResponse.Metrics ?? []).slice(0, cap).map((metric) => ({
      namespace: metric.Namespace ?? '',
      metricName: metric.MetricName ?? '',
      dimensions: (metric.Dimensions ?? []).map((dimension) => ({
        name: dimension.Name ?? '',
        value: dimension.Value ?? '',
      })),
    }))

    return NextResponse.json({
      success: true,
      output: { metrics },
    })
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to list CloudWatch metrics'
    logger.error('ListMetrics failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,71 @@
import { StartQueryCommand } from '@aws-sdk/client-cloudwatch-logs'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createCloudWatchLogsClient, pollQueryResults } from '@/app/api/tools/cloudwatch/utils'
const logger = createLogger('CloudWatchQueryLogs')

/**
 * Request payload for the query-logs (Logs Insights) tool endpoint.
 * Requires at least one log group, a Logs Insights query string, and a
 * start/end time window.
 */
const QueryLogsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  logGroupNames: z.array(z.string().min(1)).min(1, 'At least one log group name is required'),
  queryString: z.string().min(1, 'Query string is required'),
  startTime: z.number({ coerce: true }).int(),
  endTime: z.number({ coerce: true }).int(),
  // Accepts numeric strings; ''/null/undefined all mean "no limit"
  limit: z.preprocess(
    (v) => (v === '' || v === undefined || v === null ? undefined : v),
    z.number({ coerce: true }).int().positive().optional()
  ),
})
export async function POST(request: NextRequest) {
try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const validatedData = QueryLogsSchema.parse(body)
const client = createCloudWatchLogsClient({
region: validatedData.region,
accessKeyId: validatedData.accessKeyId,
secretAccessKey: validatedData.secretAccessKey,
})
const startQueryCommand = new StartQueryCommand({
logGroupNames: validatedData.logGroupNames,
queryString: validatedData.queryString,
startTime: validatedData.startTime,
endTime: validatedData.endTime,
...(validatedData.limit !== undefined && { limit: validatedData.limit }),
})
const startQueryResponse = await client.send(startQueryCommand)
const queryId = startQueryResponse.queryId
if (!queryId) {
throw new Error('Failed to start CloudWatch Log Insights query: no queryId returned')
}
const result = await pollQueryResults(client, queryId)
return NextResponse.json({
success: true,
output: {
results: result.results,
statistics: result.statistics,
status: result.status,
},
})
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : 'CloudWatch Log Insights query failed'
logger.error('QueryLogs failed', { error: errorMessage })
return NextResponse.json({ error: errorMessage }, { status: 500 })
}
}

View File

@@ -0,0 +1,161 @@
import {
CloudWatchLogsClient,
DescribeLogStreamsCommand,
GetLogEventsCommand,
GetQueryResultsCommand,
type ResultField,
} from '@aws-sdk/client-cloudwatch-logs'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
/** Caller-supplied static AWS credentials plus the target region. */
interface AwsCredentials {
  region: string
  accessKeyId: string
  secretAccessKey: string
}
/** Builds a CloudWatch Logs SDK client from caller-supplied credentials. */
export function createCloudWatchLogsClient(config: AwsCredentials): CloudWatchLogsClient {
  const { region, accessKeyId, secretAccessKey } = config
  return new CloudWatchLogsClient({
    region,
    credentials: { accessKeyId, secretAccessKey },
  })
}
/** Tuning knobs for pollQueryResults. */
interface PollOptions {
  // Total time budget before giving up on the query (milliseconds)
  maxWaitMs?: number
  // Delay between successive GetQueryResults calls (milliseconds)
  pollIntervalMs?: number
}

/** Final (or partial, on timeout) outcome of a Logs Insights query. */
interface PollResult {
  // One record per result row, keyed by field name
  results: Record<string, string>[]
  statistics: {
    bytesScanned: number
    recordsMatched: number
    recordsScanned: number
  }
  // 'Complete', or a 'Timeout (...)' marker embedding the last seen status
  status: string
}
/**
 * Flattens a Logs Insights result row (list of field/value pairs) into a
 * plain record keyed by field name. Pairs missing a field name or value are
 * skipped; later duplicates of a field overwrite earlier ones.
 */
function parseResultFields(fields: ResultField[] | undefined): Record<string, string> {
  const pairs = (fields ?? [])
    .filter((entry) => entry.field && entry.value !== undefined)
    .map((entry) => [entry.field as string, entry.value ?? ''] as const)
  return Object.fromEntries(pairs)
}
/**
 * Polls GetQueryResults until a Logs Insights query finishes or the wait
 * budget is exhausted.
 *
 * @param client - CloudWatch Logs client to poll with
 * @param queryId - id returned by StartQuery
 * @param options - optional overrides for the wait budget / poll interval
 * @returns parsed result rows, scan statistics, and the final status string
 * @throws Error when the query ends as 'Failed' or 'Cancelled'
 *
 * On timeout this does NOT throw: one final snapshot is fetched and any
 * partial results are returned with a `Timeout (last status: ...)` status,
 * so callers can still surface what was found.
 */
export async function pollQueryResults(
  client: CloudWatchLogsClient,
  queryId: string,
  options: PollOptions = {}
): Promise<PollResult> {
  const { maxWaitMs = DEFAULT_EXECUTION_TIMEOUT_MS, pollIntervalMs = 1_000 } = options
  const startTime = Date.now()
  while (Date.now() - startTime < maxWaitMs) {
    const command = new GetQueryResultsCommand({ queryId })
    const response = await client.send(command)
    const status = response.status ?? 'Unknown'
    if (status === 'Complete') {
      return {
        results: (response.results ?? []).map(parseResultFields),
        statistics: {
          bytesScanned: response.statistics?.bytesScanned ?? 0,
          recordsMatched: response.statistics?.recordsMatched ?? 0,
          recordsScanned: response.statistics?.recordsScanned ?? 0,
        },
        status,
      }
    }
    // Terminal failure states abort the poll loop immediately.
    if (status === 'Failed' || status === 'Cancelled') {
      throw new Error(`CloudWatch Log Insights query ${status.toLowerCase()}`)
    }
    // Query is still in progress — wait before asking again.
    await new Promise((resolve) => setTimeout(resolve, pollIntervalMs))
  }
  // Timeout -- fetch one last time for partial results
  const finalResponse = await client.send(new GetQueryResultsCommand({ queryId }))
  return {
    results: (finalResponse.results ?? []).map(parseResultFields),
    statistics: {
      bytesScanned: finalResponse.statistics?.bytesScanned ?? 0,
      recordsMatched: finalResponse.statistics?.recordsMatched ?? 0,
      recordsScanned: finalResponse.statistics?.recordsScanned ?? 0,
    },
    status: `Timeout (last status: ${finalResponse.status ?? 'Unknown'})`,
  }
}
/**
 * Lists log streams in a log group.
 *
 * When a non-empty prefix is given, results are filtered by that prefix and
 * ordered by stream name; otherwise they are ordered by last event time,
 * most recent first.
 */
export async function describeLogStreams(
  client: CloudWatchLogsClient,
  logGroupName: string,
  options?: { prefix?: string; limit?: number }
): Promise<{
  logStreams: {
    logStreamName: string
    lastEventTimestamp: number | undefined
    firstEventTimestamp: number | undefined
    creationTime: number | undefined
    storedBytes: number
  }[]
}> {
  const prefix = options?.prefix
  const input: ConstructorParameters<typeof DescribeLogStreamsCommand>[0] = prefix
    ? { logGroupName, orderBy: 'LogStreamName', logStreamNamePrefix: prefix }
    : { logGroupName, orderBy: 'LastEventTime', descending: true }
  if (options?.limit !== undefined) {
    input.limit = options.limit
  }

  const { logStreams = [] } = await client.send(new DescribeLogStreamsCommand(input))
  return {
    logStreams: logStreams.map((stream) => ({
      logStreamName: stream.logStreamName ?? '',
      lastEventTimestamp: stream.lastEventTimestamp,
      firstEventTimestamp: stream.firstEventTimestamp,
      creationTime: stream.creationTime,
      storedBytes: stream.storedBytes ?? 0,
    })),
  }
}
/**
 * Fetches events from a single log stream, oldest first.
 *
 * startTime/endTime are treated as epoch seconds and multiplied by 1000 for
 * the SDK, which expects epoch milliseconds.
 */
export async function getLogEvents(
  client: CloudWatchLogsClient,
  logGroupName: string,
  logStreamName: string,
  options?: { startTime?: number; endTime?: number; limit?: number }
): Promise<{
  events: {
    timestamp: number | undefined
    message: string | undefined
    ingestionTime: number | undefined
  }[]
}> {
  const input: ConstructorParameters<typeof GetLogEventsCommand>[0] = {
    logGroupIdentifier: logGroupName,
    logStreamName,
    // Read from the beginning of the window rather than tailing the stream.
    startFromHead: true,
  }
  if (options?.startTime !== undefined) {
    input.startTime = options.startTime * 1000
  }
  if (options?.endTime !== undefined) {
    input.endTime = options.endTime * 1000
  }
  if (options?.limit !== undefined) {
    input.limit = options.limit
  }

  const { events = [] } = await client.send(new GetLogEventsCommand(input))
  return {
    events: events.map((event) => ({
      timestamp: event.timestamp,
      message: event.message,
      ingestionTime: event.ingestionTime,
    })),
  }
}

View File

@@ -16,7 +16,8 @@ import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { exportFolderToZip, sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { exportFolderToZip } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {

View File

@@ -20,7 +20,7 @@ import { createLogger } from '@sim/logger'
import { inArray } from 'drizzle-orm'
import JSZip from 'jszip'
import { NextResponse } from 'next/server'
import { sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {

View File

@@ -16,7 +16,8 @@ import { workflow, workflowFolder, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { exportWorkspaceToZip, sanitizePathSegment } from '@/lib/workflows/operations/import-export'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { exportWorkspaceToZip } from '@/lib/workflows/operations/import-export'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {

View File

@@ -1,42 +1,44 @@
import { getBaseUrl } from '@/lib/core/utils/urls'
export async function GET() {
export function GET() {
const baseUrl = getBaseUrl()
const llmsContent = `# Sim
const content = `# Sim
> Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to deploy and orchestrate agentic workflows.
> Sim is the open-source platform to build AI agents and run your agentic workforce. Connect integrations and LLMs to deploy and orchestrate agentic workflows.
Sim lets teams create agents, workflows, knowledge bases, tables, and docs. Over 100,000 builders use Sim — from startups to Fortune 500 companies. SOC2 compliant.
Sim lets teams create agents, workflows, knowledge bases, tables, and docs. It supports both product discovery pages and deeper technical documentation.
## Core Pages
## Preferred URLs
- [Homepage](${baseUrl}): Product overview, features, and pricing
- [Homepage](${baseUrl}): Product overview and primary entry point
- [Integrations directory](${baseUrl}/integrations): Public catalog of integrations and automation capabilities
- [Models directory](${baseUrl}/models): Public catalog of AI models, pricing, context windows, and capabilities
- [Blog](${baseUrl}/blog): Announcements, guides, and product context
- [Changelog](${baseUrl}/changelog): Product updates and release notes
- [Sim Blog](${baseUrl}/blog): Announcements, insights, and guides
## Documentation
- [Documentation](https://docs.sim.ai): Complete guides and API reference
- [Quickstart](https://docs.sim.ai/quickstart): Get started in 5 minutes
- [API Reference](https://docs.sim.ai/api): REST API documentation
- [Documentation](https://docs.sim.ai): Product guides and technical reference
- [Quickstart](https://docs.sim.ai/quickstart): Fastest path to getting started
- [API Reference](https://docs.sim.ai/api): API documentation
## Key Concepts
- **Workspace**: Container for workflows, data sources, and executions
- **Workflow**: Directed graph of blocks defining an agentic process
- **Block**: Individual step (LLM call, tool call, HTTP request, code execution)
- **Block**: Individual step such as an LLM call, tool call, HTTP request, or code execution
- **Trigger**: Event or schedule that initiates workflow execution
- **Execution**: A single run of a workflow with logs and outputs
- **Knowledge Base**: Vector-indexed document store for retrieval-augmented generation
- **Knowledge Base**: Document store used for retrieval-augmented generation
## Capabilities
- AI agent creation and deployment
- Agentic workflow orchestration
- 1,000+ integrations (Slack, Gmail, Notion, Airtable, databases, and more)
- Multi-model LLM orchestration (OpenAI, Anthropic, Google, Mistral, xAI, Perplexity)
- Knowledge base creation with retrieval-augmented generation (RAG)
- Integrations across business tools, databases, and communication platforms
- Multi-model LLM orchestration
- Knowledge bases and retrieval-augmented generation
- Table creation and management
- Document creation and processing
- Scheduled and webhook-triggered executions
@@ -45,24 +47,19 @@ Sim lets teams create agents, workflows, knowledge bases, tables, and docs. Over
- AI agent deployment and orchestration
- Knowledge bases and RAG pipelines
- Document creation and processing
- Customer support automation
- Internal operations (sales, marketing, legal, finance)
- Internal operations workflows across sales, marketing, legal, and finance
## Links
## Additional Links
- [GitHub Repository](https://github.com/simstudioai/sim): Open-source codebase
- [Discord Community](https://discord.gg/Hr4UWYEcTT): Get help and connect with 100,000+ builders
- [X/Twitter](https://x.com/simdotai): Product updates and announcements
## Optional
- [Careers](https://jobs.ashbyhq.com/sim): Join the Sim team
- [Docs](https://docs.sim.ai): Canonical documentation source
- [Terms of Service](${baseUrl}/terms): Legal terms
- [Privacy Policy](${baseUrl}/privacy): Data handling practices
- [Sitemap](${baseUrl}/sitemap.xml): Public URL inventory
`
return new Response(llmsContent, {
return new Response(content, {
headers: {
'Content-Type': 'text/markdown; charset=utf-8',
'Cache-Control': 'public, max-age=86400, s-maxage=86400',

View File

@@ -8,6 +8,34 @@ export default async function sitemap(): Promise<MetadataRoute.Sitemap> {
const baseUrl = getBaseUrl()
const now = new Date()
const integrationPages: MetadataRoute.Sitemap = integrations.map((integration) => ({
url: `${baseUrl}/integrations/${integration.slug}`,
lastModified: now,
}))
const modelHubPages: MetadataRoute.Sitemap = [
{
url: `${baseUrl}/integrations`,
lastModified: now,
},
{
url: `${baseUrl}/models`,
lastModified: now,
},
{
url: `${baseUrl}/partners`,
lastModified: now,
},
]
const providerPages: MetadataRoute.Sitemap = MODEL_PROVIDERS_WITH_CATALOGS.map((provider) => ({
url: `${baseUrl}${provider.href}`,
lastModified: new Date(
Math.max(...provider.models.map((model) => new Date(model.pricing.updatedAt).getTime()))
),
}))
const modelPages: MetadataRoute.Sitemap = ALL_CATALOG_MODELS.map((model) => ({
url: `${baseUrl}${model.href}`,
lastModified: new Date(model.pricing.updatedAt),
}))
const staticPages: MetadataRoute.Sitemap = [
{
@@ -26,14 +54,6 @@ export default async function sitemap(): Promise<MetadataRoute.Sitemap> {
// url: `${baseUrl}/templates`,
// lastModified: now,
// },
{
url: `${baseUrl}/integrations`,
lastModified: now,
},
{
url: `${baseUrl}/models`,
lastModified: now,
},
{
url: `${baseUrl}/changelog`,
lastModified: now,
@@ -54,20 +74,12 @@ export default async function sitemap(): Promise<MetadataRoute.Sitemap> {
lastModified: new Date(p.updated ?? p.date),
}))
const integrationPages: MetadataRoute.Sitemap = integrations.map((i) => ({
url: `${baseUrl}/integrations/${i.slug}`,
lastModified: now,
}))
const providerPages: MetadataRoute.Sitemap = MODEL_PROVIDERS_WITH_CATALOGS.map((provider) => ({
url: `${baseUrl}${provider.href}`,
lastModified: now,
}))
const modelPages: MetadataRoute.Sitemap = ALL_CATALOG_MODELS.map((model) => ({
url: `${baseUrl}${model.href}`,
lastModified: new Date(model.pricing.updatedAt),
}))
return [...staticPages, ...blogPages, ...integrationPages, ...providerPages, ...modelPages]
return [
...staticPages,
...modelHubPages,
...integrationPages,
...providerPages,
...modelPages,
...blogPages,
]
}

View File

@@ -1,22 +1,59 @@
'use client'
import { useCallback, useEffect, useRef, useState } from 'react'
import { Check, Copy, Ellipsis, Hash } from 'lucide-react'
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
Button,
Check,
Copy,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
ThumbsDown,
ThumbsUp,
} from '@/components/emcn'
import { useSubmitCopilotFeedback } from '@/hooks/queries/copilot-feedback'
const SPECIAL_TAGS = 'thinking|options|usage_upgrade|credential|mothership-error|file'
function toPlainText(raw: string): string {
return (
raw
// Strip special tags and their contents
.replace(new RegExp(`<\\/?(${SPECIAL_TAGS})(?:>[\\s\\S]*?<\\/(${SPECIAL_TAGS})>|>)`, 'g'), '')
// Strip markdown
.replace(/^#{1,6}\s+/gm, '')
.replace(/\*\*(.+?)\*\*/g, '$1')
.replace(/\*(.+?)\*/g, '$1')
.replace(/`{3}[\s\S]*?`{3}/g, '')
.replace(/`(.+?)`/g, '$1')
.replace(/\[([^\]]+)\]\([^)]+\)/g, '$1')
.replace(/^[>\-*]\s+/gm, '')
.replace(/!\[[^\]]*\]\([^)]+\)/g, '')
// Normalize whitespace
.replace(/\n{3,}/g, '\n\n')
.trim()
)
}
const ICON_CLASS = 'h-[14px] w-[14px]'
const BUTTON_CLASS =
'flex h-[26px] w-[26px] items-center justify-center rounded-[6px] text-[var(--text-icon)] transition-colors hover-hover:bg-[var(--surface-hover)] focus-visible:outline-none'
interface MessageActionsProps {
content: string
requestId?: string
chatId?: string
userQuery?: string
}
export function MessageActions({ content, requestId }: MessageActionsProps) {
const [copied, setCopied] = useState<'message' | 'request' | null>(null)
export function MessageActions({ content, chatId, userQuery }: MessageActionsProps) {
const [copied, setCopied] = useState(false)
const [pendingFeedback, setPendingFeedback] = useState<'up' | 'down' | null>(null)
const [feedbackText, setFeedbackText] = useState('')
const resetTimeoutRef = useRef<number | null>(null)
const submitFeedback = useSubmitCopilotFeedback()
useEffect(() => {
return () => {
@@ -26,59 +63,119 @@ export function MessageActions({ content, requestId }: MessageActionsProps) {
}
}, [])
const copyToClipboard = useCallback(async (text: string, type: 'message' | 'request') => {
const copyToClipboard = useCallback(async () => {
if (!content) return
const text = toPlainText(content)
if (!text) return
try {
await navigator.clipboard.writeText(text)
setCopied(type)
setCopied(true)
if (resetTimeoutRef.current !== null) {
window.clearTimeout(resetTimeoutRef.current)
}
resetTimeoutRef.current = window.setTimeout(() => setCopied(null), 1500)
resetTimeoutRef.current = window.setTimeout(() => setCopied(false), 1500)
} catch {
/* clipboard unavailable */
}
}, [content])
const handleFeedbackClick = useCallback(
(type: 'up' | 'down') => {
if (chatId && userQuery) {
setPendingFeedback(type)
setFeedbackText('')
}
},
[chatId, userQuery]
)
const handleSubmitFeedback = useCallback(() => {
if (!pendingFeedback || !chatId || !userQuery) return
const text = feedbackText.trim()
if (!text) {
setPendingFeedback(null)
setFeedbackText('')
return
}
submitFeedback.mutate({
chatId,
userQuery,
agentResponse: content,
isPositiveFeedback: pendingFeedback === 'up',
feedback: text,
})
setPendingFeedback(null)
setFeedbackText('')
}, [pendingFeedback, chatId, userQuery, content, feedbackText])
const handleModalClose = useCallback((open: boolean) => {
if (!open) {
setPendingFeedback(null)
setFeedbackText('')
}
}, [])
if (!content && !requestId) {
return null
}
if (!content) return null
return (
<DropdownMenu modal={false}>
<DropdownMenuTrigger asChild>
<>
<div className='flex items-center gap-0.5'>
<button
type='button'
aria-label='More options'
className='flex h-5 w-5 items-center justify-center rounded-sm text-[var(--text-icon)] opacity-0 transition-colors transition-opacity hover-hover:bg-[var(--surface-3)] hover-hover:text-[var(--text-primary)] focus-visible:opacity-100 focus-visible:outline-none group-hover/msg:opacity-100 data-[state=open]:opacity-100'
onClick={(event) => event.stopPropagation()}
aria-label='Copy message'
onClick={copyToClipboard}
className={BUTTON_CLASS}
>
<Ellipsis className='h-3 w-3' strokeWidth={2} />
{copied ? <Check className={ICON_CLASS} /> : <Copy className={ICON_CLASS} />}
</button>
</DropdownMenuTrigger>
<DropdownMenuContent align='end' side='top' sideOffset={4}>
<DropdownMenuItem
disabled={!content}
onSelect={(event) => {
event.stopPropagation()
void copyToClipboard(content, 'message')
}}
<button
type='button'
aria-label='Like'
onClick={() => handleFeedbackClick('up')}
className={BUTTON_CLASS}
>
{copied === 'message' ? <Check /> : <Copy />}
<span>Copy Message</span>
</DropdownMenuItem>
<DropdownMenuItem
disabled={!requestId}
onSelect={(event) => {
event.stopPropagation()
if (requestId) {
void copyToClipboard(requestId, 'request')
}
}}
<ThumbsUp className={ICON_CLASS} />
</button>
<button
type='button'
aria-label='Dislike'
onClick={() => handleFeedbackClick('down')}
className={BUTTON_CLASS}
>
{copied === 'request' ? <Check /> : <Hash />}
<span>Copy Request ID</span>
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
<ThumbsDown className={ICON_CLASS} />
</button>
</div>
<Modal open={pendingFeedback !== null} onOpenChange={handleModalClose}>
<ModalContent size='sm'>
<ModalHeader>Give feedback</ModalHeader>
<ModalBody>
<div className='flex flex-col gap-2'>
<p className='font-medium text-[var(--text-secondary)] text-sm'>
{pendingFeedback === 'up' ? 'What did you like?' : 'What could be improved?'}
</p>
<Textarea
placeholder={
pendingFeedback === 'up'
? 'Tell us what was helpful...'
: 'Tell us what went wrong...'
}
value={feedbackText}
onChange={(e) => setFeedbackText(e.target.value)}
rows={3}
/>
</div>
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={() => handleModalClose(false)}>
Cancel
</Button>
<Button variant='primary' onClick={handleSubmitFeedback}>
Submit
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</>
)
}

View File

@@ -1,8 +1,10 @@
'use client'
import { createContext, memo, useContext, useMemo, useRef } from 'react'
import { createContext, memo, useCallback, useContext, useEffect, useMemo, useRef } from 'react'
import { useRouter } from 'next/navigation'
import type { Components, ExtraProps } from 'react-markdown'
import ReactMarkdown from 'react-markdown'
import rehypeSlug from 'rehype-slug'
import remarkBreaks from 'remark-breaks'
import remarkGfm from 'remark-gfm'
import { Checkbox } from '@/components/emcn'
@@ -70,6 +72,7 @@ export const PreviewPanel = memo(function PreviewPanel({
})
const REMARK_PLUGINS = [remarkGfm, remarkBreaks]
const REHYPE_PLUGINS = [rehypeSlug]
/**
* Carries the contentRef and toggle handler from MarkdownPreview down to the
@@ -83,29 +86,43 @@ const MarkdownCheckboxCtx = createContext<{
/** Carries the resolved checkbox index from LiRenderer to InputRenderer. */
const CheckboxIndexCtx = createContext(-1)
const NavigateCtx = createContext<((path: string) => void) | null>(null)
const STATIC_MARKDOWN_COMPONENTS = {
p: ({ children }: { children?: React.ReactNode }) => (
<p className='mb-3 break-words text-[14px] text-[var(--text-primary)] leading-[1.6] last:mb-0'>
{children}
</p>
),
h1: ({ children }: { children?: React.ReactNode }) => (
<h1 className='mt-6 mb-4 break-words font-semibold text-[24px] text-[var(--text-primary)] first:mt-0'>
h1: ({ id, children }: { id?: string; children?: React.ReactNode }) => (
<h1
id={id}
className='mt-6 mb-4 break-words font-semibold text-[24px] text-[var(--text-primary)] first:mt-0'
>
{children}
</h1>
),
h2: ({ children }: { children?: React.ReactNode }) => (
<h2 className='mt-5 mb-3 break-words font-semibold text-[20px] text-[var(--text-primary)] first:mt-0'>
h2: ({ id, children }: { id?: string; children?: React.ReactNode }) => (
<h2
id={id}
className='mt-5 mb-3 break-words font-semibold text-[20px] text-[var(--text-primary)] first:mt-0'
>
{children}
</h2>
),
h3: ({ children }: { children?: React.ReactNode }) => (
<h3 className='mt-4 mb-2 break-words font-semibold text-[16px] text-[var(--text-primary)] first:mt-0'>
h3: ({ id, children }: { id?: string; children?: React.ReactNode }) => (
<h3
id={id}
className='mt-4 mb-2 break-words font-semibold text-[16px] text-[var(--text-primary)] first:mt-0'
>
{children}
</h3>
),
h4: ({ children }: { children?: React.ReactNode }) => (
<h4 className='mt-3 mb-2 break-words font-semibold text-[14px] text-[var(--text-primary)] first:mt-0'>
h4: ({ id, children }: { id?: string; children?: React.ReactNode }) => (
<h4
id={id}
className='mt-3 mb-2 break-words font-semibold text-[14px] text-[var(--text-primary)] first:mt-0'
>
{children}
</h4>
),
@@ -138,16 +155,6 @@ const STATIC_MARKDOWN_COMPONENTS = {
)
},
pre: ({ children }: { children?: React.ReactNode }) => <>{children}</>,
a: ({ href, children }: { href?: string; children?: React.ReactNode }) => (
<a
href={href}
target='_blank'
rel='noopener noreferrer'
className='break-all text-[var(--brand-secondary)] underline-offset-2 hover:underline'
>
{children}
</a>
),
strong: ({ children }: { children?: React.ReactNode }) => (
<strong className='break-words font-semibold text-[var(--text-primary)]'>{children}</strong>
),
@@ -267,8 +274,75 @@ function InputRenderer({
)
}
function isInternalHref(
href: string,
origin = window.location.origin
): { pathname: string; hash: string } | null {
if (href.startsWith('#')) return { pathname: '', hash: href }
try {
const url = new URL(href, origin)
if (url.origin === origin && url.pathname.startsWith('/workspace/')) {
return { pathname: url.pathname, hash: url.hash }
}
} catch {
if (href.startsWith('/workspace/')) {
const hashIdx = href.indexOf('#')
if (hashIdx === -1) return { pathname: href, hash: '' }
return { pathname: href.slice(0, hashIdx), hash: href.slice(hashIdx) }
}
}
return null
}
function AnchorRenderer({ href, children }: { href?: string; children?: React.ReactNode }) {
const navigate = useContext(NavigateCtx)
const parsed = useMemo(() => (href ? isInternalHref(href) : null), [href])
const handleClick = useCallback(
(e: React.MouseEvent<HTMLAnchorElement>) => {
if (!parsed || e.metaKey || e.ctrlKey || e.shiftKey || e.altKey) return
e.preventDefault()
if (parsed.pathname === '' && parsed.hash) {
const el = document.getElementById(parsed.hash.slice(1))
if (el) {
const container = el.closest('.overflow-auto') as HTMLElement | null
if (container) {
container.scrollTo({ top: el.offsetTop - container.offsetTop, behavior: 'smooth' })
} else {
el.scrollIntoView({ behavior: 'smooth' })
}
}
return
}
const destination = parsed.pathname + parsed.hash
if (navigate) {
navigate(destination)
} else {
window.location.assign(destination)
}
},
[parsed, navigate]
)
return (
<a
href={href}
target={parsed ? undefined : '_blank'}
rel={parsed ? undefined : 'noopener noreferrer'}
onClick={handleClick}
className='break-all text-[var(--brand-secondary)] underline-offset-2 hover:underline'
>
{children}
</a>
)
}
const MARKDOWN_COMPONENTS = {
...STATIC_MARKDOWN_COMPONENTS,
a: AnchorRenderer,
ul: UlRenderer,
ol: OlRenderer,
li: LiRenderer,
@@ -284,6 +358,7 @@ const MarkdownPreview = memo(function MarkdownPreview({
isStreaming?: boolean
onCheckboxToggle?: (checkboxIndex: number, checked: boolean) => void
}) {
const { push: navigate } = useRouter()
const { ref: scrollRef } = useAutoScroll(isStreaming)
const { committed, incoming, generation } = useStreamingReveal(content, isStreaming)
@@ -295,10 +370,30 @@ const MarkdownPreview = memo(function MarkdownPreview({
[onCheckboxToggle]
)
const hasScrolledToHash = useRef(false)
useEffect(() => {
const hash = window.location.hash
if (!hash || hasScrolledToHash.current) return
const id = hash.slice(1)
const el = document.getElementById(id)
if (!el) return
hasScrolledToHash.current = true
const container = el.closest('.overflow-auto') as HTMLElement | null
if (container) {
container.scrollTo({ top: el.offsetTop - container.offsetTop, behavior: 'smooth' })
} else {
el.scrollIntoView({ behavior: 'smooth' })
}
}, [content])
const committedMarkdown = useMemo(
() =>
committed ? (
<ReactMarkdown remarkPlugins={REMARK_PLUGINS} components={MARKDOWN_COMPONENTS}>
<ReactMarkdown
remarkPlugins={REMARK_PLUGINS}
rehypePlugins={REHYPE_PLUGINS}
components={MARKDOWN_COMPONENTS}
>
{committed}
</ReactMarkdown>
) : null,
@@ -307,30 +402,42 @@ const MarkdownPreview = memo(function MarkdownPreview({
if (onCheckboxToggle) {
return (
<MarkdownCheckboxCtx.Provider value={ctxValue}>
<div ref={scrollRef} className='h-full overflow-auto p-6'>
<ReactMarkdown remarkPlugins={REMARK_PLUGINS} components={MARKDOWN_COMPONENTS}>
{content}
</ReactMarkdown>
</div>
</MarkdownCheckboxCtx.Provider>
<NavigateCtx.Provider value={navigate}>
<MarkdownCheckboxCtx.Provider value={ctxValue}>
<div ref={scrollRef} className='h-full overflow-auto p-6'>
<ReactMarkdown
remarkPlugins={REMARK_PLUGINS}
rehypePlugins={REHYPE_PLUGINS}
components={MARKDOWN_COMPONENTS}
>
{content}
</ReactMarkdown>
</div>
</MarkdownCheckboxCtx.Provider>
</NavigateCtx.Provider>
)
}
return (
<div ref={scrollRef} className='h-full overflow-auto p-6'>
{committedMarkdown}
{incoming && (
<div
key={generation}
className={cn(isStreaming && 'animate-stream-fade-in', '[&>:first-child]:mt-0')}
>
<ReactMarkdown remarkPlugins={REMARK_PLUGINS} components={MARKDOWN_COMPONENTS}>
{incoming}
</ReactMarkdown>
</div>
)}
</div>
<NavigateCtx.Provider value={navigate}>
<div ref={scrollRef} className='h-full overflow-auto p-6'>
{committedMarkdown}
{incoming && (
<div
key={generation}
className={cn(isStreaming && 'animate-stream-fade-in', '[&>:first-child]:mt-0')}
>
<ReactMarkdown
remarkPlugins={REMARK_PLUGINS}
rehypePlugins={REHYPE_PLUGINS}
components={MARKDOWN_COMPONENTS}
>
{incoming}
</ReactMarkdown>
</div>
)}
</div>
</NavigateCtx.Provider>
)
})

View File

@@ -473,9 +473,9 @@ function MothershipErrorDisplay({ data }: { data: MothershipErrorTagData }) {
const detail = data.code ? `${data.message} (${data.code})` : data.message
return (
<span className='animate-stream-fade-in font-base text-[13px] text-[var(--text-secondary)] italic leading-[20px]'>
<p className='animate-stream-fade-in font-base text-[13px] text-[var(--text-secondary)] italic leading-[20px]'>
{detail}
</span>
</p>
)
}

View File

@@ -35,6 +35,7 @@ interface MothershipChatProps {
onSendQueuedMessage: (id: string) => Promise<void>
onEditQueuedMessage: (id: string) => void
userId?: string
chatId?: string
onContextAdd?: (context: ChatContext) => void
editValue?: string
onEditValueConsumed?: () => void
@@ -53,7 +54,7 @@ const LAYOUT_STYLES = {
userRow: 'flex flex-col items-end gap-[6px] pt-3',
attachmentWidth: 'max-w-[70%]',
userBubble: 'max-w-[70%] overflow-hidden rounded-[16px] bg-[var(--surface-5)] px-3.5 py-2',
assistantRow: 'group/msg relative pb-5',
assistantRow: 'group/msg',
footer: 'flex-shrink-0 px-[24px] pb-[16px]',
footerInner: 'mx-auto max-w-[42rem]',
},
@@ -63,7 +64,7 @@ const LAYOUT_STYLES = {
userRow: 'flex flex-col items-end gap-[6px] pt-2',
attachmentWidth: 'max-w-[85%]',
userBubble: 'max-w-[85%] overflow-hidden rounded-[16px] bg-[var(--surface-5)] px-3 py-2',
assistantRow: 'group/msg relative pb-3',
assistantRow: 'group/msg',
footer: 'flex-shrink-0 px-3 pb-3',
footerInner: '',
},
@@ -80,6 +81,7 @@ export function MothershipChat({
onSendQueuedMessage,
onEditQueuedMessage,
userId,
chatId,
onContextAdd,
editValue,
onEditValueConsumed,
@@ -147,20 +149,28 @@ export function MothershipChat({
}
const isLastMessage = index === messages.length - 1
const precedingUserMsg = [...messages]
.slice(0, index)
.reverse()
.find((m) => m.role === 'user')
return (
<div key={msg.id} className={styles.assistantRow}>
{!isThisStreaming && (msg.content || msg.contentBlocks?.length) && (
<div className='absolute right-0 bottom-0 z-10'>
<MessageActions content={msg.content} requestId={msg.requestId} />
</div>
)}
<MessageContent
blocks={msg.contentBlocks || []}
fallbackContent={msg.content}
isStreaming={isThisStreaming}
onOptionSelect={isLastMessage ? onSubmit : undefined}
/>
{!isThisStreaming && (msg.content || msg.contentBlocks?.length) && (
<div className='mt-2.5'>
<MessageActions
content={msg.content}
chatId={chatId}
userQuery={precedingUserMsg?.content}
/>
</div>
)}
</div>
)
})}

View File

@@ -115,7 +115,7 @@ export const MothershipView = memo(
<div
ref={ref}
className={cn(
'relative z-10 flex h-full flex-col overflow-hidden border-[var(--border)] bg-[var(--bg)] transition-[width,min-width,border-width] duration-500 ease-[cubic-bezier(0.16,1,0.3,1)]',
'relative z-10 flex h-full flex-col overflow-hidden border-[var(--border)] bg-[var(--bg)] transition-[width,min-width,border-width] duration-200 ease-[cubic-bezier(0.25,0.1,0.25,1)]',
isCollapsed ? 'w-0 min-w-0 border-l-0' : 'w-1/2 border-l',
className
)}

View File

@@ -2,7 +2,7 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams, useRouter } from 'next/navigation'
import { useParams, useRouter, useSearchParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { PanelLeft } from '@/components/emcn/icons'
import { useSession } from '@/lib/auth/auth-client'
@@ -28,6 +28,8 @@ interface HomeProps {
export function Home({ chatId }: HomeProps = {}) {
const { workspaceId } = useParams<{ workspaceId: string }>()
const router = useRouter()
const searchParams = useSearchParams()
const initialResourceId = searchParams.get('resource')
const { data: session } = useSession()
const posthog = usePostHog()
const posthogRef = useRef(posthog)
@@ -160,7 +162,10 @@ export function Home({ chatId }: HomeProps = {}) {
} = useChat(
workspaceId,
chatId,
getMothershipUseChatOptions({ onResourceEvent: handleResourceEvent })
getMothershipUseChatOptions({
onResourceEvent: handleResourceEvent,
initialActiveResourceId: initialResourceId,
})
)
const [editingInputValue, setEditingInputValue] = useState('')
@@ -183,6 +188,16 @@ export function Home({ chatId }: HomeProps = {}) {
[editQueuedMessage]
)
useEffect(() => {
const url = new URL(window.location.href)
if (activeResourceId) {
url.searchParams.set('resource', activeResourceId)
} else {
url.searchParams.delete('resource')
}
window.history.replaceState(null, '', url.toString())
}, [activeResourceId])
useEffect(() => {
wasSendingRef.current = false
if (resolvedChatId) markRead(resolvedChatId)
@@ -213,6 +228,7 @@ export function Home({ chatId }: HomeProps = {}) {
if (!trimmed && !(fileAttachments && fileAttachments.length > 0)) return
captureEvent(posthogRef.current, 'task_message_sent', {
workspace_id: workspaceId,
has_attachments: !!(fileAttachments && fileAttachments.length > 0),
has_contexts: !!(contexts && contexts.length > 0),
is_new_task: !chatId,
@@ -224,7 +240,7 @@ export function Home({ chatId }: HomeProps = {}) {
sendMessage(trimmed || 'Analyze the attached file(s).', fileAttachments, contexts)
},
[sendMessage]
[sendMessage, workspaceId, chatId]
)
useEffect(() => {
@@ -348,6 +364,7 @@ export function Home({ chatId }: HomeProps = {}) {
onSendQueuedMessage={sendNow}
onEditQueuedMessage={handleEditQueuedMessage}
userId={session?.user?.id}
chatId={resolvedChatId}
onContextAdd={handleContextAdd}
editValue={editingInputValue}
onEditValueConsumed={clearEditingValue}

View File

@@ -377,10 +377,11 @@ export interface UseChatOptions {
onToolResult?: (toolName: string, success: boolean, result: unknown) => void
onTitleUpdate?: () => void
onStreamEnd?: (chatId: string, messages: ChatMessage[]) => void
initialActiveResourceId?: string | null
}
export function getMothershipUseChatOptions(
options: Pick<UseChatOptions, 'onResourceEvent' | 'onStreamEnd'> = {}
options: Pick<UseChatOptions, 'onResourceEvent' | 'onStreamEnd' | 'initialActiveResourceId'> = {}
): UseChatOptions {
return {
apiPath: MOTHERSHIP_CHAT_API_PATH,
@@ -416,6 +417,7 @@ export function useChat(
const [resolvedChatId, setResolvedChatId] = useState<string | undefined>(initialChatId)
const [resources, setResources] = useState<MothershipResource[]>([])
const [activeResourceId, setActiveResourceId] = useState<string | null>(null)
const initialActiveResourceIdRef = useRef(options?.initialActiveResourceId)
const onResourceEventRef = useRef(options?.onResourceEvent)
onResourceEventRef.current = options?.onResourceEvent
const apiPathRef = useRef(options?.apiPath ?? MOTHERSHIP_CHAT_API_PATH)
@@ -845,7 +847,12 @@ export function useChat(
const persistedResources = history.resources.filter((r) => r.id !== 'streaming-file')
if (persistedResources.length > 0) {
setResources(persistedResources)
setActiveResourceId(persistedResources[persistedResources.length - 1].id)
const initialId = initialActiveResourceIdRef.current
const restoredId =
initialId && persistedResources.some((r) => r.id === initialId)
? initialId
: persistedResources[persistedResources.length - 1].id
setActiveResourceId(restoredId)
for (const resource of persistedResources) {
if (resource.type !== 'workflow') continue

View File

@@ -19,26 +19,23 @@ import {
ModalHeader,
Tooltip,
} from '@/components/emcn'
import { getSubscriptionAccessState } from '@/lib/billing/client'
import { consumeOAuthReturnContext } from '@/lib/credentials/client-state'
import { getProviderIdFromServiceId, type OAuthProvider } from '@/lib/oauth'
import { OAuthModal } from '@/app/workspace/[workspaceId]/components/oauth-modal'
import { ConnectorSelectorField } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/add-connector-modal/components/connector-selector-field'
import { SYNC_INTERVALS } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/consts'
import { MaxBadge } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/max-badge'
import { isBillingEnabled } from '@/app/workspace/[workspaceId]/settings/navigation'
import { getDependsOnFields } from '@/blocks/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig, ConnectorConfigField } from '@/connectors/types'
import { useCreateConnector } from '@/hooks/queries/kb/connectors'
import { useOAuthCredentials } from '@/hooks/queries/oauth/oauth-credentials'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import type { SelectorKey } from '@/hooks/selectors/types'
import { useCredentialRefreshTriggers } from '@/hooks/use-credential-refresh-triggers'
const SYNC_INTERVALS = [
{ label: 'Every hour', value: 60 },
{ label: 'Every 6 hours', value: 360 },
{ label: 'Daily', value: 1440 },
{ label: 'Weekly', value: 10080 },
{ label: 'Manual only', value: 0 },
] as const
const CONNECTOR_ENTRIES = Object.entries(CONNECTOR_REGISTRY)
interface AddConnectorModalProps {
@@ -75,6 +72,10 @@ export function AddConnectorModal({
const { workspaceId } = useParams<{ workspaceId: string }>()
const { mutate: createConnector, isPending: isCreating } = useCreateConnector()
const { data: subscriptionResponse } = useSubscriptionData({ enabled: isBillingEnabled })
const subscriptionAccess = getSubscriptionAccessState(subscriptionResponse?.data)
const hasMaxAccess = !isBillingEnabled || subscriptionAccess.hasUsableMaxAccess
const connectorConfig = selectedType ? CONNECTOR_REGISTRY[selectedType] : null
const isApiKeyMode = connectorConfig?.auth.mode === 'apiKey'
const connectorProviderId = useMemo(
@@ -528,8 +529,13 @@ export function AddConnectorModal({
onValueChange={(val) => setSyncInterval(Number(val))}
>
{SYNC_INTERVALS.map((interval) => (
<ButtonGroupItem key={interval.value} value={String(interval.value)}>
<ButtonGroupItem
key={interval.value}
value={String(interval.value)}
disabled={interval.requiresMax && !hasMaxAccess}
>
{interval.label}
{interval.requiresMax && !hasMaxAccess && <MaxBadge />}
</ButtonGroupItem>
))}
</ButtonGroup>

View File

@@ -79,6 +79,8 @@ export function ConnectorSelectorField({
options={comboboxOptions}
value={value || undefined}
onChange={onChange}
searchable
searchPlaceholder={`Search ${field.title.toLowerCase()}...`}
placeholder={
!credentialId
? 'Connect an account first'

View File

@@ -0,0 +1,8 @@
export const SYNC_INTERVALS = [
{ label: 'Live', value: 5, requiresMax: true },
{ label: 'Every hour', value: 60, requiresMax: false },
{ label: 'Every 6 hours', value: 360, requiresMax: false },
{ label: 'Daily', value: 1440, requiresMax: false },
{ label: 'Weekly', value: 10080, requiresMax: false },
{ label: 'Manual only', value: 0, requiresMax: false },
] as const

View File

@@ -21,6 +21,10 @@ import {
ModalTabsTrigger,
Skeleton,
} from '@/components/emcn'
import { getSubscriptionAccessState } from '@/lib/billing/client'
import { SYNC_INTERVALS } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/consts'
import { MaxBadge } from '@/app/workspace/[workspaceId]/knowledge/[id]/components/max-badge'
import { isBillingEnabled } from '@/app/workspace/[workspaceId]/settings/navigation'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
import type { ConnectorConfig } from '@/connectors/types'
import type { ConnectorData } from '@/hooks/queries/kb/connectors'
@@ -30,17 +34,10 @@ import {
useRestoreConnectorDocument,
useUpdateConnector,
} from '@/hooks/queries/kb/connectors'
import { useSubscriptionData } from '@/hooks/queries/subscription'
const logger = createLogger('EditConnectorModal')
const SYNC_INTERVALS = [
{ label: 'Every hour', value: 60 },
{ label: 'Every 6 hours', value: 360 },
{ label: 'Daily', value: 1440 },
{ label: 'Weekly', value: 10080 },
{ label: 'Manual only', value: 0 },
] as const
/** Keys injected by the sync engine — not user-editable */
const INTERNAL_CONFIG_KEYS = new Set(['tagSlotMapping', 'disabledTagIds'])
@@ -76,6 +73,10 @@ export function EditConnectorModal({
const { mutate: updateConnector, isPending: isSaving } = useUpdateConnector()
const { data: subscriptionResponse } = useSubscriptionData({ enabled: isBillingEnabled })
const subscriptionAccess = getSubscriptionAccessState(subscriptionResponse?.data)
const hasMaxAccess = !isBillingEnabled || subscriptionAccess.hasUsableMaxAccess
const hasChanges = useMemo(() => {
if (syncInterval !== connector.syncIntervalMinutes) return true
for (const [key, value] of Object.entries(sourceConfig)) {
@@ -146,6 +147,7 @@ export function EditConnectorModal({
setSourceConfig={setSourceConfig}
syncInterval={syncInterval}
setSyncInterval={setSyncInterval}
hasMaxAccess={hasMaxAccess}
error={error}
/>
</ModalTabsContent>
@@ -184,6 +186,7 @@ interface SettingsTabProps {
setSourceConfig: React.Dispatch<React.SetStateAction<Record<string, string>>>
syncInterval: number
setSyncInterval: (v: number) => void
hasMaxAccess: boolean
error: string | null
}
@@ -193,6 +196,7 @@ function SettingsTab({
setSourceConfig,
syncInterval,
setSyncInterval,
hasMaxAccess,
error,
}: SettingsTabProps) {
return (
@@ -234,8 +238,13 @@ function SettingsTab({
onValueChange={(val) => setSyncInterval(Number(val))}
>
{SYNC_INTERVALS.map((interval) => (
<ButtonGroupItem key={interval.value} value={String(interval.value)}>
<ButtonGroupItem
key={interval.value}
value={String(interval.value)}
disabled={interval.requiresMax && !hasMaxAccess}
>
{interval.label}
{interval.requiresMax && !hasMaxAccess && <MaxBadge />}
</ButtonGroupItem>
))}
</ButtonGroup>

View File

@@ -0,0 +1,7 @@
export function MaxBadge() {
return (
<span className='ml-1 shrink-0 rounded-[3px] bg-[var(--surface-5)] px-1 py-[1px] font-medium text-[9px] text-[var(--text-icon)] uppercase tracking-wide'>
Max
</span>
)
}

View File

@@ -1,8 +1,9 @@
'use client'
import { memo, useCallback, useMemo, useState } from 'react'
import { memo, useCallback, useMemo, useRef, useState } from 'react'
import { ArrowUp, Bell, Library, MoreHorizontal, RefreshCw } from 'lucide-react'
import { useParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { useShallow } from 'zustand/react/shallow'
import {
Button,
@@ -18,6 +19,7 @@ import { DatePicker } from '@/components/emcn/components/date-picker/date-picker
import { cn } from '@/lib/core/utils/cn'
import { hasActiveFilters } from '@/lib/logs/filters'
import { getTriggerOptions } from '@/lib/logs/get-trigger-options'
import { captureEvent } from '@/lib/posthog/client'
import { type LogStatus, STATUS_CONFIG } from '@/app/workspace/[workspaceId]/logs/utils'
import { getBlock } from '@/blocks/registry'
import { useFolderMap } from '@/hooks/queries/folders'
@@ -179,6 +181,9 @@ export const LogsToolbar = memo(function LogsToolbar({
}: LogsToolbarProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const posthog = usePostHog()
const posthogRef = useRef(posthog)
posthogRef.current = posthog
const {
level,
@@ -258,8 +263,45 @@ export const LogsToolbar = memo(function LogsToolbar({
} else {
setLevel(values.join(','))
}
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'status',
workspace_id: workspaceId,
})
},
[setLevel]
[setLevel, workspaceId]
)
const handleWorkflowFilterChange = useCallback(
(values: string[]) => {
setWorkflowIds(values)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'workflow',
workspace_id: workspaceId,
})
},
[setWorkflowIds, workspaceId]
)
const handleFolderFilterChange = useCallback(
(values: string[]) => {
setFolderIds(values)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'folder',
workspace_id: workspaceId,
})
},
[setFolderIds, workspaceId]
)
const handleTriggerFilterChange = useCallback(
(values: string[]) => {
setTriggers(values)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'trigger',
workspace_id: workspaceId,
})
},
[setTriggers, workspaceId]
)
const statusDisplayLabel = useMemo(() => {
@@ -348,9 +390,13 @@ export const LogsToolbar = memo(function LogsToolbar({
} else {
clearDateRange()
setTimeRange(val as typeof timeRange)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'time',
workspace_id: workspaceId,
})
}
},
[timeRange, setTimeRange, clearDateRange]
[timeRange, setTimeRange, clearDateRange, workspaceId]
)
/**
@@ -360,8 +406,12 @@ export const LogsToolbar = memo(function LogsToolbar({
(start: string, end: string) => {
setDateRange(start, end)
setDatePickerOpen(false)
captureEvent(posthogRef.current, 'logs_filter_applied', {
filter_type: 'time',
workspace_id: workspaceId,
})
},
[setDateRange]
[setDateRange, workspaceId]
)
/**
@@ -545,7 +595,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={workflowOptions}
multiSelect
multiSelectValues={workflowIds}
onMultiSelectChange={setWorkflowIds}
onMultiSelectChange={handleWorkflowFilterChange}
placeholder='All workflows'
overlayContent={
<span className='flex items-center gap-1.5 truncate text-[var(--text-primary)]'>
@@ -580,7 +630,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={folderOptions}
multiSelect
multiSelectValues={folderIds}
onMultiSelectChange={setFolderIds}
onMultiSelectChange={handleFolderFilterChange}
placeholder='All folders'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>
@@ -605,7 +655,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={triggerOptions}
multiSelect
multiSelectValues={triggers}
onMultiSelectChange={setTriggers}
onMultiSelectChange={handleTriggerFilterChange}
placeholder='All triggers'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>
@@ -676,7 +726,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={workflowOptions}
multiSelect
multiSelectValues={workflowIds}
onMultiSelectChange={setWorkflowIds}
onMultiSelectChange={handleWorkflowFilterChange}
placeholder='Workflow'
overlayContent={
<span className='flex items-center gap-1.5 truncate text-[var(--text-primary)]'>
@@ -707,7 +757,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={folderOptions}
multiSelect
multiSelectValues={folderIds}
onMultiSelectChange={setFolderIds}
onMultiSelectChange={handleFolderFilterChange}
placeholder='Folder'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{folderDisplayLabel}</span>
@@ -726,7 +776,7 @@ export const LogsToolbar = memo(function LogsToolbar({
options={triggerOptions}
multiSelect
multiSelectValues={triggers}
onMultiSelectChange={setTriggers}
onMultiSelectChange={handleTriggerFilterChange}
placeholder='Trigger'
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{triggerDisplayLabel}</span>

View File

@@ -62,6 +62,8 @@ const roleOptions = [
{ value: 'admin', label: 'Admin' },
] as const
const roleComboOptions = roleOptions.map((option) => ({ value: option.value, label: option.label }))
export function IntegrationsManager() {
const params = useParams()
const workspaceId = (params?.workspaceId as string) || ''
@@ -1315,42 +1317,32 @@ export function IntegrationsManager() {
</div>
</div>
<Combobox
options={roleComboOptions}
value={
roleOptions.find((option) => option.value === member.role)?.label || ''
}
selectedValue={member.role}
onChange={(value) =>
handleChangeMemberRole(member.userId, value as WorkspaceCredentialRole)
}
placeholder='Role'
disabled={
!isSelectedAdmin || (member.role === 'admin' && adminMemberCount <= 1)
}
size='sm'
/>
{isSelectedAdmin ? (
<>
<Combobox
options={roleOptions.map((option) => ({
value: option.value,
label: option.label,
}))}
value={
roleOptions.find((option) => option.value === member.role)?.label ||
''
}
selectedValue={member.role}
onChange={(value) =>
handleChangeMemberRole(
member.userId,
value as WorkspaceCredentialRole
)
}
placeholder='Role'
disabled={member.role === 'admin' && adminMemberCount <= 1}
size='sm'
/>
<Button
variant='ghost'
onClick={() => handleRemoveMember(member.userId)}
disabled={member.role === 'admin' && adminMemberCount <= 1}
className='w-full justify-end'
>
Remove
</Button>
</>
<Button
variant='ghost'
onClick={() => handleRemoveMember(member.userId)}
disabled={member.role === 'admin' && adminMemberCount <= 1}
className='w-full justify-end'
>
Remove
</Button>
) : (
<>
<Badge variant='gray-secondary'>{member.role}</Badge>
<div />
</>
<div />
)}
</div>
))}
@@ -1370,10 +1362,7 @@ export function IntegrationsManager() {
size='sm'
/>
<Combobox
options={roleOptions.map((option) => ({
value: option.value,
label: option.label,
}))}
options={roleComboOptions}
value={
roleOptions.find((option) => option.value === memberRole)?.label || ''
}

View File

@@ -6,7 +6,7 @@ import {
DropdownMenuTrigger,
} from '@/components/emcn'
import { ArrowDown, ArrowUp, Duplicate, Pencil, Trash } from '@/components/emcn/icons'
import type { ContextMenuState } from '../../types'
import type { ContextMenuState } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
interface ContextMenuProps {
contextMenu: ContextMenuState

View File

@@ -17,13 +17,17 @@ import {
Textarea,
} from '@/components/emcn'
import type { ColumnDefinition, TableInfo, TableRow } from '@/lib/table'
import {
cleanCellValue,
formatValueForInput,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/utils'
import {
useCreateTableRow,
useDeleteTableRow,
useDeleteTableRows,
useUpdateTableRow,
} from '@/hooks/queries/tables'
import { cleanCellValue, formatValueForInput } from '../../utils'
import { useTableUndoStore } from '@/stores/table/store'
const logger = createLogger('RowModal')
@@ -39,13 +43,9 @@ export interface RowModalProps {
function createInitialRowData(columns: ColumnDefinition[]): Record<string, unknown> {
const initial: Record<string, unknown> = {}
columns.forEach((col) => {
if (col.type === 'boolean') {
initial[col.name] = false
} else {
initial[col.name] = ''
}
})
for (const col of columns) {
initial[col.name] = col.type === 'boolean' ? false : ''
}
return initial
}
@@ -54,16 +54,13 @@ function cleanRowData(
rowData: Record<string, unknown>
): Record<string, unknown> {
const cleanData: Record<string, unknown> = {}
columns.forEach((col) => {
const value = rowData[col.name]
for (const col of columns) {
try {
cleanData[col.name] = cleanCellValue(value, col)
cleanData[col.name] = cleanCellValue(rowData[col.name], col)
} catch {
throw new Error(`Invalid JSON for field: ${col.name}`)
}
})
}
return cleanData
}
@@ -86,8 +83,7 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const workspaceId = params.workspaceId as string
const tableId = table.id
const schema = table?.schema
const columns = schema?.columns || []
const columns = table.schema?.columns || []
const [rowData, setRowData] = useState<Record<string, unknown>>(() =>
getInitialRowData(mode, columns, row)
@@ -97,6 +93,7 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const updateRowMutation = useUpdateTableRow({ workspaceId, tableId })
const deleteRowMutation = useDeleteTableRow({ workspaceId, tableId })
const deleteRowsMutation = useDeleteTableRows({ workspaceId, tableId })
const pushToUndoStack = useTableUndoStore((s) => s.push)
const isSubmitting =
createRowMutation.isPending ||
updateRowMutation.isPending ||
@@ -111,9 +108,24 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const cleanData = cleanRowData(columns, rowData)
if (mode === 'add') {
await createRowMutation.mutateAsync({ data: cleanData })
const response = await createRowMutation.mutateAsync({ data: cleanData })
const createdRow = (response as { data?: { row?: { id?: string; position?: number } } })
?.data?.row
if (createdRow?.id) {
pushToUndoStack(tableId, {
type: 'create-row',
rowId: createdRow.id,
position: createdRow.position ?? 0,
data: cleanData,
})
}
} else if (mode === 'edit' && row) {
const oldData = row.data as Record<string, unknown>
await updateRowMutation.mutateAsync({ rowId: row.id, data: cleanData })
pushToUndoStack(tableId, {
type: 'update-cells',
cells: [{ rowId: row.id, oldData, newData: cleanData }],
})
}
onSuccess()
@@ -129,8 +141,14 @@ export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess
const idsToDelete = rowIds ?? (row ? [row.id] : [])
try {
if (idsToDelete.length === 1) {
if (idsToDelete.length === 1 && row) {
await deleteRowMutation.mutateAsync(idsToDelete[0])
pushToUndoStack(tableId, {
type: 'delete-rows',
rows: [
{ rowId: row.id, data: row.data as Record<string, unknown>, position: row.position },
],
})
} else {
await deleteRowsMutation.mutateAsync(idsToDelete)
}

View File

@@ -1 +1,2 @@
export type { TableFilterHandle } from './table-filter'
export { TableFilter } from './table-filter'

View File

@@ -1,6 +1,14 @@
'use client'
import { memo, useCallback, useMemo, useRef, useState } from 'react'
import {
forwardRef,
memo,
useCallback,
useImperativeHandle,
useMemo,
useRef,
useState,
} from 'react'
import { X } from 'lucide-react'
import { nanoid } from 'nanoid'
import {
@@ -19,22 +27,42 @@ const OPERATOR_LABELS = Object.fromEntries(
COMPARISON_OPERATORS.map((op) => [op.value, op.label])
) as Record<string, string>
export interface TableFilterHandle {
addColumnRule: (columnName: string) => void
}
interface TableFilterProps {
columns: Array<{ name: string; type: string }>
filter: Filter | null
onApply: (filter: Filter | null) => void
onClose: () => void
initialColumn?: string | null
}
export function TableFilter({ columns, filter, onApply, onClose }: TableFilterProps) {
export const TableFilter = forwardRef<TableFilterHandle, TableFilterProps>(function TableFilter(
{ columns, filter, onApply, onClose, initialColumn },
ref
) {
const [rules, setRules] = useState<FilterRule[]>(() => {
const fromFilter = filterToRules(filter)
return fromFilter.length > 0 ? fromFilter : [createRule(columns)]
if (fromFilter.length > 0) return fromFilter
const rule = createRule(columns)
return [initialColumn ? { ...rule, column: initialColumn } : rule]
})
const rulesRef = useRef(rules)
rulesRef.current = rules
useImperativeHandle(
ref,
() => ({
addColumnRule: (columnName: string) => {
setRules((prev) => [...prev, { ...createRule(columns), column: columnName }])
},
}),
[columns]
)
const columnOptions = useMemo(
() => columns.map((col) => ({ value: col.name, label: col.name })),
[columns]
@@ -125,7 +153,7 @@ export function TableFilter({ columns, filter, onApply, onClose }: TableFilterPr
</div>
</div>
)
}
})
interface FilterRuleRowProps {
rule: FilterRule

View File

@@ -24,11 +24,15 @@ import {
Skeleton,
} from '@/components/emcn'
import {
ArrowDown,
ArrowLeft,
ArrowRight,
ArrowUp,
Calendar as CalendarIcon,
ChevronDown,
Download,
Fingerprint,
ListFilter,
Pencil,
Plus,
Table as TableIcon,
@@ -45,6 +49,26 @@ import type { ColumnDefinition, Filter, SortDirection, TableRow as TableRowType
import type { ColumnOption, SortConfig } from '@/app/workspace/[workspaceId]/components'
import { ResourceHeader, ResourceOptionsBar } from '@/app/workspace/[workspaceId]/components'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ContextMenu } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu'
import { RowModal } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/row-modal'
import type { TableFilterHandle } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter'
import { TableFilter } from '@/app/workspace/[workspaceId]/tables/[tableId]/components/table-filter'
import {
useContextMenu,
useExportTable,
useTableData,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/hooks'
import type {
EditingCell,
QueryOptions,
SaveReason,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import {
cleanCellValue,
displayToStorage,
formatValueForInput,
storageToDisplay,
} from '@/app/workspace/[workspaceId]/tables/[tableId]/utils'
import {
useAddTableColumn,
useBatchCreateTableRows,
@@ -60,17 +84,6 @@ import {
import { useInlineRename } from '@/hooks/use-inline-rename'
import { extractCreatedRowId, useTableUndo } from '@/hooks/use-table-undo'
import type { DeletedRowSnapshot } from '@/stores/table/types'
import { useContextMenu, useTableData } from '../../hooks'
import type { EditingCell, QueryOptions, SaveReason } from '../../types'
import {
cleanCellValue,
displayToStorage,
formatValueForInput,
storageToDisplay,
} from '../../utils'
import { ContextMenu } from '../context-menu'
import { RowModal } from '../row-modal'
import { TableFilter } from '../table-filter'
interface CellCoord {
rowIndex: number
@@ -88,6 +101,7 @@ interface NormalizedSelection {
const EMPTY_COLUMNS: never[] = []
const EMPTY_CHECKED_ROWS = new Set<number>()
const clearCheckedRows = (prev: Set<number>) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS)
const COL_WIDTH = 160
const COL_WIDTH_MIN = 80
const CHECKBOX_COL_WIDTH = 40
@@ -196,6 +210,7 @@ export function Table({
const [initialCharacter, setInitialCharacter] = useState<string | null>(null)
const [selectionAnchor, setSelectionAnchor] = useState<CellCoord | null>(null)
const [selectionFocus, setSelectionFocus] = useState<CellCoord | null>(null)
const [isColumnSelection, setIsColumnSelection] = useState(false)
const [checkedRows, setCheckedRows] = useState(EMPTY_CHECKED_ROWS)
const lastCheckboxRowRef = useRef<number | null>(null)
const [showDeleteTableConfirm, setShowDeleteTableConfirm] = useState(false)
@@ -220,6 +235,7 @@ export function Table({
const metadataSeededRef = useRef(false)
const containerRef = useRef<HTMLDivElement>(null)
const scrollRef = useRef<HTMLDivElement>(null)
const tableFilterRef = useRef<TableFilterHandle>(null)
const isDraggingRef = useRef(false)
const { tableData, isLoadingTable, rows, isLoadingRows } = useTableData({
@@ -291,10 +307,11 @@ export function Table({
const positionMapRef = useRef(positionMap)
positionMapRef.current = positionMap
const normalizedSelection = useMemo(
() => computeNormalizedSelection(selectionAnchor, selectionFocus),
[selectionAnchor, selectionFocus]
)
const normalizedSelection = useMemo(() => {
const raw = computeNormalizedSelection(selectionAnchor, selectionFocus)
if (!raw || !isColumnSelection) return raw
return { ...raw, startRow: 0, endRow: Math.max(maxPosition, 0) }
}, [selectionAnchor, selectionFocus, isColumnSelection, maxPosition])
const displayColCount = isLoadingTable ? SKELETON_COL_COUNT : displayColumns.length
const tableWidth = useMemo(() => {
@@ -315,7 +332,18 @@ export function Table({
}, [resizingColumn, displayColumns, columnWidths])
const dropIndicatorLeft = useMemo(() => {
if (!dropTargetColumnName) return null
if (!dropTargetColumnName || !dragColumnName) return null
const dragIdx = displayColumns.findIndex((c) => c.name === dragColumnName)
const targetIdx = displayColumns.findIndex((c) => c.name === dropTargetColumnName)
if (dragIdx !== -1 && targetIdx !== -1) {
// Suppress when drop would be a no-op (same effective position)
if (targetIdx === dragIdx) return null
if (dropSide === 'right' && targetIdx === dragIdx - 1) return null
if (dropSide === 'left' && targetIdx === dragIdx + 1) return null
}
let left = CHECKBOX_COL_WIDTH
for (const col of displayColumns) {
if (dropSide === 'left' && col.name === dropTargetColumnName) return left
@@ -323,7 +351,7 @@ export function Table({
if (dropSide === 'right' && col.name === dropTargetColumnName) return left
}
return null
}, [dropTargetColumnName, dropSide, displayColumns, columnWidths])
}, [dropTargetColumnName, dropSide, displayColumns, columnWidths, dragColumnName])
const isAllRowsSelected = useMemo(() => {
if (checkedRows.size > 0 && rows.length > 0 && checkedRows.size >= rows.length) {
@@ -350,6 +378,7 @@ export function Table({
const rowsRef = useRef(rows)
const selectionAnchorRef = useRef(selectionAnchor)
const selectionFocusRef = useRef(selectionFocus)
const normalizedSelectionRef = useRef(normalizedSelection)
const checkedRowsRef = useRef(checkedRows)
checkedRowsRef.current = checkedRows
@@ -359,6 +388,7 @@ export function Table({
rowsRef.current = rows
selectionAnchorRef.current = selectionAnchor
selectionFocusRef.current = selectionFocus
normalizedSelectionRef.current = normalizedSelection
const deleteTableMutation = useDeleteTable(workspaceId)
const renameTableMutation = useRenameTable(workspaceId)
@@ -574,7 +604,8 @@ export function Table({
const handleCellMouseDown = useCallback(
(rowIndex: number, colIndex: number, shiftKey: boolean) => {
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
setCheckedRows(clearCheckedRows)
setIsColumnSelection(false)
lastCheckboxRowRef.current = null
if (shiftKey && selectionAnchorRef.current) {
setSelectionFocus({ rowIndex, colIndex })
@@ -597,6 +628,7 @@ export function Table({
setEditingCell(null)
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
if (shiftKey && lastCheckboxRowRef.current !== null) {
const from = Math.min(lastCheckboxRowRef.current, rowIndex)
@@ -627,7 +659,8 @@ export function Table({
const handleClearSelection = useCallback(() => {
setSelectionAnchor(null)
setSelectionFocus(null)
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
setIsColumnSelection(false)
setCheckedRows(clearCheckedRows)
lastCheckboxRowRef.current = null
}, [])
@@ -637,6 +670,7 @@ export function Table({
setEditingCell(null)
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
const all = new Set<number>()
for (const row of rws) {
all.add(row.position)
@@ -682,21 +716,22 @@ export function Table({
const target = dropTargetColumnNameRef.current
const side = dropSideRef.current
if (target && dragged !== target) {
const cols = columnsRef.current
const currentOrder = columnOrderRef.current ?? cols.map((c) => c.name)
const fromIndex = currentOrder.indexOf(dragged)
const toIndex = currentOrder.indexOf(target)
if (fromIndex !== -1 && toIndex !== -1) {
const newOrder = currentOrder.filter((n) => n !== dragged)
let insertIndex = newOrder.indexOf(target)
if (side === 'right') insertIndex += 1
newOrder.splice(insertIndex, 0, dragged)
setColumnOrder(newOrder)
updateMetadataRef.current({
columnWidths: columnWidthsRef.current,
columnOrder: newOrder,
})
const currentOrder = columnOrderRef.current ?? columnsRef.current.map((c) => c.name)
const newOrder = currentOrder.filter((n) => n !== dragged)
const targetIndex = newOrder.indexOf(target)
if (targetIndex === -1) {
setDragColumnName(null)
setDropTargetColumnName(null)
setDropSide('left')
return
}
const insertIndex = side === 'right' ? targetIndex + 1 : targetIndex
newOrder.splice(insertIndex, 0, dragged)
setColumnOrder(newOrder)
updateMetadataRef.current({
columnWidths: columnWidthsRef.current,
columnOrder: newOrder,
})
}
setDragColumnName(null)
setDropTargetColumnName(null)
@@ -782,6 +817,9 @@ export function Table({
const updateMetadataRef = useRef(updateMetadataMutation.mutate)
updateMetadataRef.current = updateMetadataMutation.mutate
const addColumnAsyncRef = useRef(addColumnMutation.mutateAsync)
addColumnAsyncRef.current = addColumnMutation.mutateAsync
const toggleBooleanCellRef = useRef(toggleBooleanCell)
toggleBooleanCellRef.current = toggleBooleanCell
@@ -794,7 +832,21 @@ export function Table({
const handleKeyDown = (e: KeyboardEvent) => {
const tag = (e.target as HTMLElement).tagName
if (tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT') return
if (tag === 'INPUT' || tag === 'TEXTAREA' || tag === 'SELECT') {
if (e.key === 'Escape') setIsColumnSelection(false)
return
}
if (e.key === 'Escape') {
e.preventDefault()
isDraggingRef.current = false
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
setCheckedRows(clearCheckedRows)
lastCheckboxRowRef.current = null
return
}
if ((e.metaKey || e.ctrlKey) && (e.key === 'z' || e.key === 'y')) {
e.preventDefault()
@@ -806,15 +858,6 @@ export function Table({
return
}
if (e.key === 'Escape') {
e.preventDefault()
setSelectionAnchor(null)
setSelectionFocus(null)
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
lastCheckboxRowRef.current = null
return
}
if ((e.metaKey || e.ctrlKey) && e.key === 'a') {
e.preventDefault()
const rws = rowsRef.current
@@ -822,6 +865,7 @@ export function Table({
setEditingCell(null)
setSelectionAnchor(null)
setSelectionFocus(null)
setIsColumnSelection(false)
const all = new Set<number>()
for (const row of rws) {
all.add(row.position)
@@ -835,6 +879,7 @@ export function Table({
const a = selectionAnchorRef.current
if (!a || editingCellRef.current) return
e.preventDefault()
setIsColumnSelection(false)
setSelectionFocus(null)
setCheckedRows((prev) => {
const next = new Set(prev)
@@ -887,6 +932,7 @@ export function Table({
const row = positionMapRef.current.get(anchor.rowIndex)
if (!row) return
e.preventDefault()
setIsColumnSelection(false)
const position = row.position + 1
const colIndex = anchor.colIndex
createRef.current(
@@ -908,12 +954,12 @@ export function Table({
if (e.key === 'Enter' || e.key === 'F2') {
if (!canEditRef.current) return
e.preventDefault()
setIsColumnSelection(false)
const col = cols[anchor.colIndex]
if (!col) return
const row = positionMapRef.current.get(anchor.rowIndex)
if (!row) return
if (col.type === 'boolean') {
toggleBooleanCellRef.current(row.id, col.name, row.data[col.name])
return
@@ -935,7 +981,8 @@ export function Table({
if (e.key === 'Tab') {
e.preventDefault()
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
setCheckedRows(clearCheckedRows)
setIsColumnSelection(false)
lastCheckboxRowRef.current = null
setSelectionAnchor(moveCell(anchor, cols.length, totalRows, e.shiftKey ? -1 : 1))
setSelectionFocus(null)
@@ -944,7 +991,8 @@ export function Table({
if (['ArrowUp', 'ArrowDown', 'ArrowLeft', 'ArrowRight'].includes(e.key)) {
e.preventDefault()
setCheckedRows((prev) => (prev.size === 0 ? prev : EMPTY_CHECKED_ROWS))
setCheckedRows(clearCheckedRows)
setIsColumnSelection(false)
lastCheckboxRowRef.current = null
const focus = selectionFocusRef.current ?? anchor
const origin = e.shiftKey ? focus : anchor
@@ -979,7 +1027,7 @@ export function Table({
if (e.key === 'Delete' || e.key === 'Backspace') {
if (!canEditRef.current) return
e.preventDefault()
const sel = computeNormalizedSelection(anchor, selectionFocusRef.current)
const sel = normalizedSelectionRef.current
if (!sel) return
const pMap = positionMapRef.current
const undoCells: Array<{ rowId: string; data: Record<string, unknown> }> = []
@@ -1011,6 +1059,7 @@ export function Table({
if (col.type === 'number' && !/[\d.-]/.test(e.key)) return
if (col.type === 'date' && !/[\d\-/]/.test(e.key)) return
e.preventDefault()
setIsColumnSelection(false)
const row = positionMapRef.current.get(anchor.rowIndex)
if (!row) return
@@ -1047,10 +1096,7 @@ export function Table({
return
}
const anchor = selectionAnchorRef.current
if (!anchor) return
const sel = computeNormalizedSelection(anchor, selectionFocusRef.current)
const sel = normalizedSelectionRef.current
if (!sel) return
e.preventDefault()
@@ -1106,10 +1152,7 @@ export function Table({
}
e.clipboardData?.setData('text/plain', lines.join('\n'))
} else {
const anchor = selectionAnchorRef.current
if (!anchor) return
const sel = computeNormalizedSelection(anchor, selectionFocusRef.current)
const sel = normalizedSelectionRef.current
if (!sel) return
e.preventDefault()
@@ -1145,7 +1188,7 @@ export function Table({
}
}
const handlePaste = (e: ClipboardEvent) => {
const handlePaste = async (e: ClipboardEvent) => {
const tag = (e.target as HTMLElement).tagName
if (tag === 'INPUT' || tag === 'TEXTAREA') return
if (!canEditRef.current) return
@@ -1164,8 +1207,48 @@ export function Table({
if (pasteRows.length === 0) return
const currentCols = columnsRef.current
let currentCols = columnsRef.current
const pMap = positionMapRef.current
const maxPasteCols = Math.max(...pasteRows.map((pr) => pr.length))
const neededExtraCols = Math.max(
0,
currentAnchor.colIndex + maxPasteCols - currentCols.length
)
if (neededExtraCols > 0) {
// Generate unique names for the new columns without colliding with each other
const existingNames = new Set(currentCols.map((c) => c.name.toLowerCase()))
const newColNames: string[] = []
for (let i = 0; i < neededExtraCols; i++) {
let name = 'untitled'
let n = 2
while (existingNames.has(name.toLowerCase())) {
name = `untitled_${n}`
n++
}
existingNames.add(name.toLowerCase())
newColNames.push(name)
}
// Create columns sequentially so each invalidation completes before the next
const createdColNames: string[] = []
try {
for (const name of newColNames) {
await addColumnAsyncRef.current({ name, type: 'string' })
createdColNames.push(name)
}
} catch {
// If column creation fails partway, paste into whatever columns were created
}
// Build updated column list locally — React Query cache may not have refreshed yet
if (createdColNames.length > 0) {
currentCols = [
...currentCols,
...createdColNames.map((name) => ({ name, type: 'string' as const })),
]
}
}
const undoCells: Array<{ rowId: string; data: Record<string, unknown> }> = []
const updateBatch: Array<{ rowId: string; data: Record<string, unknown> }> = []
@@ -1245,7 +1328,6 @@ export function Table({
)
}
const maxPasteCols = Math.max(...pasteRows.map((pr) => pr.length))
setSelectionFocus({
rowIndex: currentAnchor.rowIndex + pasteRows.length - 1,
colIndex: Math.min(currentAnchor.colIndex + maxPasteCols - 1, currentCols.length - 1),
@@ -1321,10 +1403,10 @@ export function Table({
}, [])
const generateColumnName = useCallback(() => {
const existing = schemaColumnsRef.current.map((c) => c.name.toLowerCase())
const existing = new Set(schemaColumnsRef.current.map((c) => c.name.toLowerCase()))
let name = 'untitled'
let i = 2
while (existing.includes(name.toLowerCase())) {
while (existing.has(name)) {
name = `untitled_${i}`
i++
}
@@ -1429,7 +1511,10 @@ export function Table({
}, [])
const handleRenameColumn = useCallback(
(name: string) => columnRename.startRename(name, name),
(name: string) => {
isDraggingRef.current = false
columnRename.startRename(name, name)
},
[columnRename.startRename]
)
@@ -1440,10 +1525,22 @@ export function Table({
const handleDeleteColumnConfirm = useCallback(() => {
if (!deletingColumn) return
const columnToDelete = deletingColumn
const column = schemaColumnsRef.current.find((c) => c.name === columnToDelete)
const position = schemaColumnsRef.current.findIndex((c) => c.name === columnToDelete)
const orderAtDelete = columnOrderRef.current
setDeletingColumn(null)
deleteColumnMutation.mutate(columnToDelete, {
onSuccess: () => {
if (column && position !== -1) {
pushUndoRef.current({
type: 'delete-column',
columnName: columnToDelete,
columnType: column.type,
position,
unique: !!column.unique,
required: !!column.required,
})
}
if (!orderAtDelete) return
const newOrder = orderAtDelete.filter((n) => n !== columnToDelete)
setColumnOrder(newOrder)
@@ -1468,13 +1565,28 @@ export function Table({
}, [])
const [filterOpen, setFilterOpen] = useState(false)
const [initialFilterColumn, setInitialFilterColumn] = useState<string | null>(null)
const handleFilterToggle = useCallback(() => {
setInitialFilterColumn(null)
setFilterOpen((prev) => !prev)
}, [])
const handleFilterClose = useCallback(() => {
setFilterOpen(false)
setInitialFilterColumn(null)
}, [])
const filterOpenRef = useRef(filterOpen)
filterOpenRef.current = filterOpen
const handleFilterByColumn = useCallback((columnName: string) => {
if (filterOpenRef.current && tableFilterRef.current) {
tableFilterRef.current.addColumnRule(columnName)
} else {
setInitialFilterColumn(columnName)
setFilterOpen(true)
}
}, [])
const columnOptions = useMemo<ColumnOption[]>(
@@ -1555,6 +1667,27 @@ export function Table({
[handleAddColumn, addColumnMutation.isPending]
)
const { handleExportTable, isExporting } = useExportTable({
workspaceId,
tableId,
tableName: tableData?.name,
columns: displayColumns,
queryOptions,
canExport: userPermissions.canEdit,
})
const headerActions = useMemo(
() => [
{
label: isExporting ? 'Exporting...' : 'Export CSV',
icon: Download,
onClick: () => void handleExportTable(),
disabled: !userPermissions.canEdit || !hasTableData || isLoadingTable || isExporting,
},
],
[handleExportTable, hasTableData, isExporting, isLoadingTable, userPermissions.canEdit]
)
const activeSortState = useMemo(() => {
if (!queryOptions.sort) return null
const entries = Object.entries(queryOptions.sort)
@@ -1563,6 +1696,32 @@ export function Table({
return { column, direction }
}, [queryOptions.sort])
const selectedColumnRange = useMemo(() => {
if (!isColumnSelection || !normalizedSelection) return null
return { start: normalizedSelection.startCol, end: normalizedSelection.endCol }
}, [isColumnSelection, normalizedSelection])
const draggingColIndex = useMemo(
() => (dragColumnName ? displayColumns.findIndex((c) => c.name === dragColumnName) : null),
[dragColumnName, displayColumns]
)
const handleColumnSelect = useCallback((colIndex: number) => {
setSelectionAnchor({ rowIndex: 0, colIndex })
setSelectionFocus({ rowIndex: 0, colIndex })
setIsColumnSelection(true)
}, [])
const handleSortAsc = useCallback(
(columnName: string) => handleSortChange(columnName, 'asc'),
[handleSortChange]
)
const handleSortDesc = useCallback(
(columnName: string) => handleSortChange(columnName, 'desc'),
[handleSortChange]
)
const sortConfig = useMemo<SortConfig>(
() => ({
options: columnOptions,
@@ -1619,7 +1778,12 @@ export function Table({
<div ref={containerRef} className='flex h-full flex-col overflow-hidden'>
{!embedded && (
<>
<ResourceHeader icon={TableIcon} breadcrumbs={breadcrumbs} create={createAction} />
<ResourceHeader
icon={TableIcon}
breadcrumbs={breadcrumbs}
actions={headerActions}
create={createAction}
/>
<ResourceOptionsBar
sort={sortConfig}
@@ -1628,10 +1792,12 @@ export function Table({
/>
{filterOpen && (
<TableFilter
ref={tableFilterRef}
columns={displayColumns}
filter={queryOptions.filter}
onApply={handleFilterApply}
onClose={handleFilterClose}
initialColumn={initialFilterColumn}
/>
)}
</>
@@ -1691,10 +1857,11 @@ export function Table({
checked={isAllRowsSelected}
onCheckedChange={handleSelectAllToggle}
/>
{displayColumns.map((column) => (
{displayColumns.map((column, colIndex) => (
<ColumnHeaderMenu
key={column.name}
column={column}
colIndex={colIndex}
readOnly={!userPermissions.canEdit}
isRenaming={columnRename.editingId === column.name}
renameValue={
@@ -1713,10 +1880,20 @@ export function Table({
onResize={handleColumnResize}
onResizeEnd={handleColumnResizeEnd}
isDragging={dragColumnName === column.name}
isDropTarget={
dropTargetColumnName === column.name && dropIndicatorLeft !== null
}
onDragStart={handleColumnDragStart}
onDragOver={handleColumnDragOver}
onDragEnd={handleColumnDragEnd}
onDragLeave={handleColumnDragLeave}
sortDirection={
activeSortState?.column === column.name ? activeSortState.direction : null
}
onSortAsc={handleSortAsc}
onSortDesc={handleSortDesc}
onFilterColumn={handleFilterByColumn}
onColumnSelect={handleColumnSelect}
/>
))}
{userPermissions.canEdit && (
@@ -1744,6 +1921,7 @@ export function Table({
startPosition={prevPosition + 1}
columns={displayColumns}
normalizedSelection={normalizedSelection}
draggingColIndex={draggingColIndex}
checkedRows={checkedRows}
firstRowUnderHeader={prevPosition === -1}
onCellMouseDown={handleCellMouseDown}
@@ -1766,6 +1944,7 @@ export function Table({
: null
}
normalizedSelection={normalizedSelection}
draggingColIndex={draggingColIndex}
onClick={handleCellClick}
onDoubleClick={handleCellDoubleClick}
onSave={handleInlineSave}
@@ -1917,6 +2096,7 @@ interface PositionGapRowsProps {
startPosition: number
columns: ColumnDefinition[]
normalizedSelection: NormalizedSelection | null
draggingColIndex: number | null
checkedRows: Set<number>
firstRowUnderHeader?: boolean
onCellMouseDown: (rowIndex: number, colIndex: number, shiftKey: boolean) => void
@@ -1930,6 +2110,7 @@ const PositionGapRows = React.memo(
startPosition,
columns,
normalizedSelection,
draggingColIndex,
checkedRows,
firstRowUnderHeader = false,
onCellMouseDown,
@@ -1995,7 +2176,11 @@ const PositionGapRows = React.memo(
key={col.name}
data-row={position}
data-col={colIndex}
className={cn(CELL, (isHighlighted || isAnchor) && 'relative')}
className={cn(
CELL,
(isHighlighted || isAnchor) && 'relative',
draggingColIndex === colIndex && 'opacity-40'
)}
onMouseDown={(e) => {
if (e.button !== 0) return
onCellMouseDown(position, colIndex, e.shiftKey)
@@ -2040,6 +2225,7 @@ const PositionGapRows = React.memo(
prev.startPosition !== next.startPosition ||
prev.columns !== next.columns ||
prev.normalizedSelection !== next.normalizedSelection ||
prev.draggingColIndex !== next.draggingColIndex ||
prev.firstRowUnderHeader !== next.firstRowUnderHeader ||
prev.onCellMouseDown !== next.onCellMouseDown ||
prev.onCellMouseEnter !== next.onCellMouseEnter ||
@@ -2082,6 +2268,7 @@ interface DataRowProps {
initialCharacter: string | null
pendingCellValue: Record<string, unknown> | null
normalizedSelection: NormalizedSelection | null
draggingColIndex: number | null
onClick: (rowId: string, columnName: string) => void
onDoubleClick: (rowId: string, columnName: string) => void
onSave: (rowId: string, columnName: string, value: unknown, reason: SaveReason) => void
@@ -2132,6 +2319,7 @@ function dataRowPropsAreEqual(prev: DataRowProps, next: DataRowProps): boolean {
prev.isFirstRow !== next.isFirstRow ||
prev.editingColumnName !== next.editingColumnName ||
prev.pendingCellValue !== next.pendingCellValue ||
prev.draggingColIndex !== next.draggingColIndex ||
prev.onClick !== next.onClick ||
prev.onDoubleClick !== next.onDoubleClick ||
prev.onSave !== next.onSave ||
@@ -2168,6 +2356,7 @@ const DataRow = React.memo(function DataRow({
initialCharacter,
pendingCellValue,
normalizedSelection,
draggingColIndex,
isRowChecked,
onClick,
onDoubleClick,
@@ -2235,7 +2424,11 @@ const DataRow = React.memo(function DataRow({
key={column.name}
data-row={rowIndex}
data-col={colIndex}
className={cn(CELL, (isHighlighted || isAnchor || isEditing) && 'relative')}
className={cn(
CELL,
(isHighlighted || isAnchor || isEditing) && 'relative',
draggingColIndex === colIndex && 'opacity-40'
)}
onMouseDown={(e) => {
if (e.button !== 0 || isEditing) return
onCellMouseDown(rowIndex, colIndex, e.shiftKey)
@@ -2605,6 +2798,7 @@ const COLUMN_TYPE_OPTIONS: { type: string; label: string; icon: React.ElementTyp
const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
column,
colIndex,
readOnly,
isRenaming,
renameValue,
@@ -2621,12 +2815,19 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
onResize,
onResizeEnd,
isDragging,
isDropTarget,
onDragStart,
onDragOver,
onDragEnd,
onDragLeave,
sortDirection,
onSortAsc,
onSortDesc,
onFilterColumn,
onColumnSelect,
}: {
column: ColumnDefinition
colIndex: number
readOnly?: boolean
isRenaming: boolean
renameValue: string
@@ -2643,10 +2844,16 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
onResize: (columnName: string, width: number) => void
onResizeEnd: () => void
isDragging?: boolean
isDropTarget?: boolean
onDragStart?: (columnName: string) => void
onDragOver?: (columnName: string, side: 'left' | 'right') => void
onDragEnd?: () => void
onDragLeave?: () => void
sortDirection?: SortDirection | null
onSortAsc?: (columnName: string) => void
onSortDesc?: (columnName: string) => void
onFilterColumn?: (columnName: string) => void
onColumnSelect?: (colIndex: number) => void
}) {
const renameInputRef = useRef<HTMLInputElement>(null)
@@ -2735,7 +2942,8 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
<th
className={cn(
'group relative border-[var(--border)] border-r border-b bg-[var(--bg)] p-0 text-left align-middle',
isDragging && 'opacity-40'
isDragging && 'opacity-40',
isDropTarget && 'bg-[var(--selection)]/10'
)}
onDragOver={handleDragOver}
onDrop={handleDrop}
@@ -2760,7 +2968,7 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
) : readOnly ? (
<div className='flex h-full w-full min-w-0 items-center px-2 py-[7px]'>
<ColumnTypeIcon type={column.type} />
<span className='ml-1.5 min-w-0 overflow-clip text-ellipsis whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
<span className='ml-1.5 min-w-0 overflow-clip text-ellipsis whitespace-nowrap font-medium text-[var(--text-primary)] text-small'>
{column.name}
</span>
</div>
@@ -2771,15 +2979,34 @@ const ColumnHeaderMenu = React.memo(function ColumnHeaderMenu({
<button
type='button'
className='flex min-w-0 flex-1 cursor-pointer items-center px-2 py-[7px] outline-none'
onClick={() => onColumnSelect?.(colIndex)}
>
<ColumnTypeIcon type={column.type} />
<span className='ml-1.5 min-w-0 overflow-clip text-ellipsis whitespace-nowrap font-medium text-[var(--text-primary)] text-small'>
{column.name}
</span>
{sortDirection && (
<span className='ml-1 shrink-0'>
<SortDirectionIndicator direction={sortDirection} />
</span>
)}
<ChevronDown className='ml-1.5 h-[7px] w-[9px] shrink-0 text-[var(--text-muted)]' />
</button>
</DropdownMenuTrigger>
<DropdownMenuContent align='start'>
<DropdownMenuItem onSelect={() => onSortAsc?.(column.name)}>
<ArrowUp />
Sort ascending
</DropdownMenuItem>
<DropdownMenuItem onSelect={() => onSortDesc?.(column.name)}>
<ArrowDown />
Sort descending
</DropdownMenuItem>
<DropdownMenuItem onSelect={() => onFilterColumn?.(column.name)}>
<ListFilter />
Filter by this column
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuItem onSelect={() => onRenameColumn(column.name)}>
<Pencil />
Rename column
@@ -2900,3 +3127,11 @@ function ColumnTypeIcon({ type }: { type: string }) {
const Icon = COLUMN_TYPE_ICONS[type] ?? TypeText
return <Icon className='h-3 w-3 shrink-0 text-[var(--text-icon)]' />
}
function SortDirectionIndicator({ direction }: { direction: SortDirection }) {
return direction === 'asc' ? (
<ArrowUp className='h-[10px] w-[10px] text-[var(--text-muted)]' />
) : (
<ArrowDown className='h-[10px] w-[10px] text-[var(--text-muted)]' />
)
}

View File

@@ -0,0 +1,39 @@
import { createTableColumn, createTableRow } from '@sim/testing'
import { describe, expect, it } from 'vitest'
import { buildTableCsv, formatTableExportValue } from './export'

// Unit tests for the table CSV export helpers in ./export.
describe('table export utils', () => {
  it('formats exported values using table display conventions', () => {
    // Date columns are converted from storage form (ISO) to display form (MM/DD/YYYY).
    expect(formatTableExportValue('2026-04-03', { name: 'date', type: 'date' })).toBe('04/03/2026')
    // Non-string json values are serialized to compact JSON.
    expect(formatTableExportValue({ nested: true }, { name: 'payload', type: 'json' })).toBe(
      '{"nested":true}'
    )
    // Null (and undefined) cells export as the empty string.
    expect(formatTableExportValue(null, { name: 'empty', type: 'string' })).toBe('')
  })
  it('builds CSV using visible columns and escaped values', () => {
    const columns = [
      createTableColumn({ name: 'name', type: 'string' }),
      createTableColumn({ name: 'date', type: 'date' }),
      createTableColumn({ name: 'notes', type: 'json' }),
    ]
    const rows = [
      createTableRow({
        id: 'row_1',
        position: 0,
        createdAt: '2026-04-03T00:00:00.000Z',
        updatedAt: '2026-04-03T00:00:00.000Z',
        data: {
          name: 'Ada "Lovelace"',
          date: '2026-04-03',
          notes: { text: 'line 1\nline 2' },
        },
      }),
    ]
    // Expect: CRLF record separator, doubled quotes inside quoted cells,
    // date reformatted for display, and JSON stringified then CSV-escaped.
    expect(buildTableCsv(columns, rows)).toBe(
      'name,date,notes\r\n"Ada ""Lovelace""",04/03/2026,"{""text"":""line 1\\nline 2""}"'
    )
  })
})

View File

@@ -0,0 +1,38 @@
import type { ColumnDefinition, TableRow } from '@/lib/table'
import { storageToDisplay } from './utils'
/**
 * Serializes a value to JSON, always returning a string.
 *
 * Falls back to `String(value)` in two cases:
 * - `JSON.stringify` throws (circular structures, BigInt values), or
 * - `JSON.stringify` returns `undefined` rather than a string
 *   (`undefined`, functions, symbols) — the original implementation
 *   leaked that `undefined` despite the declared `: string` return type.
 */
function safeJsonStringify(value: unknown): string {
  try {
    // Coalesce so the declared string return type actually holds.
    return JSON.stringify(value) ?? String(value)
  } catch {
    return String(value)
  }
}
/**
 * Converts a stored cell value into the string written to CSV exports,
 * matching the table UI's display conventions.
 *
 * - null/undefined export as the empty string
 * - date columns are converted from storage form to display form
 * - json columns pass strings through unchanged and stringify other values
 * - every other column type uses default string coercion
 */
export function formatTableExportValue(value: unknown, column: ColumnDefinition): string {
  if (value == null) return ''
  if (column.type === 'date') {
    return storageToDisplay(String(value))
  }
  if (column.type === 'json') {
    return typeof value === 'string' ? value : safeJsonStringify(value)
  }
  return String(value)
}
/**
 * Escapes a single CSV cell per RFC 4180: when the value contains a
 * quote, comma, or line break it is wrapped in double quotes with any
 * embedded quotes doubled; otherwise it is returned unchanged.
 */
export function escapeCsvCell(value: string): string {
  const needsQuoting = /[",\n\r]/.test(value)
  if (!needsQuoting) return value
  const doubledQuotes = value.split('"').join('""')
  return `"${doubledQuotes}"`
}
/**
 * Builds a CSV document (CRLF record separators) from the table's column
 * definitions and rows: one header line of escaped column names followed
 * by one line per row, each cell formatted for display then CSV-escaped.
 */
export function buildTableCsv(columns: ColumnDefinition[], rows: TableRow[]): string {
  const lines: string[] = [columns.map((column) => escapeCsvCell(column.name)).join(',')]
  for (const row of rows) {
    const cells = columns.map((column) =>
      escapeCsvCell(formatTableExportValue(row.data[column.name], column))
    )
    lines.push(cells.join(','))
  }
  return lines.join('\r\n')
}

View File

@@ -1,2 +1,3 @@
export * from './use-context-menu'
export * from './use-export-table'
export * from './use-table-data'

View File

@@ -1,6 +1,6 @@
import { useCallback, useState } from 'react'
import type { TableRow } from '@/lib/table'
import type { ContextMenuState } from '../types'
import type { ContextMenuState } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
interface UseContextMenuReturn {
contextMenu: ContextMenuState

View File

@@ -0,0 +1,84 @@
'use client'
import { useCallback, useRef, useState } from 'react'
import { usePostHog } from 'posthog-js/react'
import { toast } from '@/components/emcn'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import { captureEvent } from '@/lib/posthog/client'
import type { ColumnDefinition } from '@/lib/table'
import { buildTableCsv } from '@/app/workspace/[workspaceId]/tables/[tableId]/export'
import type { QueryOptions } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import { fetchAllTableRows } from '@/hooks/queries/tables'
/** Inputs for the table CSV export hook. */
interface UseExportTableParams {
  // Workspace that owns the table; required by the rows fetch.
  workspaceId: string
  // Id of the table being exported.
  tableId: string
  // Display name used (trimmed + sanitized) as the download file name;
  // falls back to 'table' when empty or absent.
  tableName?: string | null
  // Column definitions that determine CSV columns, order, and formatting.
  columns: ColumnDefinition[]
  // Current filter/sort so the export matches the user's current view.
  queryOptions: QueryOptions
  // Permission gate; when false the export handler is a no-op.
  canExport: boolean
}
/**
 * React hook providing a guarded "export table as CSV" action.
 *
 * The returned handler fetches every row matching the current
 * filter/sort, builds a CSV with the table's display conventions, and
 * triggers a client-side download. Failures surface as an error toast.
 *
 * Returns `isExporting` (drives UI feedback, e.g. a spinner) and
 * `handleExportTable` (the async export action).
 */
export function useExportTable({
  workspaceId,
  tableId,
  tableName,
  columns,
  queryOptions,
  canExport,
}: UseExportTableParams) {
  const posthog = usePostHog()
  const [isExporting, setIsExporting] = useState(false)
  // Ref mirrors the state so the async callback can reject re-entrant
  // calls synchronously (state updates are not visible until re-render).
  const isExportingRef = useRef(false)
  const handleExportTable = useCallback(async () => {
    // Bail on missing permission/ids, or if an export is already running.
    if (!canExport || !workspaceId || !tableId || isExportingRef.current) return
    isExportingRef.current = true
    setIsExporting(true)
    try {
      // Fetch all rows under the active filter/sort so the CSV matches
      // exactly what the user currently sees in the table view.
      const { rows } = await fetchAllTableRows({
        workspaceId,
        tableId,
        filter: queryOptions.filter,
        sort: queryOptions.sort,
      })
      // Sanitize the table name for use as a file name; empty/whitespace
      // names fall back to 'table'.
      const filename = `${sanitizePathSegment(tableName?.trim() || 'table')}.csv`
      const csvContent = buildTableCsv(columns, rows)
      downloadFile(csvContent, filename, 'text/csv;charset=utf-8;')
      // Analytics: record export shape, not content.
      captureEvent(posthog, 'table_exported', {
        workspace_id: workspaceId,
        table_id: tableId,
        row_count: rows.length,
        column_count: columns.length,
        has_filter: Boolean(queryOptions.filter),
        has_sort: Boolean(queryOptions.sort),
      })
    } catch (error) {
      toast.error(error instanceof Error ? error.message : 'Failed to export table', {
        duration: 5000,
      })
    } finally {
      // Always clear both guards, success or failure.
      isExportingRef.current = false
      setIsExporting(false)
    }
  }, [
    canExport,
    columns,
    posthog,
    queryOptions.filter,
    queryOptions.sort,
    tableId,
    tableName,
    workspaceId,
  ])
  return {
    isExporting,
    handleExportTable,
  }
}

View File

@@ -1,6 +1,7 @@
import type { TableDefinition, TableRow } from '@/lib/table'
import { TABLE_LIMITS } from '@/lib/table/constants'
import type { QueryOptions } from '@/app/workspace/[workspaceId]/tables/[tableId]/types'
import { useTable, useTableRows } from '@/hooks/queries/tables'
import type { QueryOptions } from '../types'
interface UseTableDataParams {
workspaceId: string
@@ -30,7 +31,7 @@ export function useTableData({
} = useTableRows({
workspaceId,
tableId,
limit: 1000,
limit: TABLE_LIMITS.MAX_QUERY_LIMIT,
offset: 0,
filter: queryOptions.filter,
sort: queryOptions.sort,

View File

@@ -68,9 +68,8 @@ export function Tables() {
const { data: tables = [], isLoading, error } = useTablesList(workspaceId)
const { data: members } = useWorkspaceMembersQuery(workspaceId)
if (error) {
logger.error('Failed to load tables:', error)
}
if (error) logger.error('Failed to load tables:', error)
const deleteTable = useDeleteTable(workspaceId)
const createTable = useCreateTable(workspaceId)
const uploadCsv = useUploadCsvToTable()

View File

@@ -839,6 +839,7 @@ export const Panel = memo(function Panel({ workspaceId: propWorkspaceId }: Panel
onSendQueuedMessage={copilotSendNow}
onEditQueuedMessage={handleCopilotEditQueuedMessage}
userId={session?.user?.id}
chatId={copilotResolvedChatId}
editValue={copilotEditingInputValue}
onEditValueConsumed={clearCopilotEditingValue}
layout='copilot-view'

View File

@@ -12,7 +12,6 @@ import {
Button,
Combobox,
Input,
Label,
Modal,
ModalBody,
ModalContent,
@@ -432,7 +431,7 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
<div ref={scrollContainerRef} className='min-h-0 flex-1 overflow-y-auto'>
<div className='space-y-3'>
<div className='flex flex-col gap-2'>
<Label htmlFor='type'>Request</Label>
<p className='font-medium text-[var(--text-secondary)] text-sm'>Request</p>
<Combobox
id='type'
options={REQUEST_TYPE_OPTIONS}
@@ -447,7 +446,7 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
</div>
<div className='flex flex-col gap-2'>
<Label htmlFor='subject'>Subject</Label>
<p className='font-medium text-[var(--text-secondary)] text-sm'>Subject</p>
<Input
id='subject'
placeholder='Brief description of your request'
@@ -457,7 +456,7 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
</div>
<div className='flex flex-col gap-2'>
<Label htmlFor='message'>Message</Label>
<p className='font-medium text-[var(--text-secondary)] text-sm'>Message</p>
<Textarea
id='message'
placeholder='Please provide details about your request...'
@@ -468,7 +467,9 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
</div>
<div className='flex flex-col gap-2'>
<Label>Attach Images (Optional)</Label>
<p className='font-medium text-[var(--text-secondary)] text-sm'>
Attach Images (Optional)
</p>
<Button
type='button'
variant='default'
@@ -505,7 +506,9 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
{images.length > 0 && (
<div className='space-y-2'>
<Label>Uploaded Images</Label>
<p className='font-medium text-[var(--text-secondary)] text-sm'>
Uploaded Images
</p>
<div className='grid grid-cols-2 gap-3'>
{images.map((image, index) => (
<div

View File

@@ -3,11 +3,13 @@
import { useCallback, useDeferredValue, useEffect, useMemo, useRef, useState } from 'react'
import { Command } from 'cmdk'
import { useParams, useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { createPortal } from 'react-dom'
import { Library } from '@/components/emcn'
import { Calendar, Database, File, HelpCircle, Settings, Table } from '@/components/emcn/icons'
import { Search } from '@/components/emcn/icons/search'
import { cn } from '@/lib/core/utils/cn'
import { captureEvent } from '@/lib/posthog/client'
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
import { SIDEBAR_SCROLL_EVENT } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'
import { usePermissionConfig } from '@/hooks/use-permission-config'
@@ -55,11 +57,14 @@ export function SearchModal({
const [mounted, setMounted] = useState(false)
const { navigateToSettings } = useSettingsNavigation()
const { config: permissionConfig } = usePermissionConfig()
const posthog = usePostHog()
const routerRef = useRef(router)
routerRef.current = router
const onOpenChangeRef = useRef(onOpenChange)
onOpenChangeRef.current = onOpenChange
const posthogRef = useRef(posthog)
posthogRef.current = posthog
useEffect(() => {
setMounted(true)
@@ -154,6 +159,8 @@ export function SearchModal({
}, [open])
const deferredSearch = useDeferredValue(search)
const deferredSearchRef = useRef(deferredSearch)
deferredSearchRef.current = deferredSearch
const handleSearchChange = useCallback((value: string) => {
setSearch(value)
@@ -188,59 +195,151 @@ export function SearchModal({
detail: { type: block.type, enableTriggerMode },
})
)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: type,
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[]
[workspaceId]
)
const handleToolOperationSelect = useCallback((op: SearchToolOperationItem) => {
window.dispatchEvent(
new CustomEvent('add-block-from-toolbar', {
detail: { type: op.blockType, presetOperation: op.operationId },
})
)
onOpenChangeRef.current(false)
}, [])
const handleWorkflowSelect = useCallback((workflow: WorkflowItem) => {
if (!workflow.isCurrent && workflow.href) {
routerRef.current.push(workflow.href)
const handleToolOperationSelect = useCallback(
(op: SearchToolOperationItem) => {
window.dispatchEvent(
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: workflow.id } })
new CustomEvent('add-block-from-toolbar', {
detail: { type: op.blockType, presetOperation: op.operationId },
})
)
}
onOpenChangeRef.current(false)
}, [])
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'tool_operation',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleWorkspaceSelect = useCallback((workspace: WorkspaceItem) => {
if (!workspace.isCurrent && workspace.href) {
routerRef.current.push(workspace.href)
}
onOpenChangeRef.current(false)
}, [])
const handleTaskSelect = useCallback((task: TaskItem) => {
routerRef.current.push(task.href)
onOpenChangeRef.current(false)
}, [])
const handlePageSelect = useCallback((page: PageItem) => {
if (page.onClick) {
page.onClick()
} else if (page.href) {
if (page.href.startsWith('http')) {
window.open(page.href, '_blank', 'noopener,noreferrer')
} else {
routerRef.current.push(page.href)
const handleWorkflowSelect = useCallback(
(workflow: WorkflowItem) => {
if (!workflow.isCurrent && workflow.href) {
routerRef.current.push(workflow.href)
window.dispatchEvent(
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: workflow.id } })
)
}
}
onOpenChangeRef.current(false)
}, [])
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'workflow',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleDocSelect = useCallback((doc: SearchDocItem) => {
window.open(doc.href, '_blank', 'noopener,noreferrer')
onOpenChangeRef.current(false)
}, [])
const handleWorkspaceSelect = useCallback(
(workspace: WorkspaceItem) => {
if (!workspace.isCurrent && workspace.href) {
routerRef.current.push(workspace.href)
}
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'workspace',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleTaskSelect = useCallback(
(task: TaskItem) => {
routerRef.current.push(task.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'task',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleTableSelect = useCallback(
(item: TaskItem) => {
routerRef.current.push(item.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'table',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleFileSelect = useCallback(
(item: TaskItem) => {
routerRef.current.push(item.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'file',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleKbSelect = useCallback(
(item: TaskItem) => {
routerRef.current.push(item.href)
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'knowledge_base',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handlePageSelect = useCallback(
(page: PageItem) => {
if (page.onClick) {
page.onClick()
} else if (page.href) {
if (page.href.startsWith('http')) {
window.open(page.href, '_blank', 'noopener,noreferrer')
} else {
routerRef.current.push(page.href)
}
}
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'page',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleDocSelect = useCallback(
(doc: SearchDocItem) => {
window.open(doc.href, '_blank', 'noopener,noreferrer')
captureEvent(posthogRef.current, 'search_result_selected', {
result_type: 'docs',
query_length: deferredSearchRef.current.length,
workspace_id: workspaceId,
})
onOpenChangeRef.current(false)
},
[workspaceId]
)
const handleBlockSelectAsBlock = useCallback(
(block: SearchBlockItem) => handleBlockSelect(block, 'block'),
@@ -370,9 +469,9 @@ export function SearchModal({
<TriggersGroup items={filteredTriggers} onSelect={handleBlockSelectAsTrigger} />
<WorkflowsGroup items={filteredWorkflows} onSelect={handleWorkflowSelect} />
<TasksGroup items={filteredTasks} onSelect={handleTaskSelect} />
<TablesGroup items={filteredTables} onSelect={handleTaskSelect} />
<FilesGroup items={filteredFiles} onSelect={handleTaskSelect} />
<KnowledgeBasesGroup items={filteredKnowledgeBases} onSelect={handleTaskSelect} />
<TablesGroup items={filteredTables} onSelect={handleTableSelect} />
<FilesGroup items={filteredFiles} onSelect={handleFileSelect} />
<KnowledgeBasesGroup items={filteredKnowledgeBases} onSelect={handleKbSelect} />
<ToolOpsGroup items={filteredToolOps} onSelect={handleToolOperationSelect} />
<WorkspacesGroup items={filteredWorkspaces} onSelect={handleWorkspaceSelect} />
<DocsGroup items={filteredDocs} onSelect={handleDocSelect} />

View File

@@ -316,6 +316,7 @@ export const Sidebar = memo(function Sidebar() {
const sidebarRef = useRef<HTMLElement>(null)
const fileInputRef = useRef<HTMLInputElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
const scrollContentRef = useRef<HTMLDivElement>(null)
const posthog = usePostHog()
const { data: sessionData, isPending: sessionLoading } = useSession()
@@ -894,6 +895,9 @@ export const Sidebar = memo(function Sidebar() {
container.addEventListener('scroll', updateScrollState, { passive: true })
const observer = new ResizeObserver(updateScrollState)
observer.observe(container)
if (scrollContentRef.current) {
observer.observe(scrollContentRef.current)
}
return () => {
container.removeEventListener('scroll', updateScrollState)
@@ -1336,275 +1340,286 @@ export const Sidebar = memo(function Sidebar() {
!hasOverflowTop && 'border-transparent'
)}
>
<div className='tasks-section flex flex-shrink-0 flex-col' data-tour='nav-tasks'>
<div className='flex h-[18px] flex-shrink-0 items-center justify-between px-4'>
<div className='font-base text-[var(--text-icon)] text-small'>All tasks</div>
{!isCollapsed && (
<div className='flex items-center justify-center gap-2'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='h-[18px] w-[18px] rounded-sm p-0 hover-hover:bg-[var(--surface-hover)]'
onClick={handleNewTask}
>
<Plus className='h-[16px] w-[16px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<Tooltip.Shortcut keys={isMac ? '⌘⇧K' : 'Ctrl+Shift+K'}>
New task
</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
<div ref={scrollContentRef} className='flex flex-col'>
<div
className='tasks-section flex flex-shrink-0 flex-col'
data-tour='nav-tasks'
>
<div className='flex h-[18px] flex-shrink-0 items-center justify-between px-4'>
<div className='font-base text-[var(--text-icon)] text-small'>
All tasks
</div>
)}
</div>
{isCollapsed ? (
<CollapsedSidebarMenu
icon={tasksCollapsedIcon}
hover={tasksHover}
ariaLabel='Tasks'
className='mt-1.5'
primaryAction={tasksPrimaryAction}
>
{tasksLoading ? (
<DropdownMenuItem disabled>
<Loader className='h-[14px] w-[14px]' animate />
Loading...
</DropdownMenuItem>
) : (
tasks.map((task) => (
<CollapsedTaskFlyoutItem
key={task.id}
task={task}
isCurrentRoute={task.id !== 'new' && pathname === task.href}
isMenuOpen={menuOpenTaskId === task.id}
isEditing={task.id === taskFlyoutRename.editingId}
editValue={taskFlyoutRename.value}
inputRef={taskFlyoutRename.inputRef}
isRenaming={taskFlyoutRename.isSaving}
onEditValueChange={taskFlyoutRename.setValue}
onEditKeyDown={taskFlyoutRename.handleKeyDown}
onEditBlur={handleTaskRenameBlur}
onContextMenu={handleTaskContextMenu}
onMorePointerDown={handleTaskMorePointerDown}
onMoreClick={handleTaskMoreClick}
/>
))
)}
</CollapsedSidebarMenu>
) : (
<div className='mt-1.5 flex flex-col gap-0.5 px-2'>
{tasksLoading ? (
<SidebarItemSkeleton />
) : (
<>
{tasks.slice(0, visibleTaskCount).map((task) => {
const isCurrentRoute = task.id !== 'new' && pathname === task.href
const isRenaming = taskFlyoutRename.editingId === task.id
const isSelected = task.id !== 'new' && selectedTasks.has(task.id)
if (isRenaming) {
return (
<div
key={task.id}
className='mx-0.5 flex h-[30px] items-center gap-2 rounded-lg bg-[var(--surface-active)] px-2 text-sm'
>
<Blimp className='h-[16px] w-[16px] flex-shrink-0 text-[var(--text-icon)]' />
<input
ref={taskFlyoutRename.inputRef}
value={taskFlyoutRename.value}
onChange={(e) => taskFlyoutRename.setValue(e.target.value)}
onKeyDown={taskFlyoutRename.handleKeyDown}
onBlur={handleTaskRenameBlur}
className='min-w-0 flex-1 border-none bg-transparent font-base text-[14px] text-[var(--text-body)] outline-none'
/>
</div>
)
}
return (
<SidebarTaskItem
key={task.id}
task={task}
isCurrentRoute={isCurrentRoute}
isSelected={isSelected}
isActive={!!task.isActive}
isUnread={!!task.isUnread}
isMenuOpen={menuOpenTaskId === task.id}
showCollapsedTooltips={showCollapsedTooltips}
onMultiSelectClick={handleTaskClick}
onContextMenu={handleTaskContextMenu}
onMorePointerDown={handleTaskMorePointerDown}
onMoreClick={handleTaskMoreClick}
/>
)
})}
{tasks.length > visibleTaskCount && (
<button
type='button'
onClick={handleSeeMoreTasks}
className='mx-0.5 flex h-[30px] items-center gap-2 rounded-lg px-2 text-[var(--text-icon)] text-sm hover-hover:bg-[var(--surface-hover)]'
>
<MoreHorizontal className='h-[16px] w-[16px] flex-shrink-0' />
<span className='font-base'>See more</span>
</button>
)}
</>
)}
</div>
)}
</div>
<div
className='workflows-section relative mt-3.5 flex flex-col'
data-tour='nav-workflows'
>
<div className='flex h-[18px] flex-shrink-0 items-center justify-between px-4'>
<div className='font-base text-[var(--text-icon)] text-small'>Workflows</div>
{!isCollapsed && (
<div className='flex items-center justify-center gap-2'>
<DropdownMenu>
{!isCollapsed && (
<div className='flex items-center justify-center gap-2'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<DropdownMenuTrigger asChild>
<Button
variant='ghost'
className='h-[18px] w-[18px] rounded-sm p-0 hover-hover:bg-[var(--surface-hover)]'
disabled={!canEdit}
>
{isImporting || isCreatingFolder ? (
<Loader className='h-[16px] w-[16px]' animate />
) : (
<MoreHorizontal className='h-[16px] w-[16px]' />
)}
</Button>
</DropdownMenuTrigger>
<Button
variant='ghost'
className='h-[18px] w-[18px] rounded-sm p-0 hover-hover:bg-[var(--surface-hover)]'
onClick={handleNewTask}
>
<Plus className='h-[16px] w-[16px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<p>More actions</p>
<Tooltip.Shortcut keys={isMac ? '⌘⇧K' : 'Ctrl+Shift+K'}>
New task
</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
<DropdownMenuContent
align='start'
sideOffset={8}
className='min-w-[160px]'
>
<DropdownMenuItem
onSelect={handleImportWorkflow}
disabled={!canEdit || isImporting}
>
<Download />
{isImporting ? 'Importing...' : 'Import workflow'}
</DropdownMenuItem>
<DropdownMenuItem
onSelect={handleCreateFolder}
disabled={!canEdit || isCreatingFolder}
>
<FolderPlus />
{isCreatingFolder ? 'Creating folder...' : 'Create folder'}
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='h-[18px] w-[18px] rounded-sm p-0 hover-hover:bg-[var(--surface-hover)]'
onClick={handleCreateWorkflow}
disabled={isCreatingWorkflow || !canEdit}
>
<Plus className='h-[16px] w-[16px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
{isCreatingWorkflow ? (
<p>Creating workflow...</p>
) : (
<Tooltip.Shortcut keys={isMac ? '⌘⇧P' : 'Ctrl+Shift+P'}>
New workflow
</Tooltip.Shortcut>
</div>
)}
</div>
{isCollapsed ? (
<CollapsedSidebarMenu
icon={tasksCollapsedIcon}
hover={tasksHover}
ariaLabel='Tasks'
className='mt-1.5'
primaryAction={tasksPrimaryAction}
>
{tasksLoading ? (
<DropdownMenuItem disabled>
<Loader className='h-[14px] w-[14px]' animate />
Loading...
</DropdownMenuItem>
) : (
tasks.map((task) => (
<CollapsedTaskFlyoutItem
key={task.id}
task={task}
isCurrentRoute={task.id !== 'new' && pathname === task.href}
isMenuOpen={menuOpenTaskId === task.id}
isEditing={task.id === taskFlyoutRename.editingId}
editValue={taskFlyoutRename.value}
inputRef={taskFlyoutRename.inputRef}
isRenaming={taskFlyoutRename.isSaving}
onEditValueChange={taskFlyoutRename.setValue}
onEditKeyDown={taskFlyoutRename.handleKeyDown}
onEditBlur={handleTaskRenameBlur}
onContextMenu={handleTaskContextMenu}
onMorePointerDown={handleTaskMorePointerDown}
onMoreClick={handleTaskMoreClick}
/>
))
)}
</CollapsedSidebarMenu>
) : (
<div className='mt-1.5 flex flex-col gap-0.5 px-2'>
{tasksLoading ? (
<SidebarItemSkeleton />
) : (
<>
{tasks.slice(0, visibleTaskCount).map((task) => {
const isCurrentRoute = task.id !== 'new' && pathname === task.href
const isRenaming = taskFlyoutRename.editingId === task.id
const isSelected = task.id !== 'new' && selectedTasks.has(task.id)
if (isRenaming) {
return (
<div
key={task.id}
className='mx-0.5 flex h-[30px] items-center gap-2 rounded-lg bg-[var(--surface-active)] px-2 text-sm'
>
<Blimp className='h-[16px] w-[16px] flex-shrink-0 text-[var(--text-icon)]' />
<input
ref={taskFlyoutRename.inputRef}
value={taskFlyoutRename.value}
onChange={(e) => taskFlyoutRename.setValue(e.target.value)}
onKeyDown={taskFlyoutRename.handleKeyDown}
onBlur={handleTaskRenameBlur}
className='min-w-0 flex-1 border-none bg-transparent font-base text-[14px] text-[var(--text-body)] outline-none'
/>
</div>
)
}
return (
<SidebarTaskItem
key={task.id}
task={task}
isCurrentRoute={isCurrentRoute}
isSelected={isSelected}
isActive={!!task.isActive}
isUnread={!!task.isUnread}
isMenuOpen={menuOpenTaskId === task.id}
showCollapsedTooltips={showCollapsedTooltips}
onMultiSelectClick={handleTaskClick}
onContextMenu={handleTaskContextMenu}
onMorePointerDown={handleTaskMorePointerDown}
onMoreClick={handleTaskMoreClick}
/>
)
})}
{tasks.length > visibleTaskCount && (
<button
type='button'
onClick={handleSeeMoreTasks}
className='mx-0.5 flex h-[30px] items-center gap-2 rounded-lg px-2 text-[var(--text-icon)] text-sm hover-hover:bg-[var(--surface-hover)]'
>
<MoreHorizontal className='h-[16px] w-[16px] flex-shrink-0' />
<span className='font-base'>See more</span>
</button>
)}
</Tooltip.Content>
</Tooltip.Root>
</>
)}
</div>
)}
</div>
{isCollapsed ? (
<CollapsedSidebarMenu
icon={workflowsCollapsedIcon}
hover={workflowsHover}
ariaLabel='Workflows'
className='mt-1.5'
primaryAction={workflowsPrimaryAction}
>
{workflowsLoading && regularWorkflows.length === 0 ? (
<DropdownMenuItem disabled>
<Loader className='h-[14px] w-[14px]' animate />
Loading...
</DropdownMenuItem>
) : regularWorkflows.length === 0 ? (
<DropdownMenuItem disabled>No workflows yet</DropdownMenuItem>
) : (
<>
<CollapsedFolderItems
nodes={folderTree}
workflowsByFolder={workflowsByFolder}
workspaceId={workspaceId}
currentWorkflowId={workflowId}
editingWorkflowId={workflowFlyoutRename.editingId}
editingValue={workflowFlyoutRename.value}
editInputRef={workflowFlyoutRename.inputRef}
isRenamingWorkflow={workflowFlyoutRename.isSaving}
onEditValueChange={workflowFlyoutRename.setValue}
onEditKeyDown={workflowFlyoutRename.handleKeyDown}
onEditBlur={handleWorkflowRenameBlur}
onWorkflowOpenInNewTab={handleCollapsedWorkflowOpenInNewTab}
onWorkflowRename={handleCollapsedWorkflowRename}
canRenameWorkflow={canEdit}
/>
{(workflowsByFolder.root || []).map((workflow) => (
<CollapsedWorkflowFlyoutItem
key={workflow.id}
workflow={workflow}
href={`/workspace/${workspaceId}/w/${workflow.id}`}
isCurrentRoute={workflow.id === workflowId}
isEditing={workflow.id === workflowFlyoutRename.editingId}
editValue={workflowFlyoutRename.value}
inputRef={workflowFlyoutRename.inputRef}
isRenaming={workflowFlyoutRename.isSaving}
<div
className='workflows-section relative mt-3.5 flex flex-col'
data-tour='nav-workflows'
>
<div className='flex h-[18px] flex-shrink-0 items-center justify-between px-4'>
<div className='font-base text-[var(--text-icon)] text-small'>
Workflows
</div>
{!isCollapsed && (
<div className='flex items-center justify-center gap-2'>
<DropdownMenu>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<DropdownMenuTrigger asChild>
<Button
variant='ghost'
className='h-[18px] w-[18px] rounded-sm p-0 hover-hover:bg-[var(--surface-hover)]'
disabled={!canEdit}
>
{isImporting || isCreatingFolder ? (
<Loader className='h-[16px] w-[16px]' animate />
) : (
<MoreHorizontal className='h-[16px] w-[16px]' />
)}
</Button>
</DropdownMenuTrigger>
</Tooltip.Trigger>
<Tooltip.Content>
<p>More actions</p>
</Tooltip.Content>
</Tooltip.Root>
<DropdownMenuContent
align='start'
sideOffset={8}
className='min-w-[160px]'
>
<DropdownMenuItem
onSelect={handleImportWorkflow}
disabled={!canEdit || isImporting}
>
<Download />
{isImporting ? 'Importing...' : 'Import workflow'}
</DropdownMenuItem>
<DropdownMenuItem
onSelect={handleCreateFolder}
disabled={!canEdit || isCreatingFolder}
>
<FolderPlus />
{isCreatingFolder ? 'Creating folder...' : 'Create folder'}
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='h-[18px] w-[18px] rounded-sm p-0 hover-hover:bg-[var(--surface-hover)]'
onClick={handleCreateWorkflow}
disabled={isCreatingWorkflow || !canEdit}
>
<Plus className='h-[16px] w-[16px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
{isCreatingWorkflow ? (
<p>Creating workflow...</p>
) : (
<Tooltip.Shortcut keys={isMac ? '⌘⇧P' : 'Ctrl+Shift+P'}>
New workflow
</Tooltip.Shortcut>
)}
</Tooltip.Content>
</Tooltip.Root>
</div>
)}
</div>
{isCollapsed ? (
<CollapsedSidebarMenu
icon={workflowsCollapsedIcon}
hover={workflowsHover}
ariaLabel='Workflows'
className='mt-1.5'
primaryAction={workflowsPrimaryAction}
>
{workflowsLoading && regularWorkflows.length === 0 ? (
<DropdownMenuItem disabled>
<Loader className='h-[14px] w-[14px]' animate />
Loading...
</DropdownMenuItem>
) : regularWorkflows.length === 0 ? (
<DropdownMenuItem disabled>No workflows yet</DropdownMenuItem>
) : (
<>
<CollapsedFolderItems
nodes={folderTree}
workflowsByFolder={workflowsByFolder}
workspaceId={workspaceId}
currentWorkflowId={workflowId}
editingWorkflowId={workflowFlyoutRename.editingId}
editingValue={workflowFlyoutRename.value}
editInputRef={workflowFlyoutRename.inputRef}
isRenamingWorkflow={workflowFlyoutRename.isSaving}
onEditValueChange={workflowFlyoutRename.setValue}
onEditKeyDown={workflowFlyoutRename.handleKeyDown}
onEditBlur={handleWorkflowRenameBlur}
onOpenInNewTab={() => handleCollapsedWorkflowOpenInNewTab(workflow)}
onRename={() => handleCollapsedWorkflowRename(workflow)}
canRename={canEdit}
onWorkflowOpenInNewTab={handleCollapsedWorkflowOpenInNewTab}
onWorkflowRename={handleCollapsedWorkflowRename}
canRenameWorkflow={canEdit}
/>
))}
</>
)}
</CollapsedSidebarMenu>
) : (
<div className='mt-1.5 px-2'>
{workflowsLoading && regularWorkflows.length === 0 && (
<SidebarItemSkeleton />
)}
<WorkflowList
workspaceId={workspaceId}
workflowId={workflowId}
regularWorkflows={regularWorkflows}
isLoading={isLoading}
canReorder={canEdit}
handleFileChange={handleImportFileChange}
fileInputRef={fileInputRef}
scrollContainerRef={scrollContainerRef}
onCreateWorkflow={handleCreateWorkflow}
onCreateFolder={handleCreateFolder}
disableCreate={!canEdit || isCreatingWorkflow || isCreatingFolder}
/>
</div>
)}
{(workflowsByFolder.root || []).map((workflow) => (
<CollapsedWorkflowFlyoutItem
key={workflow.id}
workflow={workflow}
href={`/workspace/${workspaceId}/w/${workflow.id}`}
isCurrentRoute={workflow.id === workflowId}
isEditing={workflow.id === workflowFlyoutRename.editingId}
editValue={workflowFlyoutRename.value}
inputRef={workflowFlyoutRename.inputRef}
isRenaming={workflowFlyoutRename.isSaving}
onEditValueChange={workflowFlyoutRename.setValue}
onEditKeyDown={workflowFlyoutRename.handleKeyDown}
onEditBlur={handleWorkflowRenameBlur}
onOpenInNewTab={() =>
handleCollapsedWorkflowOpenInNewTab(workflow)
}
onRename={() => handleCollapsedWorkflowRename(workflow)}
canRename={canEdit}
/>
))}
</>
)}
</CollapsedSidebarMenu>
) : (
<div className='mt-1.5 px-2'>
{workflowsLoading && regularWorkflows.length === 0 && (
<SidebarItemSkeleton />
)}
<WorkflowList
workspaceId={workspaceId}
workflowId={workflowId}
regularWorkflows={regularWorkflows}
isLoading={isLoading}
canReorder={canEdit}
handleFileChange={handleImportFileChange}
fileInputRef={fileInputRef}
scrollContainerRef={scrollContainerRef}
onCreateWorkflow={handleCreateWorkflow}
onCreateFolder={handleCreateFolder}
disableCreate={!canEdit || isCreatingWorkflow || isCreatingFolder}
/>
</div>
)}
</div>
</div>
</div>

View File

@@ -1,13 +1,12 @@
import { useCallback, useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import { getFolderById } from '@/lib/folders/tree'
import {
downloadFile,
exportFolderToZip,
type FolderExportData,
fetchWorkflowForExport,
sanitizePathSegment,
type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'
import { useFolderMap } from '@/hooks/queries/folders'

View File

@@ -1,8 +1,8 @@
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { downloadFile } from '@/lib/core/utils/file-download'
import {
downloadFile,
exportWorkflowsToZip,
type FolderExportData,
fetchWorkflowForExport,

View File

@@ -1,12 +1,13 @@
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import { captureEvent } from '@/lib/posthog/client'
import {
downloadFile,
exportWorkflowsToZip,
exportWorkflowToJson,
fetchWorkflowForExport,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { getWorkflows } from '@/hooks/queries/utils/workflow-cache'
import { useFolderStore } from '@/stores/folders/store'
@@ -27,6 +28,7 @@ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) {
const [isExporting, setIsExporting] = useState(false)
const params = useParams()
const workspaceId = params.workspaceId as string | undefined
const posthog = usePostHog()
const onSuccessRef = useRef(onSuccess)
onSuccessRef.current = onSuccess
@@ -34,6 +36,9 @@ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) {
const workspaceIdRef = useRef(workspaceId)
workspaceIdRef.current = workspaceId
const posthogRef = useRef(posthog)
posthogRef.current = posthog
/**
* Export the workflow(s) to JSON or ZIP
* - Single workflow: exports as JSON file
@@ -100,6 +105,12 @@ export function useExportWorkflow({ onSuccess }: UseExportWorkflowProps = {}) {
const { clearSelection } = useFolderStore.getState()
clearSelection()
captureEvent(posthogRef.current, 'workflow_exported', {
workspace_id: workspaceIdRef.current ?? '',
workflow_count: exportedWorkflows.length,
format: exportedWorkflows.length === 1 ? 'json' : 'zip',
})
logger.info('Workflow(s) exported successfully', {
workflowIds: workflowIdsToExport,
count: exportedWorkflows.length,

View File

@@ -1,11 +1,10 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { downloadFile, sanitizePathSegment } from '@/lib/core/utils/file-download'
import {
downloadFile,
exportWorkspaceToZip,
type FolderExportData,
fetchWorkflowForExport,
sanitizePathSegment,
type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'

View File

@@ -1,12 +1,14 @@
import { useCallback, useState } from 'react'
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import { captureEvent } from '@/lib/posthog/client'
import {
extractWorkflowsFromFiles,
extractWorkflowsFromZip,
persistImportedWorkflow,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { useCreateFolder } from '@/hooks/queries/folders'
import { folderKeys } from '@/hooks/queries/utils/folder-keys'
@@ -36,6 +38,9 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const queryClient = useQueryClient()
const createFolderMutation = useCreateFolder()
const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)
const posthog = usePostHog()
const posthogRef = useRef(posthog)
posthogRef.current = posthog
const [isImporting, setIsImporting] = useState(false)
/**
@@ -204,6 +209,11 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)
if (importedWorkflowIds.length > 0) {
captureEvent(posthogRef.current, 'workflow_imported', {
workspace_id: workspaceId,
workflow_count: importedWorkflowIds.length,
format: hasZip && fileArray.length === 1 ? 'zip' : 'json',
})
router.push(
`/workspace/${workspaceId}/w/${importedWorkflowIds[importedWorkflowIds.length - 1]}`
)

View File

@@ -1,11 +1,11 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useRouter } from 'next/navigation'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import {
extractWorkflowName,
extractWorkflowsFromZip,
parseWorkflowJson,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { useCreateFolder } from '@/hooks/queries/folders'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'

View File

@@ -0,0 +1,571 @@
import { CloudWatchIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { IntegrationType } from '@/blocks/types'
import type {
CloudWatchDescribeAlarmsResponse,
CloudWatchDescribeLogGroupsResponse,
CloudWatchDescribeLogStreamsResponse,
CloudWatchGetLogEventsResponse,
CloudWatchGetMetricStatisticsResponse,
CloudWatchListMetricsResponse,
CloudWatchQueryLogsResponse,
} from '@/tools/cloudwatch/types'
/**
 * AWS CloudWatch block configuration.
 *
 * Exposes seven operations over CloudWatch Logs, Metrics, and Alarms:
 * Log Insights queries, log group/stream discovery, raw log event retrieval,
 * metric listing, metric statistics, and alarm inspection. Authentication is
 * via static AWS credentials (access key id + secret access key) plus region.
 *
 * The `tools.config.params` mapper validates the per-operation required
 * fields and throws a descriptive `Error` when one is missing, so failures
 * surface before any AWS call is attempted.
 */
export const CloudWatchBlock: BlockConfig<
  | CloudWatchQueryLogsResponse
  | CloudWatchDescribeLogGroupsResponse
  | CloudWatchDescribeLogStreamsResponse
  | CloudWatchGetLogEventsResponse
  | CloudWatchDescribeAlarmsResponse
  | CloudWatchListMetricsResponse
  | CloudWatchGetMetricStatisticsResponse
> = {
  type: 'cloudwatch',
  name: 'CloudWatch',
  description: 'Query and monitor AWS CloudWatch logs, metrics, and alarms',
  longDescription:
    'Integrate AWS CloudWatch into workflows. Run Log Insights queries, list log groups, retrieve log events, list and get metrics, and monitor alarms. Requires AWS access key and secret access key.',
  category: 'tools',
  integrationType: IntegrationType.Analytics,
  tags: ['cloud', 'monitoring'],
  bgColor: 'linear-gradient(45deg, #B0084D 0%, #FF4F8B 100%)',
  icon: CloudWatchIcon,
  subBlocks: [
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        { label: 'Query Logs (Insights)', id: 'query_logs' },
        { label: 'Describe Log Groups', id: 'describe_log_groups' },
        { label: 'Get Log Events', id: 'get_log_events' },
        { label: 'Describe Log Streams', id: 'describe_log_streams' },
        { label: 'List Metrics', id: 'list_metrics' },
        { label: 'Get Metric Statistics', id: 'get_metric_statistics' },
        { label: 'Describe Alarms', id: 'describe_alarms' },
      ],
      value: () => 'query_logs',
    },
    {
      id: 'awsRegion',
      title: 'AWS Region',
      type: 'short-input',
      placeholder: 'us-east-1',
      required: true,
    },
    {
      id: 'awsAccessKeyId',
      title: 'AWS Access Key ID',
      type: 'short-input',
      placeholder: 'AKIA...',
      password: true,
      required: true,
    },
    {
      id: 'awsSecretAccessKey',
      title: 'AWS Secret Access Key',
      type: 'short-input',
      placeholder: 'Your secret access key',
      password: true,
      required: true,
    },
    // Query Logs fields — basic mode uses a selector, advanced mode free text.
    // Both feed the same canonical param `logGroupNames`.
    {
      id: 'logGroupSelector',
      title: 'Log Group',
      type: 'file-selector',
      canonicalParamId: 'logGroupNames',
      selectorKey: 'cloudwatch.logGroups',
      dependsOn: ['awsAccessKeyId', 'awsSecretAccessKey', 'awsRegion'],
      placeholder: 'Select a log group',
      condition: { field: 'operation', value: 'query_logs' },
      required: { field: 'operation', value: 'query_logs' },
      mode: 'basic',
    },
    {
      id: 'logGroupNamesInput',
      title: 'Log Group Names',
      type: 'short-input',
      canonicalParamId: 'logGroupNames',
      placeholder: '/aws/lambda/my-func, /aws/ecs/my-service',
      condition: { field: 'operation', value: 'query_logs' },
      required: { field: 'operation', value: 'query_logs' },
      mode: 'advanced',
    },
    {
      id: 'queryString',
      title: 'Query',
      type: 'code',
      placeholder: 'fields @timestamp, @message\n| sort @timestamp desc\n| limit 20',
      condition: { field: 'operation', value: 'query_logs' },
      required: { field: 'operation', value: 'query_logs' },
      wandConfig: {
        enabled: true,
        prompt: `Generate a CloudWatch Log Insights query based on the user's description.
The query language supports: fields, filter, stats, sort, limit, parse, display.
Common patterns:
- fields @timestamp, @message | sort @timestamp desc | limit 20
- filter @message like /ERROR/ | stats count(*) by bin(1h)
- stats avg(duration) as avgDuration by functionName | sort avgDuration desc
- filter @message like /Exception/ | parse @message "* Exception: *" as prefix, errorMsg
- stats count(*) as requestCount by status | sort requestCount desc
Return ONLY the query — no explanations, no markdown code blocks.`,
        placeholder: 'Describe what you want to find in the logs...',
      },
    },
    {
      id: 'startTime',
      title: 'Start Time (Unix epoch seconds)',
      type: 'short-input',
      placeholder: 'e.g., 1711900800',
      condition: {
        field: 'operation',
        value: ['query_logs', 'get_log_events', 'get_metric_statistics'],
      },
      required: { field: 'operation', value: ['query_logs', 'get_metric_statistics'] },
    },
    {
      id: 'endTime',
      title: 'End Time (Unix epoch seconds)',
      type: 'short-input',
      placeholder: 'e.g., 1711987200',
      condition: {
        field: 'operation',
        value: ['query_logs', 'get_log_events', 'get_metric_statistics'],
      },
      required: { field: 'operation', value: ['query_logs', 'get_metric_statistics'] },
    },
    // Describe Log Groups fields
    {
      id: 'prefix',
      title: 'Log Group Name Prefix',
      type: 'short-input',
      placeholder: '/aws/lambda/',
      condition: { field: 'operation', value: 'describe_log_groups' },
    },
    // Get Log Events / Describe Log Streams — shared log group selector
    {
      id: 'logGroupNameSelector',
      title: 'Log Group',
      type: 'file-selector',
      canonicalParamId: 'logGroupName',
      selectorKey: 'cloudwatch.logGroups',
      dependsOn: ['awsAccessKeyId', 'awsSecretAccessKey', 'awsRegion'],
      placeholder: 'Select a log group',
      condition: { field: 'operation', value: ['get_log_events', 'describe_log_streams'] },
      required: { field: 'operation', value: ['get_log_events', 'describe_log_streams'] },
      mode: 'basic',
    },
    {
      id: 'logGroupNameInput',
      title: 'Log Group Name',
      type: 'short-input',
      canonicalParamId: 'logGroupName',
      placeholder: '/aws/lambda/my-func',
      condition: { field: 'operation', value: ['get_log_events', 'describe_log_streams'] },
      required: { field: 'operation', value: ['get_log_events', 'describe_log_streams'] },
      mode: 'advanced',
    },
    // Describe Log Streams — stream prefix filter
    {
      id: 'streamPrefix',
      title: 'Stream Name Prefix',
      type: 'short-input',
      placeholder: '2024/03/31/',
      condition: { field: 'operation', value: 'describe_log_streams' },
    },
    // Get Log Events — log stream selector (cascading: depends on log group)
    {
      id: 'logStreamNameSelector',
      title: 'Log Stream',
      type: 'file-selector',
      canonicalParamId: 'logStreamName',
      selectorKey: 'cloudwatch.logStreams',
      dependsOn: ['awsAccessKeyId', 'awsSecretAccessKey', 'awsRegion', 'logGroupNameSelector'],
      placeholder: 'Select a log stream',
      condition: { field: 'operation', value: 'get_log_events' },
      required: { field: 'operation', value: 'get_log_events' },
      mode: 'basic',
    },
    {
      id: 'logStreamNameInput',
      title: 'Log Stream Name',
      type: 'short-input',
      canonicalParamId: 'logStreamName',
      placeholder: '2024/03/31/[$LATEST]abc123',
      condition: { field: 'operation', value: 'get_log_events' },
      required: { field: 'operation', value: 'get_log_events' },
      mode: 'advanced',
    },
    // List Metrics fields
    {
      id: 'metricNamespace',
      title: 'Namespace',
      type: 'short-input',
      placeholder: 'e.g., AWS/EC2, AWS/Lambda, AWS/RDS',
      condition: { field: 'operation', value: ['list_metrics', 'get_metric_statistics'] },
      required: { field: 'operation', value: 'get_metric_statistics' },
    },
    {
      id: 'metricName',
      title: 'Metric Name',
      type: 'short-input',
      placeholder: 'e.g., CPUUtilization, Invocations',
      condition: { field: 'operation', value: ['list_metrics', 'get_metric_statistics'] },
      required: { field: 'operation', value: 'get_metric_statistics' },
    },
    {
      id: 'recentlyActive',
      title: 'Recently Active Only',
      type: 'switch',
      condition: { field: 'operation', value: 'list_metrics' },
    },
    // Get Metric Statistics fields
    {
      id: 'metricPeriod',
      title: 'Period (seconds)',
      type: 'short-input',
      placeholder: 'e.g., 60, 300, 3600',
      condition: { field: 'operation', value: 'get_metric_statistics' },
      required: { field: 'operation', value: 'get_metric_statistics' },
    },
    {
      id: 'metricStatistics',
      title: 'Statistics',
      type: 'dropdown',
      options: [
        { label: 'Average', id: 'Average' },
        { label: 'Sum', id: 'Sum' },
        { label: 'Minimum', id: 'Minimum' },
        { label: 'Maximum', id: 'Maximum' },
        { label: 'Sample Count', id: 'SampleCount' },
      ],
      condition: { field: 'operation', value: 'get_metric_statistics' },
      required: { field: 'operation', value: 'get_metric_statistics' },
    },
    {
      id: 'metricDimensions',
      title: 'Dimensions',
      type: 'table',
      columns: ['name', 'value'],
      condition: { field: 'operation', value: 'get_metric_statistics' },
    },
    // Describe Alarms fields
    {
      id: 'alarmNamePrefix',
      title: 'Alarm Name Prefix',
      type: 'short-input',
      placeholder: 'my-service-',
      condition: { field: 'operation', value: 'describe_alarms' },
    },
    {
      id: 'stateValue',
      title: 'State',
      type: 'dropdown',
      options: [
        { label: 'All States', id: '' },
        { label: 'OK', id: 'OK' },
        { label: 'ALARM', id: 'ALARM' },
        { label: 'INSUFFICIENT_DATA', id: 'INSUFFICIENT_DATA' },
      ],
      condition: { field: 'operation', value: 'describe_alarms' },
    },
    {
      id: 'alarmType',
      title: 'Alarm Type',
      type: 'dropdown',
      options: [
        { label: 'All Types', id: '' },
        { label: 'Metric Alarm', id: 'MetricAlarm' },
        { label: 'Composite Alarm', id: 'CompositeAlarm' },
      ],
      condition: { field: 'operation', value: 'describe_alarms' },
    },
    // Shared limit field
    {
      id: 'limit',
      title: 'Limit',
      type: 'short-input',
      placeholder: '100',
      condition: {
        field: 'operation',
        value: [
          'query_logs',
          'describe_log_groups',
          'get_log_events',
          'describe_log_streams',
          'list_metrics',
          'describe_alarms',
        ],
      },
    },
  ],
  tools: {
    access: [
      'cloudwatch_query_logs',
      'cloudwatch_describe_log_groups',
      'cloudwatch_get_log_events',
      'cloudwatch_describe_log_streams',
      'cloudwatch_list_metrics',
      'cloudwatch_get_metric_statistics',
      'cloudwatch_describe_alarms',
    ],
    config: {
      // Maps the selected operation to its concrete tool id.
      tool: (params) => {
        switch (params.operation) {
          case 'query_logs':
            return 'cloudwatch_query_logs'
          case 'describe_log_groups':
            return 'cloudwatch_describe_log_groups'
          case 'get_log_events':
            return 'cloudwatch_get_log_events'
          case 'describe_log_streams':
            return 'cloudwatch_describe_log_streams'
          case 'list_metrics':
            return 'cloudwatch_list_metrics'
          case 'get_metric_statistics':
            return 'cloudwatch_get_metric_statistics'
          case 'describe_alarms':
            return 'cloudwatch_describe_alarms'
          default:
            throw new Error(`Invalid CloudWatch operation: ${params.operation}`)
        }
      },
      // Builds the tool-call payload for the selected operation, validating
      // the per-operation required fields up front.
      params: (params) => {
        const { operation, startTime, endTime, limit, ...rest } = params
        const awsRegion = rest.awsRegion
        const awsAccessKeyId = rest.awsAccessKeyId
        const awsSecretAccessKey = rest.awsSecretAccessKey
        // Guard against non-numeric input: parseInt('abc') is NaN, and the
        // previous `!== undefined` check alone would forward `limit: NaN` to
        // the tool. Treat an unparseable limit the same as an omitted one.
        const rawLimit = limit ? Number.parseInt(String(limit), 10) : undefined
        const parsedLimit =
          rawLimit !== undefined && Number.isFinite(rawLimit) ? rawLimit : undefined
        switch (operation) {
          case 'query_logs': {
            const logGroupNames = rest.logGroupNames
            if (!logGroupNames) {
              throw new Error('Log group names are required')
            }
            if (!startTime) {
              throw new Error('Start time is required')
            }
            if (!endTime) {
              throw new Error('End time is required')
            }
            // Advanced mode supplies a comma-separated string; basic mode may
            // already provide an array from the selector.
            const groupNames =
              typeof logGroupNames === 'string'
                ? logGroupNames
                    .split(',')
                    .map((n: string) => n.trim())
                    .filter(Boolean)
                : logGroupNames
            return {
              awsRegion,
              awsAccessKeyId,
              awsSecretAccessKey,
              logGroupNames: groupNames,
              queryString: rest.queryString,
              startTime: Number(startTime),
              endTime: Number(endTime),
              ...(parsedLimit !== undefined && { limit: parsedLimit }),
            }
          }
          case 'describe_log_groups':
            return {
              awsRegion,
              awsAccessKeyId,
              awsSecretAccessKey,
              ...(rest.prefix && { prefix: rest.prefix }),
              ...(parsedLimit !== undefined && { limit: parsedLimit }),
            }
          case 'get_log_events': {
            if (!rest.logGroupName) {
              throw new Error('Log group name is required')
            }
            if (!rest.logStreamName) {
              throw new Error('Log stream name is required')
            }
            return {
              awsRegion,
              awsAccessKeyId,
              awsSecretAccessKey,
              logGroupName: rest.logGroupName,
              logStreamName: rest.logStreamName,
              // Time bounds are optional for raw event retrieval.
              ...(startTime && { startTime: Number(startTime) }),
              ...(endTime && { endTime: Number(endTime) }),
              ...(parsedLimit !== undefined && { limit: parsedLimit }),
            }
          }
          case 'describe_log_streams': {
            if (!rest.logGroupName) {
              throw new Error('Log group name is required')
            }
            return {
              awsRegion,
              awsAccessKeyId,
              awsSecretAccessKey,
              logGroupName: rest.logGroupName,
              ...(rest.streamPrefix && { prefix: rest.streamPrefix }),
              ...(parsedLimit !== undefined && { limit: parsedLimit }),
            }
          }
          case 'list_metrics':
            return {
              awsRegion,
              awsAccessKeyId,
              awsSecretAccessKey,
              ...(rest.metricNamespace && { namespace: rest.metricNamespace }),
              ...(rest.metricName && { metricName: rest.metricName }),
              ...(rest.recentlyActive && { recentlyActive: true }),
              ...(parsedLimit !== undefined && { limit: parsedLimit }),
            }
          case 'get_metric_statistics': {
            if (!rest.metricNamespace) {
              throw new Error('Namespace is required')
            }
            if (!rest.metricName) {
              throw new Error('Metric name is required')
            }
            if (!startTime) {
              throw new Error('Start time is required')
            }
            if (!endTime) {
              throw new Error('End time is required')
            }
            if (!rest.metricPeriod) {
              throw new Error('Period is required')
            }
            const stat = rest.metricStatistics
            if (!stat) {
              throw new Error('Statistics is required')
            }
            return {
              awsRegion,
              awsAccessKeyId,
              awsSecretAccessKey,
              namespace: rest.metricNamespace,
              metricName: rest.metricName,
              startTime: Number(startTime),
              endTime: Number(endTime),
              period: Number(rest.metricPeriod),
              statistics: Array.isArray(stat) ? stat : [stat],
              ...(rest.metricDimensions && {
                // Dimensions arrive either as a pre-serialized string, as
                // table rows ({ cells: { name, value } }), or as a plain
                // object; normalize all three to a JSON string.
                dimensions: (() => {
                  const dims = rest.metricDimensions
                  if (typeof dims === 'string') return dims
                  if (Array.isArray(dims)) {
                    const obj: Record<string, string> = {}
                    for (const row of dims) {
                      const name = row.cells?.name
                      const value = row.cells?.value
                      if (name && value !== undefined) obj[name] = String(value)
                    }
                    return JSON.stringify(obj)
                  }
                  return JSON.stringify(dims)
                })(),
              }),
            }
          }
          case 'describe_alarms':
            return {
              awsRegion,
              awsAccessKeyId,
              awsSecretAccessKey,
              ...(rest.alarmNamePrefix && { alarmNamePrefix: rest.alarmNamePrefix }),
              ...(rest.stateValue && { stateValue: rest.stateValue }),
              ...(rest.alarmType && { alarmType: rest.alarmType }),
              ...(parsedLimit !== undefined && { limit: parsedLimit }),
            }
          default:
            throw new Error(`Invalid CloudWatch operation: ${operation}`)
        }
      },
    },
  },
  inputs: {
    operation: { type: 'string', description: 'CloudWatch operation to perform' },
    awsRegion: { type: 'string', description: 'AWS region' },
    awsAccessKeyId: { type: 'string', description: 'AWS access key ID' },
    awsSecretAccessKey: { type: 'string', description: 'AWS secret access key' },
    logGroupNames: { type: 'string', description: 'Log group name(s) for query' },
    queryString: { type: 'string', description: 'CloudWatch Log Insights query string' },
    startTime: { type: 'string', description: 'Start time as Unix epoch seconds' },
    endTime: { type: 'string', description: 'End time as Unix epoch seconds' },
    prefix: { type: 'string', description: 'Log group name prefix filter' },
    logGroupName: {
      type: 'string',
      description: 'Log group name for get events / describe streams',
    },
    logStreamName: { type: 'string', description: 'Log stream name for get events' },
    streamPrefix: { type: 'string', description: 'Log stream name prefix filter' },
    metricNamespace: { type: 'string', description: 'Metric namespace (e.g., AWS/EC2)' },
    metricName: { type: 'string', description: 'Metric name (e.g., CPUUtilization)' },
    recentlyActive: { type: 'boolean', description: 'Only show recently active metrics' },
    metricPeriod: { type: 'number', description: 'Granularity in seconds' },
    metricStatistics: { type: 'string', description: 'Statistic type (Average, Sum, etc.)' },
    metricDimensions: { type: 'json', description: 'Metric dimensions (Name/Value pairs)' },
    alarmNamePrefix: { type: 'string', description: 'Alarm name prefix filter' },
    stateValue: {
      type: 'string',
      description: 'Alarm state filter (OK, ALARM, INSUFFICIENT_DATA)',
    },
    alarmType: { type: 'string', description: 'Alarm type filter (MetricAlarm, CompositeAlarm)' },
    limit: { type: 'number', description: 'Maximum number of results' },
  },
  outputs: {
    results: {
      type: 'array',
      description: 'Log Insights query result rows',
    },
    statistics: {
      type: 'json',
      description: 'Query statistics (bytesScanned, recordsMatched, recordsScanned)',
    },
    status: {
      type: 'string',
      description: 'Query completion status',
    },
    logGroups: {
      type: 'array',
      description: 'List of CloudWatch log groups',
    },
    events: {
      type: 'array',
      description: 'Log events with timestamp and message',
    },
    logStreams: {
      type: 'array',
      description: 'Log streams with metadata',
    },
    metrics: {
      type: 'array',
      description: 'List of available metrics',
    },
    label: {
      type: 'string',
      description: 'Metric label',
    },
    datapoints: {
      type: 'array',
      description: 'Metric datapoints with timestamps and values',
    },
    alarms: {
      type: 'array',
      description: 'CloudWatch alarms with state and configuration',
    },
  },
}

View File

@@ -24,6 +24,7 @@ import { CirclebackBlock } from '@/blocks/blocks/circleback'
import { ClayBlock } from '@/blocks/blocks/clay'
import { ClerkBlock } from '@/blocks/blocks/clerk'
import { CloudflareBlock } from '@/blocks/blocks/cloudflare'
import { CloudWatchBlock } from '@/blocks/blocks/cloudwatch'
import { ConditionBlock } from '@/blocks/blocks/condition'
import { ConfluenceBlock, ConfluenceV2Block } from '@/blocks/blocks/confluence'
import { CredentialBlock } from '@/blocks/blocks/credential'
@@ -241,6 +242,7 @@ export const registry: Record<string, BlockConfig> = {
chat_trigger: ChatTriggerBlock,
circleback: CirclebackBlock,
cloudflare: CloudflareBlock,
cloudwatch: CloudWatchBlock,
clay: ClayBlock,
clerk: ClerkBlock,
condition: ConditionBlock,

View File

@@ -51,6 +51,13 @@ import { Button } from '../button/button'
const ANIMATION_CLASSES =
'data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:animate-out data-[state=open]:animate-in motion-reduce:animate-none'
/**
* Modal content animation classes.
* We keep only the slide animations (no zoom) to stabilize positioning while avoiding scale effects.
*/
const CONTENT_ANIMATION_CLASSES =
'data-[state=closed]:slide-out-to-top-[50%] data-[state=open]:slide-in-from-top-[50%] motion-reduce:animate-none'
/**
* Root modal component. Manages open state.
*/
@@ -159,8 +166,7 @@ const ModalContent = React.forwardRef<
)}
style={{
left: isWorkflowPage
? // --panel-width is always the rendered panel width on /w/ routes (panel is never hidden/collapsed)
'calc(50% + (var(--sidebar-width) - var(--panel-width)) / 2)'
? 'calc(50% + (var(--sidebar-width) - var(--panel-width)) / 2)'
: 'calc(var(--sidebar-width) / 2 + 50%)',
...style,
}}

View File

@@ -72,6 +72,8 @@ export { Table } from './table'
export { TableX } from './table-x'
export { TagIcon } from './tag'
export { TerminalWindow } from './terminal-window'
export { ThumbsDown } from './thumbs-down'
export { ThumbsUp } from './thumbs-up'
export { Trash } from './trash'
export { TrashOutline } from './trash-outline'
export { Trash2 } from './trash2'

View File

@@ -0,0 +1,28 @@
import type { SVGProps } from 'react'
/**
 * ThumbsDown icon component.
 *
 * Renders the thumbs-up outline flipped vertically via an SVG group
 * transform, so both icons share identical path geometry.
 *
 * @param props - SVG properties including className, fill, etc.
 */
export function ThumbsDown(props: SVGProps<SVGSVGElement>) {
  // Same hand outline used by ThumbsUp; the <g> below mirrors it.
  const handOutline =
    'M14 3.88L13 8h5.83a2 2 0 0 1 1.92 2.56l-2.33 8A2 2 0 0 1 16.5 20H3a2 2 0 0 1-2-2v-8a2 2 0 0 1 2-2h2.76a2 2 0 0 0 1.79-1.11L11 0a3.13 3.13 0 0 1 3 3.88Z'
  return (
    <svg
      width='24'
      height='24'
      viewBox='-1 -2 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth='1.75'
      strokeLinecap='round'
      strokeLinejoin='round'
      xmlns='http://www.w3.org/2000/svg'
      aria-hidden='true'
      {...props}
    >
      {/* Flip vertically around the glyph's horizontal midline (y = 10). */}
      <g transform='scale(1,-1) translate(0,-20)'>
        <path d='M6 8v12' />
        <path d={handOutline} />
      </g>
    </svg>
  )
}

View File

@@ -0,0 +1,26 @@
import type { SVGProps } from 'react'
/**
 * ThumbsUp icon component.
 *
 * Stroke-based outline icon; color follows `currentColor` so it inherits
 * the surrounding text color unless overridden via props.
 *
 * @param props - SVG properties including className, fill, etc.
 */
export function ThumbsUp(props: SVGProps<SVGSVGElement>) {
  // Hand-with-raised-thumb outline (shared geometry with ThumbsDown).
  const handOutline =
    'M14 3.88L13 8h5.83a2 2 0 0 1 1.92 2.56l-2.33 8A2 2 0 0 1 16.5 20H3a2 2 0 0 1-2-2v-8a2 2 0 0 1 2-2h2.76a2 2 0 0 0 1.79-1.11L11 0a3.13 3.13 0 0 1 3 3.88Z'
  return (
    <svg
      width='24'
      height='24'
      viewBox='-1 -2 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth='1.75'
      strokeLinecap='round'
      strokeLinejoin='round'
      xmlns='http://www.w3.org/2000/svg'
      aria-hidden='true'
      {...props}
    >
      <path d='M6 8v12' />
      <path d={handOutline} />
    </svg>
  )
}

View File

@@ -4653,6 +4653,33 @@ export function SQSIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * Amazon CloudWatch architecture icon (monochrome, follows `currentColor`).
 * @param props - SVG properties including className, fill, etc.
 */
export function CloudWatchIcon(props: SVGProps<SVGSVGElement>) {
  // AWS "squid ink" outline path from the official architecture icon set.
  const squidPath =
    'M53,42 L41,42 L41,24 L43,24 L43,40 L53,40 L53,42 Z M40,66 C24.561,66 12,53.439 12,38 C12,22.561 24.561,10 40,10 C55.439,10 68,22.561 68,38 C68,53.439 55.439,66 40,66 M40,8 C23.458,8 10,21.458 10,38 C10,54.542 23.458,68 40,68 C56.542,68 70,54.542 70,38 C70,21.458 56.542,8 40,8'
  return (
    <svg
      {...props}
      version='1.1'
      viewBox='0 0 80 80'
      xmlns='http://www.w3.org/2000/svg'
      xmlnsXlink='http://www.w3.org/1999/xlink'
    >
      {/* Scale up around the center so the glyph fills the 80x80 viewBox */}
      <g
        id='Icon-Architecture/64/Arch_Amazon-CloudWatch_64'
        stroke='none'
        strokeWidth='1'
        fill='none'
        fillRule='evenodd'
        transform='translate(40, 40) scale(1.25) translate(-40, -40)'
      >
        <path d={squidPath} id='Amazon-CloudWatch_Icon_64_Squid' fill='currentColor' />
      </g>
    </svg>
  )
}
export function TextractIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -119,7 +119,7 @@ const ISSUE_FIELDS = `
`
const ISSUE_BY_ID_QUERY = `
query GetIssue($id: String!) {
query GetIssue($id: ID!) {
issue(id: $id) {
${ISSUE_FIELDS}
}
@@ -147,13 +147,13 @@ function buildIssuesQuery(sourceConfig: Record<string, unknown>): {
const variables: Record<string, unknown> = {}
if (teamId) {
varDefs.push('$teamId: String!')
varDefs.push('$teamId: ID!')
filterClauses.push('team: { id: { eq: $teamId } }')
variables.teamId = teamId
}
if (projectId) {
varDefs.push('$projectId: String!')
varDefs.push('$projectId: ID!')
filterClauses.push('project: { id: { eq: $projectId } }')
variables.projectId = projectId
}

View File

@@ -0,0 +1,39 @@
import { createLogger } from '@sim/logger'
import { useMutation } from '@tanstack/react-query'
const logger = createLogger('CopilotFeedbackMutation')
/** Request payload POSTed to /api/copilot/feedback. */
interface SubmitFeedbackVariables {
  /** Identifier of the chat the feedback refers to. */
  chatId: string
  /** The user message that produced the rated response. */
  userQuery: string
  /** The agent response being rated. */
  agentResponse: string
  /** Whether the feedback is positive. */
  isPositiveFeedback: boolean
  /** Optional free-form comment accompanying the rating. */
  feedback?: string
}
/** Successful response body from /api/copilot/feedback. */
interface SubmitFeedbackResponse {
  success: boolean
  feedbackId: string
}
/**
 * Mutation hook that submits copilot feedback to /api/copilot/feedback.
 * Failures are logged and rethrown as an Error whose message comes from the
 * server payload when available.
 */
export function useSubmitCopilotFeedback() {
  const submitFeedback = async (
    variables: SubmitFeedbackVariables
  ): Promise<SubmitFeedbackResponse> => {
    const response = await fetch('/api/copilot/feedback', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(variables),
    })
    if (!response.ok) {
      // Body may not be JSON on error paths; fall back to a generic message.
      const data = await response.json().catch(() => ({}))
      throw new Error(data.error || 'Failed to submit feedback')
    }
    return response.json()
  }
  return useMutation({
    mutationFn: submitFeedback,
    onError: (error) => {
      logger.error('Failed to submit copilot feedback:', error)
    },
  })
}

View File

@@ -6,6 +6,7 @@ import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { toast } from '@/components/emcn'
import type { Filter, RowData, Sort, TableDefinition, TableMetadata, TableRow } from '@/lib/table'
import { TABLE_LIMITS } from '@/lib/table/constants'
const logger = createLogger('TableQueries')
@@ -23,7 +24,7 @@ export const tableKeys = {
[...tableKeys.rowsRoot(tableId), paramsKey] as const,
}
interface TableRowsParams {
export interface TableRowsParams {
workspaceId: string
tableId: string
limit: number
@@ -32,7 +33,7 @@ interface TableRowsParams {
sort?: Sort | null
}
interface TableRowsResponse {
export interface TableRowsResponse {
rows: TableRow[]
totalCount: number
}
@@ -83,7 +84,7 @@ async function fetchTable(
return (data as { table: TableDefinition }).table
}
async function fetchTableRows({
export async function fetchTableRows({
workspaceId,
tableId,
limit,
@@ -125,6 +126,48 @@ async function fetchTableRows({
}
}
/**
 * Fetches every row of a table by paging through `fetchTableRows`.
 * Pages sequentially (each request's offset depends on the previous page) and
 * stops when the reported total is reached or the server returns an empty page,
 * which guards against an inconsistent/overstated `totalCount`.
 */
export async function fetchAllTableRows({
  workspaceId,
  tableId,
  filter,
  sort,
  pageSize = TABLE_LIMITS.MAX_QUERY_LIMIT,
  signal,
}: Pick<TableRowsParams, 'workspaceId' | 'tableId' | 'filter' | 'sort'> & {
  pageSize?: number
  signal?: AbortSignal
}): Promise<TableRowsResponse> {
  const collected: TableRow[] = []
  // Unknown until the first page responds; Infinity forces at least one fetch.
  let reportedTotal = Number.POSITIVE_INFINITY
  let cursor = 0
  for (;;) {
    if (collected.length >= reportedTotal) break
    const page = await fetchTableRows({
      workspaceId,
      tableId,
      limit: pageSize,
      offset: cursor,
      filter,
      sort,
      signal,
    })
    collected.push(...page.rows)
    reportedTotal = page.totalCount
    // Empty page means the server has nothing more, regardless of totalCount.
    if (page.rows.length === 0) break
    cursor += page.rows.length
  }
  return {
    rows: collected,
    totalCount: Number.isFinite(reportedTotal) ? reportedTotal : collected.length,
  }
}
function invalidateRowData(queryClient: ReturnType<typeof useQueryClient>, tableId: string) {
queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(tableId) })
}

View File

@@ -1716,6 +1716,81 @@ const registry: Record<SelectorKey, SelectorDefinition> = {
}))
},
},
'cloudwatch.logGroups': {
key: 'cloudwatch.logGroups',
staleTime: SELECTOR_STALE,
getQueryKey: ({ context }: SelectorQueryArgs) => [
'selectors',
'cloudwatch.logGroups',
context.awsAccessKeyId ?? 'none',
context.awsRegion ?? 'none',
],
enabled: ({ context }) =>
Boolean(context.awsAccessKeyId && context.awsSecretAccessKey && context.awsRegion),
fetchList: async ({ context, search }: SelectorQueryArgs) => {
const body = JSON.stringify({
accessKeyId: context.awsAccessKeyId,
secretAccessKey: context.awsSecretAccessKey,
region: context.awsRegion,
...(search && { prefix: search }),
})
const data = await fetchJson<{
output: { logGroups: { logGroupName: string }[] }
}>('/api/tools/cloudwatch/describe-log-groups', {
method: 'POST',
body,
})
return (data.output?.logGroups || []).map((lg) => ({
id: lg.logGroupName,
label: lg.logGroupName,
}))
},
fetchById: async ({ detailId }: SelectorQueryArgs) => {
if (!detailId) return null
return { id: detailId, label: detailId }
},
},
'cloudwatch.logStreams': {
key: 'cloudwatch.logStreams',
staleTime: SELECTOR_STALE,
getQueryKey: ({ context }: SelectorQueryArgs) => [
'selectors',
'cloudwatch.logStreams',
context.awsAccessKeyId ?? 'none',
context.awsRegion ?? 'none',
context.logGroupName ?? 'none',
],
enabled: ({ context }) =>
Boolean(
context.awsAccessKeyId &&
context.awsSecretAccessKey &&
context.awsRegion &&
context.logGroupName
),
fetchList: async ({ context, search }: SelectorQueryArgs) => {
const body = JSON.stringify({
accessKeyId: context.awsAccessKeyId,
secretAccessKey: context.awsSecretAccessKey,
region: context.awsRegion,
logGroupName: context.logGroupName,
...(search && { prefix: search }),
})
const data = await fetchJson<{
output: { logStreams: { logStreamName: string; lastEventTimestamp?: number }[] }
}>('/api/tools/cloudwatch/describe-log-streams', {
method: 'POST',
body,
})
return (data.output?.logStreams || []).map((ls) => ({
id: ls.logStreamName,
label: ls.logStreamName,
}))
},
fetchById: async ({ detailId }: SelectorQueryArgs) => {
if (!detailId) return null
return { id: detailId, label: detailId }
},
},
'sim.workflows': {
key: 'sim.workflows',
staleTime: SELECTOR_STALE,

View File

@@ -49,6 +49,8 @@ export type SelectorKey =
| 'webflow.sites'
| 'webflow.collections'
| 'webflow.items'
| 'cloudwatch.logGroups'
| 'cloudwatch.logStreams'
| 'sim.workflows'
export interface SelectorOption {
@@ -78,6 +80,10 @@ export interface SelectorContext {
datasetId?: string
serviceDeskId?: string
impersonateUserEmail?: string
awsAccessKeyId?: string
awsSecretAccessKey?: string
awsRegion?: string
logGroupName?: string
}
export interface SelectorQueryArgs {

View File

@@ -191,6 +191,21 @@ export function useTableUndo({ workspaceId, tableId }: UseTableUndoProps) {
break
}
case 'delete-column': {
if (direction === 'undo') {
addColumnMutation.mutate({
name: action.columnName,
type: action.columnType,
position: action.position,
unique: action.unique,
required: action.required,
})
} else {
deleteColumnMutation.mutate(action.columnName)
}
break
}
case 'rename-column': {
if (direction === 'undo') {
updateColumnMutation.mutate({

View File

@@ -140,25 +140,17 @@ async function initializeOpenTelemetry() {
sdk.start()
const shutdownHandler = async () => {
const shutdownOtel = async () => {
try {
await sdk.shutdown()
logger.info('OpenTelemetry SDK shut down successfully')
} catch (err) {
logger.error('Error shutting down OpenTelemetry SDK', err)
}
try {
const { getPostHogClient } = await import('@/lib/posthog/server')
await getPostHogClient()?.shutdown()
logger.info('PostHog client shut down successfully')
} catch (err) {
logger.error('Error shutting down PostHog client', err)
}
}
process.on('SIGTERM', shutdownHandler)
process.on('SIGINT', shutdownHandler)
process.on('SIGTERM', shutdownOtel)
process.on('SIGINT', shutdownOtel)
logger.info('OpenTelemetry instrumentation initialized with business span filtering')
} catch (error) {
@@ -169,6 +161,19 @@ async function initializeOpenTelemetry() {
export async function register() {
await initializeOpenTelemetry()
const shutdownPostHog = async () => {
try {
const { getPostHogClient } = await import('@/lib/posthog/server')
await getPostHogClient()?.shutdown()
logger.info('PostHog client shut down successfully')
} catch (err) {
logger.error('Error shutting down PostHog client', err)
}
}
process.on('SIGTERM', shutdownPostHog)
process.on('SIGINT', shutdownPostHog)
const { startMemoryTelemetry } = await import('./lib/monitoring/memory-telemetry')
startMemoryTelemetry()
}

View File

@@ -448,9 +448,11 @@ export async function hasInboxAccess(userId: string): Promise<boolean> {
if (!isProd) {
return true
}
const sub = await getHighestPrioritySubscription(userId)
const [sub, billingStatus] = await Promise.all([
getHighestPrioritySubscription(userId),
getEffectiveBillingStatus(userId),
])
if (!sub) return false
const billingStatus = await getEffectiveBillingStatus(userId)
if (!hasUsableSubscriptionAccess(sub.status, billingStatus.billingBlocked)) return false
return getPlanTierCredits(sub.plan) >= 25000 || checkEnterprisePlan(sub)
} catch (error) {
@@ -459,6 +461,30 @@ export async function hasInboxAccess(userId: string): Promise<boolean> {
}
}
/**
 * Check if user has access to live sync (every 5 minutes) for KB connectors
 * Returns true if:
 * - Self-hosted deployment, OR
 * - User has a Max plan (credits >= 25000) or enterprise plan
 */
export async function hasLiveSyncAccess(userId: string): Promise<boolean> {
  try {
    // Self-hosted deployments are never gated on a subscription.
    if (!isHosted) return true
    // Fetch subscription and billing state concurrently.
    const [subscription, billing] = await Promise.all([
      getHighestPrioritySubscription(userId),
      getEffectiveBillingStatus(userId),
    ])
    if (!subscription) return false
    if (!hasUsableSubscriptionAccess(subscription.status, billing.billingBlocked)) {
      return false
    }
    return getPlanTierCredits(subscription.plan) >= 25000 || checkEnterprisePlan(subscription)
  } catch (error) {
    // Fail closed: deny live sync if entitlement checks error out.
    logger.error('Error checking live sync access', { error, userId })
    return false
  }
}
/**
* Check if user has exceeded their cost limit based on current period usage
*/

View File

@@ -21,6 +21,7 @@ export type UsageLogSource =
| 'workspace-chat'
| 'mcp_copilot'
| 'mothership_block'
| 'knowledge-base'
/**
* Metadata for 'model' category charges

View File

@@ -81,7 +81,8 @@ export class DocsChunker {
const textChunks = await this.splitContent(markdownContent)
logger.info(`Generating embeddings for ${textChunks.length} chunks in ${relativePath}`)
const embeddings = textChunks.length > 0 ? await generateEmbeddings(textChunks) : []
const embeddings: number[][] =
textChunks.length > 0 ? (await generateEmbeddings(textChunks)).embeddings : []
const embeddingModel = 'text-embedding-3-small'
const chunks: DocChunk[] = []

View File

@@ -1,30 +1,11 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import { createBlockFromParams } from './builders'
const agentBlockConfig = {
type: 'agent',
name: 'Agent',
outputs: {
content: { type: 'string', description: 'Default content output' },
},
subBlocks: [{ id: 'responseFormat', type: 'response-format' }],
}
const conditionBlockConfig = {
type: 'condition',
name: 'Condition',
outputs: {},
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
vi.mock('@/blocks/registry', () => ({
getAllBlocks: () => [agentBlockConfig, conditionBlockConfig],
getBlock: (type: string) =>
type === 'agent' ? agentBlockConfig : type === 'condition' ? conditionBlockConfig : undefined,
}))
vi.mock('@/blocks/registry', () => createEditWorkflowRegistryMock(['agent', 'condition']))
describe('createBlockFromParams', () => {
it('derives agent outputs from responseFormat when outputs are not provided', () => {

View File

@@ -1,69 +1,16 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { describe, expect, it, vi } from 'vitest'
import { applyOperationsToWorkflowState } from './engine'
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/blocks/registry', () => ({
getAllBlocks: () => [
{
type: 'condition',
name: 'Condition',
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
},
{
type: 'agent',
name: 'Agent',
subBlocks: [
{ id: 'systemPrompt', type: 'long-input' },
{ id: 'model', type: 'combobox' },
],
},
{
type: 'function',
name: 'Function',
subBlocks: [
{ id: 'code', type: 'code' },
{ id: 'language', type: 'dropdown' },
],
},
],
getBlock: (type: string) => {
const blocks: Record<string, any> = {
condition: {
type: 'condition',
name: 'Condition',
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
},
agent: {
type: 'agent',
name: 'Agent',
subBlocks: [
{ id: 'systemPrompt', type: 'long-input' },
{ id: 'model', type: 'combobox' },
],
},
function: {
type: 'function',
name: 'Function',
subBlocks: [
{ id: 'code', type: 'code' },
{ id: 'language', type: 'dropdown' },
],
},
}
return blocks[type] || undefined
},
}))
vi.mock('@/blocks/registry', () =>
createEditWorkflowRegistryMock(['condition', 'agent', 'function'])
)
function makeLoopWorkflow() {
return {

View File

@@ -1,32 +1,12 @@
/**
* @vitest-environment node
*/
import { createEditWorkflowRegistryMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'
import { normalizeConditionRouterIds } from './builders'
import { validateInputsForBlock } from './validation'
const conditionBlockConfig = {
type: 'condition',
name: 'Condition',
outputs: {},
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
const routerBlockConfig = {
type: 'router_v2',
name: 'Router',
outputs: {},
subBlocks: [{ id: 'routes', type: 'router-input' }],
}
vi.mock('@/blocks/registry', () => ({
getBlock: (type: string) =>
type === 'condition'
? conditionBlockConfig
: type === 'router_v2'
? routerBlockConfig
: undefined,
}))
vi.mock('@/blocks/registry', () => createEditWorkflowRegistryMock(['condition', 'router_v2']))
describe('validateInputsForBlock', () => {
it('accepts condition-input arrays with arbitrary item ids', () => {

View File

@@ -1,11 +1,11 @@
import { loggerMock } from '@sim/testing'
import { createFeatureFlagsMock, loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { RateLimiter } from './rate-limiter'
import type { ConsumeResult, RateLimitStorageAdapter, TokenStatus } from './storage'
import { MANUAL_EXECUTION_LIMIT, RATE_LIMITS, RateLimitError } from './types'
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/core/config/feature-flags', () => ({ isBillingEnabled: true }))
vi.mock('@/lib/core/config/feature-flags', () => createFeatureFlagsMock({ isBillingEnabled: true }))
interface MockAdapter {
consumeTokens: Mock

View File

@@ -40,6 +40,7 @@ export const buildTimeCSPDirectives: CSPDirectives = {
'https://*.google.com',
'https://apis.google.com',
'https://assets.onedollarstats.com',
'https://challenges.cloudflare.com',
...(isReactGrabEnabled ? ['https://unpkg.com'] : []),
],
@@ -102,6 +103,7 @@ export const buildTimeCSPDirectives: CSPDirectives = {
'https://*.supabase.co',
'https://api.github.com',
'https://github.com/*',
'https://challenges.cloudflare.com',
'https://collector.onedollarstats.com',
...getHostnameFromUrl(env.NEXT_PUBLIC_BRAND_LOGO_URL),
...getHostnameFromUrl(env.NEXT_PUBLIC_PRIVACY_URL),
@@ -110,6 +112,7 @@ export const buildTimeCSPDirectives: CSPDirectives = {
'frame-src': [
"'self'",
'https://challenges.cloudflare.com',
'https://drive.google.com',
'https://docs.google.com',
'https://*.google.com',
@@ -171,13 +174,13 @@ export function generateRuntimeCSP(): string {
return `
default-src 'self';
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com ${reactGrabScript};
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com https://challenges.cloudflare.com ${reactGrabScript};
style-src 'self' 'unsafe-inline' https://fonts.googleapis.com;
img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com https://cdn.discordapp.com https://*.githubusercontent.com https://*.s3.amazonaws.com https://s3.amazonaws.com https://*.amazonaws.com https://*.blob.core.windows.net https://github.com/* https://collector.onedollarstats.com ${brandLogoDomain} ${brandFaviconDomain};
media-src 'self' blob:;
font-src 'self' https://fonts.gstatic.com;
connect-src 'self' ${appUrl} ${ollamaUrl} ${socketUrl} ${socketWsUrl} https://api.browser-use.com https://api.exa.ai https://api.firecrawl.dev https://*.googleapis.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.blob.core.windows.net https://api.github.com https://github.com/* https://*.atlassian.com https://*.supabase.co https://collector.onedollarstats.com ${dynamicDomainsStr};
frame-src 'self' https://drive.google.com https://docs.google.com https://*.google.com;
connect-src 'self' ${appUrl} ${ollamaUrl} ${socketUrl} ${socketWsUrl} https://api.browser-use.com https://api.exa.ai https://api.firecrawl.dev https://*.googleapis.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.blob.core.windows.net https://api.github.com https://github.com/* https://*.atlassian.com https://*.supabase.co https://challenges.cloudflare.com https://collector.onedollarstats.com ${dynamicDomainsStr};
frame-src 'self' https://challenges.cloudflare.com https://drive.google.com https://docs.google.com https://*.google.com;
frame-ancestors 'self';
form-action 'self';
base-uri 'self';

View File

@@ -0,0 +1,36 @@
import { createLogger } from '@sim/logger'
const logger = createLogger('FileDownload')
/**
 * Sanitizes a string for use as a file or path segment in exported assets.
 * Every character outside [a-zA-Z0-9-_] is replaced with a hyphen.
 */
export function sanitizePathSegment(name: string): string {
  const ALLOWED = /[a-z0-9-_]/i
  let result = ''
  // Iterate UTF-16 code units (not code points) to mirror a non-unicode
  // regex replace: each surrogate half of an astral char becomes its own '-'.
  for (let i = 0; i < name.length; i++) {
    const ch = name[i]
    result += ALLOWED.test(ch) ? ch : '-'
  }
  return result
}
/**
 * Downloads a file to the user's device.
 * Creates an object URL for the content, clicks a temporary anchor, then
 * cleans up both. Throws if the browser cannot create or trigger the download.
 */
export function downloadFile(
  content: Blob | string,
  filename: string,
  mimeType = 'application/json'
): void {
  try {
    const payload =
      content instanceof Blob ? content : new Blob([content], { type: mimeType })
    const objectUrl = URL.createObjectURL(payload)
    const anchor = document.createElement('a')
    anchor.href = objectUrl
    anchor.download = filename
    // Anchor must be in the document for click() to trigger a download.
    document.body.appendChild(anchor)
    anchor.click()
    anchor.remove()
    URL.revokeObjectURL(objectUrl)
  } catch (error) {
    logger.error('Failed to download file:', error)
    throw error
  }
}

View File

@@ -110,7 +110,7 @@ export async function createChunk(
workspaceId?: string | null
): Promise<ChunkData> {
logger.info(`[${requestId}] Generating embedding for manual chunk`)
const embeddings = await generateEmbeddings([chunkData.content], undefined, workspaceId)
const { embeddings } = await generateEmbeddings([chunkData.content], undefined, workspaceId)
// Calculate accurate token count
const tokenCount = estimateTokenCount(chunkData.content, 'openai')
@@ -359,7 +359,7 @@ export async function updateChunk(
if (content !== currentChunk[0].content) {
logger.info(`[${requestId}] Content changed, regenerating embedding for chunk ${chunkId}`)
const embeddings = await generateEmbeddings([content], undefined, workspaceId)
const { embeddings } = await generateEmbeddings([content], undefined, workspaceId)
// Calculate accurate token count
const tokenCount = estimateTokenCount(content, 'openai')

View File

@@ -25,9 +25,11 @@ import {
type SQL,
sql,
} from 'drizzle-orm'
import { recordUsage } from '@/lib/billing/core/usage-log'
import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { createBullMQJobData, isBullMQEnabled } from '@/lib/core/bullmq'
import { env } from '@/lib/core/config/env'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { getCostMultiplier, isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { enqueueWorkspaceDispatch } from '@/lib/core/workspace-dispatch'
import { processDocument } from '@/lib/knowledge/documents/document-processor'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
@@ -43,6 +45,7 @@ import type { ProcessedDocumentTags } from '@/lib/knowledge/types'
import { deleteFile } from '@/lib/uploads/core/storage-service'
import { extractStorageKey } from '@/lib/uploads/utils/file-utils'
import type { DocumentProcessingPayload } from '@/background/knowledge-processing'
import { calculateCost } from '@/providers/utils'
const logger = createLogger('DocumentService')
@@ -460,6 +463,10 @@ export async function processDocumentAsync(
overlap: rawConfig?.overlap ?? 200,
}
let totalEmbeddingTokens = 0
let embeddingIsBYOK = false
let embeddingModelName = 'text-embedding-3-small'
await withTimeout(
(async () => {
const processed = await processDocument(
@@ -500,10 +507,20 @@ export async function processDocumentAsync(
const batchNum = Math.floor(i / batchSize) + 1
logger.info(`[${documentId}] Processing embedding batch ${batchNum}/${totalBatches}`)
const batchEmbeddings = await generateEmbeddings(batch, undefined, kb[0].workspaceId)
const {
embeddings: batchEmbeddings,
totalTokens: batchTokens,
isBYOK,
modelName,
} = await generateEmbeddings(batch, undefined, kb[0].workspaceId)
for (const emb of batchEmbeddings) {
embeddings.push(emb)
}
totalEmbeddingTokens += batchTokens
if (i === 0) {
embeddingIsBYOK = isBYOK
embeddingModelName = modelName
}
}
}
@@ -638,6 +655,45 @@ export async function processDocumentAsync(
const processingTime = Date.now() - startTime
logger.info(`[${documentId}] Successfully processed document in ${processingTime}ms`)
if (!embeddingIsBYOK && totalEmbeddingTokens > 0 && kb[0].userId) {
try {
const costMultiplier = getCostMultiplier()
const { total: cost } = calculateCost(
embeddingModelName,
totalEmbeddingTokens,
0,
false,
costMultiplier
)
if (cost > 0) {
await recordUsage({
userId: kb[0].userId,
workspaceId: kb[0].workspaceId ?? undefined,
entries: [
{
category: 'model',
source: 'knowledge-base',
description: embeddingModelName,
cost,
metadata: { inputTokens: totalEmbeddingTokens, outputTokens: 0 },
},
],
additionalStats: {
totalTokensUsed: sql`total_tokens_used + ${totalEmbeddingTokens}`,
},
})
await checkAndBillOverageThreshold(kb[0].userId)
} else {
logger.warn(
`[${documentId}] Embedding model "${embeddingModelName}" has no pricing entry — billing skipped`,
{ totalEmbeddingTokens, embeddingModelName }
)
}
} catch (billingError) {
logger.error(`[${documentId}] Failed to record embedding usage`, { error: billingError })
}
}
} catch (error) {
const processingTime = Date.now() - startTime
const errorMessage = error instanceof Error ? error.message : 'Unknown error'

View File

@@ -35,6 +35,7 @@ interface EmbeddingConfig {
apiUrl: string
headers: Record<string, string>
modelName: string
isBYOK: boolean
}
interface EmbeddingResponseItem {
@@ -71,16 +72,19 @@ async function getEmbeddingConfig(
'Content-Type': 'application/json',
},
modelName: kbModelName,
isBYOK: false,
}
}
let openaiApiKey = env.OPENAI_API_KEY
let isBYOK = false
if (workspaceId) {
const byokResult = await getBYOKKey(workspaceId, 'openai')
if (byokResult) {
logger.info('Using workspace BYOK key for OpenAI embeddings')
openaiApiKey = byokResult.apiKey
isBYOK = true
}
}
@@ -98,12 +102,16 @@ async function getEmbeddingConfig(
'Content-Type': 'application/json',
},
modelName: embeddingModel,
isBYOK,
}
}
const EMBEDDING_REQUEST_TIMEOUT_MS = 60_000
async function callEmbeddingAPI(inputs: string[], config: EmbeddingConfig): Promise<number[][]> {
async function callEmbeddingAPI(
inputs: string[],
config: EmbeddingConfig
): Promise<{ embeddings: number[][]; totalTokens: number }> {
return retryWithExponentialBackoff(
async () => {
const useDimensions = supportsCustomDimensions(config.modelName)
@@ -140,7 +148,10 @@ async function callEmbeddingAPI(inputs: string[], config: EmbeddingConfig): Prom
}
const data: EmbeddingAPIResponse = await response.json()
return data.data.map((item) => item.embedding)
return {
embeddings: data.data.map((item) => item.embedding),
totalTokens: data.usage.total_tokens,
}
},
{
maxRetries: 3,
@@ -178,14 +189,23 @@ async function processWithConcurrency<T, R>(
return results
}
/** Result of `generateEmbeddings`, including billing-relevant metadata. */
export interface GenerateEmbeddingsResult {
  /** One embedding vector per input text, in input order. */
  embeddings: number[][]
  /** Token usage summed across all embedding API batches. */
  totalTokens: number
  /** True when a workspace BYOK key was used instead of the platform key. */
  isBYOK: boolean
  /** Name of the embedding model that produced the vectors. */
  modelName: string
}
/**
* Generate embeddings for multiple texts with token-aware batching and parallel processing
* Generate embeddings for multiple texts with token-aware batching and parallel processing.
* Returns embeddings alongside actual token count, model name, and whether a workspace BYOK key
* was used (vs. the platform's shared key) — enabling callers to make correct billing decisions.
*/
export async function generateEmbeddings(
texts: string[],
embeddingModel = 'text-embedding-3-small',
workspaceId?: string | null
): Promise<number[][]> {
): Promise<GenerateEmbeddingsResult> {
const config = await getEmbeddingConfig(embeddingModel, workspaceId)
const batches = batchByTokenLimit(texts, MAX_TOKENS_PER_REQUEST, embeddingModel)
@@ -204,13 +224,20 @@ export async function generateEmbeddings(
)
const allEmbeddings: number[][] = []
let totalTokens = 0
for (const batch of batchResults) {
for (const emb of batch) {
for (const emb of batch.embeddings) {
allEmbeddings.push(emb)
}
totalTokens += batch.totalTokens
}
return allEmbeddings
return {
embeddings: allEmbeddings,
totalTokens,
isBYOK: config.isBYOK,
modelName: config.modelName,
}
}
/**
@@ -227,6 +254,6 @@ export async function generateSearchEmbedding(
`Using ${config.useAzure ? 'Azure OpenAI' : 'OpenAI'} for search embedding generation`
)
const embeddings = await callEmbeddingAPI([query], config)
const { embeddings } = await callEmbeddingAPI([query], config)
return embeddings[0]
}

View File

@@ -1,7 +1,7 @@
/**
* @vitest-environment node
*/
import { loggerMock } from '@sim/testing'
import { createFeatureFlagsMock, loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
interface MockMcpClient {
@@ -38,7 +38,7 @@ const { MockMcpClientConstructor, mockOnToolsChanged, mockPublishToolsChanged }
)
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/core/config/feature-flags', () => ({ isTest: false }))
vi.mock('@/lib/core/config/feature-flags', () => createFeatureFlagsMock({ isTest: false }))
vi.mock('@/lib/mcp/pubsub', () => ({
mcpPubSub: {
onToolsChanged: mockOnToolsChanged,

View File

@@ -317,6 +317,15 @@ export interface PostHogEventMap {
workspace_id: string
}
table_exported: {
workspace_id: string
table_id: string
row_count: number
column_count: number
has_filter: boolean
has_sort: boolean
}
custom_tool_saved: {
tool_id: string
workspace_id: string
@@ -358,15 +367,12 @@ export interface PostHogEventMap {
workspace_id: string
}
task_marked_read: {
workspace_id: string
}
task_marked_unread: {
workspace_id: string
}
task_message_sent: {
workspace_id: string
has_attachments: boolean
has_contexts: boolean
is_new_task: boolean
@@ -389,6 +395,62 @@ export interface PostHogEventMap {
source: 'help_menu' | 'editor_button' | 'toolbar_context_menu'
block_type?: string
}
search_result_selected: {
result_type:
| 'block'
| 'tool'
| 'trigger'
| 'tool_operation'
| 'workflow'
| 'workspace'
| 'task'
| 'table'
| 'file'
| 'knowledge_base'
| 'page'
| 'docs'
query_length: number
workspace_id: string
}
workflow_imported: {
workspace_id: string
workflow_count: number
format: 'json' | 'zip'
}
workflow_exported: {
workspace_id: string
workflow_count: number
format: 'json' | 'zip'
}
folder_created: {
workspace_id: string
}
folder_deleted: {
workspace_id: string
}
logs_filter_applied: {
filter_type: 'status' | 'workflow' | 'folder' | 'trigger' | 'time'
workspace_id: string
}
knowledge_base_document_deleted: {
knowledge_base_id: string
workspace_id: string
}
scheduled_task_created: {
workspace_id: string
}
scheduled_task_deleted: {
workspace_id: string
}
}
export type PostHogEventName = keyof PostHogEventMap

View File

@@ -1,10 +1,10 @@
/**
* @vitest-environment node
*/
import { createTableColumn } from '@sim/testing'
import { describe, expect, it } from 'vitest'
import { TABLE_LIMITS } from '../constants'
import {
type ColumnDefinition,
getUniqueColumns,
type TableSchema,
validateColumnDefinition,
@@ -66,12 +66,12 @@ describe('Validation', () => {
describe('validateColumnDefinition', () => {
it('should accept valid column definition', () => {
const column: ColumnDefinition = {
const column = createTableColumn({
name: 'email',
type: 'string',
required: true,
unique: true,
}
})
const result = validateColumnDefinition(column)
expect(result.valid).toBe(true)
})
@@ -80,19 +80,20 @@ describe('Validation', () => {
const types = ['string', 'number', 'boolean', 'date', 'json'] as const
for (const type of types) {
const result = validateColumnDefinition({ name: 'test', type })
const result = validateColumnDefinition(createTableColumn({ name: 'test', type }))
expect(result.valid).toBe(true)
}
})
it('should reject empty column name', () => {
const result = validateColumnDefinition({ name: '', type: 'string' })
const result = validateColumnDefinition(createTableColumn({ name: '', type: 'string' }))
expect(result.valid).toBe(false)
expect(result.errors).toContain('Column name is required')
})
it('should reject invalid column type', () => {
const result = validateColumnDefinition({
...createTableColumn({ name: 'test' }),
name: 'test',
type: 'invalid' as any,
})
@@ -102,7 +103,7 @@ describe('Validation', () => {
it('should reject column name exceeding max length', () => {
const longName = 'a'.repeat(TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH + 1)
const result = validateColumnDefinition({ name: longName, type: 'string' })
const result = validateColumnDefinition(createTableColumn({ name: longName, type: 'string' }))
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum length')
})
@@ -112,9 +113,9 @@ describe('Validation', () => {
it('should accept valid schema', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string', required: true, unique: true },
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
createTableColumn({ name: 'id', type: 'string', required: true, unique: true }),
createTableColumn({ name: 'name', type: 'string', required: true }),
createTableColumn({ name: 'age', type: 'number' }),
],
}
const result = validateTableSchema(schema)
@@ -131,8 +132,8 @@ describe('Validation', () => {
it('should reject duplicate column names', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string' },
{ name: 'ID', type: 'number' },
createTableColumn({ name: 'id', type: 'string' }),
createTableColumn({ name: 'ID', type: 'number' }),
],
}
const result = validateTableSchema(schema)
@@ -153,10 +154,9 @@ describe('Validation', () => {
})
it('should reject schema exceeding max columns', () => {
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) => ({
name: `col_${i}`,
type: 'string' as const,
}))
const columns = Array.from({ length: TABLE_LIMITS.MAX_COLUMNS_PER_TABLE + 1 }, (_, i) =>
createTableColumn({ name: `col_${i}`, type: 'string' })
)
const result = validateTableSchema({ columns })
expect(result.valid).toBe(false)
expect(result.errors[0]).toContain('exceeds maximum columns')
@@ -182,11 +182,11 @@ describe('Validation', () => {
describe('validateRowAgainstSchema', () => {
const schema: TableSchema = {
columns: [
{ name: 'name', type: 'string', required: true },
{ name: 'age', type: 'number' },
{ name: 'active', type: 'boolean' },
{ name: 'created', type: 'date' },
{ name: 'metadata', type: 'json' },
createTableColumn({ name: 'name', type: 'string', required: true }),
createTableColumn({ name: 'age', type: 'number' }),
createTableColumn({ name: 'active', type: 'boolean' }),
createTableColumn({ name: 'created', type: 'date' }),
createTableColumn({ name: 'metadata', type: 'json' }),
],
}
@@ -281,10 +281,10 @@ describe('Validation', () => {
it('should return only columns with unique=true', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string', unique: true },
{ name: 'email', type: 'string', unique: true },
{ name: 'name', type: 'string' },
{ name: 'count', type: 'number', unique: false },
createTableColumn({ name: 'id', type: 'string', unique: true }),
createTableColumn({ name: 'email', type: 'string', unique: true }),
createTableColumn({ name: 'name', type: 'string' }),
createTableColumn({ name: 'count', type: 'number', unique: false }),
],
}
const result = getUniqueColumns(schema)
@@ -295,8 +295,8 @@ describe('Validation', () => {
it('should return empty array when no unique columns', () => {
const schema: TableSchema = {
columns: [
{ name: 'name', type: 'string' },
{ name: 'value', type: 'number' },
createTableColumn({ name: 'name', type: 'string' }),
createTableColumn({ name: 'value', type: 'number' }),
],
}
const result = getUniqueColumns(schema)
@@ -307,9 +307,9 @@ describe('Validation', () => {
describe('validateUniqueConstraints', () => {
const schema: TableSchema = {
columns: [
{ name: 'id', type: 'string', unique: true },
{ name: 'email', type: 'string', unique: true },
{ name: 'name', type: 'string' },
createTableColumn({ name: 'id', type: 'string', unique: true }),
createTableColumn({ name: 'email', type: 'string', unique: true }),
createTableColumn({ name: 'name', type: 'string' }),
],
}

View File

@@ -1,9 +1,12 @@
/**
* Tests for workflow change detection comparison logic
*/
import type { WorkflowVariableFixture } from '@sim/testing'
import {
createBlock as createTestBlock,
createWorkflowState as createTestWorkflowState,
createWorkflowVariablesMap,
} from '@sim/testing'
import { describe, expect, it } from 'vitest'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -46,6 +49,12 @@ function createBlock(id: string, overrides: Record<string, any> = {}): any {
})
}
function createVariablesMap(
...variables: Parameters<typeof createWorkflowVariablesMap>[0]
): Record<string, WorkflowVariableFixture> {
return createWorkflowVariablesMap(variables)
}
describe('hasWorkflowChanged', () => {
describe('Basic Cases', () => {
it.concurrent('should return true when deployedState is null', () => {
@@ -2181,9 +2190,12 @@ describe('hasWorkflowChanged', () => {
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2192,9 +2204,12 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect removed variables', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
}
const currentState = {
@@ -2208,16 +2223,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable value changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'hello',
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'world' },
},
variables: createVariablesMap({
id: 'var1',
name: 'myVar',
type: 'string',
value: 'world',
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2226,16 +2247,12 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable type changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: '123' },
},
variables: createVariablesMap({ id: 'var1', name: 'myVar', type: 'string', value: '123' }),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'number', value: 123 },
},
variables: createVariablesMap({ id: 'var1', name: 'myVar', type: 'number', value: 123 }),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2244,16 +2261,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should detect variable name changes', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'oldName', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'oldName',
type: 'string',
value: 'hello',
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'newName', type: 'string', value: 'hello' },
},
variables: createVariablesMap({
id: 'var1',
name: 'newName',
type: 'string',
value: 'hello',
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2262,18 +2285,18 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should not detect change for identical variables', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(false)
@@ -2310,16 +2333,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should handle complex variable values (objects)', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'config', type: 'object', value: { key: 'value1' } },
},
variables: createVariablesMap({
id: 'var1',
name: 'config',
type: 'object',
value: { key: 'value1' },
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'config', type: 'object', value: { key: 'value2' } },
},
variables: createVariablesMap({
id: 'var1',
name: 'config',
type: 'object',
value: { key: 'value2' },
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2328,16 +2357,22 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should handle complex variable values (arrays)', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'items', type: 'array', value: [1, 2, 3] },
},
variables: createVariablesMap({
id: 'var1',
name: 'items',
type: 'array',
value: [1, 2, 3],
}),
}
const currentState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'items', type: 'array', value: [1, 2, 4] },
},
variables: createVariablesMap({
id: 'var1',
name: 'items',
type: 'array',
value: [1, 2, 4],
}),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(true)
@@ -2346,18 +2381,18 @@ describe('hasWorkflowChanged', () => {
it.concurrent('should not detect change when variable key order differs', () => {
const deployedState = {
...createWorkflowState({}),
variables: {
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
},
variables: createVariablesMap(
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
{ id: 'var2', name: 'count', type: 'number', value: 42 }
),
}
const currentState = {
...createWorkflowState({}),
variables: {
var2: { id: 'var2', name: 'count', type: 'number', value: 42 },
var1: { id: 'var1', name: 'myVar', type: 'string', value: 'hello' },
},
variables: createVariablesMap(
{ id: 'var2', name: 'count', type: 'number', value: 42 },
{ id: 'var1', name: 'myVar', type: 'string', value: 'hello' }
),
}
expect(hasWorkflowChanged(currentState as any, deployedState as any)).toBe(false)
@@ -2840,175 +2875,135 @@ describe('hasWorkflowChanged', () => {
describe('Variables (UI-only fields should not trigger change)', () => {
it.concurrent('should not detect change when validationError differs', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
},
}
}),
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
validationError: undefined,
},
}
}),
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent('should not detect change when validationError has value vs missing', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
},
}
}),
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'number',
value: 'invalid',
validationError: 'Not a valid number',
},
}
}),
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent('should detect change when variable value differs', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'old value',
},
}
}),
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'new value',
validationError: undefined,
},
}
}),
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
it.concurrent('should detect change when variable is added', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
blocks: { block1: createBlock('block1') },
variables: {},
})
;(deployedState as any).variables = {}
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(currentState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
},
}
}),
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
it.concurrent('should detect change when variable is removed', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
})
;(deployedState as any).variables = {
var1: {
blocks: { block1: createBlock('block1') },
variables: createVariablesMap({
id: 'var1',
workflowId: 'workflow1',
name: 'myVar',
type: 'plain',
value: 'test',
},
}
}),
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
blocks: { block1: createBlock('block1') },
variables: {},
})
;(currentState as any).variables = {}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
})
it.concurrent('should not detect change when empty array vs empty object', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
blocks: { block1: createBlock('block1') },
})
;(deployedState as any).variables = []
// Intentional type violation to test robustness with malformed data
;(deployedState as unknown as Record<string, unknown>).variables = []
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1'),
},
blocks: { block1: createBlock('block1') },
variables: {},
})
;(currentState as any).variables = {}
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
@@ -3151,7 +3146,7 @@ describe('generateWorkflowDiffSummary', () => {
})
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'hello' } },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'hello' }),
})
const result = generateWorkflowDiffSummary(currentState, previousState)
expect(result.hasChanges).toBe(true)
@@ -3161,11 +3156,11 @@ describe('generateWorkflowDiffSummary', () => {
it.concurrent('should detect modified variables', () => {
const previousState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'hello' } },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'hello' }),
})
const currentState = createWorkflowState({
blocks: { block1: createBlock('block1') },
variables: { var1: { id: 'var1', name: 'test', type: 'string', value: 'world' } },
variables: createVariablesMap({ id: 'var1', name: 'test', type: 'string', value: 'world' }),
})
const result = generateWorkflowDiffSummary(currentState, previousState)
expect(result.hasChanges).toBe(true)

View File

@@ -123,6 +123,7 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
requestId: 'req-1',
workflowId: 'workflow-1',
userId: 'user-1',
workflowUserId: 'workflow-owner',
workspaceId: 'workspace-1',
triggerType: 'api',
executionId: 'execution-1',
@@ -755,4 +756,92 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
expect(safeCompleteWithErrorMock).not.toHaveBeenCalled()
expect(wasExecutionFinalizedByCore(envError, 'execution-no-log-start')).toBe(false)
})
it('uses sessionUserId for env resolution when isClientSession is true', async () => {
const snapshot = {
...createSnapshot(),
metadata: {
...createSnapshot().metadata,
isClientSession: true,
sessionUserId: 'session-user',
workflowUserId: 'workflow-owner',
},
}
getPersonalAndWorkspaceEnvMock.mockResolvedValue({
personalEncrypted: {},
workspaceEncrypted: {},
personalDecrypted: {},
workspaceDecrypted: {},
})
safeStartMock.mockResolvedValue(true)
executorExecuteMock.mockResolvedValue({
output: { done: true },
logs: [],
metadata: { duration: 123, startTime: 'start', endTime: 'end' },
})
await executeWorkflowCore({
snapshot: snapshot as any,
callbacks: {},
loggingSession: loggingSession as any,
})
expect(getPersonalAndWorkspaceEnvMock).toHaveBeenCalledWith('session-user', 'workspace-1')
})
it('uses workflowUserId for env resolution in server-side execution', async () => {
const snapshot = {
...createSnapshot(),
metadata: {
...createSnapshot().metadata,
isClientSession: false,
sessionUserId: undefined,
workflowUserId: 'workflow-owner',
userId: 'billing-actor',
},
}
getPersonalAndWorkspaceEnvMock.mockResolvedValue({
personalEncrypted: {},
workspaceEncrypted: {},
personalDecrypted: {},
workspaceDecrypted: {},
})
safeStartMock.mockResolvedValue(true)
executorExecuteMock.mockResolvedValue({
output: { done: true },
logs: [],
metadata: { duration: 123, startTime: 'start', endTime: 'end' },
})
await executeWorkflowCore({
snapshot: snapshot as any,
callbacks: {},
loggingSession: loggingSession as any,
})
expect(getPersonalAndWorkspaceEnvMock).toHaveBeenCalledWith('workflow-owner', 'workspace-1')
})
it('throws when workflowUserId is missing in server-side execution', async () => {
const snapshot = {
...createSnapshot(),
metadata: {
...createSnapshot().metadata,
isClientSession: false,
sessionUserId: undefined,
workflowUserId: undefined,
userId: 'billing-actor',
},
}
await expect(
executeWorkflowCore({
snapshot: snapshot as any,
callbacks: {},
loggingSession: loggingSession as any,
})
).rejects.toThrow('Missing workflowUserId in execution metadata')
})
})

View File

@@ -325,10 +325,13 @@ export async function executeWorkflowCore(
const mergedStates = mergeSubblockStateWithValues(blocks)
const personalEnvUserId = metadata.sessionUserId || metadata.userId
const personalEnvUserId =
metadata.isClientSession && metadata.sessionUserId
? metadata.sessionUserId
: metadata.workflowUserId
if (!personalEnvUserId) {
throw new Error('Missing execution actor for environment resolution')
throw new Error('Missing workflowUserId in execution metadata')
}
const { personalEncrypted, workspaceEncrypted, personalDecrypted, workspaceDecrypted } =

View File

@@ -1,6 +1,8 @@
/**
* @vitest-environment node
*/
import { createMockSelectChain, createMockUpdateChain } from '@sim/testing'
import { loggerMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
const {
@@ -35,13 +37,7 @@ vi.mock('@sim/db/schema', () => ({
workflowSchedule: { archivedAt: 'workflow_schedule_archived_at' },
}))
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/workflows/utils', () => ({
getWorkflowById: (...args: unknown[]) => mockGetWorkflowById(...args),
@@ -66,24 +62,6 @@ vi.mock('@/lib/core/telemetry', () => ({
import { archiveWorkflow } from '@/lib/workflows/lifecycle'
function createSelectChain<T>(result: T) {
const chain = {
from: vi.fn().mockReturnThis(),
innerJoin: vi.fn().mockReturnThis(),
where: vi.fn().mockResolvedValue(result),
}
return chain
}
function createUpdateChain() {
return {
set: vi.fn().mockReturnValue({
where: vi.fn().mockResolvedValue([]),
}),
}
}
describe('workflow lifecycle', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -107,10 +85,10 @@ describe('workflow lifecycle', () => {
archivedAt: new Date(),
})
mockSelect.mockReturnValue(createSelectChain([]))
mockSelect.mockReturnValue(createMockSelectChain([]))
const tx = {
update: vi.fn().mockImplementation(() => createUpdateChain()),
update: vi.fn().mockImplementation(() => createMockUpdateChain()),
}
mockTransaction.mockImplementation(async (callback: (trx: typeof tx) => Promise<void>) =>
callback(tx)

View File

@@ -1,4 +1,5 @@
import { createLogger } from '@sim/logger'
import { sanitizePathSegment } from '@/lib/core/utils/file-download'
import {
type ExportWorkflowState,
sanitizeForExport,
@@ -43,36 +44,6 @@ export interface WorkspaceExportStructure {
folders: FolderExportData[]
}
/**
* Sanitizes a string for use as a path segment in a ZIP file.
*/
export function sanitizePathSegment(name: string): string {
return name.replace(/[^a-z0-9-_]/gi, '-')
}
/**
* Downloads a file to the user's device.
*/
export function downloadFile(
content: Blob | string,
filename: string,
mimeType = 'application/json'
): void {
try {
const blob = content instanceof Blob ? content : new Blob([content], { type: mimeType })
const url = URL.createObjectURL(blob)
const a = document.createElement('a')
a.href = url
a.download = filename
document.body.appendChild(a)
a.click()
document.body.removeChild(a)
URL.revokeObjectURL(url)
} catch (error) {
logger.error('Failed to download file:', error)
}
}
/**
* Fetches a workflow's state and variables for export.
* Returns null if the workflow cannot be fetched.

Some files were not shown because too many files have changed in this diff Show More