v0.4.3: posthog, docs updates, search modal improvements
@@ -1,7 +1,7 @@
import type { ReactNode } from 'react'
import { defineI18nUI } from 'fumadocs-ui/i18n'
import { DocsLayout } from 'fumadocs-ui/layouts/docs'
import { RootProvider } from 'fumadocs-ui/provider'
import { RootProvider } from 'fumadocs-ui/provider/next'
import { ExternalLink, GithubIcon } from 'lucide-react'
import { Inter } from 'next/font/google'
import Image from 'next/image'

@@ -15,9 +15,9 @@
    "@vercel/analytics": "1.5.0",
    "@vercel/og": "^0.6.5",
    "clsx": "^2.1.1",
    "fumadocs-core": "^15.7.5",
    "fumadocs-mdx": "^11.5.6",
    "fumadocs-ui": "^15.7.5",
    "fumadocs-core": "15.8.2",
    "fumadocs-mdx": "11.10.1",
    "fumadocs-ui": "15.8.2",
    "lucide-react": "^0.511.0",
    "next": "15.4.1",
    "next-themes": "^0.4.6",

@@ -1,5 +1,8 @@
# Database (Required)
DATABASE_URL="postgresql://postgres:password@localhost:5432/postgres"
# DATABASE_SSL=disable # Optional: SSL mode (disable, prefer, require, verify-ca, verify-full)
# DATABASE_SSL_CA= # Optional: Base64-encoded CA certificate (required for verify-ca/verify-full)
# To generate: cat your-ca.crt | base64 | tr -d '\n'

# PostgreSQL Port (Optional) - defaults to 5432 if not specified
# POSTGRES_PORT=5432

@@ -237,7 +237,6 @@ describe('Copilot Checkpoints Revert API Route', () => {
        parallels: {},
        isDeployed: true,
        deploymentStatuses: { production: 'deployed' },
        hasActiveWebhook: false,
      },
    }

@@ -287,7 +286,6 @@ describe('Copilot Checkpoints Revert API Route', () => {
        parallels: {},
        isDeployed: true,
        deploymentStatuses: { production: 'deployed' },
        hasActiveWebhook: false,
        lastSaved: 1640995200000,
      },
    },
@@ -309,7 +307,6 @@ describe('Copilot Checkpoints Revert API Route', () => {
        parallels: {},
        isDeployed: true,
        deploymentStatuses: { production: 'deployed' },
        hasActiveWebhook: false,
        lastSaved: 1640995200000,
      }),
    }
@@ -445,7 +442,6 @@ describe('Copilot Checkpoints Revert API Route', () => {
        parallels: {},
        isDeployed: false,
        deploymentStatuses: {},
        hasActiveWebhook: false,
        lastSaved: 1640995200000,
      })
    })
@@ -722,7 +718,6 @@ describe('Copilot Checkpoints Revert API Route', () => {
          production: 'deployed',
          staging: 'pending',
        },
        hasActiveWebhook: true,
        deployedAt: '2024-01-01T10:00:00.000Z',
      },
    }
@@ -769,7 +764,6 @@ describe('Copilot Checkpoints Revert API Route', () => {
          production: 'deployed',
          staging: 'pending',
        },
        hasActiveWebhook: true,
        deployedAt: '2024-01-01T10:00:00.000Z',
        lastSaved: 1640995200000,
      })

@@ -73,7 +73,6 @@ export async function POST(request: NextRequest) {
      parallels: checkpointState?.parallels || {},
      isDeployed: checkpointState?.isDeployed || false,
      deploymentStatuses: checkpointState?.deploymentStatuses || {},
      hasActiveWebhook: checkpointState?.hasActiveWebhook || false,
      lastSaved: Date.now(),
      // Only include deployedAt if it's a valid date string that can be converted
      ...(checkpointState?.deployedAt &&

apps/sim/app/api/copilot/training/examples/route.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
import { type NextRequest, NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('CopilotTrainingExamplesAPI')

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

export async function POST(request: NextRequest) {
  const baseUrl = env.AGENT_INDEXER_URL
  if (!baseUrl) {
    logger.error('Missing AGENT_INDEXER_URL environment variable')
    return NextResponse.json({ error: 'Missing AGENT_INDEXER_URL env' }, { status: 500 })
  }

  const apiKey = env.AGENT_INDEXER_API_KEY
  if (!apiKey) {
    logger.error('Missing AGENT_INDEXER_API_KEY environment variable')
    return NextResponse.json({ error: 'Missing AGENT_INDEXER_API_KEY env' }, { status: 500 })
  }

  try {
    const body = await request.json()

    logger.info('Sending workflow example to agent indexer', {
      hasJsonField: typeof body?.json === 'string',
    })

    const upstream = await fetch(`${baseUrl}/examples/add`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'x-api-key': apiKey,
      },
      body: JSON.stringify(body),
    })

    if (!upstream.ok) {
      const errorText = await upstream.text()
      logger.error('Agent indexer rejected the example', {
        status: upstream.status,
        error: errorText,
      })
      return NextResponse.json({ error: errorText }, { status: upstream.status })
    }

    const data = await upstream.json()
    logger.info('Successfully sent workflow example to agent indexer')

    return NextResponse.json(data, {
      headers: { 'content-type': 'application/json' },
    })
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : 'Failed to add example'
    logger.error('Failed to send workflow example', { error: err })
    return NextResponse.json({ error: errorMessage }, { status: 502 })
  }
}
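
For reference, a minimal sketch of calling this new proxy route from the client (hypothetical values; the `json`/`source_path`/`summary` field names come from the TrainingModal change later in this commit):

// Sketch: posting a workflow example to the new proxy route.
// Assumes `workflowState` already holds a sanitized workflow object.
const res = await fetch('/api/copilot/training/examples', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    json: JSON.stringify(workflowState), // stringified workflow state
    source_path: 'Customer Onboarding Workflow', // short title
    summary: 'What this workflow does', // description
  }),
})
if (!res.ok) throw new Error((await res.json()).error)
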
@@ -97,7 +97,13 @@ export async function GET(request: NextRequest) {
    const baseQuery = db
      .select(selectColumns)
      .from(workflowExecutionLogs)
      .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
      .innerJoin(
        workflow,
        and(
          eq(workflowExecutionLogs.workflowId, workflow.id),
          eq(workflow.workspaceId, params.workspaceId)
        )
      )
      .innerJoin(
        permissions,
        and(
@@ -107,8 +113,8 @@
        )
      )

    // Build conditions for the joined query
    let conditions: SQL | undefined = eq(workflow.workspaceId, params.workspaceId)
    // Build additional conditions for the query
    let conditions: SQL | undefined

    // Filter by level
    if (params.level && params.level !== 'all') {
@@ -180,7 +186,13 @@
    const countQuery = db
      .select({ count: sql<number>`count(*)` })
      .from(workflowExecutionLogs)
      .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
      .innerJoin(
        workflow,
        and(
          eq(workflowExecutionLogs.workflowId, workflow.id),
          eq(workflow.workspaceId, params.workspaceId)
        )
      )
      .innerJoin(
        permissions,
        and(

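In the two hunks above, the workspace filter moves out of the WHERE conditions (note that `conditions` no longer pre-seeds `eq(workflow.workspaceId, ...)`) and into the join's ON clause. For an inner join the two forms select the same rows; a rough, illustrative sketch of the SQL shapes involved:

// Before: ... INNER JOIN workflow ON wel.workflow_id = workflow.id
//          WHERE workflow.workspace_id = $1 AND <other filters>
// After:  ... INNER JOIN workflow ON wel.workflow_id = workflow.id
//                                AND workflow.workspace_id = $1
//          WHERE <other filters>
// For INNER JOINs a predicate filters the same rows whether it lives in ON
// or WHERE, so this change keeps workspace scoping attached to the join it guards.
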
@@ -76,6 +76,8 @@ export async function GET() {
        telemetryEnabled: userSettings.telemetryEnabled,
        emailPreferences: userSettings.emailPreferences ?? {},
        billingUsageNotificationsEnabled: userSettings.billingUsageNotificationsEnabled ?? true,
        showFloatingControls: userSettings.showFloatingControls ?? true,
        showTrainingControls: userSettings.showTrainingControls ?? false,
      },
    },
    { status: 200 }

@@ -124,7 +124,13 @@ export async function GET(request: NextRequest) {
        workflowDescription: workflow.description,
      })
      .from(workflowExecutionLogs)
      .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
      .innerJoin(
        workflow,
        and(
          eq(workflowExecutionLogs.workflowId, workflow.id),
          eq(workflow.workspaceId, params.workspaceId)
        )
      )
      .innerJoin(
        permissions,
        and(

@@ -76,7 +76,6 @@ export async function POST(
      isDeployed: true,
      deployedAt: new Date(),
      deploymentStatuses: deployedState.deploymentStatuses || {},
      hasActiveWebhook: deployedState.hasActiveWebhook || false,
    })

    if (!saveResult.success) {

@@ -133,7 +133,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
      state: {
        // Default values for expected properties
        deploymentStatuses: {},
        hasActiveWebhook: false,
        // Data from normalized tables
        blocks: normalizedData.blocks,
        edges: normalizedData.edges,

@@ -89,13 +89,6 @@ const ParallelSchema = z.object({
  parallelType: z.enum(['count', 'collection']).optional(),
})

const DeploymentStatusSchema = z.object({
  id: z.string(),
  status: z.enum(['deploying', 'deployed', 'failed', 'stopping', 'stopped']),
  deployedAt: z.date().optional(),
  error: z.string().optional(),
})

const WorkflowStateSchema = z.object({
  blocks: z.record(BlockStateSchema),
  edges: z.array(EdgeSchema),
@@ -103,9 +96,7 @@ const WorkflowStateSchema = z.object({
  parallels: z.record(ParallelSchema).optional(),
  lastSaved: z.number().optional(),
  isDeployed: z.boolean().optional(),
  deployedAt: z.date().optional(),
  deploymentStatuses: z.record(DeploymentStatusSchema).optional(),
  hasActiveWebhook: z.boolean().optional(),
  deployedAt: z.coerce.date().optional(),
})

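One detail in the schema hunk above: `deployedAt` moves from `z.date()` to `z.coerce.date()`, so the API now accepts ISO date strings (as arrive in JSON payloads) in addition to Date objects. A self-contained illustration of the difference (standalone zod usage, not repo code):

import { z } from 'zod'

// z.date() only accepts Date instances; z.coerce.date() runs new Date(input) first.
const strict = z.object({ deployedAt: z.date().optional() })
const coerced = z.object({ deployedAt: z.coerce.date().optional() })

strict.safeParse({ deployedAt: '2024-01-01T10:00:00.000Z' }).success // => false
coerced.safeParse({ deployedAt: '2024-01-01T10:00:00.000Z' }).success // => true
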
/**
@@ -204,8 +195,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
      lastSaved: state.lastSaved || Date.now(),
      isDeployed: state.isDeployed || false,
      deployedAt: state.deployedAt,
      deploymentStatuses: state.deploymentStatuses || {},
      hasActiveWebhook: state.hasActiveWebhook || false,
    }

    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowState as any)

@@ -89,7 +89,6 @@ export async function GET(request: NextRequest) {
      // Use normalized table data - construct state from normalized tables
      workflowState = {
        deploymentStatuses: {},
        hasActiveWebhook: false,
        blocks: normalizedData.blocks,
        edges: normalizedData.edges,
        loops: normalizedData.loops,

@@ -86,7 +86,6 @@ export function DiffControls() {
      lastSaved: rawState.lastSaved || Date.now(),
      isDeployed: rawState.isDeployed || false,
      deploymentStatuses: rawState.deploymentStatuses || {},
      hasActiveWebhook: rawState.hasActiveWebhook || false,
      // Only include deployedAt if it's a valid date, never include null/undefined
      ...(rawState.deployedAt && rawState.deployedAt instanceof Date
        ? { deployedAt: rawState.deployedAt }

@@ -30,6 +30,7 @@ import { Textarea } from '@/components/ui/textarea'
import { cn } from '@/lib/utils'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import { formatEditSequence } from '@/lib/workflows/training/compute-edit-sequence'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'

/**
@@ -52,6 +53,8 @@ export function TrainingModal() {
    markDatasetSent,
  } = useCopilotTrainingStore()

  const currentWorkflow = useCurrentWorkflow()

  const [localPrompt, setLocalPrompt] = useState(currentPrompt)
  const [localTitle, setLocalTitle] = useState(currentTitle)
  const [copiedId, setCopiedId] = useState<string | null>(null)
@@ -63,6 +66,11 @@ export function TrainingModal() {
  const [sendingSelected, setSendingSelected] = useState(false)
  const [sentDatasets, setSentDatasets] = useState<Set<string>>(new Set())
  const [failedDatasets, setFailedDatasets] = useState<Set<string>>(new Set())
  const [sendingLiveWorkflow, setSendingLiveWorkflow] = useState(false)
  const [liveWorkflowSent, setLiveWorkflowSent] = useState(false)
  const [liveWorkflowFailed, setLiveWorkflowFailed] = useState(false)
  const [liveWorkflowTitle, setLiveWorkflowTitle] = useState('')
  const [liveWorkflowDescription, setLiveWorkflowDescription] = useState('')

  const handleStart = () => {
    if (localTitle.trim() && localPrompt.trim()) {
@@ -285,6 +293,46 @@ export function TrainingModal() {
    }
  }

  const handleSendLiveWorkflow = async () => {
    if (!liveWorkflowTitle.trim() || !liveWorkflowDescription.trim()) {
      return
    }

    setLiveWorkflowSent(false)
    setLiveWorkflowFailed(false)
    setSendingLiveWorkflow(true)

    try {
      const sanitizedWorkflow = sanitizeForCopilot(currentWorkflow.workflowState)

      const response = await fetch('/api/copilot/training/examples', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          json: JSON.stringify(sanitizedWorkflow),
          source_path: liveWorkflowTitle,
          summary: liveWorkflowDescription,
        }),
      })

      if (!response.ok) {
        const error = await response.json()
        throw new Error(error.error || 'Failed to send live workflow')
      }

      setLiveWorkflowSent(true)
      setLiveWorkflowTitle('')
      setLiveWorkflowDescription('')
      setTimeout(() => setLiveWorkflowSent(false), 5000)
    } catch (error) {
      console.error('Failed to send live workflow:', error)
      setLiveWorkflowFailed(true)
      setTimeout(() => setLiveWorkflowFailed(false), 5000)
    } finally {
      setSendingLiveWorkflow(false)
    }
  }

  return (
    <Dialog open={showModal} onOpenChange={toggleModal}>
      <DialogContent className='max-w-3xl'>
@@ -335,24 +383,24 @@ export function TrainingModal() {
        )}

        <Tabs defaultValue={isTraining ? 'datasets' : 'new'} className='mt-4'>
          <TabsList className='grid w-full grid-cols-2'>
          <TabsList className='grid w-full grid-cols-3'>
            <TabsTrigger value='new' disabled={isTraining}>
              New Session
            </TabsTrigger>
            <TabsTrigger value='datasets'>Datasets ({datasets.length})</TabsTrigger>
            <TabsTrigger value='live'>Send Live State</TabsTrigger>
          </TabsList>

          {/* New Training Session Tab */}
          <TabsContent value='new' className='space-y-4'>
            {startSnapshot && (
              <div className='rounded-lg border bg-muted/50 p-3'>
                <p className='font-medium text-muted-foreground text-sm'>Current Workflow State</p>
                <p className='text-sm'>
                  {Object.keys(startSnapshot.blocks).length} blocks, {startSnapshot.edges.length}{' '}
                  edges
                </p>
              </div>
            )}
            <div className='rounded-lg border bg-muted/50 p-3'>
              <p className='mb-2 font-medium text-muted-foreground text-sm'>
                Current Workflow State
              </p>
              <p className='text-sm'>
                {currentWorkflow.getBlockCount()} blocks, {currentWorkflow.getEdgeCount()} edges
              </p>
            </div>

            <div className='space-y-2'>
              <Label htmlFor='title'>Title</Label>
@@ -628,6 +676,94 @@ export function TrainingModal() {
              </>
            )}
          </TabsContent>

          {/* Send Live State Tab */}
          <TabsContent value='live' className='space-y-4'>
            <div className='rounded-lg border bg-muted/50 p-3'>
              <p className='mb-2 font-medium text-muted-foreground text-sm'>
                Current Workflow State
              </p>
              <p className='text-sm'>
                {currentWorkflow.getBlockCount()} blocks, {currentWorkflow.getEdgeCount()} edges
              </p>
            </div>

            <div className='space-y-2'>
              <Label htmlFor='live-title'>Title</Label>
              <Input
                id='live-title'
                placeholder='e.g., Customer Onboarding Workflow'
                value={liveWorkflowTitle}
                onChange={(e) => setLiveWorkflowTitle(e.target.value)}
              />
              <p className='text-muted-foreground text-xs'>
                A short title identifying this workflow
              </p>
            </div>

            <div className='space-y-2'>
              <Label htmlFor='live-description'>Description</Label>
              <Textarea
                id='live-description'
                placeholder='Describe what this workflow does...'
                value={liveWorkflowDescription}
                onChange={(e) => setLiveWorkflowDescription(e.target.value)}
                rows={3}
              />
              <p className='text-muted-foreground text-xs'>
                Explain the purpose and functionality of this workflow
              </p>
            </div>

            <Button
              onClick={handleSendLiveWorkflow}
              disabled={
                !liveWorkflowTitle.trim() ||
                !liveWorkflowDescription.trim() ||
                sendingLiveWorkflow ||
                currentWorkflow.getBlockCount() === 0
              }
              className='w-full'
            >
              {sendingLiveWorkflow ? (
                <>
                  <div className='mr-2 h-4 w-4 animate-spin rounded-full border-2 border-current border-t-transparent' />
                  Sending...
                </>
              ) : liveWorkflowSent ? (
                <>
                  <CheckCircle2 className='mr-2 h-4 w-4' />
                  Sent Successfully
                </>
              ) : liveWorkflowFailed ? (
                <>
                  <XCircle className='mr-2 h-4 w-4' />
                  Failed - Try Again
                </>
              ) : (
                <>
                  <Send className='mr-2 h-4 w-4' />
                  Send Live Workflow State
                </>
              )}
            </Button>

            {liveWorkflowSent && (
              <div className='rounded-lg border bg-green-50 p-3 dark:bg-green-950/30'>
                <p className='text-green-700 text-sm dark:text-green-300'>
                  Workflow state sent successfully!
                </p>
              </div>
            )}

            {liveWorkflowFailed && (
              <div className='rounded-lg border bg-red-50 p-3 dark:bg-red-950/30'>
                <p className='text-red-700 text-sm dark:text-red-300'>
                  Failed to send workflow state. Please try again.
                </p>
              </div>
            )}
          </TabsContent>
        </Tabs>
      </DialogContent>
    </Dialog>

@@ -19,7 +19,6 @@ export interface CurrentWorkflow {
  deployedAt?: Date
  deploymentStatuses?: Record<string, DeploymentStatus>
  needsRedeployment?: boolean
  hasActiveWebhook?: boolean

  // Mode information
  isDiffMode: boolean
@@ -66,7 +65,6 @@ export function useCurrentWorkflow(): CurrentWorkflow {
    deployedAt: activeWorkflow.deployedAt,
    deploymentStatuses: activeWorkflow.deploymentStatuses,
    needsRedeployment: activeWorkflow.needsRedeployment,
    hasActiveWebhook: activeWorkflow.hasActiveWebhook,

    // Mode information - update to reflect ready state
    isDiffMode: shouldUseDiff,

@@ -204,16 +204,19 @@ export async function applyAutoLayoutAndUpdateStore(
  useWorkflowStore.getState().updateLastSaved()

  // Clean up the workflow state for API validation
  // Destructure out UI-only fields that shouldn't be persisted
  const { deploymentStatuses, needsRedeployment, dragStartPosition, ...stateToSave } =
    newWorkflowState

  const cleanedWorkflowState = {
    ...newWorkflowState,
    ...stateToSave,
    // Convert null dates to undefined (since they're optional)
    deployedAt: newWorkflowState.deployedAt ? new Date(newWorkflowState.deployedAt) : undefined,
    deployedAt: stateToSave.deployedAt ? new Date(stateToSave.deployedAt) : undefined,
    // Ensure other optional fields are properly handled
    loops: newWorkflowState.loops || {},
    parallels: newWorkflowState.parallels || {},
    deploymentStatuses: newWorkflowState.deploymentStatuses || {},
    loops: stateToSave.loops || {},
    parallels: stateToSave.parallels || {},
    // Sanitize edges: remove null/empty handle fields to satisfy schema (optional strings)
    edges: (newWorkflowState.edges || []).map((edge: any) => {
    edges: (stateToSave.edges || []).map((edge: any) => {
      const { sourceHandle, targetHandle, ...rest } = edge || {}
      const sanitized: any = { ...rest }
      if (typeof sourceHandle === 'string' && sourceHandle.length > 0) {

@@ -19,10 +19,6 @@ import { Dialog, DialogOverlay, DialogPortal, DialogTitle } from '@/components/u
import { Input } from '@/components/ui/input'
import { useBrandConfig } from '@/lib/branding/branding'
import { cn } from '@/lib/utils'
import {
  TemplateCard,
  TemplateCardSkeleton,
} from '@/app/workspace/[workspaceId]/templates/components/template-card'
import { getKeyboardShortcutText } from '@/app/workspace/[workspaceId]/w/hooks/use-keyboard-shortcuts'
import { getAllBlocks } from '@/blocks'
import { type NavigationSection, useSearchNavigation } from './hooks/use-search-navigation'
@@ -30,28 +26,12 @@ import { type NavigationSection, useSearchNavigation } from './hooks/use-search-
interface SearchModalProps {
  open: boolean
  onOpenChange: (open: boolean) => void
  templates?: TemplateData[]
  workflows?: WorkflowItem[]
  workspaces?: WorkspaceItem[]
  loading?: boolean
  knowledgeBases?: KnowledgeBaseItem[]
  isOnWorkflowPage?: boolean
}

interface TemplateData {
  id: string
  title: string
  description: string
  author: string
  usageCount: string
  stars: number
  icon: string
  iconColor: string
  state?: {
    blocks?: Record<string, { type: string; name?: string }>
  }
  isStarred?: boolean
}

interface WorkflowItem {
  id: string
  name: string
@@ -93,6 +73,14 @@ interface PageItem {
  shortcut?: string
}

interface KnowledgeBaseItem {
  id: string
  name: string
  description?: string
  href: string
  isCurrent?: boolean
}

interface DocItem {
  id: string
  name: string
@@ -104,10 +92,9 @@
export function SearchModal({
  open,
  onOpenChange,
  templates = [],
  workflows = [],
  workspaces = [],
  loading = false,
  knowledgeBases = [],
  isOnWorkflowPage = false,
}: SearchModalProps) {
  const [searchQuery, setSearchQuery] = useState('')
@@ -116,14 +103,6 @@ export function SearchModal({
  const workspaceId = params.workspaceId as string
  const brand = useBrandConfig()

  // Local state for templates to handle star changes
  const [localTemplates, setLocalTemplates] = useState<TemplateData[]>(templates)

  // Update local templates when props change
  useEffect(() => {
    setLocalTemplates(templates)
  }, [templates])

  // Get all available blocks - only when on workflow page
  const blocks = useMemo(() => {
    if (!isOnWorkflowPage) return []
@@ -131,10 +110,7 @@ export function SearchModal({
    const allBlocks = getAllBlocks()
    const regularBlocks = allBlocks
      .filter(
        (block) =>
          block.type !== 'starter' &&
          !block.hideFromToolbar &&
          (block.category === 'blocks' || block.category === 'triggers')
        (block) => block.type !== 'starter' && !block.hideFromToolbar && block.category === 'blocks'
      )
      .map(
        (block): BlockItem => ({
@@ -171,6 +147,30 @@ export function SearchModal({
    return [...regularBlocks, ...specialBlocks].sort((a, b) => a.name.localeCompare(b.name))
  }, [isOnWorkflowPage])

  // Get all available triggers - only when on workflow page
  const triggers = useMemo(() => {
    if (!isOnWorkflowPage) return []

    const allBlocks = getAllBlocks()
    return allBlocks
      .filter(
        (block) =>
          block.type !== 'starter' && !block.hideFromToolbar && block.category === 'triggers'
      )
      .map(
        (block): BlockItem => ({
          id: block.type,
          name: block.name,
          description: block.description || '',
          longDescription: block.longDescription,
          icon: block.icon,
          bgColor: block.bgColor || '#6B7280',
          type: block.type,
        })
      )
      .sort((a, b) => a.name.localeCompare(b.name))
  }, [isOnWorkflowPage])

  // Get all available tools - only when on workflow page
  const tools = useMemo(() => {
    if (!isOnWorkflowPage) return []
@@ -252,24 +252,18 @@ export function SearchModal({
    return blocks.filter((block) => block.name.toLowerCase().includes(query))
  }, [blocks, searchQuery])

  const filteredTriggers = useMemo(() => {
    if (!searchQuery.trim()) return triggers
    const query = searchQuery.toLowerCase()
    return triggers.filter((trigger) => trigger.name.toLowerCase().includes(query))
  }, [triggers, searchQuery])

  const filteredTools = useMemo(() => {
    if (!searchQuery.trim()) return tools
    const query = searchQuery.toLowerCase()
    return tools.filter((tool) => tool.name.toLowerCase().includes(query))
  }, [tools, searchQuery])

  const filteredTemplates = useMemo(() => {
    if (!searchQuery.trim()) return localTemplates.slice(0, 8)
    const query = searchQuery.toLowerCase()
    return localTemplates
      .filter(
        (template) =>
          template.title.toLowerCase().includes(query) ||
          template.description.toLowerCase().includes(query)
      )
      .slice(0, 8)
  }, [localTemplates, searchQuery])

  const filteredWorkflows = useMemo(() => {
    if (!searchQuery.trim()) return workflows
    const query = searchQuery.toLowerCase()
@@ -282,6 +276,14 @@ export function SearchModal({
    return workspaces.filter((workspace) => workspace.name.toLowerCase().includes(query))
  }, [workspaces, searchQuery])

  const filteredKnowledgeBases = useMemo(() => {
    if (!searchQuery.trim()) return knowledgeBases
    const query = searchQuery.toLowerCase()
    return knowledgeBases.filter(
      (kb) => kb.name.toLowerCase().includes(query) || kb.description?.toLowerCase().includes(query)
    )
  }, [knowledgeBases, searchQuery])

  const filteredPages = useMemo(() => {
    if (!searchQuery.trim()) return pages
    const query = searchQuery.toLowerCase()
@@ -308,6 +310,16 @@ export function SearchModal({
      })
    }

    if (filteredTriggers.length > 0) {
      sections.push({
        id: 'triggers',
        name: 'Triggers',
        type: 'grid',
        items: filteredTriggers,
        gridCols: filteredTriggers.length, // Single row - all items in one row
      })
    }

    if (filteredTools.length > 0) {
      sections.push({
        id: 'tools',
@@ -318,20 +330,11 @@ export function SearchModal({
      })
    }

    if (filteredTemplates.length > 0) {
      sections.push({
        id: 'templates',
        name: 'Templates',
        type: 'grid',
        items: filteredTemplates,
        gridCols: filteredTemplates.length, // Single row - all templates in one row
      })
    }

    // Combine all list items into one section
    const listItems = [
      ...filteredWorkspaces.map((item) => ({ type: 'workspace', data: item })),
      ...filteredWorkflows.map((item) => ({ type: 'workflow', data: item })),
      ...filteredKnowledgeBases.map((item) => ({ type: 'knowledgebase', data: item })),
      ...filteredPages.map((item) => ({ type: 'page', data: item })),
      ...filteredDocs.map((item) => ({ type: 'doc', data: item })),
    ]
@@ -348,10 +351,11 @@ export function SearchModal({
    return sections
  }, [
    filteredBlocks,
    filteredTriggers,
    filteredTools,
    filteredTemplates,
    filteredWorkspaces,
    filteredWorkflows,
    filteredKnowledgeBases,
    filteredPages,
    filteredDocs,
  ])
@@ -463,23 +467,6 @@ export function SearchModal({
    return () => window.removeEventListener('keydown', handleKeyDown)
  }, [open, handlePageClick, workspaceId])

  // Handle template usage callback (closes modal after template is used)
  const handleTemplateUsed = useCallback(() => {
    onOpenChange(false)
  }, [onOpenChange])

  // Handle star change callback from template card
  const handleStarChange = useCallback(
    (templateId: string, isStarred: boolean, newStarCount: number) => {
      setLocalTemplates((prevTemplates) =>
        prevTemplates.map((template) =>
          template.id === templateId ? { ...template, isStarred, stars: newStarCount } : template
        )
      )
    },
    []
  )

  // Handle item selection based on current item
  const handleItemSelection = useCallback(() => {
    const current = getCurrentItem()
@@ -487,11 +474,8 @@ export function SearchModal({

    const { section, item } = current

    if (section.id === 'blocks' || section.id === 'tools') {
    if (section.id === 'blocks' || section.id === 'triggers' || section.id === 'tools') {
      handleBlockClick(item.type)
    } else if (section.id === 'templates') {
      // Templates don't have direct selection, but we close the modal
      onOpenChange(false)
    } else if (section.id === 'list') {
      switch (item.type) {
        case 'workspace':
@@ -508,6 +492,13 @@ export function SearchModal({
            handleNavigationClick(item.data.href)
          }
          break
        case 'knowledgebase':
          if (item.data.isCurrent) {
            onOpenChange(false)
          } else {
            handleNavigationClick(item.data.href)
          }
          break
        case 'page':
          handlePageClick(item.data.href)
          break
@@ -570,15 +561,6 @@ export function SearchModal({
    [getCurrentItem]
  )

  // Render skeleton cards for loading state
  const renderSkeletonCards = () => {
    return Array.from({ length: 8 }).map((_, index) => (
      <div key={`skeleton-${index}`} className='w-80 flex-shrink-0'>
        <TemplateCardSkeleton />
      </div>
    ))
  }

  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogPortal>
@@ -654,6 +636,52 @@ export function SearchModal({
        </div>
      )}

      {/* Triggers Section */}
      {filteredTriggers.length > 0 && (
        <div>
          <h3 className='mb-3 ml-6 font-normal font-sans text-muted-foreground text-sm leading-none tracking-normal'>
            Triggers
          </h3>
          <div
            ref={(el) => {
              if (el) scrollRefs.current.set('triggers', el)
            }}
            className='scrollbar-none flex gap-2 overflow-x-auto px-6 pb-1'
            style={{ scrollbarWidth: 'none', msOverflowStyle: 'none' }}
          >
            {filteredTriggers.map((trigger, index) => (
              <button
                key={trigger.id}
                onClick={() => handleBlockClick(trigger.type)}
                data-nav-item={`triggers-${index}`}
                className={`flex h-auto w-[180px] flex-shrink-0 cursor-pointer flex-col items-start gap-2 rounded-[8px] border p-3 transition-all duration-200 ${
                  isItemSelected('triggers', index)
                    ? 'border-border bg-secondary/80'
                    : 'border-border/40 bg-background/60 hover:border-border hover:bg-secondary/80'
                }`}
              >
                <div className='flex items-center gap-2'>
                  <div
                    className='flex h-5 w-5 items-center justify-center rounded-[4px]'
                    style={{ backgroundColor: trigger.bgColor }}
                  >
                    <trigger.icon className='!h-3.5 !w-3.5 text-white' />
                  </div>
                  <span className='font-medium font-sans text-foreground text-sm leading-none tracking-normal'>
                    {trigger.name}
                  </span>
                </div>
                {(trigger.longDescription || trigger.description) && (
                  <p className='line-clamp-2 text-left text-muted-foreground text-xs'>
                    {trigger.longDescription || trigger.description}
                  </p>
                )}
              </button>
            ))}
          </div>
        </div>
      )}

      {/* Tools Section */}
      {filteredTools.length > 0 && (
        <div>
@@ -700,49 +728,6 @@ export function SearchModal({
        </div>
      )}

      {/* Templates Section */}
      {(loading || filteredTemplates.length > 0) && (
        <div>
          <h3 className='mb-3 ml-6 font-normal font-sans text-muted-foreground text-sm leading-none tracking-normal'>
            Templates
          </h3>
          <div
            ref={(el) => {
              if (el) scrollRefs.current.set('templates', el)
            }}
            className='scrollbar-none flex gap-4 overflow-x-auto pr-6 pb-1 pl-6'
            style={{ scrollbarWidth: 'none', msOverflowStyle: 'none' }}
          >
            {loading
              ? renderSkeletonCards()
              : filteredTemplates.map((template, index) => (
                  <div
                    key={template.id}
                    data-nav-item={`templates-${index}`}
                    className={`w-80 flex-shrink-0 rounded-lg transition-all duration-200 ${
                      isItemSelected('templates', index) ? 'opacity-75' : 'opacity-100'
                    }`}
                  >
                    <TemplateCard
                      id={template.id}
                      title={template.title}
                      description={template.description}
                      author={template.author}
                      usageCount={template.usageCount}
                      stars={template.stars}
                      icon={template.icon}
                      iconColor={template.iconColor}
                      state={template.state}
                      isStarred={template.isStarred}
                      onTemplateUsed={handleTemplateUsed}
                      onStarChange={handleStarChange}
                    />
                  </div>
                ))}
          </div>
        </div>
      )}

      {/* List sections (Workspaces, Workflows, Pages, Docs) */}
      {navigationSections.find((s) => s.id === 'list') && (
        <div
@@ -826,6 +811,43 @@ export function SearchModal({
        </div>
      )}

      {/* Knowledge Bases */}
      {filteredKnowledgeBases.length > 0 && (
        <div className='mb-6'>
          <h3 className='mb-3 ml-6 font-normal font-sans text-muted-foreground text-sm leading-none tracking-normal'>
            Knowledge Bases
          </h3>
          <div className='space-y-1 px-6'>
            {filteredKnowledgeBases.map((kb, kbIndex) => {
              const globalIndex =
                filteredWorkspaces.length + filteredWorkflows.length + kbIndex
              return (
                <button
                  key={kb.id}
                  onClick={() =>
                    kb.isCurrent ? onOpenChange(false) : handleNavigationClick(kb.href)
                  }
                  data-nav-item={`list-${globalIndex}`}
                  className={`flex h-10 w-full items-center gap-3 rounded-[8px] px-3 py-2 transition-colors focus:outline-none ${
                    isItemSelected('list', globalIndex)
                      ? 'bg-accent text-accent-foreground'
                      : 'hover:bg-accent/60 focus:bg-accent/60'
                  }`}
                >
                  <div className='flex h-5 w-5 items-center justify-center'>
                    <LibraryBig className='h-4 w-4 text-muted-foreground' />
                  </div>
                  <span className='flex-1 text-left font-normal font-sans text-muted-foreground text-sm leading-none tracking-normal'>
                    {kb.name}
                    {kb.isCurrent && ' (current)'}
                  </span>
                </button>
              )
            })}
          </div>
        </div>
      )}

      {/* Pages */}
      {filteredPages.length > 0 && (
        <div className='mb-6'>
@@ -835,7 +857,10 @@ export function SearchModal({
          <div className='space-y-1 px-6'>
            {filteredPages.map((page, pageIndex) => {
              const globalIndex =
                filteredWorkspaces.length + filteredWorkflows.length + pageIndex
                filteredWorkspaces.length +
                filteredWorkflows.length +
                filteredKnowledgeBases.length +
                pageIndex
              return (
                <button
                  key={page.id}
@@ -872,6 +897,7 @@ export function SearchModal({
              const globalIndex =
                filteredWorkspaces.length +
                filteredWorkflows.length +
                filteredKnowledgeBases.length +
                filteredPages.length +
                docIndex
              return (
@@ -902,14 +928,14 @@ export function SearchModal({

      {/* Empty state */}
      {searchQuery &&
        !loading &&
        filteredWorkflows.length === 0 &&
        filteredWorkspaces.length === 0 &&
        filteredKnowledgeBases.length === 0 &&
        filteredPages.length === 0 &&
        filteredDocs.length === 0 &&
        filteredBlocks.length === 0 &&
        filteredTools.length === 0 &&
        filteredTemplates.length === 0 && (
        filteredTriggers.length === 0 &&
        filteredTools.length === 0 && (
          <div className='ml-6 py-12 text-center'>
            <p className='text-muted-foreground'>No results found for "{searchQuery}"</p>
          </div>

@@ -18,6 +18,8 @@ import { getEnv, isTruthy } from '@/lib/env'
import { isHosted } from '@/lib/environment'
import { cn } from '@/lib/utils'
import { useOrganizationStore } from '@/stores/organization'
import { useGeneralStore } from '@/stores/settings/general/store'
import { useSubscriptionStore } from '@/stores/subscription/store'

const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))

@@ -200,6 +202,21 @@ export function SettingsNavigation({
      {navigationItems.map((item) => (
        <div key={item.id} className='mb-1'>
          <button
            onMouseEnter={() => {
              switch (item.id) {
                case 'general':
                  useGeneralStore.getState().loadSettings()
                  break
                case 'subscription':
                  useSubscriptionStore.getState().loadData()
                  break
                case 'team':
                  useOrganizationStore.getState().loadData()
                  break
                default:
                  break
              }
            }}
            onClick={() => onSectionChange(item.id)}
            className={cn(
              'group flex h-9 w-full cursor-pointer items-center rounded-[8px] px-2 py-2 font-medium font-sans text-sm transition-colors',

@@ -21,6 +21,7 @@ import {
  getVisiblePlans,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/subscription-permissions'
import { useOrganizationStore } from '@/stores/organization'
import { useGeneralStore } from '@/stores/settings/general/store'
import { useSubscriptionStore } from '@/stores/subscription/store'

const CONSTANTS = {
@@ -531,32 +532,14 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
}

function BillingUsageNotificationsToggle() {
  const [enabled, setEnabled] = useState<boolean | null>(null)
  const isLoading = useGeneralStore((s) => s.isBillingUsageNotificationsLoading)
  const enabled = useGeneralStore((s) => s.isBillingUsageNotificationsEnabled)
  const setEnabled = useGeneralStore((s) => s.setBillingUsageNotificationsEnabled)
  const loadSettings = useGeneralStore((s) => s.loadSettings)

  useEffect(() => {
    let isMounted = true
    const load = async () => {
      const res = await fetch('/api/users/me/settings')
      const json = await res.json()
      const current = json?.data?.billingUsageNotificationsEnabled
      if (isMounted) setEnabled(current !== false)
    }
    load()
    return () => {
      isMounted = false
    }
  }, [])

  const update = async (next: boolean) => {
    setEnabled(next)
    await fetch('/api/users/me/settings', {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ billingUsageNotificationsEnabled: next }),
    })
  }

  if (enabled === null) return null
    void loadSettings()
  }, [loadSettings])

  return (
    <div className='mt-4 flex items-center justify-between'>
@@ -564,7 +547,13 @@ function BillingUsageNotificationsToggle() {
        <span className='font-medium text-sm'>Usage notifications</span>
        <span className='text-muted-foreground text-xs'>Email me when I reach 80% usage</span>
      </div>
      <Switch checked={enabled} onCheckedChange={(v: boolean) => update(v)} />
      <Switch
        checked={!!enabled}
        disabled={isLoading}
        onCheckedChange={(v: boolean) => {
          void setEnabled(v)
        }}
      />
    </div>
  )
}

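The toggle above stops fetching `/api/users/me/settings` itself and instead reads from the shared general-settings store, so repeated opens of the modal reuse one load. A rough sketch of the store slice this change assumes (selector names are taken from the diff; the implementation here is illustrative, not the repo's actual store):

// Hypothetical zustand slice matching the selectors used above.
import { create } from 'zustand'

interface GeneralStore {
  isBillingUsageNotificationsLoading: boolean
  isBillingUsageNotificationsEnabled: boolean
  loadSettings: () => Promise<void>
  setBillingUsageNotificationsEnabled: (next: boolean) => Promise<void>
}

export const useGeneralStore = create<GeneralStore>((set) => ({
  isBillingUsageNotificationsLoading: false,
  isBillingUsageNotificationsEnabled: true,
  loadSettings: async () => {
    set({ isBillingUsageNotificationsLoading: true })
    const res = await fetch('/api/users/me/settings')
    const json = await res.json()
    set({
      isBillingUsageNotificationsEnabled: json?.data?.billingUsageNotificationsEnabled !== false,
      isBillingUsageNotificationsLoading: false,
    })
  },
  setBillingUsageNotificationsEnabled: async (next) => {
    set({ isBillingUsageNotificationsEnabled: next }) // optimistic update
    await fetch('/api/users/me/settings', {
      method: 'PATCH',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ billingUsageNotificationsEnabled: next }),
    })
  },
}))
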
@@ -32,6 +32,7 @@ import {
  getKeyboardShortcutText,
  useGlobalShortcuts,
} from '@/app/workspace/[workspaceId]/w/hooks/use-keyboard-shortcuts'
import { useKnowledgeBasesList } from '@/hooks/use-knowledge'
import { useSubscriptionStore } from '@/stores/subscription/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -115,6 +116,9 @@ export function Sidebar() {
  const [templates, setTemplates] = useState<TemplateData[]>([])
  const [isTemplatesLoading, setIsTemplatesLoading] = useState(false)

  // Knowledge bases for search modal
  const { knowledgeBases } = useKnowledgeBasesList(workspaceId)

  // Refs
  const workflowScrollAreaRef = useRef<HTMLDivElement | null>(null)
  const workspaceIdRef = useRef<string>(workspaceId)
@@ -726,6 +730,17 @@ export function Sidebar() {
    }))
  }, [workspaces, workspaceId])

  // Prepare knowledge bases for search modal
  const searchKnowledgeBases = useMemo(() => {
    return knowledgeBases.map((kb) => ({
      id: kb.id,
      name: kb.name,
      description: kb.description,
      href: `/workspace/${workspaceId}/knowledge/${kb.id}`,
      isCurrent: knowledgeBaseId === kb.id,
    }))
  }, [knowledgeBases, workspaceId, knowledgeBaseId])

  // Create workflow handler
  const handleCreateWorkflow = async (folderId?: string): Promise<string> => {
    if (isCreatingWorkflow) {
@@ -1035,10 +1050,9 @@ export function Sidebar() {
      <SearchModal
        open={showSearchModal}
        onOpenChange={setShowSearchModal}
        templates={templates}
        workflows={searchWorkflows}
        workspaces={searchWorkspaces}
        loading={isTemplatesLoading}
        knowledgeBases={searchKnowledgeBases}
        isOnWorkflowPage={isOnWorkflowPage}
      />
    </>

@@ -382,7 +382,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
      isDeployed: workflowState.isDeployed ?? false,
      deployedAt: workflowState.deployedAt,
      deploymentStatuses: workflowState.deploymentStatuses || {},
      hasActiveWebhook: workflowState.hasActiveWebhook ?? false,
    })

    // Replace subblock store values for this workflow

@@ -479,7 +479,6 @@ export function useCollaborativeWorkflow() {
      isDeployed: workflowData.state.isDeployed || false,
      deployedAt: workflowData.state.deployedAt,
      lastSaved: workflowData.state.lastSaved || Date.now(),
      hasActiveWebhook: workflowData.state.hasActiveWebhook || false,
      deploymentStatuses: workflowData.state.deploymentStatuses || {},
    })

@@ -5,7 +5,34 @@
 * It respects the user's telemetry preferences stored in localStorage.
 *
 */
import { env } from './lib/env'
import posthog from 'posthog-js'
import { env, getEnv, isTruthy } from './lib/env'

// Initialize PostHog only if explicitly enabled
if (isTruthy(getEnv('NEXT_PUBLIC_POSTHOG_ENABLED')) && getEnv('NEXT_PUBLIC_POSTHOG_KEY')) {
  posthog.init(getEnv('NEXT_PUBLIC_POSTHOG_KEY')!, {
    api_host: '/ingest',
    ui_host: 'https://us.posthog.com',
    person_profiles: 'identified_only',
    capture_pageview: true,
    capture_pageleave: true,
    capture_performance: true,
    session_recording: {
      maskAllInputs: false,
      maskInputOptions: {
        password: true,
        email: false,
      },
      recordCrossOriginIframes: false,
      recordHeaders: true,
      recordBody: true,
    },
    autocapture: true,
    capture_dead_clicks: true,
    persistence: 'localStorage+cookie',
    enable_heatmaps: true,
  })
}

if (typeof window !== 'undefined') {
  const TELEMETRY_STATUS_KEY = 'simstudio-telemetry-status'

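A note on `api_host: '/ingest'` in the PostHog init above: pointing the SDK at the app's own origin implies a reverse-proxy rewrite to PostHog's servers, the usual way to keep events from being dropped by ad-blockers. The rewrite itself is not part of this diff; a plausible Next.js config following PostHog's documented proxy pattern would look like this (assumption, not repo code):

// next.config.js (hypothetical; the actual rewrite is not shown in this commit)
module.exports = {
  async rewrites() {
    return [
      {
        source: '/ingest/static/:path*',
        destination: 'https://us-assets.i.posthog.com/static/:path*',
      },
      {
        source: '/ingest/:path*',
        destination: 'https://us.i.posthog.com/:path*',
      },
    ]
  },
}
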
@@ -268,7 +268,6 @@ async function processWorkflowFromDb(
  logger.info('Processed sanitized workflow context', {
    workflowId,
    blocks: Object.keys(sanitizedState.blocks || {}).length,
    edges: sanitizedState.edges.length,
  })
  // Use the provided kind for the type
  return { type: kind, tag, content }

@@ -262,6 +262,14 @@ const ExecutionEntry = z.object({
  totalTokens: z.number().nullable(),
  blockExecutions: z.array(z.any()), // can be detailed per need
  output: z.any().optional(),
  errorMessage: z.string().optional(),
  errorBlock: z
    .object({
      blockId: z.string().optional(),
      blockName: z.string().optional(),
      blockType: z.string().optional(),
    })
    .optional(),
})

export const ToolResultSchemas = {

@@ -98,7 +98,35 @@ export class EditWorkflowClientTool extends BaseClientTool {

    // Prepare currentUserWorkflow JSON from stores to preserve block IDs
    let currentUserWorkflow = args?.currentUserWorkflow
    if (!currentUserWorkflow) {
    const diffStoreState = useWorkflowDiffStore.getState()
    let usedDiffWorkflow = false

    if (!currentUserWorkflow && diffStoreState.isDiffReady && diffStoreState.diffWorkflow) {
      try {
        const diffWorkflow = diffStoreState.diffWorkflow
        const normalizedDiffWorkflow = {
          ...diffWorkflow,
          blocks: diffWorkflow.blocks || {},
          edges: diffWorkflow.edges || [],
          loops: diffWorkflow.loops || {},
          parallels: diffWorkflow.parallels || {},
        }
        currentUserWorkflow = JSON.stringify(normalizedDiffWorkflow)
        usedDiffWorkflow = true
        logger.info('Using diff workflow state as base for edit_workflow operations', {
          toolCallId: this.toolCallId,
          blocksCount: Object.keys(normalizedDiffWorkflow.blocks).length,
          edgesCount: normalizedDiffWorkflow.edges.length,
        })
      } catch (e) {
        logger.warn(
          'Failed to serialize diff workflow state; falling back to active workflow',
          e as any
        )
      }
    }

    if (!currentUserWorkflow && !usedDiffWorkflow) {
      try {
        const workflowStore = useWorkflowStore.getState()
        const fullState = workflowStore.getWorkflowState()

@@ -77,13 +77,13 @@ export interface CopilotBlockMetadata {
  name: string
  description: string
  bestPractices?: string
  commonParameters: CopilotSubblockMetadata[]
  inputs?: Record<string, any>
  inputSchema: CopilotSubblockMetadata[]
  inputDefinitions?: Record<string, any>
  triggerAllowed?: boolean
  authType?: 'OAuth' | 'API Key' | 'Bot Token'
  tools: CopilotToolMetadata[]
  triggers: CopilotTriggerMetadata[]
  operationParameters: Record<string, CopilotSubblockMetadata[]>
  operationInputSchema: Record<string, CopilotSubblockMetadata[]>
  operations?: Record<
    string,
    {
@@ -92,7 +92,7 @@ export interface CopilotBlockMetadata {
      description?: string
      inputs?: Record<string, any>
      outputs?: Record<string, any>
      parameters?: CopilotSubblockMetadata[]
      inputSchema?: CopilotSubblockMetadata[]
    }
  >
  yamlDocumentation?: string
@@ -125,11 +125,11 @@ export const getBlocksMetadataServerTool: BaseServerTool<
      id: specialBlock.id,
      name: specialBlock.name,
      description: specialBlock.description || '',
      commonParameters: commonParameters,
      inputs: specialBlock.inputs || {},
      inputSchema: commonParameters,
      inputDefinitions: specialBlock.inputs || {},
      tools: [],
      triggers: [],
      operationParameters,
      operationInputSchema: operationParameters,
    }
    ;(metadata as any).subBlocks = undefined
  } else {
@@ -192,7 +192,7 @@ export const getBlocksMetadataServerTool: BaseServerTool<
        description: toolCfg?.description || undefined,
        inputs: { ...filteredToolParams, ...(operationInputs[opId] || {}) },
        outputs: toolOutputs,
        parameters: operationParameters[opId] || [],
        inputSchema: operationParameters[opId] || [],
      }
    }

@@ -201,13 +201,13 @@ export const getBlocksMetadataServerTool: BaseServerTool<
      name: blockConfig.name || blockId,
      description: blockConfig.longDescription || blockConfig.description || '',
      bestPractices: blockConfig.bestPractices,
      commonParameters: commonParameters,
      inputs: blockInputs,
      inputSchema: commonParameters,
      inputDefinitions: blockInputs,
      triggerAllowed: !!blockConfig.triggerAllowed,
      authType: resolveAuthType(blockConfig.authMode),
      tools,
      triggers,
      operationParameters,
      operationInputSchema: operationParameters,
      operations,
    }
  }
@@ -420,7 +420,7 @@ function splitParametersByOperation(
      operationParameters[key].push(processed)
    }
  } else {
    // Override description from blockInputs if available (by id or canonicalParamId)
    // Override description from inputDefinitions if available (by id or canonicalParamId)
    if (blockInputsForDescriptions) {
      const candidates = [sb.id, sb.canonicalParamId].filter(Boolean)
      for (const key of candidates) {

@@ -11,7 +11,7 @@ import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

interface EditWorkflowOperation {
  operation_type: 'add' | 'edit' | 'delete'
  operation_type: 'add' | 'edit' | 'delete' | 'insert_into_subflow' | 'extract_from_subflow'
  block_id: string
  params?: Record<string, any>
}
@@ -22,6 +22,78 @@ interface EditWorkflowParams {
  currentUserWorkflow?: string
}

/**
 * Helper to create a block state from operation params
 */
function createBlockFromParams(blockId: string, params: any, parentId?: string): any {
  const blockConfig = getAllBlocks().find((b) => b.type === params.type)

  const blockState: any = {
    id: blockId,
    type: params.type,
    name: params.name,
    position: { x: 0, y: 0 },
    enabled: params.enabled !== undefined ? params.enabled : true,
    horizontalHandles: true,
    isWide: false,
    advancedMode: params.advancedMode || false,
    height: 0,
    triggerMode: params.triggerMode || false,
    subBlocks: {},
    outputs: params.outputs || (blockConfig ? resolveOutputType(blockConfig.outputs) : {}),
    data: parentId ? { parentId, extent: 'parent' as const } : {},
  }

  // Add inputs as subBlocks
  if (params.inputs) {
    Object.entries(params.inputs).forEach(([key, value]) => {
      blockState.subBlocks[key] = {
        id: key,
        type: 'short-input',
        value: value,
      }
    })
  }

  // Set up subBlocks from block configuration
  if (blockConfig) {
    blockConfig.subBlocks.forEach((subBlock) => {
      if (!blockState.subBlocks[subBlock.id]) {
        blockState.subBlocks[subBlock.id] = {
          id: subBlock.id,
          type: subBlock.type,
          value: null,
        }
      }
    })
  }

  return blockState
}

/**
 * Helper to add connections as edges for a block
 */
function addConnectionsAsEdges(
  modifiedState: any,
  blockId: string,
  connections: Record<string, any>
): void {
  Object.entries(connections).forEach(([sourceHandle, targets]) => {
    const targetArray = Array.isArray(targets) ? targets : [targets]
    targetArray.forEach((targetId: string) => {
      modifiedState.edges.push({
        id: crypto.randomUUID(),
        source: blockId,
        sourceHandle,
        target: targetId,
        targetHandle: 'target',
        type: 'default',
      })
    })
  })
}

/**
 * Apply operations directly to the workflow JSON state
 */
@@ -43,11 +115,19 @@ function applyOperationsToWorkflowState(
    })),
  })

  // Reorder operations: delete -> add -> edit to ensure consistent application semantics
  // Reorder operations: delete -> extract -> add -> insert -> edit
  const deletes = operations.filter((op) => op.operation_type === 'delete')
  const extracts = operations.filter((op) => op.operation_type === 'extract_from_subflow')
  const adds = operations.filter((op) => op.operation_type === 'add')
  const inserts = operations.filter((op) => op.operation_type === 'insert_into_subflow')
  const edits = operations.filter((op) => op.operation_type === 'edit')
  const orderedOperations: EditWorkflowOperation[] = [...deletes, ...adds, ...edits]
  const orderedOperations: EditWorkflowOperation[] = [
    ...deletes,
    ...extracts,
    ...adds,
    ...inserts,
    ...edits,
  ]

  for (const operation of orderedOperations) {
    const { operation_type, block_id, params } = operation
@@ -105,6 +185,23 @@ function applyOperationsToWorkflowState(
            block.subBlocks[key].value = value
          }
        })

        // Update loop/parallel configuration in block.data
        if (block.type === 'loop') {
          block.data = block.data || {}
          if (params.inputs.loopType !== undefined) block.data.loopType = params.inputs.loopType
          if (params.inputs.iterations !== undefined)
            block.data.count = params.inputs.iterations
          if (params.inputs.collection !== undefined)
            block.data.collection = params.inputs.collection
        } else if (block.type === 'parallel') {
          block.data = block.data || {}
          if (params.inputs.parallelType !== undefined)
            block.data.parallelType = params.inputs.parallelType
          if (params.inputs.count !== undefined) block.data.count = params.inputs.count
          if (params.inputs.collection !== undefined)
            block.data.collection = params.inputs.collection
        }
      }

      // Update basic properties
@@ -123,6 +220,50 @@ function applyOperationsToWorkflowState(
        }
      }

      // Handle advanced mode toggle
      if (typeof params?.advancedMode === 'boolean') {
        block.advancedMode = params.advancedMode
      }

      // Handle nested nodes update (for loops/parallels)
      if (params?.nestedNodes) {
        // Remove all existing child blocks
        const existingChildren = Object.keys(modifiedState.blocks).filter(
          (id) => modifiedState.blocks[id].data?.parentId === block_id
        )
        existingChildren.forEach((childId) => delete modifiedState.blocks[childId])

        // Remove edges to/from removed children
        modifiedState.edges = modifiedState.edges.filter(
          (edge: any) =>
            !existingChildren.includes(edge.source) && !existingChildren.includes(edge.target)
        )

        // Add new nested blocks
        Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
          const childBlockState = createBlockFromParams(childId, childBlock, block_id)
          modifiedState.blocks[childId] = childBlockState

          // Add connections for child block
          if (childBlock.connections) {
            addConnectionsAsEdges(modifiedState, childId, childBlock.connections)
          }
        })

        // Update loop/parallel configuration based on type
        if (block.type === 'loop') {
          block.data = block.data || {}
          if (params.inputs?.loopType) block.data.loopType = params.inputs.loopType
          if (params.inputs?.iterations) block.data.count = params.inputs.iterations
          if (params.inputs?.collection) block.data.collection = params.inputs.collection
        } else if (block.type === 'parallel') {
          block.data = block.data || {}
          if (params.inputs?.parallelType) block.data.parallelType = params.inputs.parallelType
          if (params.inputs?.count) block.data.count = params.inputs.count
          if (params.inputs?.collection) block.data.collection = params.inputs.collection
        }
      }

      // Handle connections update (convert to edges)
      if (params?.connections) {
        // Remove existing edges from this block
@@ -191,82 +332,135 @@ function applyOperationsToWorkflowState(
|
||||
|
||||
case 'add': {
|
||||
if (params?.type && params?.name) {
|
||||
// Get block configuration
|
||||
const blockConfig = getAllBlocks().find((block) => block.type === params.type)
|
||||
|
||||
// Create new block with proper structure
|
||||
const newBlock: any = {
|
||||
id: block_id,
|
||||
type: params.type,
|
||||
name: params.name,
|
||||
position: { x: 0, y: 0 }, // Default position
|
||||
enabled: true,
|
||||
horizontalHandles: true,
|
||||
isWide: false,
|
||||
advancedMode: false,
|
||||
height: 0,
|
||||
triggerMode: false,
|
||||
subBlocks: {},
|
||||
outputs: blockConfig ? resolveOutputType(blockConfig.outputs) : {},
|
||||
data: {},
|
||||
}
|
||||
const newBlock = createBlockFromParams(block_id, params)
|
||||
|
||||
// Add inputs as subBlocks
|
||||
if (params.inputs) {
|
||||
Object.entries(params.inputs).forEach(([key, value]) => {
|
||||
newBlock.subBlocks[key] = {
|
||||
id: key,
|
||||
type: 'short-input',
|
||||
value: value,
|
||||
// Handle nested nodes (for loops/parallels created from scratch)
|
||||
if (params.nestedNodes) {
|
||||
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
|
||||
const childBlockState = createBlockFromParams(childId, childBlock, block_id)
|
||||
modifiedState.blocks[childId] = childBlockState
|
||||
|
||||
if (childBlock.connections) {
|
||||
addConnectionsAsEdges(modifiedState, childId, childBlock.connections)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Set up subBlocks from block configuration
|
||||
if (blockConfig) {
|
||||
blockConfig.subBlocks.forEach((subBlock) => {
|
||||
if (!newBlock.subBlocks[subBlock.id]) {
|
||||
newBlock.subBlocks[subBlock.id] = {
|
||||
id: subBlock.id,
|
||||
type: subBlock.type,
|
||||
value: null,
|
||||
}
|
||||
// Set loop/parallel data on parent block
|
||||
if (params.type === 'loop') {
|
||||
newBlock.data = {
|
||||
...newBlock.data,
|
||||
loopType: params.inputs?.loopType || 'for',
|
||||
...(params.inputs?.collection && { collection: params.inputs.collection }),
|
||||
...(params.inputs?.iterations && { count: params.inputs.iterations }),
|
||||
}
|
||||
})
|
||||
} else if (params.type === 'parallel') {
|
||||
newBlock.data = {
|
||||
...newBlock.data,
|
||||
parallelType: params.inputs?.parallelType || 'count',
|
||||
...(params.inputs?.collection && { collection: params.inputs.collection }),
|
||||
...(params.inputs?.count && { count: params.inputs.count }),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
modifiedState.blocks[block_id] = newBlock
|
||||
|
||||
// Add connections as edges
|
||||
if (params.connections) {
|
||||
Object.entries(params.connections).forEach(([sourceHandle, targets]) => {
|
||||
const addEdge = (targetBlock: string, targetHandle?: string) => {
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: block_id,
|
||||
sourceHandle: sourceHandle,
|
||||
target: targetBlock,
|
||||
targetHandle: targetHandle || 'target',
|
||||
type: 'default',
|
||||
})
|
||||
}
|
||||
addConnectionsAsEdges(modifiedState, block_id, params.connections)
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if (typeof targets === 'string') {
|
||||
addEdge(targets)
|
||||
} else if (Array.isArray(targets)) {
|
||||
targets.forEach((target: any) => {
|
||||
if (typeof target === 'string') {
|
||||
addEdge(target)
|
||||
} else if (target?.block) {
|
||||
addEdge(target.block, target.handle)
|
||||
}
|
||||
})
|
||||
} else if (typeof targets === 'object' && (targets as any)?.block) {
|
||||
addEdge((targets as any).block, (targets as any).handle)
|
||||
case 'insert_into_subflow': {
|
||||
const subflowId = params?.subflowId
|
||||
if (!subflowId || !params?.type || !params?.name) {
|
||||
logger.warn('Missing required params for insert_into_subflow', { block_id, params })
|
||||
break
|
||||
}
|
||||
|
||||
const subflowBlock = modifiedState.blocks[subflowId]
|
||||
if (!subflowBlock || (subflowBlock.type !== 'loop' && subflowBlock.type !== 'parallel')) {
|
||||
logger.warn('Subflow block not found or invalid type', {
|
||||
subflowId,
|
||||
type: subflowBlock?.type,
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
// Get block configuration
|
||||
const blockConfig = getAllBlocks().find((block) => block.type === params.type)
|
||||
|
||||
// Check if block already exists (moving into subflow) or is new
|
||||
const existingBlock = modifiedState.blocks[block_id]
|
||||
|
||||
if (existingBlock) {
|
||||
// Moving existing block into subflow - just update parent
|
||||
existingBlock.data = {
|
||||
...existingBlock.data,
|
||||
parentId: subflowId,
|
||||
extent: 'parent' as const,
|
||||
}
|
||||
|
||||
// Update inputs if provided
|
||||
if (params.inputs) {
|
||||
Object.entries(params.inputs).forEach(([key, value]) => {
|
||||
if (!existingBlock.subBlocks[key]) {
|
||||
existingBlock.subBlocks[key] = { id: key, type: 'short-input', value }
|
||||
} else {
|
||||
existingBlock.subBlocks[key].value = value
|
||||
}
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// Create new block as child of subflow
|
||||
const newBlock = createBlockFromParams(block_id, params, subflowId)
|
||||
modifiedState.blocks[block_id] = newBlock
|
||||
}
|
||||
|
||||
// Add/update connections as edges
|
||||
if (params.connections) {
|
||||
// Remove existing edges from this block
|
||||
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
|
||||
|
||||
// Add new connections
|
||||
addConnectionsAsEdges(modifiedState, block_id, params.connections)
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'extract_from_subflow': {
|
||||
const subflowId = params?.subflowId
|
||||
if (!subflowId) {
|
||||
logger.warn('Missing subflowId for extract_from_subflow', { block_id })
|
||||
break
|
||||
}
|
||||
|
||||
const block = modifiedState.blocks[block_id]
|
||||
if (!block) {
|
||||
logger.warn('Block not found for extraction', { block_id })
|
||||
break
|
||||
}
|
||||
|
||||
// Verify it's actually a child of this subflow
|
||||
if (block.data?.parentId !== subflowId) {
|
||||
logger.warn('Block is not a child of specified subflow', {
|
||||
block_id,
|
||||
actualParent: block.data?.parentId,
|
||||
specifiedParent: subflowId,
|
||||
})
|
||||
}
|
||||
|
||||
// Remove parent relationship
|
||||
if (block.data) {
|
||||
block.data.parentId = undefined
|
||||
block.data.extent = undefined
|
||||
}
|
||||
|
||||
// Note: We keep the block and its edges, just remove parent relationship
|
||||
// The block becomes a root-level block
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
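// A minimal sketch of the connection shapes the removed inline handler accepted
// (and that addConnectionsAsEdges is assumed to keep supporting); handles and
// block IDs below are hypothetical:
const exampleConnections: Record<string, any> = {
  source: 'block-b', // single target as a plain string
  condition: ['block-c', { block: 'block-d', handle: 'target' }], // fan-out, optional target handle
}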
@@ -43,6 +43,12 @@ interface ExecutionEntry {
  totalTokens: number | null
  blockExecutions: BlockExecution[]
  output?: any
  errorMessage?: string
  errorBlock?: {
    blockId?: string
    blockName?: string
    blockType?: string
  }
}

function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): BlockExecution[] {
@@ -74,6 +80,140 @@ function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): BlockExecution
  return blockExecutions
}

function normalizeErrorMessage(errorValue: unknown): string | undefined {
  if (!errorValue) return undefined
  if (typeof errorValue === 'string') return errorValue
  if (errorValue instanceof Error) return errorValue.message
  if (typeof errorValue === 'object') {
    try {
      return JSON.stringify(errorValue)
    } catch {}
  }
  try {
    return String(errorValue)
  } catch {
    return undefined
  }
}
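// A quick sanity sketch of the normalization rules above (values are illustrative):
console.assert(normalizeErrorMessage(undefined) === undefined)
console.assert(normalizeErrorMessage('timeout') === 'timeout')
console.assert(normalizeErrorMessage(new Error('boom')) === 'boom')
console.assert(normalizeErrorMessage({ code: 429 }) === '{"code":429}')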
function extractErrorFromExecutionData(executionData: any): ExecutionEntry['errorBlock'] & {
  message?: string
} {
  if (!executionData) return {}

  const errorDetails = executionData.errorDetails
  if (errorDetails) {
    const message = normalizeErrorMessage(errorDetails.error || errorDetails.message)
    if (message) {
      return {
        message,
        blockId: errorDetails.blockId,
        blockName: errorDetails.blockName,
        blockType: errorDetails.blockType,
      }
    }
  }

  const finalOutputError = normalizeErrorMessage(executionData.finalOutput?.error)
  if (finalOutputError) {
    return {
      message: finalOutputError,
      blockName: 'Workflow',
    }
  }

  const genericError = normalizeErrorMessage(executionData.error)
  if (genericError) {
    return {
      message: genericError,
      blockName: 'Workflow',
    }
  }

  return {}
}

function extractErrorFromTraceSpans(traceSpans: any[]): ExecutionEntry['errorBlock'] & {
  message?: string
} {
  if (!Array.isArray(traceSpans) || traceSpans.length === 0) return {}

  const queue = [...traceSpans]
  while (queue.length > 0) {
    const span = queue.shift()
    if (!span || typeof span !== 'object') continue

    const message =
      normalizeErrorMessage(span.output?.error) ||
      normalizeErrorMessage(span.error) ||
      normalizeErrorMessage(span.output?.message) ||
      normalizeErrorMessage(span.message)

    const status = span.status
    if (status === 'error' || message) {
      return {
        message,
        blockId: span.blockId,
        blockName: span.blockName || span.name || (span.blockId ? undefined : 'Workflow'),
        blockType: span.blockType || span.type,
      }
    }

    if (Array.isArray(span.children)) {
      queue.push(...span.children)
    }
  }

  return {}
}

function deriveExecutionErrorSummary(params: {
  blockExecutions: BlockExecution[]
  traceSpans: any[]
  executionData: any
}): { message?: string; block?: ExecutionEntry['errorBlock'] } {
  const { blockExecutions, traceSpans, executionData } = params

  const blockError = blockExecutions.find((block) => block.status === 'error' && block.errorMessage)
  if (blockError) {
    return {
      message: blockError.errorMessage,
      block: {
        blockId: blockError.blockId,
        blockName: blockError.blockName,
        blockType: blockError.blockType,
      },
    }
  }

  const executionDataError = extractErrorFromExecutionData(executionData)
  if (executionDataError.message) {
    return {
      message: executionDataError.message,
      block: {
        blockId: executionDataError.blockId,
        blockName:
          executionDataError.blockName || (executionDataError.blockId ? undefined : 'Workflow'),
        blockType: executionDataError.blockType,
      },
    }
  }

  const traceError = extractErrorFromTraceSpans(traceSpans)
  if (traceError.message) {
    return {
      message: traceError.message,
      block: {
        blockId: traceError.blockId,
        blockName: traceError.blockName || (traceError.blockId ? undefined : 'Workflow'),
        blockType: traceError.blockType,
      },
    }
  }

  return {}
}
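// The summary prefers the most specific source: a failed block execution, then
// structured errorDetails in executionData, then a breadth-first scan of trace
// spans. A minimal sketch with hypothetical data:
const summary = deriveExecutionErrorSummary({
  blockExecutions: [], // no per-block failures recorded
  traceSpans: [{ status: 'error', blockId: 'b1', blockName: 'Agent 1', output: { error: 'rate limited' } }],
  executionData: {}, // no errorDetails and no finalOutput.error
})
// -> { message: 'rate limited', block: { blockId: 'b1', blockName: 'Agent 1', blockType: undefined } }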
export const getWorkflowConsoleServerTool: BaseServerTool<GetWorkflowConsoleArgs, any> = {
  name: 'get_workflow_console',
  async execute(rawArgs: GetWorkflowConsoleArgs): Promise<any> {
@@ -108,7 +248,8 @@ export const getWorkflowConsoleServerTool: BaseServerTool<GetWorkflowConsoleArgs
      .limit(limit)

    const formattedEntries: ExecutionEntry[] = executionLogs.map((log) => {
      const traceSpans = (log.executionData as any)?.traceSpans || []
      const executionData = log.executionData as any
      const traceSpans = executionData?.traceSpans || []
      const blockExecutions = includeDetails ? extractBlockExecutionsFromTraceSpans(traceSpans) : []

      let finalOutput: any
@@ -125,6 +266,12 @@ export const getWorkflowConsoleServerTool: BaseServerTool<GetWorkflowConsoleArgs
        if (outputBlock) finalOutput = outputBlock.outputData
      }

      const { message: errorMessage, block: errorBlock } = deriveExecutionErrorSummary({
        blockExecutions,
        traceSpans,
        executionData,
      })

      return {
        id: log.id,
        executionId: log.executionId,
@@ -137,6 +284,8 @@ export const getWorkflowConsoleServerTool: BaseServerTool<GetWorkflowConsoleArgs
        totalTokens: (log.cost as any)?.tokens?.total ?? null,
        blockExecutions,
        output: finalOutput,
        errorMessage: errorMessage,
        errorBlock: errorBlock,
      }
    })

@@ -17,6 +17,8 @@ export const env = createEnv({
  server: {
    // Core Database & Authentication
    DATABASE_URL: z.string().url(), // Primary database connection string
    DATABASE_SSL: z.enum(['disable', 'prefer', 'require', 'verify-ca', 'verify-full']).optional(), // PostgreSQL SSL mode
    DATABASE_SSL_CA: z.string().optional(), // Base64-encoded CA certificate for SSL verification
    BETTER_AUTH_URL: z.string().url(), // Base URL for Better Auth service
    BETTER_AUTH_SECRET: z.string().min(32), // Secret key for Better Auth JWT signing
    DISABLE_REGISTRATION: z.boolean().optional(), // Flag to disable new user registration
@@ -90,6 +92,7 @@ export const env = createEnv({
    TELEMETRY_ENDPOINT: z.string().url().optional(), // Custom telemetry/analytics endpoint
    COST_MULTIPLIER: z.number().optional(), // Multiplier for cost calculations
    LOG_LEVEL: z.enum(['DEBUG', 'INFO', 'WARN', 'ERROR']).optional(), // Minimum log level to display (defaults to ERROR in production, DEBUG in development)
    POSTHOG_ENABLED: z.boolean().optional(), // Enable PostHog analytics and session recording

    // External Services
    BROWSERBASE_API_KEY: z.string().min(1).optional(), // Browserbase API key for browser automation
@@ -258,6 +261,8 @@ export const env = createEnv({
    // Analytics & Tracking
    NEXT_PUBLIC_GOOGLE_API_KEY: z.string().optional(), // Google API key for client-side API calls
    NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: z.string().optional(), // Google project number for Drive picker
    NEXT_PUBLIC_POSTHOG_ENABLED: z.boolean().optional(), // Enable PostHog analytics (client-side)
    NEXT_PUBLIC_POSTHOG_KEY: z.string().optional(), // PostHog project API key

    // UI Branding & Whitelabeling
    NEXT_PUBLIC_BRAND_NAME: z.string().optional(), // Custom brand name (defaults to "Sim")
@@ -317,6 +322,8 @@ export const env = createEnv({
    NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED: process.env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED,
    NEXT_PUBLIC_E2B_ENABLED: process.env.NEXT_PUBLIC_E2B_ENABLED,
    NEXT_PUBLIC_COPILOT_TRAINING_ENABLED: process.env.NEXT_PUBLIC_COPILOT_TRAINING_ENABLED,
    NEXT_PUBLIC_POSTHOG_ENABLED: process.env.NEXT_PUBLIC_POSTHOG_ENABLED,
    NEXT_PUBLIC_POSTHOG_KEY: process.env.NEXT_PUBLIC_POSTHOG_KEY,
    NODE_ENV: process.env.NODE_ENV,
    NEXT_TELEMETRY_DISABLED: process.env.NEXT_TELEMETRY_DISABLED,
  },

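// A minimal sketch (not the repository's actual wiring) of how DATABASE_SSL and
// DATABASE_SSL_CA could be consumed with node-postgres; the 'pg' dependency and
// the decode step are assumptions for illustration only:
import { Pool } from 'pg'

const ssl =
  !env.DATABASE_SSL || env.DATABASE_SSL === 'disable'
    ? false
    : {
        rejectUnauthorized: env.DATABASE_SSL === 'verify-ca' || env.DATABASE_SSL === 'verify-full',
        // DATABASE_SSL_CA is stored base64-encoded; decode it back to PEM text
        ...(env.DATABASE_SSL_CA && { ca: Buffer.from(env.DATABASE_SSL_CA, 'base64').toString('utf8') }),
      }

const pool = new Pool({ connectionString: env.DATABASE_URL, ssl })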
@@ -2,6 +2,7 @@

import type React from 'react'
import { createContext, useCallback, useEffect, useMemo, useState } from 'react'
import posthog from 'posthog-js'
import { client } from '@/lib/auth-client'

export type AppSession = {
@@ -52,6 +53,25 @@ export function SessionProvider({ children }: { children: React.ReactNode }) {
    loadSession()
  }, [loadSession])

  useEffect(() => {
    if (isPending || typeof posthog.identify !== 'function') {
      return
    }

    try {
      if (data?.user) {
        posthog.identify(data.user.id, {
          email: data.user.email,
          name: data.user.name,
          email_verified: data.user.emailVerified,
          created_at: data.user.createdAt,
        })
      } else {
        posthog.reset()
      }
    } catch {}
  }, [data, isPending])

  const value = useMemo<SessionHookResult>(
    () => ({ data, isPending, error, refetch: loadSession }),
    [data, isPending, error, loadSession]
@@ -214,7 +214,6 @@ const mockWorkflowState: WorkflowState = {
  lastSaved: Date.now(),
  isDeployed: false,
  deploymentStatuses: {},
  hasActiveWebhook: false,
}

describe('Database Helpers', () => {
@@ -452,11 +451,6 @@ describe('Database Helpers', () => {
    )

    expect(result.success).toBe(true)
    expect(result.jsonBlob).toBeDefined()
    expect(result.jsonBlob.blocks).toEqual(mockWorkflowState.blocks)
    expect(result.jsonBlob.edges).toEqual(mockWorkflowState.edges)
    expect(result.jsonBlob.loops).toEqual(mockWorkflowState.loops)
    expect(result.jsonBlob.parallels).toEqual(mockWorkflowState.parallels)

    // Verify transaction was called
    expect(mockTransaction).toHaveBeenCalledTimes(1)
@@ -471,7 +465,6 @@ describe('Database Helpers', () => {
      lastSaved: Date.now(),
      isDeployed: false,
      deploymentStatuses: {},
      hasActiveWebhook: false,
    }

    const mockTransaction = vi.fn().mockImplementation(async (callback) => {
@@ -494,10 +487,6 @@ describe('Database Helpers', () => {
    )

    expect(result.success).toBe(true)
    expect(result.jsonBlob.blocks).toEqual({})
    expect(result.jsonBlob.edges).toEqual([])
    expect(result.jsonBlob.loops).toEqual({})
    expect(result.jsonBlob.parallels).toEqual({})
  })

  it('should return error when transaction fails', async () => {
@@ -650,7 +639,6 @@ describe('Database Helpers', () => {
      lastSaved: Date.now(),
      isDeployed: false,
      deploymentStatuses: {},
      hasActiveWebhook: false,
    }

    it('should successfully migrate workflow from JSON to normalized tables', async () => {
@@ -737,7 +725,6 @@ describe('Database Helpers', () => {
      lastSaved: Date.now(),
      isDeployed: false,
      deploymentStatuses: {},
      hasActiveWebhook: false,
    }

    // Create 1000 blocks
@@ -782,8 +769,6 @@ describe('Database Helpers', () => {
    )

    expect(result.success).toBe(true)
    expect(Object.keys(result.jsonBlob.blocks)).toHaveLength(1000)
    expect(result.jsonBlob.edges).toHaveLength(999)
  })
})

@@ -1020,7 +1005,6 @@ describe('Database Helpers', () => {
      loops: {},
      parallels: {},
      deploymentStatuses: {},
      hasActiveWebhook: false,
    }

    // Mock the transaction for save operation
@@ -1058,10 +1042,6 @@ describe('Database Helpers', () => {
    )
    expect(saveResult.success).toBe(true)

    // Step 6: Verify the JSON blob also preserves advancedMode
    expect(saveResult.jsonBlob?.blocks['agent-original'].advancedMode).toBe(true)
    expect(saveResult.jsonBlob?.blocks['agent-duplicate'].advancedMode).toBe(true)

    // Verify the database insert was called with the correct values
    expect(mockTransaction).toHaveBeenCalled()
  })
@@ -1161,7 +1141,6 @@ describe('Database Helpers', () => {
      loops: {},
      parallels: {},
      deploymentStatuses: {},
      hasActiveWebhook: false,
    }

    // Mock successful save

@@ -150,12 +150,7 @@ export async function loadWorkflowFromNormalizedTables(
  })

  // Sanitize any invalid custom tools in agent blocks to prevent client crashes
  const { blocks: sanitizedBlocks, warnings } = sanitizeAgentToolsInBlocks(blocksMap)
  if (warnings.length > 0) {
    logger.warn(`Sanitized workflow ${workflowId} tools with ${warnings.length} warning(s)`, {
      warnings,
    })
  }
  const { blocks: sanitizedBlocks } = sanitizeAgentToolsInBlocks(blocksMap)

  // Convert edges to the expected format
  const edgesArray: Edge[] = edges.map((edge) => ({
@@ -221,12 +216,11 @@ export async function loadWorkflowFromNormalizedTables(

/**
 * Save workflow state to normalized tables
 * Also returns the JSON blob for backward compatibility
 */
export async function saveWorkflowToNormalizedTables(
  workflowId: string,
  state: WorkflowState
): Promise<{ success: boolean; jsonBlob?: any; error?: string }> {
): Promise<{ success: boolean; error?: string }> {
  try {
    // Start a transaction
    await db.transaction(async (tx) => {
@@ -302,27 +296,9 @@ export async function saveWorkflowToNormalizedTables(
      if (subflowInserts.length > 0) {
        await tx.insert(workflowSubflows).values(subflowInserts)
      }

      return { success: true }
    })

    // Create JSON blob for backward compatibility
    const jsonBlob = {
      blocks: state.blocks,
      edges: state.edges,
      loops: state.loops || {},
      parallels: state.parallels || {},
      lastSaved: Date.now(),
      isDeployed: state.isDeployed,
      deployedAt: state.deployedAt,
      deploymentStatuses: state.deploymentStatuses,
      hasActiveWebhook: state.hasActiveWebhook,
    }

    return {
      success: true,
      jsonBlob,
    }
    return { success: true }
  } catch (error) {
    logger.error(`Error saving workflow ${workflowId} to normalized tables:`, error)
    return {
@@ -359,6 +335,7 @@ export async function migrateWorkflowToNormalizedTables(
): Promise<{ success: boolean; error?: string }> {
  try {
    // Convert JSON state to WorkflowState format
    // Only include fields that are actually persisted to normalized tables
    const workflowState: WorkflowState = {
      blocks: jsonState.blocks || {},
      edges: jsonState.edges || [],
@@ -367,16 +344,9 @@ export async function migrateWorkflowToNormalizedTables(
      lastSaved: jsonState.lastSaved,
      isDeployed: jsonState.isDeployed,
      deployedAt: jsonState.deployedAt,
      deploymentStatuses: jsonState.deploymentStatuses || {},
      hasActiveWebhook: jsonState.hasActiveWebhook,
    }

    const result = await saveWorkflowToNormalizedTables(workflowId, workflowState)

    if (result.success) {
      return { success: true }
    }
    return { success: false, error: result.error }
    return await saveWorkflowToNormalizedTables(workflowId, workflowState)
  } catch (error) {
    logger.error(`Error migrating workflow ${workflowId} to normalized tables:`, error)
    return {
@@ -68,7 +68,6 @@ export function useWorkflowDiff(): UseWorkflowDiffReturn {
        isDeployed: currentState.isDeployed,
        deployedAt: currentState.deployedAt,
        deploymentStatuses: { ...currentState.deploymentStatuses },
        hasActiveWebhook: currentState.hasActiveWebhook,
      },
      subblockValues: JSON.parse(JSON.stringify(currentSubblockValues)), // Deep copy
      timestamp: Date.now(),
@@ -107,7 +106,6 @@ export function useWorkflowDiff(): UseWorkflowDiffReturn {
      isDeployed: backup.workflowState.isDeployed,
      deployedAt: backup.workflowState.deployedAt,
      deploymentStatuses: backup.workflowState.deploymentStatuses,
      hasActiveWebhook: backup.workflowState.hasActiveWebhook,
    })

    // Restore subblock values
@@ -1,43 +1,30 @@
import type { Edge } from 'reactflow'
import type {
  BlockState,
  Loop,
  Parallel,
  Position,
  WorkflowState,
} from '@/stores/workflows/workflow/types'
import type { BlockState, Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'

/**
 * Sanitized workflow state for copilot (removes all UI-specific data)
 * Connections are embedded in blocks for consistency with operations format
 * Loops and parallels use nested structure - no separate loops/parallels objects
 */
export interface CopilotWorkflowState {
  blocks: Record<string, CopilotBlockState>
  edges: CopilotEdge[]
  loops: Record<string, Loop>
  parallels: Record<string, Parallel>
}

/**
 * Block state for copilot (no positions, no UI dimensions)
 * Block state for copilot (no positions, no UI dimensions, no redundant IDs)
 * Connections are embedded here instead of separate edges array
 * Loops and parallels have nested structure for clarity
 */
export interface CopilotBlockState {
  id: string
  type: string
  name: string
  subBlocks: BlockState['subBlocks']
  inputs?: Record<string, string | number | string[][]>
  outputs: BlockState['outputs']
  connections?: Record<string, string | string[]>
  nestedNodes?: Record<string, CopilotBlockState>
  enabled: boolean
  advancedMode?: boolean
  triggerMode?: boolean
  // Keep semantic data only (no width/height)
  data?: {
    parentId?: string
    extent?: 'parent'
    loopType?: 'for' | 'forEach'
    parallelType?: 'collection' | 'count'
    collection?: any
    count?: number
  }
}

/**
@@ -66,55 +53,208 @@ export interface ExportWorkflowState {
}

/**
 * Sanitize workflow state for copilot by removing all UI-specific data
 * Copilot doesn't need to see positions, dimensions, or visual styling
 * Check if a subblock contains sensitive/secret data
 */
export function sanitizeForCopilot(state: WorkflowState): CopilotWorkflowState {
  const sanitizedBlocks: Record<string, CopilotBlockState> = {}
function isSensitiveSubBlock(key: string, subBlock: BlockState['subBlocks'][string]): boolean {
  // Check if it's an OAuth input type
  if (subBlock.type === 'oauth-input') {
    return true
  }

  // Sanitize blocks - remove position and UI-only fields
  Object.entries(state.blocks).forEach(([blockId, block]) => {
    const sanitizedData: CopilotBlockState['data'] = block.data
      ? {
          // Keep semantic fields only
          ...(block.data.parentId !== undefined && { parentId: block.data.parentId }),
          ...(block.data.extent !== undefined && { extent: block.data.extent }),
          ...(block.data.loopType !== undefined && { loopType: block.data.loopType }),
          ...(block.data.parallelType !== undefined && { parallelType: block.data.parallelType }),
          ...(block.data.collection !== undefined && { collection: block.data.collection }),
          ...(block.data.count !== undefined && { count: block.data.count }),
        }
      : undefined
  // Check if the field name suggests it contains sensitive data
  const sensitivePattern = /credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i
  if (sensitivePattern.test(key)) {
    return true
  }

    sanitizedBlocks[blockId] = {
      id: block.id,
      type: block.type,
      name: block.name,
      subBlocks: block.subBlocks,
      outputs: block.outputs,
      enabled: block.enabled,
      ...(block.advancedMode !== undefined && { advancedMode: block.advancedMode }),
      ...(block.triggerMode !== undefined && { triggerMode: block.triggerMode }),
      ...(sanitizedData && Object.keys(sanitizedData).length > 0 && { data: sanitizedData }),
  // Check if the value itself looks like a secret (but not environment variable references)
  if (typeof subBlock.value === 'string' && subBlock.value.length > 0) {
    // Don't sanitize environment variable references like {{VAR_NAME}}
    if (subBlock.value.startsWith('{{') && subBlock.value.endsWith('}}')) {
      return false
    }

    // If it matches sensitive patterns in the value, it's likely a hardcoded secret
    if (sensitivePattern.test(subBlock.value)) {
      return true
    }
  }

  return false
}

/**
 * Sanitize subblocks by removing null values, secrets, and simplifying structure
 * Maps each subblock key directly to its value instead of the full object
 */
function sanitizeSubBlocks(
  subBlocks: BlockState['subBlocks']
): Record<string, string | number | string[][]> {
  const sanitized: Record<string, string | number | string[][]> = {}

  Object.entries(subBlocks).forEach(([key, subBlock]) => {
    // Skip null/undefined values
    if (subBlock.value === null || subBlock.value === undefined) {
      return
    }

    // For sensitive fields, either omit or replace with placeholder
    if (isSensitiveSubBlock(key, subBlock)) {
      // If it's an environment variable reference, keep it
      if (
        typeof subBlock.value === 'string' &&
        subBlock.value.startsWith('{{') &&
        subBlock.value.endsWith('}}')
      ) {
        sanitized[key] = subBlock.value
      }
      // Otherwise omit the sensitive value entirely
      return
    }

    // For non-sensitive, non-null values, include them
    sanitized[key] = subBlock.value
  })

  return sanitized
}
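// Behavior sketch (hypothetical subblock values): hardcoded secrets are dropped,
// {{ENV_VAR}} references survive, nulls are skipped, and everything else is
// flattened to key -> value:
const sanitizedExample = sanitizeSubBlocks({
  apiKey: { id: 'apiKey', type: 'short-input', value: 'sk-live-abc123' }, // omitted: sensitive key name
  credential: { id: 'credential', type: 'short-input', value: '{{OPENAI_KEY}}' }, // kept: env reference
  model: { id: 'model', type: 'short-input', value: 'gpt-4o' }, // kept as-is
  prompt: { id: 'prompt', type: 'long-input', value: null }, // skipped: null value
})
// -> { credential: '{{OPENAI_KEY}}', model: 'gpt-4o' }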
/**
 * Reconstruct full subBlock structure from simplified copilot format
 * Uses existing block structure as template for id and type fields
 */
function reconstructSubBlocks(
  simplifiedSubBlocks: Record<string, string | number | string[][]>,
  existingSubBlocks?: BlockState['subBlocks']
): BlockState['subBlocks'] {
  const reconstructed: BlockState['subBlocks'] = {}

  Object.entries(simplifiedSubBlocks).forEach(([key, value]) => {
    const existingSubBlock = existingSubBlocks?.[key]

    reconstructed[key] = {
      id: existingSubBlock?.id || key,
      type: existingSubBlock?.type || 'short-input',
      value,
    }
  })

  // Sanitize edges - keep only semantic connection data
  const sanitizedEdges: CopilotEdge[] = state.edges.map((edge) => ({
    id: edge.id,
    source: edge.source,
    target: edge.target,
    ...(edge.sourceHandle !== undefined &&
      edge.sourceHandle !== null && { sourceHandle: edge.sourceHandle }),
    ...(edge.targetHandle !== undefined &&
      edge.targetHandle !== null && { targetHandle: edge.targetHandle }),
  }))
  return reconstructed
}

/**
 * Extract connections for a block from edges and format as operations-style connections
 */
function extractConnectionsForBlock(
  blockId: string,
  edges: WorkflowState['edges']
): Record<string, string | string[]> | undefined {
  const connections: Record<string, string[]> = {}

  // Find all outgoing edges from this block
  const outgoingEdges = edges.filter((edge) => edge.source === blockId)

  if (outgoingEdges.length === 0) {
    return undefined
  }

  // Group by source handle
  for (const edge of outgoingEdges) {
    const handle = edge.sourceHandle || 'source'

    if (!connections[handle]) {
      connections[handle] = []
    }

    connections[handle].push(edge.target)
  }

  // Simplify single-element arrays to just the string
  const simplified: Record<string, string | string[]> = {}
  for (const [handle, targets] of Object.entries(connections)) {
    simplified[handle] = targets.length === 1 ? targets[0] : targets
  }

  return simplified
}
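// Grouping sketch (hypothetical edges): outgoing edges are keyed by source handle
// and single targets collapse from arrays to plain strings:
const exampleEdges = [
  { id: 'e1', source: 'b1', target: 'b2', sourceHandle: 'source' },
  { id: 'e2', source: 'b1', target: 'b3', sourceHandle: 'condition' },
  { id: 'e3', source: 'b1', target: 'b4', sourceHandle: 'condition' },
] as WorkflowState['edges']
// extractConnectionsForBlock('b1', exampleEdges)
// -> { source: 'b2', condition: ['b3', 'b4'] }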
/**
 * Sanitize workflow state for copilot by removing all UI-specific data
 * Creates nested structure for loops/parallels with their child blocks inside
 */
export function sanitizeForCopilot(state: WorkflowState): CopilotWorkflowState {
  const sanitizedBlocks: Record<string, CopilotBlockState> = {}
  const processedBlocks = new Set<string>()

  // Helper to find child blocks of a parent (loop/parallel container)
  const findChildBlocks = (parentId: string): string[] => {
    return Object.keys(state.blocks).filter(
      (blockId) => state.blocks[blockId].data?.parentId === parentId
    )
  }

  // Helper to recursively sanitize a block and its children
  const sanitizeBlock = (blockId: string, block: BlockState): CopilotBlockState => {
    const connections = extractConnectionsForBlock(blockId, state.edges)

    // For loop/parallel blocks, extract config from block.data instead of subBlocks
    let inputs: Record<string, string | number | string[][]> = {}

    if (block.type === 'loop' || block.type === 'parallel') {
      // Extract configuration from block.data
      if (block.data?.loopType) inputs.loopType = block.data.loopType
      if (block.data?.count !== undefined) inputs.iterations = block.data.count
      if (block.data?.collection !== undefined) inputs.collection = block.data.collection
      if (block.data?.parallelType) inputs.parallelType = block.data.parallelType
    } else {
      // For regular blocks, sanitize subBlocks
      inputs = sanitizeSubBlocks(block.subBlocks)
    }

    // Check if this is a loop or parallel (has children)
    const childBlockIds = findChildBlocks(blockId)
    const nestedNodes: Record<string, CopilotBlockState> = {}

    if (childBlockIds.length > 0) {
      // Recursively sanitize child blocks
      childBlockIds.forEach((childId) => {
        const childBlock = state.blocks[childId]
        if (childBlock) {
          nestedNodes[childId] = sanitizeBlock(childId, childBlock)
          processedBlocks.add(childId)
        }
      })
    }

    const result: CopilotBlockState = {
      type: block.type,
      name: block.name,
      outputs: block.outputs,
      enabled: block.enabled,
    }

    if (Object.keys(inputs).length > 0) result.inputs = inputs
    if (connections) result.connections = connections
    if (Object.keys(nestedNodes).length > 0) result.nestedNodes = nestedNodes
    if (block.advancedMode !== undefined) result.advancedMode = block.advancedMode
    if (block.triggerMode !== undefined) result.triggerMode = block.triggerMode

    return result
  }

  // Process only root-level blocks (those without a parent)
  Object.entries(state.blocks).forEach(([blockId, block]) => {
    // Skip if already processed as a child
    if (processedBlocks.has(blockId)) return

    // Skip if it has a parent (it will be processed as nested)
    if (block.data?.parentId) return

    sanitizedBlocks[blockId] = sanitizeBlock(blockId, block)
  })

  return {
    blocks: sanitizedBlocks,
    edges: sanitizedEdges,
    loops: state.loops || {},
    parallels: state.parallels || {},
  }
}
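// Shape sketch (hypothetical workflow): a loop's child agent is folded into
// nestedNodes and the loop config is surfaced as inputs instead of subBlocks.
// The field set assumes the new CopilotBlockState shape without ids or positions:
const exampleLoopBlock = {
  type: 'loop',
  name: 'Loop 1',
  outputs: {},
  enabled: true,
  inputs: { loopType: 'forEach', collection: '{{ITEMS}}' },
  connections: { source: 'final-1' },
  nestedNodes: {
    'agent-1': { type: 'agent', name: 'Agent 1', outputs: {}, enabled: true },
  },
}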
@@ -167,204 +307,3 @@ export function sanitizeForExport(state: WorkflowState): ExportWorkflowState {
    state: clonedState,
  }
}

/**
 * Validate that edges reference existing blocks
 */
export function validateEdges(
  blocks: Record<string, any>,
  edges: CopilotEdge[]
): {
  valid: boolean
  errors: string[]
} {
  const errors: string[] = []
  const blockIds = new Set(Object.keys(blocks))

  edges.forEach((edge, index) => {
    if (!blockIds.has(edge.source)) {
      errors.push(`Edge ${index} references non-existent source block: ${edge.source}`)
    }
    if (!blockIds.has(edge.target)) {
      errors.push(`Edge ${index} references non-existent target block: ${edge.target}`)
    }
  })

  return {
    valid: errors.length === 0,
    errors,
  }
}

/**
 * Generate position for a new block based on its connections
 * Uses compact horizontal spacing and intelligent positioning
 */
export function generatePositionForNewBlock(
  blockId: string,
  edges: CopilotEdge[],
  existingBlocks: Record<string, BlockState>
): Position {
  const HORIZONTAL_SPACING = 550
  const VERTICAL_SPACING = 200

  const incomingEdges = edges.filter((e) => e.target === blockId)

  if (incomingEdges.length > 0) {
    const sourceBlocks = incomingEdges
      .map((e) => existingBlocks[e.source])
      .filter((b) => b !== undefined)

    if (sourceBlocks.length > 0) {
      const rightmostX = Math.max(...sourceBlocks.map((b) => b.position.x))
      const avgY = sourceBlocks.reduce((sum, b) => sum + b.position.y, 0) / sourceBlocks.length

      return {
        x: rightmostX + HORIZONTAL_SPACING,
        y: avgY,
      }
    }
  }

  const outgoingEdges = edges.filter((e) => e.source === blockId)

  if (outgoingEdges.length > 0) {
    const targetBlocks = outgoingEdges
      .map((e) => existingBlocks[e.target])
      .filter((b) => b !== undefined)

    if (targetBlocks.length > 0) {
      const leftmostX = Math.min(...targetBlocks.map((b) => b.position.x))
      const avgY = targetBlocks.reduce((sum, b) => sum + b.position.y, 0) / targetBlocks.length

      return {
        x: Math.max(150, leftmostX - HORIZONTAL_SPACING),
        y: avgY,
      }
    }
  }

  const existingPositions = Object.values(existingBlocks).map((b) => b.position)
  if (existingPositions.length > 0) {
    const maxY = Math.max(...existingPositions.map((p) => p.y))
    return {
      x: 150,
      y: maxY + VERTICAL_SPACING,
    }
  }

  return { x: 150, y: 300 }
}

/**
 * Merge sanitized copilot state with full UI state
 * Preserves positions for existing blocks, generates positions for new blocks
 */
export function mergeWithUIState(
  sanitized: CopilotWorkflowState,
  fullState: WorkflowState
): WorkflowState {
  const mergedBlocks: Record<string, BlockState> = {}
  const existingBlocks = fullState.blocks

  // Convert sanitized edges to full edges for position generation
  const sanitizedEdges = sanitized.edges

  // Process each block from sanitized state
  Object.entries(sanitized.blocks).forEach(([blockId, sanitizedBlock]) => {
    const existingBlock = existingBlocks[blockId]

    if (existingBlock) {
      // Existing block - preserve position and UI fields, update semantic fields
      mergedBlocks[blockId] = {
        ...existingBlock,
        // Update semantic fields from sanitized
        type: sanitizedBlock.type,
        name: sanitizedBlock.name,
        subBlocks: sanitizedBlock.subBlocks,
        outputs: sanitizedBlock.outputs,
        enabled: sanitizedBlock.enabled,
        advancedMode: sanitizedBlock.advancedMode,
        triggerMode: sanitizedBlock.triggerMode,
        // Merge data carefully
        data: sanitizedBlock.data
          ? {
              ...existingBlock.data,
              ...sanitizedBlock.data,
            }
          : existingBlock.data,
      }
    } else {
      // New block - generate position
      const position = generatePositionForNewBlock(blockId, sanitizedEdges, existingBlocks)

      mergedBlocks[blockId] = {
        id: sanitizedBlock.id,
        type: sanitizedBlock.type,
        name: sanitizedBlock.name,
        position,
        subBlocks: sanitizedBlock.subBlocks,
        outputs: sanitizedBlock.outputs,
        enabled: sanitizedBlock.enabled,
        horizontalHandles: true,
        isWide: false,
        height: 0,
        advancedMode: sanitizedBlock.advancedMode,
        triggerMode: sanitizedBlock.triggerMode,
        data: sanitizedBlock.data
          ? {
              ...sanitizedBlock.data,
              // Add UI dimensions if it's a container
              ...(sanitizedBlock.type === 'loop' || sanitizedBlock.type === 'parallel'
                ? {
                    width: 500,
                    height: 300,
                    type: 'subflowNode',
                  }
                : {}),
            }
          : undefined,
      }
    }
  })

  // Convert sanitized edges to full edges
  const mergedEdges: Edge[] = sanitized.edges.map((edge) => {
    // Try to find existing edge to preserve styling
    const existingEdge = fullState.edges.find(
      (e) =>
        e.source === edge.source &&
        e.target === edge.target &&
        e.sourceHandle === edge.sourceHandle &&
        e.targetHandle === edge.targetHandle
    )

    if (existingEdge) {
      return existingEdge
    }

    // New edge - create with defaults
    return {
      id: edge.id,
      source: edge.source,
      target: edge.target,
      sourceHandle: edge.sourceHandle,
      targetHandle: edge.targetHandle,
      type: 'default',
      data: {},
    } as Edge
  })

  return {
    blocks: mergedBlocks,
    edges: mergedEdges,
    loops: sanitized.loops,
    parallels: sanitized.parallels,
    lastSaved: Date.now(),
    // Preserve deployment info
    isDeployed: fullState.isDeployed,
    deployedAt: fullState.deployedAt,
    deploymentStatuses: fullState.deploymentStatuses,
    hasActiveWebhook: fullState.hasActiveWebhook,
  }
}
@@ -1,29 +1,19 @@
import type { CopilotWorkflowState } from '@/lib/workflows/json-sanitizer'

export interface EditOperation {
  operation_type: 'add' | 'edit' | 'delete'
  operation_type: 'add' | 'edit' | 'delete' | 'insert_into_subflow' | 'extract_from_subflow'
  block_id: string
  params?: {
    type?: string
    name?: string
    outputs?: Record<string, any>
    enabled?: boolean
    triggerMode?: boolean
    advancedMode?: boolean
    inputs?: Record<string, any>
    connections?: Record<string, any>
    removeEdges?: Array<{ targetBlockId: string; sourceHandle?: string }>
    loopConfig?: {
      nodes?: string[]
      iterations?: number
      loopType?: 'for' | 'forEach'
      forEachItems?: any
    }
    parallelConfig?: {
      nodes?: string[]
      distribution?: any
      count?: number
      parallelType?: 'count' | 'collection'
    }
    parentId?: string
    extent?: 'parent'
    nestedNodes?: Record<string, any>
    subflowId?: string
  }
}

@@ -38,6 +28,79 @@ export interface WorkflowDiff {
  }
}

/**
 * Flatten nested blocks into a single-level map for comparison
 * Returns map of blockId -> {block, parentId}
 */
function flattenBlocks(
  blocks: Record<string, any>
): Record<string, { block: any; parentId?: string }> {
  const flattened: Record<string, { block: any; parentId?: string }> = {}

  const processBlock = (blockId: string, block: any, parentId?: string) => {
    flattened[blockId] = { block, parentId }

    // Recursively process nested nodes
    if (block.nestedNodes) {
      Object.entries(block.nestedNodes).forEach(([nestedId, nestedBlock]) => {
        processBlock(nestedId, nestedBlock, blockId)
      })
    }
  }

  Object.entries(blocks).forEach(([blockId, block]) => {
    processBlock(blockId, block)
  })

  return flattened
}
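// Flattening sketch (hypothetical blocks): nested nodes surface at the top level
// with their parent recorded, so start and end states can be diffed uniformly:
const flattenedExample = flattenBlocks({
  'loop-1': { type: 'loop', nestedNodes: { 'agent-1': { type: 'agent' } } },
})
// -> { 'loop-1': { block: {...}, parentId: undefined },
//      'agent-1': { block: { type: 'agent' }, parentId: 'loop-1' } }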
/**
 * Extract all edges from blocks with embedded connections (including nested)
 */
function extractAllEdgesFromBlocks(blocks: Record<string, any>): Array<{
  source: string
  target: string
  sourceHandle?: string | null
  targetHandle?: string | null
}> {
  const edges: Array<{
    source: string
    target: string
    sourceHandle?: string | null
    targetHandle?: string | null
  }> = []

  const processBlockConnections = (block: any, blockId: string) => {
    if (block.connections) {
      Object.entries(block.connections).forEach(([sourceHandle, targets]) => {
        const targetArray = Array.isArray(targets) ? targets : [targets]
        targetArray.forEach((target: string) => {
          edges.push({
            source: blockId,
            target,
            sourceHandle,
            targetHandle: 'target',
          })
        })
      })
    }

    // Process nested nodes
    if (block.nestedNodes) {
      Object.entries(block.nestedNodes).forEach(([nestedId, nestedBlock]) => {
        processBlockConnections(nestedBlock, nestedId)
      })
    }
  }

  Object.entries(blocks).forEach(([blockId, block]) => {
    processBlockConnections(block, blockId)
  })

  return edges
}

/**
 * Compute the edit sequence (operations) needed to transform startState into endState
 * This analyzes the differences and generates operations that can recreate the changes
@@ -51,12 +114,14 @@ export function computeEditSequence(

  const startBlocks = startState.blocks || {}
  const endBlocks = endState.blocks || {}
  const startEdges = startState.edges || []
  const endEdges = endState.edges || []
  const startLoops = startState.loops || {}
  const endLoops = endState.loops || {}
  const startParallels = startState.parallels || {}
  const endParallels = endState.parallels || {}

  // Flatten nested blocks for comparison (includes nested nodes at top level)
  const startFlattened = flattenBlocks(startBlocks)
  const endFlattened = flattenBlocks(endBlocks)

  // Extract edges from connections for tracking
  const startEdges = extractAllEdgesFromBlocks(startBlocks)
  const endEdges = extractAllEdgesFromBlocks(endBlocks)

  // Track statistics
  let blocksAdded = 0
@@ -65,74 +130,171 @@ export function computeEditSequence(
  let edgesChanged = 0
  let subflowsChanged = 0

  // Track which blocks are being deleted (including subflows)
  const deletedBlocks = new Set<string>()
  for (const blockId in startFlattened) {
    if (!(blockId in endFlattened)) {
      deletedBlocks.add(blockId)
    }
  }

  // 1. Find deleted blocks (exist in start but not in end)
  for (const blockId in startBlocks) {
    if (!(blockId in endBlocks)) {
      operations.push({
        operation_type: 'delete',
        block_id: blockId,
      })
      blocksDeleted++
  for (const blockId in startFlattened) {
    if (!(blockId in endFlattened)) {
      const { parentId } = startFlattened[blockId]

      // Skip if parent is also being deleted (cascade delete is implicit)
      if (parentId && deletedBlocks.has(parentId)) {
        continue
      }

      if (parentId) {
        // Block was inside a subflow and was removed (but subflow still exists)
        operations.push({
          operation_type: 'extract_from_subflow',
          block_id: blockId,
          params: {
            subflowId: parentId,
          },
        })
        subflowsChanged++
      } else {
        // Regular block deletion
        operations.push({
          operation_type: 'delete',
          block_id: blockId,
        })
        blocksDeleted++
      }
    }
  }

  // 2. Find added blocks (exist in end but not in start)
  for (const blockId in endBlocks) {
    if (!(blockId in startBlocks)) {
      const block = endBlocks[blockId]
      const addParams: EditOperation['params'] = {
        type: block.type,
        name: block.name,
        inputs: extractInputValues(block),
        connections: extractConnections(blockId, endEdges),
        triggerMode: Boolean(block?.triggerMode),
      }
  for (const blockId in endFlattened) {
    if (!(blockId in startFlattened)) {
      const { block, parentId } = endFlattened[blockId]
      if (parentId) {
        // Block was added inside a subflow - include full block state
        const addParams: EditOperation['params'] = {
          subflowId: parentId,
          type: block.type,
          name: block.name,
          outputs: block.outputs,
          enabled: block.enabled !== undefined ? block.enabled : true,
          ...(block?.triggerMode !== undefined && { triggerMode: Boolean(block.triggerMode) }),
          ...(block?.advancedMode !== undefined && { advancedMode: Boolean(block.advancedMode) }),
        }

      // Add loop/parallel configuration if this block is in a subflow
      const loopConfig = findLoopConfigForBlock(blockId, endLoops)
      if (loopConfig) {
        ;(addParams as any).loopConfig = loopConfig
        // Add inputs if present
        const inputs = extractInputValues(block)
        if (Object.keys(inputs).length > 0) {
          addParams.inputs = inputs
        }

        // Add connections if present
        const connections = extractConnections(blockId, endEdges)
        if (connections && Object.keys(connections).length > 0) {
          addParams.connections = connections
        }

        operations.push({
          operation_type: 'insert_into_subflow',
          block_id: blockId,
          params: addParams,
        })
        subflowsChanged++
      }
      } else {
        // Regular block addition at root level
        const addParams: EditOperation['params'] = {
          type: block.type,
          name: block.name,
          ...(block?.triggerMode !== undefined && { triggerMode: Boolean(block.triggerMode) }),
          ...(block?.advancedMode !== undefined && { advancedMode: Boolean(block.advancedMode) }),
        }

      const parallelConfig = findParallelConfigForBlock(blockId, endParallels)
      if (parallelConfig) {
        ;(addParams as any).parallelConfig = parallelConfig
        subflowsChanged++
      }
        // Add inputs if present
        const inputs = extractInputValues(block)
        if (Object.keys(inputs).length > 0) {
          addParams.inputs = inputs
        }

      // Add parent-child relationship if present
      if (block.data?.parentId) {
        addParams.parentId = block.data.parentId
        addParams.extent = block.data.extent
      }
        // Add connections if present
        const connections = extractConnections(blockId, endEdges)
        if (connections && Object.keys(connections).length > 0) {
          addParams.connections = connections
        }

      operations.push({
        operation_type: 'add',
        block_id: blockId,
        params: addParams,
      })
      blocksAdded++
        // Add nested nodes if present (for loops/parallels created from scratch)
        if (block.nestedNodes && Object.keys(block.nestedNodes).length > 0) {
          addParams.nestedNodes = block.nestedNodes
          subflowsChanged++
        }

        operations.push({
          operation_type: 'add',
          block_id: blockId,
          params: addParams,
        })
        blocksAdded++
      }
    }
  }

  // 3. Find modified blocks (exist in both but have changes)
  for (const blockId in endBlocks) {
    if (blockId in startBlocks) {
      const startBlock = startBlocks[blockId]
      const endBlock = endBlocks[blockId]
      const changes = computeBlockChanges(
        startBlock,
        endBlock,
        blockId,
        startEdges,
        endEdges,
        startLoops,
        endLoops,
        startParallels,
        endParallels
      )
  for (const blockId in endFlattened) {
    if (blockId in startFlattened) {
      const { block: startBlock, parentId: startParentId } = startFlattened[blockId]
      const { block: endBlock, parentId: endParentId } = endFlattened[blockId]

      // Check if parent changed (moved in/out of subflow)
      if (startParentId !== endParentId) {
        // Extract from old parent if it had one
        if (startParentId) {
          operations.push({
            operation_type: 'extract_from_subflow',
            block_id: blockId,
            params: { subflowId: startParentId },
          })
          subflowsChanged++
        }

        // Insert into new parent if it has one - include full block state
        if (endParentId) {
          const addParams: EditOperation['params'] = {
            subflowId: endParentId,
            type: endBlock.type,
            name: endBlock.name,
            outputs: endBlock.outputs,
            enabled: endBlock.enabled !== undefined ? endBlock.enabled : true,
            ...(endBlock?.triggerMode !== undefined && {
              triggerMode: Boolean(endBlock.triggerMode),
            }),
            ...(endBlock?.advancedMode !== undefined && {
              advancedMode: Boolean(endBlock.advancedMode),
            }),
          }

          const inputs = extractInputValues(endBlock)
          if (Object.keys(inputs).length > 0) {
            addParams.inputs = inputs
          }

          const connections = extractConnections(blockId, endEdges)
          if (connections && Object.keys(connections).length > 0) {
            addParams.connections = connections
          }

          operations.push({
            operation_type: 'insert_into_subflow',
            block_id: blockId,
            params: addParams,
          })
          subflowsChanged++
        }
      }

      // Check for other changes (only if parent didn't change)
      const changes = computeBlockChanges(startBlock, endBlock, blockId, startEdges, endEdges)
      if (changes) {
        operations.push({
          operation_type: 'edit',
@@ -140,24 +302,13 @@ export function computeEditSequence(
          params: changes,
        })
        blocksModified++
        if (changes.connections || changes.removeEdges) {
        if (changes.connections) {
          edgesChanged++
        }
        if (changes.loopConfig || changes.parallelConfig) {
          subflowsChanged++
        }
      }
    }
  }

  // 4. Check for standalone loop/parallel changes (not tied to specific blocks)
  const loopChanges = detectSubflowChanges(startLoops, endLoops, 'loop')
  const parallelChanges = detectSubflowChanges(startParallels, endParallels, 'parallel')

  if (loopChanges > 0 || parallelChanges > 0) {
    subflowsChanged += loopChanges + parallelChanges
  }

  return {
    operations,
    summary: {
@@ -171,20 +322,21 @@ export function computeEditSequence(
}

/**
 * Extract input values from a block's subBlocks
 * Extract input values from a block
 * Works with sanitized format where inputs is Record<string, value>
 */
function extractInputValues(block: any): Record<string, any> {
  const inputs: Record<string, any> = {}

  if (block.subBlocks) {
    for (const [subBlockId, subBlock] of Object.entries(block.subBlocks)) {
      if ((subBlock as any).value !== undefined && (subBlock as any).value !== null) {
        inputs[subBlockId] = (subBlock as any).value
      }
    }
  // New sanitized format uses 'inputs' field
  if (block.inputs) {
    return { ...block.inputs }
  }

  return inputs
  // Fallback for any legacy data
  if (block.subBlocks) {
    return { ...block.subBlocks }
  }

  return {}
}

/**
@@ -233,101 +385,6 @@ function extractConnections(
  return connections
}

/**
 * Find loop configuration for a block
 */
function findLoopConfigForBlock(
  blockId: string,
  loops: Record<string, any>
):
  | {
      nodes?: string[]
      iterations?: number
      loopType?: 'for' | 'forEach'
      forEachItems?: any
    }
  | undefined {
  for (const loop of Object.values(loops)) {
    if (loop.id === blockId || loop.nodes?.includes(blockId)) {
      return {
        nodes: loop.nodes,
        iterations: loop.iterations,
        loopType: loop.loopType,
        forEachItems: loop.forEachItems,
      }
    }
  }
  return undefined
}

/**
 * Find parallel configuration for a block
 */
function findParallelConfigForBlock(
  blockId: string,
  parallels: Record<string, any>
):
  | {
      nodes?: string[]
      distribution?: any
      count?: number
      parallelType?: 'count' | 'collection'
    }
  | undefined {
  for (const parallel of Object.values(parallels)) {
    if (parallel.id === blockId || parallel.nodes?.includes(blockId)) {
      return {
        nodes: parallel.nodes,
        distribution: parallel.distribution,
        count: parallel.count,
        parallelType: parallel.parallelType,
      }
    }
  }
  return undefined
}

/**
 * Detect changes in subflow configurations
 */
function detectSubflowChanges(
  startSubflows: Record<string, any>,
  endSubflows: Record<string, any>,
  type: 'loop' | 'parallel'
): number {
  let changes = 0

  // Check for added/removed subflows
  const startIds = new Set(Object.keys(startSubflows))
  const endIds = new Set(Object.keys(endSubflows))

  for (const id of endIds) {
    if (!startIds.has(id)) {
      changes++ // New subflow
    }
  }

  for (const id of startIds) {
    if (!endIds.has(id)) {
      changes++ // Removed subflow
    }
  }

  // Check for modified subflows
  for (const id of endIds) {
    if (startIds.has(id)) {
      const startSubflow = startSubflows[id]
      const endSubflow = endSubflows[id]

      if (JSON.stringify(startSubflow) !== JSON.stringify(endSubflow)) {
        changes++ // Modified subflow
      }
    }
  }

  return changes
}

/**
 * Compute what changed in a block between two states
 */
@@ -346,11 +403,7 @@ function computeBlockChanges(
    target: string
    sourceHandle?: string | null
    targetHandle?: string | null
  }>,
  startLoops: Record<string, any>,
  endLoops: Record<string, any>,
  startParallels: Record<string, any>,
  endParallels: Record<string, any>
  }>
): Record<string, any> | null {
  const changes: Record<string, any> = {}
  let hasChanges = false
@@ -375,6 +428,14 @@ function computeBlockChanges(
    hasChanges = true
  }

  // Check advanced mode change
  const startAdvanced = Boolean(startBlock?.advancedMode)
  const endAdvanced = Boolean(endBlock?.advancedMode)
  if (startAdvanced !== endAdvanced) {
    changes.advancedMode = endAdvanced
    hasChanges = true
  }

  // Check input value changes
  const startInputs = extractInputValues(startBlock)
  const endInputs = extractInputValues(endBlock)
@@ -389,79 +450,7 @@ function computeBlockChanges(
  const endConnections = extractConnections(blockId, endEdges)

  if (JSON.stringify(startConnections) !== JSON.stringify(endConnections)) {
    // Compute which edges were removed
    const removedEdges: Array<{ targetBlockId: string; sourceHandle?: string }> = []

    for (const handle in startConnections) {
      const startTargets = Array.isArray(startConnections[handle])
        ? startConnections[handle]
        : [startConnections[handle]]
      const endTargets = endConnections[handle]
        ? Array.isArray(endConnections[handle])
          ? endConnections[handle]
          : [endConnections[handle]]
        : []
|
||||
|
||||
for (const target of startTargets) {
|
||||
const targetId = typeof target === 'object' ? target.block : target
|
||||
const isPresent = endTargets.some(
|
||||
(t: any) => (typeof t === 'object' ? t.block : t) === targetId
|
||||
)
|
||||
|
||||
if (!isPresent) {
|
||||
removedEdges.push({
|
||||
targetBlockId: targetId,
|
||||
sourceHandle: handle !== 'default' ? handle : undefined,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (removedEdges.length > 0) {
|
||||
changes.removeEdges = removedEdges
|
||||
}
|
||||
|
||||
// Add new connections
|
||||
if (Object.keys(endConnections).length > 0) {
|
||||
changes.connections = endConnections
|
||||
}
|
||||
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
// Check loop membership changes
|
||||
const startLoopConfig = findLoopConfigForBlock(blockId, startLoops)
|
||||
const endLoopConfig = findLoopConfigForBlock(blockId, endLoops)
|
||||
|
||||
if (JSON.stringify(startLoopConfig) !== JSON.stringify(endLoopConfig)) {
|
||||
if (endLoopConfig) {
|
||||
;(changes as any).loopConfig = endLoopConfig
|
||||
}
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
// Check parallel membership changes
|
||||
const startParallelConfig = findParallelConfigForBlock(blockId, startParallels)
|
||||
const endParallelConfig = findParallelConfigForBlock(blockId, endParallels)
|
||||
|
||||
if (JSON.stringify(startParallelConfig) !== JSON.stringify(endParallelConfig)) {
|
||||
if (endParallelConfig) {
|
||||
;(changes as any).parallelConfig = endParallelConfig
|
||||
}
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
// Check parent-child relationship changes
|
||||
const startParentId = startBlock.data?.parentId
|
||||
const endParentId = endBlock.data?.parentId
|
||||
const startExtent = startBlock.data?.extent
|
||||
const endExtent = endBlock.data?.extent
|
||||
|
||||
if (startParentId !== endParentId || startExtent !== endExtent) {
|
||||
if (endParentId) {
|
||||
changes.parentId = endParentId
|
||||
changes.extent = endExtent
|
||||
}
|
||||
changes.connections = endConnections
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
@@ -478,20 +467,29 @@ export function formatEditSequence(operations: EditOperation[]): string[] {
|
||||
return `Add block "${op.params?.name || op.block_id}" (${op.params?.type || 'unknown'})`
|
||||
case 'delete':
|
||||
return `Delete block "${op.block_id}"`
|
||||
case 'insert_into_subflow':
|
||||
return `Insert "${op.params?.name || op.block_id}" into subflow "${op.params?.subflowId}"`
|
||||
case 'extract_from_subflow':
|
||||
return `Extract "${op.block_id}" from subflow "${op.params?.subflowId}"`
|
||||
case 'edit': {
|
||||
const changes: string[] = []
|
||||
if (op.params?.type) changes.push(`type to ${op.params.type}`)
|
||||
if (op.params?.name) changes.push(`name to "${op.params.name}"`)
|
||||
if (op.params?.inputs) changes.push('inputs')
|
||||
if (op.params?.triggerMode !== undefined)
|
||||
changes.push(`trigger mode to ${op.params.triggerMode}`)
|
||||
if (op.params?.advancedMode !== undefined)
|
||||
changes.push(`advanced mode to ${op.params.advancedMode}`)
|
||||
if (op.params?.inputs) {
|
||||
const inputKeys = Object.keys(op.params.inputs)
|
||||
if (inputKeys.length > 0) {
|
||||
changes.push(`inputs (${inputKeys.join(', ')})`)
|
||||
}
|
||||
}
|
||||
if (op.params?.connections) changes.push('connections')
|
||||
if (op.params?.removeEdges) changes.push(`remove ${op.params.removeEdges.length} edge(s)`)
|
||||
if ((op.params as any)?.loopConfig) changes.push('loop configuration')
|
||||
if ((op.params as any)?.parallelConfig) changes.push('parallel configuration')
|
||||
if (op.params?.parentId) changes.push('parent-child relationship')
|
||||
return `Edit block "${op.block_id}": ${changes.join(', ')}`
|
||||
}
|
||||
default:
|
||||
return `Unknown operation on block "${op.block_id}"`
|
||||
return `Unknown operation: ${op.operation_type}`
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -238,6 +238,22 @@ const nextConfig: NextConfig = {
|
||||
|
||||
return redirects
|
||||
},
|
||||
async rewrites() {
|
||||
if (!isTruthy(env.POSTHOG_ENABLED)) {
|
||||
return []
|
||||
}
|
||||
|
||||
return [
|
||||
{
|
||||
source: '/ingest/static/:path*',
|
||||
destination: 'https://us-assets.i.posthog.com/static/:path*',
|
||||
},
|
||||
{
|
||||
source: '/ingest/:path*',
|
||||
destination: 'https://us.i.posthog.com/:path*',
|
||||
},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
export default nextConfig
|
||||
|
||||
@@ -28,8 +28,8 @@
|
||||
"@aws-sdk/s3-request-presigner": "^3.779.0",
|
||||
"@azure/communication-email": "1.0.0",
|
||||
"@azure/storage-blob": "12.27.0",
|
||||
"@better-auth/stripe": "1.3.12",
|
||||
"@better-auth/sso": "1.3.12",
|
||||
"@better-auth/stripe": "1.3.12",
|
||||
"@browserbasehq/stagehand": "^2.0.0",
|
||||
"@cerebras/cerebras_cloud_sdk": "^1.23.0",
|
||||
"@e2b/code-interpreter": "^2.0.0",
|
||||
@@ -93,6 +93,8 @@
|
||||
"openai": "^4.91.1",
|
||||
"papaparse": "5.5.3",
|
||||
"pdf-parse": "1.1.1",
|
||||
"posthog-js": "1.268.9",
|
||||
"posthog-node": "5.9.2",
|
||||
"prismjs": "^1.30.0",
|
||||
"react": "19.1.0",
|
||||
"react-colorful": "5.6.1",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import type { ConnectionOptions } from 'node:tls'
|
||||
import * as schema from '@sim/db'
|
||||
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@sim/db'
|
||||
import { and, eq, or, sql } from 'drizzle-orm'
|
||||
@@ -10,6 +11,34 @@ import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
|
||||
const logger = createLogger('SocketDatabase')
|
||||
|
||||
const connectionString = env.DATABASE_URL
|
||||
|
||||
const getSSLConfig = () => {
|
||||
const sslMode = env.DATABASE_SSL
|
||||
|
||||
if (!sslMode) return undefined
|
||||
if (sslMode === 'disable') return false
|
||||
if (sslMode === 'prefer') return 'prefer'
|
||||
|
||||
const sslConfig: ConnectionOptions = {}
|
||||
|
||||
if (sslMode === 'require') {
|
||||
sslConfig.rejectUnauthorized = false
|
||||
} else if (sslMode === 'verify-ca' || sslMode === 'verify-full') {
|
||||
sslConfig.rejectUnauthorized = true
|
||||
if (env.DATABASE_SSL_CA) {
|
||||
try {
|
||||
const ca = Buffer.from(env.DATABASE_SSL_CA, 'base64').toString('utf-8')
|
||||
sslConfig.ca = ca
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse DATABASE_SSL_CA:', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sslConfig
|
||||
}
|
||||
|
||||
const sslConfig = getSSLConfig()
|
||||
const socketDb = drizzle(
|
||||
postgres(connectionString, {
|
||||
prepare: false,
|
||||
@@ -18,6 +47,7 @@ const socketDb = drizzle(
|
||||
max: 25,
|
||||
onnotice: () => {},
|
||||
debug: false,
|
||||
...(sslConfig !== undefined && { ssl: sslConfig }),
|
||||
}),
|
||||
{ schema }
|
||||
)
|
||||
@@ -132,7 +162,6 @@ export async function getWorkflowState(workflowId: string) {
|
||||
const finalState = {
|
||||
// Default values for expected properties
|
||||
deploymentStatuses: {},
|
||||
hasActiveWebhook: false,
|
||||
// Data from normalized tables
|
||||
blocks: normalizedData.blocks,
|
||||
edges: normalizedData.edges,
|
||||
@@ -166,23 +195,7 @@ export async function persistWorkflowOperation(workflowId: string, operation: an
|
||||
try {
|
||||
const { operation: op, target, payload, timestamp, userId } = operation
|
||||
|
||||
// Log high-frequency operations for monitoring
|
||||
if (op === 'update-position' && Math.random() < 0.01) {
|
||||
// Log 1% of position updates
|
||||
logger.debug('Socket DB operation sample:', {
|
||||
operation: op,
|
||||
target,
|
||||
workflowId: `${workflowId.substring(0, 8)}...`,
|
||||
})
|
||||
}
|
||||
|
||||
await db.transaction(async (tx) => {
|
||||
// Update the workflow's last modified timestamp first
|
||||
await tx
|
||||
.update(workflow)
|
||||
.set({ updatedAt: new Date(timestamp) })
|
||||
.where(eq(workflow.id, workflowId))
|
||||
|
||||
// Handle different operation types within the transaction
|
||||
switch (target) {
|
||||
case 'block':
|
||||
@@ -200,6 +213,13 @@ export async function persistWorkflowOperation(workflowId: string, operation: an
|
||||
default:
|
||||
throw new Error(`Unknown operation target: ${target}`)
|
||||
}
|
||||
|
||||
if (op !== 'update-position') {
|
||||
await tx
|
||||
.update(workflow)
|
||||
.set({ updatedAt: new Date(timestamp) })
|
||||
.where(eq(workflow.id, workflowId))
|
||||
}
|
||||
})
|
||||
|
||||
// Log slow operations for monitoring
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import type { ConnectionOptions } from 'node:tls'
|
||||
import * as schema from '@sim/db/schema'
|
||||
import { workflowBlocks, workflowEdges } from '@sim/db/schema'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
@@ -8,6 +9,34 @@ import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const connectionString = env.DATABASE_URL
|
||||
|
||||
const getSSLConfig = () => {
|
||||
const sslMode = env.DATABASE_SSL
|
||||
|
||||
if (!sslMode) return undefined
|
||||
if (sslMode === 'disable') return false
|
||||
if (sslMode === 'prefer') return 'prefer'
|
||||
|
||||
const sslConfig: ConnectionOptions = {}
|
||||
|
||||
if (sslMode === 'require') {
|
||||
sslConfig.rejectUnauthorized = false
|
||||
} else if (sslMode === 'verify-ca' || sslMode === 'verify-full') {
|
||||
sslConfig.rejectUnauthorized = true
|
||||
if (env.DATABASE_SSL_CA) {
|
||||
try {
|
||||
const ca = Buffer.from(env.DATABASE_SSL_CA, 'base64').toString('utf-8')
|
||||
sslConfig.ca = ca
|
||||
} catch (error) {
|
||||
console.error('Failed to parse DATABASE_SSL_CA:', error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sslConfig
|
||||
}
|
||||
|
||||
const sslConfig = getSSLConfig()
|
||||
const db = drizzle(
|
||||
postgres(connectionString, {
|
||||
prepare: false,
|
||||
@@ -15,6 +44,7 @@ const db = drizzle(
|
||||
connect_timeout: 20,
|
||||
max: 5,
|
||||
onnotice: () => {},
|
||||
...(sslConfig !== undefined && { ssl: sslConfig }),
|
||||
}),
|
||||
{ schema }
|
||||
)
|
||||
|
||||
@@ -1953,7 +1953,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
isDeployed: !!reverted.isDeployed,
|
||||
...(reverted.deployedAt ? { deployedAt: new Date(reverted.deployedAt) } : {}),
|
||||
deploymentStatuses: reverted.deploymentStatuses || {},
|
||||
hasActiveWebhook: !!reverted.hasActiveWebhook,
|
||||
})
|
||||
|
||||
// Extract and apply subblock values
|
||||
|
||||
@@ -38,7 +38,6 @@ const initialState = {
|
||||
// New field for per-workflow deployment tracking
|
||||
deploymentStatuses: {},
|
||||
needsRedeployment: false,
|
||||
hasActiveWebhook: false,
|
||||
history: {
|
||||
past: [],
|
||||
present: {
|
||||
@@ -475,7 +474,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
|
||||
lastSaved: Date.now(),
|
||||
isDeployed: false,
|
||||
isPublished: false,
|
||||
hasActiveWebhook: false,
|
||||
}
|
||||
set(newState)
|
||||
// Note: Socket.IO handles real-time sync automatically
|
||||
@@ -500,7 +498,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
|
||||
deployedAt: state.deployedAt,
|
||||
deploymentStatuses: state.deploymentStatuses,
|
||||
needsRedeployment: state.needsRedeployment,
|
||||
hasActiveWebhook: state.hasActiveWebhook,
|
||||
}
|
||||
},
|
||||
|
||||
@@ -902,15 +899,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
|
||||
}))
|
||||
},
|
||||
|
||||
setWebhookStatus: (hasActiveWebhook: boolean) => {
|
||||
// Only update if the status has changed to avoid unnecessary rerenders
|
||||
if (get().hasActiveWebhook !== hasActiveWebhook) {
|
||||
set({ hasActiveWebhook })
|
||||
get().updateLastSaved()
|
||||
// Note: Socket.IO handles real-time sync automatically
|
||||
}
|
||||
},
|
||||
|
||||
revertToDeployedState: async (deployedState: WorkflowState) => {
|
||||
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
|
||||
|
||||
@@ -931,7 +919,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
|
||||
parallels: deployedState.parallels || {},
|
||||
isDeployed: true,
|
||||
needsRedeployment: false,
|
||||
hasActiveWebhook: false, // Reset webhook status
|
||||
// Keep existing deployment statuses and update for the active workflow if needed
|
||||
deploymentStatuses: {
|
||||
...get().deploymentStatuses,
|
||||
@@ -966,14 +953,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
|
||||
},
|
||||
})
|
||||
|
||||
// Check if there's an active webhook in the deployed state
|
||||
const starterBlock = Object.values(deployedState.blocks).find(
|
||||
(block) => block.type === 'starter'
|
||||
)
|
||||
if (starterBlock && starterBlock.subBlocks?.startWorkflow?.value === 'webhook') {
|
||||
set({ hasActiveWebhook: true })
|
||||
}
|
||||
|
||||
pushHistory(set, get, newState, 'Reverted to deployed state')
|
||||
get().updateLastSaved()
|
||||
|
||||
|
||||
@@ -154,7 +154,6 @@ export interface WorkflowState {
|
||||
// New field for per-workflow deployment status
|
||||
deploymentStatuses?: Record<string, DeploymentStatus>
|
||||
needsRedeployment?: boolean
|
||||
hasActiveWebhook?: boolean
|
||||
// Drag state for undo/redo
|
||||
dragStartPosition?: DragStartPosition | null
|
||||
}
|
||||
@@ -214,7 +213,6 @@ export interface WorkflowActions {
|
||||
generateLoopBlocks: () => Record<string, Loop>
|
||||
generateParallelBlocks: () => Record<string, Parallel>
|
||||
setNeedsRedeploymentFlag: (needsRedeployment: boolean) => void
|
||||
setWebhookStatus: (hasActiveWebhook: boolean) => void
|
||||
revertToDeployedState: (deployedState: WorkflowState) => void
|
||||
toggleBlockAdvancedMode: (id: string) => void
|
||||
toggleBlockTriggerMode: (id: string) => void
|
||||
|
||||
@@ -82,9 +82,6 @@ export const knowledgeCreateDocumentTool: ToolConfig<any, KnowledgeCreateDocumen
|
||||
if (documentName.length > 255) {
|
||||
throw new Error('Document name must be 255 characters or less')
|
||||
}
|
||||
if (/[<>:"/\\|?*]/.test(documentName)) {
|
||||
throw new Error('Document name contains invalid characters. Avoid: < > : " / \\ | ? *')
|
||||
}
|
||||
if (!textContent || textContent.length < 1) {
|
||||
throw new Error('Document content cannot be empty')
|
||||
}
|
||||
|
||||
32
bun.lock
32
bun.lock
@@ -31,9 +31,9 @@
|
||||
"@vercel/analytics": "1.5.0",
|
||||
"@vercel/og": "^0.6.5",
|
||||
"clsx": "^2.1.1",
|
||||
"fumadocs-core": "^15.7.5",
|
||||
"fumadocs-mdx": "^11.5.6",
|
||||
"fumadocs-ui": "^15.7.5",
|
||||
"fumadocs-core": "15.8.2",
|
||||
"fumadocs-mdx": "11.10.1",
|
||||
"fumadocs-ui": "15.8.2",
|
||||
"lucide-react": "^0.511.0",
|
||||
"next": "15.4.1",
|
||||
"next-themes": "^0.4.6",
|
||||
@@ -127,6 +127,8 @@
|
||||
"openai": "^4.91.1",
|
||||
"papaparse": "5.5.3",
|
||||
"pdf-parse": "1.1.1",
|
||||
"posthog-js": "1.268.9",
|
||||
"posthog-node": "5.9.2",
|
||||
"prismjs": "^1.30.0",
|
||||
"react": "19.1.0",
|
||||
"react-colorful": "5.6.1",
|
||||
@@ -812,6 +814,8 @@
|
||||
|
||||
"@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="],
|
||||
|
||||
"@posthog/core": ["@posthog/core@1.2.2", "", {}, "sha512-f16Ozx6LIigRG+HsJdt+7kgSxZTHeX5f1JlCGKI1lXcvlZgfsCR338FuMI2QRYXGl+jg/vYFzGOTQBxl90lnBg=="],
|
||||
|
||||
"@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="],
|
||||
|
||||
"@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="],
|
||||
@@ -1622,6 +1626,8 @@
|
||||
|
||||
"copy-anything": ["copy-anything@3.0.5", "", { "dependencies": { "is-what": "^4.1.8" } }, "sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w=="],
|
||||
|
||||
"core-js": ["core-js@3.45.1", "", {}, "sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg=="],
|
||||
|
||||
"core-util-is": ["core-util-is@1.0.3", "", {}, "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="],
|
||||
|
||||
"cors": ["cors@2.8.5", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="],
|
||||
@@ -1912,11 +1918,11 @@
|
||||
|
||||
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
|
||||
|
||||
"fumadocs-core": ["fumadocs-core@15.8.1", "", { "dependencies": { "@formatjs/intl-localematcher": "^0.6.1", "@orama/orama": "^3.1.14", "@shikijs/rehype": "^3.13.0", "@shikijs/transformers": "^3.13.0", "github-slugger": "^2.0.0", "hast-util-to-estree": "^3.1.3", "hast-util-to-jsx-runtime": "^2.3.6", "image-size": "^2.0.2", "negotiator": "^1.0.0", "npm-to-yarn": "^3.0.1", "react-remove-scroll": "^2.7.1", "remark": "^15.0.0", "remark-gfm": "^4.0.1", "remark-rehype": "^11.1.2", "scroll-into-view-if-needed": "^3.1.0", "shiki": "^3.13.0", "unist-util-visit": "^5.0.0" }, "peerDependencies": { "@mixedbread/sdk": "^0.19.0", "@oramacloud/client": "1.x.x || 2.x.x", "@tanstack/react-router": "1.x.x", "@types/react": "*", "algoliasearch": "5.x.x", "next": "14.x.x || 15.x.x", "react": "18.x.x || 19.x.x", "react-dom": "18.x.x || 19.x.x", "react-router": "7.x.x", "waku": "^0.26.0" }, "optionalPeers": ["@mixedbread/sdk", "@oramacloud/client", "@tanstack/react-router", "@types/react", "algoliasearch", "next", "react", "react-dom", "react-router", "waku"] }, "sha512-3NBM2U3QlnDr4AwfDCLFaNjRGOj52g3geHSnwC9hU2en34xROe7/I8FI1eLkX68ppGnhSQYm/rIuMAPzvepnsg=="],
|
||||
"fumadocs-core": ["fumadocs-core@15.8.2", "", { "dependencies": { "@formatjs/intl-localematcher": "^0.6.1", "@orama/orama": "^3.1.14", "@shikijs/rehype": "^3.13.0", "@shikijs/transformers": "^3.13.0", "github-slugger": "^2.0.0", "hast-util-to-estree": "^3.1.3", "hast-util-to-jsx-runtime": "^2.3.6", "image-size": "^2.0.2", "negotiator": "^1.0.0", "npm-to-yarn": "^3.0.1", "path-to-regexp": "^8.3.0", "react-remove-scroll": "^2.7.1", "remark": "^15.0.0", "remark-gfm": "^4.0.1", "remark-rehype": "^11.1.2", "scroll-into-view-if-needed": "^3.1.0", "shiki": "^3.13.0", "unist-util-visit": "^5.0.0" }, "peerDependencies": { "@mixedbread/sdk": "^0.19.0", "@oramacloud/client": "1.x.x || 2.x.x", "@tanstack/react-router": "1.x.x", "@types/react": "*", "algoliasearch": "5.x.x", "lucide-react": "*", "next": "14.x.x || 15.x.x", "react": "18.x.x || 19.x.x", "react-dom": "18.x.x || 19.x.x", "react-router": "7.x.x", "waku": "^0.26.0" }, "optionalPeers": ["@mixedbread/sdk", "@oramacloud/client", "@tanstack/react-router", "@types/react", "algoliasearch", "lucide-react", "next", "react", "react-dom", "react-router", "waku"] }, "sha512-8x6nwL9gQyfU94VrX3xOIRb3avG9txEJXd0/XdldxHeH6uIAtSGnZ6YV1LksJfEsSIUL+DRxrxRfvL0rbwBP1g=="],
|
||||
|
||||
"fumadocs-mdx": ["fumadocs-mdx@11.10.1", "", { "dependencies": { "@mdx-js/mdx": "^3.1.1", "@standard-schema/spec": "^1.0.0", "chokidar": "^4.0.3", "esbuild": "^0.25.9", "estree-util-value-to-estree": "^3.4.0", "js-yaml": "^4.1.0", "lru-cache": "^11.2.1", "picocolors": "^1.1.1", "remark-mdx": "^3.1.1", "remark-parse": "^11.0.0", "tinyexec": "^1.0.1", "tinyglobby": "^0.2.15", "unified": "^11.0.5", "unist-util-visit": "^5.0.0", "zod": "^4.1.8" }, "peerDependencies": { "@fumadocs/mdx-remote": "^1.4.0", "fumadocs-core": "^14.0.0 || ^15.0.0", "next": "^15.3.0", "react": "*", "vite": "6.x.x || 7.x.x" }, "optionalPeers": ["@fumadocs/mdx-remote", "next", "react", "vite"], "bin": { "fumadocs-mdx": "dist/bin.js" } }, "sha512-WoEzzzoKncXl7PM++GRxEplAb73y3A4ow+QdTYybhVtoYXgJzvTzkLc5OIlNQm72Dv+OxSAx7uk11zTTOX9YMQ=="],
|
||||
|
||||
"fumadocs-ui": ["fumadocs-ui@15.8.1", "", { "dependencies": { "@radix-ui/react-accordion": "^1.2.12", "@radix-ui/react-collapsible": "^1.1.12", "@radix-ui/react-dialog": "^1.1.15", "@radix-ui/react-direction": "^1.1.1", "@radix-ui/react-navigation-menu": "^1.2.14", "@radix-ui/react-popover": "^1.1.15", "@radix-ui/react-presence": "^1.1.5", "@radix-ui/react-scroll-area": "^1.2.10", "@radix-ui/react-slot": "^1.2.3", "@radix-ui/react-tabs": "^1.1.13", "class-variance-authority": "^0.7.1", "fumadocs-core": "15.8.1", "lodash.merge": "^4.6.2", "next-themes": "^0.4.6", "postcss-selector-parser": "^7.1.0", "react-medium-image-zoom": "^5.4.0", "scroll-into-view-if-needed": "^3.1.0", "tailwind-merge": "^3.3.1" }, "peerDependencies": { "@types/react": "*", "next": "14.x.x || 15.x.x", "react": "18.x.x || 19.x.x", "react-dom": "18.x.x || 19.x.x", "tailwindcss": "^3.4.14 || ^4.0.0" }, "optionalPeers": ["@types/react", "next", "tailwindcss"] }, "sha512-GFJoKiDPTPefXtfwr4cheeprtAF+rHILeyovta3cODPsurSRvSfxqbGVN8YRDUh1ja9GpExSQFgUFwp1+7qVtQ=="],
|
||||
"fumadocs-ui": ["fumadocs-ui@15.8.2", "", { "dependencies": { "@radix-ui/react-accordion": "^1.2.12", "@radix-ui/react-collapsible": "^1.1.12", "@radix-ui/react-dialog": "^1.1.15", "@radix-ui/react-direction": "^1.1.1", "@radix-ui/react-navigation-menu": "^1.2.14", "@radix-ui/react-popover": "^1.1.15", "@radix-ui/react-presence": "^1.1.5", "@radix-ui/react-scroll-area": "^1.2.10", "@radix-ui/react-slot": "^1.2.3", "@radix-ui/react-tabs": "^1.1.13", "class-variance-authority": "^0.7.1", "fumadocs-core": "15.8.2", "lodash.merge": "^4.6.2", "next-themes": "^0.4.6", "postcss-selector-parser": "^7.1.0", "react-medium-image-zoom": "^5.4.0", "scroll-into-view-if-needed": "^3.1.0", "tailwind-merge": "^3.3.1" }, "peerDependencies": { "@types/react": "*", "next": "14.x.x || 15.x.x", "react": "18.x.x || 19.x.x", "react-dom": "18.x.x || 19.x.x", "tailwindcss": "^3.4.14 || ^4.0.0" }, "optionalPeers": ["@types/react", "next", "tailwindcss"] }, "sha512-GLMm0a83mVQ2ubQLHbYQ/BcIRKEkA3cMXuVgKK4F/eB6duD6kLPwahoSb0mijwH3UrpJT+zfpKxPlK3JHXXW1A=="],
|
||||
|
||||
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
|
||||
|
||||
@@ -2502,7 +2508,7 @@
|
||||
|
||||
"path-scurry": ["path-scurry@2.0.0", "", { "dependencies": { "lru-cache": "^11.0.0", "minipass": "^7.1.2" } }, "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg=="],
|
||||
|
||||
"path-to-regexp": ["path-to-regexp@0.1.12", "", {}, "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ=="],
|
||||
"path-to-regexp": ["path-to-regexp@8.3.0", "", {}, "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA=="],
|
||||
|
||||
"pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
|
||||
|
||||
@@ -2564,6 +2570,12 @@
|
||||
|
||||
"postgres": ["postgres@3.4.7", "", {}, "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw=="],
|
||||
|
||||
"posthog-js": ["posthog-js@1.268.9", "", { "dependencies": { "@posthog/core": "1.2.2", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", "web-vitals": "^4.2.4" }, "peerDependencies": { "@rrweb/types": "2.0.0-alpha.17", "rrweb-snapshot": "2.0.0-alpha.17" }, "optionalPeers": ["@rrweb/types", "rrweb-snapshot"] }, "sha512-ejK5/i0TUQ8I1SzaIn7xWNf5TzOjWquawpgjKit8DyucD3Z1yf7LTMtgCYZN8oRx9VjiPcP34fSk8YsWQmmkTQ=="],
|
||||
|
||||
"posthog-node": ["posthog-node@5.9.2", "", { "dependencies": { "@posthog/core": "1.2.2" } }, "sha512-oU7FbFcH5cn40nhP04cBeT67zE76EiGWjKKzDvm6IOm5P83sqM0Ij0wMJQSHp+QI6ZN7MLzb+4xfMPUEZ4q6CA=="],
|
||||
|
||||
"preact": ["preact@10.27.2", "", {}, "sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg=="],
|
||||
|
||||
"prettier": ["prettier@3.4.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ=="],
|
||||
|
||||
"prismjs": ["prismjs@1.30.0", "", {}, "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw=="],
|
||||
@@ -3050,6 +3062,8 @@
|
||||
|
||||
"web-streams-polyfill": ["web-streams-polyfill@4.0.0-beta.3", "", {}, "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug=="],
|
||||
|
||||
"web-vitals": ["web-vitals@4.2.4", "", {}, "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw=="],
|
||||
|
||||
"webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="],
|
||||
|
||||
"whatwg-encoding": ["whatwg-encoding@3.1.1", "", { "dependencies": { "iconv-lite": "0.6.3" } }, "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ=="],
|
||||
@@ -3396,6 +3410,8 @@
|
||||
|
||||
"express/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
|
||||
|
||||
"express/path-to-regexp": ["path-to-regexp@0.1.12", "", {}, "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ=="],
|
||||
|
||||
"express/qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="],
|
||||
|
||||
"fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
|
||||
@@ -3502,6 +3518,8 @@
|
||||
|
||||
"postcss-nested/postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="],
|
||||
|
||||
"posthog-js/fflate": ["fflate@0.4.8", "", {}, "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA=="],
|
||||
|
||||
"protobufjs/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
|
||||
"raw-body/iconv-lite": ["iconv-lite@0.7.0", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ=="],
|
||||
@@ -3520,8 +3538,6 @@
|
||||
|
||||
"restore-cursor/signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="],
|
||||
|
||||
"router/path-to-regexp": ["path-to-regexp@8.3.0", "", {}, "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA=="],
|
||||
|
||||
"samlify/uuid": ["uuid@8.3.2", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="],
|
||||
|
||||
"send/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import type { ConnectionOptions } from 'node:tls'
|
||||
import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
import * as schema from './schema'
|
||||
@@ -10,12 +11,52 @@ if (!connectionString) {
|
||||
throw new Error('Missing DATABASE_URL environment variable')
|
||||
}
|
||||
|
||||
const getSSLConfig = () => {
|
||||
const sslMode = process.env.DATABASE_SSL?.toLowerCase()
|
||||
|
||||
if (!sslMode) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
if (sslMode === 'disable') {
|
||||
return false
|
||||
}
|
||||
|
||||
if (sslMode === 'prefer') {
|
||||
return 'prefer'
|
||||
}
|
||||
|
||||
const sslConfig: ConnectionOptions = {}
|
||||
|
||||
if (sslMode === 'require') {
|
||||
sslConfig.rejectUnauthorized = false
|
||||
} else if (sslMode === 'verify-ca' || sslMode === 'verify-full') {
|
||||
sslConfig.rejectUnauthorized = true
|
||||
if (process.env.DATABASE_SSL_CA) {
|
||||
try {
|
||||
const ca = Buffer.from(process.env.DATABASE_SSL_CA, 'base64').toString('utf-8')
|
||||
sslConfig.ca = ca
|
||||
} catch (error) {
|
||||
console.error('Failed to parse DATABASE_SSL_CA:', error)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new Error(
|
||||
`Invalid DATABASE_SSL mode: ${sslMode}. Must be one of: disable, prefer, require, verify-ca, verify-full`
|
||||
)
|
||||
}
|
||||
|
||||
return sslConfig
|
||||
}
|
||||
|
||||
const sslConfig = getSSLConfig()
|
||||
const postgresClient = postgres(connectionString, {
|
||||
prepare: false,
|
||||
idle_timeout: 20,
|
||||
connect_timeout: 30,
|
||||
max: 80,
|
||||
onnotice: () => {},
|
||||
...(sslConfig !== undefined && { ssl: sslConfig }),
|
||||
})
|
||||
|
||||
const drizzleClient = drizzle(postgresClient, { schema })
|
||||
|
||||
211
packages/db/scripts/deregister-sso-provider.ts
Normal file
211
packages/db/scripts/deregister-sso-provider.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
/**
|
||||
* Deregister SSO Provider Script
|
||||
*
|
||||
* This script removes an SSO provider from the database for a specific user.
|
||||
*
|
||||
* Usage: bun run packages/db/scripts/deregister-sso-provider.ts
|
||||
*
|
||||
* Required Environment Variables:
|
||||
* DATABASE_URL=your-database-url
|
||||
* SSO_USER_EMAIL=user@domain.com (user whose SSO provider to remove)
|
||||
* SSO_PROVIDER_ID=provider-id (optional, if not provided will remove all providers for user)
|
||||
*/
|
||||
|
||||
import type { ConnectionOptions } from 'node:tls'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
import { ssoProvider, user } from '../schema'
|
||||
|
||||
// Simple console logger
|
||||
const logger = {
|
||||
info: (message: string, meta?: any) => {
|
||||
const timestamp = new Date().toISOString()
|
||||
console.log(
|
||||
`[${timestamp}] [INFO] [DeregisterSSODB] ${message}`,
|
||||
meta ? JSON.stringify(meta, null, 2) : ''
|
||||
)
|
||||
},
|
||||
error: (message: string, meta?: any) => {
|
||||
const timestamp = new Date().toISOString()
|
||||
console.error(
|
||||
`[${timestamp}] [ERROR] [DeregisterSSODB] ${message}`,
|
||||
meta ? JSON.stringify(meta, null, 2) : ''
|
||||
)
|
||||
},
|
||||
warn: (message: string, meta?: any) => {
|
||||
const timestamp = new Date().toISOString()
|
||||
console.warn(
|
||||
`[${timestamp}] [WARN] [DeregisterSSODB] ${message}`,
|
||||
meta ? JSON.stringify(meta, null, 2) : ''
|
||||
)
|
||||
},
|
||||
}
|
||||
|
||||
// Get database URL from environment
|
||||
const CONNECTION_STRING = process.env.DATABASE_URL
|
||||
if (!CONNECTION_STRING) {
|
||||
console.error('❌ DATABASE_URL environment variable is required')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const getSSLConfig = () => {
|
||||
const sslMode = process.env.DATABASE_SSL?.toLowerCase()
|
||||
|
||||
if (!sslMode) return undefined
|
||||
if (sslMode === 'disable') return false
|
||||
if (sslMode === 'prefer') return 'prefer'
|
||||
|
||||
const sslConfig: ConnectionOptions = {}
|
||||
|
||||
if (sslMode === 'require') {
|
||||
sslConfig.rejectUnauthorized = false
|
||||
} else if (sslMode === 'verify-ca' || sslMode === 'verify-full') {
|
||||
sslConfig.rejectUnauthorized = true
|
||||
if (process.env.DATABASE_SSL_CA) {
|
||||
try {
|
||||
const ca = Buffer.from(process.env.DATABASE_SSL_CA, 'base64').toString('utf-8')
|
||||
sslConfig.ca = ca
|
||||
} catch (error) {
|
||||
console.error('Failed to parse DATABASE_SSL_CA:', error)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new Error(
|
||||
`Invalid DATABASE_SSL mode: ${sslMode}. Must be one of: disable, prefer, require, verify-ca, verify-full`
|
||||
)
|
||||
}
|
||||
|
||||
return sslConfig
|
||||
}
|
||||
|
||||
const sslConfig = getSSLConfig()
|
||||
const postgresClient = postgres(CONNECTION_STRING, {
|
||||
prepare: false,
|
||||
idle_timeout: 20,
|
||||
connect_timeout: 30,
|
||||
max: 10,
|
||||
onnotice: () => {},
|
||||
...(sslConfig !== undefined && { ssl: sslConfig }),
|
||||
})
|
||||
const db = drizzle(postgresClient)
|
||||
|
||||
async function getUser(email: string): Promise<{ id: string; email: string } | null> {
|
||||
try {
|
||||
const users = await db.select().from(user).where(eq(user.email, email))
|
||||
if (users.length === 0) {
|
||||
logger.error(`No user found with email: ${email}`)
|
||||
return null
|
||||
}
|
||||
return { id: users[0].id, email: users[0].email }
|
||||
} catch (error) {
|
||||
logger.error('Failed to query user:', error)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
async function deregisterSSOProvider(): Promise<boolean> {
|
||||
try {
|
||||
const userEmail = process.env.SSO_USER_EMAIL
|
||||
if (!userEmail) {
|
||||
logger.error('❌ SSO_USER_EMAIL environment variable is required')
|
||||
logger.error('')
|
||||
logger.error('Example usage:')
|
||||
logger.error(
|
||||
' SSO_USER_EMAIL=admin@company.com bun run packages/db/scripts/deregister-sso-provider.ts'
|
||||
)
|
||||
logger.error('')
|
||||
logger.error('Optional: SSO_PROVIDER_ID=provider-id (to remove specific provider)')
|
||||
return false
|
||||
}
|
||||
|
||||
// Get user
|
||||
const targetUser = await getUser(userEmail)
|
||||
if (!targetUser) {
|
||||
return false
|
||||
}
|
||||
|
||||
logger.info(`Found user: ${targetUser.email} (ID: ${targetUser.id})`)
|
||||
|
||||
// Get SSO providers for this user
|
||||
const providers = await db
|
||||
.select()
|
||||
.from(ssoProvider)
|
||||
.where(eq(ssoProvider.userId, targetUser.id))
|
||||
|
||||
if (providers.length === 0) {
|
||||
logger.warn(`No SSO providers found for user: ${targetUser.email}`)
|
||||
return false
|
||||
}
|
||||
|
||||
logger.info(`Found ${providers.length} SSO provider(s) for user ${targetUser.email}`)
|
||||
for (const provider of providers) {
|
||||
logger.info(` - Provider ID: ${provider.providerId}, Domain: ${provider.domain}`)
|
||||
}
|
||||
|
||||
// Check if specific provider ID was requested
|
||||
const specificProviderId = process.env.SSO_PROVIDER_ID
|
||||
|
||||
if (specificProviderId) {
|
||||
// Delete specific provider
|
||||
const providerToDelete = providers.find((p) => p.providerId === specificProviderId)
|
||||
if (!providerToDelete) {
|
||||
logger.error(`Provider '${specificProviderId}' not found for user ${targetUser.email}`)
|
||||
return false
|
||||
}
|
||||
|
||||
await db
|
||||
.delete(ssoProvider)
|
||||
.where(
|
||||
and(eq(ssoProvider.userId, targetUser.id), eq(ssoProvider.providerId, specificProviderId))
|
||||
)
|
||||
|
||||
logger.info(
|
||||
`✅ Successfully deleted SSO provider '${specificProviderId}' for user ${targetUser.email}`
|
||||
)
|
||||
} else {
|
||||
// Delete all providers for this user
|
||||
await db.delete(ssoProvider).where(eq(ssoProvider.userId, targetUser.id))
|
||||
|
||||
logger.info(
|
||||
`✅ Successfully deleted all ${providers.length} SSO provider(s) for user ${targetUser.email}`
|
||||
)
|
||||
}
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('❌ Failed to deregister SSO provider:', {
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
return false
|
||||
} finally {
|
||||
try {
|
||||
await postgresClient.end({ timeout: 5 })
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log('🗑️ Deregister SSO Provider Script')
|
||||
console.log('====================================')
|
||||
console.log('This script removes SSO provider records from the database.\n')
|
||||
|
||||
const success = await deregisterSSOProvider()
|
||||
|
||||
if (success) {
|
||||
console.log('\n🎉 SSO provider deregistration completed successfully!')
|
||||
process.exit(0)
|
||||
} else {
|
||||
console.log('\n💥 SSO deregistration failed. Check the logs above for details.')
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// Handle script execution
|
||||
main().catch((error) => {
|
||||
logger.error('Script execution failed:', { error })
|
||||
process.exit(1)
|
||||
})
|
||||
@@ -3,6 +3,7 @@
|
||||
// This script is intentionally self-contained for execution in the migrations image.
|
||||
// Do not import from the main app code; duplicate minimal schema and DB setup here.
|
||||
|
||||
import type { ConnectionOptions } from 'node:tls'
|
||||
import { sql } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
@@ -117,12 +118,44 @@ const workflowDeploymentVersion = pgTable(
|
||||
)
|
||||
|
||||
// ---------- DB client ----------
|
||||
const getSSLConfig = () => {
|
||||
const sslMode = process.env.DATABASE_SSL?.toLowerCase()
|
||||
|
||||
if (!sslMode) return undefined
|
||||
if (sslMode === 'disable') return false
|
||||
if (sslMode === 'prefer') return 'prefer'
|
||||
|
||||
const sslConfig: ConnectionOptions = {}
|
||||
|
||||
if (sslMode === 'require') {
|
||||
sslConfig.rejectUnauthorized = false
|
||||
} else if (sslMode === 'verify-ca' || sslMode === 'verify-full') {
|
||||
sslConfig.rejectUnauthorized = true
|
||||
if (process.env.DATABASE_SSL_CA) {
|
||||
try {
|
||||
const ca = Buffer.from(process.env.DATABASE_SSL_CA, 'base64').toString('utf-8')
|
||||
sslConfig.ca = ca
|
||||
} catch (error) {
|
||||
console.error('Failed to parse DATABASE_SSL_CA:', error)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new Error(
|
||||
`Invalid DATABASE_SSL mode: ${sslMode}. Must be one of: disable, prefer, require, verify-ca, verify-full`
|
||||
)
|
||||
}
|
||||
|
||||
return sslConfig
|
||||
}
|
||||
|
||||
const sslConfig = getSSLConfig()
|
||||
const postgresClient = postgres(CONNECTION_STRING, {
|
||||
prepare: false,
|
||||
idle_timeout: 20,
|
||||
connect_timeout: 30,
|
||||
max: 10,
|
||||
onnotice: () => {},
|
||||
...(sslConfig !== undefined && { ssl: sslConfig }),
|
||||
})
|
||||
const db = drizzle(postgresClient)
|
||||
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
* SSO_SAML_WANT_ASSERTIONS_SIGNED=true (optional, defaults to false)
|
||||
*/
|
||||
|
||||
import type { ConnectionOptions } from 'node:tls'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
@@ -140,13 +141,44 @@ if (!CONNECTION_STRING) {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Initialize database connection (following migration script pattern)
|
||||
const getSSLConfig = () => {
|
||||
const sslMode = process.env.DATABASE_SSL?.toLowerCase()
|
||||
|
||||
if (!sslMode) return undefined
|
||||
if (sslMode === 'disable') return false
|
||||
if (sslMode === 'prefer') return 'prefer'
|
||||
|
||||
const sslConfig: ConnectionOptions = {}
|
||||
|
||||
if (sslMode === 'require') {
|
||||
sslConfig.rejectUnauthorized = false
|
||||
} else if (sslMode === 'verify-ca' || sslMode === 'verify-full') {
|
||||
sslConfig.rejectUnauthorized = true
|
||||
if (process.env.DATABASE_SSL_CA) {
|
||||
try {
|
||||
const ca = Buffer.from(process.env.DATABASE_SSL_CA, 'base64').toString('utf-8')
|
||||
sslConfig.ca = ca
|
||||
} catch (error) {
|
||||
console.error('Failed to parse DATABASE_SSL_CA:', error)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new Error(
|
||||
`Invalid DATABASE_SSL mode: ${sslMode}. Must be one of: disable, prefer, require, verify-ca, verify-full`
|
||||
)
|
||||
}
|
||||
|
||||
return sslConfig
|
||||
}
|
||||
|
||||
const sslConfig = getSSLConfig()
|
||||
const postgresClient = postgres(CONNECTION_STRING, {
|
||||
prepare: false,
|
||||
idle_timeout: 20,
|
||||
connect_timeout: 30,
|
||||
max: 10,
|
||||
onnotice: () => {},
|
||||
...(sslConfig !== undefined && { ssl: sslConfig }),
|
||||
})
|
||||
const db = drizzle(postgresClient)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user