Compare commits
1 Commits
v0.6.2
...
improvement
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
95b17ff820 |
@@ -4,484 +4,14 @@
|
||||
* SEO:
|
||||
* - `<section id="enterprise" aria-labelledby="enterprise-heading">`.
|
||||
* - `<h2 id="enterprise-heading">` for the section title.
|
||||
* - Compliance certs (SOC 2, HIPAA) as visible `<strong>` text.
|
||||
* - Compliance certs (SOC2, HIPAA) as visible `<strong>` text.
|
||||
* - Enterprise CTA links to contact form via `<a>` with `rel="noopener noreferrer"`.
|
||||
*
|
||||
* GEO:
|
||||
* - Entity-rich: "Sim is SOC 2 and HIPAA compliant" — not "We are compliant."
|
||||
* - Entity-rich: "Sim is SOC2 and HIPAA compliant" — not "We are compliant."
|
||||
* - `<ul>` checklist of features (SSO, RBAC, audit logs, SLA, on-premise deployment)
|
||||
* as an atomic answer block for "What enterprise features does Sim offer?".
|
||||
*/
|
||||
'use client'
|
||||
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { AnimatePresence, motion } from 'framer-motion'
|
||||
import Image from 'next/image'
|
||||
import Link from 'next/link'
|
||||
import { Badge, ChevronDown } from '@/components/emcn'
|
||||
import { Lock } from '@/components/emcn/icons'
|
||||
import { GithubIcon } from '@/components/icons'
|
||||
|
||||
/** Consistent color per actor — same pattern as Collaboration section cursors. */
|
||||
const ACTOR_COLORS: Record<string, string> = {
|
||||
'Sarah K.': '#2ABBF8',
|
||||
'Sid G.': '#33C482',
|
||||
'Theo L.': '#FA4EDF',
|
||||
'Abhay K.': '#FFCC02',
|
||||
'Danny S.': '#FF6B35',
|
||||
}
|
||||
|
||||
/** Left accent bar opacity by recency — newest is brightest. */
|
||||
const ACCENT_OPACITIES = [0.75, 0.45, 0.28, 0.15, 0.07] as const
|
||||
|
||||
/** Human-readable label per resource type. */
|
||||
const RESOURCE_TYPE_LABEL: Record<string, string> = {
|
||||
workflow: 'Workflow',
|
||||
member: 'Member',
|
||||
byok_key: 'BYOK Key',
|
||||
api_key: 'API Key',
|
||||
permission_group: 'Permission Group',
|
||||
credential_set: 'Credential Set',
|
||||
knowledge_base: 'Knowledge Base',
|
||||
environment: 'Environment',
|
||||
mcp_server: 'MCP Server',
|
||||
file: 'File',
|
||||
webhook: 'Webhook',
|
||||
chat: 'Chat',
|
||||
table: 'Table',
|
||||
folder: 'Folder',
|
||||
document: 'Document',
|
||||
}
|
||||
|
||||
interface LogEntry {
|
||||
id: number
|
||||
actor: string
|
||||
/** Matches the `description` field stored by recordAudit() */
|
||||
description: string
|
||||
resourceType: string
|
||||
/** Unix ms timestamp of when this entry was "received" */
|
||||
insertedAt: number
|
||||
}
|
||||
|
||||
function formatTimeAgo(insertedAt: number): string {
|
||||
const elapsed = Date.now() - insertedAt
|
||||
if (elapsed < 8_000) return 'just now'
|
||||
if (elapsed < 60_000) return `${Math.floor(elapsed / 1000)}s ago`
|
||||
return `${Math.floor(elapsed / 60_000)}m ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Entry templates using real description strings from the actual recordAudit()
|
||||
* calls across the codebase (e.g. `Added BYOK key for openai`,
|
||||
* `Invited alex@acme.com to workspace as member`).
|
||||
*/
|
||||
const ENTRY_TEMPLATES: Omit<LogEntry, 'id' | 'insertedAt'>[] = [
|
||||
{ actor: 'Sarah K.', description: 'Deployed workflow "Email Triage"', resourceType: 'workflow' },
|
||||
{
|
||||
actor: 'Sid G.',
|
||||
description: 'Invited alex@acme.com to workspace as member',
|
||||
resourceType: 'member',
|
||||
},
|
||||
{ actor: 'Theo L.', description: 'Added BYOK key for openai', resourceType: 'byok_key' },
|
||||
{ actor: 'Sarah K.', description: 'Created workflow "Invoice Parser"', resourceType: 'workflow' },
|
||||
{
|
||||
actor: 'Abhay K.',
|
||||
description: 'Created permission group "Engineering"',
|
||||
resourceType: 'permission_group',
|
||||
},
|
||||
{ actor: 'Danny S.', description: 'Created API key "Production Key"', resourceType: 'api_key' },
|
||||
{
|
||||
actor: 'Theo L.',
|
||||
description: 'Changed permissions for sam@acme.com to editor',
|
||||
resourceType: 'member',
|
||||
},
|
||||
{ actor: 'Sarah K.', description: 'Uploaded file "Q3_Report.pdf"', resourceType: 'file' },
|
||||
{
|
||||
actor: 'Sid G.',
|
||||
description: 'Created credential set "Prod Keys"',
|
||||
resourceType: 'credential_set',
|
||||
},
|
||||
{
|
||||
actor: 'Abhay K.',
|
||||
description: 'Created knowledge base "Internal Docs"',
|
||||
resourceType: 'knowledge_base',
|
||||
},
|
||||
{ actor: 'Danny S.', description: 'Updated environment variables', resourceType: 'environment' },
|
||||
{
|
||||
actor: 'Sarah K.',
|
||||
description: 'Added tool "search_web" to MCP server',
|
||||
resourceType: 'mcp_server',
|
||||
},
|
||||
{ actor: 'Sid G.', description: 'Created webhook "Stripe Payment"', resourceType: 'webhook' },
|
||||
{ actor: 'Theo L.', description: 'Deployed chat "Support Assistant"', resourceType: 'chat' },
|
||||
{ actor: 'Abhay K.', description: 'Created table "Lead Tracker"', resourceType: 'table' },
|
||||
{ actor: 'Danny S.', description: 'Revoked API key "Staging Key"', resourceType: 'api_key' },
|
||||
{
|
||||
actor: 'Sarah K.',
|
||||
description: 'Duplicated workflow "Data Enrichment"',
|
||||
resourceType: 'workflow',
|
||||
},
|
||||
{
|
||||
actor: 'Sid G.',
|
||||
description: 'Removed member theo@acme.com from workspace',
|
||||
resourceType: 'member',
|
||||
},
|
||||
{
|
||||
actor: 'Theo L.',
|
||||
description: 'Updated knowledge base "Product Docs"',
|
||||
resourceType: 'knowledge_base',
|
||||
},
|
||||
{ actor: 'Abhay K.', description: 'Created folder "Finance Workflows"', resourceType: 'folder' },
|
||||
{
|
||||
actor: 'Danny S.',
|
||||
description: 'Uploaded document "onboarding-guide.pdf"',
|
||||
resourceType: 'document',
|
||||
},
|
||||
{
|
||||
actor: 'Sarah K.',
|
||||
description: 'Updated credential set "Prod Keys"',
|
||||
resourceType: 'credential_set',
|
||||
},
|
||||
{
|
||||
actor: 'Sid G.',
|
||||
description: 'Added member abhay@acme.com to permission group "Engineering"',
|
||||
resourceType: 'permission_group',
|
||||
},
|
||||
{ actor: 'Theo L.', description: 'Locked workflow "Customer Sync"', resourceType: 'workflow' },
|
||||
]
|
||||
|
||||
const INITIAL_OFFSETS_MS = [0, 20_000, 75_000, 240_000, 540_000]
|
||||
|
||||
const MARQUEE_KEYFRAMES = `
|
||||
@keyframes marquee {
|
||||
0% { transform: translateX(0); }
|
||||
100% { transform: translateX(-25%); }
|
||||
}
|
||||
@media (prefers-reduced-motion: reduce) {
|
||||
@keyframes marquee { 0%, 100% { transform: none; } }
|
||||
}
|
||||
`
|
||||
|
||||
const FEATURE_TAGS = [
|
||||
'Access Control',
|
||||
'Self-Hosting',
|
||||
'Bring Your Own Key',
|
||||
'Credential Sharing',
|
||||
'Custom Limits',
|
||||
'Admin API',
|
||||
'White Labeling',
|
||||
'Dedicated Support',
|
||||
'99.99% Uptime SLA',
|
||||
'Workflow Versioning',
|
||||
'On-Premise',
|
||||
'Organizations',
|
||||
'Workspace Export',
|
||||
'Audit Logs',
|
||||
] as const
|
||||
|
||||
interface AuditRowProps {
|
||||
entry: LogEntry
|
||||
index: number
|
||||
}
|
||||
|
||||
function AuditRow({ entry, index }: AuditRowProps) {
|
||||
const color = ACTOR_COLORS[entry.actor] ?? '#F6F6F6'
|
||||
const accentOpacity = ACCENT_OPACITIES[index] ?? 0.04
|
||||
const timeAgo = formatTimeAgo(entry.insertedAt)
|
||||
const resourceLabel = RESOURCE_TYPE_LABEL[entry.resourceType]
|
||||
|
||||
return (
|
||||
<div className='group relative overflow-hidden border-[#2A2A2A] border-b bg-[#191919] transition-colors duration-150 last:border-b-0 hover:bg-[#212121]'>
|
||||
{/* Left accent bar — brightness encodes recency */}
|
||||
<div
|
||||
aria-hidden='true'
|
||||
className='absolute top-0 bottom-0 left-0 w-[2px] transition-opacity duration-150 group-hover:opacity-100'
|
||||
style={{ backgroundColor: color, opacity: accentOpacity }}
|
||||
/>
|
||||
|
||||
{/* Row content */}
|
||||
<div className='flex min-w-0 items-center gap-3 py-[10px] pr-4 pl-5'>
|
||||
{/* Actor avatar */}
|
||||
<div
|
||||
className='flex h-[22px] w-[22px] shrink-0 items-center justify-center rounded-full'
|
||||
style={{ backgroundColor: `${color}20` }}
|
||||
>
|
||||
<span className='font-[500] font-season text-[9px] leading-none' style={{ color }}>
|
||||
{entry.actor[0]}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Time */}
|
||||
<span className='w-[56px] shrink-0 font-[430] font-season text-[#F6F6F6]/30 text-[11px] leading-none tracking-[0.02em]'>
|
||||
{timeAgo}
|
||||
</span>
|
||||
|
||||
{/* Description — description hidden on mobile to avoid truncation */}
|
||||
<span className='min-w-0 truncate font-[430] font-season text-[12px] leading-none tracking-[0.02em]'>
|
||||
<span className='text-[#F6F6F6]/80'>{entry.actor}</span>
|
||||
<span className='hidden sm:inline'>
|
||||
<span className='text-[#F6F6F6]/40'> · </span>
|
||||
<span className='text-[#F6F6F6]/55'>{entry.description}</span>
|
||||
</span>
|
||||
</span>
|
||||
|
||||
{/* Resource type label — formatted name, neutral so it doesn't compete with actor colors */}
|
||||
{resourceLabel && (
|
||||
<span className='ml-auto shrink-0 rounded border border-[#2A2A2A] px-[7px] py-[3px] font-[430] font-season text-[#F6F6F6]/25 text-[10px] leading-none tracking-[0.04em]'>
|
||||
{resourceLabel}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
function AuditLogPreview() {
|
||||
const counterRef = useRef(ENTRY_TEMPLATES.length)
|
||||
const templateIndexRef = useRef(5 % ENTRY_TEMPLATES.length)
|
||||
|
||||
const now = Date.now()
|
||||
const [entries, setEntries] = useState<LogEntry[]>(() =>
|
||||
ENTRY_TEMPLATES.slice(0, 5).map((t, i) => ({
|
||||
...t,
|
||||
id: i,
|
||||
insertedAt: now - INITIAL_OFFSETS_MS[i],
|
||||
}))
|
||||
)
|
||||
const [, tick] = useState(0)
|
||||
|
||||
useEffect(() => {
|
||||
const addInterval = setInterval(() => {
|
||||
const template = ENTRY_TEMPLATES[templateIndexRef.current]
|
||||
templateIndexRef.current = (templateIndexRef.current + 1) % ENTRY_TEMPLATES.length
|
||||
|
||||
setEntries((prev) => [
|
||||
{ ...template, id: counterRef.current++, insertedAt: Date.now() },
|
||||
...prev.slice(0, 4),
|
||||
])
|
||||
}, 2600)
|
||||
|
||||
// Refresh time labels every 5s so "just now" ages to "Xs ago"
|
||||
const tickInterval = setInterval(() => tick((n) => n + 1), 5_000)
|
||||
|
||||
return () => {
|
||||
clearInterval(addInterval)
|
||||
clearInterval(tickInterval)
|
||||
}
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<div className='mx-6 mt-6 overflow-hidden rounded-[8px] border border-[#2A2A2A] md:mx-8 md:mt-8'>
|
||||
{/* Header */}
|
||||
<div className='flex items-center justify-between border-[#2A2A2A] border-b bg-[#161616] px-4 py-[10px]'>
|
||||
<div className='flex items-center gap-2'>
|
||||
{/* Pulsing live indicator */}
|
||||
<span className='relative flex h-[8px] w-[8px]'>
|
||||
<span
|
||||
className='absolute inline-flex h-full w-full animate-ping rounded-full opacity-50'
|
||||
style={{ backgroundColor: '#33C482' }}
|
||||
/>
|
||||
<span
|
||||
className='relative inline-flex h-[8px] w-[8px] rounded-full'
|
||||
style={{ backgroundColor: '#33C482' }}
|
||||
/>
|
||||
</span>
|
||||
<span className='font-[430] font-season text-[#F6F6F6]/40 text-[11px] uppercase tracking-[0.08em]'>
|
||||
Audit Log
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex items-center gap-2'>
|
||||
<span className='rounded border border-[#2A2A2A] px-[8px] py-[3px] font-[430] font-season text-[#F6F6F6]/20 text-[11px] tracking-[0.02em]'>
|
||||
Export
|
||||
</span>
|
||||
<span className='rounded border border-[#2A2A2A] px-[8px] py-[3px] font-[430] font-season text-[#F6F6F6]/20 text-[11px] tracking-[0.02em]'>
|
||||
Filter
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Log entries — new items push existing ones down */}
|
||||
<div className='overflow-hidden'>
|
||||
<AnimatePresence mode='popLayout' initial={false}>
|
||||
{entries.map((entry, index) => (
|
||||
<motion.div
|
||||
key={entry.id}
|
||||
layout
|
||||
initial={{ y: -48, opacity: 0 }}
|
||||
animate={{ y: 0, opacity: 1 }}
|
||||
exit={{ opacity: 0 }}
|
||||
transition={{
|
||||
layout: {
|
||||
type: 'spring',
|
||||
stiffness: 380,
|
||||
damping: 38,
|
||||
mass: 0.8,
|
||||
},
|
||||
y: { duration: 0.32, ease: [0.25, 0.46, 0.45, 0.94] },
|
||||
opacity: { duration: 0.25 },
|
||||
}}
|
||||
>
|
||||
<AuditRow entry={entry} index={index} />
|
||||
</motion.div>
|
||||
))}
|
||||
</AnimatePresence>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
function TrustStrip() {
|
||||
return (
|
||||
<div className='mx-6 mt-4 grid grid-cols-1 overflow-hidden rounded-[8px] border border-[#2A2A2A] sm:grid-cols-3 md:mx-8'>
|
||||
{/* SOC 2 + HIPAA combined */}
|
||||
<Link
|
||||
href='https://trust.delve.co/sim-studio'
|
||||
target='_blank'
|
||||
rel='noopener noreferrer'
|
||||
className='group flex items-center gap-3 border-[#2A2A2A] border-b px-4 py-[14px] transition-colors hover:bg-[#212121] sm:border-r sm:border-b-0'
|
||||
>
|
||||
<Image
|
||||
src='/footer/soc2.png'
|
||||
alt='SOC 2 Type II'
|
||||
width={22}
|
||||
height={22}
|
||||
className='shrink-0 object-contain'
|
||||
/>
|
||||
<div className='flex flex-col gap-[3px]'>
|
||||
<strong className='font-[430] font-season text-[13px] text-white leading-none'>
|
||||
SOC 2 & HIPAA
|
||||
</strong>
|
||||
<span className='font-[430] font-season text-[#F6F6F6]/30 text-[11px] leading-none tracking-[0.02em] transition-colors group-hover:text-[#F6F6F6]/55'>
|
||||
Type II · PHI protected →
|
||||
</span>
|
||||
</div>
|
||||
</Link>
|
||||
|
||||
{/* Open Source — center */}
|
||||
<Link
|
||||
href='https://github.com/simstudioai/sim'
|
||||
target='_blank'
|
||||
rel='noopener noreferrer'
|
||||
className='group flex items-center gap-3 border-[#2A2A2A] border-b px-4 py-[14px] transition-colors hover:bg-[#212121] sm:border-r sm:border-b-0'
|
||||
>
|
||||
<div className='flex h-[22px] w-[22px] shrink-0 items-center justify-center rounded-full bg-[#FFCC02]/10'>
|
||||
<GithubIcon width={11} height={11} className='text-[#FFCC02]/75' />
|
||||
</div>
|
||||
<div className='flex flex-col gap-[3px]'>
|
||||
<strong className='font-[430] font-season text-[13px] text-white leading-none'>
|
||||
Open Source
|
||||
</strong>
|
||||
<span className='font-[430] font-season text-[#F6F6F6]/30 text-[11px] leading-none tracking-[0.02em] transition-colors group-hover:text-[#F6F6F6]/55'>
|
||||
View on GitHub →
|
||||
</span>
|
||||
</div>
|
||||
</Link>
|
||||
|
||||
{/* SSO */}
|
||||
<div className='flex items-center gap-3 px-4 py-[14px]'>
|
||||
<div className='flex h-[22px] w-[22px] shrink-0 items-center justify-center rounded-full bg-[#2ABBF8]/10'>
|
||||
<Lock className='h-[14px] w-[14px] text-[#2ABBF8]/75' />
|
||||
</div>
|
||||
<div className='flex flex-col gap-[3px]'>
|
||||
<strong className='font-[430] font-season text-[13px] text-white leading-none'>
|
||||
SSO & SCIM
|
||||
</strong>
|
||||
<span className='font-[430] font-season text-[#F6F6F6]/30 text-[11px] leading-none tracking-[0.02em]'>
|
||||
Okta, Azure AD, Google
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default function Enterprise() {
|
||||
return (
|
||||
<section id='enterprise' aria-labelledby='enterprise-heading' className='bg-[#F6F6F6]'>
|
||||
<div className='px-4 pt-[60px] pb-[40px] sm:px-8 sm:pt-[80px] sm:pb-0 md:px-[80px] md:pt-[100px]'>
|
||||
<div className='flex flex-col items-start gap-3 sm:gap-4 md:gap-[20px]'>
|
||||
<Badge
|
||||
variant='blue'
|
||||
size='md'
|
||||
dot
|
||||
className='bg-[#FFCC02]/10 font-season text-[#FFCC02] uppercase tracking-[0.02em]'
|
||||
>
|
||||
Enterprise
|
||||
</Badge>
|
||||
|
||||
<h2
|
||||
id='enterprise-heading'
|
||||
className='max-w-[600px] font-[430] font-season text-[#1C1C1C] text-[32px] leading-[100%] tracking-[-0.02em] sm:text-[36px] md:text-[40px]'
|
||||
>
|
||||
Enterprise features for
|
||||
<br />
|
||||
fast, scalable workflows
|
||||
</h2>
|
||||
</div>
|
||||
|
||||
<div className='mt-8 overflow-hidden rounded-[12px] bg-[#1C1C1C] sm:mt-10 md:mt-12'>
|
||||
<AuditLogPreview />
|
||||
<TrustStrip />
|
||||
|
||||
{/* Scrolling feature ticker */}
|
||||
<div className='relative mt-6 overflow-hidden border-[#2A2A2A] border-t'>
|
||||
<style dangerouslySetInnerHTML={{ __html: MARQUEE_KEYFRAMES }} />
|
||||
{/* Fade edges */}
|
||||
<div
|
||||
aria-hidden='true'
|
||||
className='pointer-events-none absolute top-0 bottom-0 left-0 z-10 w-16'
|
||||
style={{ background: 'linear-gradient(to right, #1C1C1C, transparent)' }}
|
||||
/>
|
||||
<div
|
||||
aria-hidden='true'
|
||||
className='pointer-events-none absolute top-0 right-0 bottom-0 z-10 w-16'
|
||||
style={{ background: 'linear-gradient(to left, #1C1C1C, transparent)' }}
|
||||
/>
|
||||
{/* Duplicate tags for seamless loop */}
|
||||
<div className='flex w-max' style={{ animation: 'marquee 30s linear infinite' }}>
|
||||
{[...FEATURE_TAGS, ...FEATURE_TAGS, ...FEATURE_TAGS, ...FEATURE_TAGS].map(
|
||||
(tag, i) => (
|
||||
<span
|
||||
key={i}
|
||||
className='whitespace-nowrap border-[#2A2A2A] border-r px-5 py-4 font-[430] font-season text-[#F6F6F6]/40 text-[13px] leading-none tracking-[0.02em]'
|
||||
>
|
||||
{tag}
|
||||
</span>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex items-center justify-between border-[#2A2A2A] border-t px-6 py-5 md:px-8 md:py-6'>
|
||||
<p className='font-[430] font-season text-[#F6F6F6]/40 text-[15px] leading-[150%] tracking-[0.02em]'>
|
||||
Ready for growth?
|
||||
</p>
|
||||
<Link
|
||||
href='/contact'
|
||||
className='group/cta inline-flex h-[32px] items-center gap-[6px] rounded-[5px] border border-white bg-white px-[10px] font-[430] font-season text-[14px] text-black transition-colors hover:border-[#E0E0E0] hover:bg-[#E0E0E0]'
|
||||
>
|
||||
Book a demo
|
||||
<span className='relative h-[10px] w-[10px] shrink-0'>
|
||||
<ChevronDown className='-rotate-90 absolute inset-0 h-[10px] w-[10px] transition-opacity duration-150 group-hover/cta:opacity-0' />
|
||||
<svg
|
||||
className='absolute inset-0 h-[10px] w-[10px] opacity-0 transition-opacity duration-150 group-hover/cta:opacity-100'
|
||||
viewBox='0 0 10 10'
|
||||
fill='none'
|
||||
>
|
||||
<path
|
||||
d='M1 5H8M5.5 2L8.5 5L5.5 8'
|
||||
stroke='currentColor'
|
||||
strokeWidth='1.33'
|
||||
strokeLinecap='square'
|
||||
strokeLinejoin='miter'
|
||||
fill='none'
|
||||
/>
|
||||
</svg>
|
||||
</span>
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
)
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
import { type SVGProps, useEffect, useRef, useState } from 'react'
|
||||
import { AnimatePresence, motion, useInView } from 'framer-motion'
|
||||
import ReactMarkdown, { type Components } from 'react-markdown'
|
||||
import remarkGfm from 'remark-gfm'
|
||||
import { ChevronDown } from '@/components/emcn'
|
||||
import { Database, File, Library, Table } from '@/components/emcn/icons'
|
||||
import {
|
||||
@@ -18,7 +16,6 @@ import {
|
||||
xAIIcon,
|
||||
} from '@/components/icons'
|
||||
import { CsvIcon, JsonIcon, MarkdownIcon, PdfIcon } from '@/components/icons/document-icons'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
|
||||
interface FeaturesPreviewProps {
|
||||
activeTab: number
|
||||
@@ -127,7 +124,7 @@ const EXPAND_TARGETS: Record<number, { row: number; col: number }> = {
|
||||
}
|
||||
|
||||
const EXPAND_ROW_COUNTS: Record<number, number> = {
|
||||
1: 8,
|
||||
1: 10,
|
||||
2: 10,
|
||||
3: 10,
|
||||
4: 7,
|
||||
@@ -606,28 +603,7 @@ const MOCK_KB_DATA = [
|
||||
['metrics.csv', '1.4 MB', '5.8k', '38', 'enabled'],
|
||||
] as const
|
||||
|
||||
const MD_COMPONENTS: Components = {
|
||||
h1: ({ children }) => (
|
||||
<h1 className='mb-4 border-[#E5E5E5] border-b pb-2 font-semibold text-[#1C1C1C] text-[20px]'>
|
||||
{children}
|
||||
</h1>
|
||||
),
|
||||
h2: ({ children }) => (
|
||||
<h2 className='mt-5 mb-3 border-[#E5E5E5] border-b pb-1.5 font-semibold text-[#1C1C1C] text-[16px]'>
|
||||
{children}
|
||||
</h2>
|
||||
),
|
||||
ul: ({ children }) => <ul className='mb-3 list-disc pl-[24px]'>{children}</ul>,
|
||||
ol: ({ children }) => <ol className='mb-3 list-decimal pl-[24px]'>{children}</ol>,
|
||||
li: ({ children }) => (
|
||||
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>{children}</li>
|
||||
),
|
||||
p: ({ children }) => <p className='mb-3 text-[#1C1C1C] text-[14px] leading-[1.6]'>{children}</p>,
|
||||
}
|
||||
|
||||
function MockFullFiles() {
|
||||
const [source, setSource] = useState(MOCK_MD_SOURCE)
|
||||
|
||||
return (
|
||||
<div className='flex h-full flex-col'>
|
||||
<div className='flex h-[44px] shrink-0 items-center border-[#E5E5E5] border-b px-[24px]'>
|
||||
@@ -646,13 +622,9 @@ function MockFullFiles() {
|
||||
animate={{ opacity: 1 }}
|
||||
transition={{ duration: 0.4, delay: 0.3 }}
|
||||
>
|
||||
<textarea
|
||||
value={source}
|
||||
onChange={(e) => setSource(e.target.value)}
|
||||
spellCheck={false}
|
||||
autoCorrect='off'
|
||||
className='h-full w-full resize-none overflow-auto whitespace-pre-wrap bg-transparent p-[24px] font-[300] font-mono text-[#1C1C1C] text-[12px] leading-[1.7] outline-none'
|
||||
/>
|
||||
<pre className='h-full overflow-auto whitespace-pre-wrap p-[24px] font-[300] font-mono text-[#1C1C1C] text-[12px] leading-[1.7]'>
|
||||
{MOCK_MD_SOURCE}
|
||||
</pre>
|
||||
</motion.div>
|
||||
|
||||
<div className='h-full w-px shrink-0 bg-[#E5E5E5]' />
|
||||
@@ -664,9 +636,47 @@ function MockFullFiles() {
|
||||
transition={{ duration: 0.4, delay: 0.5 }}
|
||||
>
|
||||
<div className='h-full overflow-auto p-[24px]'>
|
||||
<ReactMarkdown remarkPlugins={[remarkGfm]} components={MD_COMPONENTS}>
|
||||
{source}
|
||||
</ReactMarkdown>
|
||||
<h1 className='mb-4 border-[#E5E5E5] border-b pb-2 font-semibold text-[#1C1C1C] text-[20px]'>
|
||||
Meeting Notes
|
||||
</h1>
|
||||
<h2 className='mt-5 mb-3 border-[#E5E5E5] border-b pb-1.5 font-semibold text-[#1C1C1C] text-[16px]'>
|
||||
Action Items
|
||||
</h2>
|
||||
<ul className='mb-3 list-disc pl-[24px]'>
|
||||
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
Review Q1 metrics with Sarah
|
||||
</li>
|
||||
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
Update API documentation
|
||||
</li>
|
||||
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
Schedule design review for v2.0
|
||||
</li>
|
||||
</ul>
|
||||
<h2 className='mt-5 mb-3 border-[#E5E5E5] border-b pb-1.5 font-semibold text-[#1C1C1C] text-[16px]'>
|
||||
Discussion Points
|
||||
</h2>
|
||||
<p className='mb-3 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
The team agreed to prioritize the new onboarding flow. Key decisions:
|
||||
</p>
|
||||
<ol className='mb-3 list-decimal pl-[24px]'>
|
||||
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
Migrate to the new auth provider by end of March
|
||||
</li>
|
||||
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
Ship the dashboard redesign in two phases
|
||||
</li>
|
||||
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
Add automated testing for all critical paths
|
||||
</li>
|
||||
</ol>
|
||||
<h2 className='mt-5 mb-3 border-[#E5E5E5] border-b pb-1.5 font-semibold text-[#1C1C1C] text-[16px]'>
|
||||
Next Steps
|
||||
</h2>
|
||||
<p className='mb-3 text-[#1C1C1C] text-[14px] leading-[1.6]'>
|
||||
Follow up with engineering on the timeline for the API v2 migration. Draft the
|
||||
proposal for the board meeting next week.
|
||||
</p>
|
||||
</div>
|
||||
</motion.div>
|
||||
</div>
|
||||
@@ -799,79 +809,8 @@ const LOG_STATUS_STYLES: Record<string, { bg: string; text: string; label: strin
|
||||
error: { bg: '#FEE2E2', text: '#991B1B', label: 'Error' },
|
||||
}
|
||||
|
||||
interface MockLogDetail {
|
||||
output: string
|
||||
spans: { name: string; ms: number; depth: number }[]
|
||||
}
|
||||
|
||||
const MOCK_LOG_DETAILS: MockLogDetail[] = [
|
||||
{
|
||||
output: '{\n "result": "processed",\n "emails": 3,\n "status": "complete"\n}',
|
||||
spans: [
|
||||
{ name: 'Agent Block', ms: 800, depth: 0 },
|
||||
{ name: 'search_web', ms: 210, depth: 1 },
|
||||
{ name: 'Function Block', ms: 180, depth: 0 },
|
||||
],
|
||||
},
|
||||
{
|
||||
output: '{\n "score": 87,\n "label": "high",\n "confidence": 0.94\n}',
|
||||
spans: [
|
||||
{ name: 'Agent Block', ms: 2100, depth: 0 },
|
||||
{ name: 'hubspot_get_contact', ms: 340, depth: 1 },
|
||||
{ name: 'Function Block', ms: 180, depth: 0 },
|
||||
{ name: 'Condition', ms: 50, depth: 0 },
|
||||
],
|
||||
},
|
||||
{
|
||||
output: '{\n "error": "timeout",\n "message": "LLM request exceeded limit"\n}',
|
||||
spans: [
|
||||
{ name: 'Agent Block', ms: 650, depth: 0 },
|
||||
{ name: 'search_kb', ms: 120, depth: 1 },
|
||||
],
|
||||
},
|
||||
{
|
||||
output: '{\n "user": "james@globex.io",\n "steps_completed": 4,\n "status": "sent"\n}',
|
||||
spans: [
|
||||
{ name: 'Agent Block', ms: 980, depth: 0 },
|
||||
{ name: 'send_email', ms: 290, depth: 1 },
|
||||
{ name: 'Function Block', ms: 210, depth: 0 },
|
||||
{ name: 'Agent Block', ms: 420, depth: 0 },
|
||||
],
|
||||
},
|
||||
{
|
||||
output: '{\n "records_processed": 142,\n "inserted": 138,\n "errors": 4\n}',
|
||||
spans: [
|
||||
{ name: 'Agent Block', ms: 1800, depth: 0 },
|
||||
{ name: 'salesforce_query', ms: 820, depth: 1 },
|
||||
{ name: 'Function Block', ms: 340, depth: 0 },
|
||||
{ name: 'Agent Block', ms: 1200, depth: 0 },
|
||||
{ name: 'insert_rows', ms: 610, depth: 1 },
|
||||
],
|
||||
},
|
||||
{
|
||||
output: '{\n "result": "processed",\n "emails": 1,\n "status": "complete"\n}',
|
||||
spans: [
|
||||
{ name: 'Agent Block', ms: 720, depth: 0 },
|
||||
{ name: 'gmail_read', ms: 190, depth: 1 },
|
||||
{ name: 'Function Block', ms: 160, depth: 0 },
|
||||
],
|
||||
},
|
||||
{
|
||||
output: '{\n "ticket_id": "TKT-4291",\n "priority": "medium",\n "assigned": "support"\n}',
|
||||
spans: [
|
||||
{ name: 'Agent Block', ms: 1400, depth: 0 },
|
||||
{ name: 'classify_intent', ms: 380, depth: 1 },
|
||||
{ name: 'Function Block', ms: 220, depth: 0 },
|
||||
{ name: 'Agent Block', ms: 780, depth: 0 },
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
const MOCK_LOG_DETAIL_MAX_MS = MOCK_LOG_DETAILS.map((d) => Math.max(...d.spans.map((s) => s.ms)))
|
||||
|
||||
function MockFullLogs({ revealedRows }: { revealedRows: number }) {
|
||||
const [showSidebar, setShowSidebar] = useState(false)
|
||||
const [selectedRow, setSelectedRow] = useState(0)
|
||||
|
||||
useEffect(() => {
|
||||
if (revealedRows < MOCK_LOG_DATA.length) return
|
||||
@@ -879,6 +818,8 @@ function MockFullLogs({ revealedRows }: { revealedRows: number }) {
|
||||
return () => clearTimeout(timer)
|
||||
}, [revealedRows])
|
||||
|
||||
const selectedRow = 0
|
||||
|
||||
return (
|
||||
<div className='relative flex h-full'>
|
||||
<div className='flex min-w-0 flex-1 flex-col'>
|
||||
@@ -915,11 +856,7 @@ function MockFullLogs({ revealedRows }: { revealedRows: number }) {
|
||||
initial={{ opacity: 0, y: 4 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
transition={{ duration: 0.2, ease: 'easeOut' }}
|
||||
className={cn(
|
||||
'cursor-pointer',
|
||||
isSelected ? 'bg-[#F5F5F5]' : 'hover:bg-[#FAFAFA]'
|
||||
)}
|
||||
onClick={() => setSelectedRow(i)}
|
||||
className={isSelected ? 'bg-[#F5F5F5]' : 'hover:bg-[#FAFAFA]'}
|
||||
>
|
||||
<td className='px-[24px] py-[10px] align-middle'>
|
||||
<span className='flex items-center gap-[12px] font-medium text-[#1C1C1C] text-[14px]'>
|
||||
@@ -971,59 +908,24 @@ function MockFullLogs({ revealedRows }: { revealedRows: number }) {
|
||||
transition={{ duration: 0.25, ease: [0.4, 0, 0.2, 1] }}
|
||||
style={{ width: '45%' }}
|
||||
>
|
||||
<MockLogDetailsSidebar
|
||||
selectedRow={selectedRow}
|
||||
onPrev={() => setSelectedRow((r) => Math.max(0, r - 1))}
|
||||
onNext={() => setSelectedRow((r) => Math.min(MOCK_LOG_DATA.length - 1, r + 1))}
|
||||
/>
|
||||
<MockLogDetailsSidebar />
|
||||
</motion.div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
interface MockLogDetailsSidebarProps {
|
||||
selectedRow: number
|
||||
onPrev: () => void
|
||||
onNext: () => void
|
||||
}
|
||||
|
||||
function MockLogDetailsSidebar({ selectedRow, onPrev, onNext }: MockLogDetailsSidebarProps) {
|
||||
const row = MOCK_LOG_DATA[selectedRow]
|
||||
const detail = MOCK_LOG_DETAILS[selectedRow]
|
||||
const statusStyle = LOG_STATUS_STYLES[row[2]] ?? LOG_STATUS_STYLES.success
|
||||
const [date, time] = row[1].split(', ')
|
||||
const color = MOCK_LOG_COLORS[selectedRow]
|
||||
const maxMs = MOCK_LOG_DETAIL_MAX_MS[selectedRow]
|
||||
const isPrevDisabled = selectedRow === 0
|
||||
const isNextDisabled = selectedRow === MOCK_LOG_DATA.length - 1
|
||||
|
||||
function MockLogDetailsSidebar() {
|
||||
return (
|
||||
<div className='flex h-full flex-col overflow-y-auto px-[14px] pt-[12px]'>
|
||||
<div className='flex h-full flex-col px-[14px] pt-[12px]'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='font-medium text-[#1C1C1C] text-[14px]'>Log Details</span>
|
||||
<div className='flex items-center gap-[1px]'>
|
||||
<button
|
||||
type='button'
|
||||
onClick={onPrev}
|
||||
disabled={isPrevDisabled}
|
||||
className={cn(
|
||||
'flex h-[24px] w-[24px] items-center justify-center rounded-[4px] text-[#999]',
|
||||
isPrevDisabled ? 'cursor-not-allowed opacity-40' : 'hover:bg-[#F5F5F5]'
|
||||
)}
|
||||
>
|
||||
<div className='flex h-[24px] w-[24px] items-center justify-center rounded-[4px] text-[#999] hover:bg-[#F5F5F5]'>
|
||||
<ChevronDown className='h-[14px] w-[14px] rotate-180' />
|
||||
</button>
|
||||
<button
|
||||
type='button'
|
||||
onClick={onNext}
|
||||
disabled={isNextDisabled}
|
||||
className={cn(
|
||||
'flex h-[24px] w-[24px] items-center justify-center rounded-[4px] text-[#999]',
|
||||
isNextDisabled ? 'cursor-not-allowed opacity-40' : 'hover:bg-[#F5F5F5]'
|
||||
)}
|
||||
>
|
||||
</div>
|
||||
<div className='flex h-[24px] w-[24px] items-center justify-center rounded-[4px] text-[#999] hover:bg-[#F5F5F5]'>
|
||||
<ChevronDown className='h-[14px] w-[14px]' />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1032,8 +934,8 @@ function MockLogDetailsSidebar({ selectedRow, onPrev, onNext }: MockLogDetailsSi
|
||||
<div className='flex w-[120px] shrink-0 flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[#999] text-[12px]'>Timestamp</span>
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<span className='font-medium text-[#666] text-[13px]'>{date}</span>
|
||||
<span className='font-medium text-[#666] text-[13px]'>{time}</span>
|
||||
<span className='font-medium text-[#666] text-[13px]'>Mar 17</span>
|
||||
<span className='font-medium text-[#666] text-[13px]'>2:14 PM</span>
|
||||
</div>
|
||||
</div>
|
||||
<div className='flex min-w-0 flex-1 flex-col gap-[8px]'>
|
||||
@@ -1042,12 +944,12 @@ function MockLogDetailsSidebar({ selectedRow, onPrev, onNext }: MockLogDetailsSi
|
||||
<div
|
||||
className='h-[10px] w-[10px] shrink-0 rounded-[3px] border-[1.5px]'
|
||||
style={{
|
||||
backgroundColor: color,
|
||||
borderColor: `${color}60`,
|
||||
backgroundColor: '#7C3AED',
|
||||
borderColor: '#7C3AED60',
|
||||
backgroundClip: 'padding-box',
|
||||
}}
|
||||
/>
|
||||
<span className='truncate font-medium text-[#666] text-[13px]'>{row[0]}</span>
|
||||
<span className='truncate font-medium text-[#666] text-[13px]'>Email Bot</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -1055,52 +957,26 @@ function MockLogDetailsSidebar({ selectedRow, onPrev, onNext }: MockLogDetailsSi
|
||||
<div className='flex flex-col'>
|
||||
<div className='flex h-[42px] items-center justify-between border-[#E5E5E5] border-b px-[8px]'>
|
||||
<span className='font-medium text-[#999] text-[12px]'>Level</span>
|
||||
<span
|
||||
className='inline-flex items-center rounded-full px-[8px] py-[2px] font-medium text-[11px]'
|
||||
style={{ backgroundColor: statusStyle.bg, color: statusStyle.text }}
|
||||
>
|
||||
{statusStyle.label}
|
||||
<span className='inline-flex items-center rounded-full bg-[#DCFCE7] px-[8px] py-[2px] font-medium text-[#166534] text-[11px]'>
|
||||
Success
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex h-[42px] items-center justify-between border-[#E5E5E5] border-b px-[8px]'>
|
||||
<span className='font-medium text-[#999] text-[12px]'>Trigger</span>
|
||||
<span className='rounded-[4px] bg-[#F5F5F5] px-[6px] py-[2px] text-[#666] text-[11px]'>
|
||||
{row[4]}
|
||||
API
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex h-[42px] items-center justify-between px-[8px]'>
|
||||
<span className='font-medium text-[#999] text-[12px]'>Duration</span>
|
||||
<span className='font-medium text-[#666] text-[13px]'>{row[5]}</span>
|
||||
<span className='font-medium text-[#666] text-[13px]'>1.2s</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[6px] rounded-[6px] border border-[#E5E5E5] bg-[#FAFAFA] px-[10px] py-[8px]'>
|
||||
<span className='font-medium text-[#999] text-[12px]'>Workflow Output</span>
|
||||
<div className='rounded-[6px] bg-[#F0F0F0] p-[10px] font-mono text-[#555] text-[11px] leading-[1.5]'>
|
||||
{detail.output}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[6px] rounded-[6px] border border-[#E5E5E5] bg-[#FAFAFA] px-[10px] py-[8px]'>
|
||||
<span className='font-medium text-[#999] text-[12px]'>Trace Spans</span>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
{detail.spans.map((span, i) => (
|
||||
<div
|
||||
key={i}
|
||||
className={cn('flex flex-col gap-[3px]', span.depth === 1 && 'ml-[12px]')}
|
||||
>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='font-mono text-[#555] text-[11px]'>{span.name}</span>
|
||||
<span className='font-medium text-[#999] text-[11px]'>{span.ms}ms</span>
|
||||
</div>
|
||||
<div className='h-[4px] w-full overflow-hidden rounded-full bg-[#F0F0F0]'>
|
||||
<div
|
||||
className='h-full rounded-full bg-[#2F6FED]'
|
||||
style={{ width: `${(span.ms / maxMs) * 100}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
{'{\n "result": "processed",\n "emails": 3,\n "status": "complete"\n}'}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -1109,8 +985,6 @@ function MockLogDetailsSidebar({ selectedRow, onPrev, onNext }: MockLogDetailsSi
|
||||
}
|
||||
|
||||
function MockFullTable({ revealedRows }: { revealedRows: number }) {
|
||||
const [selectedRow, setSelectedRow] = useState<number | null>(null)
|
||||
|
||||
return (
|
||||
<div className='flex h-full flex-col'>
|
||||
<div className='flex h-[44px] shrink-0 items-center border-[#E5E5E5] border-b px-[24px]'>
|
||||
@@ -1163,48 +1037,26 @@ function MockFullTable({ revealedRows }: { revealedRows: number }) {
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{MOCK_TABLE_DATA.slice(0, revealedRows).map((row, i) => {
|
||||
const isSelected = selectedRow === i
|
||||
return (
|
||||
<motion.tr
|
||||
key={i}
|
||||
initial={{ opacity: 0, y: 4 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
transition={{ duration: 0.2, ease: 'easeOut' }}
|
||||
className='cursor-pointer'
|
||||
onClick={() => setSelectedRow(i)}
|
||||
>
|
||||
{MOCK_TABLE_DATA.slice(0, revealedRows).map((row, i) => (
|
||||
<motion.tr
|
||||
key={i}
|
||||
initial={{ opacity: 0, y: 4 }}
|
||||
animate={{ opacity: 1, y: 0 }}
|
||||
transition={{ duration: 0.2, ease: 'easeOut' }}
|
||||
>
|
||||
<td className='border-[#E5E5E5] border-r border-b px-[4px] py-[7px] text-center align-middle'>
|
||||
<span className='text-[#999] text-[11px] tabular-nums'>{i + 1}</span>
|
||||
</td>
|
||||
{row.map((cell, j) => (
|
||||
<td
|
||||
className={cn(
|
||||
'border-[#E5E5E5] border-r border-b px-[4px] py-[7px] text-center align-middle',
|
||||
isSelected ? 'bg-[rgba(37,99,235,0.06)]' : 'hover:bg-[#FAFAFA]'
|
||||
)}
|
||||
key={j}
|
||||
className='border-[#E5E5E5] border-r border-b px-[8px] py-[7px] align-middle'
|
||||
>
|
||||
<span className='text-[#999] text-[11px] tabular-nums'>{i + 1}</span>
|
||||
<span className='block truncate text-[#1C1C1C] text-[13px]'>{cell}</span>
|
||||
</td>
|
||||
{row.map((cell, j) => (
|
||||
<td
|
||||
key={j}
|
||||
className={cn(
|
||||
'relative border-[#E5E5E5] border-r border-b px-[8px] py-[7px] align-middle',
|
||||
isSelected ? 'bg-[rgba(37,99,235,0.06)]' : 'hover:bg-[#FAFAFA]'
|
||||
)}
|
||||
>
|
||||
{isSelected && (
|
||||
<div
|
||||
className={cn(
|
||||
'-bottom-px -top-px pointer-events-none absolute left-0 z-[5] border-[#1a5cf6] border-t border-b',
|
||||
j === 0 && 'border-l',
|
||||
j === row.length - 1 ? '-right-px border-r' : 'right-0'
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
<span className='block truncate text-[#1C1C1C] text-[13px]'>{cell}</span>
|
||||
</td>
|
||||
))}
|
||||
</motion.tr>
|
||||
)
|
||||
})}
|
||||
))}
|
||||
</motion.tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
@@ -22,10 +22,9 @@ const PRICING_TIERS: PricingTier[] = [
|
||||
features: [
|
||||
'1,000 credits (trial)',
|
||||
'5GB file storage',
|
||||
'3 tables · 1,000 rows each',
|
||||
'5 min execution limit',
|
||||
'7-day log retention',
|
||||
'CLI/SDK/MCP Access',
|
||||
'Limited log retention',
|
||||
'CLI/SDK Access',
|
||||
],
|
||||
cta: { label: 'Get started', href: '/signup' },
|
||||
},
|
||||
@@ -37,12 +36,11 @@ const PRICING_TIERS: PricingTier[] = [
|
||||
billingPeriod: 'per month',
|
||||
color: '#00F701',
|
||||
features: [
|
||||
'6,000 credits/mo · +50/day',
|
||||
'6,000 credits/mo',
|
||||
'+50 daily refresh credits',
|
||||
'150 runs/min (sync)',
|
||||
'50 min sync execution limit',
|
||||
'50GB file storage',
|
||||
'25 tables · 5,000 rows each',
|
||||
'50 min execution · 150 runs/min',
|
||||
'Unlimited log retention',
|
||||
'CLI/SDK/MCP Access',
|
||||
],
|
||||
cta: { label: 'Get started', href: '/signup' },
|
||||
},
|
||||
@@ -54,12 +52,11 @@ const PRICING_TIERS: PricingTier[] = [
|
||||
billingPeriod: 'per month',
|
||||
color: '#FA4EDF',
|
||||
features: [
|
||||
'25,000 credits/mo · +200/day',
|
||||
'25,000 credits/mo',
|
||||
'+200 daily refresh credits',
|
||||
'300 runs/min (sync)',
|
||||
'50 min sync execution limit',
|
||||
'500GB file storage',
|
||||
'25 tables · 5,000 rows each',
|
||||
'50 min execution · 300 runs/min',
|
||||
'Unlimited log retention',
|
||||
'CLI/SDK/MCP Access',
|
||||
],
|
||||
cta: { label: 'Get started', href: '/signup' },
|
||||
},
|
||||
@@ -69,15 +66,7 @@ const PRICING_TIERS: PricingTier[] = [
|
||||
description: 'For organizations needing security and scale',
|
||||
price: 'Custom',
|
||||
color: '#FFCC02',
|
||||
features: [
|
||||
'Custom credits & infra limits',
|
||||
'Custom file storage',
|
||||
'10,000 tables · 1M rows each',
|
||||
'Custom execution limits',
|
||||
'Unlimited log retention',
|
||||
'SSO & SCIM · SOC2 & HIPAA',
|
||||
'Self hosting · Dedicated support',
|
||||
],
|
||||
features: ['Custom infra limits', 'SSO', 'SOC2', 'Self hosting', 'Dedicated support'],
|
||||
cta: { label: 'Book a demo', href: '/contact' },
|
||||
},
|
||||
]
|
||||
@@ -125,12 +114,12 @@ function PricingCard({ tier }: PricingCardProps) {
|
||||
</p>
|
||||
<div className='mt-4'>
|
||||
{isEnterprise ? (
|
||||
<Link
|
||||
<a
|
||||
href={tier.cta.href}
|
||||
className='flex h-[32px] w-full items-center justify-center rounded-[5px] border border-[#E5E5E5] px-[10px] font-[430] font-season text-[#1C1C1C] text-[14px] transition-colors hover:bg-[#F0F0F0]'
|
||||
>
|
||||
{tier.cta.label}
|
||||
</Link>
|
||||
</a>
|
||||
) : isPro ? (
|
||||
<Link
|
||||
href={tier.cta.href}
|
||||
|
||||
@@ -28,8 +28,8 @@ import {
|
||||
* for immediate availability to AI crawlers.
|
||||
* - Section `id` attributes serve as fragment anchors for precise AI citations.
|
||||
* - Content ordering prioritizes answer-first patterns: definition (Hero) ->
|
||||
* examples (Templates) -> capabilities (Features) -> social proof (Collaboration) ->
|
||||
* enterprise (Enterprise) -> pricing (Pricing) -> testimonials (Testimonials).
|
||||
* examples (Templates) -> capabilities (Features) -> social proof (Collaboration, Testimonials) ->
|
||||
* pricing (Pricing) -> enterprise (Enterprise).
|
||||
*/
|
||||
export default async function Landing() {
|
||||
return (
|
||||
@@ -43,8 +43,8 @@ export default async function Landing() {
|
||||
<Templates />
|
||||
<Features />
|
||||
<Collaboration />
|
||||
<Enterprise />
|
||||
<Pricing />
|
||||
<Enterprise />
|
||||
<Testimonials />
|
||||
</main>
|
||||
<Footer />
|
||||
|
||||
@@ -1,248 +0,0 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { db } from '@sim/db'
|
||||
import { document } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
createDocumentRecords,
|
||||
deleteDocument,
|
||||
getProcessingConfig,
|
||||
processDocumentsWithQueue,
|
||||
} from '@/lib/knowledge/documents/service'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
|
||||
|
||||
const logger = createLogger('DocumentUpsertAPI')
|
||||
|
||||
const UpsertDocumentSchema = z.object({
|
||||
documentId: z.string().optional(),
|
||||
filename: z.string().min(1, 'Filename is required'),
|
||||
fileUrl: z.string().min(1, 'File URL is required'),
|
||||
fileSize: z.number().min(1, 'File size must be greater than 0'),
|
||||
mimeType: z.string().min(1, 'MIME type is required'),
|
||||
documentTagsData: z.string().optional(),
|
||||
processingOptions: z.object({
|
||||
chunkSize: z.number().min(100).max(4000),
|
||||
minCharactersPerChunk: z.number().min(1).max(2000),
|
||||
recipe: z.string(),
|
||||
lang: z.string(),
|
||||
chunkOverlap: z.number().min(0).max(500),
|
||||
}),
|
||||
workflowId: z.string().optional(),
|
||||
})
|
||||
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
const { id: knowledgeBaseId } = await params
|
||||
|
||||
try {
|
||||
const body = await req.json()
|
||||
|
||||
logger.info(`[${requestId}] Knowledge base document upsert request`, {
|
||||
knowledgeBaseId,
|
||||
hasDocumentId: !!body.documentId,
|
||||
filename: body.filename,
|
||||
})
|
||||
|
||||
const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
logger.warn(`[${requestId}] Authentication failed: ${auth.error || 'Unauthorized'}`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
const userId = auth.userId
|
||||
|
||||
const validatedData = UpsertDocumentSchema.parse(body)
|
||||
|
||||
if (validatedData.workflowId) {
|
||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId: validatedData.workflowId,
|
||||
userId,
|
||||
action: 'write',
|
||||
})
|
||||
if (!authorization.allowed) {
|
||||
return NextResponse.json(
|
||||
{ error: authorization.message || 'Access denied' },
|
||||
{ status: authorization.status }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, userId)
|
||||
|
||||
if (!accessCheck.hasAccess) {
|
||||
if ('notFound' in accessCheck && accessCheck.notFound) {
|
||||
logger.warn(`[${requestId}] Knowledge base not found: ${knowledgeBaseId}`)
|
||||
return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 })
|
||||
}
|
||||
logger.warn(
|
||||
`[${requestId}] User ${userId} attempted to upsert document in unauthorized knowledge base ${knowledgeBaseId}`
|
||||
)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
let existingDocumentId: string | null = null
|
||||
let isUpdate = false
|
||||
|
||||
if (validatedData.documentId) {
|
||||
const existingDoc = await db
|
||||
.select({ id: document.id })
|
||||
.from(document)
|
||||
.where(
|
||||
and(
|
||||
eq(document.id, validatedData.documentId),
|
||||
eq(document.knowledgeBaseId, knowledgeBaseId),
|
||||
isNull(document.deletedAt)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingDoc.length > 0) {
|
||||
existingDocumentId = existingDoc[0].id
|
||||
}
|
||||
} else {
|
||||
const docsByFilename = await db
|
||||
.select({ id: document.id })
|
||||
.from(document)
|
||||
.where(
|
||||
and(
|
||||
eq(document.filename, validatedData.filename),
|
||||
eq(document.knowledgeBaseId, knowledgeBaseId),
|
||||
isNull(document.deletedAt)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (docsByFilename.length > 0) {
|
||||
existingDocumentId = docsByFilename[0].id
|
||||
}
|
||||
}
|
||||
|
||||
if (existingDocumentId) {
|
||||
isUpdate = true
|
||||
logger.info(
|
||||
`[${requestId}] Found existing document ${existingDocumentId}, creating replacement before deleting old`
|
||||
)
|
||||
}
|
||||
|
||||
const createdDocuments = await createDocumentRecords(
|
||||
[
|
||||
{
|
||||
filename: validatedData.filename,
|
||||
fileUrl: validatedData.fileUrl,
|
||||
fileSize: validatedData.fileSize,
|
||||
mimeType: validatedData.mimeType,
|
||||
...(validatedData.documentTagsData && {
|
||||
documentTagsData: validatedData.documentTagsData,
|
||||
}),
|
||||
},
|
||||
],
|
||||
knowledgeBaseId,
|
||||
requestId
|
||||
)
|
||||
|
||||
const firstDocument = createdDocuments[0]
|
||||
if (!firstDocument) {
|
||||
logger.error(`[${requestId}] createDocumentRecords returned empty array unexpectedly`)
|
||||
return NextResponse.json({ error: 'Failed to create document record' }, { status: 500 })
|
||||
}
|
||||
|
||||
if (existingDocumentId) {
|
||||
try {
|
||||
await deleteDocument(existingDocumentId, requestId)
|
||||
} catch (deleteError) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to delete old document ${existingDocumentId}, rolling back new record`,
|
||||
deleteError
|
||||
)
|
||||
await deleteDocument(firstDocument.documentId, requestId).catch(() => {})
|
||||
return NextResponse.json({ error: 'Failed to replace existing document' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
processDocumentsWithQueue(
|
||||
createdDocuments,
|
||||
knowledgeBaseId,
|
||||
validatedData.processingOptions,
|
||||
requestId
|
||||
).catch((error: unknown) => {
|
||||
logger.error(`[${requestId}] Critical error in document processing pipeline:`, error)
|
||||
})
|
||||
|
||||
try {
|
||||
const { PlatformEvents } = await import('@/lib/core/telemetry')
|
||||
PlatformEvents.knowledgeBaseDocumentsUploaded({
|
||||
knowledgeBaseId,
|
||||
documentsCount: 1,
|
||||
uploadType: 'single',
|
||||
chunkSize: validatedData.processingOptions.chunkSize,
|
||||
recipe: validatedData.processingOptions.recipe,
|
||||
})
|
||||
} catch (_e) {
|
||||
// Silently fail
|
||||
}
|
||||
|
||||
recordAudit({
|
||||
workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
|
||||
actorId: userId,
|
||||
actorName: auth.userName,
|
||||
actorEmail: auth.userEmail,
|
||||
action: isUpdate ? AuditAction.DOCUMENT_UPDATED : AuditAction.DOCUMENT_UPLOADED,
|
||||
resourceType: AuditResourceType.DOCUMENT,
|
||||
resourceId: knowledgeBaseId,
|
||||
resourceName: validatedData.filename,
|
||||
description: isUpdate
|
||||
? `Upserted (replaced) document "${validatedData.filename}" in knowledge base "${knowledgeBaseId}"`
|
||||
: `Upserted (created) document "${validatedData.filename}" in knowledge base "${knowledgeBaseId}"`,
|
||||
metadata: {
|
||||
fileName: validatedData.filename,
|
||||
previousDocumentId: existingDocumentId,
|
||||
isUpdate,
|
||||
},
|
||||
request: req,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
documentsCreated: [
|
||||
{
|
||||
documentId: firstDocument.documentId,
|
||||
filename: firstDocument.filename,
|
||||
status: 'pending',
|
||||
},
|
||||
],
|
||||
isUpdate,
|
||||
previousDocumentId: existingDocumentId,
|
||||
processingMethod: 'background',
|
||||
processingConfig: {
|
||||
maxConcurrentDocuments: getProcessingConfig().maxConcurrentDocuments,
|
||||
batchSize: getProcessingConfig().batchSize,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid upsert request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error upserting document`, error)
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Failed to upsert document'
|
||||
const isStorageLimitError =
|
||||
errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
|
||||
const isMissingKnowledgeBase = errorMessage === 'Knowledge base not found'
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: errorMessage },
|
||||
{ status: isMissingKnowledgeBase ? 404 : isStorageLimitError ? 413 : 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -279,7 +279,6 @@ export async function POST(req: NextRequest) {
|
||||
role: 'assistant' as const,
|
||||
content: result.content,
|
||||
timestamp: new Date().toISOString(),
|
||||
...(result.requestId ? { requestId: result.requestId } : {}),
|
||||
}
|
||||
if (result.toolCalls.length > 0) {
|
||||
assistantMessage.toolCalls = result.toolCalls
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
export { ErrorState, type ErrorStateProps } from './error'
|
||||
export { InlineRenameInput } from './inline-rename-input'
|
||||
export { MessageActions } from './message-actions'
|
||||
export { ownerCell } from './resource/components/owner-cell/owner-cell'
|
||||
export type {
|
||||
BreadcrumbEditing,
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
export { MessageActions } from './message-actions'
|
||||
@@ -1,84 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { Check, Copy, Ellipsis, Hash } from 'lucide-react'
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from '@/components/emcn'
|
||||
|
||||
interface MessageActionsProps {
|
||||
content: string
|
||||
requestId?: string
|
||||
}
|
||||
|
||||
export function MessageActions({ content, requestId }: MessageActionsProps) {
|
||||
const [copied, setCopied] = useState<'message' | 'request' | null>(null)
|
||||
const resetTimeoutRef = useRef<number | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (resetTimeoutRef.current !== null) {
|
||||
window.clearTimeout(resetTimeoutRef.current)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
const copyToClipboard = useCallback(async (text: string, type: 'message' | 'request') => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(text)
|
||||
setCopied(type)
|
||||
if (resetTimeoutRef.current !== null) {
|
||||
window.clearTimeout(resetTimeoutRef.current)
|
||||
}
|
||||
resetTimeoutRef.current = window.setTimeout(() => setCopied(null), 1500)
|
||||
} catch {
|
||||
return
|
||||
}
|
||||
}, [])
|
||||
|
||||
if (!content && !requestId) {
|
||||
return null
|
||||
}
|
||||
|
||||
return (
|
||||
<DropdownMenu modal={false}>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
type='button'
|
||||
aria-label='More options'
|
||||
className='flex h-5 w-5 items-center justify-center rounded-sm text-[var(--text-icon)] opacity-0 transition-colors transition-opacity hover:bg-[var(--surface-3)] hover:text-[var(--text-primary)] focus-visible:opacity-100 focus-visible:outline-none group-hover/msg:opacity-100 data-[state=open]:opacity-100'
|
||||
onClick={(event) => event.stopPropagation()}
|
||||
>
|
||||
<Ellipsis className='h-3 w-3' strokeWidth={2} />
|
||||
</button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align='end' side='top' sideOffset={4}>
|
||||
<DropdownMenuItem
|
||||
disabled={!content}
|
||||
onSelect={(event) => {
|
||||
event.stopPropagation()
|
||||
void copyToClipboard(content, 'message')
|
||||
}}
|
||||
>
|
||||
{copied === 'message' ? <Check /> : <Copy />}
|
||||
<span>Copy Message</span>
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
disabled={!requestId}
|
||||
onSelect={(event) => {
|
||||
event.stopPropagation()
|
||||
if (requestId) {
|
||||
void copyToClipboard(requestId, 'request')
|
||||
}
|
||||
}}
|
||||
>
|
||||
{copied === 'request' ? <Check /> : <Hash />}
|
||||
<span>Copy Request ID</span>
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)
|
||||
}
|
||||
@@ -215,13 +215,16 @@ function TextEditor({
|
||||
onSaveStatusChange?.(saveStatus)
|
||||
}, [saveStatus, onSaveStatusChange])
|
||||
|
||||
if (saveRef) saveRef.current = saveImmediately
|
||||
useEffect(
|
||||
() => () => {
|
||||
if (saveRef) saveRef.current = null
|
||||
},
|
||||
[saveRef]
|
||||
)
|
||||
useEffect(() => {
|
||||
if (saveRef) {
|
||||
saveRef.current = saveImmediately
|
||||
}
|
||||
return () => {
|
||||
if (saveRef) {
|
||||
saveRef.current = null
|
||||
}
|
||||
}
|
||||
}, [saveRef, saveImmediately])
|
||||
|
||||
useEffect(() => {
|
||||
if (!isResizing) return
|
||||
|
||||
@@ -160,8 +160,8 @@ export function EmbeddedWorkflowActions({ workspaceId, workflowId }: EmbeddedWor
|
||||
])
|
||||
|
||||
const handleOpenWorkflow = useCallback(() => {
|
||||
window.open(`/workspace/${workspaceId}/w/${workflowId}`, '_blank')
|
||||
}, [workspaceId, workflowId])
|
||||
router.push(`/workspace/${workspaceId}/w/${workflowId}`)
|
||||
}, [router, workspaceId, workflowId])
|
||||
|
||||
return (
|
||||
<>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { forwardRef, memo, useCallback, useState } from 'react'
|
||||
import { memo, useCallback, useEffect, useState } from 'react'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { getFileExtension } from '@/lib/uploads/utils/file-utils'
|
||||
import type { PreviewMode } from '@/app/workspace/[workspaceId]/files/components/file-viewer'
|
||||
@@ -31,79 +31,68 @@ interface MothershipViewProps {
|
||||
className?: string
|
||||
}
|
||||
|
||||
export const MothershipView = memo(
|
||||
forwardRef<HTMLDivElement, MothershipViewProps>(function MothershipView(
|
||||
{
|
||||
workspaceId,
|
||||
chatId,
|
||||
resources,
|
||||
activeResourceId,
|
||||
onSelectResource,
|
||||
onAddResource,
|
||||
onRemoveResource,
|
||||
onReorderResources,
|
||||
onCollapse,
|
||||
isCollapsed,
|
||||
className,
|
||||
}: MothershipViewProps,
|
||||
ref
|
||||
) {
|
||||
const active = resources.find((r) => r.id === activeResourceId) ?? resources[0] ?? null
|
||||
export const MothershipView = memo(function MothershipView({
|
||||
workspaceId,
|
||||
chatId,
|
||||
resources,
|
||||
activeResourceId,
|
||||
onSelectResource,
|
||||
onAddResource,
|
||||
onRemoveResource,
|
||||
onReorderResources,
|
||||
onCollapse,
|
||||
isCollapsed,
|
||||
className,
|
||||
}: MothershipViewProps) {
|
||||
const active = resources.find((r) => r.id === activeResourceId) ?? resources[0] ?? null
|
||||
|
||||
const [previewMode, setPreviewMode] = useState<PreviewMode>('preview')
|
||||
const [prevActiveId, setPrevActiveId] = useState<string | null | undefined>(active?.id)
|
||||
const handleCyclePreview = useCallback(() => setPreviewMode((m) => PREVIEW_CYCLE[m]), [])
|
||||
const [previewMode, setPreviewMode] = useState<PreviewMode>('preview')
|
||||
const handleCyclePreview = useCallback(() => setPreviewMode((m) => PREVIEW_CYCLE[m]), [])
|
||||
|
||||
// Reset preview mode to default when the active resource changes (guarded render-phase update)
|
||||
if (active?.id !== prevActiveId) {
|
||||
setPrevActiveId(active?.id)
|
||||
setPreviewMode('preview')
|
||||
}
|
||||
useEffect(() => {
|
||||
setPreviewMode('preview')
|
||||
}, [active?.id])
|
||||
|
||||
const isActivePreviewable =
|
||||
active?.type === 'file' && RICH_PREVIEWABLE_EXTENSIONS.has(getFileExtension(active.title))
|
||||
const isActivePreviewable =
|
||||
active?.type === 'file' && RICH_PREVIEWABLE_EXTENSIONS.has(getFileExtension(active.title))
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={ref}
|
||||
className={cn(
|
||||
'relative z-10 flex h-full flex-col overflow-hidden border-[var(--border)] bg-[var(--bg)] transition-[width,min-width,border-width] duration-300 ease-out',
|
||||
isCollapsed ? 'w-0 min-w-0 border-l-0' : 'w-[60%] border-l',
|
||||
className
|
||||
)}
|
||||
>
|
||||
<div className='flex min-h-0 flex-1 flex-col'>
|
||||
<ResourceTabs
|
||||
workspaceId={workspaceId}
|
||||
chatId={chatId}
|
||||
resources={resources}
|
||||
activeId={active?.id ?? null}
|
||||
onSelect={onSelectResource}
|
||||
onAddResource={onAddResource}
|
||||
onRemoveResource={onRemoveResource}
|
||||
onReorderResources={onReorderResources}
|
||||
onCollapse={onCollapse}
|
||||
actions={
|
||||
active ? <ResourceActions workspaceId={workspaceId} resource={active} /> : null
|
||||
}
|
||||
previewMode={isActivePreviewable ? previewMode : undefined}
|
||||
onCyclePreviewMode={isActivePreviewable ? handleCyclePreview : undefined}
|
||||
/>
|
||||
<div className='min-h-0 flex-1 overflow-hidden'>
|
||||
{active ? (
|
||||
<ResourceContent
|
||||
workspaceId={workspaceId}
|
||||
resource={active}
|
||||
previewMode={isActivePreviewable ? previewMode : undefined}
|
||||
/>
|
||||
) : (
|
||||
<div className='flex h-full items-center justify-center text-[14px] text-[var(--text-muted)]'>
|
||||
Click "+" above to add a resource
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'relative z-10 flex h-full flex-col overflow-hidden border-[var(--border)] bg-[var(--bg)] transition-[width,min-width,border-width] duration-300 ease-out',
|
||||
isCollapsed ? 'w-0 min-w-0 border-l-0' : 'w-[60%] border-l',
|
||||
className
|
||||
)}
|
||||
>
|
||||
<div className='flex min-h-0 flex-1 flex-col'>
|
||||
<ResourceTabs
|
||||
workspaceId={workspaceId}
|
||||
chatId={chatId}
|
||||
resources={resources}
|
||||
activeId={active?.id ?? null}
|
||||
onSelect={onSelectResource}
|
||||
onAddResource={onAddResource}
|
||||
onRemoveResource={onRemoveResource}
|
||||
onReorderResources={onReorderResources}
|
||||
onCollapse={onCollapse}
|
||||
actions={active ? <ResourceActions workspaceId={workspaceId} resource={active} /> : null}
|
||||
previewMode={isActivePreviewable ? previewMode : undefined}
|
||||
onCyclePreviewMode={isActivePreviewable ? handleCyclePreview : undefined}
|
||||
/>
|
||||
<div className='min-h-0 flex-1 overflow-hidden'>
|
||||
{active ? (
|
||||
<ResourceContent
|
||||
workspaceId={workspaceId}
|
||||
resource={active}
|
||||
previewMode={isActivePreviewable ? previewMode : undefined}
|
||||
/>
|
||||
) : (
|
||||
<div className='flex h-full items-center justify-center text-[14px] text-[var(--text-muted)]'>
|
||||
Click "+" above to add a resource
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})
|
||||
)
|
||||
</div>
|
||||
)
|
||||
})
|
||||
|
||||
@@ -202,7 +202,9 @@ export function UserInput({
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (editValue) onEditValueConsumed?.()
|
||||
if (editValue) {
|
||||
onEditValueConsumed?.()
|
||||
}
|
||||
}, [editValue, onEditValueConsumed])
|
||||
|
||||
const animatedPlaceholder = useAnimatedPlaceholder(isInitialView)
|
||||
|
||||
@@ -13,7 +13,6 @@ import {
|
||||
LandingWorkflowSeedStorage,
|
||||
} from '@/lib/core/utils/browser-storage'
|
||||
import { persistImportedWorkflow } from '@/lib/workflows/operations/import-export'
|
||||
import { MessageActions } from '@/app/workspace/[workspaceId]/components'
|
||||
import { useChatHistory, useMarkTaskRead } from '@/hooks/queries/tasks'
|
||||
import type { ChatContext } from '@/stores/panel'
|
||||
import { useSidebarStore } from '@/stores/sidebar/store'
|
||||
@@ -26,7 +25,7 @@ import {
|
||||
UserMessageContent,
|
||||
} from './components'
|
||||
import { PendingTagIndicator } from './components/message-content/components/special-tags'
|
||||
import { useAutoScroll, useChat, useMothershipResize } from './hooks'
|
||||
import { useAutoScroll, useChat } from './hooks'
|
||||
import type { FileAttachmentForApi, MothershipResource, MothershipResourceType } from './types'
|
||||
|
||||
const logger = createLogger('Home')
|
||||
@@ -138,41 +137,26 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
useChatHistory(chatId)
|
||||
const { mutate: markRead } = useMarkTaskRead(workspaceId)
|
||||
|
||||
const { mothershipRef, handleResizePointerDown, clearWidth } = useMothershipResize()
|
||||
|
||||
const [isResourceCollapsed, setIsResourceCollapsed] = useState(true)
|
||||
const [isResourceAnimatingIn, setIsResourceAnimatingIn] = useState(false)
|
||||
const [skipResourceTransition, setSkipResourceTransition] = useState(false)
|
||||
const isResourceCollapsedRef = useRef(isResourceCollapsed)
|
||||
isResourceCollapsedRef.current = isResourceCollapsed
|
||||
|
||||
const collapseResource = useCallback(() => {
|
||||
clearWidth()
|
||||
setIsResourceCollapsed(true)
|
||||
}, [clearWidth])
|
||||
const animatingInTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
|
||||
const startAnimatingIn = useCallback(() => {
|
||||
if (animatingInTimerRef.current) clearTimeout(animatingInTimerRef.current)
|
||||
setIsResourceAnimatingIn(true)
|
||||
animatingInTimerRef.current = setTimeout(() => {
|
||||
setIsResourceAnimatingIn(false)
|
||||
animatingInTimerRef.current = null
|
||||
}, 400)
|
||||
}, [])
|
||||
|
||||
const collapseResource = useCallback(() => setIsResourceCollapsed(true), [])
|
||||
const expandResource = useCallback(() => {
|
||||
setIsResourceCollapsed(false)
|
||||
startAnimatingIn()
|
||||
}, [startAnimatingIn])
|
||||
setIsResourceAnimatingIn(true)
|
||||
}, [])
|
||||
|
||||
const handleResourceEvent = useCallback(() => {
|
||||
if (isResourceCollapsedRef.current) {
|
||||
const { isCollapsed, toggleCollapsed } = useSidebarStore.getState()
|
||||
if (!isCollapsed) toggleCollapsed()
|
||||
setIsResourceCollapsed(false)
|
||||
startAnimatingIn()
|
||||
setIsResourceAnimatingIn(true)
|
||||
}
|
||||
}, [startAnimatingIn])
|
||||
}, [])
|
||||
|
||||
const {
|
||||
messages,
|
||||
@@ -193,15 +177,8 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
} = useChat(workspaceId, chatId, { onResourceEvent: handleResourceEvent })
|
||||
|
||||
const [editingInputValue, setEditingInputValue] = useState('')
|
||||
const [prevChatId, setPrevChatId] = useState(chatId)
|
||||
const clearEditingValue = useCallback(() => setEditingInputValue(''), [])
|
||||
|
||||
// Clear editing value when navigating to a different chat (guarded render-phase update)
|
||||
if (chatId !== prevChatId) {
|
||||
setPrevChatId(chatId)
|
||||
setEditingInputValue('')
|
||||
}
|
||||
|
||||
const handleEditQueuedMessage = useCallback(
|
||||
(id: string) => {
|
||||
const msg = editQueuedMessage(id)
|
||||
@@ -212,6 +189,10 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
[editQueuedMessage]
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
setEditingInputValue('')
|
||||
}, [chatId])
|
||||
|
||||
useEffect(() => {
|
||||
wasSendingRef.current = false
|
||||
if (resolvedChatId) markRead(resolvedChatId)
|
||||
@@ -225,12 +206,23 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
}, [isSending, resolvedChatId, markRead])
|
||||
|
||||
useEffect(() => {
|
||||
if (!(resources.length > 0 && isResourceCollapsedRef.current)) return
|
||||
setIsResourceCollapsed(false)
|
||||
setSkipResourceTransition(true)
|
||||
if (!isResourceAnimatingIn) return
|
||||
const timer = setTimeout(() => setIsResourceAnimatingIn(false), 400)
|
||||
return () => clearTimeout(timer)
|
||||
}, [isResourceAnimatingIn])
|
||||
|
||||
useEffect(() => {
|
||||
if (resources.length > 0 && isResourceCollapsedRef.current) {
|
||||
setSkipResourceTransition(true)
|
||||
setIsResourceCollapsed(false)
|
||||
}
|
||||
}, [resources])
|
||||
|
||||
useEffect(() => {
|
||||
if (!skipResourceTransition) return
|
||||
const id = requestAnimationFrame(() => setSkipResourceTransition(false))
|
||||
return () => cancelAnimationFrame(id)
|
||||
}, [resources])
|
||||
}, [skipResourceTransition])
|
||||
|
||||
const handleSubmit = useCallback(
|
||||
(text: string, fileAttachments?: FileAttachmentForApi[], contexts?: ChatContext[]) => {
|
||||
@@ -366,7 +358,7 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
|
||||
return (
|
||||
<div className='relative flex h-full bg-[var(--bg)]'>
|
||||
<div className='flex h-full min-w-[320px] flex-1 flex-col'>
|
||||
<div className='flex h-full min-w-0 flex-1 flex-col'>
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className='min-h-0 flex-1 overflow-y-auto overflow-x-hidden px-6 pt-4 pb-8 [scrollbar-gutter:stable]'
|
||||
@@ -422,12 +414,7 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
const isLastMessage = index === messages.length - 1
|
||||
|
||||
return (
|
||||
<div key={msg.id} className='group/msg relative pb-5'>
|
||||
{!isThisStreaming && (msg.content || msg.contentBlocks?.length) && (
|
||||
<div className='absolute right-0 bottom-0 z-10'>
|
||||
<MessageActions content={msg.content} requestId={msg.requestId} />
|
||||
</div>
|
||||
)}
|
||||
<div key={msg.id} className='pb-4'>
|
||||
<MessageContent
|
||||
blocks={msg.contentBlocks || []}
|
||||
fallbackContent={msg.content}
|
||||
@@ -465,21 +452,7 @@ export function Home({ chatId }: HomeProps = {}) {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Resize handle — zero-width flex child whose absolute child straddles the border */}
|
||||
{!isResourceCollapsed && (
|
||||
<div className='relative z-20 w-0 flex-none'>
|
||||
<div
|
||||
className='absolute inset-y-0 left-[-4px] w-[8px] cursor-ew-resize'
|
||||
role='separator'
|
||||
aria-orientation='vertical'
|
||||
aria-label='Resize resource panel'
|
||||
onPointerDown={handleResizePointerDown}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<MothershipView
|
||||
ref={mothershipRef}
|
||||
workspaceId={workspaceId}
|
||||
chatId={resolvedChatId}
|
||||
resources={resources}
|
||||
|
||||
@@ -2,5 +2,4 @@ export { useAnimatedPlaceholder } from './use-animated-placeholder'
|
||||
export { useAutoScroll } from './use-auto-scroll'
|
||||
export type { UseChatReturn } from './use-chat'
|
||||
export { useChat } from './use-chat'
|
||||
export { useMothershipResize } from './use-mothership-resize'
|
||||
export { useStreamingReveal } from './use-streaming-reveal'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useCallback, useEffect, useLayoutEffect, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { usePathname } from 'next/navigation'
|
||||
@@ -132,7 +132,7 @@ function toDisplayAttachment(f: TaskStoredFileAttachment): ChatMessageAttachment
|
||||
media_type: f.media_type,
|
||||
size: f.size,
|
||||
previewUrl: f.media_type.startsWith('image/')
|
||||
? `/api/files/serve/${encodeURIComponent(f.key)}?context=mothership`
|
||||
? `/api/files/serve/${encodeURIComponent(f.key)}?context=copilot`
|
||||
: undefined,
|
||||
}
|
||||
}
|
||||
@@ -142,7 +142,6 @@ function mapStoredMessage(msg: TaskStoredMessage): ChatMessage {
|
||||
id: msg.id,
|
||||
role: msg.role,
|
||||
content: msg.content,
|
||||
...(msg.requestId ? { requestId: msg.requestId } : {}),
|
||||
}
|
||||
|
||||
const hasContentBlocks = Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0
|
||||
@@ -269,22 +268,14 @@ export function useChat(
|
||||
onResourceEventRef.current = options?.onResourceEvent
|
||||
const resourcesRef = useRef(resources)
|
||||
resourcesRef.current = resources
|
||||
|
||||
// Derive the effective active resource ID — auto-selects the last resource when the stored ID is
|
||||
// absent or no longer in the list, avoiding a separate Effect-based state correction loop.
|
||||
const effectiveActiveResourceId = useMemo(() => {
|
||||
if (resources.length === 0) return null
|
||||
if (activeResourceId && resources.some((r) => r.id === activeResourceId))
|
||||
return activeResourceId
|
||||
return resources[resources.length - 1].id
|
||||
}, [resources, activeResourceId])
|
||||
|
||||
const activeResourceIdRef = useRef(effectiveActiveResourceId)
|
||||
activeResourceIdRef.current = effectiveActiveResourceId
|
||||
const activeResourceIdRef = useRef(activeResourceId)
|
||||
activeResourceIdRef.current = activeResourceId
|
||||
|
||||
const [messageQueue, setMessageQueue] = useState<QueuedMessage[]>([])
|
||||
const messageQueueRef = useRef<QueuedMessage[]>([])
|
||||
messageQueueRef.current = messageQueue
|
||||
useEffect(() => {
|
||||
messageQueueRef.current = messageQueue
|
||||
}, [messageQueue])
|
||||
|
||||
const sendMessageRef = useRef<UseChatReturn['sendMessage']>(async () => {})
|
||||
const processSSEStreamRef = useRef<
|
||||
@@ -490,6 +481,19 @@ export function useChat(
|
||||
}
|
||||
}, [chatHistory, workspaceId, queryClient])
|
||||
|
||||
useEffect(() => {
|
||||
if (resources.length === 0) {
|
||||
if (activeResourceId !== null) {
|
||||
setActiveResourceId(null)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (!activeResourceId || !resources.some((resource) => resource.id === activeResourceId)) {
|
||||
setActiveResourceId(resources[resources.length - 1].id)
|
||||
}
|
||||
}, [activeResourceId, resources])
|
||||
|
||||
const processSSEStream = useCallback(
|
||||
async (
|
||||
reader: ReadableStreamDefaultReader<Uint8Array>,
|
||||
@@ -505,7 +509,6 @@ export function useChat(
|
||||
let activeSubagent: string | undefined
|
||||
let runningText = ''
|
||||
let lastContentSource: 'main' | 'subagent' | null = null
|
||||
let streamRequestId: string | undefined
|
||||
|
||||
streamingContentRef.current = ''
|
||||
streamingBlocksRef.current = []
|
||||
@@ -523,21 +526,14 @@ export function useChat(
|
||||
const flush = () => {
|
||||
if (isStale()) return
|
||||
streamingBlocksRef.current = [...blocks]
|
||||
const snapshot: Partial<ChatMessage> = {
|
||||
content: runningText,
|
||||
contentBlocks: [...blocks],
|
||||
}
|
||||
if (streamRequestId) snapshot.requestId = streamRequestId
|
||||
const snapshot = { content: runningText, contentBlocks: [...blocks] }
|
||||
setMessages((prev) => {
|
||||
if (expectedGen !== undefined && streamGenRef.current !== expectedGen) return prev
|
||||
const idx = prev.findIndex((m) => m.id === assistantId)
|
||||
if (idx >= 0) {
|
||||
return prev.map((m) => (m.id === assistantId ? { ...m, ...snapshot } : m))
|
||||
}
|
||||
return [
|
||||
...prev,
|
||||
{ id: assistantId, role: 'assistant' as const, content: '', ...snapshot },
|
||||
]
|
||||
return [...prev, { id: assistantId, role: 'assistant' as const, ...snapshot }]
|
||||
})
|
||||
}
|
||||
|
||||
@@ -601,14 +597,6 @@ export function useChat(
|
||||
}
|
||||
break
|
||||
}
|
||||
case 'request_id': {
|
||||
const rid = typeof parsed.data === 'string' ? parsed.data : undefined
|
||||
if (rid) {
|
||||
streamRequestId = rid
|
||||
flush()
|
||||
}
|
||||
break
|
||||
}
|
||||
case 'content': {
|
||||
const chunk = typeof parsed.data === 'string' ? parsed.data : (parsed.content ?? '')
|
||||
if (chunk) {
|
||||
@@ -866,7 +854,9 @@ export function useChat(
|
||||
},
|
||||
[workspaceId, queryClient, addResource, removeResource]
|
||||
)
|
||||
processSSEStreamRef.current = processSSEStream
|
||||
useLayoutEffect(() => {
|
||||
processSSEStreamRef.current = processSSEStream
|
||||
})
|
||||
|
||||
const persistPartialResponse = useCallback(async () => {
|
||||
const chatId = chatIdRef.current
|
||||
@@ -955,7 +945,9 @@ export function useChat(
|
||||
},
|
||||
[invalidateChatQueries]
|
||||
)
|
||||
finalizeRef.current = finalize
|
||||
useLayoutEffect(() => {
|
||||
finalizeRef.current = finalize
|
||||
})
|
||||
|
||||
const sendMessage = useCallback(
|
||||
async (message: string, fileAttachments?: FileAttachmentForApi[], contexts?: ChatContext[]) => {
|
||||
@@ -1091,7 +1083,9 @@ export function useChat(
|
||||
},
|
||||
[workspaceId, queryClient, processSSEStream, finalize]
|
||||
)
|
||||
sendMessageRef.current = sendMessage
|
||||
useLayoutEffect(() => {
|
||||
sendMessageRef.current = sendMessage
|
||||
})
|
||||
|
||||
const stopGeneration = useCallback(async () => {
|
||||
if (sendingRef.current && !chatIdRef.current) {
|
||||
@@ -1229,7 +1223,7 @@ export function useChat(
|
||||
sendMessage,
|
||||
stopGeneration,
|
||||
resources,
|
||||
activeResourceId: effectiveActiveResourceId,
|
||||
activeResourceId,
|
||||
setActiveResourceId,
|
||||
addResource,
|
||||
removeResource,
|
||||
|
||||
@@ -1,101 +0,0 @@
|
||||
import { useCallback, useEffect, useRef } from 'react'
|
||||
import { MOTHERSHIP_WIDTH } from '@/stores/constants'
|
||||
|
||||
/**
|
||||
* Hook for managing resize of the MothershipView resource panel.
|
||||
*
|
||||
* Uses imperative DOM manipulation (zero React re-renders during drag) with
|
||||
* Pointer Events + setPointerCapture for unified mouse/touch/stylus support.
|
||||
* Attach `mothershipRef` to the MothershipView root div and bind
|
||||
* `handleResizePointerDown` to the drag handle's onPointerDown.
|
||||
* Call `clearWidth` when the panel collapses so the CSS class retakes control.
|
||||
*/
|
||||
export function useMothershipResize() {
|
||||
const mothershipRef = useRef<HTMLDivElement | null>(null)
|
||||
// Stored so the useEffect cleanup can tear down listeners if the component unmounts mid-drag
|
||||
const cleanupRef = useRef<(() => void) | null>(null)
|
||||
|
||||
const handleResizePointerDown = useCallback((e: React.PointerEvent) => {
|
||||
e.preventDefault()
|
||||
|
||||
const el = mothershipRef.current
|
||||
if (!el) return
|
||||
|
||||
const handle = e.currentTarget as HTMLElement
|
||||
handle.setPointerCapture(e.pointerId)
|
||||
|
||||
// Pin to current rendered width so drag starts from the visual position
|
||||
el.style.width = `${el.getBoundingClientRect().width}px`
|
||||
|
||||
// Disable CSS transition to prevent animation lag during drag
|
||||
const prevTransition = el.style.transition
|
||||
el.style.transition = 'none'
|
||||
document.body.style.cursor = 'ew-resize'
|
||||
document.body.style.userSelect = 'none'
|
||||
|
||||
// AbortController removes all listeners at once on cleanup/cancel/unmount
|
||||
const ac = new AbortController()
|
||||
const { signal } = ac
|
||||
|
||||
const cleanup = () => {
|
||||
ac.abort()
|
||||
el.style.transition = prevTransition
|
||||
document.body.style.cursor = ''
|
||||
document.body.style.userSelect = ''
|
||||
cleanupRef.current = null
|
||||
}
|
||||
cleanupRef.current = cleanup
|
||||
|
||||
handle.addEventListener(
|
||||
'pointermove',
|
||||
(moveEvent: PointerEvent) => {
|
||||
const newWidth = window.innerWidth - moveEvent.clientX
|
||||
const maxWidth = window.innerWidth * MOTHERSHIP_WIDTH.MAX_PERCENTAGE
|
||||
el.style.width = `${Math.min(Math.max(newWidth, MOTHERSHIP_WIDTH.MIN), maxWidth)}px`
|
||||
},
|
||||
{ signal }
|
||||
)
|
||||
|
||||
handle.addEventListener(
|
||||
'pointerup',
|
||||
(upEvent: PointerEvent) => {
|
||||
handle.releasePointerCapture(upEvent.pointerId)
|
||||
cleanup()
|
||||
},
|
||||
{ signal }
|
||||
)
|
||||
|
||||
// Browser fires pointercancel when it reclaims the gesture (scroll, palm rejection, etc.)
|
||||
// Without this, body cursor/userSelect and transition would be permanently stuck
|
||||
handle.addEventListener('pointercancel', cleanup, { signal })
|
||||
}, [])
|
||||
|
||||
// Tear down any active drag if the component unmounts mid-drag
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
cleanupRef.current?.()
|
||||
}
|
||||
}, [])
|
||||
|
||||
// Re-clamp panel width when the viewport is resized (inline px width can exceed max after narrowing)
|
||||
useEffect(() => {
|
||||
const handleWindowResize = () => {
|
||||
const el = mothershipRef.current
|
||||
if (!el || !el.style.width) return
|
||||
const maxWidth = window.innerWidth * MOTHERSHIP_WIDTH.MAX_PERCENTAGE
|
||||
const current = el.getBoundingClientRect().width
|
||||
if (current > maxWidth) {
|
||||
el.style.width = `${maxWidth}px`
|
||||
}
|
||||
}
|
||||
window.addEventListener('resize', handleWindowResize)
|
||||
return () => window.removeEventListener('resize', handleWindowResize)
|
||||
}, [])
|
||||
|
||||
/** Remove inline width so the collapse CSS class retakes control */
|
||||
const clearWidth = useCallback(() => {
|
||||
mothershipRef.current?.style.removeProperty('width')
|
||||
}, [])
|
||||
|
||||
return { mothershipRef, handleResizePointerDown, clearWidth }
|
||||
}
|
||||
@@ -33,7 +33,6 @@ export interface QueuedMessage {
|
||||
*/
|
||||
export type SSEEventType =
|
||||
| 'chat_id'
|
||||
| 'request_id'
|
||||
| 'title_updated'
|
||||
| 'content'
|
||||
| 'reasoning' // openai reasoning - render as thinking text
|
||||
@@ -200,7 +199,6 @@ export interface ChatMessage {
|
||||
contentBlocks?: ContentBlock[]
|
||||
attachments?: ChatMessageAttachment[]
|
||||
contexts?: ChatMessageContext[]
|
||||
requestId?: string
|
||||
}
|
||||
|
||||
export const SUBAGENT_LABELS: Record<SubagentName, string> = {
|
||||
|
||||
@@ -169,13 +169,16 @@ export function ChunkEditor({
|
||||
|
||||
const saveFunction = isCreateMode ? handleSave : saveImmediately
|
||||
|
||||
if (saveRef) saveRef.current = saveFunction
|
||||
useEffect(
|
||||
() => () => {
|
||||
if (saveRef) saveRef.current = null
|
||||
},
|
||||
[saveRef]
|
||||
)
|
||||
useEffect(() => {
|
||||
if (saveRef) {
|
||||
saveRef.current = saveFunction
|
||||
}
|
||||
return () => {
|
||||
if (saveRef) {
|
||||
saveRef.current = null
|
||||
}
|
||||
}
|
||||
}, [saveRef, saveFunction])
|
||||
|
||||
const tokenStrings = useMemo(() => {
|
||||
if (!tokenizerOn || !editedContent) return []
|
||||
|
||||
@@ -274,7 +274,9 @@ export function KnowledgeBase({
|
||||
const { data: connectors = [], isLoading: isLoadingConnectors } = useConnectorList(id)
|
||||
const hasSyncingConnectors = connectors.some((c) => c.status === 'syncing')
|
||||
const hasSyncingConnectorsRef = useRef(hasSyncingConnectors)
|
||||
hasSyncingConnectorsRef.current = hasSyncingConnectors
|
||||
useEffect(() => {
|
||||
hasSyncingConnectorsRef.current = hasSyncingConnectors
|
||||
}, [hasSyncingConnectors])
|
||||
|
||||
const {
|
||||
documents,
|
||||
@@ -750,9 +752,11 @@ export function KnowledgeBase({
|
||||
const prevKnowledgeBaseIdRef = useRef<string>(id)
|
||||
const isNavigatingToNewKB = prevKnowledgeBaseIdRef.current !== id
|
||||
|
||||
if (knowledgeBase && knowledgeBase.id === id) {
|
||||
prevKnowledgeBaseIdRef.current = id
|
||||
}
|
||||
useEffect(() => {
|
||||
if (knowledgeBase && knowledgeBase.id === id) {
|
||||
prevKnowledgeBaseIdRef.current = id
|
||||
}
|
||||
}, [knowledgeBase, id])
|
||||
|
||||
const isInitialLoad = isLoadingKnowledgeBase && !knowledgeBase
|
||||
const isFetchingNewKB = isNavigatingToNewKB && isFetchingDocuments
|
||||
|
||||
@@ -220,7 +220,10 @@ function DashboardInner({ stats, isLoading, error }: DashboardProps) {
|
||||
|
||||
return result
|
||||
}, [rawExecutions])
|
||||
prevExecutionsRef.current = executions
|
||||
|
||||
useEffect(() => {
|
||||
prevExecutionsRef.current = executions
|
||||
}, [executions])
|
||||
|
||||
const lastExecutionByWorkflow = useMemo(() => {
|
||||
const map = new Map<string, number>()
|
||||
|
||||
@@ -31,8 +31,7 @@ export function Admin() {
|
||||
|
||||
const [workflowId, setWorkflowId] = useState('')
|
||||
const [usersOffset, setUsersOffset] = useState(0)
|
||||
const [searchInput, setSearchInput] = useState('')
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [usersEnabled, setUsersEnabled] = useState(false)
|
||||
const [banUserId, setBanUserId] = useState<string | null>(null)
|
||||
const [banReason, setBanReason] = useState('')
|
||||
|
||||
@@ -40,12 +39,8 @@ export function Admin() {
|
||||
data: usersData,
|
||||
isLoading: usersLoading,
|
||||
error: usersError,
|
||||
} = useAdminUsers(usersOffset, PAGE_SIZE, searchQuery)
|
||||
|
||||
const handleSearch = () => {
|
||||
setUsersOffset(0)
|
||||
setSearchQuery(searchInput.trim())
|
||||
}
|
||||
refetch: refetchUsers,
|
||||
} = useAdminUsers(usersOffset, PAGE_SIZE, usersEnabled)
|
||||
|
||||
const totalPages = useMemo(
|
||||
() => Math.ceil((usersData?.total ?? 0) / PAGE_SIZE),
|
||||
@@ -67,6 +62,14 @@ export function Admin() {
|
||||
)
|
||||
}
|
||||
|
||||
const handleLoadUsers = () => {
|
||||
if (usersEnabled) {
|
||||
refetchUsers()
|
||||
} else {
|
||||
setUsersEnabled(true)
|
||||
}
|
||||
}
|
||||
|
||||
const pendingUserIds = useMemo(() => {
|
||||
const ids = new Set<string>()
|
||||
if (setUserRole.isPending && (setUserRole.variables as { userId?: string })?.userId)
|
||||
@@ -133,16 +136,10 @@ export function Admin() {
|
||||
<div className='h-px bg-[var(--border-secondary)]' />
|
||||
|
||||
<div className='flex flex-col gap-[12px]'>
|
||||
<p className='font-medium text-[14px] text-[var(--text-primary)]'>User Management</p>
|
||||
<div className='flex gap-[8px]'>
|
||||
<EmcnInput
|
||||
value={searchInput}
|
||||
onChange={(e) => setSearchInput(e.target.value)}
|
||||
onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
|
||||
placeholder='Search by email or paste a user ID...'
|
||||
/>
|
||||
<Button variant='primary' onClick={handleSearch} disabled={usersLoading}>
|
||||
{usersLoading ? 'Searching...' : 'Search'}
|
||||
<div className='flex items-center justify-between'>
|
||||
<p className='font-medium text-[14px] text-[var(--text-primary)]'>User Management</p>
|
||||
<Button variant='active' onClick={handleLoadUsers} disabled={usersLoading}>
|
||||
{usersLoading ? 'Loading...' : usersEnabled ? 'Refresh' : 'Load Users'}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
@@ -167,9 +164,9 @@ export function Admin() {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{searchQuery.length > 0 && usersData && (
|
||||
{usersData && (
|
||||
<>
|
||||
<div className='flex flex-col gap-[2px]'>
|
||||
<div className='flex flex-col gap-[2px] rounded-[8px] border border-[var(--border-secondary)]'>
|
||||
<div className='flex items-center gap-[12px] border-[var(--border-secondary)] border-b px-[12px] py-[8px] text-[12px] text-[var(--text-tertiary)]'>
|
||||
<span className='w-[200px]'>Name</span>
|
||||
<span className='flex-1'>Email</span>
|
||||
@@ -179,7 +176,7 @@ export function Admin() {
|
||||
</div>
|
||||
|
||||
{usersData.users.length === 0 && (
|
||||
<div className='py-[16px] text-center text-[13px] text-[var(--text-tertiary)]'>
|
||||
<div className='px-[12px] py-[16px] text-center text-[13px] text-[var(--text-tertiary)]'>
|
||||
No users found.
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo, useRef, useState } from 'react'
|
||||
import { useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Info, Plus, Search } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
@@ -64,19 +64,13 @@ export function ApiKeys() {
|
||||
const [deleteKey, setDeleteKey] = useState<ApiKey | null>(null)
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [searchTerm, setSearchTerm] = useState('')
|
||||
const [shouldScrollToBottom, setShouldScrollToBottom] = useState(false)
|
||||
|
||||
const defaultKeyType = allowPersonalApiKeys ? 'personal' : 'workspace'
|
||||
const createButtonDisabled = isLoading || (!allowPersonalApiKeys && !canManageWorkspaceKeys)
|
||||
|
||||
const scrollContainerRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
const scrollToBottom = useCallback(() => {
|
||||
scrollContainerRef.current?.scrollTo({
|
||||
top: scrollContainerRef.current.scrollHeight,
|
||||
behavior: 'smooth',
|
||||
})
|
||||
}, [])
|
||||
|
||||
const filteredWorkspaceKeys = useMemo(() => {
|
||||
if (!searchTerm.trim()) {
|
||||
return workspaceKeys.map((key, index) => ({ key, originalIndex: index }))
|
||||
@@ -117,6 +111,16 @@ export function ApiKeys() {
|
||||
}
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (shouldScrollToBottom && scrollContainerRef.current) {
|
||||
scrollContainerRef.current.scrollTo({
|
||||
top: scrollContainerRef.current.scrollHeight,
|
||||
behavior: 'smooth',
|
||||
})
|
||||
setShouldScrollToBottom(false)
|
||||
}
|
||||
}, [shouldScrollToBottom])
|
||||
|
||||
const formatLastUsed = (dateString?: string) => {
|
||||
if (!dateString) return 'Never'
|
||||
return formatDate(new Date(dateString))
|
||||
|
||||
@@ -316,9 +316,6 @@ export function CredentialsManager() {
|
||||
|
||||
// --- Detail view state ---
|
||||
const [selectedCredentialId, setSelectedCredentialId] = useState<string | null>(null)
|
||||
const [prevSelectedCredentialId, setPrevSelectedCredentialId] = useState<
|
||||
string | null | undefined
|
||||
>(undefined)
|
||||
const [selectedDisplayNameDraft, setSelectedDisplayNameDraft] = useState('')
|
||||
const [selectedDescriptionDraft, setSelectedDescriptionDraft] = useState('')
|
||||
const [copyIdSuccess, setCopyIdSuccess] = useState(false)
|
||||
@@ -350,19 +347,6 @@ export function CredentialsManager() {
|
||||
[envCredentials, selectedCredentialId]
|
||||
)
|
||||
|
||||
if (selectedCredential?.id !== prevSelectedCredentialId) {
|
||||
setPrevSelectedCredentialId(selectedCredential?.id ?? null)
|
||||
if (!selectedCredential) {
|
||||
setSelectedDescriptionDraft('')
|
||||
setSelectedDisplayNameDraft('')
|
||||
setDetailsError(null)
|
||||
} else {
|
||||
setDetailsError(null)
|
||||
setSelectedDescriptionDraft(selectedCredential.description || '')
|
||||
setSelectedDisplayNameDraft(selectedCredential.displayName)
|
||||
}
|
||||
}
|
||||
|
||||
// --- Detail view hooks ---
|
||||
const { data: members = [], isPending: membersLoading } = useWorkspaceCredentialMembers(
|
||||
selectedCredential?.id
|
||||
@@ -474,10 +458,12 @@ export function CredentialsManager() {
|
||||
return personalInvalid || workspaceInvalid
|
||||
}, [envVars, newWorkspaceRows])
|
||||
|
||||
hasChangesRef.current = hasChanges
|
||||
shouldBlockNavRef.current = hasChanges || isDetailsDirty
|
||||
|
||||
// --- Effects ---
|
||||
useEffect(() => {
|
||||
hasChangesRef.current = hasChanges
|
||||
shouldBlockNavRef.current = hasChanges || isDetailsDirty
|
||||
}, [hasChanges, isDetailsDirty])
|
||||
|
||||
useEffect(() => {
|
||||
if (hasSavedRef.current) return
|
||||
|
||||
@@ -563,6 +549,19 @@ export function CredentialsManager() {
|
||||
}
|
||||
}, [])
|
||||
|
||||
// --- Detail view: sync drafts when credential changes ---
|
||||
useEffect(() => {
|
||||
if (!selectedCredential) {
|
||||
setSelectedDescriptionDraft('')
|
||||
setSelectedDisplayNameDraft('')
|
||||
return
|
||||
}
|
||||
|
||||
setDetailsError(null)
|
||||
setSelectedDescriptionDraft(selectedCredential.description || '')
|
||||
setSelectedDisplayNameDraft(selectedCredential.displayName)
|
||||
}, [selectedCredential])
|
||||
|
||||
// --- Pending credential create request ---
|
||||
const applyPendingCredentialCreateRequest = useCallback(
|
||||
(request: PendingCredentialCreateRequest) => {
|
||||
|
||||
@@ -68,12 +68,6 @@ export function General() {
|
||||
const [name, setName] = useState(profile?.name || '')
|
||||
const [isEditingName, setIsEditingName] = useState(false)
|
||||
const inputRef = useRef<HTMLInputElement>(null)
|
||||
const [prevProfileName, setPrevProfileName] = useState(profile?.name)
|
||||
|
||||
if (profile?.name && profile.name !== prevProfileName) {
|
||||
setPrevProfileName(profile.name)
|
||||
setName(profile.name)
|
||||
}
|
||||
|
||||
const [showResetPasswordModal, setShowResetPasswordModal] = useState(false)
|
||||
const resetPassword = useResetPassword()
|
||||
@@ -82,6 +76,12 @@ export function General() {
|
||||
|
||||
const snapToGridValue = settings?.snapToGridSize ?? 0
|
||||
|
||||
useEffect(() => {
|
||||
if (profile?.name) {
|
||||
setName(profile.name)
|
||||
}
|
||||
}, [profile?.name])
|
||||
|
||||
const {
|
||||
previewUrl: profilePictureUrl,
|
||||
fileInputRef: profilePictureInputRef,
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import { type FC, memo, useCallback, useMemo, useRef, useState } from 'react'
|
||||
import { RotateCcw } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { MessageActions } from '@/app/workspace/[workspaceId]/components'
|
||||
import {
|
||||
OptionsSelector,
|
||||
parseSpecialTags,
|
||||
@@ -410,15 +409,10 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
if (isAssistant) {
|
||||
return (
|
||||
<div
|
||||
className={`group/msg relative w-full max-w-full flex-none overflow-hidden [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
className={`w-full max-w-full flex-none overflow-hidden [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
|
||||
style={{ '--panel-max-width': `${panelWidth - 16}px` } as React.CSSProperties}
|
||||
>
|
||||
{!isStreaming && (message.content || message.contentBlocks?.length) && (
|
||||
<div className='absolute right-0 bottom-0 z-10'>
|
||||
<MessageActions content={message.content} requestId={message.requestId} />
|
||||
</div>
|
||||
)}
|
||||
<div className='max-w-full space-y-[4px] px-[2px] pb-5'>
|
||||
<div className='max-w-full space-y-[4px] px-[2px] pb-[4px]'>
|
||||
{/* Content blocks in chronological order */}
|
||||
{memoizedContentBlocks || (isStreaming && <div className='min-h-0' />)}
|
||||
|
||||
|
||||
@@ -97,14 +97,16 @@ const PlanModeSection: React.FC<PlanModeSectionProps> = ({
|
||||
const [isResizing, setIsResizing] = React.useState(false)
|
||||
const [isEditing, setIsEditing] = React.useState(false)
|
||||
const [editedContent, setEditedContent] = React.useState(content)
|
||||
const [prevContent, setPrevContent] = React.useState(content)
|
||||
if (!isEditing && content !== prevContent) {
|
||||
setPrevContent(content)
|
||||
setEditedContent(content)
|
||||
}
|
||||
const resizeStartRef = React.useRef({ y: 0, startHeight: 0 })
|
||||
const textareaRef = React.useRef<HTMLTextAreaElement>(null)
|
||||
|
||||
// Update edited content when content prop changes
|
||||
React.useEffect(() => {
|
||||
if (!isEditing) {
|
||||
setEditedContent(content)
|
||||
}
|
||||
}, [content, isEditing])
|
||||
|
||||
const handleResizeStart = React.useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
e.preventDefault()
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { memo, useState } from 'react'
|
||||
import { memo, useEffect, useState } from 'react'
|
||||
import { Check, ChevronDown, ChevronRight, Loader2, X } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
@@ -47,11 +47,13 @@ export const TodoList = memo(function TodoList({
|
||||
className,
|
||||
}: TodoListProps) {
|
||||
const [isCollapsed, setIsCollapsed] = useState(collapsed)
|
||||
const [prevCollapsed, setPrevCollapsed] = useState(collapsed)
|
||||
if (collapsed !== prevCollapsed) {
|
||||
setPrevCollapsed(collapsed)
|
||||
|
||||
/**
|
||||
* Sync collapsed prop with internal state
|
||||
*/
|
||||
useEffect(() => {
|
||||
setIsCollapsed(collapsed)
|
||||
}
|
||||
}, [collapsed])
|
||||
|
||||
if (!todos || todos.length === 0) {
|
||||
return null
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import {
|
||||
escapeRegex,
|
||||
filterOutContext,
|
||||
@@ -22,6 +22,15 @@ interface UseContextManagementProps {
|
||||
*/
|
||||
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
|
||||
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
|
||||
const initializedRef = useRef(false)
|
||||
|
||||
// Initialize with initial contexts when they're first provided (for edit mode)
|
||||
useEffect(() => {
|
||||
if (initialContexts && initialContexts.length > 0 && !initializedRef.current) {
|
||||
setSelectedContexts(initialContexts)
|
||||
initializedRef.current = true
|
||||
}
|
||||
}, [initialContexts])
|
||||
|
||||
/**
|
||||
* Adds a context to the selected contexts list, avoiding duplicates
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import {
|
||||
Button,
|
||||
@@ -49,10 +49,7 @@ export function GeneralDeploy({
|
||||
onLoadDeploymentComplete,
|
||||
}: GeneralDeployProps) {
|
||||
const [selectedVersion, setSelectedVersion] = useState<number | null>(null)
|
||||
const [showActiveDespiteSelection, setShowActiveDespiteSelection] = useState(false)
|
||||
// Derived — no useEffect needed
|
||||
const previewMode: PreviewMode =
|
||||
selectedVersion !== null && !showActiveDespiteSelection ? 'selected' : 'active'
|
||||
const [previewMode, setPreviewMode] = useState<PreviewMode>('active')
|
||||
const [showLoadDialog, setShowLoadDialog] = useState(false)
|
||||
const [showPromoteDialog, setShowPromoteDialog] = useState(false)
|
||||
const [showExpandedPreview, setShowExpandedPreview] = useState(false)
|
||||
@@ -67,9 +64,16 @@ export function GeneralDeploy({
|
||||
|
||||
const revertMutation = useRevertToVersion()
|
||||
|
||||
useEffect(() => {
|
||||
if (selectedVersion !== null) {
|
||||
setPreviewMode('selected')
|
||||
} else {
|
||||
setPreviewMode('active')
|
||||
}
|
||||
}, [selectedVersion])
|
||||
|
||||
const handleSelectVersion = useCallback((version: number | null) => {
|
||||
setSelectedVersion(version)
|
||||
setShowActiveDespiteSelection(false)
|
||||
}, [])
|
||||
|
||||
const handleLoadDeployment = useCallback((version: number) => {
|
||||
@@ -160,9 +164,7 @@ export function GeneralDeploy({
|
||||
>
|
||||
<ButtonGroup
|
||||
value={previewMode}
|
||||
onValueChange={(val) =>
|
||||
setShowActiveDespiteSelection((val as PreviewMode) === 'active')
|
||||
}
|
||||
onValueChange={(val) => setPreviewMode(val as PreviewMode)}
|
||||
>
|
||||
<ButtonGroupItem value='active'>Live</ButtonGroupItem>
|
||||
<ButtonGroupItem value='selected' className='truncate'>
|
||||
|
||||
@@ -227,39 +227,12 @@ export function DeployModal({
|
||||
getApiKeyLabel,
|
||||
])
|
||||
|
||||
const selectedStreamingOutputsRef = useRef(selectedStreamingOutputs)
|
||||
selectedStreamingOutputsRef.current = selectedStreamingOutputs
|
||||
|
||||
useEffect(() => {
|
||||
if (open && workflowId) {
|
||||
setActiveTab('general')
|
||||
setDeployError(null)
|
||||
setDeployWarnings([])
|
||||
setChatSuccess(false)
|
||||
|
||||
const currentOutputs = selectedStreamingOutputsRef.current
|
||||
if (currentOutputs.length > 0) {
|
||||
const blocks = Object.values(useWorkflowStore.getState().blocks)
|
||||
const validOutputs = currentOutputs.filter((outputId) => {
|
||||
if (startsWithUuid(outputId)) {
|
||||
const underscoreIndex = outputId.indexOf('_')
|
||||
if (underscoreIndex === -1) return false
|
||||
const blockId = outputId.substring(0, underscoreIndex)
|
||||
return blocks.some((b) => b.id === blockId)
|
||||
}
|
||||
const parts = outputId.split('.')
|
||||
if (parts.length >= 2) {
|
||||
const blockName = parts[0]
|
||||
return blocks.some(
|
||||
(b) => b.name?.toLowerCase().replace(/\s+/g, '') === blockName.toLowerCase()
|
||||
)
|
||||
}
|
||||
return true
|
||||
})
|
||||
if (validOutputs.length !== currentOutputs.length) {
|
||||
setSelectedStreamingOutputs(validOutputs)
|
||||
}
|
||||
}
|
||||
}
|
||||
return () => {
|
||||
if (chatSuccessTimeoutRef.current) {
|
||||
@@ -268,6 +241,38 @@ export function DeployModal({
|
||||
}
|
||||
}, [open, workflowId])
|
||||
|
||||
useEffect(() => {
|
||||
if (!open || selectedStreamingOutputs.length === 0) return
|
||||
|
||||
const blocks = Object.values(useWorkflowStore.getState().blocks)
|
||||
|
||||
const validOutputs = selectedStreamingOutputs.filter((outputId) => {
|
||||
if (startsWithUuid(outputId)) {
|
||||
const underscoreIndex = outputId.indexOf('_')
|
||||
if (underscoreIndex === -1) return false
|
||||
|
||||
const blockId = outputId.substring(0, underscoreIndex)
|
||||
const block = blocks.find((b) => b.id === blockId)
|
||||
return !!block
|
||||
}
|
||||
|
||||
const parts = outputId.split('.')
|
||||
if (parts.length >= 2) {
|
||||
const blockName = parts[0]
|
||||
const block = blocks.find(
|
||||
(b) => b.name?.toLowerCase().replace(/\s+/g, '') === blockName.toLowerCase()
|
||||
)
|
||||
return !!block
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
|
||||
if (validOutputs.length !== selectedStreamingOutputs.length) {
|
||||
setSelectedStreamingOutputs(validOutputs)
|
||||
}
|
||||
}, [open, selectedStreamingOutputs, setSelectedStreamingOutputs])
|
||||
|
||||
useEffect(() => {
|
||||
const handleOpenDeployModal = (event: Event) => {
|
||||
const customEvent = event as CustomEvent<{ tab?: TabView }>
|
||||
|
||||
@@ -120,6 +120,7 @@ export const ComboBox = memo(function ComboBox({
|
||||
)
|
||||
|
||||
// State management
|
||||
const [storeInitialized, setStoreInitialized] = useState(false)
|
||||
const [fetchedOptions, setFetchedOptions] = useState<Array<{ label: string; id: string }>>([])
|
||||
const [isLoadingOptions, setIsLoadingOptions] = useState(false)
|
||||
const [fetchError, setFetchError] = useState<string | null>(null)
|
||||
@@ -279,22 +280,27 @@ export const ComboBox = memo(function ComboBox({
|
||||
}, [value, evaluatedOptions])
|
||||
|
||||
const [inputValue, setInputValue] = useState(displayValue)
|
||||
const [prevDisplayValue, setPrevDisplayValue] = useState(displayValue)
|
||||
if (displayValue !== prevDisplayValue) {
|
||||
setPrevDisplayValue(displayValue)
|
||||
setInputValue(displayValue)
|
||||
}
|
||||
|
||||
// Set default value once permissions are loaded
|
||||
useEffect(() => {
|
||||
setInputValue(displayValue)
|
||||
}, [displayValue])
|
||||
|
||||
// Mark store as initialized on first render
|
||||
useEffect(() => {
|
||||
setStoreInitialized(true)
|
||||
}, [])
|
||||
|
||||
// Set default value once store is initialized and permissions are loaded
|
||||
useEffect(() => {
|
||||
if (isPermissionLoading) return
|
||||
if (!storeInitialized) return
|
||||
if (defaultOptionValue === undefined) return
|
||||
|
||||
// Only set default when no value exists (initial block add)
|
||||
if (value === null || value === undefined) {
|
||||
setStoreValue(defaultOptionValue)
|
||||
}
|
||||
}, [value, defaultOptionValue, setStoreValue, isPermissionLoading])
|
||||
}, [storeInitialized, value, defaultOptionValue, setStoreValue, isPermissionLoading])
|
||||
|
||||
// Clear fetched options and hydrated option when dependencies change
|
||||
useEffect(() => {
|
||||
|
||||
@@ -124,6 +124,7 @@ export const Dropdown = memo(function Dropdown({
|
||||
isEqual
|
||||
)
|
||||
|
||||
const [storeInitialized, setStoreInitialized] = useState(false)
|
||||
const [fetchedOptions, setFetchedOptions] = useState<Array<{ label: string; id: string }>>([])
|
||||
const [isLoadingOptions, setIsLoadingOptions] = useState(false)
|
||||
const [fetchError, setFetchError] = useState<string | null>(null)
|
||||
@@ -241,13 +242,17 @@ export const Dropdown = memo(function Dropdown({
|
||||
}, [defaultValue, comboboxOptions, multiSelect])
|
||||
|
||||
useEffect(() => {
|
||||
if (multiSelect || defaultOptionValue === undefined) {
|
||||
setStoreInitialized(true)
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
if (multiSelect || !storeInitialized || defaultOptionValue === undefined) {
|
||||
return
|
||||
}
|
||||
if (storeValue === null || storeValue === undefined || storeValue === '') {
|
||||
setStoreValue(defaultOptionValue)
|
||||
}
|
||||
}, [storeValue, defaultOptionValue, setStoreValue, multiSelect])
|
||||
}, [storeInitialized, storeValue, defaultOptionValue, setStoreValue, multiSelect])
|
||||
|
||||
/**
|
||||
* Normalizes variable references in JSON strings by wrapping them in quotes
|
||||
|
||||
@@ -122,9 +122,11 @@ export function LongInput({
|
||||
isStreaming: wandHook.isStreaming,
|
||||
})
|
||||
|
||||
persistSubBlockValueRef.current = (value: string) => {
|
||||
setSubBlockValue(value)
|
||||
}
|
||||
useEffect(() => {
|
||||
persistSubBlockValueRef.current = (value: string) => {
|
||||
setSubBlockValue(value)
|
||||
}
|
||||
}, [setSubBlockValue])
|
||||
|
||||
// Check if wand is actually enabled
|
||||
const isWandEnabled = config.wandConfig?.enabled ?? false
|
||||
@@ -191,12 +193,12 @@ export function LongInput({
|
||||
// Sync local content with base value when not streaming
|
||||
useEffect(() => {
|
||||
if (!wandHook.isStreaming) {
|
||||
setLocalContent((prev) => {
|
||||
const baseValueString = baseValue?.toString() ?? ''
|
||||
return baseValueString !== prev ? baseValueString : prev
|
||||
})
|
||||
const baseValueString = baseValue?.toString() ?? ''
|
||||
if (baseValueString !== localContent) {
|
||||
setLocalContent(baseValueString)
|
||||
}
|
||||
}
|
||||
}, [baseValue, wandHook.isStreaming])
|
||||
}, [baseValue, wandHook.isStreaming]) // Removed localContent to prevent infinite loop
|
||||
|
||||
// Update height when rows prop changes
|
||||
useLayoutEffect(() => {
|
||||
|
||||
@@ -109,9 +109,11 @@ export const ShortInput = memo(function ShortInput({
|
||||
isStreaming: wandHook.isStreaming,
|
||||
})
|
||||
|
||||
persistSubBlockValueRef.current = (value: string) => {
|
||||
setSubBlockValue(value)
|
||||
}
|
||||
useEffect(() => {
|
||||
persistSubBlockValueRef.current = (value: string) => {
|
||||
setSubBlockValue(value)
|
||||
}
|
||||
}, [setSubBlockValue])
|
||||
|
||||
const isWandEnabled = config.wandConfig?.enabled ?? false
|
||||
|
||||
@@ -212,12 +214,12 @@ export const ShortInput = memo(function ShortInput({
|
||||
|
||||
useEffect(() => {
|
||||
if (!wandHook.isStreaming) {
|
||||
setLocalContent((prev) => {
|
||||
const baseValueString = baseValue?.toString() ?? ''
|
||||
return baseValueString !== prev ? baseValueString : prev
|
||||
})
|
||||
const baseValueString = baseValue?.toString() ?? ''
|
||||
if (baseValueString !== localContent) {
|
||||
setLocalContent(baseValueString)
|
||||
}
|
||||
}
|
||||
}, [baseValue, wandHook.isStreaming])
|
||||
}, [baseValue, wandHook.isStreaming, localContent])
|
||||
|
||||
const handleScroll = useCallback((e: React.UIEvent<HTMLInputElement>) => {
|
||||
if (overlayRef.current) {
|
||||
|
||||
@@ -310,14 +310,6 @@ export const Toolbar = memo(
|
||||
// Search state
|
||||
const [isSearchActive, setIsSearchActive] = useState(false)
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [prevIsActive, setPrevIsActive] = useState(isActive)
|
||||
if (isActive !== prevIsActive) {
|
||||
setPrevIsActive(isActive)
|
||||
if (!isActive) {
|
||||
setIsSearchActive(false)
|
||||
setSearchQuery('')
|
||||
}
|
||||
}
|
||||
|
||||
// Toggle animation state
|
||||
const [isToggling, setIsToggling] = useState(false)
|
||||
@@ -358,8 +350,14 @@ export const Toolbar = memo(
|
||||
const isTriggersAtMinimum = toolbarTriggersHeight <= TRIGGERS_MIN_THRESHOLD
|
||||
|
||||
/**
|
||||
* Filter items based on search query
|
||||
* Clear search when tab becomes inactive
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (!isActive) {
|
||||
setIsSearchActive(false)
|
||||
setSearchQuery('')
|
||||
}
|
||||
}, [isActive])
|
||||
|
||||
/**
|
||||
* Filter items based on search query
|
||||
|
||||
@@ -604,13 +604,11 @@ export const Terminal = memo(function Terminal() {
|
||||
const [autoSelectEnabled, setAutoSelectEnabled] = useState(true)
|
||||
const [mainOptionsOpen, setMainOptionsOpen] = useState(false)
|
||||
|
||||
const [isTrainingEnvEnabled] = useState(() =>
|
||||
isTruthy(getEnv('NEXT_PUBLIC_COPILOT_TRAINING_ENABLED'))
|
||||
)
|
||||
const [isTrainingEnvEnabled, setIsTrainingEnvEnabled] = useState(false)
|
||||
const showTrainingControls = useShowTrainingControls()
|
||||
const { isTraining, toggleModal: toggleTrainingModal, stopTraining } = useCopilotTrainingStore()
|
||||
|
||||
const [isPlaygroundEnabled] = useState(() => isTruthy(getEnv('NEXT_PUBLIC_ENABLE_PLAYGROUND')))
|
||||
const [isPlaygroundEnabled, setIsPlaygroundEnabled] = useState(false)
|
||||
|
||||
const { handleMouseDown } = useTerminalResize()
|
||||
const { handleMouseDown: handleOutputPanelResizeMouseDown } = useOutputPanelResize()
|
||||
@@ -711,21 +709,21 @@ export const Terminal = memo(function Terminal() {
|
||||
}, [outputData])
|
||||
|
||||
// Keep refs in sync for keyboard handler
|
||||
selectedEntryRef.current = selectedEntry
|
||||
navigableEntriesRef.current = navigableEntries
|
||||
showInputRef.current = showInput
|
||||
hasInputDataRef.current = hasInputData
|
||||
isExpandedRef.current = isExpanded
|
||||
useEffect(() => {
|
||||
selectedEntryRef.current = selectedEntry
|
||||
navigableEntriesRef.current = navigableEntries
|
||||
showInputRef.current = showInput
|
||||
hasInputDataRef.current = hasInputData
|
||||
isExpandedRef.current = isExpanded
|
||||
}, [selectedEntry, navigableEntries, showInput, hasInputData, isExpanded])
|
||||
|
||||
/**
|
||||
* Reset entry tracking when switching workflows to ensure auto-open
|
||||
* works correctly for each workflow independently.
|
||||
*/
|
||||
const prevActiveWorkflowIdRef = useRef(activeWorkflowId)
|
||||
if (prevActiveWorkflowIdRef.current !== activeWorkflowId) {
|
||||
prevActiveWorkflowIdRef.current = activeWorkflowId
|
||||
useEffect(() => {
|
||||
hasInitializedEntriesRef.current = false
|
||||
}
|
||||
}, [activeWorkflowId])
|
||||
|
||||
/**
|
||||
* Auto-open the terminal on new entries when "Open on run" is enabled.
|
||||
@@ -963,6 +961,11 @@ export const Terminal = memo(function Terminal() {
|
||||
return unsub
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
setIsTrainingEnvEnabled(isTruthy(getEnv('NEXT_PUBLIC_COPILOT_TRAINING_ENABLED')))
|
||||
setIsPlaygroundEnabled(isTruthy(getEnv('NEXT_PUBLIC_ENABLE_PLAYGROUND')))
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
if (!selectedEntry) {
|
||||
setShowInput(false)
|
||||
|
||||
@@ -29,11 +29,6 @@ export function WandPromptBar({
|
||||
}: WandPromptBarProps) {
|
||||
const promptBarRef = useRef<HTMLDivElement>(null)
|
||||
const [isExiting, setIsExiting] = useState(false)
|
||||
const [prevIsVisible, setPrevIsVisible] = useState(isVisible)
|
||||
if (isVisible !== prevIsVisible) {
|
||||
setPrevIsVisible(isVisible)
|
||||
if (isVisible) setIsExiting(false)
|
||||
}
|
||||
|
||||
// Handle the fade-out animation
|
||||
const handleCancel = () => {
|
||||
@@ -71,6 +66,13 @@ export function WandPromptBar({
|
||||
}
|
||||
}, [isVisible, isStreaming, isLoading, isExiting, onCancel])
|
||||
|
||||
// Reset the exit state when visibility changes
|
||||
useEffect(() => {
|
||||
if (isVisible) {
|
||||
setIsExiting(false)
|
||||
}
|
||||
}, [isVisible])
|
||||
|
||||
if (!isVisible && !isStreaming && !isExiting) {
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useLayoutEffect, useRef, useState } from 'react'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
|
||||
const AUTO_SCROLL_GRACE_MS = 120
|
||||
|
||||
@@ -38,13 +38,6 @@ export function useScrollManagement(
|
||||
) {
|
||||
const scrollAreaRef = useRef<HTMLDivElement>(null)
|
||||
const [userHasScrolledAway, setUserHasScrolledAway] = useState(false)
|
||||
const [prevIsSendingMessage, setPrevIsSendingMessage] = useState(isSendingMessage)
|
||||
if (prevIsSendingMessage !== isSendingMessage) {
|
||||
setPrevIsSendingMessage(isSendingMessage)
|
||||
if (!isSendingMessage) {
|
||||
setUserHasScrolledAway(false)
|
||||
}
|
||||
}
|
||||
const programmaticUntilRef = useRef(0)
|
||||
const lastScrollTopRef = useRef(0)
|
||||
|
||||
@@ -145,6 +138,12 @@ export function useScrollManagement(
|
||||
}
|
||||
}, [messages, userHasScrolledAway, scrollToBottom])
|
||||
|
||||
useEffect(() => {
|
||||
if (!isSendingMessage) {
|
||||
setUserHasScrolledAway(false)
|
||||
}
|
||||
}, [isSendingMessage])
|
||||
|
||||
useEffect(() => {
|
||||
if (!isSendingMessage || userHasScrolledAway) return
|
||||
|
||||
@@ -168,7 +167,7 @@ export function useScrollManagement(
|
||||
// overflow-anchor: none during streaming prevents the browser from
|
||||
// fighting our programmatic scrollToBottom calls (Chromium/Firefox only;
|
||||
// Safari does not support this property).
|
||||
useLayoutEffect(() => {
|
||||
useEffect(() => {
|
||||
const container = scrollAreaRef.current
|
||||
if (!container) return
|
||||
|
||||
|
||||
@@ -336,7 +336,9 @@ const WorkflowContent = React.memo(
|
||||
|
||||
const isAutoConnectEnabled = useAutoConnect()
|
||||
const autoConnectRef = useRef(isAutoConnectEnabled)
|
||||
autoConnectRef.current = isAutoConnectEnabled
|
||||
useEffect(() => {
|
||||
autoConnectRef.current = isAutoConnectEnabled
|
||||
}, [isAutoConnectEnabled])
|
||||
|
||||
// Panel open states for context menu
|
||||
const isVariablesOpen = useVariablesStore((state) => state.isOpen)
|
||||
|
||||
@@ -277,20 +277,16 @@ function ConnectionsSection({
|
||||
onResizeMouseDown,
|
||||
onToggleCollapsed,
|
||||
}: ConnectionsSectionProps) {
|
||||
/** Stable string of connection IDs to prevent guard from running on every render */
|
||||
const connectionIds = useMemo(() => connections.map((c) => c.blockId).join(','), [connections])
|
||||
|
||||
const [expandedBlocks, setExpandedBlocks] = useState<Set<string>>(
|
||||
() => new Set(connectionIds.split(',').filter(Boolean))
|
||||
)
|
||||
const [expandedBlocks, setExpandedBlocks] = useState<Set<string>>(() => new Set())
|
||||
const [expandedVariables, setExpandedVariables] = useState(true)
|
||||
const [expandedEnvVars, setExpandedEnvVars] = useState(true)
|
||||
|
||||
const [prevConnectionIds, setPrevConnectionIds] = useState(connectionIds)
|
||||
if (connectionIds !== prevConnectionIds) {
|
||||
setPrevConnectionIds(connectionIds)
|
||||
/** Stable string of connection IDs to prevent effect from running on every render */
|
||||
const connectionIds = useMemo(() => connections.map((c) => c.blockId).join(','), [connections])
|
||||
|
||||
useEffect(() => {
|
||||
setExpandedBlocks(new Set(connectionIds.split(',').filter(Boolean)))
|
||||
}
|
||||
}, [connectionIds])
|
||||
|
||||
const hasContent = connections.length > 0 || workflowVars.length > 0 || envVars.length > 0
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { ArrowLeft } from 'lucide-react'
|
||||
import { Button, Tooltip } from '@/components/emcn'
|
||||
import { redactApiKeys } from '@/lib/core/security/redaction'
|
||||
@@ -161,11 +161,6 @@ export function Preview({
|
||||
})
|
||||
|
||||
const [workflowStack, setWorkflowStack] = useState<WorkflowStackEntry[]>([])
|
||||
const [prevRootState, setPrevRootState] = useState(rootWorkflowState)
|
||||
if (rootWorkflowState !== prevRootState) {
|
||||
setPrevRootState(rootWorkflowState)
|
||||
setWorkflowStack([])
|
||||
}
|
||||
|
||||
const rootBlockExecutions = useMemo(() => {
|
||||
if (providedBlockExecutions) return providedBlockExecutions
|
||||
@@ -232,6 +227,10 @@ export function Preview({
|
||||
setPinnedBlockId(null)
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
setWorkflowStack([])
|
||||
}, [rootWorkflowState])
|
||||
|
||||
const isNested = workflowStack.length > 0
|
||||
|
||||
const currentWorkflowName = isNested ? workflowStack[workflowStack.length - 1].workflowName : null
|
||||
|
||||
@@ -175,26 +175,24 @@ export function SearchModal({
|
||||
]
|
||||
)
|
||||
|
||||
const [search, setSearch] = useState('')
|
||||
const [prevOpen, setPrevOpen] = useState(open)
|
||||
if (open !== prevOpen) {
|
||||
setPrevOpen(open)
|
||||
if (open) setSearch('')
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (!open || !inputRef.current) return
|
||||
const nativeInputValueSetter = Object.getOwnPropertyDescriptor(
|
||||
window.HTMLInputElement.prototype,
|
||||
'value'
|
||||
)?.set
|
||||
if (nativeInputValueSetter) {
|
||||
nativeInputValueSetter.call(inputRef.current, '')
|
||||
inputRef.current.dispatchEvent(new Event('input', { bubbles: true }))
|
||||
if (open) {
|
||||
setSearch('')
|
||||
if (inputRef.current) {
|
||||
const nativeInputValueSetter = Object.getOwnPropertyDescriptor(
|
||||
window.HTMLInputElement.prototype,
|
||||
'value'
|
||||
)?.set
|
||||
if (nativeInputValueSetter) {
|
||||
nativeInputValueSetter.call(inputRef.current, '')
|
||||
inputRef.current.dispatchEvent(new Event('input', { bubbles: true }))
|
||||
}
|
||||
inputRef.current.focus()
|
||||
}
|
||||
}
|
||||
inputRef.current.focus()
|
||||
}, [open])
|
||||
|
||||
const [search, setSearch] = useState('')
|
||||
const deferredSearch = useDeferredValue(search)
|
||||
|
||||
const handleSearchChange = useCallback((value: string) => {
|
||||
|
||||
@@ -124,6 +124,13 @@ export function useDragDrop(options: UseDragDropOptions = {}) {
|
||||
}
|
||||
}, [hoverFolderId, isDragging, expandedFolders, setExpanded])
|
||||
|
||||
useEffect(() => {
|
||||
if (!isDragging) {
|
||||
setHoverFolderId(null)
|
||||
setDropIndicator(null)
|
||||
}
|
||||
}, [isDragging])
|
||||
|
||||
const calculateDropPosition = useCallback(
|
||||
(e: React.DragEvent, element: HTMLElement): 'before' | 'after' => {
|
||||
const rect = element.getBoundingClientRect()
|
||||
|
||||
@@ -42,6 +42,13 @@ export function useItemRename({ initialName, onSave, itemType, itemId }: UseItem
|
||||
const [isRenaming, setIsRenaming] = useState(false)
|
||||
const inputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
/**
|
||||
* Update edit value when initial name changes
|
||||
*/
|
||||
useEffect(() => {
|
||||
setEditValue(initialName)
|
||||
}, [initialName])
|
||||
|
||||
/**
|
||||
* Focus and select input when entering edit mode
|
||||
*/
|
||||
|
||||
@@ -29,7 +29,6 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
{ label: 'List Documents', id: 'list_documents' },
|
||||
{ label: 'Get Document', id: 'get_document' },
|
||||
{ label: 'Create Document', id: 'create_document' },
|
||||
{ label: 'Upsert Document', id: 'upsert_document' },
|
||||
{ label: 'Delete Document', id: 'delete_document' },
|
||||
{ label: 'List Chunks', id: 'list_chunks' },
|
||||
{ label: 'Upload Chunk', id: 'upload_chunk' },
|
||||
@@ -176,14 +175,14 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
condition: { field: 'operation', value: 'upload_chunk' },
|
||||
},
|
||||
|
||||
// --- Create Document / Upsert Document ---
|
||||
// --- Create Document ---
|
||||
{
|
||||
id: 'name',
|
||||
title: 'Document Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter document name',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['create_document', 'upsert_document'] },
|
||||
condition: { field: 'operation', value: 'create_document' },
|
||||
},
|
||||
{
|
||||
id: 'content',
|
||||
@@ -192,21 +191,14 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
placeholder: 'Enter the document content',
|
||||
rows: 6,
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['create_document', 'upsert_document'] },
|
||||
},
|
||||
{
|
||||
id: 'upsertDocumentId',
|
||||
title: 'Document ID (Optional)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter existing document ID to update (or leave empty to match by name)',
|
||||
condition: { field: 'operation', value: 'upsert_document' },
|
||||
condition: { field: 'operation', value: 'create_document' },
|
||||
},
|
||||
{
|
||||
id: 'documentTags',
|
||||
title: 'Document Tags',
|
||||
type: 'document-tag-entry',
|
||||
dependsOn: ['knowledgeBaseSelector'],
|
||||
condition: { field: 'operation', value: ['create_document', 'upsert_document'] },
|
||||
condition: { field: 'operation', value: 'create_document' },
|
||||
},
|
||||
|
||||
// --- Update Chunk / Delete Chunk ---
|
||||
@@ -272,7 +264,6 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
'knowledge_search',
|
||||
'knowledge_upload_chunk',
|
||||
'knowledge_create_document',
|
||||
'knowledge_upsert_document',
|
||||
'knowledge_list_tags',
|
||||
'knowledge_list_documents',
|
||||
'knowledge_get_document',
|
||||
@@ -293,8 +284,6 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
return 'knowledge_upload_chunk'
|
||||
case 'create_document':
|
||||
return 'knowledge_create_document'
|
||||
case 'upsert_document':
|
||||
return 'knowledge_upsert_document'
|
||||
case 'list_tags':
|
||||
return 'knowledge_list_tags'
|
||||
case 'list_documents':
|
||||
@@ -366,11 +355,6 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
if (params.chunkEnabledFilter) params.enabled = params.chunkEnabledFilter
|
||||
}
|
||||
|
||||
// Map upsert sub-block field to tool param
|
||||
if (params.operation === 'upsert_document' && params.upsertDocumentId) {
|
||||
params.documentId = String(params.upsertDocumentId).trim()
|
||||
}
|
||||
|
||||
// Convert enabled dropdown string to boolean for update_chunk
|
||||
if (params.operation === 'update_chunk' && typeof params.enabled === 'string') {
|
||||
params.enabled = params.enabled === 'true'
|
||||
@@ -398,7 +382,6 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
documentTags: { type: 'string', description: 'Document tags' },
|
||||
chunkSearch: { type: 'string', description: 'Search filter for chunks' },
|
||||
chunkEnabledFilter: { type: 'string', description: 'Filter chunks by enabled status' },
|
||||
upsertDocumentId: { type: 'string', description: 'Document ID for upsert operation' },
|
||||
connectorId: { type: 'string', description: 'Connector identifier' },
|
||||
},
|
||||
outputs: {
|
||||
|
||||
@@ -1,140 +0,0 @@
|
||||
---
|
||||
slug: mothership
|
||||
title: 'Introducing Mothership'
|
||||
description: 'Sim v0.6 introduces Mothership—a central intelligence layer for orchestrating your AI agents—alongside Tables, Files, Knowledge Base Connectors, and Scheduled Tasks.'
|
||||
date: 2026-03-17
|
||||
updated: 2026-03-17
|
||||
authors:
|
||||
- emir
|
||||
readingTime: 10
|
||||
tags: [Release, Mothership, Tables, Knowledge Base, Connectors, RAG, Sim]
|
||||
ogImage: /blog/mothership/cover.png
|
||||
ogAlt: 'Sim v0.6 release announcement'
|
||||
about: ['AI Agents', 'Workflow Automation', 'Developer Tools']
|
||||
timeRequired: PT10M
|
||||
canonical: https://sim.ai/blog/mothership
|
||||
featured: true
|
||||
draft: false
|
||||
---
|
||||
|
||||
I often wonder why AI agents don't already do everything for me today. Why don't they take my meetings for me? Why can't they send follow-ups to customers? Why can't they track the progress of our product launches and just start solving problems for us?
|
||||
|
||||
It seems like this is an engineering challenge more than a scientific one. Models today are already remarkably capable at solving complex problems. Look at how far AI coding has come in just two years.
|
||||
|
||||

|
||||
|
||||
In 2023, the best models could solve around 5% of real-world GitHub issues. By early 2025, that number crossed 70%. Today we're approaching 80%. These aren't toy benchmarks—they're actual pull requests on open-source repositories that require multi-file reasoning, debugging, and testing.
|
||||
|
||||
If a model can navigate a codebase, diagnose a bug, and write a working fix, why can't it update my CRM after a sales call? Why can't it pull last week's metrics from three different tools and post a summary to Slack every Monday?
|
||||
|
||||
The answer isn't intelligence. It's infrastructure. Models need the right workspace around them—persistent data, access to your tools, knowledge from your documents, and the ability to act on a schedule without you in the loop.
|
||||
|
||||
That's what we built.
|
||||
|
||||

|
||||
|
||||
---
|
||||
|
||||
## Mothership
|
||||
|
||||

|
||||
|
||||
Mothership is the control plane for your entire workspace. It's not a chatbot—it's an orchestrator with full context over your workflows, tables, knowledge bases, files, and every integration you've connected.
|
||||
|
||||
Ask it to "create a CRM table, seed it with my existing leads data, build a self-healing sync workflow, and schedule it to run daily"—and it does exactly that. It creates the table, writes the rows, wires up the workflow blocks, configures the integrations, tests it, and deploys it. One prompt, end to end.
|
||||
|
||||
Mothership can call any of your 100+ integration tools directly—query a CRM, send a Slack message, search a knowledge base, create a Linear issue—all from a single conversation. It can read, write, and edit files in your workspace. It can build entire workflows from a description and deploy them as APIs, chat interfaces, or MCP tools.
|
||||
|
||||
When something breaks, it debugs. When you need a report, it writes one. When you describe a workflow in plain English, it builds it, tests it, and ships it.
|
||||
|
||||

|
||||
|
||||
You can even email your workspace. Enable a Sim inbox, and Mothership reads incoming messages, executes tasks using your integrations and data, and replies. Your workspace becomes an email-driven automation layer—send it a request, and it handles the rest.
|
||||
|
||||
This is the difference between an assistant that answers questions and one that actually runs things.
|
||||
|
||||
---
|
||||
|
||||
## Tables
|
||||
|
||||
For agents to do real work, they need somewhere to put things. Not ephemeral context that disappears after a conversation—persistent, structured data they can read, write, and query over time.
|
||||
|
||||
Tables give your workspace a database. Create tables with typed columns—string, number, boolean, date, JSON—insert and update rows, query with filters and sorting, and use them as the state layer for any automation. 10+ operations are available as workflow blocks: query, insert, upsert, batch insert, update, delete, get row, get schema.
|
||||
|
||||
The Mothership can manage tables directly—creating schemas, seeding data, and querying results as part of a larger task. It's not just storage. It's memory.
|
||||
|
||||
A customer support workflow writes new tickets to a table. A lead enrichment pipeline upserts contact data after every sync. A reporting agent queries last week's rows and generates a summary. Tables are how your agents remember things and build on previous work.
|
||||
|
||||
Think about why this matters. An agent that forgets everything between runs can't track a product launch. An agent with a table can see what happened yesterday, what's overdue, and what needs attention—and act on it.
|
||||
|
||||
---
|
||||
|
||||
## Files
|
||||
|
||||

|
||||
|
||||
Your workspace has a built-in file system. Upload documents, create new files, and use them across your workflows and the Mothership.
|
||||
|
||||
Upload PDFs, spreadsheets, Markdown, JSON, YAML, CSV, HTML, SVG, audio, video—up to 100MB per file. Each format gets a rich preview: Markdown renders with syntax highlighting, HTML and SVG render inline, CSVs display as scrollable data tables, and media files play with native controls.
|
||||
|
||||
Edit files directly in the browser with a split-view editor—write on the left, see the rendered preview on the right. The Mothership can also create and edit files programmatically. Ask it to "write a report summarizing last week's metrics" and it creates the Markdown file, populates it, and shows you the preview inline.
|
||||
|
||||
CSV files can be imported directly into Tables. Sim auto-detects delimiters, infers column types from the data, and batch-inserts up to 1,000 rows. One click from a raw export to a queryable, workflow-ready table.
|
||||
|
||||
Files give agents the ability to produce real artifacts. Not just chat responses—actual documents, reports, configs, and exports that you and your team can use.
|
||||
|
||||
---
|
||||
|
||||
## Knowledge Base
|
||||
|
||||

|
||||
|
||||
Agents need access to what your team already knows—docs, wikis, meeting notes, support articles, design specs. But that knowledge lives scattered across dozens of tools. Without it, agents hallucinate or give generic answers. With it, they're grounded in your actual business context.
|
||||
|
||||
Knowledge Base Connectors bring it all together. Connect a source—Google Docs, Notion, Confluence, Slack, GitHub, Jira, Linear, HubSpot, Salesforce, Zendesk, Dropbox, OneDrive, Gmail, Discord, and 15+ more—and Sim handles the rest. Documents are fetched, chunked, embedded, and indexed automatically. Incremental sync keeps everything up to date on a schedule.
|
||||
|
||||
The pipeline handles PDFs with OCR, Markdown, JSON, YAML, and plain text. Chunking is configurable—size, overlap, minimum size. Embeddings use OpenAI's text-embedding-3-small with BYOK support. The result is a vector-searchable knowledge base that's instantly available to any workflow block or to Mothership directly.
|
||||
|
||||
This is RAG without the setup. Connect a source, and within minutes your agents can search your team's knowledge, retrieve relevant context, and ground their responses in real information. A support agent that can search your actual help docs. A sales assistant that pulls from your real product specs and competitive intel. A research workflow that synthesizes information from your actual documents.
|
||||
|
||||
---
|
||||
|
||||
## Scheduled Tasks
|
||||
|
||||
Everything above means agents can orchestrate, store data, manage files, and access knowledge. The last piece is autonomy—the ability to act without you in the loop.
|
||||
|
||||
Scheduled Tasks let you tell Mothership what you need done and when. Set up recurring or one-time jobs that execute on a cron schedule, with full access to your integrations, tables, and knowledge bases.
|
||||
|
||||
Two modes: **persistent** tasks run indefinitely on schedule, and **until_complete** tasks poll until a success condition is met, then stop automatically. Configurable max runs, 40+ timezone support, and automatic failure tracking that disables a task after 3 consecutive failures.
|
||||
|
||||
**"Every morning at 9am, check my Gmail for new support tickets and create rows in my Tickets table."** A persistent task that runs daily, searches Gmail, and writes structured data to a Table.
|
||||
|
||||
**"Every hour, poll the Stripe API for failed payments and send a Slack summary to #billing."** Recurring monitoring across two integrations in a single prompt.
|
||||
|
||||
**"Check if the Q1 report has been uploaded to Google Drive. When it appears, summarize it and email the team."** An until_complete task that polls until the file exists, then acts and stops.
|
||||
|
||||
**"Every Monday at 8am, pull this week's Linear issues, cross-reference with our product roadmap in Notion, and post a status update to Slack."** Multi-step reasoning across three integrations on a weekly schedule.
|
||||
|
||||
Each run tracks execution history, so the Mothership has context about what it did previously—making each run smarter.
|
||||
|
||||
This is the part that closes the gap. You described a task. It runs on schedule. You're not in the loop. The agent is just doing its job.
|
||||
|
||||
---
|
||||
|
||||
## It All Connects
|
||||
|
||||
None of these features exist in isolation. They compose into something larger.
|
||||
|
||||
Mothership queries your Tables, searches your Knowledge Bases, reads and writes your Files, calls your integrations, and runs on a schedule through Scheduled Tasks. A user emails your workspace and the Mothership reads the message, researches the answer across synced Notion docs, stores the result in a Table, and triggers a Slack notification workflow. One control plane, all your data, all your tools.
|
||||
|
||||
The reason AI agents can't run your life today isn't that they aren't smart enough. It's that they don't have the right workspace. They need persistent memory, access to your knowledge, the ability to act across your tools, and the autonomy to do it on a schedule.
|
||||
|
||||
That's what v0.6 is.
|
||||
|
||||
---
|
||||
|
||||
## Get Started
|
||||
|
||||
Sim v0.6 is available now at [sim.ai](https://sim.ai). Check out our [documentation](https://docs.sim.ai) for detailed guides on Mothership, Tables, Connectors, and more.
|
||||
|
||||
*Questions? [help@sim.ai](mailto:help@sim.ai) · [Discord](https://sim.ai/discord)*
|
||||
@@ -1,335 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>What's New at Sim</title>
|
||||
</head>
|
||||
<body style="margin:0;padding:0;background-color:#ffffff;">
|
||||
<table width="100%" cellspacing="0" cellpadding="0" border="0" role="presentation" style="background-color:#ffffff;">
|
||||
<tr>
|
||||
<td align="center" style="padding:0 16px;">
|
||||
<table width="600" cellspacing="0" cellpadding="0" border="0" role="presentation" style="max-width:600px;width:100%;">
|
||||
|
||||
<!-- Logo -->
|
||||
<tr>
|
||||
<td align="center" style="padding-top:32px;padding-bottom:16px;">
|
||||
<a href="https://sim.ai" style="color:#000;text-decoration:none;" target="_blank">
|
||||
<img src="https://sim.ai/email/broadcast/v0.6/logo.png" width="79" alt="Sim Logo" style="display:block;width:79px;" border="0">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Intro Paragraph -->
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:20px;padding-top:8px;text-align:left;">
|
||||
<p style="margin:0;">Introducing <strong>Sim v0.6</strong>—your workspace is now a living system. The <strong>Mothership</strong> is the control plane for everything you build: an AI that understands your workflows, your data, and your tools, and can act on all of them. Alongside it, we're shipping <strong>Tables</strong> for structured data and <strong>Knowledge Base Connectors</strong> that automatically sync from 30+ sources.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- CTA Button -->
|
||||
<tr>
|
||||
<td align="center" style="padding-bottom:25px;padding-top:5px;">
|
||||
<table cellspacing="0" cellpadding="0" border="0" role="presentation">
|
||||
<tr>
|
||||
<td align="center" style="background-color:#32bd7e;border-radius:5px;">
|
||||
<a href="https://sim.ai" style="display:inline-block;font-weight:500;font-size:14px;padding:7px 16px;text-decoration:none;color:#ffffff;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;" target="_blank">Try Sim</a>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Header Text -->
|
||||
<tr>
|
||||
<td align="left" valign="top" style="color:#2d2d2d;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;word-break:break-word;padding-top:10px;padding-bottom:8px;text-align:left;">
|
||||
<h2 style="margin:0;color:#2d2d2d;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:28px;font-weight:600;word-break:break-word;">One control plane, <em>all your data</em></h2>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:30px;text-align:left;">
|
||||
<p style="margin:0;">The Mothership orchestrates your workflows, tables, knowledge bases, and integrations from a single conversation.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- FEATURE 1: Mothership -->
|
||||
<tr>
|
||||
<td align="left" style="color:#2d2d2d;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:18px;word-break:break-word;padding-top:10px;padding-bottom:12px;text-align:left;">
|
||||
<strong>The Mothership</strong>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center" style="border-radius:8px;font-size:0;line-height:0;">
|
||||
<a href="https://sim.ai" style="color:#000;text-decoration:none;" target="_blank">
|
||||
<!-- TODO: Replace with actual mothership screenshot -->
|
||||
<img src="https://sim.ai/email/broadcast/v0.6/mothership.jpg" width="570" alt="The Mothership" style="display:block;width:100%;border-radius:8px;" border="0">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:25px;padding-top:12px;text-align:left;">
|
||||
<!-- TODO: Write final copy -->
|
||||
<p style="margin:0;">The Mothership is the central intelligence of your Sim workspace. It already knows your workflows, tables, knowledge bases, files, and credentials—no context needed. Ask it to query a CRM, search your synced documents, edit a file, update a table, or build an entire workflow from a single prompt. It opens resources in a split-view panel right next to the chat so you can see and edit your data live. Email your workspace and it reads the message, runs tasks, and replies. It's not an assistant—it's the operating system for your AI workspace.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- FEATURE 2: Build from Chat -->
|
||||
<tr>
|
||||
<td align="left" style="color:#2d2d2d;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:18px;word-break:break-word;padding-top:20px;padding-bottom:12px;text-align:left;">
|
||||
<strong>Build, Test, and Deploy from Chat</strong>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://sim.ai" style="text-decoration:none;display:block;" target="_blank">
|
||||
<table cellspacing="0" cellpadding="0" border="0" role="presentation" width="100%" style="background-color:#181C1E;border-radius:8px;">
|
||||
<tr>
|
||||
<td align="center" style="padding:40px 30px;">
|
||||
<table cellspacing="0" cellpadding="0" border="0" role="presentation">
|
||||
<tr>
|
||||
<td align="center" style="padding-bottom:16px;">
|
||||
<!-- TODO: Replace with build icon -->
|
||||
<img src="https://sim.ai/email/broadcast/v0.6/build.png" width="48" height="48" alt="Build" style="display:block;" border="0">
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<p style="margin:0;color:#ffffff;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:20px;font-weight:500;">Describe it in words, ship it in seconds</p>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:25px;padding-top:12px;text-align:left;">
|
||||
<!-- TODO: Write final copy -->
|
||||
<p style="margin:0;">Tell the Mothership what you need: "Build a workflow that monitors GitHub issues, searches our docs, and posts a suggested fix to Slack." It plans the workflow, wires the blocks, configures your integrations, tests the result, and deploys it as an API, chat UI, or MCP tool—all from a single conversation. When something breaks, the debug agent diagnoses the issue and suggests a fix. No drag-and-drop required.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- FEATURE 3: Tables & Files -->
|
||||
<tr>
|
||||
<td align="left" style="color:#2d2d2d;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:18px;word-break:break-word;padding-top:20px;padding-bottom:12px;text-align:left;">
|
||||
<strong>Tables & Files</strong>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center" style="border-radius:8px;font-size:0;line-height:0;">
|
||||
<a href="https://sim.ai" style="color:#000;text-decoration:none;" target="_blank">
|
||||
<!-- TODO: Replace with actual tables screenshot -->
|
||||
<img src="https://sim.ai/email/broadcast/v0.6/tables.jpg" width="570" alt="Tables & Files" style="display:block;width:100%;border-radius:8px;" border="0">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:25px;padding-top:12px;text-align:left;">
|
||||
<!-- TODO: Write final copy -->
|
||||
<p style="margin:0;">Tables give your workflows persistent, structured storage—no external database needed. Create typed columns, query with filters, insert and upsert rows, or batch operations. Import CSVs directly into Tables with automatic column type inference. Your workspace also has a built-in file system: upload PDFs, Markdown, JSON, HTML, SVGs, and more—preview and edit them in the browser with a split-view editor, or let the Mothership read and write them programmatically. Everything is accessible via REST API.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- FEATURE 3: KB Connectors -->
|
||||
<tr>
|
||||
<td align="left" style="color:#2d2d2d;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:18px;word-break:break-word;padding-top:20px;padding-bottom:12px;text-align:left;">
|
||||
<strong>Knowledge Base Connectors</strong>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://sim.ai" style="text-decoration:none;display:block;" target="_blank">
|
||||
<table cellspacing="0" cellpadding="0" border="0" role="presentation" width="100%" style="background-color:#181C1E;border-radius:8px;">
|
||||
<tr>
|
||||
<td align="center" style="padding:40px 30px;">
|
||||
<table cellspacing="0" cellpadding="0" border="0" role="presentation">
|
||||
<tr>
|
||||
<td align="center" style="padding-bottom:16px;">
|
||||
<!-- TODO: Replace with connector icon -->
|
||||
<img src="https://sim.ai/email/broadcast/v0.6/connectors.png" width="48" height="48" alt="Connectors" style="display:block;" border="0">
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<p style="margin:0;color:#ffffff;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:20px;font-weight:500;">30+ sources, automatic sync, instant RAG</p>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:25px;padding-top:12px;text-align:left;">
|
||||
<!-- TODO: Write final copy -->
|
||||
<p style="margin:0;">Connect Google Docs, Notion, Confluence, Slack, GitHub, Jira, HubSpot, Salesforce, and 20+ more sources to your knowledge bases. We handle the rest—fetching documents on a schedule, chunking content, generating embeddings, and keeping everything up to date with incremental sync. Search semantically with tag filtering across any connected source, directly from your workflows or the Mothership.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- FEATURE 4: Scheduled Tasks -->
|
||||
<tr>
|
||||
<td align="left" style="color:#2d2d2d;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:18px;word-break:break-word;padding-top:20px;padding-bottom:12px;text-align:left;">
|
||||
<strong>Scheduled Tasks</strong>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center" style="border-radius:8px;font-size:0;line-height:0;">
|
||||
<a href="https://sim.ai" style="color:#000;text-decoration:none;" target="_blank">
|
||||
<!-- TODO: Replace with actual scheduled tasks screenshot -->
|
||||
<img src="https://sim.ai/email/broadcast/v0.6/scheduled-tasks.jpg" width="570" alt="Scheduled Tasks" style="display:block;width:100%;border-radius:8px;" border="0">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:25px;padding-top:12px;text-align:left;">
|
||||
<!-- TODO: Write final copy -->
|
||||
<p style="margin:0;">Tell the Mothership what you need done and when. Schedule recurring jobs—"every morning, check Gmail for support tickets and add them to my table"—or one-time tasks that poll until a condition is met. The Mothership executes autonomously with full access to your integrations, tracks its own history across runs, and stops itself when the job is done.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Divider -->
|
||||
<tr>
|
||||
<td align="center" style="padding-bottom:10px;padding-top:10px;">
|
||||
<table width="100%" cellspacing="0" cellpadding="0" border="0" role="presentation" height="1" style="border-top:1px solid #ededed;">
|
||||
<tr>
|
||||
<td height="0" style="font-size:0;line-height:0;">­</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Final CTA -->
|
||||
<tr>
|
||||
<td align="left" style="color:#404040;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;font-size:16px;line-height:24px;word-break:break-word;padding-bottom:15px;padding-top:15px;text-align:left;">
|
||||
<p style="margin:0;">Ready to build? Tables, Connectors, Scheduled Tasks, and the Mothership are available now in Sim.</p>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td align="center" style="padding-bottom:30px;padding-top:15px;">
|
||||
<table cellspacing="0" cellpadding="0" border="0" role="presentation">
|
||||
<tr>
|
||||
<td align="center" style="background-color:#32bd7e;border-radius:5px;">
|
||||
<a href="https://sim.ai" style="display:inline-block;font-weight:500;font-size:14px;padding:7px 16px;text-decoration:none;color:#ffffff;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;" target="_blank">Get Started</a>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Footer Divider -->
|
||||
<tr>
|
||||
<td align="center" style="padding-top:20px;padding-bottom:20px;">
|
||||
<table width="100%" cellspacing="0" cellpadding="0" border="0" role="presentation" height="1" style="border-top:1px solid #ededed;">
|
||||
<tr>
|
||||
<td height="0" style="font-size:0;line-height:0;">­</td>
|
||||
</tr>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Social links row -->
|
||||
<tr>
|
||||
<td align="center">
|
||||
<table cellspacing="0" cellpadding="0" border="0" role="presentation">
|
||||
<tbody>
|
||||
<tr>
|
||||
<td align="center" style="padding:0 8px 0 0;">
|
||||
<a href="https://sim.ai/x" rel="noopener noreferrer" target="_blank">
|
||||
<img src="https://sim.ai/static/x-icon.png" width="20" height="20" alt="X" style="display:block;" border="0">
|
||||
</a>
|
||||
</td>
|
||||
<td align="center" style="padding:0 8px;">
|
||||
<a href="https://sim.ai/discord" rel="noopener noreferrer" target="_blank">
|
||||
<img src="https://sim.ai/static/discord-icon.png" width="20" height="20" alt="Discord" style="display:block;" border="0">
|
||||
</a>
|
||||
</td>
|
||||
<td align="center" style="padding:0 8px;">
|
||||
<a href="https://sim.ai/github" rel="noopener noreferrer" target="_blank">
|
||||
<img src="https://sim.ai/static/github-icon.png" width="20" height="20" alt="GitHub" style="display:block;" border="0">
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Spacer -->
|
||||
<tr>
|
||||
<td height="16" style="font-size:1px;line-height:1px;"> </td>
|
||||
</tr>
|
||||
|
||||
<!-- Address row -->
|
||||
<tr>
|
||||
<td align="center" style="font-size:12px;line-height:20px;color:#737373;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;margin:0;">
|
||||
Sim, 80 Langton St, San Francisco, CA 94103, USA
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Spacer -->
|
||||
<tr>
|
||||
<td height="8" style="font-size:1px;line-height:1px;"> </td>
|
||||
</tr>
|
||||
|
||||
<!-- Contact row -->
|
||||
<tr>
|
||||
<td align="center" style="font-size:12px;line-height:20px;color:#737373;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;margin:0;">
|
||||
Questions? <a href="mailto:support@sim.ai" style="color:#737373;text-decoration:underline;font-weight:normal;">support@sim.ai</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Spacer -->
|
||||
<tr>
|
||||
<td height="8" style="font-size:1px;line-height:1px;"> </td>
|
||||
</tr>
|
||||
|
||||
<!-- Links row -->
|
||||
<tr>
|
||||
<td align="center" style="font-size:12px;line-height:20px;color:#737373;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;margin:0;">
|
||||
<a href="https://sim.ai/privacy" style="color:#737373;text-decoration:underline;font-weight:normal;" rel="noopener noreferrer" target="_blank">Privacy Policy</a>
|
||||
•
|
||||
<a href="https://sim.ai/terms" style="color:#737373;text-decoration:underline;font-weight:normal;" rel="noopener noreferrer" target="_blank">Terms of Service</a>
|
||||
•
|
||||
<a href="{{{RESEND_UNSUBSCRIBE_URL}}}" style="color:#737373;text-decoration:underline;font-weight:normal;" rel="noopener noreferrer" target="_blank">Unsubscribe</a>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Spacer -->
|
||||
<tr>
|
||||
<td height="16" style="font-size:1px;line-height:1px;"> </td>
|
||||
</tr>
|
||||
|
||||
<!-- Copyright row -->
|
||||
<tr>
|
||||
<td align="center" style="font-size:12px;line-height:20px;color:#737373;font-family:-apple-system,'SF Pro Display','SF Pro Text','Helvetica',sans-serif;margin:0;">
|
||||
© 2026 Sim, All Rights Reserved
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
<!-- Bottom spacer -->
|
||||
<tr>
|
||||
<td height="32" style="font-size:1px;line-height:1px;"> </td>
|
||||
</tr>
|
||||
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
</body>
|
||||
</html>
|
||||
@@ -77,7 +77,7 @@ export class BlockExecutor {
|
||||
if (!isSentinel) {
|
||||
blockLog = this.createBlockLog(ctx, node.id, block, node)
|
||||
ctx.blockLogs.push(blockLog)
|
||||
await this.callOnBlockStart(ctx, node, block, blockLog.executionOrder)
|
||||
this.callOnBlockStart(ctx, node, block, blockLog.executionOrder)
|
||||
}
|
||||
|
||||
const startTime = performance.now()
|
||||
@@ -105,7 +105,7 @@ export class BlockExecutor {
|
||||
}
|
||||
} catch (error) {
|
||||
cleanupSelfReference?.()
|
||||
return await this.handleBlockError(
|
||||
return this.handleBlockError(
|
||||
error,
|
||||
ctx,
|
||||
node,
|
||||
@@ -179,7 +179,7 @@ export class BlockExecutor {
|
||||
const displayOutput = filterOutputForLog(block.metadata?.id || '', normalizedOutput, {
|
||||
block,
|
||||
})
|
||||
await this.callOnBlockComplete(
|
||||
this.callOnBlockComplete(
|
||||
ctx,
|
||||
node,
|
||||
block,
|
||||
@@ -195,7 +195,7 @@ export class BlockExecutor {
|
||||
|
||||
return normalizedOutput
|
||||
} catch (error) {
|
||||
return await this.handleBlockError(
|
||||
return this.handleBlockError(
|
||||
error,
|
||||
ctx,
|
||||
node,
|
||||
@@ -226,7 +226,7 @@ export class BlockExecutor {
|
||||
return this.blockHandlers.find((h) => h.canHandle(block))
|
||||
}
|
||||
|
||||
private async handleBlockError(
|
||||
private handleBlockError(
|
||||
error: unknown,
|
||||
ctx: ExecutionContext,
|
||||
node: DAGNode,
|
||||
@@ -236,7 +236,7 @@ export class BlockExecutor {
|
||||
resolvedInputs: Record<string, any>,
|
||||
isSentinel: boolean,
|
||||
phase: 'input_resolution' | 'execution'
|
||||
): Promise<NormalizedBlockOutput> {
|
||||
): NormalizedBlockOutput {
|
||||
const duration = performance.now() - startTime
|
||||
const errorMessage = normalizeError(error)
|
||||
const hasResolvedInputs =
|
||||
@@ -287,7 +287,7 @@ export class BlockExecutor {
|
||||
? error.childWorkflowInstanceId
|
||||
: undefined
|
||||
const displayOutput = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
|
||||
await this.callOnBlockComplete(
|
||||
this.callOnBlockComplete(
|
||||
ctx,
|
||||
node,
|
||||
block,
|
||||
@@ -439,12 +439,12 @@ export class BlockExecutor {
|
||||
return redactApiKeys(result)
|
||||
}
|
||||
|
||||
private async callOnBlockStart(
|
||||
private callOnBlockStart(
|
||||
ctx: ExecutionContext,
|
||||
node: DAGNode,
|
||||
block: SerializedBlock,
|
||||
executionOrder: number
|
||||
): Promise<void> {
|
||||
): void {
|
||||
const blockId = node.metadata?.originalBlockId ?? node.id
|
||||
const blockName = block.metadata?.name ?? blockId
|
||||
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
|
||||
@@ -452,26 +452,18 @@ export class BlockExecutor {
|
||||
const iterationContext = getIterationContext(ctx, node?.metadata)
|
||||
|
||||
if (this.contextExtensions.onBlockStart) {
|
||||
try {
|
||||
await this.contextExtensions.onBlockStart(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
executionOrder,
|
||||
iterationContext,
|
||||
ctx.childWorkflowContext
|
||||
)
|
||||
} catch (error) {
|
||||
logger.warn('Block start callback failed', {
|
||||
blockId,
|
||||
blockType,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
this.contextExtensions.onBlockStart(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
executionOrder,
|
||||
iterationContext,
|
||||
ctx.childWorkflowContext
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private async callOnBlockComplete(
|
||||
private callOnBlockComplete(
|
||||
ctx: ExecutionContext,
|
||||
node: DAGNode,
|
||||
block: SerializedBlock,
|
||||
@@ -482,7 +474,7 @@ export class BlockExecutor {
|
||||
executionOrder: number,
|
||||
endedAt: string,
|
||||
childWorkflowInstanceId?: string
|
||||
): Promise<void> {
|
||||
): void {
|
||||
const blockId = node.metadata?.originalBlockId ?? node.id
|
||||
const blockName = block.metadata?.name ?? blockId
|
||||
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
|
||||
@@ -490,30 +482,22 @@ export class BlockExecutor {
|
||||
const iterationContext = getIterationContext(ctx, node?.metadata)
|
||||
|
||||
if (this.contextExtensions.onBlockComplete) {
|
||||
try {
|
||||
await this.contextExtensions.onBlockComplete(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
{
|
||||
input,
|
||||
output,
|
||||
executionTime: duration,
|
||||
startedAt,
|
||||
executionOrder,
|
||||
endedAt,
|
||||
childWorkflowInstanceId,
|
||||
},
|
||||
iterationContext,
|
||||
ctx.childWorkflowContext
|
||||
)
|
||||
} catch (error) {
|
||||
logger.warn('Block completion callback failed', {
|
||||
blockId,
|
||||
blockType,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
this.contextExtensions.onBlockComplete(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
{
|
||||
input,
|
||||
output,
|
||||
executionTime: duration,
|
||||
startedAt,
|
||||
executionOrder,
|
||||
endedAt,
|
||||
childWorkflowInstanceId,
|
||||
},
|
||||
iterationContext,
|
||||
ctx.childWorkflowContext
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -51,7 +51,7 @@ export class LoopOrchestrator {
|
||||
private edgeManager: EdgeManager | null = null
|
||||
) {}
|
||||
|
||||
async initializeLoopScope(ctx: ExecutionContext, loopId: string): Promise<LoopScope> {
|
||||
initializeLoopScope(ctx: ExecutionContext, loopId: string): LoopScope {
|
||||
const loopConfig = this.dag.loopConfigs.get(loopId) as SerializedLoop | undefined
|
||||
if (!loopConfig) {
|
||||
throw new Error(`Loop config not found: ${loopId}`)
|
||||
@@ -76,7 +76,7 @@ export class LoopOrchestrator {
|
||||
)
|
||||
if (iterationError) {
|
||||
logger.error(iterationError, { loopId, requestedIterations })
|
||||
await this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
|
||||
iterations: requestedIterations,
|
||||
})
|
||||
scope.maxIterations = 0
|
||||
@@ -99,7 +99,7 @@ export class LoopOrchestrator {
|
||||
} catch (error) {
|
||||
const errorMessage = `ForEach loop resolution failed: ${error instanceof Error ? error.message : String(error)}`
|
||||
logger.error(errorMessage, { loopId, forEachItems: loopConfig.forEachItems })
|
||||
await this.addLoopErrorLog(ctx, loopId, loopType, errorMessage, {
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, errorMessage, {
|
||||
forEachItems: loopConfig.forEachItems,
|
||||
})
|
||||
scope.items = []
|
||||
@@ -117,7 +117,7 @@ export class LoopOrchestrator {
|
||||
)
|
||||
if (sizeError) {
|
||||
logger.error(sizeError, { loopId, collectionSize: items.length })
|
||||
await this.addLoopErrorLog(ctx, loopId, loopType, sizeError, {
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, sizeError, {
|
||||
forEachItems: loopConfig.forEachItems,
|
||||
collectionSize: items.length,
|
||||
})
|
||||
@@ -155,7 +155,7 @@ export class LoopOrchestrator {
|
||||
)
|
||||
if (iterationError) {
|
||||
logger.error(iterationError, { loopId, requestedIterations })
|
||||
await this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
|
||||
this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
|
||||
iterations: requestedIterations,
|
||||
})
|
||||
scope.maxIterations = 0
|
||||
@@ -182,14 +182,14 @@ export class LoopOrchestrator {
|
||||
return scope
|
||||
}
|
||||
|
||||
private async addLoopErrorLog(
|
||||
private addLoopErrorLog(
|
||||
ctx: ExecutionContext,
|
||||
loopId: string,
|
||||
loopType: string,
|
||||
errorMessage: string,
|
||||
inputData?: any
|
||||
): Promise<void> {
|
||||
await addSubflowErrorLog(
|
||||
): void {
|
||||
addSubflowErrorLog(
|
||||
ctx,
|
||||
loopId,
|
||||
'loop',
|
||||
@@ -238,7 +238,7 @@ export class LoopOrchestrator {
|
||||
}
|
||||
if (isCancelled) {
|
||||
logger.info('Loop execution cancelled', { loopId, iteration: scope.iteration })
|
||||
return await this.createExitResult(ctx, loopId, scope)
|
||||
return this.createExitResult(ctx, loopId, scope)
|
||||
}
|
||||
|
||||
const iterationResults: NormalizedBlockOutput[] = []
|
||||
@@ -253,7 +253,7 @@ export class LoopOrchestrator {
|
||||
scope.currentIterationOutputs.clear()
|
||||
|
||||
if (!(await this.evaluateCondition(ctx, scope, scope.iteration + 1))) {
|
||||
return await this.createExitResult(ctx, loopId, scope)
|
||||
return this.createExitResult(ctx, loopId, scope)
|
||||
}
|
||||
|
||||
scope.iteration++
|
||||
@@ -269,11 +269,11 @@ export class LoopOrchestrator {
|
||||
}
|
||||
}
|
||||
|
||||
private async createExitResult(
|
||||
private createExitResult(
|
||||
ctx: ExecutionContext,
|
||||
loopId: string,
|
||||
scope: LoopScope
|
||||
): Promise<LoopContinuationResult> {
|
||||
): LoopContinuationResult {
|
||||
const results = scope.allIterationOutputs
|
||||
const output = { results }
|
||||
this.state.setBlockOutput(loopId, output, DEFAULTS.EXECUTION_TIME)
|
||||
@@ -282,26 +282,19 @@ export class LoopOrchestrator {
|
||||
const now = new Date().toISOString()
|
||||
const iterationContext = buildContainerIterationContext(ctx, loopId)
|
||||
|
||||
try {
|
||||
await this.contextExtensions.onBlockComplete(
|
||||
loopId,
|
||||
'Loop',
|
||||
'loop',
|
||||
{
|
||||
output,
|
||||
executionTime: DEFAULTS.EXECUTION_TIME,
|
||||
startedAt: now,
|
||||
executionOrder: getNextExecutionOrder(ctx),
|
||||
endedAt: now,
|
||||
},
|
||||
iterationContext
|
||||
)
|
||||
} catch (error) {
|
||||
logger.warn('Loop completion callback failed', {
|
||||
loopId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
this.contextExtensions.onBlockComplete(
|
||||
loopId,
|
||||
'Loop',
|
||||
'loop',
|
||||
{
|
||||
output,
|
||||
executionTime: DEFAULTS.EXECUTION_TIME,
|
||||
startedAt: now,
|
||||
executionOrder: getNextExecutionOrder(ctx),
|
||||
endedAt: now,
|
||||
},
|
||||
iterationContext
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -604,7 +597,7 @@ export class LoopOrchestrator {
|
||||
if (!scope.items || scope.items.length === 0) {
|
||||
logger.info('ForEach loop has empty collection, skipping loop body', { loopId })
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
await emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
@@ -614,7 +607,7 @@ export class LoopOrchestrator {
|
||||
if (scope.maxIterations === 0) {
|
||||
logger.info('For loop has 0 iterations, skipping loop body', { loopId })
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
await emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
@@ -628,7 +621,7 @@ export class LoopOrchestrator {
|
||||
if (!scope.condition) {
|
||||
logger.warn('No condition defined for while loop', { loopId })
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
await emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -641,7 +634,7 @@ export class LoopOrchestrator {
|
||||
|
||||
if (!result) {
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
await emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
emitEmptySubflowEvents(ctx, loopId, 'loop', this.contextExtensions)
|
||||
}
|
||||
|
||||
return result
|
||||
|
||||
@@ -53,14 +53,14 @@ export class NodeExecutionOrchestrator {
|
||||
|
||||
const loopId = node.metadata.loopId
|
||||
if (loopId && !this.loopOrchestrator.getLoopScope(ctx, loopId)) {
|
||||
await this.loopOrchestrator.initializeLoopScope(ctx, loopId)
|
||||
this.loopOrchestrator.initializeLoopScope(ctx, loopId)
|
||||
}
|
||||
|
||||
const parallelId = node.metadata.parallelId
|
||||
if (parallelId && !this.parallelOrchestrator.getParallelScope(ctx, parallelId)) {
|
||||
const parallelConfig = this.dag.parallelConfigs.get(parallelId)
|
||||
const nodesInParallel = parallelConfig?.nodes?.length || 1
|
||||
await this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
|
||||
this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
|
||||
}
|
||||
|
||||
if (node.metadata.isSentinel) {
|
||||
@@ -92,7 +92,7 @@ export class NodeExecutionOrchestrator {
|
||||
const isParallelSentinel = node.metadata.isParallelSentinel
|
||||
|
||||
if (isParallelSentinel) {
|
||||
return await this.handleParallelSentinel(ctx, node, sentinelType, parallelId)
|
||||
return this.handleParallelSentinel(ctx, node, sentinelType, parallelId)
|
||||
}
|
||||
|
||||
switch (sentinelType) {
|
||||
@@ -142,12 +142,12 @@ export class NodeExecutionOrchestrator {
|
||||
}
|
||||
}
|
||||
|
||||
private async handleParallelSentinel(
|
||||
private handleParallelSentinel(
|
||||
ctx: ExecutionContext,
|
||||
node: DAGNode,
|
||||
sentinelType: string | undefined,
|
||||
parallelId: string | undefined
|
||||
): Promise<NormalizedBlockOutput> {
|
||||
): NormalizedBlockOutput {
|
||||
if (!parallelId) {
|
||||
logger.warn('Parallel sentinel called without parallelId')
|
||||
return {}
|
||||
@@ -158,7 +158,7 @@ export class NodeExecutionOrchestrator {
|
||||
const parallelConfig = this.dag.parallelConfigs.get(parallelId)
|
||||
if (parallelConfig) {
|
||||
const nodesInParallel = parallelConfig.nodes?.length || 1
|
||||
await this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
|
||||
this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -176,7 +176,7 @@ export class NodeExecutionOrchestrator {
|
||||
}
|
||||
|
||||
if (sentinelType === 'end') {
|
||||
const result = await this.parallelOrchestrator.aggregateParallelResults(ctx, parallelId)
|
||||
const result = this.parallelOrchestrator.aggregateParallelResults(ctx, parallelId)
|
||||
return {
|
||||
results: result.results || [],
|
||||
sentinelEnd: true,
|
||||
@@ -210,7 +210,7 @@ export class NodeExecutionOrchestrator {
|
||||
} else if (isParallelBranch) {
|
||||
const parallelId = this.findParallelIdForNode(node.id)
|
||||
if (parallelId) {
|
||||
await this.handleParallelNodeCompletion(ctx, node, output, parallelId)
|
||||
this.handleParallelNodeCompletion(ctx, node, output, parallelId)
|
||||
} else {
|
||||
this.handleRegularNodeCompletion(ctx, node, output)
|
||||
}
|
||||
@@ -229,17 +229,17 @@ export class NodeExecutionOrchestrator {
|
||||
this.state.setBlockOutput(node.id, output)
|
||||
}
|
||||
|
||||
private async handleParallelNodeCompletion(
|
||||
private handleParallelNodeCompletion(
|
||||
ctx: ExecutionContext,
|
||||
node: DAGNode,
|
||||
output: NormalizedBlockOutput,
|
||||
parallelId: string
|
||||
): Promise<void> {
|
||||
): void {
|
||||
const scope = this.parallelOrchestrator.getParallelScope(ctx, parallelId)
|
||||
if (!scope) {
|
||||
const parallelConfig = this.dag.parallelConfigs.get(parallelId)
|
||||
const nodesInParallel = parallelConfig?.nodes?.length || 1
|
||||
await this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
|
||||
this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
|
||||
}
|
||||
const allComplete = this.parallelOrchestrator.handleParallelBranchCompletion(
|
||||
ctx,
|
||||
@@ -248,7 +248,7 @@ export class NodeExecutionOrchestrator {
|
||||
output
|
||||
)
|
||||
if (allComplete) {
|
||||
await this.parallelOrchestrator.aggregateParallelResults(ctx, parallelId)
|
||||
this.parallelOrchestrator.aggregateParallelResults(ctx, parallelId)
|
||||
}
|
||||
|
||||
this.state.setBlockOutput(node.id, output)
|
||||
|
||||
@@ -1,142 +0,0 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import type { DAG } from '@/executor/dag/builder'
|
||||
import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types'
|
||||
import { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
|
||||
import type { ExecutionContext } from '@/executor/types'
|
||||
|
||||
vi.mock('@sim/logger', () => ({
|
||||
createLogger: () => ({
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
|
||||
function createDag(): DAG {
|
||||
return {
|
||||
nodes: new Map(),
|
||||
loopConfigs: new Map(),
|
||||
parallelConfigs: new Map([
|
||||
[
|
||||
'parallel-1',
|
||||
{
|
||||
id: 'parallel-1',
|
||||
nodes: ['task-1'],
|
||||
distribution: [],
|
||||
parallelType: 'collection',
|
||||
},
|
||||
],
|
||||
]),
|
||||
}
|
||||
}
|
||||
|
||||
function createState(): BlockStateWriter {
|
||||
return {
|
||||
setBlockOutput: vi.fn(),
|
||||
setBlockState: vi.fn(),
|
||||
deleteBlockState: vi.fn(),
|
||||
unmarkExecuted: vi.fn(),
|
||||
}
|
||||
}
|
||||
|
||||
function createContext(overrides: Partial<ExecutionContext> = {}): ExecutionContext {
|
||||
return {
|
||||
workflowId: 'workflow-1',
|
||||
workspaceId: 'workspace-1',
|
||||
executionId: 'execution-1',
|
||||
userId: 'user-1',
|
||||
blockStates: new Map(),
|
||||
executedBlocks: new Set(),
|
||||
blockLogs: [],
|
||||
metadata: { duration: 0 },
|
||||
environmentVariables: {},
|
||||
decisions: {
|
||||
router: new Map(),
|
||||
condition: new Map(),
|
||||
},
|
||||
completedLoops: new Set(),
|
||||
activeExecutionPath: new Set(),
|
||||
workflow: {
|
||||
version: '1',
|
||||
blocks: [
|
||||
{
|
||||
id: 'parallel-1',
|
||||
position: { x: 0, y: 0 },
|
||||
config: { tool: '', params: {} },
|
||||
inputs: {},
|
||||
outputs: {},
|
||||
metadata: { id: 'parallel', name: 'Parallel 1' },
|
||||
enabled: true,
|
||||
},
|
||||
],
|
||||
connections: [],
|
||||
loops: {},
|
||||
parallels: {},
|
||||
},
|
||||
...overrides,
|
||||
}
|
||||
}
|
||||
|
||||
describe('ParallelOrchestrator', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('awaits empty-subflow lifecycle callbacks before returning the empty scope', async () => {
|
||||
let releaseStart: (() => void) | undefined
|
||||
const onBlockStart = vi.fn(
|
||||
() =>
|
||||
new Promise<void>((resolve) => {
|
||||
releaseStart = resolve
|
||||
})
|
||||
)
|
||||
const onBlockComplete = vi.fn()
|
||||
const contextExtensions: ContextExtensions = {
|
||||
onBlockStart,
|
||||
onBlockComplete,
|
||||
}
|
||||
const orchestrator = new ParallelOrchestrator(
|
||||
createDag(),
|
||||
createState(),
|
||||
null,
|
||||
contextExtensions
|
||||
)
|
||||
const ctx = createContext()
|
||||
|
||||
const initializePromise = orchestrator.initializeParallelScope(ctx, 'parallel-1', 1)
|
||||
await Promise.resolve()
|
||||
|
||||
expect(onBlockStart).toHaveBeenCalledTimes(1)
|
||||
expect(onBlockComplete).not.toHaveBeenCalled()
|
||||
|
||||
releaseStart?.()
|
||||
const scope = await initializePromise
|
||||
|
||||
expect(onBlockComplete).toHaveBeenCalledTimes(1)
|
||||
expect(scope.isEmpty).toBe(true)
|
||||
})
|
||||
|
||||
it('swallows helper callback failures on empty parallel paths', async () => {
|
||||
const contextExtensions: ContextExtensions = {
|
||||
onBlockStart: vi.fn().mockRejectedValue(new Error('start failed')),
|
||||
onBlockComplete: vi.fn().mockRejectedValue(new Error('complete failed')),
|
||||
}
|
||||
const orchestrator = new ParallelOrchestrator(
|
||||
createDag(),
|
||||
createState(),
|
||||
null,
|
||||
contextExtensions
|
||||
)
|
||||
|
||||
await expect(
|
||||
orchestrator.initializeParallelScope(createContext(), 'parallel-1', 1)
|
||||
).resolves.toMatchObject({
|
||||
parallelId: 'parallel-1',
|
||||
isEmpty: true,
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -47,11 +47,11 @@ export class ParallelOrchestrator {
|
||||
private contextExtensions: ContextExtensions | null = null
|
||||
) {}
|
||||
|
||||
async initializeParallelScope(
|
||||
initializeParallelScope(
|
||||
ctx: ExecutionContext,
|
||||
parallelId: string,
|
||||
terminalNodesCount = 1
|
||||
): Promise<ParallelScope> {
|
||||
): ParallelScope {
|
||||
const parallelConfig = this.dag.parallelConfigs.get(parallelId)
|
||||
if (!parallelConfig) {
|
||||
throw new Error(`Parallel config not found: ${parallelId}`)
|
||||
@@ -69,7 +69,7 @@ export class ParallelOrchestrator {
|
||||
} catch (error) {
|
||||
const errorMessage = `Parallel Items did not resolve: ${error instanceof Error ? error.message : String(error)}`
|
||||
logger.error(errorMessage, { parallelId, distribution: parallelConfig.distribution })
|
||||
await this.addParallelErrorLog(ctx, parallelId, errorMessage, {
|
||||
this.addParallelErrorLog(ctx, parallelId, errorMessage, {
|
||||
distribution: parallelConfig.distribution,
|
||||
})
|
||||
this.setErrorScope(ctx, parallelId, errorMessage)
|
||||
@@ -83,7 +83,7 @@ export class ParallelOrchestrator {
|
||||
)
|
||||
if (branchError) {
|
||||
logger.error(branchError, { parallelId, branchCount })
|
||||
await this.addParallelErrorLog(ctx, parallelId, branchError, {
|
||||
this.addParallelErrorLog(ctx, parallelId, branchError, {
|
||||
distribution: parallelConfig.distribution,
|
||||
branchCount,
|
||||
})
|
||||
@@ -109,7 +109,7 @@ export class ParallelOrchestrator {
|
||||
|
||||
this.state.setBlockOutput(parallelId, { results: [] })
|
||||
|
||||
await emitEmptySubflowEvents(ctx, parallelId, 'parallel', this.contextExtensions)
|
||||
emitEmptySubflowEvents(ctx, parallelId, 'parallel', this.contextExtensions)
|
||||
|
||||
logger.info('Parallel scope initialized with empty distribution, skipping body', {
|
||||
parallelId,
|
||||
@@ -220,13 +220,13 @@ export class ParallelOrchestrator {
|
||||
return { branchCount: items.length, items }
|
||||
}
|
||||
|
||||
private async addParallelErrorLog(
|
||||
private addParallelErrorLog(
|
||||
ctx: ExecutionContext,
|
||||
parallelId: string,
|
||||
errorMessage: string,
|
||||
inputData?: any
|
||||
): Promise<void> {
|
||||
await addSubflowErrorLog(
|
||||
): void {
|
||||
addSubflowErrorLog(
|
||||
ctx,
|
||||
parallelId,
|
||||
'parallel',
|
||||
@@ -291,10 +291,7 @@ export class ParallelOrchestrator {
|
||||
return allComplete
|
||||
}
|
||||
|
||||
async aggregateParallelResults(
|
||||
ctx: ExecutionContext,
|
||||
parallelId: string
|
||||
): Promise<ParallelAggregationResult> {
|
||||
aggregateParallelResults(ctx: ExecutionContext, parallelId: string): ParallelAggregationResult {
|
||||
const scope = ctx.parallelExecutions?.get(parallelId)
|
||||
if (!scope) {
|
||||
logger.error('Parallel scope not found for aggregation', { parallelId })
|
||||
@@ -319,26 +316,19 @@ export class ParallelOrchestrator {
|
||||
const now = new Date().toISOString()
|
||||
const iterationContext = buildContainerIterationContext(ctx, parallelId)
|
||||
|
||||
try {
|
||||
await this.contextExtensions.onBlockComplete(
|
||||
parallelId,
|
||||
'Parallel',
|
||||
'parallel',
|
||||
{
|
||||
output,
|
||||
executionTime: 0,
|
||||
startedAt: now,
|
||||
executionOrder: getNextExecutionOrder(ctx),
|
||||
endedAt: now,
|
||||
},
|
||||
iterationContext
|
||||
)
|
||||
} catch (error) {
|
||||
logger.warn('Parallel completion callback failed', {
|
||||
parallelId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
this.contextExtensions.onBlockComplete(
|
||||
parallelId,
|
||||
'Parallel',
|
||||
'parallel',
|
||||
{
|
||||
output,
|
||||
executionTime: 0,
|
||||
startedAt: now,
|
||||
executionOrder: getNextExecutionOrder(ctx),
|
||||
endedAt: now,
|
||||
},
|
||||
iterationContext
|
||||
)
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { DEFAULTS, LOOP, PARALLEL, REFERENCE } from '@/executor/constants'
|
||||
import type { ContextExtensions } from '@/executor/execution/types'
|
||||
import { type BlockLog, type ExecutionContext, getNextExecutionOrder } from '@/executor/types'
|
||||
import { buildContainerIterationContext } from '@/executor/utils/iteration-context'
|
||||
import type { VariableResolver } from '@/executor/variables/resolver'
|
||||
|
||||
const logger = createLogger('SubflowUtils')
|
||||
|
||||
const BRANCH_PATTERN = new RegExp(`${PARALLEL.BRANCH.PREFIX}\\d+${PARALLEL.BRANCH.SUFFIX}$`)
|
||||
const BRANCH_INDEX_PATTERN = new RegExp(`${PARALLEL.BRANCH.PREFIX}(\\d+)${PARALLEL.BRANCH.SUFFIX}$`)
|
||||
const LOOP_SENTINEL_START_PATTERN = new RegExp(
|
||||
@@ -268,14 +265,14 @@ export function resolveArrayInput(
|
||||
/**
|
||||
* Creates and logs an error for a subflow (loop or parallel).
|
||||
*/
|
||||
export async function addSubflowErrorLog(
|
||||
export function addSubflowErrorLog(
|
||||
ctx: ExecutionContext,
|
||||
blockId: string,
|
||||
blockType: 'loop' | 'parallel',
|
||||
errorMessage: string,
|
||||
inputData: Record<string, any>,
|
||||
contextExtensions: ContextExtensions | null
|
||||
): Promise<void> {
|
||||
): void {
|
||||
const now = new Date().toISOString()
|
||||
const execOrder = getNextExecutionOrder(ctx)
|
||||
|
||||
@@ -299,34 +296,18 @@ export async function addSubflowErrorLog(
|
||||
ctx.blockLogs.push(blockLog)
|
||||
|
||||
if (contextExtensions?.onBlockStart) {
|
||||
try {
|
||||
await contextExtensions.onBlockStart(blockId, blockName, blockType, execOrder)
|
||||
} catch (error) {
|
||||
logger.warn('Subflow error start callback failed', {
|
||||
blockId,
|
||||
blockType,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
contextExtensions.onBlockStart(blockId, blockName, blockType, execOrder)
|
||||
}
|
||||
|
||||
if (contextExtensions?.onBlockComplete) {
|
||||
try {
|
||||
await contextExtensions.onBlockComplete(blockId, blockName, blockType, {
|
||||
input: inputData,
|
||||
output: { error: errorMessage },
|
||||
executionTime: 0,
|
||||
startedAt: now,
|
||||
executionOrder: execOrder,
|
||||
endedAt: now,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.warn('Subflow error completion callback failed', {
|
||||
blockId,
|
||||
blockType,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
contextExtensions.onBlockComplete(blockId, blockName, blockType, {
|
||||
input: inputData,
|
||||
output: { error: errorMessage },
|
||||
executionTime: 0,
|
||||
startedAt: now,
|
||||
executionOrder: execOrder,
|
||||
endedAt: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -335,12 +316,12 @@ export async function addSubflowErrorLog(
|
||||
* empty collection or false initial condition. This ensures the container block
|
||||
* appears in terminal logs, execution snapshots, and edge highlighting.
|
||||
*/
|
||||
export async function emitEmptySubflowEvents(
|
||||
export function emitEmptySubflowEvents(
|
||||
ctx: ExecutionContext,
|
||||
blockId: string,
|
||||
blockType: 'loop' | 'parallel',
|
||||
contextExtensions: ContextExtensions | null
|
||||
): Promise<void> {
|
||||
): void {
|
||||
const now = new Date().toISOString()
|
||||
const executionOrder = getNextExecutionOrder(ctx)
|
||||
const output = { results: [] }
|
||||
@@ -361,38 +342,22 @@ export async function emitEmptySubflowEvents(
|
||||
})
|
||||
|
||||
if (contextExtensions?.onBlockStart) {
|
||||
try {
|
||||
await contextExtensions.onBlockStart(blockId, blockName, blockType, executionOrder)
|
||||
} catch (error) {
|
||||
logger.warn('Empty subflow start callback failed', {
|
||||
blockId,
|
||||
blockType,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
contextExtensions.onBlockStart(blockId, blockName, blockType, executionOrder)
|
||||
}
|
||||
|
||||
if (contextExtensions?.onBlockComplete) {
|
||||
try {
|
||||
await contextExtensions.onBlockComplete(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
{
|
||||
output,
|
||||
executionTime: DEFAULTS.EXECUTION_TIME,
|
||||
startedAt: now,
|
||||
executionOrder,
|
||||
endedAt: now,
|
||||
},
|
||||
iterationContext
|
||||
)
|
||||
} catch (error) {
|
||||
logger.warn('Empty subflow completion callback failed', {
|
||||
blockId,
|
||||
blockType,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
contextExtensions.onBlockComplete(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
{
|
||||
output,
|
||||
executionTime: DEFAULTS.EXECUTION_TIME,
|
||||
startedAt: now,
|
||||
executionOrder,
|
||||
endedAt: now,
|
||||
},
|
||||
iterationContext
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,8 +7,7 @@ const logger = createLogger('AdminUsersQuery')
|
||||
export const adminUserKeys = {
|
||||
all: ['adminUsers'] as const,
|
||||
lists: () => [...adminUserKeys.all, 'list'] as const,
|
||||
list: (offset: number, limit: number, searchQuery: string) =>
|
||||
[...adminUserKeys.lists(), offset, limit, searchQuery] as const,
|
||||
list: (offset: number, limit: number) => [...adminUserKeys.lists(), offset, limit] as const,
|
||||
}
|
||||
|
||||
interface AdminUser {
|
||||
@@ -25,59 +24,31 @@ interface AdminUsersResponse {
|
||||
total: number
|
||||
}
|
||||
|
||||
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i
|
||||
|
||||
function mapUser(u: {
|
||||
id: string
|
||||
name: string
|
||||
email: string
|
||||
role?: string | null
|
||||
banned?: boolean | null
|
||||
banReason?: string | null
|
||||
}): AdminUser {
|
||||
return {
|
||||
id: u.id,
|
||||
name: u.name || '',
|
||||
email: u.email,
|
||||
role: u.role ?? 'user',
|
||||
banned: u.banned ?? false,
|
||||
banReason: u.banReason ?? null,
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchAdminUsers(
|
||||
offset: number,
|
||||
limit: number,
|
||||
searchQuery: string
|
||||
): Promise<AdminUsersResponse> {
|
||||
if (UUID_REGEX.test(searchQuery.trim())) {
|
||||
const { data, error } = await client.admin.getUser({ query: { id: searchQuery.trim() } })
|
||||
if (error) throw new Error(error.message ?? 'Failed to fetch user')
|
||||
if (!data) return { users: [], total: 0 }
|
||||
return { users: [mapUser(data)], total: 1 }
|
||||
}
|
||||
|
||||
async function fetchAdminUsers(offset: number, limit: number): Promise<AdminUsersResponse> {
|
||||
const { data, error } = await client.admin.listUsers({
|
||||
query: {
|
||||
limit,
|
||||
offset,
|
||||
searchField: 'email',
|
||||
searchValue: searchQuery,
|
||||
searchOperator: 'contains',
|
||||
},
|
||||
query: { limit, offset },
|
||||
})
|
||||
if (error) throw new Error(error.message ?? 'Failed to fetch users')
|
||||
if (error) {
|
||||
throw new Error(error.message ?? 'Failed to fetch users')
|
||||
}
|
||||
return {
|
||||
users: (data?.users ?? []).map(mapUser),
|
||||
users: (data?.users ?? []).map((u) => ({
|
||||
id: u.id,
|
||||
name: u.name || '',
|
||||
email: u.email,
|
||||
role: u.role ?? 'user',
|
||||
banned: u.banned ?? false,
|
||||
banReason: u.banReason ?? null,
|
||||
})),
|
||||
total: data?.total ?? 0,
|
||||
}
|
||||
}
|
||||
|
||||
export function useAdminUsers(offset: number, limit: number, searchQuery: string) {
|
||||
export function useAdminUsers(offset: number, limit: number, enabled: boolean) {
|
||||
return useQuery({
|
||||
queryKey: adminUserKeys.list(offset, limit, searchQuery),
|
||||
queryFn: () => fetchAdminUsers(offset, limit, searchQuery),
|
||||
enabled: searchQuery.length > 0,
|
||||
queryKey: adminUserKeys.list(offset, limit),
|
||||
queryFn: () => fetchAdminUsers(offset, limit),
|
||||
enabled,
|
||||
staleTime: 30 * 1000,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
|
||||
@@ -54,7 +54,6 @@ export interface TaskStoredMessage {
|
||||
id: string
|
||||
role: 'user' | 'assistant'
|
||||
content: string
|
||||
requestId?: string
|
||||
toolCalls?: TaskStoredToolCall[]
|
||||
contentBlocks?: TaskStoredContentBlock[]
|
||||
fileAttachments?: TaskStoredFileAttachment[]
|
||||
|
||||
@@ -113,15 +113,10 @@ export function useWorkflows(
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
syncRegistry &&
|
||||
scope === 'active' &&
|
||||
workspaceId &&
|
||||
(query.status === 'pending' || query.isPlaceholderData)
|
||||
) {
|
||||
if (syncRegistry && scope === 'active' && workspaceId && query.status === 'pending') {
|
||||
beginMetadataLoad(workspaceId)
|
||||
}
|
||||
}, [syncRegistry, scope, workspaceId, query.status, query.isPlaceholderData, beginMetadataLoad])
|
||||
}, [syncRegistry, scope, workspaceId, query.status, beginMetadataLoad])
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
@@ -129,20 +124,11 @@ export function useWorkflows(
|
||||
scope === 'active' &&
|
||||
workspaceId &&
|
||||
query.status === 'success' &&
|
||||
query.data &&
|
||||
!query.isPlaceholderData
|
||||
query.data
|
||||
) {
|
||||
completeMetadataLoad(workspaceId, query.data)
|
||||
}
|
||||
}, [
|
||||
syncRegistry,
|
||||
scope,
|
||||
workspaceId,
|
||||
query.status,
|
||||
query.data,
|
||||
query.isPlaceholderData,
|
||||
completeMetadataLoad,
|
||||
])
|
||||
}, [syncRegistry, scope, workspaceId, query.status, query.data, completeMetadataLoad])
|
||||
|
||||
useEffect(() => {
|
||||
if (syncRegistry && scope === 'active' && workspaceId && query.status === 'error') {
|
||||
|
||||
@@ -86,9 +86,14 @@ export function useCreateWorkspace() {
|
||||
const data = await response.json()
|
||||
return data.workspace as Workspace
|
||||
},
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: workspaceKeys.lists() })
|
||||
queryClient.invalidateQueries({ queryKey: workspaceKeys.adminLists() })
|
||||
onSuccess: (data) => {
|
||||
queryClient.invalidateQueries({ queryKey: workspaceKeys.all })
|
||||
if (data?.id) {
|
||||
queryClient.removeQueries({ queryKey: workspaceKeys.detail(data.id) })
|
||||
queryClient.removeQueries({ queryKey: workspaceKeys.settings(data.id) })
|
||||
queryClient.removeQueries({ queryKey: workspaceKeys.permissions(data.id) })
|
||||
queryClient.removeQueries({ queryKey: workspaceKeys.members(data.id) })
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -32,7 +32,6 @@ export function useAutosave({
|
||||
}: UseAutosaveOptions): UseAutosaveReturn {
|
||||
const [saveStatus, setSaveStatus] = useState<SaveStatus>('idle')
|
||||
const timerRef = useRef<ReturnType<typeof setTimeout>>(undefined)
|
||||
const idleTimerRef = useRef<ReturnType<typeof setTimeout>>(undefined)
|
||||
const savingRef = useRef(false)
|
||||
const onSaveRef = useRef(onSave)
|
||||
onSaveRef.current = onSave
|
||||
@@ -60,8 +59,6 @@ export function useAutosave({
|
||||
const remaining = Math.max(0, MIN_SAVING_DISPLAY_MS - elapsed)
|
||||
setTimeout(() => {
|
||||
setSaveStatus(nextStatus)
|
||||
clearTimeout(idleTimerRef.current)
|
||||
idleTimerRef.current = setTimeout(() => setSaveStatus('idle'), 2000)
|
||||
savingRef.current = false
|
||||
if (nextStatus !== 'error' && contentRef.current !== savedContentRef.current) {
|
||||
save()
|
||||
@@ -77,10 +74,16 @@ export function useAutosave({
|
||||
return () => clearTimeout(timerRef.current)
|
||||
}, [content, enabled, isDirty, delay, save])
|
||||
|
||||
useEffect(() => {
|
||||
if (saveStatus === 'saved' || saveStatus === 'error') {
|
||||
const t = setTimeout(() => setSaveStatus('idle'), 2000)
|
||||
return () => clearTimeout(t)
|
||||
}
|
||||
}, [saveStatus])
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
clearTimeout(timerRef.current)
|
||||
clearTimeout(idleTimerRef.current)
|
||||
if (contentRef.current !== savedContentRef.current && !savingRef.current) {
|
||||
onSaveRef.current().catch(() => {})
|
||||
}
|
||||
|
||||
@@ -119,22 +119,22 @@ export function useWebhookManagement({
|
||||
|
||||
const queryEnabled = useWebhookUrl && !isPreview && Boolean(workflowId && blockId)
|
||||
|
||||
// Reset sync flag when blockId changes or query becomes disabled (render-phase guard)
|
||||
const prevBlockIdRef = useRef(blockId)
|
||||
if (blockId !== prevBlockIdRef.current) {
|
||||
prevBlockIdRef.current = blockId
|
||||
syncedRef.current = false
|
||||
}
|
||||
if (!queryEnabled) {
|
||||
syncedRef.current = false
|
||||
}
|
||||
|
||||
const { data: webhook, isLoading: queryLoading } = useWebhookQuery(
|
||||
workflowId,
|
||||
blockId,
|
||||
queryEnabled
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
syncedRef.current = false
|
||||
}, [blockId])
|
||||
|
||||
useEffect(() => {
|
||||
if (!queryEnabled) {
|
||||
syncedRef.current = false
|
||||
}
|
||||
}, [queryEnabled])
|
||||
|
||||
useEffect(() => {
|
||||
if (!queryEnabled || syncedRef.current) return
|
||||
if (webhook === undefined) return
|
||||
|
||||
@@ -92,7 +92,6 @@ export function flushStreamingUpdates(set: StoreSet) {
|
||||
if (update) {
|
||||
return {
|
||||
...msg,
|
||||
requestId: update.requestId ?? msg.requestId,
|
||||
content: '',
|
||||
contentBlocks:
|
||||
update.contentBlocks.length > 0
|
||||
@@ -130,7 +129,6 @@ export function updateStreamingMessage(set: StoreSet, context: ClientStreamingCo
|
||||
const newMessages = [...messages]
|
||||
newMessages[messages.length - 1] = {
|
||||
...lastMessage,
|
||||
requestId: lastMessageUpdate.requestId ?? lastMessage.requestId,
|
||||
content: '',
|
||||
contentBlocks:
|
||||
lastMessageUpdate.contentBlocks.length > 0
|
||||
@@ -145,7 +143,6 @@ export function updateStreamingMessage(set: StoreSet, context: ClientStreamingCo
|
||||
if (update) {
|
||||
return {
|
||||
...msg,
|
||||
requestId: update.requestId ?? msg.requestId,
|
||||
content: '',
|
||||
contentBlocks:
|
||||
update.contentBlocks.length > 0
|
||||
@@ -432,12 +429,6 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
writeActiveStreamToStorage(updatedStream)
|
||||
}
|
||||
},
|
||||
request_id: (data, context) => {
|
||||
const requestId = typeof data.data === 'string' ? data.data : undefined
|
||||
if (requestId) {
|
||||
context.requestId = requestId
|
||||
}
|
||||
},
|
||||
title_updated: (_data, _context, get, set) => {
|
||||
const title = _data.title
|
||||
if (!title) return
|
||||
|
||||
@@ -22,7 +22,6 @@ export interface ClientContentBlock {
|
||||
|
||||
export interface StreamingContext {
|
||||
messageId: string
|
||||
requestId?: string
|
||||
accumulatedContent: string
|
||||
contentBlocks: ClientContentBlock[]
|
||||
currentTextBlock: ClientContentBlock | null
|
||||
|
||||
@@ -141,10 +141,6 @@ export function serializeMessagesForDB(
|
||||
timestamp,
|
||||
}
|
||||
|
||||
if (msg.requestId) {
|
||||
serialized.requestId = msg.requestId
|
||||
}
|
||||
|
||||
if (Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0) {
|
||||
serialized.contentBlocks = deepClone(msg.contentBlocks)
|
||||
}
|
||||
|
||||
@@ -76,7 +76,6 @@ export async function orchestrateCopilotStream(
|
||||
contentBlocks: context.contentBlocks,
|
||||
toolCalls: buildToolCallSummaries(context),
|
||||
chatId: context.chatId,
|
||||
requestId: context.requestId,
|
||||
errors: context.errors.length ? context.errors : undefined,
|
||||
usage: context.usage,
|
||||
cost: context.cost,
|
||||
|
||||
@@ -187,12 +187,6 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
execContext.chatId = chatId
|
||||
}
|
||||
},
|
||||
request_id: (event, context) => {
|
||||
const rid = typeof event.data === 'string' ? event.data : undefined
|
||||
if (rid) {
|
||||
context.requestId = rid
|
||||
}
|
||||
},
|
||||
title_updated: () => {},
|
||||
tool_result: (event, context) => {
|
||||
const data = getEventData(event)
|
||||
|
||||
@@ -2,7 +2,6 @@ import type { MothershipResource } from '@/lib/copilot/resource-types'
|
||||
|
||||
export type SSEEventType =
|
||||
| 'chat_id'
|
||||
| 'request_id'
|
||||
| 'title_updated'
|
||||
| 'content'
|
||||
| 'reasoning'
|
||||
@@ -89,7 +88,6 @@ export interface ContentBlock {
|
||||
|
||||
export interface StreamingContext {
|
||||
chatId?: string
|
||||
requestId?: string
|
||||
messageId: string
|
||||
accumulatedContent: string
|
||||
contentBlocks: ContentBlock[]
|
||||
@@ -156,7 +154,6 @@ export interface OrchestratorResult {
|
||||
contentBlocks: ContentBlock[]
|
||||
toolCalls: ToolCallSummary[]
|
||||
chatId?: string
|
||||
requestId?: string
|
||||
error?: string
|
||||
errors?: string[]
|
||||
usage?: { prompt: number; completion: number }
|
||||
|
||||
@@ -202,28 +202,15 @@ export function getWorkflowExecutionCSPPolicy(): string {
|
||||
}
|
||||
|
||||
/**
|
||||
* Shared CSP for embeddable pages (chat, forms)
|
||||
* CSP for embeddable form pages
|
||||
* Allows embedding in iframes from any origin while maintaining other security policies
|
||||
*/
|
||||
function getEmbedCSPPolicy(): string {
|
||||
return buildCSPString({
|
||||
export function getFormEmbedCSPPolicy(): string {
|
||||
const basePolicy = buildCSPString({
|
||||
...buildTimeCSPDirectives,
|
||||
'frame-ancestors': ['*'],
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* CSP for embeddable chat pages
|
||||
*/
|
||||
export function getChatEmbedCSPPolicy(): string {
|
||||
return getEmbedCSPPolicy()
|
||||
}
|
||||
|
||||
/**
|
||||
* CSP for embeddable form pages
|
||||
*/
|
||||
export function getFormEmbedCSPPolicy(): string {
|
||||
return getEmbedCSPPolicy()
|
||||
return basePolicy
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { databaseMock, loggerMock } from '@sim/testing'
|
||||
import { beforeEach, describe, expect, test, vi } from 'vitest'
|
||||
import { ExecutionLogger } from '@/lib/logs/execution/logger'
|
||||
import { ExecutionLogger } from './logger'
|
||||
|
||||
vi.mock('@sim/db', () => databaseMock)
|
||||
|
||||
@@ -112,7 +112,7 @@ describe('ExecutionLogger', () => {
|
||||
expect(typeof logger.getWorkflowExecution).toBe('function')
|
||||
})
|
||||
|
||||
test('preserves correlation and diagnostics when execution completes', () => {
|
||||
test('preserves start correlation data when execution completes', () => {
|
||||
const loggerInstance = new ExecutionLogger() as any
|
||||
|
||||
const completedData = loggerInstance.buildCompletedExecutionData({
|
||||
@@ -140,24 +140,9 @@ describe('ExecutionLogger', () => {
|
||||
},
|
||||
},
|
||||
},
|
||||
lastStartedBlock: {
|
||||
blockId: 'block-start',
|
||||
blockName: 'Start',
|
||||
blockType: 'agent',
|
||||
startedAt: '2025-01-01T00:00:00.000Z',
|
||||
},
|
||||
lastCompletedBlock: {
|
||||
blockId: 'block-end',
|
||||
blockName: 'Finish',
|
||||
blockType: 'api',
|
||||
endedAt: '2025-01-01T00:00:05.000Z',
|
||||
success: true,
|
||||
},
|
||||
},
|
||||
traceSpans: [],
|
||||
finalOutput: { ok: true },
|
||||
finalizationPath: 'completed',
|
||||
completionFailure: 'fallback failure',
|
||||
executionCost: {
|
||||
tokens: { input: 0, output: 0, total: 0 },
|
||||
models: {},
|
||||
@@ -176,12 +161,6 @@ describe('ExecutionLogger', () => {
|
||||
})
|
||||
expect(completedData.correlation).toEqual(completedData.trigger?.data?.correlation)
|
||||
expect(completedData.finalOutput).toEqual({ ok: true })
|
||||
expect(completedData.lastStartedBlock?.blockId).toBe('block-start')
|
||||
expect(completedData.lastCompletedBlock?.blockId).toBe('block-end')
|
||||
expect(completedData.finalizationPath).toBe('completed')
|
||||
expect(completedData.completionFailure).toBe('fallback failure')
|
||||
expect(completedData.hasTraceSpans).toBe(false)
|
||||
expect(completedData.traceSpanCount).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -27,7 +27,6 @@ import { snapshotService } from '@/lib/logs/execution/snapshot/service'
|
||||
import type {
|
||||
BlockOutputData,
|
||||
ExecutionEnvironment,
|
||||
ExecutionFinalizationPath,
|
||||
ExecutionTrigger,
|
||||
ExecutionLoggerService as IExecutionLoggerService,
|
||||
TraceSpan,
|
||||
@@ -50,21 +49,11 @@ export interface ToolCall {
|
||||
|
||||
const logger = createLogger('ExecutionLogger')
|
||||
|
||||
function countTraceSpans(traceSpans?: TraceSpan[]): number {
|
||||
if (!Array.isArray(traceSpans) || traceSpans.length === 0) {
|
||||
return 0
|
||||
}
|
||||
|
||||
return traceSpans.reduce((count, span) => count + 1 + countTraceSpans(span.children), 0)
|
||||
}
|
||||
|
||||
export class ExecutionLogger implements IExecutionLoggerService {
|
||||
private buildCompletedExecutionData(params: {
|
||||
existingExecutionData?: WorkflowExecutionLog['executionData']
|
||||
traceSpans?: TraceSpan[]
|
||||
finalOutput: BlockOutputData
|
||||
finalizationPath?: ExecutionFinalizationPath
|
||||
completionFailure?: string
|
||||
executionCost: {
|
||||
tokens: {
|
||||
input: number
|
||||
@@ -75,16 +64,7 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
}
|
||||
executionState?: SerializableExecutionState
|
||||
}): WorkflowExecutionLog['executionData'] {
|
||||
const {
|
||||
existingExecutionData,
|
||||
traceSpans,
|
||||
finalOutput,
|
||||
finalizationPath,
|
||||
completionFailure,
|
||||
executionCost,
|
||||
executionState,
|
||||
} = params
|
||||
const traceSpanCount = countTraceSpans(traceSpans)
|
||||
const { existingExecutionData, traceSpans, finalOutput, executionCost, executionState } = params
|
||||
|
||||
return {
|
||||
...(existingExecutionData?.environment
|
||||
@@ -98,17 +78,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
existingExecutionData?.trigger?.data?.correlation,
|
||||
}
|
||||
: {}),
|
||||
...(existingExecutionData?.error ? { error: existingExecutionData.error } : {}),
|
||||
...(existingExecutionData?.lastStartedBlock
|
||||
? { lastStartedBlock: existingExecutionData.lastStartedBlock }
|
||||
: {}),
|
||||
...(existingExecutionData?.lastCompletedBlock
|
||||
? { lastCompletedBlock: existingExecutionData.lastCompletedBlock }
|
||||
: {}),
|
||||
...(completionFailure ? { completionFailure } : {}),
|
||||
...(finalizationPath ? { finalizationPath } : {}),
|
||||
hasTraceSpans: traceSpanCount > 0,
|
||||
traceSpanCount,
|
||||
traceSpans,
|
||||
finalOutput,
|
||||
tokens: {
|
||||
@@ -204,8 +173,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
environment,
|
||||
trigger,
|
||||
...(trigger.data?.correlation ? { correlation: trigger.data.correlation } : {}),
|
||||
hasTraceSpans: false,
|
||||
traceSpanCount: 0,
|
||||
},
|
||||
cost: {
|
||||
total: BASE_EXECUTION_CHARGE,
|
||||
@@ -265,8 +232,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
traceSpans?: TraceSpan[]
|
||||
workflowInput?: any
|
||||
executionState?: SerializableExecutionState
|
||||
finalizationPath?: ExecutionFinalizationPath
|
||||
completionFailure?: string
|
||||
isResume?: boolean
|
||||
level?: 'info' | 'error'
|
||||
status?: 'completed' | 'failed' | 'cancelled' | 'pending'
|
||||
@@ -280,8 +245,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
traceSpans,
|
||||
workflowInput,
|
||||
executionState,
|
||||
finalizationPath,
|
||||
completionFailure,
|
||||
isResume,
|
||||
level: levelOverride,
|
||||
status: statusOverride,
|
||||
@@ -352,16 +315,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
? Math.max(0, Math.round(rawDurationMs))
|
||||
: 0
|
||||
|
||||
const completedExecutionData = this.buildCompletedExecutionData({
|
||||
existingExecutionData,
|
||||
traceSpans: redactedTraceSpans,
|
||||
finalOutput: redactedFinalOutput,
|
||||
finalizationPath,
|
||||
completionFailure,
|
||||
executionCost,
|
||||
executionState,
|
||||
})
|
||||
|
||||
const [updatedLog] = await db
|
||||
.update(workflowExecutionLogs)
|
||||
.set({
|
||||
@@ -370,7 +323,13 @@ export class ExecutionLogger implements IExecutionLoggerService {
|
||||
endedAt: new Date(endedAt),
|
||||
totalDurationMs: totalDuration,
|
||||
files: executionFiles.length > 0 ? executionFiles : null,
|
||||
executionData: completedExecutionData,
|
||||
executionData: this.buildCompletedExecutionData({
|
||||
existingExecutionData,
|
||||
traceSpans: redactedTraceSpans,
|
||||
finalOutput: redactedFinalOutput,
|
||||
executionCost,
|
||||
executionState,
|
||||
}),
|
||||
cost: executionCost,
|
||||
})
|
||||
.where(eq(workflowExecutionLogs.executionId, executionId))
|
||||
|
||||
@@ -1,48 +1,11 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const dbMocks = vi.hoisted(() => {
|
||||
const selectLimit = vi.fn()
|
||||
const selectWhere = vi.fn()
|
||||
const selectFrom = vi.fn()
|
||||
const select = vi.fn()
|
||||
const updateWhere = vi.fn()
|
||||
const updateSet = vi.fn()
|
||||
const update = vi.fn()
|
||||
const execute = vi.fn()
|
||||
const eq = vi.fn()
|
||||
const sql = vi.fn((strings: TemplateStringsArray, ...values: unknown[]) => ({ strings, values }))
|
||||
|
||||
select.mockReturnValue({ from: selectFrom })
|
||||
selectFrom.mockReturnValue({ where: selectWhere })
|
||||
selectWhere.mockReturnValue({ limit: selectLimit })
|
||||
|
||||
update.mockReturnValue({ set: updateSet })
|
||||
updateSet.mockReturnValue({ where: updateWhere })
|
||||
|
||||
return {
|
||||
select,
|
||||
selectFrom,
|
||||
selectWhere,
|
||||
selectLimit,
|
||||
update,
|
||||
updateSet,
|
||||
updateWhere,
|
||||
execute,
|
||||
eq,
|
||||
sql,
|
||||
}
|
||||
})
|
||||
|
||||
const { completeWorkflowExecutionMock } = vi.hoisted(() => ({
|
||||
completeWorkflowExecutionMock: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@sim/db', () => ({
|
||||
db: {
|
||||
select: dbMocks.select,
|
||||
update: dbMocks.update,
|
||||
execute: dbMocks.execute,
|
||||
},
|
||||
db: {},
|
||||
}))
|
||||
|
||||
vi.mock('@sim/db/schema', () => ({
|
||||
@@ -59,8 +22,8 @@ vi.mock('@sim/logger', () => ({
|
||||
}))
|
||||
|
||||
vi.mock('drizzle-orm', () => ({
|
||||
eq: dbMocks.eq,
|
||||
sql: dbMocks.sql,
|
||||
eq: vi.fn(),
|
||||
sql: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/logs/execution/logger', () => ({
|
||||
@@ -93,9 +56,6 @@ import { LoggingSession } from './logging-session'
|
||||
describe('LoggingSession completion retries', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
dbMocks.selectLimit.mockResolvedValue([{ executionData: {} }])
|
||||
dbMocks.updateWhere.mockResolvedValue(undefined)
|
||||
dbMocks.execute.mockResolvedValue(undefined)
|
||||
})
|
||||
|
||||
it('keeps completion best-effort when a later error completion retries after full completion and fallback both fail', async () => {
|
||||
@@ -126,6 +86,7 @@ describe('LoggingSession completion retries', () => {
|
||||
.mockRejectedValueOnce(new Error('cost only failed'))
|
||||
|
||||
await expect(session.safeComplete({ finalOutput: { ok: true } })).resolves.toBeUndefined()
|
||||
|
||||
await expect(session.safeComplete({ finalOutput: { ok: true } })).resolves.toBeUndefined()
|
||||
|
||||
expect(completeWorkflowExecutionMock).toHaveBeenCalledTimes(2)
|
||||
@@ -157,64 +118,6 @@ describe('LoggingSession completion retries', () => {
|
||||
expect(session.hasCompleted()).toBe(true)
|
||||
})
|
||||
|
||||
it('preserves successful final output during fallback completion', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-5', 'api', 'req-1')
|
||||
|
||||
completeWorkflowExecutionMock
|
||||
.mockRejectedValueOnce(new Error('success finalize failed'))
|
||||
.mockResolvedValueOnce({})
|
||||
|
||||
await expect(
|
||||
session.safeComplete({ finalOutput: { ok: true, stage: 'done' } })
|
||||
).resolves.toBeUndefined()
|
||||
|
||||
expect(completeWorkflowExecutionMock).toHaveBeenLastCalledWith(
|
||||
expect.objectContaining({
|
||||
executionId: 'execution-5',
|
||||
finalOutput: { ok: true, stage: 'done' },
|
||||
finalizationPath: 'fallback_completed',
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('preserves accumulated cost during fallback completion', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-6', 'api', 'req-1') as any
|
||||
|
||||
session.accumulatedCost = {
|
||||
total: 12,
|
||||
input: 5,
|
||||
output: 7,
|
||||
tokens: { input: 11, output: 13, total: 24 },
|
||||
models: {
|
||||
'test-model': {
|
||||
input: 5,
|
||||
output: 7,
|
||||
total: 12,
|
||||
tokens: { input: 11, output: 13, total: 24 },
|
||||
},
|
||||
},
|
||||
}
|
||||
session.costFlushed = true
|
||||
|
||||
completeWorkflowExecutionMock
|
||||
.mockRejectedValueOnce(new Error('success finalize failed'))
|
||||
.mockResolvedValueOnce({})
|
||||
|
||||
await expect(session.safeComplete({ finalOutput: { ok: true } })).resolves.toBeUndefined()
|
||||
|
||||
expect(completeWorkflowExecutionMock).toHaveBeenLastCalledWith(
|
||||
expect.objectContaining({
|
||||
executionId: 'execution-6',
|
||||
costSummary: expect.objectContaining({
|
||||
totalCost: 12,
|
||||
totalInputCost: 5,
|
||||
totalOutputCost: 7,
|
||||
totalTokens: 24,
|
||||
}),
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('persists failed error semantics when completeWithError receives non-error trace spans', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-4', 'api', 'req-1')
|
||||
const traceSpans = [
|
||||
@@ -245,8 +148,6 @@ describe('LoggingSession completion retries', () => {
|
||||
traceSpans,
|
||||
level: 'error',
|
||||
status: 'failed',
|
||||
finalizationPath: 'force_failed',
|
||||
completionFailure: 'persist me as failed',
|
||||
})
|
||||
)
|
||||
})
|
||||
@@ -295,168 +196,4 @@ describe('LoggingSession completion retries', () => {
|
||||
expect(session.hasCompleted()).toBe(true)
|
||||
expect(completeWorkflowExecutionMock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it('persists last started block independently from cost accumulation', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1')
|
||||
|
||||
await session.onBlockStart('block-1', 'Fetch', 'api', '2025-01-01T00:00:00.000Z')
|
||||
|
||||
expect(dbMocks.select).not.toHaveBeenCalled()
|
||||
expect(dbMocks.execute).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('enforces started marker monotonicity in the database write path', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1')
|
||||
|
||||
await session.onBlockStart('block-1', 'Fetch', 'api', '2025-01-01T00:00:00.000Z')
|
||||
|
||||
expect(dbMocks.sql).toHaveBeenCalled()
|
||||
expect(dbMocks.execute).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('allows same-millisecond started markers to replace the prior marker', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1')
|
||||
|
||||
await session.onBlockStart('block-1', 'Fetch', 'api', '2025-01-01T00:00:00.000Z')
|
||||
|
||||
const queryCall = dbMocks.sql.mock.calls.at(-1)
|
||||
expect(queryCall).toBeDefined()
|
||||
|
||||
const [query] = queryCall!
|
||||
expect(Array.from(query).join(' ')).toContain('<=')
|
||||
})
|
||||
|
||||
it('persists last completed block for zero-cost outputs', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1')
|
||||
|
||||
await session.onBlockComplete('block-2', 'Transform', 'function', {
|
||||
endedAt: '2025-01-01T00:00:01.000Z',
|
||||
output: { value: true },
|
||||
})
|
||||
|
||||
expect(dbMocks.select).not.toHaveBeenCalled()
|
||||
expect(dbMocks.execute).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('allows same-millisecond completed markers to replace the prior marker', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1')
|
||||
|
||||
await session.onBlockComplete('block-2', 'Transform', 'function', {
|
||||
endedAt: '2025-01-01T00:00:01.000Z',
|
||||
output: { value: true },
|
||||
})
|
||||
|
||||
const queryCall = dbMocks.sql.mock.calls.at(-1)
|
||||
expect(queryCall).toBeDefined()
|
||||
|
||||
const [query] = queryCall!
|
||||
expect(Array.from(query).join(' ')).toContain('<=')
|
||||
})
|
||||
|
||||
it('drains pending lifecycle writes before terminal completion', async () => {
|
||||
let releasePersist: (() => void) | undefined
|
||||
const persistPromise = new Promise<void>((resolve) => {
|
||||
releasePersist = resolve
|
||||
})
|
||||
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1') as any
|
||||
session.persistLastStartedBlock = vi.fn(() => persistPromise)
|
||||
session.complete = vi.fn().mockResolvedValue(undefined)
|
||||
|
||||
const startPromise = session.onBlockStart('block-1', 'Fetch', 'api', '2025-01-01T00:00:00.000Z')
|
||||
const completionPromise = session.safeComplete({ finalOutput: { ok: true } })
|
||||
|
||||
await Promise.resolve()
|
||||
|
||||
expect(session.complete).not.toHaveBeenCalled()
|
||||
|
||||
releasePersist?.()
|
||||
|
||||
await startPromise
|
||||
await completionPromise
|
||||
|
||||
expect(session.persistLastStartedBlock).toHaveBeenCalledTimes(1)
|
||||
expect(session.complete).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('drains fire-and-forget cost flushes before terminal completion', async () => {
|
||||
let releaseFlush: (() => void) | undefined
|
||||
const flushPromise = new Promise<void>((resolve) => {
|
||||
releaseFlush = resolve
|
||||
})
|
||||
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1') as any
|
||||
session.flushAccumulatedCost = vi.fn(() => flushPromise)
|
||||
session.complete = vi.fn().mockResolvedValue(undefined)
|
||||
|
||||
await session.onBlockComplete('block-2', 'Transform', 'function', {
|
||||
endedAt: '2025-01-01T00:00:01.000Z',
|
||||
output: { value: true },
|
||||
cost: { total: 1, input: 1, output: 0 },
|
||||
tokens: { input: 1, output: 0, total: 1 },
|
||||
model: 'test-model',
|
||||
})
|
||||
|
||||
const completionPromise = session.safeComplete({ finalOutput: { ok: true } })
|
||||
|
||||
await Promise.resolve()
|
||||
|
||||
expect(session.complete).not.toHaveBeenCalled()
|
||||
|
||||
releaseFlush?.()
|
||||
|
||||
await completionPromise
|
||||
|
||||
expect(session.flushAccumulatedCost).toHaveBeenCalledTimes(1)
|
||||
expect(session.complete).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('keeps draining when new progress writes arrive during drain', async () => {
|
||||
let releaseFirst: (() => void) | undefined
|
||||
let releaseSecond: (() => void) | undefined
|
||||
const firstPromise = new Promise<void>((resolve) => {
|
||||
releaseFirst = resolve
|
||||
})
|
||||
const secondPromise = new Promise<void>((resolve) => {
|
||||
releaseSecond = resolve
|
||||
})
|
||||
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1') as any
|
||||
|
||||
void session.trackProgressWrite(firstPromise)
|
||||
|
||||
const drainPromise = session.drainPendingProgressWrites()
|
||||
|
||||
await Promise.resolve()
|
||||
|
||||
void session.trackProgressWrite(secondPromise)
|
||||
releaseFirst?.()
|
||||
|
||||
await Promise.resolve()
|
||||
|
||||
let drained = false
|
||||
void drainPromise.then(() => {
|
||||
drained = true
|
||||
})
|
||||
|
||||
await Promise.resolve()
|
||||
expect(drained).toBe(false)
|
||||
|
||||
releaseSecond?.()
|
||||
await drainPromise
|
||||
|
||||
expect(session.pendingProgressWrites.size).toBe(0)
|
||||
})
|
||||
|
||||
it('marks pause completion as terminal and prevents duplicate pause finalization', async () => {
|
||||
const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1') as any
|
||||
session.completeExecutionWithFinalization = vi.fn().mockResolvedValue(undefined)
|
||||
|
||||
await session.completeWithPause({ workflowInput: { ok: true } })
|
||||
await session.completeWithPause({ workflowInput: { ok: true } })
|
||||
|
||||
expect(session.completeExecutionWithFinalization).toHaveBeenCalledTimes(1)
|
||||
expect(session.completed).toBe(true)
|
||||
expect(session.completing).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -13,9 +13,6 @@ import {
|
||||
} from '@/lib/logs/execution/logging-factory'
|
||||
import type {
|
||||
ExecutionEnvironment,
|
||||
ExecutionFinalizationPath,
|
||||
ExecutionLastCompletedBlock,
|
||||
ExecutionLastStartedBlock,
|
||||
ExecutionTrigger,
|
||||
TraceSpan,
|
||||
WorkflowState,
|
||||
@@ -26,46 +23,6 @@ type TriggerData = Record<string, unknown> & {
|
||||
correlation?: NonNullable<ExecutionTrigger['data']>['correlation']
|
||||
}
|
||||
|
||||
function buildStartedMarkerPersistenceQuery(params: {
|
||||
executionId: string
|
||||
marker: ExecutionLastStartedBlock
|
||||
}) {
|
||||
const markerJson = JSON.stringify(params.marker)
|
||||
|
||||
return sql`UPDATE workflow_execution_logs
|
||||
SET execution_data = jsonb_set(
|
||||
COALESCE(execution_data, '{}'::jsonb),
|
||||
'{lastStartedBlock}',
|
||||
${markerJson}::jsonb,
|
||||
true
|
||||
)
|
||||
WHERE execution_id = ${params.executionId}
|
||||
AND COALESCE(
|
||||
jsonb_extract_path_text(COALESCE(execution_data, '{}'::jsonb), 'lastStartedBlock', 'startedAt'),
|
||||
''
|
||||
) <= ${params.marker.startedAt}`
|
||||
}
|
||||
|
||||
function buildCompletedMarkerPersistenceQuery(params: {
|
||||
executionId: string
|
||||
marker: ExecutionLastCompletedBlock
|
||||
}) {
|
||||
const markerJson = JSON.stringify(params.marker)
|
||||
|
||||
return sql`UPDATE workflow_execution_logs
|
||||
SET execution_data = jsonb_set(
|
||||
COALESCE(execution_data, '{}'::jsonb),
|
||||
'{lastCompletedBlock}',
|
||||
${markerJson}::jsonb,
|
||||
true
|
||||
)
|
||||
WHERE execution_id = ${params.executionId}
|
||||
AND COALESCE(
|
||||
jsonb_extract_path_text(COALESCE(execution_data, '{}'::jsonb), 'lastCompletedBlock', 'endedAt'),
|
||||
''
|
||||
) <= ${params.marker.endedAt}`
|
||||
}
|
||||
|
||||
const logger = createLogger('LoggingSession')
|
||||
|
||||
type CompletionAttempt = 'complete' | 'error' | 'cancelled' | 'paused'
|
||||
@@ -152,7 +109,6 @@ export class LoggingSession {
|
||||
tokens: { input: 0, output: 0, total: 0 },
|
||||
models: {},
|
||||
}
|
||||
private pendingProgressWrites = new Set<Promise<void>>()
|
||||
private costFlushed = false
|
||||
private postExecutionPromise: Promise<void> | null = null
|
||||
|
||||
@@ -168,132 +124,12 @@ export class LoggingSession {
|
||||
this.requestId = requestId
|
||||
}
|
||||
|
||||
async onBlockStart(
|
||||
blockId: string,
|
||||
blockName: string,
|
||||
blockType: string,
|
||||
startedAt: string
|
||||
): Promise<void> {
|
||||
await this.trackProgressWrite(
|
||||
this.persistLastStartedBlock({
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
startedAt,
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
private async persistLastStartedBlock(marker: ExecutionLastStartedBlock): Promise<void> {
|
||||
try {
|
||||
await db.execute(
|
||||
buildStartedMarkerPersistenceQuery({
|
||||
executionId: this.executionId,
|
||||
marker,
|
||||
})
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to persist last started block for execution ${this.executionId}:`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private async persistLastCompletedBlock(marker: ExecutionLastCompletedBlock): Promise<void> {
|
||||
try {
|
||||
await db.execute(
|
||||
buildCompletedMarkerPersistenceQuery({
|
||||
executionId: this.executionId,
|
||||
marker,
|
||||
})
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to persist last completed block for execution ${this.executionId}:`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private async trackProgressWrite(writePromise: Promise<void>): Promise<void> {
|
||||
this.pendingProgressWrites.add(writePromise)
|
||||
|
||||
try {
|
||||
await writePromise
|
||||
} finally {
|
||||
this.pendingProgressWrites.delete(writePromise)
|
||||
}
|
||||
}
|
||||
|
||||
private async drainPendingProgressWrites(): Promise<void> {
|
||||
while (this.pendingProgressWrites.size > 0) {
|
||||
await Promise.allSettled(Array.from(this.pendingProgressWrites))
|
||||
}
|
||||
}
|
||||
|
||||
private async completeExecutionWithFinalization(params: {
|
||||
endedAt: string
|
||||
totalDurationMs: number
|
||||
costSummary: {
|
||||
totalCost: number
|
||||
totalInputCost: number
|
||||
totalOutputCost: number
|
||||
totalTokens: number
|
||||
totalPromptTokens: number
|
||||
totalCompletionTokens: number
|
||||
baseExecutionCharge: number
|
||||
modelCost: number
|
||||
models: Record<
|
||||
string,
|
||||
{
|
||||
input: number
|
||||
output: number
|
||||
total: number
|
||||
tokens: { input: number; output: number; total: number }
|
||||
}
|
||||
>
|
||||
}
|
||||
finalOutput: Record<string, unknown>
|
||||
traceSpans: TraceSpan[]
|
||||
workflowInput?: unknown
|
||||
executionState?: SerializableExecutionState
|
||||
finalizationPath: ExecutionFinalizationPath
|
||||
completionFailure?: string
|
||||
level?: 'info' | 'error'
|
||||
status?: 'completed' | 'failed' | 'cancelled' | 'pending'
|
||||
}): Promise<void> {
|
||||
await executionLogger.completeWorkflowExecution({
|
||||
executionId: this.executionId,
|
||||
endedAt: params.endedAt,
|
||||
totalDurationMs: params.totalDurationMs,
|
||||
costSummary: params.costSummary,
|
||||
finalOutput: params.finalOutput,
|
||||
traceSpans: params.traceSpans,
|
||||
workflowInput: params.workflowInput,
|
||||
executionState: params.executionState,
|
||||
finalizationPath: params.finalizationPath,
|
||||
completionFailure: params.completionFailure,
|
||||
isResume: this.isResume,
|
||||
level: params.level,
|
||||
status: params.status,
|
||||
})
|
||||
}
|
||||
|
||||
async onBlockComplete(
|
||||
blockId: string,
|
||||
blockName: string,
|
||||
blockType: string,
|
||||
output: any
|
||||
): Promise<void> {
|
||||
await this.trackProgressWrite(
|
||||
this.persistLastCompletedBlock({
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
endedAt: output?.endedAt || new Date().toISOString(),
|
||||
success: !output?.output?.error,
|
||||
})
|
||||
)
|
||||
|
||||
if (!output?.cost || typeof output.cost.total !== 'number' || output.cost.total <= 0) {
|
||||
return
|
||||
}
|
||||
@@ -329,7 +165,7 @@ export class LoggingSession {
|
||||
}
|
||||
}
|
||||
|
||||
void this.trackProgressWrite(this.flushAccumulatedCost())
|
||||
await this.flushAccumulatedCost()
|
||||
}
|
||||
|
||||
private async flushAccumulatedCost(): Promise<void> {
|
||||
@@ -440,7 +276,8 @@ export class LoggingSession {
|
||||
const endTime = endedAt || new Date().toISOString()
|
||||
const duration = totalDurationMs || 0
|
||||
|
||||
await this.completeExecutionWithFinalization({
|
||||
await executionLogger.completeWorkflowExecution({
|
||||
executionId: this.executionId,
|
||||
endedAt: endTime,
|
||||
totalDurationMs: duration,
|
||||
costSummary,
|
||||
@@ -448,7 +285,7 @@ export class LoggingSession {
|
||||
traceSpans: traceSpans || [],
|
||||
workflowInput,
|
||||
executionState,
|
||||
finalizationPath: 'completed',
|
||||
isResume: this.isResume,
|
||||
})
|
||||
|
||||
this.completed = true
|
||||
@@ -566,7 +403,8 @@ export class LoggingSession {
|
||||
|
||||
const spans = hasProvidedSpans ? traceSpans : [errorSpan]
|
||||
|
||||
await this.completeExecutionWithFinalization({
|
||||
await executionLogger.completeWorkflowExecution({
|
||||
executionId: this.executionId,
|
||||
endedAt: endTime.toISOString(),
|
||||
totalDurationMs: Math.max(1, durationMs),
|
||||
costSummary,
|
||||
@@ -574,8 +412,6 @@ export class LoggingSession {
|
||||
traceSpans: spans,
|
||||
level: 'error',
|
||||
status: 'failed',
|
||||
finalizationPath: 'force_failed',
|
||||
completionFailure: message,
|
||||
})
|
||||
|
||||
this.completed = true
|
||||
@@ -654,13 +490,13 @@ export class LoggingSession {
|
||||
models: {},
|
||||
}
|
||||
|
||||
await this.completeExecutionWithFinalization({
|
||||
await executionLogger.completeWorkflowExecution({
|
||||
executionId: this.executionId,
|
||||
endedAt: endTime.toISOString(),
|
||||
totalDurationMs: Math.max(1, durationMs),
|
||||
costSummary,
|
||||
finalOutput: { cancelled: true },
|
||||
traceSpans: traceSpans || [],
|
||||
finalizationPath: 'cancelled',
|
||||
status: 'cancelled',
|
||||
})
|
||||
|
||||
@@ -741,14 +577,14 @@ export class LoggingSession {
|
||||
models: {},
|
||||
}
|
||||
|
||||
await this.completeExecutionWithFinalization({
|
||||
await executionLogger.completeWorkflowExecution({
|
||||
executionId: this.executionId,
|
||||
endedAt: endTime.toISOString(),
|
||||
totalDurationMs: Math.max(1, durationMs),
|
||||
costSummary,
|
||||
finalOutput: { paused: true },
|
||||
traceSpans: traceSpans || [],
|
||||
workflowInput,
|
||||
finalizationPath: 'paused',
|
||||
status: 'pending',
|
||||
})
|
||||
|
||||
@@ -910,6 +746,7 @@ export class LoggingSession {
|
||||
this.completionAttemptFailed = true
|
||||
throw error
|
||||
})
|
||||
|
||||
return this.completionPromise
|
||||
}
|
||||
|
||||
@@ -919,7 +756,6 @@ export class LoggingSession {
|
||||
|
||||
private async _safeCompleteImpl(params: SessionCompleteParams = {}): Promise<void> {
|
||||
try {
|
||||
await this.drainPendingProgressWrites()
|
||||
await this.complete(params)
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||
@@ -933,8 +769,6 @@ export class LoggingSession {
|
||||
totalDurationMs: params.totalDurationMs,
|
||||
errorMessage: `Failed to store trace spans: ${errorMsg}`,
|
||||
isError: false,
|
||||
finalizationPath: 'fallback_completed',
|
||||
finalOutput: params.finalOutput || {},
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -945,7 +779,6 @@ export class LoggingSession {
|
||||
|
||||
private async _safeCompleteWithErrorImpl(params?: SessionErrorCompleteParams): Promise<void> {
|
||||
try {
|
||||
await this.drainPendingProgressWrites()
|
||||
await this.completeWithError(params)
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||
@@ -960,10 +793,6 @@ export class LoggingSession {
|
||||
errorMessage:
|
||||
params?.error?.message || `Execution failed to store trace spans: ${errorMsg}`,
|
||||
isError: true,
|
||||
finalizationPath: 'force_failed',
|
||||
finalOutput: {
|
||||
error: params?.error?.message || `Execution failed to store trace spans: ${errorMsg}`,
|
||||
},
|
||||
status: 'failed',
|
||||
})
|
||||
}
|
||||
@@ -977,7 +806,6 @@ export class LoggingSession {
|
||||
|
||||
private async _safeCompleteWithCancellationImpl(params?: SessionCancelledParams): Promise<void> {
|
||||
try {
|
||||
await this.drainPendingProgressWrites()
|
||||
await this.completeWithCancellation(params)
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||
@@ -991,8 +819,6 @@ export class LoggingSession {
|
||||
totalDurationMs: params?.totalDurationMs,
|
||||
errorMessage: 'Execution was cancelled',
|
||||
isError: false,
|
||||
finalizationPath: 'cancelled',
|
||||
finalOutput: { cancelled: true },
|
||||
status: 'cancelled',
|
||||
})
|
||||
}
|
||||
@@ -1004,7 +830,6 @@ export class LoggingSession {
|
||||
|
||||
private async _safeCompleteWithPauseImpl(params?: SessionPausedParams): Promise<void> {
|
||||
try {
|
||||
await this.drainPendingProgressWrites()
|
||||
await this.completeWithPause(params)
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||
@@ -1018,8 +843,6 @@ export class LoggingSession {
|
||||
totalDurationMs: params?.totalDurationMs,
|
||||
errorMessage: 'Execution paused but failed to store full trace spans',
|
||||
isError: false,
|
||||
finalizationPath: 'paused',
|
||||
finalOutput: { paused: true },
|
||||
status: 'pending',
|
||||
})
|
||||
}
|
||||
@@ -1044,16 +867,12 @@ export class LoggingSession {
|
||||
status: 'failed',
|
||||
executionData: sql`jsonb_set(
|
||||
jsonb_set(
|
||||
jsonb_set(
|
||||
COALESCE(execution_data, '{}'::jsonb),
|
||||
ARRAY['error'],
|
||||
to_jsonb(${message}::text)
|
||||
),
|
||||
ARRAY['finalOutput'],
|
||||
jsonb_build_object('error', ${message}::text)
|
||||
COALESCE(execution_data, '{}'::jsonb),
|
||||
ARRAY['error'],
|
||||
to_jsonb(${message}::text)
|
||||
),
|
||||
ARRAY['finalizationPath'],
|
||||
to_jsonb('force_failed'::text)
|
||||
ARRAY['finalOutput'],
|
||||
jsonb_build_object('error', ${message}::text)
|
||||
)`,
|
||||
})
|
||||
.where(eq(workflowExecutionLogs.executionId, executionId))
|
||||
@@ -1072,8 +891,6 @@ export class LoggingSession {
|
||||
totalDurationMs?: number
|
||||
errorMessage: string
|
||||
isError: boolean
|
||||
finalizationPath: ExecutionFinalizationPath
|
||||
finalOutput?: Record<string, unknown>
|
||||
status?: 'completed' | 'failed' | 'cancelled' | 'pending'
|
||||
}): Promise<void> {
|
||||
if (this.completed || this.completing) {
|
||||
@@ -1086,48 +903,28 @@ export class LoggingSession {
|
||||
)
|
||||
|
||||
try {
|
||||
const hasAccumulatedCost =
|
||||
this.costFlushed ||
|
||||
this.accumulatedCost.total > BASE_EXECUTION_CHARGE ||
|
||||
this.accumulatedCost.tokens.total > 0 ||
|
||||
Object.keys(this.accumulatedCost.models).length > 0
|
||||
|
||||
const costSummary = hasAccumulatedCost
|
||||
? {
|
||||
totalCost: this.accumulatedCost.total,
|
||||
totalInputCost: this.accumulatedCost.input,
|
||||
totalOutputCost: this.accumulatedCost.output,
|
||||
totalTokens: this.accumulatedCost.tokens.total,
|
||||
totalPromptTokens: this.accumulatedCost.tokens.input,
|
||||
totalCompletionTokens: this.accumulatedCost.tokens.output,
|
||||
const costSummary = params.traceSpans?.length
|
||||
? calculateCostSummary(params.traceSpans)
|
||||
: {
|
||||
totalCost: BASE_EXECUTION_CHARGE,
|
||||
totalInputCost: 0,
|
||||
totalOutputCost: 0,
|
||||
totalTokens: 0,
|
||||
totalPromptTokens: 0,
|
||||
totalCompletionTokens: 0,
|
||||
baseExecutionCharge: BASE_EXECUTION_CHARGE,
|
||||
modelCost: Math.max(0, this.accumulatedCost.total - BASE_EXECUTION_CHARGE),
|
||||
models: this.accumulatedCost.models,
|
||||
modelCost: 0,
|
||||
models: {},
|
||||
}
|
||||
: params.traceSpans?.length
|
||||
? calculateCostSummary(params.traceSpans)
|
||||
: {
|
||||
totalCost: BASE_EXECUTION_CHARGE,
|
||||
totalInputCost: 0,
|
||||
totalOutputCost: 0,
|
||||
totalTokens: 0,
|
||||
totalPromptTokens: 0,
|
||||
totalCompletionTokens: 0,
|
||||
baseExecutionCharge: BASE_EXECUTION_CHARGE,
|
||||
modelCost: 0,
|
||||
models: {},
|
||||
}
|
||||
|
||||
const finalOutput = params.finalOutput || { _fallback: true, error: params.errorMessage }
|
||||
|
||||
await this.completeExecutionWithFinalization({
|
||||
await executionLogger.completeWorkflowExecution({
|
||||
executionId: this.executionId,
|
||||
endedAt: params.endedAt || new Date().toISOString(),
|
||||
totalDurationMs: params.totalDurationMs || 0,
|
||||
costSummary,
|
||||
finalOutput,
|
||||
finalOutput: { _fallback: true, error: params.errorMessage },
|
||||
traceSpans: [],
|
||||
finalizationPath: params.finalizationPath,
|
||||
completionFailure: params.errorMessage,
|
||||
isResume: this.isResume,
|
||||
level: params.isError ? 'error' : 'info',
|
||||
status: params.status,
|
||||
})
|
||||
|
||||
@@ -71,31 +71,6 @@ export interface ExecutionStatus {
|
||||
durationMs?: number
|
||||
}
|
||||
|
||||
export const EXECUTION_FINALIZATION_PATHS = [
|
||||
'completed',
|
||||
'fallback_completed',
|
||||
'force_failed',
|
||||
'cancelled',
|
||||
'paused',
|
||||
] as const
|
||||
|
||||
export type ExecutionFinalizationPath = (typeof EXECUTION_FINALIZATION_PATHS)[number]
|
||||
|
||||
export interface ExecutionLastStartedBlock {
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
startedAt: string
|
||||
}
|
||||
|
||||
export interface ExecutionLastCompletedBlock {
|
||||
blockId: string
|
||||
blockName: string
|
||||
blockType: string
|
||||
endedAt: string
|
||||
success: boolean
|
||||
}
|
||||
|
||||
export interface WorkflowExecutionSnapshot {
|
||||
id: string
|
||||
workflowId: string | null
|
||||
@@ -130,13 +105,6 @@ export interface WorkflowExecutionLog {
|
||||
environment?: ExecutionEnvironment
|
||||
trigger?: ExecutionTrigger
|
||||
correlation?: AsyncExecutionCorrelation
|
||||
error?: string
|
||||
lastStartedBlock?: ExecutionLastStartedBlock
|
||||
lastCompletedBlock?: ExecutionLastCompletedBlock
|
||||
hasTraceSpans?: boolean
|
||||
traceSpanCount?: number
|
||||
completionFailure?: string
|
||||
finalizationPath?: ExecutionFinalizationPath
|
||||
traceSpans?: TraceSpan[]
|
||||
tokens?: { input?: number; output?: number; total?: number }
|
||||
models?: Record<
|
||||
@@ -410,9 +378,6 @@ export interface ExecutionLoggerService {
|
||||
finalOutput: BlockOutputData
|
||||
traceSpans?: TraceSpan[]
|
||||
workflowInput?: any
|
||||
executionState?: SerializableExecutionState
|
||||
finalizationPath?: ExecutionFinalizationPath
|
||||
completionFailure?: string
|
||||
isResume?: boolean
|
||||
level?: 'info' | 'error'
|
||||
status?: 'completed' | 'failed' | 'cancelled' | 'pending'
|
||||
|
||||
@@ -209,9 +209,8 @@ export async function uploadWorkspaceFile(
|
||||
|
||||
/**
|
||||
* Track a file that was already uploaded to workspace S3 as a chat-scoped upload.
|
||||
* Links the existing workspaceFiles metadata record (created by the storage service
|
||||
* during upload) to the chat by setting chatId and context='mothership'.
|
||||
* Falls back to inserting a new record if none exists for the key.
|
||||
* Creates a workspaceFiles record with context='mothership' and the given chatId.
|
||||
* No S3 operations -- the file is already in storage from the presigned/upload step.
|
||||
*/
|
||||
export async function trackChatUpload(
|
||||
workspaceId: string,
|
||||
@@ -222,17 +221,6 @@ export async function trackChatUpload(
|
||||
contentType: string,
|
||||
size: number
|
||||
): Promise<void> {
|
||||
const updated = await db
|
||||
.update(workspaceFiles)
|
||||
.set({ chatId, context: 'mothership' })
|
||||
.where(and(eq(workspaceFiles.key, s3Key), eq(workspaceFiles.workspaceId, workspaceId), isNull(workspaceFiles.deletedAt)))
|
||||
.returning({ id: workspaceFiles.id })
|
||||
|
||||
if (updated.length > 0) {
|
||||
logger.info(`Linked existing file record to chat: ${fileName} for chat ${chatId}`)
|
||||
return
|
||||
}
|
||||
|
||||
const fileId = `wf_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`
|
||||
|
||||
await db.insert(workspaceFiles).values({
|
||||
|
||||
@@ -16,8 +16,6 @@ const {
|
||||
buildTraceSpansMock,
|
||||
serializeWorkflowMock,
|
||||
executorExecuteMock,
|
||||
onBlockStartPersistenceMock,
|
||||
executorConstructorMock,
|
||||
} = vi.hoisted(() => ({
|
||||
loadWorkflowFromNormalizedTablesMock: vi.fn(),
|
||||
loadDeployedWorkflowStateMock: vi.fn(),
|
||||
@@ -34,8 +32,6 @@ const {
|
||||
buildTraceSpansMock: vi.fn(),
|
||||
serializeWorkflowMock: vi.fn(),
|
||||
executorExecuteMock: vi.fn(),
|
||||
onBlockStartPersistenceMock: vi.fn(),
|
||||
executorConstructorMock: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@sim/logger', () => ({
|
||||
@@ -83,13 +79,10 @@ vi.mock('@/lib/workflows/utils', () => ({
|
||||
}))
|
||||
|
||||
vi.mock('@/executor', () => ({
|
||||
Executor: vi.fn().mockImplementation((args) => {
|
||||
executorConstructorMock(args)
|
||||
return {
|
||||
execute: executorExecuteMock,
|
||||
executeFromBlock: executorExecuteMock,
|
||||
}
|
||||
}),
|
||||
Executor: vi.fn().mockImplementation(() => ({
|
||||
execute: executorExecuteMock,
|
||||
executeFromBlock: executorExecuteMock,
|
||||
})),
|
||||
}))
|
||||
|
||||
vi.mock('@/serializer', () => ({
|
||||
@@ -112,8 +105,6 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
|
||||
safeCompleteWithCancellation: safeCompleteWithCancellationMock,
|
||||
safeCompleteWithPause: safeCompleteWithPauseMock,
|
||||
hasCompleted: hasCompletedMock,
|
||||
onBlockStart: onBlockStartPersistenceMock,
|
||||
onBlockComplete: vi.fn(),
|
||||
setPostExecutionPromise: vi.fn(),
|
||||
waitForPostExecution: vi.fn().mockResolvedValue(undefined),
|
||||
}
|
||||
@@ -185,72 +176,10 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
|
||||
safeCompleteWithCancellationMock.mockResolvedValue(undefined)
|
||||
safeCompleteWithPauseMock.mockResolvedValue(undefined)
|
||||
hasCompletedMock.mockReturnValue(true)
|
||||
onBlockStartPersistenceMock.mockResolvedValue(undefined)
|
||||
updateWorkflowRunCountsMock.mockResolvedValue(undefined)
|
||||
clearExecutionCancellationMock.mockResolvedValue(undefined)
|
||||
})
|
||||
|
||||
it('routes onBlockStart through logging session persistence path', async () => {
|
||||
executorExecuteMock.mockResolvedValue({
|
||||
success: true,
|
||||
status: 'completed',
|
||||
output: { done: true },
|
||||
logs: [],
|
||||
metadata: { duration: 123, startTime: 'start', endTime: 'end' },
|
||||
})
|
||||
|
||||
await executeWorkflowCore({
|
||||
snapshot: createSnapshot() as any,
|
||||
callbacks: {
|
||||
onBlockStart: async (blockId) => {
|
||||
expect(blockId).toBe('block-1')
|
||||
},
|
||||
},
|
||||
loggingSession: loggingSession as any,
|
||||
})
|
||||
|
||||
const contextExtensions = executorConstructorMock.mock.calls[0]?.[0]?.contextExtensions
|
||||
await contextExtensions.onBlockStart('block-1', 'Fetch', 'api', 1)
|
||||
|
||||
expect(onBlockStartPersistenceMock).toHaveBeenCalledWith(
|
||||
'block-1',
|
||||
'Fetch',
|
||||
'api',
|
||||
expect.any(String)
|
||||
)
|
||||
})
|
||||
|
||||
it('does not await user block start callback after persistence completes', async () => {
|
||||
let releaseCallback: (() => void) | undefined
|
||||
const callbackPromise = new Promise<void>((resolve) => {
|
||||
releaseCallback = resolve
|
||||
})
|
||||
|
||||
executorExecuteMock.mockResolvedValue({
|
||||
success: true,
|
||||
status: 'completed',
|
||||
output: { done: true },
|
||||
logs: [],
|
||||
metadata: { duration: 123, startTime: 'start', endTime: 'end' },
|
||||
})
|
||||
|
||||
await executeWorkflowCore({
|
||||
snapshot: createSnapshot() as any,
|
||||
callbacks: {
|
||||
onBlockStart: vi.fn(() => callbackPromise),
|
||||
},
|
||||
loggingSession: loggingSession as any,
|
||||
})
|
||||
|
||||
const contextExtensions = executorConstructorMock.mock.calls[0]?.[0]?.contextExtensions
|
||||
|
||||
await expect(
|
||||
contextExtensions.onBlockStart('block-1', 'Fetch', 'api', 1)
|
||||
).resolves.toBeUndefined()
|
||||
|
||||
releaseCallback?.()
|
||||
})
|
||||
|
||||
it('awaits terminal completion before updating run counts and returning', async () => {
|
||||
const callOrder: string[] = []
|
||||
|
||||
@@ -293,57 +222,7 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
|
||||
])
|
||||
})
|
||||
|
||||
it('awaits wrapped lifecycle persistence before terminal finalization returns', async () => {
|
||||
let releaseBlockStart: (() => void) | undefined
|
||||
const blockStartPromise = new Promise<void>((resolve) => {
|
||||
releaseBlockStart = resolve
|
||||
})
|
||||
const callOrder: string[] = []
|
||||
|
||||
onBlockStartPersistenceMock.mockImplementation(async () => {
|
||||
callOrder.push('persist:start')
|
||||
await blockStartPromise
|
||||
callOrder.push('persist:end')
|
||||
})
|
||||
|
||||
safeCompleteMock.mockImplementation(async () => {
|
||||
callOrder.push('safeComplete')
|
||||
})
|
||||
|
||||
executorExecuteMock.mockImplementation(async () => {
|
||||
const contextExtensions = executorConstructorMock.mock.calls[0]?.[0]?.contextExtensions
|
||||
const startLifecycle = contextExtensions.onBlockStart('block-1', 'Fetch', 'api', 1)
|
||||
await Promise.resolve()
|
||||
callOrder.push('executor:before-release')
|
||||
releaseBlockStart?.()
|
||||
await startLifecycle
|
||||
callOrder.push('executor:after-start')
|
||||
|
||||
return {
|
||||
success: true,
|
||||
status: 'completed',
|
||||
output: { done: true },
|
||||
logs: [],
|
||||
metadata: { duration: 123, startTime: 'start', endTime: 'end' },
|
||||
}
|
||||
})
|
||||
|
||||
await executeWorkflowCore({
|
||||
snapshot: createSnapshot() as any,
|
||||
callbacks: {},
|
||||
loggingSession: loggingSession as any,
|
||||
})
|
||||
|
||||
expect(callOrder).toEqual([
|
||||
'persist:start',
|
||||
'executor:before-release',
|
||||
'persist:end',
|
||||
'executor:after-start',
|
||||
'safeComplete',
|
||||
])
|
||||
})
|
||||
|
||||
it('preserves successful execution when success finalization throws', async () => {
|
||||
it('clears cancellation even when success finalization throws', async () => {
|
||||
executorExecuteMock.mockResolvedValue({
|
||||
success: true,
|
||||
status: 'completed',
|
||||
@@ -365,7 +244,7 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
|
||||
|
||||
expect(result.status).toBe('completed')
|
||||
expect(clearExecutionCancellationMock).toHaveBeenCalledWith('execution-1')
|
||||
expect(updateWorkflowRunCountsMock).toHaveBeenCalledWith('workflow-1')
|
||||
expect(updateWorkflowRunCountsMock).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('routes cancelled executions through safeCompleteWithCancellation', async () => {
|
||||
@@ -425,61 +304,6 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
|
||||
expect(updateWorkflowRunCountsMock).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('swallows wrapped block start callback failures without breaking execution', async () => {
|
||||
onBlockStartPersistenceMock.mockRejectedValue(new Error('start persistence failed'))
|
||||
|
||||
executorExecuteMock.mockImplementation(async () => {
|
||||
const contextExtensions = executorConstructorMock.mock.calls[0]?.[0]?.contextExtensions
|
||||
await contextExtensions.onBlockStart('block-1', 'Fetch', 'api', 1)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
status: 'completed',
|
||||
output: { done: true },
|
||||
logs: [],
|
||||
metadata: { duration: 123, startTime: 'start', endTime: 'end' },
|
||||
}
|
||||
})
|
||||
|
||||
const result = await executeWorkflowCore({
|
||||
snapshot: createSnapshot() as any,
|
||||
callbacks: {},
|
||||
loggingSession: loggingSession as any,
|
||||
})
|
||||
|
||||
expect(result.status).toBe('completed')
|
||||
expect(safeCompleteMock).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('swallows wrapped block complete callback failures without blocking completion', async () => {
|
||||
executorExecuteMock.mockResolvedValue({
|
||||
success: true,
|
||||
status: 'completed',
|
||||
output: { done: true },
|
||||
logs: [],
|
||||
metadata: { duration: 123, startTime: 'start', endTime: 'end' },
|
||||
})
|
||||
|
||||
await executeWorkflowCore({
|
||||
snapshot: createSnapshot() as any,
|
||||
callbacks: {
|
||||
onBlockComplete: vi.fn().mockRejectedValue(new Error('complete callback failed')),
|
||||
},
|
||||
loggingSession: loggingSession as any,
|
||||
})
|
||||
|
||||
const contextExtensions = executorConstructorMock.mock.calls[0]?.[0]?.contextExtensions
|
||||
|
||||
await expect(
|
||||
contextExtensions.onBlockComplete('block-1', 'Fetch', 'api', {
|
||||
output: { ok: true },
|
||||
executionTime: 1,
|
||||
startedAt: 'start',
|
||||
endedAt: 'end',
|
||||
})
|
||||
).resolves.toBeUndefined()
|
||||
})
|
||||
|
||||
it('finalizes errors before rethrowing and marks them as core-finalized', async () => {
|
||||
const error = new Error('engine failed')
|
||||
const executionResult = {
|
||||
@@ -621,7 +445,7 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
|
||||
expect(wasExecutionFinalizedByCore('engine failed', 'execution-a')).toBe(true)
|
||||
})
|
||||
|
||||
it('does not replace a successful outcome when success finalization rejects', async () => {
|
||||
it('logs error without rejecting when success finalization rejects', async () => {
|
||||
executorExecuteMock.mockResolvedValue({
|
||||
success: true,
|
||||
status: 'completed',
|
||||
@@ -640,7 +464,7 @@ describe('executeWorkflowCore terminal finalization sequencing', () => {
|
||||
|
||||
await loggingSession.setPostExecutionPromise.mock.calls[0][0]
|
||||
|
||||
expect(result).toMatchObject({ status: 'completed', success: true })
|
||||
expect(result.status).toBe('completed')
|
||||
expect(clearExecutionCancellationMock).toHaveBeenCalledWith('execution-1')
|
||||
expect(safeCompleteWithErrorMock).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@@ -179,41 +179,33 @@ async function finalizeExecutionOutcome(params: {
|
||||
const endedAt = new Date().toISOString()
|
||||
|
||||
try {
|
||||
try {
|
||||
if (result.status === 'cancelled') {
|
||||
await loggingSession.safeCompleteWithCancellation({
|
||||
endedAt,
|
||||
totalDurationMs: totalDuration || 0,
|
||||
traceSpans: traceSpans || [],
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (result.status === 'paused') {
|
||||
await loggingSession.safeCompleteWithPause({
|
||||
endedAt,
|
||||
totalDurationMs: totalDuration || 0,
|
||||
traceSpans: traceSpans || [],
|
||||
workflowInput,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
await loggingSession.safeComplete({
|
||||
if (result.status === 'cancelled') {
|
||||
await loggingSession.safeCompleteWithCancellation({
|
||||
endedAt,
|
||||
totalDurationMs: totalDuration || 0,
|
||||
traceSpans: traceSpans || [],
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (result.status === 'paused') {
|
||||
await loggingSession.safeCompleteWithPause({
|
||||
endedAt,
|
||||
totalDurationMs: totalDuration || 0,
|
||||
finalOutput: result.output || {},
|
||||
traceSpans: traceSpans || [],
|
||||
workflowInput,
|
||||
executionState: result.executionState,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Post-execution finalization failed`, {
|
||||
executionId,
|
||||
status: result.status,
|
||||
error,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
await loggingSession.safeComplete({
|
||||
endedAt,
|
||||
totalDurationMs: totalDuration || 0,
|
||||
finalOutput: result.output || {},
|
||||
traceSpans: traceSpans || [],
|
||||
workflowInput,
|
||||
executionState: result.executionState,
|
||||
})
|
||||
} finally {
|
||||
await clearExecutionCancellationSafely(executionId, requestId)
|
||||
}
|
||||
@@ -432,69 +424,16 @@ export async function executeWorkflowCore(
|
||||
iterationContext?: IterationContext,
|
||||
childWorkflowContext?: ChildWorkflowContext
|
||||
) => {
|
||||
try {
|
||||
await loggingSession.onBlockComplete(blockId, blockName, blockType, output)
|
||||
if (onBlockComplete) {
|
||||
void onBlockComplete(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
output,
|
||||
iterationContext,
|
||||
childWorkflowContext
|
||||
).catch((error) => {
|
||||
logger.warn(`[${requestId}] Block completion callback failed`, {
|
||||
executionId,
|
||||
blockId,
|
||||
blockType,
|
||||
error,
|
||||
})
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Block completion persistence failed`, {
|
||||
executionId,
|
||||
await loggingSession.onBlockComplete(blockId, blockName, blockType, output)
|
||||
if (onBlockComplete) {
|
||||
await onBlockComplete(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
error,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const wrappedOnBlockStart = async (
|
||||
blockId: string,
|
||||
blockName: string,
|
||||
blockType: string,
|
||||
executionOrder: number,
|
||||
iterationContext?: IterationContext,
|
||||
childWorkflowContext?: ChildWorkflowContext
|
||||
) => {
|
||||
try {
|
||||
await loggingSession.onBlockStart(blockId, blockName, blockType, new Date().toISOString())
|
||||
if (onBlockStart) {
|
||||
void onBlockStart(
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
executionOrder,
|
||||
iterationContext,
|
||||
childWorkflowContext
|
||||
).catch((error) => {
|
||||
logger.warn(`[${requestId}] Block start callback failed`, {
|
||||
executionId,
|
||||
blockId,
|
||||
blockType,
|
||||
error,
|
||||
})
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Block start persistence failed`, {
|
||||
executionId,
|
||||
blockId,
|
||||
blockType,
|
||||
error,
|
||||
})
|
||||
output,
|
||||
iterationContext,
|
||||
childWorkflowContext
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -506,7 +445,7 @@ export async function executeWorkflowCore(
|
||||
userId,
|
||||
isDeployedContext: !metadata.isClientSession,
|
||||
enforceCredentialAccess: metadata.enforceCredentialAccess ?? false,
|
||||
onBlockStart: wrappedOnBlockStart,
|
||||
onBlockStart,
|
||||
onBlockComplete: wrappedOnBlockComplete,
|
||||
onStream,
|
||||
resumeFromSnapshot,
|
||||
|
||||
@@ -2,7 +2,6 @@ import type { NextConfig } from 'next'
|
||||
import { env, getEnv, isTruthy } from './lib/core/config/env'
|
||||
import { isDev } from './lib/core/config/feature-flags'
|
||||
import {
|
||||
getChatEmbedCSPPolicy,
|
||||
getFormEmbedCSPPolicy,
|
||||
getMainCSPPolicy,
|
||||
getWorkflowExecutionCSPPolicy,
|
||||
@@ -256,24 +255,6 @@ const nextConfig: NextConfig = {
|
||||
},
|
||||
],
|
||||
},
|
||||
// Chat pages - allow iframe embedding from any origin
|
||||
{
|
||||
source: '/chat/:path*',
|
||||
headers: [
|
||||
{
|
||||
key: 'X-Content-Type-Options',
|
||||
value: 'nosniff',
|
||||
},
|
||||
// No X-Frame-Options to allow iframe embedding
|
||||
{
|
||||
key: 'Content-Security-Policy',
|
||||
value: getChatEmbedCSPPolicy(),
|
||||
},
|
||||
// Permissive CORS for chat requests from embedded chats
|
||||
{ key: 'Cross-Origin-Embedder-Policy', value: 'unsafe-none' },
|
||||
{ key: 'Cross-Origin-Opener-Policy', value: 'unsafe-none' },
|
||||
],
|
||||
},
|
||||
// Form pages - allow iframe embedding from any origin
|
||||
{
|
||||
source: '/form/:path*',
|
||||
@@ -303,10 +284,10 @@ const nextConfig: NextConfig = {
|
||||
],
|
||||
},
|
||||
// Apply security headers to routes not handled by middleware runtime CSP
|
||||
// Middleware handles: /, /workspace/*
|
||||
// Exclude chat and form routes which have their own permissive embed headers
|
||||
// Middleware handles: /, /workspace/*, /chat/*
|
||||
// Exclude form routes which have their own permissive headers
|
||||
{
|
||||
source: '/((?!workspace|chat|form).*)',
|
||||
source: '/((?!workspace|chat$|form).*)',
|
||||
headers: [
|
||||
{
|
||||
key: 'X-Content-Type-Options',
|
||||
|
||||
@@ -155,7 +155,6 @@ export async function proxy(request: NextRequest) {
|
||||
return response
|
||||
}
|
||||
|
||||
// Chat pages are publicly accessible embeds — CSP is set in next.config.ts headers
|
||||
if (url.pathname.startsWith('/chat/')) {
|
||||
return NextResponse.next()
|
||||
}
|
||||
@@ -189,7 +188,11 @@ export async function proxy(request: NextRequest) {
|
||||
const response = NextResponse.next()
|
||||
response.headers.set('Vary', 'User-Agent')
|
||||
|
||||
if (url.pathname.startsWith('/workspace') || url.pathname === '/') {
|
||||
if (
|
||||
url.pathname.startsWith('/workspace') ||
|
||||
url.pathname.startsWith('/chat') ||
|
||||
url.pathname === '/'
|
||||
) {
|
||||
response.headers.set('Content-Security-Policy', generateRuntimeCSP())
|
||||
}
|
||||
|
||||
|
||||
|
Before Width: | Height: | Size: 531 KiB |
|
Before Width: | Height: | Size: 6.2 MiB |
|
Before Width: | Height: | Size: 555 KiB |
|
Before Width: | Height: | Size: 402 KiB |
|
Before Width: | Height: | Size: 516 KiB |
|
Before Width: | Height: | Size: 37 KiB |
|
Before Width: | Height: | Size: 316 KiB |
@@ -29,7 +29,7 @@ const socketDb = drizzle(
|
||||
prepare: false,
|
||||
idle_timeout: 10,
|
||||
connect_timeout: 20,
|
||||
max: 10,
|
||||
max: 15,
|
||||
onnotice: () => {},
|
||||
}),
|
||||
{ schema }
|
||||
|
||||
@@ -64,12 +64,5 @@ export const OUTPUT_PANEL_WIDTH = {
|
||||
MIN: 280,
|
||||
} as const
|
||||
|
||||
/** Home chat resource panel (MothershipView) width constraints */
|
||||
export const MOTHERSHIP_WIDTH = {
|
||||
MIN: 280,
|
||||
/** Maximum is 65% of viewport, enforced dynamically */
|
||||
MAX_PERCENTAGE: 0.65,
|
||||
} as const
|
||||
|
||||
/** Terminal block column width - minimum width for the logs column */
|
||||
export const TERMINAL_BLOCK_COLUMN_WIDTH = 240 as const
|
||||
|
||||
@@ -224,7 +224,6 @@ function replaceTextBlocks(blocks: ClientContentBlock[], text: string): ClientCo
|
||||
function createClientStreamingContext(messageId: string): ClientStreamingContext {
|
||||
return {
|
||||
messageId,
|
||||
requestId: undefined,
|
||||
accumulatedContent: '',
|
||||
contentBlocks: [],
|
||||
currentTextBlock: null,
|
||||
@@ -2044,7 +2043,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
msg.id === assistantMessageId
|
||||
? {
|
||||
...msg,
|
||||
requestId: context.requestId ?? msg.requestId,
|
||||
content: finalContentWithOptions,
|
||||
contentBlocks: sanitizedContentBlocks,
|
||||
}
|
||||
|
||||
@@ -70,7 +70,6 @@ export interface CopilotMessage {
|
||||
role: 'user' | 'assistant' | 'system'
|
||||
content: string
|
||||
timestamp: string
|
||||
requestId?: string
|
||||
citations?: { id: number; title: string; url: string; similarity?: number }[]
|
||||
toolCalls?: CopilotToolCall[]
|
||||
contentBlocks?: ClientContentBlock[]
|
||||
|
||||
@@ -11,7 +11,6 @@ import { knowledgeSearchTool } from '@/tools/knowledge/search'
|
||||
import { knowledgeTriggerSyncTool } from '@/tools/knowledge/trigger_sync'
|
||||
import { knowledgeUpdateChunkTool } from '@/tools/knowledge/update_chunk'
|
||||
import { knowledgeUploadChunkTool } from '@/tools/knowledge/upload_chunk'
|
||||
import { knowledgeUpsertDocumentTool } from '@/tools/knowledge/upsert_document'
|
||||
|
||||
export {
|
||||
knowledgeSearchTool,
|
||||
@@ -27,5 +26,4 @@ export {
|
||||
knowledgeListConnectorsTool,
|
||||
knowledgeGetConnectorTool,
|
||||
knowledgeTriggerSyncTool,
|
||||
knowledgeUpsertDocumentTool,
|
||||
}
|
||||
|
||||
@@ -286,33 +286,3 @@ export interface KnowledgeTriggerSyncResponse {
|
||||
}
|
||||
error?: string
|
||||
}
|
||||
|
||||
export interface KnowledgeUpsertDocumentParams {
|
||||
knowledgeBaseId: string
|
||||
name: string
|
||||
content: string
|
||||
documentId?: string
|
||||
documentTags?: Record<string, unknown>
|
||||
_context?: { workflowId?: string }
|
||||
}
|
||||
|
||||
export interface KnowledgeUpsertDocumentResult {
|
||||
documentId: string
|
||||
documentName: string
|
||||
type: string
|
||||
enabled: boolean
|
||||
isUpdate: boolean
|
||||
previousDocumentId: string | null
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
}
|
||||
|
||||
export interface KnowledgeUpsertDocumentResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
data: KnowledgeUpsertDocumentResult
|
||||
message: string
|
||||
documentId: string
|
||||
}
|
||||
error?: string
|
||||
}
|
||||
|
||||
@@ -1,189 +0,0 @@
|
||||
import type {
|
||||
KnowledgeUpsertDocumentParams,
|
||||
KnowledgeUpsertDocumentResponse,
|
||||
} from '@/tools/knowledge/types'
|
||||
import { enrichKBTagsSchema } from '@/tools/schema-enrichers'
|
||||
import { formatDocumentTagsForAPI, parseDocumentTags } from '@/tools/shared/tags'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const knowledgeUpsertDocumentTool: ToolConfig<
|
||||
KnowledgeUpsertDocumentParams,
|
||||
KnowledgeUpsertDocumentResponse
|
||||
> = {
|
||||
id: 'knowledge_upsert_document',
|
||||
name: 'Knowledge Upsert Document',
|
||||
description:
|
||||
'Create or update a document in a knowledge base. If a document with the given ID or filename already exists, it will be replaced with the new content.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
knowledgeBaseId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'ID of the knowledge base containing the document',
|
||||
},
|
||||
documentId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Optional ID of an existing document to update. If not provided, lookup is done by filename.',
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Name of the document',
|
||||
},
|
||||
content: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Content of the document',
|
||||
},
|
||||
documentTags: {
|
||||
type: 'json',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Document tags',
|
||||
},
|
||||
},
|
||||
|
||||
schemaEnrichment: {
|
||||
documentTags: {
|
||||
dependsOn: 'knowledgeBaseId',
|
||||
enrichSchema: enrichKBTagsSchema,
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => `/api/knowledge/${params.knowledgeBaseId}/documents/upsert`,
|
||||
method: 'POST',
|
||||
headers: () => ({
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => {
|
||||
const workflowId = params._context?.workflowId
|
||||
const textContent = params.content?.trim()
|
||||
const documentName = params.name?.trim()
|
||||
|
||||
if (!documentName || documentName.length === 0) {
|
||||
throw new Error('Document name is required')
|
||||
}
|
||||
if (documentName.length > 255) {
|
||||
throw new Error('Document name must be 255 characters or less')
|
||||
}
|
||||
if (!textContent || textContent.length < 1) {
|
||||
throw new Error('Document content cannot be empty')
|
||||
}
|
||||
const utf8Bytes = new TextEncoder().encode(textContent)
|
||||
const contentBytes = utf8Bytes.length
|
||||
|
||||
if (contentBytes > 1_000_000) {
|
||||
throw new Error('Document content exceeds maximum size of 1MB')
|
||||
}
|
||||
let base64Content: string
|
||||
if (typeof Buffer !== 'undefined') {
|
||||
base64Content = Buffer.from(textContent, 'utf8').toString('base64')
|
||||
} else {
|
||||
let binary = ''
|
||||
for (let i = 0; i < utf8Bytes.length; i++) {
|
||||
binary += String.fromCharCode(utf8Bytes[i])
|
||||
}
|
||||
base64Content = btoa(binary)
|
||||
}
|
||||
|
||||
const dataUri = `data:text/plain;base64,${base64Content}`
|
||||
|
||||
const parsedTags = parseDocumentTags(params.documentTags)
|
||||
const tagData = formatDocumentTagsForAPI(parsedTags)
|
||||
|
||||
const filename = documentName.endsWith('.txt') ? documentName : `${documentName}.txt`
|
||||
|
||||
const requestBody: Record<string, unknown> = {
|
||||
filename,
|
||||
fileUrl: dataUri,
|
||||
fileSize: contentBytes,
|
||||
mimeType: 'text/plain',
|
||||
...tagData,
|
||||
processingOptions: {
|
||||
chunkSize: 1024,
|
||||
minCharactersPerChunk: 1,
|
||||
chunkOverlap: 200,
|
||||
recipe: 'default',
|
||||
lang: 'en',
|
||||
},
|
||||
...(workflowId && { workflowId }),
|
||||
}
|
||||
|
||||
if (params.documentId && String(params.documentId).trim().length > 0) {
|
||||
requestBody.documentId = String(params.documentId).trim()
|
||||
}
|
||||
|
||||
return requestBody
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response): Promise<KnowledgeUpsertDocumentResponse> => {
|
||||
const result = await response.json()
|
||||
const data = result.data ?? result
|
||||
const documentsCreated = data.documentsCreated ?? []
|
||||
const firstDocument = documentsCreated[0]
|
||||
const isUpdate = data.isUpdate ?? false
|
||||
const previousDocumentId = data.previousDocumentId ?? null
|
||||
const documentId = firstDocument?.documentId ?? firstDocument?.id ?? ''
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
message: isUpdate
|
||||
? 'Successfully updated document in knowledge base'
|
||||
: 'Successfully created document in knowledge base',
|
||||
documentId,
|
||||
data: {
|
||||
documentId,
|
||||
documentName: firstDocument?.filename ?? 'Unknown',
|
||||
type: 'document',
|
||||
enabled: true,
|
||||
isUpdate,
|
||||
previousDocumentId,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
data: {
|
||||
type: 'object',
|
||||
description: 'Information about the upserted document',
|
||||
properties: {
|
||||
documentId: { type: 'string', description: 'Document ID' },
|
||||
documentName: { type: 'string', description: 'Document name' },
|
||||
type: { type: 'string', description: 'Document type' },
|
||||
enabled: { type: 'boolean', description: 'Whether the document is enabled' },
|
||||
isUpdate: {
|
||||
type: 'boolean',
|
||||
description: 'Whether an existing document was replaced',
|
||||
},
|
||||
previousDocumentId: {
|
||||
type: 'string',
|
||||
description: 'ID of the document that was replaced, if any',
|
||||
optional: true,
|
||||
},
|
||||
createdAt: { type: 'string', description: 'Creation timestamp' },
|
||||
updatedAt: { type: 'string', description: 'Last update timestamp' },
|
||||
},
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Success or error message describing the operation result',
|
||||
},
|
||||
documentId: {
|
||||
type: 'string',
|
||||
description: 'ID of the upserted document',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1195,7 +1195,6 @@ import {
|
||||
knowledgeTriggerSyncTool,
|
||||
knowledgeUpdateChunkTool,
|
||||
knowledgeUploadChunkTool,
|
||||
knowledgeUpsertDocumentTool,
|
||||
} from '@/tools/knowledge'
|
||||
import { langsmithCreateRunsBatchTool, langsmithCreateRunTool } from '@/tools/langsmith'
|
||||
import { lemlistGetActivitiesTool, lemlistGetLeadTool, lemlistSendEmailTool } from '@/tools/lemlist'
|
||||
@@ -3704,7 +3703,6 @@ export const tools: Record<string, ToolConfig> = {
|
||||
knowledge_list_connectors: knowledgeListConnectorsTool,
|
||||
knowledge_get_connector: knowledgeGetConnectorTool,
|
||||
knowledge_trigger_sync: knowledgeTriggerSyncTool,
|
||||
knowledge_upsert_document: knowledgeUpsertDocumentTool,
|
||||
search_tool: searchTool,
|
||||
elevenlabs_tts: elevenLabsTtsTool,
|
||||
fathom_list_meetings: fathomListMeetingsTool,
|
||||
|
||||
@@ -14,7 +14,7 @@ const postgresClient = postgres(connectionString, {
|
||||
prepare: false,
|
||||
idle_timeout: 20,
|
||||
connect_timeout: 30,
|
||||
max: 10,
|
||||
max: 30,
|
||||
onnotice: () => {},
|
||||
})
|
||||
|
||||
|
||||