Compare commits

6 Commits

Author SHA1 Message Date
Vikhyath Mondreti
d4c171c6d7 fix(sortOrder): initial ordering must be deterministic (#2833)
* fix(sortOrder): initial ordering must be deterministic

* fix initial ordering issue

* add created at to child item
2026-01-14 22:21:49 -08:00
Waleed
26d0799d22 fix(popover): fix frozen workspace popover (#2832) 2026-01-14 22:08:10 -08:00
Waleed
45bd1e8cd7 feat(starter): in start block input format, don't prevent deletion if only one field remaining, just clear form (#2830) 2026-01-14 21:24:02 -08:00
Waleed
85d6e3e3bd fix(misc): added trace spans back to notifications for webhooks, updated verification code for users signing in with email, updated welcome email (#2828)
* added back trace spans to notifications

* fixed double verification code

* fix dashboard

* updated welcome email

* added link to cal for team

* update dashboard stats route

* added react grab URL to CSP if FF is enabled, removed dead db hook

* fix failing test

* ensure MCP add server tool is centered

* updated A2A copy button and MCP location, and default description matching

* updated button on chat page

* added vite version override

* fix
2026-01-14 21:17:20 -08:00
Siddharth Ganesan
ccf268595e improvement(copilot): update copilot to match copilot repo (#2829)
* Ux

* Fix lint

* Clean up model options

* Codex
2026-01-14 20:52:49 -08:00
Vikhyath Mondreti
5eca660c5c improvement(langsmith): add wand for batch ingestion schemas (#2827) 2026-01-14 19:50:35 -08:00
42 changed files with 580 additions and 496 deletions

View File

@@ -359,15 +359,6 @@ function SignupFormContent({
}
}
try {
await client.emailOtp.sendVerificationOtp({
email: emailValue,
type: 'sign-in',
})
} catch (otpErr) {
logger.warn('Failed to send sign-in OTP after signup; user can press Resend', otpErr)
}
router.push('/verify?fromSignup=true')
} catch (error) {
logger.error('Signup error:', error)

View File

@@ -93,7 +93,7 @@ export function useVerification({
try {
const normalizedEmail = email.trim().toLowerCase()
const response = await client.signIn.emailOtp({
const response = await client.emailOtp.verifyEmail({
email: normalizedEmail,
otp,
})
@@ -169,7 +169,7 @@ export function useVerification({
client.emailOtp
.sendVerificationOtp({
email: normalizedEmail,
type: 'sign-in',
type: 'email-verification',
})
.then(() => {})
.catch(() => {

View File

@@ -52,6 +52,9 @@ const ChatMessageSchema = z.object({
'gpt-5.1-high',
'gpt-5-codex',
'gpt-5.1-codex',
'gpt-5.2',
'gpt-5.2-codex',
'gpt-5.2-pro',
'gpt-4o',
'gpt-4.1',
'o3',

View File

@@ -15,11 +15,14 @@ const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
'gpt-5-medium': false,
'gpt-5-high': false,
'gpt-5.1-fast': false,
'gpt-5.1': true,
'gpt-5.1-medium': true,
'gpt-5.1': false,
'gpt-5.1-medium': false,
'gpt-5.1-high': false,
'gpt-5-codex': false,
'gpt-5.1-codex': true,
'gpt-5.1-codex': false,
'gpt-5.2': false,
'gpt-5.2-codex': true,
'gpt-5.2-pro': true,
o3: true,
'claude-4-sonnet': false,
'claude-4.5-haiku': true,

View File

@@ -99,13 +99,14 @@ export async function GET(request: NextRequest) {
const totalMs = Math.max(1, endTime.getTime() - startTime.getTime())
const segmentMs = Math.max(60000, Math.floor(totalMs / params.segmentCount))
const startTimeIso = startTime.toISOString()
const statsQuery = await db
.select({
workflowId: workflowExecutionLogs.workflowId,
workflowName: workflow.name,
segmentIndex:
sql<number>`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`.as(
sql<number>`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTimeIso}::timestamp)) * 1000 / ${segmentMs})`.as(
'segment_index'
),
totalExecutions: sql<number>`COUNT(*)`.as('total_executions'),
@@ -129,12 +130,7 @@ export async function GET(request: NextRequest) {
)
)
.where(whereCondition)
.groupBy(
workflowExecutionLogs.workflowId,
workflow.name,
sql`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`
)
.orderBy(workflowExecutionLogs.workflowId, sql`segment_index`)
.groupBy(workflowExecutionLogs.workflowId, workflow.name, sql`segment_index`)
const workflowMap = new Map<
string,

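The segment bucketing this endpoint relies on is plain fixed-width time slices, and the fix groups by the segment_index alias (built from the ISO start-time string) so the SELECT and GROUP BY expressions stay textually identical. A rough worked sketch of the arithmetic, with made-up numbers rather than anything taken from the route:

// Illustrative only: hypothetical window of 24 hours split into 48 segments.
const startTime = new Date('2026-01-14T00:00:00Z')
const endTime = new Date('2026-01-15T00:00:00Z')
const segmentCount = 48
const totalMs = Math.max(1, endTime.getTime() - startTime.getTime()) // 86_400_000
const segmentMs = Math.max(60_000, Math.floor(totalMs / segmentCount)) // 1_800_000 (30-minute buckets)
// A log that started 95 minutes into the window lands in segment 3:
const startedAt = new Date(startTime.getTime() + 95 * 60_000)
const segmentIndex = Math.floor((startedAt.getTime() - startTime.getTime()) / segmentMs) // 3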
View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, max } from 'drizzle-orm'
import { and, asc, eq, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -64,10 +64,20 @@ export async function GET(request: Request) {
let workflows
const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]
if (workspaceId) {
workflows = await db.select().from(workflow).where(eq(workflow.workspaceId, workspaceId))
workflows = await db
.select()
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
.orderBy(...orderByClause)
} else {
workflows = await db.select().from(workflow).where(eq(workflow.userId, userId))
workflows = await db
.select()
.from(workflow)
.where(eq(workflow.userId, userId))
.orderBy(...orderByClause)
}
return NextResponse.json({ data: workflows }, { status: 200 })
@@ -140,15 +150,15 @@ export async function POST(req: NextRequest) {
sortOrder = providedSortOrder
} else {
const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
const [maxResult] = await db
.select({ maxOrder: max(workflow.sortOrder) })
const [minResult] = await db
.select({ minOrder: min(workflow.sortOrder) })
.from(workflow)
.where(
workspaceId
? and(eq(workflow.workspaceId, workspaceId), folderCondition)
: and(eq(workflow.userId, session.user.id), folderCondition)
)
sortOrder = (maxResult?.maxOrder ?? -1) + 1
sortOrder = (minResult?.minOrder ?? 1) - 1
}
await db.insert(workflow).values({

View File

@@ -80,6 +80,7 @@ const updateNotificationSchema = z
levelFilter: levelFilterSchema.optional(),
triggerFilter: triggerFilterSchema.optional(),
includeFinalOutput: z.boolean().optional(),
includeTraceSpans: z.boolean().optional(),
includeRateLimits: z.boolean().optional(),
includeUsageData: z.boolean().optional(),
alertConfig: alertConfigSchema.optional(),
@@ -146,6 +147,7 @@ export async function GET(request: NextRequest, { params }: RouteParams) {
levelFilter: subscription.levelFilter,
triggerFilter: subscription.triggerFilter,
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookConfig: subscription.webhookConfig,
@@ -220,6 +222,7 @@ export async function PUT(request: NextRequest, { params }: RouteParams) {
if (data.triggerFilter !== undefined) updateData.triggerFilter = data.triggerFilter
if (data.includeFinalOutput !== undefined)
updateData.includeFinalOutput = data.includeFinalOutput
if (data.includeTraceSpans !== undefined) updateData.includeTraceSpans = data.includeTraceSpans
if (data.includeRateLimits !== undefined) updateData.includeRateLimits = data.includeRateLimits
if (data.includeUsageData !== undefined) updateData.includeUsageData = data.includeUsageData
if (data.alertConfig !== undefined) updateData.alertConfig = data.alertConfig
@@ -257,6 +260,7 @@ export async function PUT(request: NextRequest, { params }: RouteParams) {
levelFilter: subscription.levelFilter,
triggerFilter: subscription.triggerFilter,
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookConfig: subscription.webhookConfig,

View File

@@ -92,6 +92,22 @@ function buildTestPayload(subscription: typeof workspaceNotificationSubscription
data.usage = { currentPeriodCost: 2.45, limit: 20, percentUsed: 12.25, isExceeded: false }
}
if (subscription.includeTraceSpans && subscription.notificationType === 'webhook') {
data.traceSpans = [
{
name: 'test-block',
startTime: timestamp,
endTime: timestamp + 150,
duration: 150,
status: 'success',
blockId: 'block_test_1',
blockType: 'agent',
blockName: 'Test Agent',
children: [],
},
]
}
return { payload, timestamp }
}

View File

@@ -83,6 +83,7 @@ const createNotificationSchema = z
levelFilter: levelFilterSchema.default(['info', 'error']),
triggerFilter: triggerFilterSchema.default([...CORE_TRIGGER_TYPES]),
includeFinalOutput: z.boolean().default(false),
includeTraceSpans: z.boolean().default(false),
includeRateLimits: z.boolean().default(false),
includeUsageData: z.boolean().default(false),
alertConfig: alertConfigSchema.optional(),
@@ -137,6 +138,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
levelFilter: workspaceNotificationSubscription.levelFilter,
triggerFilter: workspaceNotificationSubscription.triggerFilter,
includeFinalOutput: workspaceNotificationSubscription.includeFinalOutput,
includeTraceSpans: workspaceNotificationSubscription.includeTraceSpans,
includeRateLimits: workspaceNotificationSubscription.includeRateLimits,
includeUsageData: workspaceNotificationSubscription.includeUsageData,
webhookConfig: workspaceNotificationSubscription.webhookConfig,
@@ -220,7 +222,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
}
// Encrypt webhook secret if provided
let webhookConfig = data.webhookConfig || null
if (webhookConfig?.secret) {
const { encrypted } = await encryptSecret(webhookConfig.secret)
@@ -238,6 +239,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
levelFilter: data.levelFilter,
triggerFilter: data.triggerFilter,
includeFinalOutput: data.includeFinalOutput,
includeTraceSpans: data.includeTraceSpans,
includeRateLimits: data.includeRateLimits,
includeUsageData: data.includeUsageData,
alertConfig: data.alertConfig || null,
@@ -263,6 +265,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
levelFilter: subscription.levelFilter,
triggerFilter: subscription.triggerFilter,
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookConfig: subscription.webhookConfig,

View File

@@ -136,6 +136,7 @@ export function NotificationSettings({
levelFilter: ['info', 'error'] as LogLevel[],
triggerFilter: [...CORE_TRIGGER_TYPES] as CoreTriggerType[],
includeFinalOutput: false,
includeTraceSpans: false,
includeRateLimits: false,
includeUsageData: false,
webhookUrl: '',
@@ -202,6 +203,7 @@ export function NotificationSettings({
levelFilter: ['info', 'error'],
triggerFilter: [...CORE_TRIGGER_TYPES],
includeFinalOutput: false,
includeTraceSpans: false,
includeRateLimits: false,
includeUsageData: false,
webhookUrl: '',
@@ -420,6 +422,8 @@ export function NotificationSettings({
levelFilter: formData.levelFilter,
triggerFilter: formData.triggerFilter,
includeFinalOutput: formData.includeFinalOutput,
// Trace spans only available for webhooks (too large for email/Slack)
includeTraceSpans: activeTab === 'webhook' ? formData.includeTraceSpans : false,
includeRateLimits: formData.includeRateLimits,
includeUsageData: formData.includeUsageData,
alertConfig,
@@ -471,6 +475,7 @@ export function NotificationSettings({
levelFilter: subscription.levelFilter as LogLevel[],
triggerFilter: subscription.triggerFilter as CoreTriggerType[],
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookUrl: subscription.webhookConfig?.url || '',
@@ -826,6 +831,10 @@ export function NotificationSettings({
<Combobox
options={[
{ label: 'Final Output', value: 'includeFinalOutput' },
// Trace spans only available for webhooks (too large for email/Slack)
...(activeTab === 'webhook'
? [{ label: 'Trace Spans', value: 'includeTraceSpans' }]
: []),
{ label: 'Rate Limits', value: 'includeRateLimits' },
{ label: 'Usage Data', value: 'includeUsageData' },
]}
@@ -833,6 +842,7 @@ export function NotificationSettings({
multiSelectValues={
[
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && activeTab === 'webhook' && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]
@@ -841,6 +851,7 @@ export function NotificationSettings({
setFormData({
...formData,
includeFinalOutput: values.includes('includeFinalOutput'),
includeTraceSpans: values.includes('includeTraceSpans'),
includeRateLimits: values.includes('includeRateLimits'),
includeUsageData: values.includes('includeUsageData'),
})
@@ -849,11 +860,13 @@ export function NotificationSettings({
overlayContent={(() => {
const labels: Record<string, string> = {
includeFinalOutput: 'Final Output',
includeTraceSpans: 'Trace Spans',
includeRateLimits: 'Rate Limits',
includeUsageData: 'Usage Data',
}
const selected = [
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && activeTab === 'webhook' && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]

View File

@@ -2,29 +2,9 @@ import { memo, useEffect, useRef, useState } from 'react'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
/**
* Minimum delay between characters (fast catch-up mode)
* Character animation delay in milliseconds
*/
const MIN_DELAY = 1
/**
* Maximum delay between characters (when waiting for content)
*/
const MAX_DELAY = 12
/**
* Default delay when streaming normally
*/
const DEFAULT_DELAY = 4
/**
* How far behind (in characters) before we speed up
*/
const CATCH_UP_THRESHOLD = 20
/**
* How close to content before we slow down
*/
const SLOW_DOWN_THRESHOLD = 5
const CHARACTER_DELAY = 3
/**
* StreamingIndicator shows animated dots during message streaming
@@ -54,50 +34,21 @@ interface SmoothStreamingTextProps {
isStreaming: boolean
}
/**
* Calculates adaptive delay based on how far behind animation is from actual content
*
* @param displayedLength - Current displayed content length
* @param totalLength - Total available content length
* @returns Delay in milliseconds
*/
function calculateAdaptiveDelay(displayedLength: number, totalLength: number): number {
const charsRemaining = totalLength - displayedLength
if (charsRemaining > CATCH_UP_THRESHOLD) {
// Far behind - speed up to catch up
// Scale from MIN_DELAY to DEFAULT_DELAY based on how far behind
const catchUpFactor = Math.min(1, (charsRemaining - CATCH_UP_THRESHOLD) / 50)
return MIN_DELAY + (DEFAULT_DELAY - MIN_DELAY) * (1 - catchUpFactor)
}
if (charsRemaining <= SLOW_DOWN_THRESHOLD) {
// Close to content edge - slow down to feel natural
// The closer we are, the slower we go (up to MAX_DELAY)
const slowFactor = 1 - charsRemaining / SLOW_DOWN_THRESHOLD
return DEFAULT_DELAY + (MAX_DELAY - DEFAULT_DELAY) * slowFactor
}
// Normal streaming speed
return DEFAULT_DELAY
}
/**
* SmoothStreamingText component displays text with character-by-character animation
* Creates a smooth streaming effect for AI responses with adaptive speed
*
* Uses adaptive pacing: speeds up when catching up, slows down near content edge
* Creates a smooth streaming effect for AI responses
*
* @param props - Component props
* @returns Streaming text with smooth animation
*/
export const SmoothStreamingText = memo(
({ content, isStreaming }: SmoothStreamingTextProps) => {
const [displayedContent, setDisplayedContent] = useState('')
// Initialize with full content when not streaming to avoid flash on page load
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
const contentRef = useRef(content)
const rafRef = useRef<number | null>(null)
const indexRef = useRef(0)
const lastFrameTimeRef = useRef<number>(0)
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
// Initialize index based on streaming state
const indexRef = useRef(isStreaming ? 0 : content.length)
const isAnimatingRef = useRef(false)
useEffect(() => {
@@ -110,42 +61,33 @@ export const SmoothStreamingText = memo(
}
if (isStreaming) {
if (indexRef.current < content.length && !isAnimatingRef.current) {
isAnimatingRef.current = true
lastFrameTimeRef.current = performance.now()
const animateText = (timestamp: number) => {
if (indexRef.current < content.length) {
const animateText = () => {
const currentContent = contentRef.current
const currentIndex = indexRef.current
const elapsed = timestamp - lastFrameTimeRef.current
// Calculate adaptive delay based on how far behind we are
const delay = calculateAdaptiveDelay(currentIndex, currentContent.length)
if (elapsed >= delay) {
if (currentIndex < currentContent.length) {
const newDisplayed = currentContent.slice(0, currentIndex + 1)
setDisplayedContent(newDisplayed)
indexRef.current = currentIndex + 1
lastFrameTimeRef.current = timestamp
}
}
if (indexRef.current < currentContent.length) {
rafRef.current = requestAnimationFrame(animateText)
if (currentIndex < currentContent.length) {
const newDisplayed = currentContent.slice(0, currentIndex + 1)
setDisplayedContent(newDisplayed)
indexRef.current = currentIndex + 1
timeoutRef.current = setTimeout(animateText, CHARACTER_DELAY)
} else {
isAnimatingRef.current = false
}
}
rafRef.current = requestAnimationFrame(animateText)
} else if (indexRef.current < content.length && isAnimatingRef.current) {
// Animation already running, it will pick up new content automatically
if (!isAnimatingRef.current) {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
isAnimatingRef.current = true
animateText()
}
}
} else {
// Streaming ended - show full content immediately
if (rafRef.current) {
cancelAnimationFrame(rafRef.current)
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
setDisplayedContent(content)
indexRef.current = content.length
@@ -153,8 +95,8 @@ export const SmoothStreamingText = memo(
}
return () => {
if (rafRef.current) {
cancelAnimationFrame(rafRef.current)
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
isAnimatingRef.current = false
}
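Stripped of the surrounding refs, the replacement animation is a fixed-delay setTimeout chain instead of the old adaptive requestAnimationFrame pacing. A minimal standalone sketch of that pattern, with simplified names and the 3 ms delay from the diff (not the component itself):

// Illustrative sketch of fixed-delay character streaming.
const CHARACTER_DELAY = 3

function streamText(content: string, onUpdate: (shown: string) => void): () => void {
  let index = 0
  let timer: ReturnType<typeof setTimeout> | null = null

  const step = () => {
    if (index < content.length) {
      index += 1
      onUpdate(content.slice(0, index)) // reveal one more character
      timer = setTimeout(step, CHARACTER_DELAY)
    }
  }

  step()
  // Cleanup mirrors the effect teardown above.
  return () => {
    if (timer) clearTimeout(timer)
  }
}

A timeout chain at a constant delay is simpler than the catch-up/slow-down heuristics it replaces, which is presumably why the MIN_DELAY/MAX_DELAY/threshold constants were dropped.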

View File

@@ -46,12 +46,14 @@ interface SmoothThinkingTextProps {
*/
const SmoothThinkingText = memo(
({ content, isStreaming }: SmoothThinkingTextProps) => {
const [displayedContent, setDisplayedContent] = useState('')
// Initialize with full content when not streaming to avoid flash on page load
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
const [showGradient, setShowGradient] = useState(false)
const contentRef = useRef(content)
const textRef = useRef<HTMLDivElement>(null)
const rafRef = useRef<number | null>(null)
const indexRef = useRef(0)
// Initialize index based on streaming state
const indexRef = useRef(isStreaming ? 0 : content.length)
const lastFrameTimeRef = useRef<number>(0)
const isAnimatingRef = useRef(false)

View File

@@ -1952,7 +1952,12 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
}, [params])
// Skip rendering some internal tools
if (toolCall.name === 'checkoff_todo' || toolCall.name === 'mark_todo_in_progress') return null
if (
toolCall.name === 'checkoff_todo' ||
toolCall.name === 'mark_todo_in_progress' ||
toolCall.name === 'tool_search_tool_regex'
)
return null
// Special rendering for subagent tools - show as thinking text with tool calls at top level
const SUBAGENT_TOOLS = [

View File

@@ -32,13 +32,6 @@ function getModelIconComponent(modelValue: string) {
return <IconComponent className='h-3.5 w-3.5' />
}
/**
* Checks if a model should display the MAX badge
*/
function isMaxModel(modelValue: string): boolean {
return modelValue === 'claude-4.5-sonnet' || modelValue === 'claude-4.5-opus'
}
/**
* Model selector dropdown for choosing AI model.
* Displays model icon and label.
@@ -139,11 +132,6 @@ export function ModelSelector({ selectedModel, isNearTop, onModelSelect }: Model
>
{getModelIconComponent(option.value)}
<span>{option.label}</span>
{isMaxModel(option.value) && (
<Badge size='sm' className='ml-auto'>
MAX
</Badge>
)}
</PopoverItem>
))}
</PopoverScrollArea>

View File

@@ -238,8 +238,8 @@ export const MODEL_OPTIONS = [
{ value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
{ value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
{ value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },
{ value: 'gpt-5.1-codex', label: 'GPT 5.1 Codex' },
{ value: 'gpt-5.1-medium', label: 'GPT 5.1 Medium' },
{ value: 'gpt-5.2-codex', label: 'GPT 5.2 Codex' },
{ value: 'gpt-5.2-pro', label: 'GPT 5.2 Pro' },
{ value: 'gemini-3-pro', label: 'Gemini 3 Pro' },
] as const

View File

@@ -52,7 +52,10 @@ function isDefaultDescription(desc: string | null | undefined, workflowName: str
if (!desc) return true
const normalized = desc.toLowerCase().trim()
return (
normalized === '' || normalized === 'new workflow' || normalized === workflowName.toLowerCase()
normalized === '' ||
normalized === 'new workflow' ||
normalized === 'your first workflow - start building here!' ||
normalized === workflowName.toLowerCase()
)
}
@@ -685,9 +688,31 @@ console.log(data);`
{/* Endpoint URL (shown when agent exists) */}
{existingAgent && endpoint && (
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
URL
</Label>
<div className='mb-[6.5px] flex items-center justify-between'>
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
URL
</Label>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='ghost'
onClick={() => {
navigator.clipboard.writeText(endpoint)
setUrlCopied(true)
setTimeout(() => setUrlCopied(false), 2000)
}}
aria-label='Copy URL'
className='!p-1.5 -my-1.5'
>
{urlCopied ? <Check className='h-3 w-3' /> : <Clipboard className='h-3 w-3' />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{urlCopied ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
</div>
<div className='relative flex items-stretch overflow-hidden rounded-[4px] border border-[var(--border-1)]'>
<div className='flex items-center whitespace-nowrap bg-[var(--surface-5)] pr-[6px] pl-[8px] font-medium text-[var(--text-secondary)] text-sm dark:bg-[var(--surface-5)]'>
{baseUrl.replace(/^https?:\/\//, '')}/api/a2a/serve/
@@ -696,30 +721,8 @@ console.log(data);`
<Input
value={existingAgent.id}
readOnly
className='rounded-none border-0 pr-[32px] pl-0 text-[var(--text-tertiary)] shadow-none'
className='rounded-none border-0 pl-0 text-[var(--text-tertiary)] shadow-none'
/>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
onClick={() => {
navigator.clipboard.writeText(endpoint)
setUrlCopied(true)
setTimeout(() => setUrlCopied(false), 2000)
}}
className='-translate-y-1/2 absolute top-1/2 right-2'
>
{urlCopied ? (
<Check className='h-3 w-3 text-[var(--brand-tertiary-2)]' />
) : (
<Clipboard className='h-3 w-3 text-[var(--text-tertiary)]' />
)}
</button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{urlCopied ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
</div>
</div>
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>

View File

@@ -415,7 +415,7 @@ export function ChatDeploy({
>
Cancel
</Button>
<Button variant='destructive' onClick={handleDelete} disabled={isDeleting}>
<Button variant='default' onClick={handleDelete} disabled={isDeleting}>
{isDeleting ? 'Deleting...' : 'Delete'}
</Button>
</ModalFooter>
@@ -532,7 +532,8 @@ function IdentifierInput({
</div>
) : (
isValid &&
value && (
value &&
value !== originalIdentifier && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<div className='-translate-y-1/2 absolute top-1/2 right-2'>

View File

@@ -138,10 +138,12 @@ export function McpDeploy({
const [toolName, setToolName] = useState(() => sanitizeToolName(workflowName))
const [toolDescription, setToolDescription] = useState(() => {
const normalizedDesc = workflowDescription?.toLowerCase().trim()
const isDefaultDescription =
!workflowDescription ||
workflowDescription === workflowName ||
workflowDescription.toLowerCase() === 'new workflow'
normalizedDesc === 'new workflow' ||
normalizedDesc === 'your first workflow - start building here!'
return isDefaultDescription ? '' : workflowDescription
})
@@ -193,10 +195,12 @@ export function McpDeploy({
setToolName(toolInfo.tool.toolName)
const loadedDescription = toolInfo.tool.toolDescription || ''
const normalizedLoadedDesc = loadedDescription.toLowerCase().trim()
const isDefaultDescription =
!loadedDescription ||
loadedDescription === workflowName ||
loadedDescription.toLowerCase() === 'new workflow'
normalizedLoadedDesc === 'new workflow' ||
normalizedLoadedDesc === 'your first workflow - start building here!'
setToolDescription(isDefaultDescription ? '' : loadedDescription)
const schema = toolInfo.tool.parameterSchema as Record<string, unknown> | undefined

View File

@@ -734,7 +734,7 @@ export function DeployModal({
)}
</ModalTabsContent> */}
<ModalTabsContent value='mcp'>
<ModalTabsContent value='mcp' className='h-full'>
{workflowId && (
<McpDeploy
workflowId={workflowId}
@@ -800,7 +800,7 @@ export function DeployModal({
{chatExists && (
<Button
type='button'
variant='destructive'
variant='default'
onClick={handleChatDelete}
disabled={chatSubmitting}
>

View File

@@ -125,10 +125,16 @@ export function FieldFormat({
}
/**
* Removes a field by ID, preventing removal of the last field
* Removes a field by ID, or clears it if it's the last field
*/
const removeField = (id: string) => {
if (isReadOnly || fields.length === 1) return
if (isReadOnly) return
if (fields.length === 1) {
setStoreValue([createDefaultField()])
return
}
setStoreValue(fields.filter((field) => field.id !== id))
}
@@ -273,7 +279,7 @@ export function FieldFormat({
<Button
variant='ghost'
onClick={() => removeField(field.id)}
disabled={isReadOnly || fields.length === 1}
disabled={isReadOnly}
className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
>
<Trash className='h-[14px] w-[14px]' />

View File

@@ -414,7 +414,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
Cancel
</Button>
<Button
variant='ghost'
variant='destructive'
onClick={handleDeleteKey}
disabled={deleteApiKeyMutation.isPending}
>

View File

@@ -268,14 +268,7 @@ export function ContextMenu({
height: '1px',
}}
/>
<PopoverContent
ref={menuRef}
align='start'
side='bottom'
sideOffset={4}
onPointerDownOutside={(e) => e.preventDefault()}
onInteractOutside={(e) => e.preventDefault()}
>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
{/* Back button - shown only when in a folder */}
<PopoverBackButton />

View File

@@ -18,6 +18,17 @@ const TREE_SPACING = {
INDENT_PER_LEVEL: 20,
} as const
function compareByOrder<T extends { sortOrder: number; createdAt?: Date; id: string }>(
a: T,
b: T
): number {
if (a.sortOrder !== b.sortOrder) return a.sortOrder - b.sortOrder
const timeA = a.createdAt?.getTime() ?? 0
const timeB = b.createdAt?.getTime() ?? 0
if (timeA !== timeB) return timeA - timeB
return a.id.localeCompare(b.id)
}
interface WorkflowListProps {
regularWorkflows: WorkflowMetadata[]
isLoading?: boolean
@@ -97,7 +108,7 @@ export function WorkflowList({
{} as Record<string, WorkflowMetadata[]>
)
for (const folderId of Object.keys(grouped)) {
grouped[folderId].sort((a, b) => a.sortOrder - b.sortOrder)
grouped[folderId].sort(compareByOrder)
}
return grouped
}, [regularWorkflows])
@@ -208,6 +219,7 @@ export function WorkflowList({
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt?: Date
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const childFolder of folder.children) {
@@ -215,6 +227,7 @@ export function WorkflowList({
type: 'folder',
id: childFolder.id,
sortOrder: childFolder.sortOrder,
createdAt: childFolder.createdAt,
data: childFolder,
})
}
@@ -223,10 +236,11 @@ export function WorkflowList({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
data: workflow,
})
}
childItems.sort((a, b) => a.sortOrder - b.sortOrder)
childItems.sort(compareByOrder)
return (
<div key={folder.id} className='relative'>
@@ -294,20 +308,28 @@ export function WorkflowList({
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt?: Date
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const folder of folderTree) {
items.push({ type: 'folder', id: folder.id, sortOrder: folder.sortOrder, data: folder })
items.push({
type: 'folder',
id: folder.id,
sortOrder: folder.sortOrder,
createdAt: folder.createdAt,
data: folder,
})
}
for (const workflow of rootWorkflows) {
items.push({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
data: workflow,
})
}
return items.sort((a, b) => a.sortOrder - b.sortOrder)
return items.sort(compareByOrder)
}, [folderTree, rootWorkflows])
const hasRootItems = rootItems.length > 0
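With the createdAt and id tie-breaks added above, siblings that share a sortOrder no longer shuffle between refreshes. A quick illustration of compareByOrder with made-up data:

// Illustrative only: sample items, not real workflows.
const items = [
  { id: 'b', sortOrder: 0, createdAt: new Date('2026-01-10') },
  { id: 'a', sortOrder: 0, createdAt: new Date('2026-01-12') },
  { id: 'c', sortOrder: -1, createdAt: new Date('2026-01-14') },
]
items.sort(compareByOrder)
// -> 'c' (lowest sortOrder), then 'b' (older createdAt), then 'a'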

View File

@@ -211,10 +211,11 @@ export function WorkspaceHeader({
}
/**
* Close context menu
* Close context menu and the workspace dropdown
*/
const closeContextMenu = () => {
setIsContextMenuOpen(false)
setIsWorkspaceMenuOpen(false)
}
/**

View File

@@ -133,7 +133,20 @@ export function useDragDrop() {
[]
)
type SiblingItem = { type: 'folder' | 'workflow'; id: string; sortOrder: number }
type SiblingItem = {
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt: Date
}
const compareSiblingItems = (a: SiblingItem, b: SiblingItem): number => {
if (a.sortOrder !== b.sortOrder) return a.sortOrder - b.sortOrder
const timeA = a.createdAt.getTime()
const timeB = b.createdAt.getTime()
if (timeA !== timeB) return timeA - timeB
return a.id.localeCompare(b.id)
}
const getDestinationFolderId = useCallback((indicator: DropIndicator): string | null => {
return indicator.position === 'inside'
@@ -202,11 +215,21 @@ export function useDragDrop() {
return [
...Object.values(currentFolders)
.filter((f) => f.parentId === folderId)
.map((f) => ({ type: 'folder' as const, id: f.id, sortOrder: f.sortOrder })),
.map((f) => ({
type: 'folder' as const,
id: f.id,
sortOrder: f.sortOrder,
createdAt: f.createdAt,
})),
...Object.values(currentWorkflows)
.filter((w) => w.folderId === folderId)
.map((w) => ({ type: 'workflow' as const, id: w.id, sortOrder: w.sortOrder })),
].sort((a, b) => a.sortOrder - b.sortOrder)
.map((w) => ({
type: 'workflow' as const,
id: w.id,
sortOrder: w.sortOrder,
createdAt: w.createdAt,
})),
].sort(compareSiblingItems)
}, [])
const setNormalizedDropIndicator = useCallback(
@@ -299,8 +322,9 @@ export function useDragDrop() {
type: 'workflow' as const,
id,
sortOrder: currentWorkflows[id]?.sortOrder ?? 0,
createdAt: currentWorkflows[id]?.createdAt ?? new Date(),
}))
.sort((a, b) => a.sortOrder - b.sortOrder)
.sort(compareSiblingItems)
const insertAt = calculateInsertIndex(remaining, indicator)
@@ -369,7 +393,12 @@ export function useDragDrop() {
const newOrder: SiblingItem[] = [
...remaining.slice(0, insertAt),
{ type: 'folder', id: draggedFolderId, sortOrder: 0 },
{
type: 'folder',
id: draggedFolderId,
sortOrder: 0,
createdAt: draggedFolder?.createdAt ?? new Date(),
},
...remaining.slice(insertAt),
]

View File

@@ -20,7 +20,7 @@ import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { RateLimiter } from '@/lib/core/rate-limiter'
import { decryptSecret } from '@/lib/core/security/encryption'
import { getBaseUrl } from '@/lib/core/utils/urls'
import type { WorkflowExecutionLog } from '@/lib/logs/types'
import type { TraceSpan, WorkflowExecutionLog } from '@/lib/logs/types'
import { sendEmail } from '@/lib/messaging/email/mailer'
import type { AlertConfig } from '@/lib/notifications/alert-rules'
@@ -50,6 +50,7 @@ interface NotificationPayload {
totalDurationMs: number
cost?: Record<string, unknown>
finalOutput?: unknown
traceSpans?: TraceSpan[]
rateLimits?: EmailRateLimitsData
usage?: EmailUsageData
}
@@ -98,6 +99,15 @@ async function buildPayload(
payload.data.finalOutput = executionData.finalOutput
}
// Trace spans only included for webhooks (too large for email/Slack)
if (
subscription.includeTraceSpans &&
subscription.notificationType === 'webhook' &&
executionData.traceSpans
) {
payload.data.traceSpans = executionData.traceSpans as TraceSpan[]
}
if (subscription.includeRateLimits && userId) {
try {
const userSubscription = await getHighestPrioritySubscription(userId)

View File

@@ -22,7 +22,7 @@ export function WelcomeEmail({ userName }: WelcomeEmailProps) {
workflows in minutes.
</Text>
<Link href={`${baseUrl}/w`} style={{ textDecoration: 'none' }}>
<Link href={`${baseUrl}/login`} style={{ textDecoration: 'none' }}>
<Text style={baseStyles.button}>Get Started</Text>
</Link>
@@ -30,13 +30,21 @@ export function WelcomeEmail({ userName }: WelcomeEmailProps) {
If you have any questions or feedback, just reply to this email. I read every message!
</Text>
<Text style={baseStyles.paragraph}>
Want to chat?{' '}
<Link href={`${baseUrl}/team`} style={baseStyles.link}>
Schedule a call
</Link>{' '}
with our team.
</Text>
<Text style={baseStyles.paragraph}>- Emir, co-founder of {brand.name}</Text>
{/* Divider */}
<div style={baseStyles.divider} />
<Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
You're on the free plan with $10 in credits to get started.
You're on the free plan with $20 in credits to get started.
</Text>
</EmailLayout>
)

View File

@@ -33,7 +33,7 @@ export function PlanWelcomeEmail({ planName, userName, loginLink }: PlanWelcomeE
<Text style={baseStyles.paragraph}>
Want help getting started?{' '}
<Link href='https://cal.com/emirkarabeg/sim-team' style={baseStyles.link}>
<Link href={`${baseUrl}/team`} style={baseStyles.link}>
Schedule a call
</Link>{' '}
with our team.

View File

@@ -154,68 +154,18 @@ export function useLogDetail(logId: string | undefined) {
})
}
interface DashboardFilters {
timeRange: TimeRange
startDate?: string
endDate?: string
level: string
workflowIds: string[]
folderIds: string[]
triggers: string[]
searchQuery: string
segmentCount?: number
}
/**
* Fetches aggregated dashboard statistics from the server.
* Uses SQL aggregation for efficient computation without row limits.
* Fetches dashboard stats from the server-side aggregation endpoint.
* Uses SQL aggregation for efficient computation without arbitrary limits.
*/
async function fetchDashboardStats(
workspaceId: string,
filters: DashboardFilters
filters: Omit<LogFilters, 'limit'>
): Promise<DashboardStatsResponse> {
const params = new URLSearchParams()
params.set('workspaceId', workspaceId)
if (filters.segmentCount) {
params.set('segmentCount', filters.segmentCount.toString())
}
if (filters.level !== 'all') {
params.set('level', filters.level)
}
if (filters.triggers.length > 0) {
params.set('triggers', filters.triggers.join(','))
}
if (filters.workflowIds.length > 0) {
params.set('workflowIds', filters.workflowIds.join(','))
}
if (filters.folderIds.length > 0) {
params.set('folderIds', filters.folderIds.join(','))
}
const startDate = getStartDateFromTimeRange(filters.timeRange, filters.startDate)
if (startDate) {
params.set('startDate', startDate.toISOString())
}
const endDate = getEndDateFromTimeRange(filters.timeRange, filters.endDate)
if (endDate) {
params.set('endDate', endDate.toISOString())
}
if (filters.searchQuery.trim()) {
const parsedQuery = parseQuery(filters.searchQuery.trim())
const searchParams = queryToApiParams(parsedQuery)
for (const [key, value] of Object.entries(searchParams)) {
params.set(key, value)
}
}
applyFilterParams(params, filters)
const response = await fetch(`/api/logs/stats?${params.toString()}`)
@@ -232,13 +182,12 @@ interface UseDashboardStatsOptions {
}
/**
* Hook for fetching aggregated dashboard statistics.
* Uses server-side SQL aggregation for efficient computation
* without any row limits - all matching logs are included in the stats.
* Hook for fetching dashboard stats using server-side aggregation.
* No arbitrary limits - uses SQL aggregation for accurate metrics.
*/
export function useDashboardStats(
workspaceId: string | undefined,
filters: DashboardFilters,
filters: Omit<LogFilters, 'limit'>,
options?: UseDashboardStatsOptions
) {
return useQuery({

View File

@@ -61,6 +61,7 @@ export interface NotificationSubscription {
levelFilter: LogLevel[]
triggerFilter: TriggerType[]
includeFinalOutput: boolean
includeTraceSpans: boolean
includeRateLimits: boolean
includeUsageData: boolean
webhookConfig?: WebhookConfig | null
@@ -105,6 +106,7 @@ interface CreateNotificationParams {
levelFilter: LogLevel[]
triggerFilter: TriggerType[]
includeFinalOutput: boolean
includeTraceSpans: boolean
includeRateLimits: boolean
includeUsageData: boolean
alertConfig?: AlertConfig | null

View File

@@ -194,7 +194,7 @@ export function useCreateWorkflow() {
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
sortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1) + 1
sortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1) - 1
}
return {
@@ -294,7 +294,7 @@ export function useDuplicateWorkflowMutation() {
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
const maxSortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1)
const minSortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1)
return {
id: tempId,
@@ -305,7 +305,7 @@ export function useDuplicateWorkflowMutation() {
color: variables.color,
workspaceId: variables.workspaceId,
folderId: targetFolderId,
sortOrder: maxSortOrder + 1,
sortOrder: minSortOrder - 1,
}
}
)
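Both the server routes and these optimistic updates now seed new workflows with min(sortOrder) - 1 instead of max + 1, so under the ascending order-by a freshly created or duplicated workflow lands at the top of its folder rather than the bottom. A tiny sketch of the same calculation with illustrative values:

// Illustrative only.
const siblings = [{ sortOrder: -2 }, { sortOrder: 0 }, { sortOrder: 3 }]
const minSortOrder = siblings.reduce((min, w) => Math.min(min, w.sortOrder), 1) // -2
const nextSortOrder = minSortOrder - 1 // -3 sorts before every existing sibling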

View File

@@ -426,7 +426,8 @@ export const auth = betterAuth({
},
emailVerification: {
autoSignInAfterVerification: true,
afterEmailVerification: async (user) => {
// onEmailVerification is called by the emailOTP plugin when email is verified via OTP
onEmailVerification: async (user) => {
if (isHosted && user.email) {
try {
const html = await renderWelcomeEmail(user.name || undefined)
@@ -441,11 +442,11 @@ export const auth = betterAuth({
emailType: 'transactional',
})
logger.info('[emailVerification.afterEmailVerification] Welcome email sent', {
logger.info('[emailVerification.onEmailVerification] Welcome email sent', {
userId: user.id,
})
} catch (error) {
logger.error('[emailVerification.afterEmailVerification] Failed to send welcome email', {
logger.error('[emailVerification.onEmailVerification] Failed to send welcome email', {
userId: user.id,
error,
})
@@ -456,7 +457,7 @@ export const auth = betterAuth({
emailAndPassword: {
enabled: true,
requireEmailVerification: isEmailVerificationEnabled,
sendVerificationOnSignUp: false,
sendVerificationOnSignUp: isEmailVerificationEnabled, // Auto-send verification OTP on signup when verification is required
throwOnMissingCredentials: true,
throwOnInvalidCredentials: true,
sendResetPassword: async ({ user, url, token }, request) => {

View File

@@ -77,6 +77,9 @@ export interface SendMessageRequest {
| 'gpt-5.1-high'
| 'gpt-5-codex'
| 'gpt-5.1-codex'
| 'gpt-5.2'
| 'gpt-5.2-codex'
| 'gpt-5.2-pro'
| 'gpt-4o'
| 'gpt-4.1'
| 'o3'

View File

@@ -19,6 +19,7 @@ vi.mock('@/lib/core/config/env', () =>
vi.mock('@/lib/core/config/feature-flags', () => ({
isDev: false,
isReactGrabEnabled: false,
}))
import {

View File

@@ -1,5 +1,5 @@
import { env, getEnv } from '../config/env'
import { isDev } from '../config/feature-flags'
import { isDev, isReactGrabEnabled } from '../config/feature-flags'
/**
* Content Security Policy (CSP) configuration builder
@@ -40,6 +40,7 @@ export const buildTimeCSPDirectives: CSPDirectives = {
'https://*.google.com',
'https://apis.google.com',
'https://assets.onedollarstats.com',
...(isReactGrabEnabled ? ['https://unpkg.com'] : []),
],
'style-src': ["'self'", "'unsafe-inline'", 'https://fonts.googleapis.com'],
@@ -166,10 +167,11 @@ export function generateRuntimeCSP(): string {
const dynamicDomainsStr = uniqueDynamicDomains.join(' ')
const brandLogoDomain = brandLogoDomains[0] || ''
const brandFaviconDomain = brandFaviconDomains[0] || ''
const reactGrabScript = isReactGrabEnabled ? 'https://unpkg.com' : ''
return `
default-src 'self';
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com;
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com ${reactGrabScript};
style-src 'self' 'unsafe-inline' https://fonts.googleapis.com;
img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com https://cdn.discordapp.com https://*.githubusercontent.com https://*.s3.amazonaws.com https://s3.amazonaws.com https://*.amazonaws.com https://*.blob.core.windows.net https://github.com/* https://collector.onedollarstats.com ${brandLogoDomain} ${brandFaviconDomain};
media-src 'self' blob:;

View File

@@ -25,6 +25,7 @@ function prepareLogData(
log: WorkflowExecutionLog,
subscription: {
includeFinalOutput: boolean
includeTraceSpans: boolean
}
) {
const preparedLog = { ...log, executionData: {} as Record<string, unknown> }
@@ -37,6 +38,10 @@ function prepareLogData(
webhookData.finalOutput = data.finalOutput
}
if (subscription.includeTraceSpans && data.traceSpans) {
webhookData.traceSpans = data.traceSpans
}
preparedLog.executionData = webhookData
}

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { and, eq, isNull, min } from 'drizzle-orm'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import type { Variable } from '@/stores/panel/variables/types'
import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
@@ -26,6 +26,7 @@ interface DuplicateWorkflowResult {
color: string
workspaceId: string
folderId: string | null
sortOrder: number
blocksCount: number
edgesCount: number
subflowsCount: number
@@ -88,12 +89,29 @@ export async function duplicateWorkflow(
throw new Error('Source workflow not found or access denied')
}
const targetWorkspaceId = workspaceId || source.workspaceId
const targetFolderId = folderId !== undefined ? folderId : source.folderId
const folderCondition = targetFolderId
? eq(workflow.folderId, targetFolderId)
: isNull(workflow.folderId)
const [minResult] = await tx
.select({ minOrder: min(workflow.sortOrder) })
.from(workflow)
.where(
targetWorkspaceId
? and(eq(workflow.workspaceId, targetWorkspaceId), folderCondition)
: and(eq(workflow.userId, userId), folderCondition)
)
const sortOrder = (minResult?.minOrder ?? 1) - 1
// Create the new workflow first (required for foreign key constraints)
await tx.insert(workflow).values({
id: newWorkflowId,
userId,
workspaceId: workspaceId || source.workspaceId,
folderId: folderId !== undefined ? folderId : source.folderId,
workspaceId: targetWorkspaceId,
folderId: targetFolderId,
sortOrder,
name,
description: description || source.description,
color: color || source.color,
@@ -286,7 +304,8 @@ export async function duplicateWorkflow(
description: description || source.description,
color: color || source.color,
workspaceId: finalWorkspaceId,
folderId: folderId !== undefined ? folderId : source.folderId,
folderId: targetFolderId,
sortOrder,
blocksCount: sourceBlocks.length,
edgesCount: sourceEdges.length,
subflowsCount: sourceSubflows.length,

View File

@@ -269,6 +269,11 @@ const nextConfig: NextConfig = {
source: '/github',
destination: 'https://github.com/simstudioai/sim',
permanent: false,
},
{
source: '/team',
destination: 'https://cal.com/emirkarabeg/sim-team',
permanent: false,
}
)

View File

@@ -422,7 +422,8 @@ function abortAllInProgressTools(set: any, get: () => CopilotStore) {
* Loads messages from DB for UI rendering.
* Messages are stored exactly as they render, so we just need to:
* 1. Register client tool instances for any tool calls
* 2. Return the messages as-is
* 2. Clear any streaming flags (messages loaded from DB are never actively streaming)
* 3. Return the messages
*/
function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
try {
@@ -438,23 +439,54 @@ function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
}
}
// Register client tool instances for all tool calls so they can be looked up
// Register client tool instances and clear streaming flags for all tool calls
for (const message of messages) {
if (message.contentBlocks) {
for (const block of message.contentBlocks as any[]) {
if (block?.type === 'tool_call' && block.toolCall) {
registerToolCallInstances(block.toolCall)
clearStreamingFlags(block.toolCall)
}
}
}
// Also clear from toolCalls array (legacy format)
if (message.toolCalls) {
for (const toolCall of message.toolCalls) {
clearStreamingFlags(toolCall)
}
}
}
// Return messages as-is - they're already in the correct format for rendering
return messages
} catch {
return messages
}
}
/**
* Recursively clears streaming flags from a tool call and its nested subagent tool calls.
* This ensures messages loaded from DB don't appear to be streaming.
*/
function clearStreamingFlags(toolCall: any): void {
if (!toolCall) return
// Always set subAgentStreaming to false - messages loaded from DB are never streaming
toolCall.subAgentStreaming = false
// Clear nested subagent tool calls
if (Array.isArray(toolCall.subAgentBlocks)) {
for (const block of toolCall.subAgentBlocks) {
if (block?.type === 'subagent_tool_call' && block.toolCall) {
clearStreamingFlags(block.toolCall)
}
}
}
if (Array.isArray(toolCall.subAgentToolCalls)) {
for (const subTc of toolCall.subAgentToolCalls) {
clearStreamingFlags(subTc)
}
}
}
/**
* Recursively registers client tool instances for a tool call and its nested subagent tool calls.
*/

View File

@@ -106,6 +106,9 @@ export interface CopilotState {
| 'gpt-5.1-high'
| 'gpt-5-codex'
| 'gpt-5.1-codex'
| 'gpt-5.2'
| 'gpt-5.2-codex'
| 'gpt-5.2-pro'
| 'gpt-4o'
| 'gpt-4.1'
| 'o3'

View File

@@ -32,5 +32,5 @@
"trigger.config.ts",
".next/dev/types/**/*.ts"
],
"exclude": ["node_modules"]
"exclude": ["node_modules", "vitest.config.ts", "vitest.setup.ts"]
}

bun.lock (473 changed lines): file diff suppressed because it is too large.