Mirror of https://github.com/simstudioai/sim.git
Synced 2026-02-12 15:34:58 -05:00

Compare commits: feat/creat ... feat/smart (13 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 311c4d38f3 |  |
|  | e7abcd34df |  |
|  | 433552019e |  |
|  | f733b8dd88 |  |
|  | 76bd405293 |  |
|  | c22bd2caaa |  |
|  | 462aa15341 |  |
|  | 52aff4d60b |  |
|  | 3a3bddd6f8 |  |
|  | 639d50d6b9 |  |
|  | cec74e09c2 |  |
|  | d5a756c9f2 |  |
|  | f3e994baf0 |  |

@@ -1,215 +0,0 @@
-/**
- * POST /api/attribution
- *
- * Automatic UTM-based referral attribution for new signups.
- *
- * Reads the `sim_utm` cookie (set by proxy on auth pages), verifies the user
- * account was created after the cookie was set, matches a campaign by UTM
- * specificity, and atomically inserts an attribution record + applies bonus credits.
- *
- * Idempotent — the unique constraint on `userId` prevents double-attribution.
- */
-
-import { db } from '@sim/db'
-import { referralAttribution, referralCampaigns, user, userStats } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { eq } from 'drizzle-orm'
-import { nanoid } from 'nanoid'
-import { cookies } from 'next/headers'
-import { NextResponse } from 'next/server'
-import { z } from 'zod'
-import { getSession } from '@/lib/auth'
-import { applyBonusCredits } from '@/lib/billing/credits/bonus'
-
-const logger = createLogger('AttributionAPI')
-
-const COOKIE_NAME = 'sim_utm'
-const CLOCK_DRIFT_TOLERANCE_MS = 60 * 1000
-
-const UtmCookieSchema = z.object({
-  utm_source: z.string().optional(),
-  utm_medium: z.string().optional(),
-  utm_campaign: z.string().optional(),
-  utm_content: z.string().optional(),
-  referrer_url: z.string().optional(),
-  landing_page: z.string().optional(),
-  created_at: z.string().min(1),
-})
-
-/**
- * Finds the most specific active campaign matching the given UTM params.
- * Null fields on a campaign act as wildcards. Ties broken by newest campaign.
- */
-async function findMatchingCampaign(utmData: z.infer<typeof UtmCookieSchema>) {
-  const campaigns = await db
-    .select()
-    .from(referralCampaigns)
-    .where(eq(referralCampaigns.isActive, true))
-
-  let bestMatch: (typeof campaigns)[number] | null = null
-  let bestScore = -1
-
-  for (const campaign of campaigns) {
-    let score = 0
-    let mismatch = false
-
-    const fields = [
-      { campaignVal: campaign.utmSource, utmVal: utmData.utm_source },
-      { campaignVal: campaign.utmMedium, utmVal: utmData.utm_medium },
-      { campaignVal: campaign.utmCampaign, utmVal: utmData.utm_campaign },
-      { campaignVal: campaign.utmContent, utmVal: utmData.utm_content },
-    ] as const
-
-    for (const { campaignVal, utmVal } of fields) {
-      if (campaignVal === null) continue
-      if (campaignVal === utmVal) {
-        score++
-      } else {
-        mismatch = true
-        break
-      }
-    }
-
-    if (!mismatch && score > 0) {
-      if (
-        score > bestScore ||
-        (score === bestScore &&
-          bestMatch &&
-          campaign.createdAt.getTime() > bestMatch.createdAt.getTime())
-      ) {
-        bestScore = score
-        bestMatch = campaign
-      }
-    }
-  }
-
-  return bestMatch
-}
-
-export async function POST() {
-  try {
-    const session = await getSession()
-    if (!session?.user?.id) {
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const cookieStore = await cookies()
-    const utmCookie = cookieStore.get(COOKIE_NAME)
-    if (!utmCookie?.value) {
-      return NextResponse.json({ attributed: false, reason: 'no_utm_cookie' })
-    }
-
-    let utmData: z.infer<typeof UtmCookieSchema>
-    try {
-      let decoded: string
-      try {
-        decoded = decodeURIComponent(utmCookie.value)
-      } catch {
-        decoded = utmCookie.value
-      }
-      utmData = UtmCookieSchema.parse(JSON.parse(decoded))
-    } catch {
-      logger.warn('Failed to parse UTM cookie', { userId: session.user.id })
-      cookieStore.delete(COOKIE_NAME)
-      return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
-    }
-
-    const cookieCreatedAt = Number(utmData.created_at)
-    if (!Number.isFinite(cookieCreatedAt)) {
-      logger.warn('UTM cookie has invalid created_at timestamp', { userId: session.user.id })
-      cookieStore.delete(COOKIE_NAME)
-      return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
-    }
-
-    const userRows = await db
-      .select({ createdAt: user.createdAt })
-      .from(user)
-      .where(eq(user.id, session.user.id))
-      .limit(1)
-
-    if (userRows.length === 0) {
-      return NextResponse.json({ error: 'User not found' }, { status: 404 })
-    }
-
-    const userCreatedAt = userRows[0].createdAt.getTime()
-    if (userCreatedAt < cookieCreatedAt - CLOCK_DRIFT_TOLERANCE_MS) {
-      logger.info('User account predates UTM cookie, skipping attribution', {
-        userId: session.user.id,
-        userCreatedAt: new Date(userCreatedAt).toISOString(),
-        cookieCreatedAt: new Date(cookieCreatedAt).toISOString(),
-      })
-      cookieStore.delete(COOKIE_NAME)
-      return NextResponse.json({ attributed: false, reason: 'account_predates_cookie' })
-    }
-
-    const matchedCampaign = await findMatchingCampaign(utmData)
-    if (!matchedCampaign) {
-      cookieStore.delete(COOKIE_NAME)
-      return NextResponse.json({ attributed: false, reason: 'no_matching_campaign' })
-    }
-
-    const bonusAmount = Number(matchedCampaign.bonusCreditAmount)
-
-    let attributed = false
-    await db.transaction(async (tx) => {
-      const [existingStats] = await tx
-        .select({ id: userStats.id })
-        .from(userStats)
-        .where(eq(userStats.userId, session.user.id))
-        .limit(1)
-
-      if (!existingStats) {
-        await tx.insert(userStats).values({
-          id: nanoid(),
-          userId: session.user.id,
-        })
-      }
-
-      const result = await tx
-        .insert(referralAttribution)
-        .values({
-          id: nanoid(),
-          userId: session.user.id,
-          campaignId: matchedCampaign.id,
-          utmSource: utmData.utm_source || null,
-          utmMedium: utmData.utm_medium || null,
-          utmCampaign: utmData.utm_campaign || null,
-          utmContent: utmData.utm_content || null,
-          referrerUrl: utmData.referrer_url || null,
-          landingPage: utmData.landing_page || null,
-          bonusCreditAmount: bonusAmount.toString(),
-        })
-        .onConflictDoNothing({ target: referralAttribution.userId })
-        .returning({ id: referralAttribution.id })
-
-      if (result.length > 0) {
-        await applyBonusCredits(session.user.id, bonusAmount, tx)
-        attributed = true
-      }
-    })
-
-    if (attributed) {
-      logger.info('Referral attribution created and bonus credits applied', {
-        userId: session.user.id,
-        campaignId: matchedCampaign.id,
-        campaignName: matchedCampaign.name,
-        utmSource: utmData.utm_source,
-        utmCampaign: utmData.utm_campaign,
-        utmContent: utmData.utm_content,
-        bonusAmount,
-      })
-    } else {
-      logger.info('User already attributed, skipping', { userId: session.user.id })
-    }
-
-    cookieStore.delete(COOKIE_NAME)
-
-    return NextResponse.json({
-      attributed,
-      bonusAmount: attributed ? bonusAmount : undefined,
-    })
-  } catch (error) {
-    logger.error('Attribution error', { error })
-    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
-  }
-}
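The removed route above returns `{ attributed, bonusAmount? }` on success and `{ attributed: false, reason }` otherwise. A minimal sketch of how a client might call it after signup; only the path `/api/attribution` and the response fields come from the route itself, the call site and logging are assumptions:

```typescript
// Hypothetical post-signup hook; everything except the endpoint and response shape is illustrative.
async function claimUtmAttribution(): Promise<void> {
  const res = await fetch('/api/attribution', { method: 'POST' })
  if (!res.ok) return // 401 when unauthenticated, 404/500 on server-side failures

  const data: { attributed: boolean; bonusAmount?: number; reason?: string } = await res.json()
  if (data.attributed) {
    console.log(`Bonus credits applied: $${data.bonusAmount}`)
  } else {
    console.log(`No attribution applied: ${data.reason ?? 'already attributed'}`)
  }
}
```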
@@ -1,145 +1,81 @@
|
|||||||
import { db } from '@sim/db'
|
|
||||||
import { settings } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { getSession } from '@/lib/auth'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
|
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
const logger = createLogger('CopilotAutoAllowedToolsAPI')
|
const logger = createLogger('CopilotAutoAllowedToolsAPI')
|
||||||
|
|
||||||
/**
|
function copilotHeaders(): HeadersInit {
|
||||||
* GET - Fetch user's auto-allowed integration tools
|
const headers: Record<string, string> = {
|
||||||
*/
|
'Content-Type': 'application/json',
|
||||||
export async function GET() {
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
|
|
||||||
const [userSettings] = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (userSettings) {
|
|
||||||
const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
|
|
||||||
return NextResponse.json({ autoAllowedTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
await db.insert(settings).values({
|
|
||||||
id: userId,
|
|
||||||
userId,
|
|
||||||
copilotAutoAllowedTools: [],
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({ autoAllowedTools: [] })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to fetch auto-allowed tools', { error })
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
}
|
||||||
|
if (env.COPILOT_API_KEY) {
|
||||||
|
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||||
|
}
|
||||||
|
return headers
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* POST - Add a tool to the auto-allowed list
|
|
||||||
*/
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
if (!body.toolId || typeof body.toolId !== 'string') {
|
|
||||||
return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const toolId = body.toolId
|
|
||||||
|
|
||||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
|
||||||
|
|
||||||
if (!currentTools.includes(toolId)) {
|
|
||||||
const updatedTools = [...currentTools, toolId]
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({
|
|
||||||
copilotAutoAllowedTools: updatedTools,
|
|
||||||
updatedAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
|
|
||||||
logger.info('Added tool to auto-allowed list', { userId, toolId })
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: currentTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
await db.insert(settings).values({
|
|
||||||
id: userId,
|
|
||||||
userId,
|
|
||||||
copilotAutoAllowedTools: [toolId],
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to add auto-allowed tool', { error })
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* DELETE - Remove a tool from the auto-allowed list
|
|
||||||
*/
|
|
||||||
export async function DELETE(request: NextRequest) {
|
export async function DELETE(request: NextRequest) {
|
||||||
|
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||||
|
if (!isAuthenticated || !userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const toolIdFromQuery = new URL(request.url).searchParams.get('toolId') || undefined
|
||||||
|
const toolIdFromBody = await request
|
||||||
|
.json()
|
||||||
|
.then((body) => (typeof body?.toolId === 'string' ? body.toolId : undefined))
|
||||||
|
.catch(() => undefined)
|
||||||
|
const toolId = toolIdFromBody || toolIdFromQuery
|
||||||
|
if (!toolId) {
|
||||||
|
return NextResponse.json({ error: 'toolId is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||||
|
method: 'DELETE',
|
||||||
|
headers: copilotHeaders(),
|
||||||
|
body: JSON.stringify({
|
||||||
|
userId,
|
||||||
|
toolId,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
const payload = await res.json().catch(() => ({}))
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
if (!res.ok) {
|
||||||
|
logger.warn('Failed to remove auto-allowed tool via copilot backend', {
|
||||||
|
status: res.status,
|
||||||
|
userId,
|
||||||
|
toolId,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: payload?.error || 'Failed to remove auto-allowed tool',
|
||||||
|
autoAllowedTools: [],
|
||||||
|
},
|
||||||
|
{ status: res.status }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const userId = session.user.id
|
return NextResponse.json({
|
||||||
const { searchParams } = new URL(request.url)
|
success: true,
|
||||||
const toolId = searchParams.get('toolId')
|
autoAllowedTools: Array.isArray(payload?.autoAllowedTools) ? payload.autoAllowedTools : [],
|
||||||
|
})
|
||||||
if (!toolId) {
|
|
||||||
return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
|
||||||
const updatedTools = currentTools.filter((t) => t !== toolId)
|
|
||||||
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({
|
|
||||||
copilotAutoAllowedTools: updatedTools,
|
|
||||||
updatedAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
|
|
||||||
logger.info('Removed tool from auto-allowed list', { userId, toolId })
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: [] })
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to remove auto-allowed tool', { error })
|
logger.error('Error removing auto-allowed tool', {
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
userId,
|
||||||
|
toolId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to remove auto-allowed tool',
|
||||||
|
autoAllowedTools: [],
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
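The rewritten DELETE handler above forwards removal to the copilot backend and accepts `toolId` from either the JSON body or the query string. A hedged sketch of calling it; the route's URL is not shown in this diff, so the path `/api/copilot/auto-allowed-tools` and the tool id are assumptions:

```typescript
// Assumed route path and tool id; the handler reads toolId from the body first,
// then falls back to the ?toolId= query parameter.
await fetch('/api/copilot/auto-allowed-tools?toolId=run_workflow', { method: 'DELETE' })

// Equivalent call using the JSON body instead of the query string:
await fetch('/api/copilot/auto-allowed-tools', {
  method: 'DELETE',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ toolId: 'run_workflow' }),
})
```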
|||||||
@@ -28,13 +28,24 @@ import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
|
|||||||
|
|
||||||
const logger = createLogger('CopilotChatAPI')
|
const logger = createLogger('CopilotChatAPI')
|
||||||
|
|
||||||
|
function truncateForLog(value: string, maxLength = 120): string {
|
||||||
|
if (!value || maxLength <= 0) return ''
|
||||||
|
return value.length <= maxLength ? value : `${value.slice(0, maxLength)}...`
|
||||||
|
}
|
||||||
|
|
||||||
async function requestChatTitleFromCopilot(params: {
|
async function requestChatTitleFromCopilot(params: {
|
||||||
message: string
|
message: string
|
||||||
model: string
|
model: string
|
||||||
provider?: string
|
provider?: string
|
||||||
}): Promise<string | null> {
|
}): Promise<string | null> {
|
||||||
const { message, model, provider } = params
|
const { message, model, provider } = params
|
||||||
if (!message || !model) return null
|
if (!message || !model) {
|
||||||
|
logger.warn('Skipping chat title request because message/model is missing', {
|
||||||
|
hasMessage: !!message,
|
||||||
|
hasModel: !!model,
|
||||||
|
})
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
const headers: Record<string, string> = {
|
const headers: Record<string, string> = {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
@@ -44,6 +55,13 @@ async function requestChatTitleFromCopilot(params: {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
logger.info('Requesting chat title from copilot backend', {
|
||||||
|
model,
|
||||||
|
provider: provider || null,
|
||||||
|
messageLength: message.length,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
})
|
||||||
|
|
||||||
const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
|
const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers,
|
headers,
|
||||||
@@ -63,10 +81,32 @@ async function requestChatTitleFromCopilot(params: {
|
|||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
|
|
||||||
const title = typeof payload?.title === 'string' ? payload.title.trim() : ''
|
const rawTitle = typeof payload?.title === 'string' ? payload.title : ''
|
||||||
|
const title = rawTitle.trim()
|
||||||
|
logger.info('Received chat title response from copilot backend', {
|
||||||
|
status: response.status,
|
||||||
|
hasRawTitle: !!rawTitle,
|
||||||
|
rawTitle,
|
||||||
|
normalizedTitle: title,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!title) {
|
||||||
|
logger.warn('Copilot backend returned empty chat title', {
|
||||||
|
payload,
|
||||||
|
model,
|
||||||
|
provider: provider || null,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
return title || null
|
return title || null
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Error generating chat title:', error)
|
logger.error('Error generating chat title:', {
|
||||||
|
error,
|
||||||
|
model,
|
||||||
|
provider: provider || null,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
})
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -238,6 +278,7 @@ export async function POST(req: NextRequest) {
|
|||||||
let currentChat: any = null
|
let currentChat: any = null
|
||||||
let conversationHistory: any[] = []
|
let conversationHistory: any[] = []
|
||||||
let actualChatId = chatId
|
let actualChatId = chatId
|
||||||
|
let chatWasCreatedForRequest = false
|
||||||
const selectedModel = model || 'claude-opus-4-6'
|
const selectedModel = model || 'claude-opus-4-6'
|
||||||
|
|
||||||
if (chatId || createNewChat) {
|
if (chatId || createNewChat) {
|
||||||
@@ -249,6 +290,7 @@ export async function POST(req: NextRequest) {
|
|||||||
})
|
})
|
||||||
currentChat = chatResult.chat
|
currentChat = chatResult.chat
|
||||||
actualChatId = chatResult.chatId || chatId
|
actualChatId = chatResult.chatId || chatId
|
||||||
|
chatWasCreatedForRequest = chatResult.isNew
|
||||||
const history = buildConversationHistory(
|
const history = buildConversationHistory(
|
||||||
chatResult.conversationHistory,
|
chatResult.conversationHistory,
|
||||||
(chatResult.chat?.conversationId as string | undefined) || conversationId
|
(chatResult.chat?.conversationId as string | undefined) || conversationId
|
||||||
@@ -256,6 +298,18 @@ export async function POST(req: NextRequest) {
|
|||||||
conversationHistory = history.history
|
conversationHistory = history.history
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const shouldGenerateTitleForRequest =
|
||||||
|
!!actualChatId &&
|
||||||
|
chatWasCreatedForRequest &&
|
||||||
|
!currentChat?.title &&
|
||||||
|
conversationHistory.length === 0
|
||||||
|
|
||||||
|
const titleGenerationParams = {
|
||||||
|
message,
|
||||||
|
model: selectedModel,
|
||||||
|
provider,
|
||||||
|
}
|
||||||
|
|
||||||
const effectiveMode = mode === 'agent' ? 'build' : mode
|
const effectiveMode = mode === 'agent' ? 'build' : mode
|
||||||
const effectiveConversationId =
|
const effectiveConversationId =
|
||||||
(currentChat?.conversationId as string | undefined) || conversationId
|
(currentChat?.conversationId as string | undefined) || conversationId
|
||||||
@@ -348,10 +402,22 @@ export async function POST(req: NextRequest) {
|
|||||||
await pushEvent({ type: 'chat_id', chatId: actualChatId })
|
await pushEvent({ type: 'chat_id', chatId: actualChatId })
|
||||||
}
|
}
|
||||||
|
|
||||||
if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
|
if (shouldGenerateTitleForRequest) {
|
||||||
requestChatTitleFromCopilot({ message, model: selectedModel, provider })
|
logger.info(`[${tracker.requestId}] Starting title generation for streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: titleGenerationParams.model,
|
||||||
|
provider: provider || null,
|
||||||
|
messageLength: message.length,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
chatWasCreatedForRequest,
|
||||||
|
})
|
||||||
|
requestChatTitleFromCopilot(titleGenerationParams)
|
||||||
.then(async (title) => {
|
.then(async (title) => {
|
||||||
if (title) {
|
if (title) {
|
||||||
|
logger.info(`[${tracker.requestId}] Generated title for streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
title,
|
||||||
|
})
|
||||||
await db
|
await db
|
||||||
.update(copilotChats)
|
.update(copilotChats)
|
||||||
.set({
|
.set({
|
||||||
@@ -359,12 +425,30 @@ export async function POST(req: NextRequest) {
|
|||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
})
|
})
|
||||||
.where(eq(copilotChats.id, actualChatId!))
|
.where(eq(copilotChats.id, actualChatId!))
|
||||||
await pushEvent({ type: 'title_updated', title })
|
await pushEvent({ type: 'title_updated', title, chatId: actualChatId })
|
||||||
|
logger.info(`[${tracker.requestId}] Emitted title_updated SSE event`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
title,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
logger.warn(`[${tracker.requestId}] No title returned for streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: selectedModel,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
||||||
})
|
})
|
||||||
|
} else if (actualChatId && !chatWasCreatedForRequest) {
|
||||||
|
logger.info(
|
||||||
|
`[${tracker.requestId}] Skipping title generation because chat already exists`,
|
||||||
|
{
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: titleGenerationParams.model,
|
||||||
|
provider: provider || null,
|
||||||
|
}
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -479,9 +563,9 @@ export async function POST(req: NextRequest) {
|
|||||||
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
|
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
|
||||||
|
|
||||||
// Start title generation in parallel if this is first message (non-streaming)
|
// Start title generation in parallel if this is first message (non-streaming)
|
||||||
if (actualChatId && !currentChat.title && conversationHistory.length === 0) {
|
if (shouldGenerateTitleForRequest) {
|
||||||
logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`)
|
logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`)
|
||||||
requestChatTitleFromCopilot({ message, model: selectedModel, provider })
|
requestChatTitleFromCopilot(titleGenerationParams)
|
||||||
.then(async (title) => {
|
.then(async (title) => {
|
||||||
if (title) {
|
if (title) {
|
||||||
await db
|
await db
|
||||||
@@ -492,11 +576,22 @@ export async function POST(req: NextRequest) {
|
|||||||
})
|
})
|
||||||
.where(eq(copilotChats.id, actualChatId!))
|
.where(eq(copilotChats.id, actualChatId!))
|
||||||
logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
|
logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
|
||||||
|
} else {
|
||||||
|
logger.warn(`[${tracker.requestId}] No title returned for non-streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: selectedModel,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
||||||
})
|
})
|
||||||
|
} else if (actualChatId && !chatWasCreatedForRequest) {
|
||||||
|
logger.info(`[${tracker.requestId}] Skipping title generation because chat already exists`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: titleGenerationParams.model,
|
||||||
|
provider: provider || null,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update chat in database immediately (without blocking for title)
|
// Update chat in database immediately (without blocking for title)
|
||||||
|
|||||||
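The `truncateForLog` helper introduced in this file caps logged message previews at 120 characters by default. Its behavior, using the definition from the diff above (the calls are examples, not part of the diff):

```typescript
// Uses truncateForLog exactly as defined in the hunk above.
truncateForLog('short message')   // 'short message' (unchanged, under 120 chars)
truncateForLog('a'.repeat(200))   // first 120 characters followed by '...'
truncateForLog('anything', 0)     // '' (non-positive maxLength short-circuits)
```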
@@ -1,7 +1,11 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
|
import {
|
||||||
|
REDIS_TOOL_CALL_PREFIX,
|
||||||
|
REDIS_TOOL_CALL_TTL_SECONDS,
|
||||||
|
SIM_AGENT_API_URL,
|
||||||
|
} from '@/lib/copilot/constants'
|
||||||
import {
|
import {
|
||||||
authenticateCopilotRequestSessionOnly,
|
authenticateCopilotRequestSessionOnly,
|
||||||
createBadRequestResponse,
|
createBadRequestResponse,
|
||||||
@@ -10,6 +14,7 @@ import {
|
|||||||
createUnauthorizedResponse,
|
createUnauthorizedResponse,
|
||||||
type NotificationStatus,
|
type NotificationStatus,
|
||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
import { getRedisClient } from '@/lib/core/config/redis'
|
import { getRedisClient } from '@/lib/core/config/redis'
|
||||||
|
|
||||||
const logger = createLogger('CopilotConfirmAPI')
|
const logger = createLogger('CopilotConfirmAPI')
|
||||||
@@ -21,6 +26,8 @@ const ConfirmationSchema = z.object({
|
|||||||
errorMap: () => ({ message: 'Invalid notification status' }),
|
errorMap: () => ({ message: 'Invalid notification status' }),
|
||||||
}),
|
}),
|
||||||
message: z.string().optional(), // Optional message for background moves or additional context
|
message: z.string().optional(), // Optional message for background moves or additional context
|
||||||
|
toolName: z.string().optional(),
|
||||||
|
remember: z.boolean().optional(),
|
||||||
})
|
})
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -57,6 +64,44 @@ async function updateToolCallStatus(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function saveAutoAllowedToolPreference(userId: string, toolName: string): Promise<boolean> {
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
}
|
||||||
|
if (env.COPILOT_API_KEY) {
|
||||||
|
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers,
|
||||||
|
body: JSON.stringify({
|
||||||
|
userId,
|
||||||
|
toolId: toolName,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
logger.warn('Failed to persist auto-allowed tool preference', {
|
||||||
|
userId,
|
||||||
|
toolName,
|
||||||
|
status: response.status,
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error persisting auto-allowed tool preference', {
|
||||||
|
userId,
|
||||||
|
toolName,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* POST /api/copilot/confirm
|
* POST /api/copilot/confirm
|
||||||
* Update tool call status (Accept/Reject)
|
* Update tool call status (Accept/Reject)
|
||||||
@@ -74,7 +119,7 @@ export async function POST(req: NextRequest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const body = await req.json()
|
const body = await req.json()
|
||||||
const { toolCallId, status, message } = ConfirmationSchema.parse(body)
|
const { toolCallId, status, message, toolName, remember } = ConfirmationSchema.parse(body)
|
||||||
|
|
||||||
// Update the tool call status in Redis
|
// Update the tool call status in Redis
|
||||||
const updated = await updateToolCallStatus(toolCallId, status, message)
|
const updated = await updateToolCallStatus(toolCallId, status, message)
|
||||||
@@ -90,14 +135,22 @@ export async function POST(req: NextRequest) {
|
|||||||
return createBadRequestResponse('Failed to update tool call status or tool call not found')
|
return createBadRequestResponse('Failed to update tool call status or tool call not found')
|
||||||
}
|
}
|
||||||
|
|
||||||
const duration = tracker.getDuration()
|
let rememberSaved = false
|
||||||
|
if (status === 'accepted' && remember === true && toolName && authenticatedUserId) {
|
||||||
|
rememberSaved = await saveAutoAllowedToolPreference(authenticatedUserId, toolName)
|
||||||
|
}
|
||||||
|
|
||||||
return NextResponse.json({
|
const response: Record<string, unknown> = {
|
||||||
success: true,
|
success: true,
|
||||||
message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
|
message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
|
||||||
toolCallId,
|
toolCallId,
|
||||||
status,
|
status,
|
||||||
})
|
}
|
||||||
|
if (remember === true) {
|
||||||
|
response.rememberSaved = rememberSaved
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(response)
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const duration = tracker.getDuration()
|
const duration = tracker.getDuration()
|
||||||
|
|
||||||
|
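The confirm endpoint now accepts optional `toolName` and `remember` fields and reports `rememberSaved` when `remember` was requested. A hedged example request; the path comes from the route's doc comment, while the ids and tool name are invented:

```typescript
// Hypothetical client call to POST /api/copilot/confirm exercising the new fields.
const res = await fetch('/api/copilot/confirm', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    toolCallId: 'tc_123',       // invented id
    status: 'accepted',
    toolName: 'run_workflow',   // invented tool name
    remember: true,             // ask the backend to auto-allow this tool next time
  }),
})
const payload = await res.json()
// payload.rememberSaved is present only when the request set remember === true.
```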
|||||||
@@ -1,170 +0,0 @@
|
|||||||
/**
|
|
||||||
* POST /api/referral-code/redeem
|
|
||||||
*
|
|
||||||
* Redeem a referral/promo code to receive bonus credits.
|
|
||||||
*
|
|
||||||
* Body:
|
|
||||||
* - code: string — The referral code to redeem
|
|
||||||
*
|
|
||||||
* Response: { redeemed: boolean, bonusAmount?: number, error?: string }
|
|
||||||
*
|
|
||||||
* Constraints:
|
|
||||||
* - Enterprise users cannot redeem codes
|
|
||||||
* - One redemption per user, ever (unique constraint on userId)
|
|
||||||
* - One redemption per organization for team users (partial unique on organizationId)
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { db } from '@sim/db'
|
|
||||||
import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { and, eq } from 'drizzle-orm'
|
|
||||||
import { nanoid } from 'nanoid'
|
|
||||||
import { NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { getSession } from '@/lib/auth'
|
|
||||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
|
||||||
import { applyBonusCredits } from '@/lib/billing/credits/bonus'
|
|
||||||
|
|
||||||
const logger = createLogger('ReferralCodeRedemption')
|
|
||||||
|
|
||||||
const RedeemCodeSchema = z.object({
|
|
||||||
code: z.string().min(1, 'Code is required'),
|
|
||||||
})
|
|
||||||
|
|
||||||
export async function POST(request: Request) {
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
const { code } = RedeemCodeSchema.parse(body)
|
|
||||||
|
|
||||||
const subscription = await getHighestPrioritySubscription(session.user.id)
|
|
||||||
|
|
||||||
if (subscription?.plan === 'enterprise') {
|
|
||||||
return NextResponse.json({
|
|
||||||
redeemed: false,
|
|
||||||
error: 'Enterprise accounts cannot redeem referral codes',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const isTeam = subscription?.plan === 'team'
|
|
||||||
const orgId = isTeam ? subscription.referenceId : null
|
|
||||||
|
|
||||||
const normalizedCode = code.trim().toUpperCase()
|
|
||||||
|
|
||||||
const [campaign] = await db
|
|
||||||
.select()
|
|
||||||
.from(referralCampaigns)
|
|
||||||
.where(and(eq(referralCampaigns.code, normalizedCode), eq(referralCampaigns.isActive, true)))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (!campaign) {
|
|
||||||
logger.info('Invalid code redemption attempt', {
|
|
||||||
userId: session.user.id,
|
|
||||||
code: normalizedCode,
|
|
||||||
})
|
|
||||||
return NextResponse.json({ error: 'Invalid or expired code' }, { status: 404 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const [existingUserAttribution] = await db
|
|
||||||
.select({ id: referralAttribution.id })
|
|
||||||
.from(referralAttribution)
|
|
||||||
.where(eq(referralAttribution.userId, session.user.id))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (existingUserAttribution) {
|
|
||||||
return NextResponse.json({
|
|
||||||
redeemed: false,
|
|
||||||
error: 'You have already redeemed a code',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (orgId) {
|
|
||||||
const [existingOrgAttribution] = await db
|
|
||||||
.select({ id: referralAttribution.id })
|
|
||||||
.from(referralAttribution)
|
|
||||||
.where(eq(referralAttribution.organizationId, orgId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (existingOrgAttribution) {
|
|
||||||
return NextResponse.json({
|
|
||||||
redeemed: false,
|
|
||||||
error: 'A code has already been redeemed for your organization',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const bonusAmount = Number(campaign.bonusCreditAmount)
|
|
||||||
|
|
||||||
let redeemed = false
|
|
||||||
await db.transaction(async (tx) => {
|
|
||||||
const [existingStats] = await tx
|
|
||||||
.select({ id: userStats.id })
|
|
||||||
.from(userStats)
|
|
||||||
.where(eq(userStats.userId, session.user.id))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (!existingStats) {
|
|
||||||
await tx.insert(userStats).values({
|
|
||||||
id: nanoid(),
|
|
||||||
userId: session.user.id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await tx
|
|
||||||
.insert(referralAttribution)
|
|
||||||
.values({
|
|
||||||
id: nanoid(),
|
|
||||||
userId: session.user.id,
|
|
||||||
organizationId: orgId,
|
|
||||||
campaignId: campaign.id,
|
|
||||||
utmSource: null,
|
|
||||||
utmMedium: null,
|
|
||||||
utmCampaign: null,
|
|
||||||
utmContent: null,
|
|
||||||
referrerUrl: null,
|
|
||||||
landingPage: null,
|
|
||||||
bonusCreditAmount: bonusAmount.toString(),
|
|
||||||
})
|
|
||||||
.onConflictDoNothing()
|
|
||||||
.returning({ id: referralAttribution.id })
|
|
||||||
|
|
||||||
if (result.length > 0) {
|
|
||||||
await applyBonusCredits(session.user.id, bonusAmount, tx)
|
|
||||||
redeemed = true
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (redeemed) {
|
|
||||||
logger.info('Referral code redeemed', {
|
|
||||||
userId: session.user.id,
|
|
||||||
organizationId: orgId,
|
|
||||||
code: normalizedCode,
|
|
||||||
campaignId: campaign.id,
|
|
||||||
campaignName: campaign.name,
|
|
||||||
bonusAmount,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!redeemed) {
|
|
||||||
return NextResponse.json({
|
|
||||||
redeemed: false,
|
|
||||||
error: 'You have already redeemed a code',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
redeemed: true,
|
|
||||||
bonusAmount,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof z.ZodError) {
|
|
||||||
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
|
|
||||||
}
|
|
||||||
logger.error('Referral code redemption error', { error })
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
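The removed redemption route documents its own contract: POST `/api/referral-code/redeem` with `{ code }`, returning `{ redeemed, bonusAmount?, error? }`. A short example call; the code value is invented, and the handler trims and uppercases it before lookup:

```typescript
const res = await fetch('/api/referral-code/redeem', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ code: 'LAUNCH25' }), // invented code; normalized to uppercase server-side
})
const { redeemed, bonusAmount, error } = await res.json()
```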
@@ -66,12 +66,6 @@
  * Credits:
  * POST /api/v1/admin/credits - Issue credits to user (by userId or email)
  *
- * Referral Campaigns:
- * GET /api/v1/admin/referral-campaigns - List campaigns (?active=true/false)
- * POST /api/v1/admin/referral-campaigns - Create campaign
- * GET /api/v1/admin/referral-campaigns/:id - Get campaign details
- * PATCH /api/v1/admin/referral-campaigns/:id - Update campaign fields
- *
  * Access Control (Permission Groups):
  * GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
  * DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
@@ -103,7 +97,6 @@ export type {
   AdminOrganization,
   AdminOrganizationBillingSummary,
   AdminOrganizationDetail,
-  AdminReferralCampaign,
   AdminSeatAnalytics,
   AdminSingleResponse,
   AdminSubscription,
@@ -118,7 +111,6 @@ export type {
   AdminWorkspaceMember,
   DbMember,
   DbOrganization,
-  DbReferralCampaign,
   DbSubscription,
   DbUser,
   DbUserStats,
@@ -147,7 +139,6 @@ export {
   parseWorkflowVariables,
   toAdminFolder,
   toAdminOrganization,
-  toAdminReferralCampaign,
   toAdminSubscription,
   toAdminUser,
   toAdminWorkflow,
|||||||
@@ -1,142 +0,0 @@
|
|||||||
/**
|
|
||||||
* GET /api/v1/admin/referral-campaigns/:id
|
|
||||||
*
|
|
||||||
* Get a single referral campaign by ID.
|
|
||||||
*
|
|
||||||
* PATCH /api/v1/admin/referral-campaigns/:id
|
|
||||||
*
|
|
||||||
* Update campaign fields. All fields are optional.
|
|
||||||
*
|
|
||||||
* Body:
|
|
||||||
* - name: string (non-empty) - Campaign name
|
|
||||||
* - bonusCreditAmount: number (> 0) - Bonus credits in dollars
|
|
||||||
* - isActive: boolean - Enable/disable the campaign
|
|
||||||
* - code: string | null (min 6 chars, auto-uppercased, null to remove) - Redeemable code
|
|
||||||
* - utmSource: string | null - UTM source match (null = wildcard)
|
|
||||||
* - utmMedium: string | null - UTM medium match (null = wildcard)
|
|
||||||
* - utmCampaign: string | null - UTM campaign match (null = wildcard)
|
|
||||||
* - utmContent: string | null - UTM content match (null = wildcard)
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { db } from '@sim/db'
|
|
||||||
import { referralCampaigns } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
|
|
||||||
import {
|
|
||||||
badRequestResponse,
|
|
||||||
internalErrorResponse,
|
|
||||||
notFoundResponse,
|
|
||||||
singleResponse,
|
|
||||||
} from '@/app/api/v1/admin/responses'
|
|
||||||
import { toAdminReferralCampaign } from '@/app/api/v1/admin/types'
|
|
||||||
|
|
||||||
const logger = createLogger('AdminReferralCampaignDetailAPI')
|
|
||||||
|
|
||||||
interface RouteParams {
|
|
||||||
id: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
|
|
||||||
try {
|
|
||||||
const { id: campaignId } = await context.params
|
|
||||||
|
|
||||||
const [campaign] = await db
|
|
||||||
.select()
|
|
||||||
.from(referralCampaigns)
|
|
||||||
.where(eq(referralCampaigns.id, campaignId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (!campaign) {
|
|
||||||
return notFoundResponse('Campaign')
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`Admin API: Retrieved referral campaign ${campaignId}`)
|
|
||||||
|
|
||||||
return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Admin API: Failed to get referral campaign', { error })
|
|
||||||
return internalErrorResponse('Failed to get referral campaign')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
|
|
||||||
try {
|
|
||||||
const { id: campaignId } = await context.params
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
const [existing] = await db
|
|
||||||
.select()
|
|
||||||
.from(referralCampaigns)
|
|
||||||
.where(eq(referralCampaigns.id, campaignId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (!existing) {
|
|
||||||
return notFoundResponse('Campaign')
|
|
||||||
}
|
|
||||||
|
|
||||||
const updateData: Record<string, unknown> = { updatedAt: new Date() }
|
|
||||||
|
|
||||||
if (body.name !== undefined) {
|
|
||||||
if (typeof body.name !== 'string' || body.name.trim().length === 0) {
|
|
||||||
return badRequestResponse('name must be a non-empty string')
|
|
||||||
}
|
|
||||||
updateData.name = body.name.trim()
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.bonusCreditAmount !== undefined) {
|
|
||||||
if (
|
|
||||||
typeof body.bonusCreditAmount !== 'number' ||
|
|
||||||
!Number.isFinite(body.bonusCreditAmount) ||
|
|
||||||
body.bonusCreditAmount <= 0
|
|
||||||
) {
|
|
||||||
return badRequestResponse('bonusCreditAmount must be a positive number')
|
|
||||||
}
|
|
||||||
updateData.bonusCreditAmount = body.bonusCreditAmount.toString()
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.isActive !== undefined) {
|
|
||||||
if (typeof body.isActive !== 'boolean') {
|
|
||||||
return badRequestResponse('isActive must be a boolean')
|
|
||||||
}
|
|
||||||
updateData.isActive = body.isActive
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.code !== undefined) {
|
|
||||||
if (body.code !== null) {
|
|
||||||
if (typeof body.code !== 'string') {
|
|
||||||
return badRequestResponse('code must be a string or null')
|
|
||||||
}
|
|
||||||
if (body.code.trim().length < 6) {
|
|
||||||
return badRequestResponse('code must be at least 6 characters')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
updateData.code = body.code ? body.code.trim().toUpperCase() : null
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const field of ['utmSource', 'utmMedium', 'utmCampaign', 'utmContent'] as const) {
|
|
||||||
if (body[field] !== undefined) {
|
|
||||||
if (body[field] !== null && typeof body[field] !== 'string') {
|
|
||||||
return badRequestResponse(`${field} must be a string or null`)
|
|
||||||
}
|
|
||||||
updateData[field] = body[field] || null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const [updated] = await db
|
|
||||||
.update(referralCampaigns)
|
|
||||||
.set(updateData)
|
|
||||||
.where(eq(referralCampaigns.id, campaignId))
|
|
||||||
.returning()
|
|
||||||
|
|
||||||
logger.info(`Admin API: Updated referral campaign ${campaignId}`, {
|
|
||||||
fields: Object.keys(updateData).filter((k) => k !== 'updatedAt'),
|
|
||||||
})
|
|
||||||
|
|
||||||
return singleResponse(toAdminReferralCampaign(updated, getBaseUrl()))
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Admin API: Failed to update referral campaign', { error })
|
|
||||||
return internalErrorResponse('Failed to update referral campaign')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
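The removed PATCH handler above validates each field independently. A hedged example body that would pass those checks; the values and `campaignId` variable are invented, and the admin auth expected by `withAdminAuthParams` is not shown in this diff:

```typescript
// Hypothetical admin update; every field is optional and validated as in the handler above.
await fetch(`/api/v1/admin/referral-campaigns/${campaignId}`, {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' }, // plus whatever admin auth the middleware expects
  body: JSON.stringify({
    name: 'Spring promo',     // must be a non-empty string
    bonusCreditAmount: 10,    // must be a positive finite number; stored as a string
    isActive: false,          // must be a boolean
    code: 'spring25',         // at least 6 chars; stored uppercased, null removes the code
    utmSource: null,          // null clears the field, making it a wildcard
  }),
})
```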
@@ -1,140 +0,0 @@
|
|||||||
/**
|
|
||||||
* GET /api/v1/admin/referral-campaigns
|
|
||||||
*
|
|
||||||
* List referral campaigns with optional filtering and pagination.
|
|
||||||
*
|
|
||||||
* Query Parameters:
|
|
||||||
* - active: string (optional) - Filter by active status ('true' or 'false')
|
|
||||||
* - limit: number (default: 50, max: 250)
|
|
||||||
* - offset: number (default: 0)
|
|
||||||
*
|
|
||||||
* POST /api/v1/admin/referral-campaigns
|
|
||||||
*
|
|
||||||
* Create a new referral campaign.
|
|
||||||
*
|
|
||||||
* Body:
|
|
||||||
* - name: string (required) - Campaign name
|
|
||||||
* - bonusCreditAmount: number (required, > 0) - Bonus credits in dollars
|
|
||||||
* - code: string | null (optional, min 6 chars, auto-uppercased) - Redeemable code
|
|
||||||
* - utmSource: string | null (optional) - UTM source match (null = wildcard)
|
|
||||||
* - utmMedium: string | null (optional) - UTM medium match (null = wildcard)
|
|
||||||
* - utmCampaign: string | null (optional) - UTM campaign match (null = wildcard)
|
|
||||||
* - utmContent: string | null (optional) - UTM content match (null = wildcard)
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { db } from '@sim/db'
|
|
||||||
import { referralCampaigns } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { count, eq, type SQL } from 'drizzle-orm'
|
|
||||||
import { nanoid } from 'nanoid'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
|
|
||||||
import {
|
|
||||||
badRequestResponse,
|
|
||||||
internalErrorResponse,
|
|
||||||
listResponse,
|
|
||||||
singleResponse,
|
|
||||||
} from '@/app/api/v1/admin/responses'
|
|
||||||
import {
|
|
||||||
type AdminReferralCampaign,
|
|
||||||
createPaginationMeta,
|
|
||||||
parsePaginationParams,
|
|
||||||
toAdminReferralCampaign,
|
|
||||||
} from '@/app/api/v1/admin/types'
|
|
||||||
|
|
||||||
const logger = createLogger('AdminReferralCampaignsAPI')
|
|
||||||
|
|
||||||
export const GET = withAdminAuth(async (request) => {
|
|
||||||
const url = new URL(request.url)
|
|
||||||
const { limit, offset } = parsePaginationParams(url)
|
|
||||||
const activeFilter = url.searchParams.get('active')
|
|
||||||
|
|
||||||
try {
|
|
||||||
const conditions: SQL<unknown>[] = []
|
|
||||||
if (activeFilter === 'true') {
|
|
||||||
conditions.push(eq(referralCampaigns.isActive, true))
|
|
||||||
} else if (activeFilter === 'false') {
|
|
||||||
conditions.push(eq(referralCampaigns.isActive, false))
|
|
||||||
}
|
|
||||||
|
|
||||||
const whereClause = conditions.length > 0 ? conditions[0] : undefined
|
|
||||||
const baseUrl = getBaseUrl()
|
|
||||||
|
|
||||||
const [countResult, campaigns] = await Promise.all([
|
|
||||||
db.select({ total: count() }).from(referralCampaigns).where(whereClause),
|
|
||||||
db
|
|
||||||
.select()
|
|
||||||
.from(referralCampaigns)
|
|
||||||
.where(whereClause)
|
|
||||||
.orderBy(referralCampaigns.createdAt)
|
|
||||||
.limit(limit)
|
|
||||||
.offset(offset),
|
|
||||||
])
|
|
||||||
|
|
||||||
const total = countResult[0].total
|
|
||||||
const data: AdminReferralCampaign[] = campaigns.map((c) => toAdminReferralCampaign(c, baseUrl))
|
|
||||||
const pagination = createPaginationMeta(total, limit, offset)
|
|
||||||
|
|
||||||
logger.info(`Admin API: Listed ${data.length} referral campaigns (total: ${total})`)
|
|
||||||
|
|
||||||
return listResponse(data, pagination)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Admin API: Failed to list referral campaigns', { error })
|
|
||||||
return internalErrorResponse('Failed to list referral campaigns')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
export const POST = withAdminAuth(async (request) => {
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const { name, code, utmSource, utmMedium, utmCampaign, utmContent, bonusCreditAmount } = body
|
|
||||||
|
|
||||||
if (!name || typeof name !== 'string') {
|
|
||||||
return badRequestResponse('name is required and must be a string')
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
typeof bonusCreditAmount !== 'number' ||
|
|
||||||
!Number.isFinite(bonusCreditAmount) ||
|
|
||||||
bonusCreditAmount <= 0
|
|
||||||
) {
|
|
||||||
return badRequestResponse('bonusCreditAmount must be a positive number')
|
|
||||||
}
|
|
||||||
|
|
||||||
if (code !== undefined && code !== null) {
|
|
||||||
if (typeof code !== 'string') {
|
|
||||||
return badRequestResponse('code must be a string or null')
|
|
||||||
}
|
|
||||||
if (code.trim().length < 6) {
|
|
||||||
return badRequestResponse('code must be at least 6 characters')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const id = nanoid()
|
|
||||||
|
|
||||||
const [campaign] = await db
|
|
||||||
.insert(referralCampaigns)
|
|
||||||
.values({
|
|
||||||
id,
|
|
||||||
name,
|
|
||||||
code: code ? code.trim().toUpperCase() : null,
|
|
||||||
utmSource: utmSource || null,
|
|
||||||
utmMedium: utmMedium || null,
|
|
||||||
utmCampaign: utmCampaign || null,
|
|
||||||
utmContent: utmContent || null,
|
|
||||||
bonusCreditAmount: bonusCreditAmount.toString(),
|
|
||||||
})
|
|
||||||
.returning()
|
|
||||||
|
|
||||||
logger.info(`Admin API: Created referral campaign ${id}`, {
|
|
||||||
name,
|
|
||||||
code: campaign.code,
|
|
||||||
bonusCreditAmount,
|
|
||||||
})
|
|
||||||
|
|
||||||
return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Admin API: Failed to create referral campaign', { error })
|
|
||||||
return internalErrorResponse('Failed to create referral campaign')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
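The removed list/create handlers document their own parameters. Hedged example calls; all values are invented and admin auth is omitted:

```typescript
// List active campaigns with explicit pagination (defaults: limit 50, offset 0).
await fetch('/api/v1/admin/referral-campaigns?active=true&limit=50&offset=0')

// Create a campaign; only name and bonusCreditAmount are required.
await fetch('/api/v1/admin/referral-campaigns', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    name: 'Newsletter launch',
    bonusCreditAmount: 5,    // required, must be > 0
    code: 'NEWS2025',        // optional, at least 6 chars, stored uppercased
    utmSource: 'newsletter', // optional; omitted UTM fields default to null (wildcard)
  }),
})
```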
@@ -8,7 +8,6 @@
 import type {
   member,
   organization,
-  referralCampaigns,
   subscription,
   user,
   userStats,
@@ -32,7 +31,6 @@ export type DbOrganization = InferSelectModel<typeof organization>
 export type DbSubscription = InferSelectModel<typeof subscription>
 export type DbMember = InferSelectModel<typeof member>
 export type DbUserStats = InferSelectModel<typeof userStats>
-export type DbReferralCampaign = InferSelectModel<typeof referralCampaigns>
 
 // =============================================================================
 // Pagination
@@ -648,49 +646,3 @@ export interface AdminDeployResult {
 export interface AdminUndeployResult {
   isDeployed: boolean
 }
-
-// =============================================================================
-// Referral Campaign Types
-// =============================================================================
-
-export interface AdminReferralCampaign {
-  id: string
-  name: string
-  code: string | null
-  utmSource: string | null
-  utmMedium: string | null
-  utmCampaign: string | null
-  utmContent: string | null
-  bonusCreditAmount: string
-  isActive: boolean
-  signupUrl: string | null
-  createdAt: string
-  updatedAt: string
-}
-
-export function toAdminReferralCampaign(
-  dbCampaign: DbReferralCampaign,
-  baseUrl: string
-): AdminReferralCampaign {
-  const utmParams = new URLSearchParams()
-  if (dbCampaign.utmSource) utmParams.set('utm_source', dbCampaign.utmSource)
-  if (dbCampaign.utmMedium) utmParams.set('utm_medium', dbCampaign.utmMedium)
-  if (dbCampaign.utmCampaign) utmParams.set('utm_campaign', dbCampaign.utmCampaign)
-  if (dbCampaign.utmContent) utmParams.set('utm_content', dbCampaign.utmContent)
-  const query = utmParams.toString()
-
-  return {
-    id: dbCampaign.id,
-    name: dbCampaign.name,
-    code: dbCampaign.code,
-    utmSource: dbCampaign.utmSource,
-    utmMedium: dbCampaign.utmMedium,
-    utmCampaign: dbCampaign.utmCampaign,
-    utmContent: dbCampaign.utmContent,
-    bonusCreditAmount: dbCampaign.bonusCreditAmount,
-    isActive: dbCampaign.isActive,
-    signupUrl: query ? `${baseUrl}/signup?${query}` : null,
-    createdAt: dbCampaign.createdAt.toISOString(),
-    updatedAt: dbCampaign.updatedAt.toISOString(),
-  }
-}
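For context on the removal above: `toAdminReferralCampaign` builds `signupUrl` only from the campaign's non-null UTM fields. A hedged illustration with an invented `DbReferralCampaign` row and base URL:

```typescript
// Invented row for illustration; none of these values come from the diff.
const campaign = {
  id: 'camp_1',
  name: 'Newsletter launch',
  code: null,
  utmSource: 'newsletter',
  utmMedium: null,
  utmCampaign: 'launch',
  utmContent: null,
  bonusCreditAmount: '5',
  isActive: true,
  createdAt: new Date('2025-01-01'),
  updatedAt: new Date('2025-01-01'),
}

toAdminReferralCampaign(campaign, 'https://example.com')
// signupUrl -> 'https://example.com/signup?utm_source=newsletter&utm_campaign=launch'
// If all four UTM fields were null, signupUrl would be null instead.
```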
@@ -29,7 +29,7 @@ const patchBodySchema = z
     description: z
       .string()
       .trim()
-      .max(2000, 'Description must be 2000 characters or less')
+      .max(500, 'Description must be 500 characters or less')
       .nullable()
       .optional(),
     isActive: z.literal(true).optional(), // Set to true to activate this version
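The hunk above tightens the description limit from 2000 to 500 characters. The field behaves as follows when shown standalone, a sketch rather than the full `patchBodySchema`:

```typescript
import { z } from 'zod'

// Same chain as the description field above, isolated for illustration.
const description = z
  .string()
  .trim()
  .max(500, 'Description must be 500 characters or less')
  .nullable()
  .optional()

description.parse('x'.repeat(500)) // ok
description.parse(null)            // ok (nullable)
description.parse('x'.repeat(501)) // throws ZodError with the message above
```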
|
|||||||
@@ -12,7 +12,7 @@ import {
 import { generateRequestId } from '@/lib/core/utils/request'
 import { SSE_HEADERS } from '@/lib/core/utils/sse'
 import { getBaseUrl } from '@/lib/core/utils/urls'
-import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
+import { markExecutionCancelled } from '@/lib/execution/cancellation'
 import { processInputFileFields } from '@/lib/execution/files'
 import { preprocessExecution } from '@/lib/execution/preprocessing'
 import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,27 +700,15 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
   const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
   let isStreamClosed = false

-  const eventWriter = createExecutionEventWriter(executionId)
-  setExecutionMeta(executionId, {
-    status: 'active',
-    userId: actorUserId,
-    workflowId,
-  }).catch(() => {})
-
   const stream = new ReadableStream<Uint8Array>({
     async start(controller) {
-      let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null
-
       const sendEvent = (event: ExecutionEvent) => {
-        if (!isStreamClosed) {
-          try {
-            controller.enqueue(encodeSSEEvent(event))
-          } catch {
-            isStreamClosed = true
-          }
-        }
-        if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
-          eventWriter.write(event).catch(() => {})
-        }
+        if (isStreamClosed) return
+        try {
+          controller.enqueue(encodeSSEEvent(event))
+        } catch {
+          isStreamClosed = true
+        }
       }
@@ -841,12 +829,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

         const reader = streamingExec.stream.getReader()
         const decoder = new TextDecoder()
+        let chunkCount = 0

         try {
           while (true) {
             const { done, value } = await reader.read()
             if (done) break

+            chunkCount++
             const chunk = decoder.decode(value, { stream: true })
             sendEvent({
               type: 'stream:chunk',
@@ -961,7 +951,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
               duration: result.metadata?.duration || 0,
             },
           })
-          finalMetaStatus = 'error'
         } else {
           logger.info(`[${requestId}] Workflow execution was cancelled`)

@@ -974,7 +963,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
               duration: result.metadata?.duration || 0,
             },
           })
-          finalMetaStatus = 'cancelled'
         }
         return
       }
@@ -998,7 +986,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
             endTime: result.metadata?.endTime || new Date().toISOString(),
           },
         })
-        finalMetaStatus = 'complete'
       } catch (error: unknown) {
         const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
         const errorMessage = isTimeout
@@ -1030,18 +1017,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
             duration: executionResult?.metadata?.duration || 0,
           },
         })
-        finalMetaStatus = 'error'
       } finally {
-        try {
-          await eventWriter.close()
-        } catch (closeError) {
-          logger.warn(`[${requestId}] Failed to close event writer`, {
-            error: closeError instanceof Error ? closeError.message : String(closeError),
-          })
-        }
-        if (finalMetaStatus) {
-          setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
-        }
         timeoutController.cleanup()
         if (executionId) {
           await cleanupExecutionBase64Cache(executionId)
@@ -1056,7 +1032,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
     },
     cancel() {
       isStreamClosed = true
-      logger.info(`[${requestId}] Client disconnected from SSE stream`)
+      timeoutController.cleanup()
+      logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
+      timeoutController.abort()
+      markExecutionCancelled(executionId).catch(() => {})
     },
   })
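A hedged sketch of what the new `cancel()` handler implies for callers: aborting the request that consumes this SSE response is now treated as a cancellation signal (timeout controller aborted, execution marked cancelled) rather than a silent disconnect. The endpoint path and timing below are illustrative assumptions, not the repo's actual client code.

```ts
// Illustrative only: the endpoint path is an assumption based on the route shown above.
async function runAndMaybeCancel(workflowId: string, input: unknown): Promise<Response> {
  const controller = new AbortController()
  const res = await fetch(`/api/workflows/${workflowId}/execute`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(input),
    signal: controller.signal,
  })
  // Consume res.body as SSE here; aborting the request now maps to the route's
  // cancel() handler, which aborts the timeout controller and calls
  // markExecutionCancelled(executionId) instead of only logging a disconnect.
  setTimeout(() => controller.abort(), 5_000) // e.g. the user pressed Stop
  return res
}
```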
@@ -1,170 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
  type ExecutionStreamStatus,
  getExecutionMeta,
  readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'

const logger = createLogger('ExecutionStreamReconnectAPI')

const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes

function isTerminalStatus(status: ExecutionStreamStatus): boolean {
  return status === 'complete' || status === 'error' || status === 'cancelled'
}

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

export async function GET(
  req: NextRequest,
  { params }: { params: Promise<{ id: string; executionId: string }> }
) {
  const { id: workflowId, executionId } = await params

  try {
    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,
      userId: auth.userId,
      action: 'read',
    })
    if (!workflowAuthorization.allowed) {
      return NextResponse.json(
        { error: workflowAuthorization.message || 'Access denied' },
        { status: workflowAuthorization.status }
      )
    }

    const meta = await getExecutionMeta(executionId)
    if (!meta) {
      return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
    }

    if (meta.workflowId && meta.workflowId !== workflowId) {
      return NextResponse.json(
        { error: 'Execution does not belong to this workflow' },
        { status: 403 }
      )
    }

    const fromParam = req.nextUrl.searchParams.get('from')
    const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
    const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0

    logger.info('Reconnection stream requested', {
      workflowId,
      executionId,
      fromEventId,
      metaStatus: meta.status,
    })

    const encoder = new TextEncoder()

    let closed = false

    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        let lastEventId = fromEventId
        const pollDeadline = Date.now() + MAX_POLL_DURATION_MS

        const enqueue = (text: string) => {
          if (closed) return
          try {
            controller.enqueue(encoder.encode(text))
          } catch {
            closed = true
          }
        }

        try {
          const events = await readExecutionEvents(executionId, lastEventId)
          for (const entry of events) {
            if (closed) return
            enqueue(formatSSEEvent(entry.event))
            lastEventId = entry.eventId
          }

          const currentMeta = await getExecutionMeta(executionId)
          if (!currentMeta || isTerminalStatus(currentMeta.status)) {
            enqueue('data: [DONE]\n\n')
            if (!closed) controller.close()
            return
          }

          while (!closed && Date.now() < pollDeadline) {
            await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
            if (closed) return

            const newEvents = await readExecutionEvents(executionId, lastEventId)
            for (const entry of newEvents) {
              if (closed) return
              enqueue(formatSSEEvent(entry.event))
              lastEventId = entry.eventId
            }

            const polledMeta = await getExecutionMeta(executionId)
            if (!polledMeta || isTerminalStatus(polledMeta.status)) {
              const finalEvents = await readExecutionEvents(executionId, lastEventId)
              for (const entry of finalEvents) {
                if (closed) return
                enqueue(formatSSEEvent(entry.event))
                lastEventId = entry.eventId
              }
              enqueue('data: [DONE]\n\n')
              if (!closed) controller.close()
              return
            }
          }

          if (!closed) {
            logger.warn('Reconnection stream poll deadline reached', { executionId })
            enqueue('data: [DONE]\n\n')
            controller.close()
          }
        } catch (error) {
          logger.error('Error in reconnection stream', {
            executionId,
            error: error instanceof Error ? error.message : String(error),
          })
          if (!closed) {
            try {
              controller.close()
            } catch {}
          }
        }
      },
      cancel() {
        closed = true
        logger.info('Client disconnected from reconnection stream', { executionId })
      },
    })

    return new NextResponse(stream, {
      headers: {
        ...SSE_HEADERS,
        'X-Execution-Id': executionId,
      },
    })
  } catch (error: any) {
    logger.error('Failed to start reconnection stream', {
      workflowId,
      executionId,
      error: error.message,
    })
    return NextResponse.json(
      { error: error.message || 'Failed to start reconnection stream' },
      { status: 500 }
    )
  }
}
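Since the reconnection route above is removed in this change, the sketch below only documents how a client would have consumed it: poll-backed SSE keyed by a `from` event id and terminated by a `data: [DONE]` sentinel. The URL shape and the coarse buffering are assumptions for illustration, not code from the repository.

```ts
// Illustrative reader for the (removed) reconnection endpoint.
async function replayExecution(workflowId: string, executionId: string, fromEventId = 0) {
  const res = await fetch(
    `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`
  )
  if (!res.ok || !res.body) throw new Error(`reconnect failed: ${res.status}`)

  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffered = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffered += decoder.decode(value, { stream: true })
    if (buffered.includes('data: [DONE]')) break // terminal sentinel emitted by the route
  }
  return buffered
}
```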
@@ -14,6 +14,14 @@ const logger = createLogger('DiffControls')
 const NOTIFICATION_WIDTH = 240
 const NOTIFICATION_GAP = 16

+function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
+  if (name !== 'workflow_change') return false
+
+  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
+  if (mode === 'apply') return true
+  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
+}
+
 export const DiffControls = memo(function DiffControls() {
   const isTerminalResizing = useTerminalStore((state) => state.isResizing)
   const isPanelResizing = usePanelStore((state) => state.isResizing)
@@ -64,7 +72,7 @@ export const DiffControls = memo(function DiffControls() {
         const b = blocks[bi]
         if (b?.type === 'tool_call') {
           const tn = b.toolCall?.name
-          if (tn === 'edit_workflow') {
+          if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
             id = b.toolCall?.id
             break outer
           }
@@ -72,7 +80,9 @@ export const DiffControls = memo(function DiffControls() {
         }
       }
     if (!id) {
-      const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
+      const candidates = Object.values(toolCallsById).filter((t) =>
+        isWorkflowEditToolCall(t.name, t.params)
+      )
       id = candidates.length ? candidates[candidates.length - 1].id : undefined
     }
     if (id) updatePreviewToolCallState('accepted', id)
@@ -102,7 +112,7 @@ export const DiffControls = memo(function DiffControls() {
         const b = blocks[bi]
         if (b?.type === 'tool_call') {
           const tn = b.toolCall?.name
-          if (tn === 'edit_workflow') {
+          if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
             id = b.toolCall?.id
             break outer
           }
@@ -110,7 +120,9 @@ export const DiffControls = memo(function DiffControls() {
         }
       }
     if (!id) {
-      const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
+      const candidates = Object.values(toolCallsById).filter((t) =>
+        isWorkflowEditToolCall(t.name, t.params)
+      )
      id = candidates.length ? candidates[candidates.length - 1].id : undefined
     }
     if (id) updatePreviewToolCallState('rejected', id)
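For clarity, the matching rule introduced above: a tool call counts as a workflow edit when it is `workflow_change` in `apply` mode, or when it carries a non-empty `proposalId`; the old `edit_workflow` name no longer matches. The snippet below mirrors the helper as a standalone check for illustration rather than importing it from the component.

```ts
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
  if (name !== 'workflow_change') return false
  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
  if (mode === 'apply') return true
  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
}

console.log(isWorkflowEditToolCall('workflow_change', { mode: 'apply' })) // true
console.log(isWorkflowEditToolCall('workflow_change', { proposalId: 'p_123' })) // true
console.log(isWorkflowEditToolCall('edit_workflow', {})) // false
```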
@@ -47,6 +47,27 @@ interface ParsedTags {
   cleanContent: string
 }

+function getToolCallParams(toolCall?: CopilotToolCall): Record<string, unknown> {
+  const candidate = ((toolCall as any)?.parameters ||
+    (toolCall as any)?.input ||
+    (toolCall as any)?.params ||
+    {}) as Record<string, unknown>
+  return candidate && typeof candidate === 'object' ? candidate : {}
+}
+
+function isWorkflowChangeApplyMode(toolCall?: CopilotToolCall): boolean {
+  if (!toolCall || toolCall.name !== 'workflow_change') return false
+  const params = getToolCallParams(toolCall)
+  const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
+  if (mode === 'apply') return true
+  return typeof params.proposalId === 'string' && params.proposalId.length > 0
+}
+
+function isWorkflowEditSummaryTool(toolCall?: CopilotToolCall): boolean {
+  if (!toolCall) return false
+  return isWorkflowChangeApplyMode(toolCall)
+}
+
 /**
  * Extracts plan steps from plan_respond tool calls in subagent blocks.
  * @param blocks - The subagent content blocks to search
@@ -871,7 +892,10 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
       )
     }
     if (segment.type === 'tool' && segment.block.toolCall) {
-      if (toolCall.name === 'edit' && segment.block.toolCall.name === 'edit_workflow') {
+      if (
+        (toolCall.name === 'edit' || toolCall.name === 'build') &&
+        isWorkflowEditSummaryTool(segment.block.toolCall)
+      ) {
         return (
           <div key={`tool-${segment.block.toolCall.id || index}`}>
             <WorkflowEditSummary toolCall={segment.block.toolCall} />
@@ -968,12 +992,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
     }
   }, [blocks])

-  if (toolCall.name !== 'edit_workflow') {
+  if (!isWorkflowEditSummaryTool(toolCall)) {
     return null
   }

-  const params =
-    (toolCall as any).parameters || (toolCall as any).input || (toolCall as any).params || {}
+  const params = getToolCallParams(toolCall)
   let operations = Array.isArray(params.operations) ? params.operations : []

   if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
@@ -1219,11 +1242,6 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
     )
   })

-/** Checks if a tool is server-side executed (not a client tool) */
-function isIntegrationTool(toolName: string): boolean {
-  return !TOOL_DISPLAY_REGISTRY[toolName]
-}
-
 function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
   if (!toolCall.name || toolCall.name === 'unknown_tool') {
     return false
@@ -1233,59 +1251,96 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
     return false
   }

-  // Never show buttons for tools the user has marked as always-allowed
-  if (useCopilotStore.getState().isToolAutoAllowed(toolCall.name)) {
+  if (toolCall.ui?.showInterrupt !== true) {
     return false
   }

-  const hasInterrupt = !!TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt
-  if (hasInterrupt) {
-    return true
-  }
-
-  // Integration tools (user-installed) always require approval
-  if (isIntegrationTool(toolCall.name)) {
-    return true
-  }
-
-  return false
+  return true
 }

 const toolCallLogger = createLogger('CopilotToolCall')

 async function sendToolDecision(
   toolCallId: string,
-  status: 'accepted' | 'rejected' | 'background'
+  status: 'accepted' | 'rejected' | 'background',
+  options?: {
+    toolName?: string
+    remember?: boolean
+  }
 ) {
   try {
     await fetch('/api/copilot/confirm', {
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify({ toolCallId, status }),
+      body: JSON.stringify({
+        toolCallId,
+        status,
+        ...(options?.toolName ? { toolName: options.toolName } : {}),
+        ...(options?.remember ? { remember: true } : {}),
+      }),
     })
   } catch (error) {
     toolCallLogger.warn('Failed to send tool decision', {
       toolCallId,
       status,
+      remember: options?.remember === true,
+      toolName: options?.toolName,
       error: error instanceof Error ? error.message : String(error),
     })
   }
 }

+async function removeAutoAllowedToolPreference(toolName: string): Promise<boolean> {
+  try {
+    const response = await fetch(`/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolName)}`, {
+      method: 'DELETE',
+    })
+    return response.ok
+  } catch (error) {
+    toolCallLogger.warn('Failed to remove auto-allowed tool preference', {
+      toolName,
+      error: error instanceof Error ? error.message : String(error),
+    })
+    return false
+  }
+}
+
+type ToolUiAction = NonNullable<NonNullable<CopilotToolCall['ui']>['actions']>[number]
+
+function actionDecision(action: ToolUiAction): 'accepted' | 'rejected' | 'background' {
+  const id = action.id.toLowerCase()
+  if (id.includes('background')) return 'background'
+  if (action.kind === 'reject') return 'rejected'
+  return 'accepted'
+}
+
+function isClientRunCapability(toolCall: CopilotToolCall): boolean {
+  if (toolCall.execution?.target === 'sim_client_capability') {
+    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
+  }
+  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
+}
+
 async function handleRun(
   toolCall: CopilotToolCall,
   setToolCallState: any,
   onStateChange?: any,
-  editedParams?: any
+  editedParams?: any,
+  options?: {
+    remember?: boolean
+  }
 ) {
   setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
   onStateChange?.('executing')
-  await sendToolDecision(toolCall.id, 'accepted')
+  await sendToolDecision(toolCall.id, 'accepted', {
+    toolName: toolCall.name,
+    remember: options?.remember === true,
+  })

   // Client-executable run tools: execute on the client for real-time feedback
   // (block pulsing, console logs, stop button). The server defers execution
   // for these tools; the client reports back via mark-complete.
-  if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)) {
+  if (isClientRunCapability(toolCall)) {
     const params = editedParams || toolCall.params || {}
     executeRunToolOnClient(toolCall.id, toolCall.name, params)
   }
@@ -1298,6 +1353,9 @@ async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onSt
 }

 function getDisplayName(toolCall: CopilotToolCall): string {
+  if (toolCall.ui?.phaseLabel) return toolCall.ui.phaseLabel
+  if (toolCall.ui?.title) return `${getStateVerb(toolCall.state)} ${toolCall.ui.title}`
+
   const fromStore = (toolCall as any).display?.text
   if (fromStore) return fromStore
   const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
@@ -1342,53 +1400,37 @@ function RunSkipButtons({
   toolCall,
   onStateChange,
   editedParams,
+  actions,
 }: {
   toolCall: CopilotToolCall
   onStateChange?: (state: any) => void
   editedParams?: any
+  actions: ToolUiAction[]
 }) {
   const [isProcessing, setIsProcessing] = useState(false)
   const [buttonsHidden, setButtonsHidden] = useState(false)
   const actionInProgressRef = useRef(false)
-  const { setToolCallState, addAutoAllowedTool } = useCopilotStore()
+  const { setToolCallState } = useCopilotStore()

-  const onRun = async () => {
+  const onAction = async (action: ToolUiAction) => {
     // Prevent race condition - check ref synchronously
     if (actionInProgressRef.current) return
     actionInProgressRef.current = true
     setIsProcessing(true)
     setButtonsHidden(true)
     try {
-      await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
-    } finally {
-      setIsProcessing(false)
-      actionInProgressRef.current = false
-    }
-  }
-
-  const onAlwaysAllow = async () => {
-    // Prevent race condition - check ref synchronously
-    if (actionInProgressRef.current) return
-    actionInProgressRef.current = true
-    setIsProcessing(true)
-    setButtonsHidden(true)
-    try {
-      await addAutoAllowedTool(toolCall.name)
-      await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
-    } finally {
-      setIsProcessing(false)
-      actionInProgressRef.current = false
-    }
-  }
-
-  const onSkip = async () => {
-    // Prevent race condition - check ref synchronously
-    if (actionInProgressRef.current) return
-    actionInProgressRef.current = true
-    setIsProcessing(true)
-    setButtonsHidden(true)
-    try {
-      await handleSkip(toolCall, setToolCallState, onStateChange)
+      const decision = actionDecision(action)
+      if (decision === 'accepted') {
+        await handleRun(toolCall, setToolCallState, onStateChange, editedParams, {
+          remember: action.remember === true,
+        })
+      } else if (decision === 'rejected') {
+        await handleSkip(toolCall, setToolCallState, onStateChange)
+      } else {
+        setToolCallState(toolCall, ClientToolCallState.background)
+        onStateChange?.('background')
+        await sendToolDecision(toolCall.id, 'background')
+      }
     } finally {
       setIsProcessing(false)
       actionInProgressRef.current = false
@@ -1397,23 +1439,22 @@ function RunSkipButtons({

   if (buttonsHidden) return null

-  // Show "Always Allow" for all tools that require confirmation
-  const showAlwaysAllow = true
-
-  // Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
   return (
     <div className='mt-[10px] flex gap-[6px]'>
-      <Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
-        {isProcessing ? 'Allowing...' : 'Allow'}
-      </Button>
-      {showAlwaysAllow && (
-        <Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
-          {isProcessing ? 'Allowing...' : 'Always Allow'}
-        </Button>
-      )}
-      <Button onClick={onSkip} disabled={isProcessing} variant='default'>
-        Skip
-      </Button>
+      {actions.map((action, index) => {
+        const variant =
+          action.kind === 'reject' ? 'default' : action.remember ? 'default' : 'tertiary'
+        return (
+          <Button
+            key={action.id}
+            onClick={() => onAction(action)}
+            disabled={isProcessing}
+            variant={variant}
+          >
+            {isProcessing && index === 0 ? 'Working...' : action.label}
+          </Button>
+        )
+      })}
     </div>
   )
 }
@@ -1430,10 +1471,16 @@ export function ToolCall({
   const liveToolCall = useCopilotStore((s) =>
     effectiveId ? s.toolCallsById[effectiveId] : undefined
   )
-  const toolCall = liveToolCall || toolCallProp
-  // Guard: nothing to render without a toolCall
-  if (!toolCall) return null
+  const rawToolCall = liveToolCall || toolCallProp
+  const hasRealToolCall = !!rawToolCall
+  const toolCall: CopilotToolCall =
+    rawToolCall ||
+    ({
+      id: effectiveId || '',
+      name: '',
+      state: ClientToolCallState.generating,
+      params: {},
+    } as CopilotToolCall)

   const isExpandablePending =
     toolCall?.state === 'pending' &&
@@ -1441,17 +1488,15 @@ export function ToolCall({

   const [expanded, setExpanded] = useState(isExpandablePending)
   const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
+  const [autoAllowRemovedForCall, setAutoAllowRemovedForCall] = useState(false)

   // State for editable parameters
   const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
   const [editedParams, setEditedParams] = useState(params)
   const paramsRef = useRef(params)

-  // Check if this integration tool is auto-allowed
-  const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
-  const isAutoAllowed = useCopilotStore(
-    (s) => isIntegrationTool(toolCall.name) && s.isToolAutoAllowed(toolCall.name)
-  )
+  const { setToolCallState } = useCopilotStore()
+  const isAutoAllowed = toolCall.ui?.autoAllowed === true && !autoAllowRemovedForCall

   // Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
   useEffect(() => {
@@ -1461,6 +1506,14 @@ export function ToolCall({
     }
   }, [params])

+  useEffect(() => {
+    setAutoAllowRemovedForCall(false)
+    setShowRemoveAutoAllow(false)
+  }, [toolCall.id])
+
+  // Guard: nothing to render without a toolCall
+  if (!hasRealToolCall) return null
+
   // Skip rendering some internal tools
   if (
     toolCall.name === 'checkoff_todo' ||
@@ -1472,7 +1525,9 @@ export function ToolCall({
     return null

   // Special rendering for subagent tools - show as thinking text with tool calls at top level
-  const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
+  const isSubagentTool =
+    toolCall.execution?.target === 'go_subagent' ||
+    TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true

   // For ALL subagent tools, don't show anything until we have blocks with content
   if (isSubagentTool) {
@@ -1499,28 +1554,6 @@ export function ToolCall({
     )
   }

-  // Get current mode from store to determine if we should render integration tools
-  const mode = useCopilotStore.getState().mode
-
-  // Check if this is a completed/historical tool call (not pending/executing)
-  // Use string comparison to handle both enum values and string values from DB
-  const stateStr = String(toolCall.state)
-  const isCompletedToolCall =
-    stateStr === 'success' ||
-    stateStr === 'error' ||
-    stateStr === 'rejected' ||
-    stateStr === 'aborted'
-
-  // Allow rendering if:
-  // 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
-  // 2. We're in build mode (integration tools are executed server-side), OR
-  // 3. Tool call is already completed (historical - should always render)
-  const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
-  const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
-
-  if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
-    return null
-  }
   const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
   // Check if tool has params table config (meaning it's expandable)
   const hasParamsTable = !!toolUIConfig?.paramsTable
@@ -1530,6 +1563,14 @@ export function ToolCall({
     toolCall.name === 'make_api_request' ||
     toolCall.name === 'set_global_workflow_variables'

+  const interruptActions =
+    (toolCall.ui?.actions && toolCall.ui.actions.length > 0
+      ? toolCall.ui.actions
+      : [
+          { id: 'allow_once', label: 'Allow', kind: 'accept' as const },
+          { id: 'allow_always', label: 'Always Allow', kind: 'accept' as const, remember: true },
+          { id: 'reject', label: 'Skip', kind: 'reject' as const },
+        ]) as ToolUiAction[]
   const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)

   // Check UI config for secondary action - only show for current message tool calls
@@ -1987,9 +2028,12 @@ export function ToolCall({
             <div className='mt-[10px]'>
               <Button
                 onClick={async () => {
-                  await removeAutoAllowedTool(toolCall.name)
-                  setShowRemoveAutoAllow(false)
-                  forceUpdate({})
+                  const removed = await removeAutoAllowedToolPreference(toolCall.name)
+                  if (removed) {
+                    setAutoAllowRemovedForCall(true)
+                    setShowRemoveAutoAllow(false)
+                    forceUpdate({})
+                  }
                 }}
                 variant='default'
                 className='text-xs'
@@ -2003,6 +2047,7 @@ export function ToolCall({
               toolCall={toolCall}
               onStateChange={handleStateChange}
               editedParams={editedParams}
+              actions={interruptActions}
             />
           )}
           {/* Render subagent content as thinking text */}
@@ -2048,9 +2093,12 @@ export function ToolCall({
             <div className='mt-[10px]'>
               <Button
                 onClick={async () => {
-                  await removeAutoAllowedTool(toolCall.name)
-                  setShowRemoveAutoAllow(false)
-                  forceUpdate({})
+                  const removed = await removeAutoAllowedToolPreference(toolCall.name)
+                  if (removed) {
+                    setAutoAllowRemovedForCall(true)
+                    setShowRemoveAutoAllow(false)
+                    forceUpdate({})
+                  }
                 }}
                 variant='default'
                 className='text-xs'
@@ -2064,6 +2112,7 @@ export function ToolCall({
               toolCall={toolCall}
               onStateChange={handleStateChange}
               editedParams={editedParams}
+              actions={interruptActions}
             />
           )}
           {/* Render subagent content as thinking text */}
@@ -2087,7 +2136,7 @@ export function ToolCall({
     }
   }

-  const isEditWorkflow = toolCall.name === 'edit_workflow'
+  const isEditWorkflow = isWorkflowEditSummaryTool(toolCall)
   const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
   const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
   const hideTextForEditWorkflow = isEditWorkflow && hasOperations
@@ -2109,9 +2158,12 @@ export function ToolCall({
           <div className='mt-[10px]'>
             <Button
               onClick={async () => {
-                await removeAutoAllowedTool(toolCall.name)
-                setShowRemoveAutoAllow(false)
-                forceUpdate({})
+                const removed = await removeAutoAllowedToolPreference(toolCall.name)
+                if (removed) {
+                  setAutoAllowRemovedForCall(true)
+                  setShowRemoveAutoAllow(false)
+                  forceUpdate({})
+                }
               }}
               variant='default'
               className='text-xs'
@@ -2125,6 +2177,7 @@ export function ToolCall({
             toolCall={toolCall}
             onStateChange={handleStateChange}
             editedParams={editedParams}
+            actions={interruptActions}
           />
         ) : showMoveToBackground ? (
           <div className='mt-[10px]'>
@@ -2155,7 +2208,7 @@ export function ToolCall({
           </Button>
         </div>
       ) : null}
-      {/* Workflow edit summary - shows block changes after edit_workflow completes */}
+      {/* Workflow edit summary - shows block changes after workflow_change(apply) */}
       <WorkflowEditSummary toolCall={toolCall} />

      {/* Render subagent content as thinking text */}
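Two shapes worth calling out from the hunks above: the default interrupt actions used when the server sends none, and the extended body that `sendToolDecision` now posts to `/api/copilot/confirm`. The values below are illustrative examples only, not live requests or real ids.

```ts
// Default actions when toolCall.ui?.actions is absent (taken from the hunk above).
const defaultActions = [
  { id: 'allow_once', label: 'Allow', kind: 'accept' as const },
  { id: 'allow_always', label: 'Always Allow', kind: 'accept' as const, remember: true },
  { id: 'reject', label: 'Skip', kind: 'reject' as const },
]

// Example payload for an "Always Allow" click; the toolCallId is hypothetical.
const confirmBody = {
  toolCallId: 'tc_abc123',
  status: 'accepted',
  toolName: 'make_api_request',
  remember: true,
}
console.log(defaultActions.length, JSON.stringify(confirmBody))
```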
@@ -113,7 +113,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     clearPlanArtifact,
     savePlanArtifact,
     loadAvailableModels,
-    loadAutoAllowedTools,
     resumeActiveStream,
   } = useCopilotStore()

@@ -125,8 +124,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     setCopilotWorkflowId,
     loadChats,
     loadAvailableModels,
-    loadAutoAllowedTools,
-    currentChat,
     isSendingMessage,
     resumeActiveStream,
   })
@@ -154,6 +151,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     planTodos,
   })

+  const renderedChatTitle = currentChat?.title || 'New Chat'
+
   /** Gets markdown content for design document section (available in all modes once created) */
   const designDocumentContent = useMemo(() => {
     if (streamingPlanContent) {
@@ -166,6 +165,14 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     return ''
   }, [streamingPlanContent])

+  useEffect(() => {
+    logger.info('[TitleRender] Copilot header title changed', {
+      currentChatId: currentChat?.id || null,
+      currentChatTitle: currentChat?.title || null,
+      renderedTitle: renderedChatTitle,
+    })
+  }, [currentChat?.id, currentChat?.title, renderedChatTitle])
+
   /** Focuses the copilot input */
   const focusInput = useCallback(() => {
     userInputRef.current?.focus()
@@ -348,7 +355,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
       {/* Header */}
       <div className='mx-[-1px] flex flex-shrink-0 items-center justify-between gap-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] px-[12px] py-[6px]'>
         <h2 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
-          {currentChat?.title || 'New Chat'}
+          {renderedChatTitle}
         </h2>
         <div className='flex items-center gap-[8px]'>
           <Button variant='ghost' className='p-0' onClick={handleStartNewChat}>
@@ -12,8 +12,6 @@ interface UseCopilotInitializationProps {
   setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
   loadChats: (forceRefresh?: boolean) => Promise<void>
   loadAvailableModels: () => Promise<void>
-  loadAutoAllowedTools: () => Promise<void>
-  currentChat: any
   isSendingMessage: boolean
   resumeActiveStream: () => Promise<boolean>
 }
@@ -32,8 +30,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     setCopilotWorkflowId,
     loadChats,
     loadAvailableModels,
-    loadAutoAllowedTools,
-    currentChat,
     isSendingMessage,
     resumeActiveStream,
   } = props
@@ -120,17 +116,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     })
   }, [isSendingMessage, resumeActiveStream])

-  /** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
-  const hasLoadedAutoAllowedToolsRef = useRef(false)
-  useEffect(() => {
-    if (!hasLoadedAutoAllowedToolsRef.current) {
-      hasLoadedAutoAllowedToolsRef.current = true
-      loadAutoAllowedTools().catch((err) => {
-        logger.warn('[Copilot] Failed to load auto-allowed tools', err)
-      })
-    }
-  }, [loadAutoAllowedTools])
-
   /** Load available models once on mount */
   const hasLoadedModelsRef = useRef(false)
   useEffect(() => {
@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
             className='min-h-[120px] resize-none'
             value={description}
             onChange={(e) => setDescription(e.target.value)}
-            maxLength={2000}
+            maxLength={500}
             disabled={isGenerating}
           />
           <div className='flex items-center justify-between'>
@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
               </p>
             )}
             {!updateMutation.error && !generateMutation.error && <div />}
-            <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
+            <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
           </div>
         </ModalBody>
         <ModalFooter>
@@ -57,21 +57,6 @@ export function useChangeDetection({
         }
       }

-      if (block.triggerMode) {
-        const triggerConfigValue = blockSubValues?.triggerConfig
-        if (
-          triggerConfigValue &&
-          typeof triggerConfigValue === 'object' &&
-          !subBlocks.triggerConfig
-        ) {
-          subBlocks.triggerConfig = {
-            id: 'triggerConfig',
-            type: 'short-input',
-            value: triggerConfigValue,
-          }
-        }
-      }
-
       blocksWithSubBlocks[blockId] = {
         ...block,
         subBlocks,
@@ -1,4 +1,4 @@
-import { useCallback, useEffect, useRef, useState } from 'react'
+import { useCallback, useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
 import { useQueryClient } from '@tanstack/react-query'
 import { v4 as uuidv4 } from 'uuid'
@@ -46,13 +46,7 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'

 const logger = createLogger('useWorkflowExecution')

-/**
- * Module-level Set tracking which workflows have an active reconnection effect.
- * Prevents multiple hook instances (from different components) from starting
- * concurrent reconnection streams for the same workflow during the same mount cycle.
- */
-const activeReconnections = new Set<string>()
-
+// Debug state validation result
 interface DebugValidationResult {
   isValid: boolean
   error?: string
@@ -60,7 +54,7 @@ interface DebugValidationResult {

 interface BlockEventHandlerConfig {
   workflowId?: string
-  executionIdRef: { current: string }
+  executionId?: string
   workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
   activeBlocksSet: Set<string>
   accumulatedBlockLogs: BlockLog[]
@@ -114,15 +108,12 @@ export function useWorkflowExecution() {
   const queryClient = useQueryClient()
   const currentWorkflow = useCurrentWorkflow()
   const { activeWorkflowId, workflows } = useWorkflowRegistry()
-  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
+  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
     useTerminalConsoleStore()
-  const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
   const { getAllVariables } = useEnvironmentStore()
   const { getVariablesByWorkflowId, variables } = useVariablesStore()
   const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
     useCurrentWorkflowExecution()
-  const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
-  const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
   const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
   const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
   const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -306,7 +297,7 @@ export function useWorkflowExecution() {
     (config: BlockEventHandlerConfig) => {
       const {
         workflowId,
-        executionIdRef,
+        executionId,
         workflowEdges,
         activeBlocksSet,
         accumulatedBlockLogs,
@@ -317,14 +308,6 @@ export function useWorkflowExecution() {
         onBlockCompleteCallback,
       } = config

-      /** Returns true if this execution was cancelled or superseded by another run. */
-      const isStaleExecution = () =>
-        !!(
-          workflowId &&
-          executionIdRef.current &&
-          useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
-        )
-
       const updateActiveBlocks = (blockId: string, isActive: boolean) => {
         if (!workflowId) return
         if (isActive) {
@@ -377,7 +360,7 @@ export function useWorkflowExecution() {
             endedAt: data.endedAt,
             workflowId,
             blockId: data.blockId,
-            executionId: executionIdRef.current,
+            executionId,
             blockName: data.blockName || 'Unknown Block',
             blockType: data.blockType || 'unknown',
             iterationCurrent: data.iterationCurrent,
@@ -400,7 +383,7 @@ export function useWorkflowExecution() {
             endedAt: data.endedAt,
             workflowId,
             blockId: data.blockId,
-            executionId: executionIdRef.current,
+            executionId,
             blockName: data.blockName || 'Unknown Block',
             blockType: data.blockType || 'unknown',
             iterationCurrent: data.iterationCurrent,
@@ -427,7 +410,7 @@ export function useWorkflowExecution() {
             iterationType: data.iterationType,
             iterationContainerId: data.iterationContainerId,
           },
-          executionIdRef.current
+          executionId
         )
       }

@@ -449,12 +432,11 @@ export function useWorkflowExecution() {
             iterationType: data.iterationType,
             iterationContainerId: data.iterationContainerId,
           },
-          executionIdRef.current
+          executionId
         )
       }

       const onBlockStarted = (data: BlockStartedData) => {
-        if (isStaleExecution()) return
         updateActiveBlocks(data.blockId, true)
         markIncomingEdges(data.blockId)

@@ -471,7 +453,7 @@ export function useWorkflowExecution() {
           endedAt: undefined,
           workflowId,
           blockId: data.blockId,
-          executionId: executionIdRef.current,
+          executionId,
           blockName: data.blockName || 'Unknown Block',
           blockType: data.blockType || 'unknown',
           isRunning: true,
@@ -483,7 +465,6 @@ export function useWorkflowExecution() {
       }

       const onBlockCompleted = (data: BlockCompletedData) => {
-        if (isStaleExecution()) return
         updateActiveBlocks(data.blockId, false)
         if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')

@@ -514,7 +495,6 @@ export function useWorkflowExecution() {
       }

       const onBlockError = (data: BlockErrorData) => {
-        if (isStaleExecution()) return
         updateActiveBlocks(data.blockId, false)
         if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')

@@ -922,6 +902,10 @@ export function useWorkflowExecution() {

       // Update block logs with actual stream completion times
       if (result.logs && streamCompletionTimes.size > 0) {
+        const streamCompletionEndTime = new Date(
+          Math.max(...Array.from(streamCompletionTimes.values()))
+        ).toISOString()
+
         result.logs.forEach((log: BlockLog) => {
           if (streamCompletionTimes.has(log.blockId)) {
             const completionTime = streamCompletionTimes.get(log.blockId)!
@@ -1003,6 +987,7 @@ export function useWorkflowExecution() {
         return { success: true, stream }
       }

+      // For manual (non-chat) execution
       const manualExecutionId = uuidv4()
       try {
         const result = await executeWorkflow(
@@ -1017,10 +1002,29 @@ export function useWorkflowExecution() {
           if (result.metadata.pendingBlocks) {
             setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
           }
+        } else if (result && 'success' in result) {
+          setExecutionResult(result)
+          // Reset execution state after successful non-debug execution
+          setIsExecuting(activeWorkflowId, false)
+          setIsDebugging(activeWorkflowId, false)
+          setActiveBlocks(activeWorkflowId, new Set())
+
+          if (isChatExecution) {
+            if (!result.metadata) {
+              result.metadata = { duration: 0, startTime: new Date().toISOString() }
+            }
+            ;(result.metadata as any).source = 'chat'
+          }
+
+          // Invalidate subscription queries to update usage
+          setTimeout(() => {
+            queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
+          }, 1000)
         }
         return result
       } catch (error: any) {
         const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
+        // Note: Error logs are already persisted server-side via execution-core.ts
         return errorResult
       }
     },
@@ -1271,7 +1275,7 @@ export function useWorkflowExecution() {
     if (activeWorkflowId) {
       logger.info('Using server-side executor')

-      const executionIdRef = { current: '' }
+      const executionId = uuidv4()

       let executionResult: ExecutionResult = {
         success: false,
@@ -1289,7 +1293,7 @@ export function useWorkflowExecution() {
       try {
         const blockHandlers = buildBlockEventHandlers({
           workflowId: activeWorkflowId,
-          executionIdRef,
+          executionId,
           workflowEdges,
           activeBlocksSet,
           accumulatedBlockLogs,
@@ -1322,10 +1326,6 @@ export function useWorkflowExecution() {
             loops: clientWorkflowState.loops,
             parallels: clientWorkflowState.parallels,
           },
-          onExecutionId: (id) => {
-            executionIdRef.current = id
-            setCurrentExecutionId(activeWorkflowId, id)
-          },
           callbacks: {
             onExecutionStarted: (data) => {
|
||||||
logger.info('Server execution started:', data)
|
logger.info('Server execution started:', data)
|
||||||
@@ -1368,18 +1368,6 @@ export function useWorkflowExecution() {
|
|||||||
},
|
},
|
||||||
|
|
||||||
onExecutionCompleted: (data) => {
|
onExecutionCompleted: (data) => {
|
||||||
if (
|
|
||||||
activeWorkflowId &&
|
|
||||||
executionIdRef.current &&
|
|
||||||
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
|
||||||
executionIdRef.current
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
setCurrentExecutionId(activeWorkflowId, null)
|
|
||||||
}
|
|
||||||
|
|
||||||
executionResult = {
|
executionResult = {
|
||||||
success: data.success,
|
success: data.success,
|
||||||
output: data.output,
|
output: data.output,
|
||||||
@@ -1437,33 +1425,9 @@ export function useWorkflowExecution() {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const workflowExecState = activeWorkflowId
|
|
||||||
? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
|
|
||||||
: null
|
|
||||||
if (activeWorkflowId && !workflowExecState?.isDebugging) {
|
|
||||||
setExecutionResult(executionResult)
|
|
||||||
setIsExecuting(activeWorkflowId, false)
|
|
||||||
setActiveBlocks(activeWorkflowId, new Set())
|
|
||||||
setTimeout(() => {
|
|
||||||
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
|
||||||
}, 1000)
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionError: (data) => {
|
onExecutionError: (data) => {
|
||||||
if (
|
|
||||||
activeWorkflowId &&
|
|
||||||
executionIdRef.current &&
|
|
||||||
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
|
||||||
executionIdRef.current
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
setCurrentExecutionId(activeWorkflowId, null)
|
|
||||||
}
|
|
||||||
|
|
||||||
executionResult = {
|
executionResult = {
|
||||||
success: false,
|
success: false,
|
||||||
output: {},
|
output: {},
|
||||||
@@ -1477,53 +1441,43 @@ export function useWorkflowExecution() {
|
|||||||
const isPreExecutionError = accumulatedBlockLogs.length === 0
|
const isPreExecutionError = accumulatedBlockLogs.length === 0
|
||||||
handleExecutionErrorConsole({
|
handleExecutionErrorConsole({
|
||||||
workflowId: activeWorkflowId,
|
workflowId: activeWorkflowId,
|
||||||
executionId: executionIdRef.current,
|
executionId,
|
||||||
error: data.error,
|
error: data.error,
|
||||||
durationMs: data.duration,
|
durationMs: data.duration,
|
||||||
blockLogs: accumulatedBlockLogs,
|
blockLogs: accumulatedBlockLogs,
|
||||||
isPreExecutionError,
|
isPreExecutionError,
|
||||||
})
|
})
|
||||||
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
setIsExecuting(activeWorkflowId, false)
|
|
||||||
setIsDebugging(activeWorkflowId, false)
|
|
||||||
setActiveBlocks(activeWorkflowId, new Set())
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionCancelled: (data) => {
|
onExecutionCancelled: (data) => {
|
||||||
if (
|
|
||||||
activeWorkflowId &&
|
|
||||||
executionIdRef.current &&
|
|
||||||
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
|
||||||
executionIdRef.current
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
setCurrentExecutionId(activeWorkflowId, null)
|
|
||||||
}
|
|
||||||
|
|
||||||
handleExecutionCancelledConsole({
|
handleExecutionCancelledConsole({
|
||||||
workflowId: activeWorkflowId,
|
workflowId: activeWorkflowId,
|
||||||
executionId: executionIdRef.current,
|
executionId,
|
||||||
durationMs: data?.duration,
|
durationMs: data?.duration,
|
||||||
})
|
})
|
||||||
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
setIsExecuting(activeWorkflowId, false)
|
|
||||||
setIsDebugging(activeWorkflowId, false)
|
|
||||||
setActiveBlocks(activeWorkflowId, new Set())
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
return executionResult
|
return executionResult
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
|
// Don't log abort errors - they're intentional user actions
|
||||||
if (error.name === 'AbortError' || error.message?.includes('aborted')) {
|
if (error.name === 'AbortError' || error.message?.includes('aborted')) {
|
||||||
logger.info('Execution aborted by user')
|
logger.info('Execution aborted by user')
|
||||||
return executionResult
|
|
||||||
|
// Reset execution state
|
||||||
|
if (activeWorkflowId) {
|
||||||
|
setIsExecuting(activeWorkflowId, false)
|
||||||
|
setActiveBlocks(activeWorkflowId, new Set())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return gracefully without error
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
output: {},
|
||||||
|
metadata: { duration: 0 },
|
||||||
|
logs: [],
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.error('Server-side execution failed:', error)
|
logger.error('Server-side execution failed:', error)
|
||||||
@@ -1531,6 +1485,7 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Fallback: should never reach here
|
||||||
throw new Error('Server-side execution is required')
|
throw new Error('Server-side execution is required')
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1762,28 +1717,25 @@ export function useWorkflowExecution() {
|
|||||||
* Handles cancelling the current workflow execution
|
* Handles cancelling the current workflow execution
|
||||||
*/
|
*/
|
||||||
const handleCancelExecution = useCallback(() => {
|
const handleCancelExecution = useCallback(() => {
|
||||||
if (!activeWorkflowId) return
|
|
||||||
logger.info('Workflow execution cancellation requested')
|
logger.info('Workflow execution cancellation requested')
|
||||||
|
|
||||||
const storedExecutionId = getCurrentExecutionId(activeWorkflowId)
|
// Cancel the execution stream for this workflow (server-side)
|
||||||
|
executionStream.cancel(activeWorkflowId ?? undefined)
|
||||||
|
|
||||||
if (storedExecutionId) {
|
// Mark current chat execution as superseded so its cleanup won't affect new executions
|
||||||
setCurrentExecutionId(activeWorkflowId, null)
|
currentChatExecutionIdRef.current = null
|
||||||
fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
|
|
||||||
method: 'POST',
|
// Mark all running entries as canceled in the terminal
|
||||||
}).catch(() => {})
|
if (activeWorkflowId) {
|
||||||
handleExecutionCancelledConsole({
|
cancelRunningEntries(activeWorkflowId)
|
||||||
workflowId: activeWorkflowId,
|
|
||||||
executionId: storedExecutionId,
|
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
|
||||||
})
|
setIsExecuting(activeWorkflowId, false)
|
||||||
|
setIsDebugging(activeWorkflowId, false)
|
||||||
|
setActiveBlocks(activeWorkflowId, new Set())
|
||||||
}
|
}
|
||||||
|
|
||||||
executionStream.cancel(activeWorkflowId)
|
// If in debug mode, also reset debug state
|
||||||
currentChatExecutionIdRef.current = null
|
|
||||||
setIsExecuting(activeWorkflowId, false)
|
|
||||||
setIsDebugging(activeWorkflowId, false)
|
|
||||||
setActiveBlocks(activeWorkflowId, new Set())
|
|
||||||
|
|
||||||
if (isDebugging) {
|
if (isDebugging) {
|
||||||
resetDebugState()
|
resetDebugState()
|
||||||
}
|
}
|
||||||
@@ -1795,9 +1747,7 @@ export function useWorkflowExecution() {
|
|||||||
setIsDebugging,
|
setIsDebugging,
|
||||||
setActiveBlocks,
|
setActiveBlocks,
|
||||||
activeWorkflowId,
|
activeWorkflowId,
|
||||||
getCurrentExecutionId,
|
cancelRunningEntries,
|
||||||
setCurrentExecutionId,
|
|
||||||
handleExecutionCancelledConsole,
|
|
||||||
])
|
])
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1897,7 +1847,7 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
setIsExecuting(workflowId, true)
|
setIsExecuting(workflowId, true)
|
||||||
const executionIdRef = { current: '' }
|
const executionId = uuidv4()
|
||||||
const accumulatedBlockLogs: BlockLog[] = []
|
const accumulatedBlockLogs: BlockLog[] = []
|
||||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||||
const executedBlockIds = new Set<string>()
|
const executedBlockIds = new Set<string>()
|
||||||
@@ -1906,7 +1856,7 @@ export function useWorkflowExecution() {
|
|||||||
try {
|
try {
|
||||||
const blockHandlers = buildBlockEventHandlers({
|
const blockHandlers = buildBlockEventHandlers({
|
||||||
workflowId,
|
workflowId,
|
||||||
executionIdRef,
|
executionId,
|
||||||
workflowEdges,
|
workflowEdges,
|
||||||
activeBlocksSet,
|
activeBlocksSet,
|
||||||
accumulatedBlockLogs,
|
accumulatedBlockLogs,
|
||||||
@@ -1921,10 +1871,6 @@ export function useWorkflowExecution() {
|
|||||||
startBlockId: blockId,
|
startBlockId: blockId,
|
||||||
sourceSnapshot: effectiveSnapshot,
|
sourceSnapshot: effectiveSnapshot,
|
||||||
input: workflowInput,
|
input: workflowInput,
|
||||||
onExecutionId: (id) => {
|
|
||||||
executionIdRef.current = id
|
|
||||||
setCurrentExecutionId(workflowId, id)
|
|
||||||
},
|
|
||||||
callbacks: {
|
callbacks: {
|
||||||
onBlockStarted: blockHandlers.onBlockStarted,
|
onBlockStarted: blockHandlers.onBlockStarted,
|
||||||
onBlockCompleted: blockHandlers.onBlockCompleted,
|
onBlockCompleted: blockHandlers.onBlockCompleted,
|
||||||
@@ -1932,6 +1878,7 @@ export function useWorkflowExecution() {
|
|||||||
|
|
||||||
onExecutionCompleted: (data) => {
|
onExecutionCompleted: (data) => {
|
||||||
if (data.success) {
|
if (data.success) {
|
||||||
|
// Add the start block (trigger) to executed blocks
|
||||||
executedBlockIds.add(blockId)
|
executedBlockIds.add(blockId)
|
||||||
|
|
||||||
const mergedBlockStates: Record<string, BlockState> = {
|
const mergedBlockStates: Record<string, BlockState> = {
|
||||||
@@ -1955,10 +1902,6 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
setLastExecutionSnapshot(workflowId, updatedSnapshot)
|
setLastExecutionSnapshot(workflowId, updatedSnapshot)
|
||||||
}
|
}
|
||||||
|
|
||||||
setCurrentExecutionId(workflowId, null)
|
|
||||||
setIsExecuting(workflowId, false)
|
|
||||||
setActiveBlocks(workflowId, new Set())
|
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionError: (data) => {
|
onExecutionError: (data) => {
|
||||||
@@ -1978,27 +1921,19 @@ export function useWorkflowExecution() {
|
|||||||
|
|
||||||
handleExecutionErrorConsole({
|
handleExecutionErrorConsole({
|
||||||
workflowId,
|
workflowId,
|
||||||
executionId: executionIdRef.current,
|
executionId,
|
||||||
error: data.error,
|
error: data.error,
|
||||||
durationMs: data.duration,
|
durationMs: data.duration,
|
||||||
blockLogs: accumulatedBlockLogs,
|
blockLogs: accumulatedBlockLogs,
|
||||||
})
|
})
|
||||||
|
|
||||||
setCurrentExecutionId(workflowId, null)
|
|
||||||
setIsExecuting(workflowId, false)
|
|
||||||
setActiveBlocks(workflowId, new Set())
|
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionCancelled: (data) => {
|
onExecutionCancelled: (data) => {
|
||||||
handleExecutionCancelledConsole({
|
handleExecutionCancelledConsole({
|
||||||
workflowId,
|
workflowId,
|
||||||
executionId: executionIdRef.current,
|
executionId,
|
||||||
durationMs: data?.duration,
|
durationMs: data?.duration,
|
||||||
})
|
})
|
||||||
|
|
||||||
setCurrentExecutionId(workflowId, null)
|
|
||||||
setIsExecuting(workflowId, false)
|
|
||||||
setActiveBlocks(workflowId, new Set())
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
@@ -2007,20 +1942,14 @@ export function useWorkflowExecution() {
|
|||||||
logger.error('Run-from-block failed:', error)
|
logger.error('Run-from-block failed:', error)
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
const currentId = getCurrentExecutionId(workflowId)
|
setIsExecuting(workflowId, false)
|
||||||
if (currentId === null || currentId === executionIdRef.current) {
|
setActiveBlocks(workflowId, new Set())
|
||||||
setCurrentExecutionId(workflowId, null)
|
|
||||||
setIsExecuting(workflowId, false)
|
|
||||||
setActiveBlocks(workflowId, new Set())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[
|
[
|
||||||
getLastExecutionSnapshot,
|
getLastExecutionSnapshot,
|
||||||
setLastExecutionSnapshot,
|
setLastExecutionSnapshot,
|
||||||
clearLastExecutionSnapshot,
|
clearLastExecutionSnapshot,
|
||||||
getCurrentExecutionId,
|
|
||||||
setCurrentExecutionId,
|
|
||||||
setIsExecuting,
|
setIsExecuting,
|
||||||
setActiveBlocks,
|
setActiveBlocks,
|
||||||
setBlockRunStatus,
|
setBlockRunStatus,
|
||||||
@@ -2050,213 +1979,29 @@ export function useWorkflowExecution() {
|
|||||||
|
|
||||||
const executionId = uuidv4()
|
const executionId = uuidv4()
|
||||||
try {
|
try {
|
||||||
await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
|
const result = await executeWorkflow(
|
||||||
|
undefined,
|
||||||
|
undefined,
|
||||||
|
executionId,
|
||||||
|
undefined,
|
||||||
|
'manual',
|
||||||
|
blockId
|
||||||
|
)
|
||||||
|
if (result && 'success' in result) {
|
||||||
|
setExecutionResult(result)
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorResult = handleExecutionError(error, { executionId })
|
const errorResult = handleExecutionError(error, { executionId })
|
||||||
return errorResult
|
return errorResult
|
||||||
} finally {
|
} finally {
|
||||||
setCurrentExecutionId(workflowId, null)
|
|
||||||
setIsExecuting(workflowId, false)
|
setIsExecuting(workflowId, false)
|
||||||
setIsDebugging(workflowId, false)
|
setIsDebugging(workflowId, false)
|
||||||
setActiveBlocks(workflowId, new Set())
|
setActiveBlocks(workflowId, new Set())
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[
|
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
|
||||||
activeWorkflowId,
|
|
||||||
setCurrentExecutionId,
|
|
||||||
setExecutionResult,
|
|
||||||
setIsExecuting,
|
|
||||||
setIsDebugging,
|
|
||||||
setActiveBlocks,
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (!activeWorkflowId || !hasHydrated) return
|
|
||||||
|
|
||||||
const entries = useTerminalConsoleStore.getState().entries
|
|
||||||
const runningEntries = entries.filter(
|
|
||||||
(e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
|
|
||||||
)
|
|
||||||
if (runningEntries.length === 0) return
|
|
||||||
|
|
||||||
if (activeReconnections.has(activeWorkflowId)) return
|
|
||||||
activeReconnections.add(activeWorkflowId)
|
|
||||||
|
|
||||||
executionStream.cancel(activeWorkflowId)
|
|
||||||
|
|
||||||
const sorted = [...runningEntries].sort((a, b) => {
|
|
||||||
const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
|
|
||||||
const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
|
|
||||||
return bTime - aTime
|
|
||||||
})
|
|
||||||
const executionId = sorted[0].executionId!
|
|
||||||
|
|
||||||
const otherExecutionIds = new Set(
|
|
||||||
sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
|
|
||||||
)
|
|
||||||
if (otherExecutionIds.size > 0) {
|
|
||||||
cancelRunningEntries(activeWorkflowId)
|
|
||||||
}
|
|
||||||
|
|
||||||
setCurrentExecutionId(activeWorkflowId, executionId)
|
|
||||||
setIsExecuting(activeWorkflowId, true)
|
|
||||||
|
|
||||||
const workflowEdges = useWorkflowStore.getState().edges
|
|
||||||
const activeBlocksSet = new Set<string>()
|
|
||||||
const accumulatedBlockLogs: BlockLog[] = []
|
|
||||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
|
||||||
const executedBlockIds = new Set<string>()
|
|
||||||
|
|
||||||
const executionIdRef = { current: executionId }
|
|
||||||
|
|
||||||
const handlers = buildBlockEventHandlers({
|
|
||||||
workflowId: activeWorkflowId,
|
|
||||||
executionIdRef,
|
|
||||||
workflowEdges,
|
|
||||||
activeBlocksSet,
|
|
||||||
accumulatedBlockLogs,
|
|
||||||
accumulatedBlockStates,
|
|
||||||
executedBlockIds,
|
|
||||||
consoleMode: 'update',
|
|
||||||
includeStartConsoleEntry: true,
|
|
||||||
})
|
|
||||||
|
|
||||||
const originalEntries = entries
|
|
||||||
.filter((e) => e.executionId === executionId)
|
|
||||||
.map((e) => ({ ...e }))
|
|
||||||
|
|
||||||
let cleared = false
|
|
||||||
let reconnectionComplete = false
|
|
||||||
let cleanupRan = false
|
|
||||||
const clearOnce = () => {
|
|
||||||
if (!cleared) {
|
|
||||||
cleared = true
|
|
||||||
clearExecutionEntries(executionId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const reconnectWorkflowId = activeWorkflowId
|
|
||||||
|
|
||||||
executionStream
|
|
||||||
.reconnect({
|
|
||||||
workflowId: reconnectWorkflowId,
|
|
||||||
executionId,
|
|
||||||
callbacks: {
|
|
||||||
onBlockStarted: (data) => {
|
|
||||||
clearOnce()
|
|
||||||
handlers.onBlockStarted(data)
|
|
||||||
},
|
|
||||||
onBlockCompleted: (data) => {
|
|
||||||
clearOnce()
|
|
||||||
handlers.onBlockCompleted(data)
|
|
||||||
},
|
|
||||||
onBlockError: (data) => {
|
|
||||||
clearOnce()
|
|
||||||
handlers.onBlockError(data)
|
|
||||||
},
|
|
||||||
onExecutionCompleted: () => {
|
|
||||||
const currentId = useExecutionStore
|
|
||||||
.getState()
|
|
||||||
.getCurrentExecutionId(reconnectWorkflowId)
|
|
||||||
if (currentId !== executionId) {
|
|
||||||
reconnectionComplete = true
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
clearOnce()
|
|
||||||
reconnectionComplete = true
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
|
||||||
setIsExecuting(reconnectWorkflowId, false)
|
|
||||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
|
||||||
},
|
|
||||||
onExecutionError: (data) => {
|
|
||||||
const currentId = useExecutionStore
|
|
||||||
.getState()
|
|
||||||
.getCurrentExecutionId(reconnectWorkflowId)
|
|
||||||
if (currentId !== executionId) {
|
|
||||||
reconnectionComplete = true
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
clearOnce()
|
|
||||||
reconnectionComplete = true
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
|
||||||
setIsExecuting(reconnectWorkflowId, false)
|
|
||||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
|
||||||
handleExecutionErrorConsole({
|
|
||||||
workflowId: reconnectWorkflowId,
|
|
||||||
executionId,
|
|
||||||
error: data.error,
|
|
||||||
blockLogs: accumulatedBlockLogs,
|
|
||||||
})
|
|
||||||
},
|
|
||||||
onExecutionCancelled: () => {
|
|
||||||
const currentId = useExecutionStore
|
|
||||||
.getState()
|
|
||||||
.getCurrentExecutionId(reconnectWorkflowId)
|
|
||||||
if (currentId !== executionId) {
|
|
||||||
reconnectionComplete = true
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
clearOnce()
|
|
||||||
reconnectionComplete = true
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
|
||||||
setIsExecuting(reconnectWorkflowId, false)
|
|
||||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
|
||||||
handleExecutionCancelledConsole({
|
|
||||||
workflowId: reconnectWorkflowId,
|
|
||||||
executionId,
|
|
||||||
})
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
logger.warn('Execution reconnection failed', { executionId, error })
|
|
||||||
})
|
|
||||||
.finally(() => {
|
|
||||||
if (reconnectionComplete || cleanupRan) return
|
|
||||||
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
|
|
||||||
if (currentId !== executionId) return
|
|
||||||
reconnectionComplete = true
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
clearExecutionEntries(executionId)
|
|
||||||
for (const entry of originalEntries) {
|
|
||||||
addConsole({
|
|
||||||
workflowId: entry.workflowId,
|
|
||||||
blockId: entry.blockId,
|
|
||||||
blockName: entry.blockName,
|
|
||||||
blockType: entry.blockType,
|
|
||||||
executionId: entry.executionId,
|
|
||||||
executionOrder: entry.executionOrder,
|
|
||||||
isRunning: false,
|
|
||||||
warning: 'Execution result unavailable — check the logs page',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
|
||||||
setIsExecuting(reconnectWorkflowId, false)
|
|
||||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
|
||||||
})
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
cleanupRan = true
|
|
||||||
executionStream.cancel(reconnectWorkflowId)
|
|
||||||
activeReconnections.delete(reconnectWorkflowId)
|
|
||||||
|
|
||||||
if (cleared && !reconnectionComplete) {
|
|
||||||
clearExecutionEntries(executionId)
|
|
||||||
for (const entry of originalEntries) {
|
|
||||||
addConsole(entry)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
||||||
}, [activeWorkflowId, hasHydrated])
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
isExecuting,
|
isExecuting,
|
||||||
isDebugging,
|
isDebugging,
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
 export { CancelSubscription } from './cancel-subscription'
 export { CreditBalance } from './credit-balance'
 export { PlanCard, type PlanCardProps, type PlanFeature } from './plan-card'
-export { ReferralCode } from './referral-code'
@@ -1 +0,0 @@
-export { ReferralCode } from './referral-code'
@@ -1,101 +0,0 @@
-'use client'
-
-import { useState } from 'react'
-import { createLogger } from '@sim/logger'
-import { Button, Input, Label } from '@/components/emcn'
-
-const logger = createLogger('ReferralCode')
-
-interface ReferralCodeProps {
-onRedeemComplete?: () => void
-}
-
-/**
- * Inline referral/promo code entry field with redeem button.
- * One-time use per account — shows success or "already redeemed" state.
- */
-export function ReferralCode({ onRedeemComplete }: ReferralCodeProps) {
-const [code, setCode] = useState('')
-const [isRedeeming, setIsRedeeming] = useState(false)
-const [error, setError] = useState<string | null>(null)
-const [success, setSuccess] = useState<{ bonusAmount: number } | null>(null)
-
-const handleRedeem = async () => {
-const trimmed = code.trim()
-if (!trimmed || isRedeeming) return
-
-setIsRedeeming(true)
-setError(null)
-
-try {
-const response = await fetch('/api/referral-code/redeem', {
-method: 'POST',
-headers: { 'Content-Type': 'application/json' },
-body: JSON.stringify({ code: trimmed }),
-})
-
-const data = await response.json()
-
-if (!response.ok) {
-throw new Error(data.error || 'Failed to redeem code')
-}
-
-if (data.redeemed) {
-setSuccess({ bonusAmount: data.bonusAmount })
-setCode('')
-onRedeemComplete?.()
-} else {
-setError(data.error || 'Code could not be redeemed')
-}
-} catch (err) {
-logger.error('Referral code redemption failed', { error: err })
-setError(err instanceof Error ? err.message : 'Failed to redeem code')
-} finally {
-setIsRedeeming(false)
-}
-}
-
-if (success) {
-return (
-<div className='flex items-center justify-between'>
-<Label>Referral Code</Label>
-<span className='text-[12px] text-[var(--text-secondary)]'>
-+${success.bonusAmount} credits applied
-</span>
-</div>
-)
-}
-
-return (
-<div className='flex items-center justify-between gap-[12px]'>
-<Label className='shrink-0'>Referral Code</Label>
-<div className='flex items-center gap-[8px]'>
-<div className='flex flex-col'>
-<Input
-type='text'
-value={code}
-onChange={(e) => {
-setCode(e.target.value)
-setError(null)
-}}
-onKeyDown={(e) => {
-if (e.key === 'Enter') handleRedeem()
-}}
-placeholder='Enter code'
-className='h-[32px] w-[140px] text-[12px]'
-disabled={isRedeeming}
-/>
-{error && <span className='mt-[4px] text-[11px] text-[var(--text-error)]'>{error}</span>}
-</div>
-<Button
-variant='active'
-className='h-[32px] shrink-0 rounded-[6px] text-[12px]'
-onClick={handleRedeem}
-disabled={isRedeeming || !code.trim()}
->
-{isRedeeming ? 'Redeeming...' : 'Redeem'}
-</Button>
-</div>
-</div>
-)
-}
@@ -17,7 +17,6 @@ import {
 CancelSubscription,
 CreditBalance,
 PlanCard,
-ReferralCode,
 } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components'
 import {
 ENTERPRISE_PLAN_FEATURES,
@@ -550,10 +549,6 @@ export function Subscription() {
 />
 )}

-{!subscription.isEnterprise && (
-<ReferralCode onRedeemComplete={() => refetchSubscription()} />
-)}
-
 {/* Next Billing Date - hidden from team members */}
 {subscription.isPaid &&
 subscriptionData?.data?.periodEnd &&
@@ -4,14 +4,12 @@ import { useEffect } from 'react'
 import { createLogger } from '@sim/logger'
 import { useRouter } from 'next/navigation'
 import { useSession } from '@/lib/auth/auth-client'
-import { useReferralAttribution } from '@/hooks/use-referral-attribution'

 const logger = createLogger('WorkspacePage')

 export default function WorkspacePage() {
 const router = useRouter()
 const { data: session, isPending } = useSession()
-useReferralAttribution()

 useEffect(() => {
 const redirectToFirstWorkspace = async () => {
@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {

 const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.

-Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.
+Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.

 Guidelines:
 - Use the specific values provided (credential names, channel names, model names)
@@ -1,4 +1,4 @@
-import { useCallback } from 'react'
+import { useCallback, useRef } from 'react'
 import { createLogger } from '@sim/logger'
 import type {
 BlockCompletedData,
@@ -16,18 +16,6 @@ import type { SerializableExecutionState } from '@/executor/execution/types'

 const logger = createLogger('useExecutionStream')

-/**
- * Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
- * These should be treated as clean disconnects, not execution errors.
- */
-function isClientDisconnectError(error: any): boolean {
-if (error.name === 'AbortError') return true
-const msg = (error.message ?? '').toLowerCase()
-return (
-msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
-)
-}
-
 /**
 * Processes SSE events from a response body and invokes appropriate callbacks.
 */
@@ -133,7 +121,6 @@ export interface ExecuteStreamOptions {
 parallels?: Record<string, any>
 }
 stopAfterBlockId?: string
-onExecutionId?: (executionId: string) => void
 callbacks?: ExecutionStreamCallbacks
 }

@@ -142,40 +129,30 @@ export interface ExecuteFromBlockOptions {
 startBlockId: string
 sourceSnapshot: SerializableExecutionState
 input?: any
-onExecutionId?: (executionId: string) => void
 callbacks?: ExecutionStreamCallbacks
 }

-export interface ReconnectStreamOptions {
-workflowId: string
-executionId: string
-fromEventId?: number
-callbacks?: ExecutionStreamCallbacks
-}
-
-/**
- * Module-level map shared across all hook instances.
- * Ensures ANY instance can cancel streams started by ANY other instance,
- * which is critical for SPA navigation where the original hook instance unmounts
- * but the SSE stream must be cancellable from the new instance.
- */
-const sharedAbortControllers = new Map<string, AbortController>()
-
 /**
 * Hook for executing workflows via server-side SSE streaming.
 * Supports concurrent executions via per-workflow AbortController maps.
 */
 export function useExecutionStream() {
-const execute = useCallback(async (options: ExecuteStreamOptions) => {
+const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
-const { workflowId, callbacks = {}, onExecutionId, ...payload } = options
+const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
+new Map()
+)
+
-const existing = sharedAbortControllers.get(workflowId)
+const execute = useCallback(async (options: ExecuteStreamOptions) => {
+const { workflowId, callbacks = {}, ...payload } = options
+
+const existing = abortControllersRef.current.get(workflowId)
 if (existing) {
 existing.abort()
 }

 const abortController = new AbortController()
-sharedAbortControllers.set(workflowId, abortController)
+abortControllersRef.current.set(workflowId, abortController)
+currentExecutionsRef.current.delete(workflowId)

 try {
 const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -200,48 +177,42 @@ export function useExecutionStream() {
 throw new Error('No response body')
 }

-const serverExecutionId = response.headers.get('X-Execution-Id')
+const executionId = response.headers.get('X-Execution-Id')
-if (serverExecutionId) {
+if (executionId) {
-onExecutionId?.(serverExecutionId)
+currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
 }

 const reader = response.body.getReader()
 await processSSEStream(reader, callbacks, 'Execution')
 } catch (error: any) {
-if (isClientDisconnectError(error)) {
+if (error.name === 'AbortError') {
-logger.info('Execution stream disconnected (page unload or abort)')
+logger.info('Execution stream cancelled')
-return
+callbacks.onExecutionCancelled?.({ duration: 0 })
+} else {
+logger.error('Execution stream error:', error)
+callbacks.onExecutionError?.({
+error: error.message || 'Unknown error',
+duration: 0,
+})
 }
-logger.error('Execution stream error:', error)
-callbacks.onExecutionError?.({
-error: error.message || 'Unknown error',
-duration: 0,
-})
 throw error
 } finally {
-if (sharedAbortControllers.get(workflowId) === abortController) {
+abortControllersRef.current.delete(workflowId)
-sharedAbortControllers.delete(workflowId)
+currentExecutionsRef.current.delete(workflowId)
-}
 }
 }, [])

 const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
-const {
+const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
-workflowId,
-startBlockId,
-sourceSnapshot,
-input,
-onExecutionId,
-callbacks = {},
-} = options

-const existing = sharedAbortControllers.get(workflowId)
+const existing = abortControllersRef.current.get(workflowId)
 if (existing) {
 existing.abort()
 }

 const abortController = new AbortController()
-sharedAbortControllers.set(workflowId, abortController)
+abortControllersRef.current.set(workflowId, abortController)
+currentExecutionsRef.current.delete(workflowId)

 try {
 const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -275,80 +246,64 @@ export function useExecutionStream() {
 throw new Error('No response body')
 }

-const serverExecutionId = response.headers.get('X-Execution-Id')
+const executionId = response.headers.get('X-Execution-Id')
-if (serverExecutionId) {
+if (executionId) {
-onExecutionId?.(serverExecutionId)
+currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
 }

 const reader = response.body.getReader()
 await processSSEStream(reader, callbacks, 'Run-from-block')
 } catch (error: any) {
-if (isClientDisconnectError(error)) {
+if (error.name === 'AbortError') {
-logger.info('Run-from-block stream disconnected (page unload or abort)')
+logger.info('Run-from-block execution cancelled')
-return
+callbacks.onExecutionCancelled?.({ duration: 0 })
+} else {
+logger.error('Run-from-block execution error:', error)
+callbacks.onExecutionError?.({
+error: error.message || 'Unknown error',
+duration: 0,
+})
 }
-logger.error('Run-from-block execution error:', error)
-callbacks.onExecutionError?.({
-error: error.message || 'Unknown error',
-duration: 0,
-})
 throw error
 } finally {
-if (sharedAbortControllers.get(workflowId) === abortController) {
+abortControllersRef.current.delete(workflowId)
-sharedAbortControllers.delete(workflowId)
+currentExecutionsRef.current.delete(workflowId)
-}
-}
-}, [])
-
-const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
-const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
-
-const existing = sharedAbortControllers.get(workflowId)
-if (existing) {
-existing.abort()
-}
-
-const abortController = new AbortController()
-sharedAbortControllers.set(workflowId, abortController)
-try {
-const response = await fetch(
-`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
-{ signal: abortController.signal }
-)
-if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
-if (!response.body) throw new Error('No response body')
-
-await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
-} catch (error: any) {
-if (isClientDisconnectError(error)) return
-logger.error('Reconnection stream error:', error)
-throw error
-} finally {
-if (sharedAbortControllers.get(workflowId) === abortController) {
-sharedAbortControllers.delete(workflowId)
-}
 }
 }, [])

 const cancel = useCallback((workflowId?: string) => {
 if (workflowId) {
-const controller = sharedAbortControllers.get(workflowId)
+const execution = currentExecutionsRef.current.get(workflowId)
+if (execution) {
+fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
+method: 'POST',
+}).catch(() => {})
+}
+
+const controller = abortControllersRef.current.get(workflowId)
 if (controller) {
 controller.abort()
-sharedAbortControllers.delete(workflowId)
+abortControllersRef.current.delete(workflowId)
 }
+currentExecutionsRef.current.delete(workflowId)
 } else {
-for (const [, controller] of sharedAbortControllers) {
+for (const [, execution] of currentExecutionsRef.current) {
+fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
+method: 'POST',
+}).catch(() => {})
+}
+
+for (const [, controller] of abortControllersRef.current) {
 controller.abort()
 }
-sharedAbortControllers.clear()
+abortControllersRef.current.clear()
+currentExecutionsRef.current.clear()
 }
 }, [])

 return {
 execute,
 executeFromBlock,
-reconnect,
 cancel,
 }
 }
@@ -1,46 +0,0 @@
-'use client'
-
-import { useEffect, useRef } from 'react'
-import { createLogger } from '@sim/logger'
-
-const logger = createLogger('ReferralAttribution')
-
-const COOKIE_NAME = 'sim_utm'
-
-const TERMINAL_REASONS = new Set([
-'account_predates_cookie',
-'invalid_cookie',
-'no_utm_cookie',
-'no_matching_campaign',
-])
-
-/**
- * Fires a one-shot `POST /api/attribution` when a `sim_utm` cookie is present.
- * Retries on transient failures; stops on terminal outcomes.
- */
-export function useReferralAttribution() {
-const calledRef = useRef(false)
-
-useEffect(() => {
-if (calledRef.current) return
-if (!document.cookie.includes(COOKIE_NAME)) return
-
-calledRef.current = true
-
-fetch('/api/attribution', { method: 'POST' })
-.then((res) => res.json())
-.then((data) => {
-if (data.attributed) {
-logger.info('Referral attribution successful', { bonusAmount: data.bonusAmount })
-} else if (data.error || TERMINAL_REASONS.has(data.reason)) {
-logger.info('Referral attribution skipped', { reason: data.reason || data.error })
-} else {
-calledRef.current = false
-}
-})
-.catch((err) => {
-logger.warn('Referral attribution failed, will retry', { error: err })
-calledRef.current = false
-})
-}, [])
-}
@@ -1,64 +0,0 @@
-import { db } from '@sim/db'
-import { organization, userStats } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { eq, sql } from 'drizzle-orm'
-import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
-import type { DbOrTx } from '@/lib/db/types'
-
-const logger = createLogger('BonusCredits')
-
-/**
- * Apply bonus credits to a user (e.g. referral bonuses, promotional codes).
- *
- * Detects the user's current plan and routes credits accordingly:
- * - Free/Pro: adds to `userStats.creditBalance` and increments `currentUsageLimit`
- * - Team/Enterprise: adds to `organization.creditBalance` and increments `orgUsageLimit`
- *
- * Uses direct increment (not recalculation) so it works correctly for free-tier
- * users where `setUsageLimitForCredits` would compute planBase=0 and skip the update.
- *
- * @param tx - Optional Drizzle transaction context. When provided, all DB writes
- *   participate in the caller's transaction for atomicity.
- */
-export async function applyBonusCredits(
-userId: string,
-amount: number,
-tx?: DbOrTx
-): Promise<void> {
-const dbCtx = tx ?? db
-const subscription = await getHighestPrioritySubscription(userId)
-const isTeamOrEnterprise = subscription?.plan === 'team' || subscription?.plan === 'enterprise'
-
-if (isTeamOrEnterprise && subscription?.referenceId) {
-const orgId = subscription.referenceId
-
-await dbCtx
-.update(organization)
-.set({
-creditBalance: sql`${organization.creditBalance} + ${amount}`,
-orgUsageLimit: sql`COALESCE(${organization.orgUsageLimit}, '0')::decimal + ${amount}`,
-})
-.where(eq(organization.id, orgId))
-
-logger.info('Applied bonus credits to organization', {
-userId,
-organizationId: orgId,
-plan: subscription.plan,
-amount,
-})
-} else {
-await dbCtx
-.update(userStats)
-.set({
-creditBalance: sql`${userStats.creditBalance} + ${amount}`,
-currentUsageLimit: sql`COALESCE(${userStats.currentUsageLimit}, '0')::decimal + ${amount}`,
-})
-.where(eq(userStats.userId, userId))
-
-logger.info('Applied bonus credits to user', {
-userId,
-plan: subscription?.plan || 'free',
-amount,
-})
-}
-}
@@ -1,22 +1,21 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { COPILOT_CONFIRM_API_PATH, STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
||||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
import {
|
import { isBackgroundState, isRejectedState, isReviewState } from '@/lib/copilot/store-utils'
|
||||||
isBackgroundState,
|
|
||||||
isRejectedState,
|
|
||||||
isReviewState,
|
|
||||||
resolveToolDisplay,
|
|
||||||
} from '@/lib/copilot/store-utils'
|
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
|
||||||
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
|
||||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
|
||||||
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
||||||
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
||||||
|
import {
|
||||||
|
extractOperationListFromResultPayload,
|
||||||
|
extractToolExecutionMetadata,
|
||||||
|
extractToolUiMetadata,
|
||||||
|
isWorkflowChangeApplyCall,
|
||||||
|
mapServerStateToClientState,
|
||||||
|
resolveDisplayFromServerUi,
|
||||||
|
} from './tool-call-helpers'
|
||||||
|
import { applyToolEffects } from './tool-effects'
|
||||||
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
||||||
|
|
||||||
const logger = createLogger('CopilotClientSseHandlers')
|
const logger = createLogger('CopilotClientSseHandlers')
|
||||||
@@ -26,21 +25,11 @@ const MAX_BATCH_INTERVAL = 50
|
|||||||
const MIN_BATCH_INTERVAL = 16
|
const MIN_BATCH_INTERVAL = 16
|
||||||
const MAX_QUEUE_SIZE = 5
|
const MAX_QUEUE_SIZE = 5
|
||||||
|
|
||||||
/**
|
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
|
||||||
* Send an auto-accept confirmation to the server for auto-allowed tools.
|
if (toolCall.execution?.target === 'sim_client_capability') {
|
||||||
* The server-side orchestrator polls Redis for this decision.
|
return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
|
||||||
*/
|
}
|
||||||
export function sendAutoAcceptConfirmation(toolCallId: string): void {
|
return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
|
||||||
fetch(COPILOT_CONFIRM_API_PATH, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolCallId, status: 'accepted' }),
|
|
||||||
}).catch((error) => {
|
|
||||||
logger.warn('Failed to send auto-accept confirmation', {
|
|
||||||
toolCallId,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
|
function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
|
||||||
@@ -230,28 +219,86 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
title_updated: (_data, _context, get, set) => {
|
title_updated: (_data, _context, get, set) => {
|
||||||
const title = _data.title
|
const title = typeof _data.title === 'string' ? _data.title.trim() : ''
|
||||||
if (!title) return
|
const eventChatId = typeof _data.chatId === 'string' ? _data.chatId : undefined
|
||||||
const { currentChat, chats } = get()
|
const { currentChat, chats } = get()
|
||||||
if (currentChat) {
|
|
||||||
set({
|
logger.info('[Title] Received title_updated SSE event', {
|
||||||
currentChat: { ...currentChat, title },
|
eventTitle: title,
|
||||||
chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)),
|
eventChatId: eventChatId || null,
|
||||||
|
currentChatId: currentChat?.id || null,
|
||||||
|
currentChatTitle: currentChat?.title || null,
|
||||||
|
chatCount: chats.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!title) {
|
||||||
|
logger.warn('[Title] Ignoring title_updated event with empty title', {
|
||||||
|
payload: _data,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!currentChat) {
|
||||||
|
logger.warn('[Title] Received title_updated event without an active currentChat', {
|
||||||
|
eventChatId: eventChatId || null,
|
||||||
|
title,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetChatId = eventChatId || currentChat.id
|
||||||
|
if (eventChatId && eventChatId !== currentChat.id) {
|
||||||
|
logger.warn('[Title] title_updated event chatId does not match currentChat', {
|
||||||
|
eventChatId,
|
||||||
|
currentChatId: currentChat.id,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
set({
|
||||||
|
currentChat:
|
||||||
|
currentChat.id === targetChatId
|
||||||
|
? {
|
||||||
|
...currentChat,
|
||||||
|
title,
|
||||||
|
}
|
||||||
|
: currentChat,
|
||||||
|
chats: chats.map((c) => (c.id === targetChatId ? { ...c, title } : c)),
|
||||||
|
})
|
||||||
|
|
||||||
|
const updatedState = get()
|
||||||
|
logger.info('[Title] Applied title_updated event to copilot store', {
|
||||||
|
targetChatId,
|
||||||
|
renderedCurrentChatId: updatedState.currentChat?.id || null,
|
||||||
|
renderedCurrentChatTitle: updatedState.currentChat?.title || null,
|
||||||
|
chatListTitle: updatedState.chats.find((c) => c.id === targetChatId)?.title || null,
|
||||||
|
})
|
||||||
},
|
},
|
||||||
    tool_result: (data, context, get, set) => {
      try {
        const eventData = asRecord(data?.data)
        const toolCallId: string | undefined =
-          data?.toolCallId || (eventData.id as string | undefined)
+          data?.toolCallId ||
+          (eventData.id as string | undefined) ||
+          (eventData.callId as string | undefined)
        const success: boolean | undefined = data?.success
        const failedDependency: boolean = data?.failedDependency === true
        const resultObj = asRecord(data?.result)
        const skipped: boolean = resultObj.skipped === true
        if (!toolCallId) return
+        const uiMetadata = extractToolUiMetadata(eventData)
+        const executionMetadata = extractToolExecutionMetadata(eventData)
+        const serverState = (eventData.state as string | undefined) || undefined
+        const targetState = serverState
+          ? mapServerStateToClientState(serverState)
+          : success
+            ? ClientToolCallState.success
+            : failedDependency || skipped
+              ? ClientToolCallState.rejected
+              : ClientToolCallState.error
+        const resultPayload = asRecord(data?.result || eventData.result || eventData.data || data?.data)
        const { toolCallsById } = get()
        const current = toolCallsById[toolCallId]
+        let paramsForCurrentToolCall: Record<string, unknown> | undefined = current?.params
        if (current) {
          if (
            isRejectedState(current.state) ||
@@ -260,16 +307,33 @@ export const sseHandlers: Record<string, SSEHandler> = {
          ) {
            return
          }
-          const targetState = success
-            ? ClientToolCallState.success
-            : failedDependency || skipped
-              ? ClientToolCallState.rejected
-              : ClientToolCallState.error
+          if (
+            targetState === ClientToolCallState.success &&
+            isWorkflowChangeApplyCall(current.name, paramsForCurrentToolCall)
+          ) {
+            const operations = extractOperationListFromResultPayload(resultPayload || {})
+            if (operations && operations.length > 0) {
+              paramsForCurrentToolCall = {
+                ...(current.params || {}),
+                operations,
+              }
+            }
+          }
+
          const updatedMap = { ...toolCallsById }
          updatedMap[toolCallId] = {
            ...current,
+            ui: uiMetadata || current.ui,
+            execution: executionMetadata || current.execution,
+            params: paramsForCurrentToolCall,
            state: targetState,
-            display: resolveToolDisplay(current.name, targetState, current.id, current.params),
+            display: resolveDisplayFromServerUi(
+              current.name,
+              targetState,
+              current.id,
+              paramsForCurrentToolCall,
+              uiMetadata || current.ui
+            ),
          }
          set({ toolCallsById: updatedMap })

@@ -312,138 +376,11 @@ export const sseHandlers: Record<string, SSEHandler> = {
          }
        }

-        if (current.name === 'edit_workflow') {
-          try {
-            const resultPayload = asRecord(
-              data?.result || eventData.result || eventData.data || data?.data
-            )
-            const workflowState = asRecord(resultPayload?.workflowState)
-            const hasWorkflowState = !!resultPayload?.workflowState
-            logger.info('[SSE] edit_workflow result received', {
-              hasWorkflowState,
-              blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
-              edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
-            })
-            if (hasWorkflowState) {
-              const diffStore = useWorkflowDiffStore.getState()
-              diffStore
-                .setProposedChanges(resultPayload.workflowState as WorkflowState)
-                .catch((err) => {
-                  logger.error('[SSE] Failed to apply edit_workflow diff', {
-                    error: err instanceof Error ? err.message : String(err),
-                  })
-                })
-            }
-          } catch (err) {
-            logger.error('[SSE] edit_workflow result handling failed', {
-              error: err instanceof Error ? err.message : String(err),
-            })
-          }
-        }
-
-        // Deploy tools: update deployment status in workflow registry
-        if (
-          targetState === ClientToolCallState.success &&
-          (current.name === 'deploy_api' ||
-            current.name === 'deploy_chat' ||
-            current.name === 'deploy_mcp' ||
-            current.name === 'redeploy')
-        ) {
-          try {
-            const resultPayload = asRecord(
-              data?.result || eventData.result || eventData.data || data?.data
-            )
-            const input = asRecord(current.params)
-            const workflowId =
-              (resultPayload?.workflowId as string) ||
-              (input?.workflowId as string) ||
-              useWorkflowRegistry.getState().activeWorkflowId
-            const isDeployed = resultPayload?.isDeployed !== false
-            if (workflowId) {
-              useWorkflowRegistry
-                .getState()
-                .setDeploymentStatus(workflowId, isDeployed, isDeployed ? new Date() : undefined)
-              logger.info('[SSE] Updated deployment status from tool result', {
-                toolName: current.name,
-                workflowId,
-                isDeployed,
-              })
-            }
-          } catch (err) {
-            logger.warn('[SSE] Failed to hydrate deployment status', {
-              error: err instanceof Error ? err.message : String(err),
-            })
-          }
-        }
-
-        // Environment variables: reload store after successful set
-        if (
-          targetState === ClientToolCallState.success &&
-          current.name === 'set_environment_variables'
-        ) {
-          try {
-            useEnvironmentStore.getState().loadEnvironmentVariables()
-            logger.info('[SSE] Triggered environment variables reload')
-          } catch (err) {
-            logger.warn('[SSE] Failed to reload environment variables', {
-              error: err instanceof Error ? err.message : String(err),
-            })
-          }
-        }
-
-        // Workflow variables: reload store after successful set
-        if (
-          targetState === ClientToolCallState.success &&
-          current.name === 'set_global_workflow_variables'
-        ) {
-          try {
-            const input = asRecord(current.params)
-            const workflowId =
-              (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
-            if (workflowId) {
-              useVariablesStore.getState().loadForWorkflow(workflowId)
-              logger.info('[SSE] Triggered workflow variables reload', { workflowId })
-            }
-          } catch (err) {
-            logger.warn('[SSE] Failed to reload workflow variables', {
-              error: err instanceof Error ? err.message : String(err),
-            })
-          }
-        }
-
-        // Generate API key: update deployment status with the new key
-        if (targetState === ClientToolCallState.success && current.name === 'generate_api_key') {
-          try {
-            const resultPayload = asRecord(
-              data?.result || eventData.result || eventData.data || data?.data
-            )
-            const input = asRecord(current.params)
-            const workflowId =
-              (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
-            const apiKey = (resultPayload?.apiKey || resultPayload?.key) as string | undefined
-            if (workflowId) {
-              const existingStatus = useWorkflowRegistry
-                .getState()
-                .getWorkflowDeploymentStatus(workflowId)
-              useWorkflowRegistry
-                .getState()
-                .setDeploymentStatus(
-                  workflowId,
-                  existingStatus?.isDeployed ?? false,
-                  existingStatus?.deployedAt,
-                  apiKey
-                )
-              logger.info('[SSE] Updated deployment status with API key', {
-                workflowId,
-                hasKey: !!apiKey,
-              })
-            }
-          } catch (err) {
-            logger.warn('[SSE] Failed to hydrate API key status', {
-              error: err instanceof Error ? err.message : String(err),
-            })
-          }
-        }
+        applyToolEffects({
+          effectsRaw: eventData.effects,
+          toolCall: updatedMap[toolCallId],
+          resultPayload,
+        })
        }

        for (let i = 0; i < context.contentBlocks.length; i++) {
@@ -460,16 +397,24 @@ export const sseHandlers: Record<string, SSEHandler> = {
              : failedDependency || skipped
                ? ClientToolCallState.rejected
                : ClientToolCallState.error
+          const paramsForBlock =
+            b.toolCall?.id === toolCallId
+              ? paramsForCurrentToolCall || b.toolCall?.params
+              : b.toolCall?.params
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
+              params: paramsForBlock,
+              ui: uiMetadata || b.toolCall?.ui,
+              execution: executionMetadata || b.toolCall?.execution,
              state: targetState,
-              display: resolveToolDisplay(
+              display: resolveDisplayFromServerUi(
                b.toolCall?.name,
                targetState,
                toolCallId,
-                b.toolCall?.params
+                paramsForBlock,
+                uiMetadata || b.toolCall?.ui
              ),
            },
          }
@@ -487,7 +432,9 @@ export const sseHandlers: Record<string, SSEHandler> = {
      try {
        const errorData = asRecord(data?.data)
        const toolCallId: string | undefined =
-          data?.toolCallId || (errorData.id as string | undefined)
+          data?.toolCallId ||
+          (errorData.id as string | undefined) ||
+          (errorData.callId as string | undefined)
        const failedDependency: boolean = data?.failedDependency === true
        if (!toolCallId) return
        const { toolCallsById } = get()
@@ -500,14 +447,26 @@ export const sseHandlers: Record<string, SSEHandler> = {
        ) {
          return
        }
-        const targetState = failedDependency
-          ? ClientToolCallState.rejected
-          : ClientToolCallState.error
+        const targetState = errorData.state
+          ? mapServerStateToClientState(errorData.state)
+          : failedDependency
+            ? ClientToolCallState.rejected
+            : ClientToolCallState.error
+        const uiMetadata = extractToolUiMetadata(errorData)
+        const executionMetadata = extractToolExecutionMetadata(errorData)
        const updatedMap = { ...toolCallsById }
        updatedMap[toolCallId] = {
          ...current,
+          ui: uiMetadata || current.ui,
+          execution: executionMetadata || current.execution,
          state: targetState,
-          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
+          display: resolveDisplayFromServerUi(
+            current.name,
+            targetState,
+            current.id,
+            current.params,
+            uiMetadata || current.ui
+          ),
        }
        set({ toolCallsById: updatedMap })
      }
@@ -520,19 +479,26 @@ export const sseHandlers: Record<string, SSEHandler> = {
            isBackgroundState(b.toolCall?.state)
          )
            break
-          const targetState = failedDependency
-            ? ClientToolCallState.rejected
-            : ClientToolCallState.error
+          const targetState = errorData.state
+            ? mapServerStateToClientState(errorData.state)
+            : failedDependency
+              ? ClientToolCallState.rejected
+              : ClientToolCallState.error
+          const uiMetadata = extractToolUiMetadata(errorData)
+          const executionMetadata = extractToolExecutionMetadata(errorData)
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
+              ui: uiMetadata || b.toolCall?.ui,
+              execution: executionMetadata || b.toolCall?.execution,
              state: targetState,
-              display: resolveToolDisplay(
+              display: resolveDisplayFromServerUi(
                b.toolCall?.name,
                targetState,
                toolCallId,
-                b.toolCall?.params
+                b.toolCall?.params,
+                uiMetadata || b.toolCall?.ui
              ),
            },
          }
@@ -547,20 +513,28 @@ export const sseHandlers: Record<string, SSEHandler> = {
      }
    },
    tool_generating: (data, context, get, set) => {
-      const { toolCallId, toolName } = data
+      const eventData = asRecord(data?.data)
+      const toolCallId =
+        data?.toolCallId ||
+        (eventData.id as string | undefined) ||
+        (eventData.callId as string | undefined)
+      const toolName =
+        data?.toolName ||
+        (eventData.name as string | undefined) ||
+        (eventData.toolName as string | undefined)
      if (!toolCallId || !toolName) return
      const { toolCallsById } = get()

      if (!toolCallsById[toolCallId]) {
-        const isAutoAllowed = get().isToolAutoAllowed(toolName)
-        const initialState = isAutoAllowed
-          ? ClientToolCallState.executing
-          : ClientToolCallState.pending
+        const initialState = ClientToolCallState.generating
+        const uiMetadata = extractToolUiMetadata(eventData)
        const tc: CopilotToolCall = {
          id: toolCallId,
          name: toolName,
          state: initialState,
-          display: resolveToolDisplay(toolName, initialState, toolCallId),
+          ui: uiMetadata,
+          execution: extractToolExecutionMetadata(eventData),
+          display: resolveDisplayFromServerUi(toolName, initialState, toolCallId, undefined, uiMetadata),
        }
        const updated = { ...toolCallsById, [toolCallId]: tc }
        set({ toolCallsById: updated })
@@ -572,17 +546,27 @@ export const sseHandlers: Record<string, SSEHandler> = {
    },
    tool_call: (data, context, get, set) => {
      const toolData = asRecord(data?.data)
-      const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
-      const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
+      const id: string | undefined =
+        (toolData.id as string | undefined) ||
+        (toolData.callId as string | undefined) ||
+        data?.toolCallId
+      const name: string | undefined =
+        (toolData.name as string | undefined) ||
+        (toolData.toolName as string | undefined) ||
+        data?.toolName
      if (!id) return
      const args = toolData.arguments as Record<string, unknown> | undefined
      const isPartial = toolData.partial === true
+      const uiMetadata = extractToolUiMetadata(toolData)
+      const executionMetadata = extractToolExecutionMetadata(toolData)
+      const serverState = toolData.state
      const { toolCallsById } = get()

      const existing = toolCallsById[id]
      const toolName = name || existing?.name || 'unknown_tool'
-      const isAutoAllowed = get().isToolAutoAllowed(toolName)
-      let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
+      let initialState = serverState
+        ? mapServerStateToClientState(serverState)
+        : ClientToolCallState.pending

      // Avoid flickering back to pending on partial/duplicate events once a tool is executing.
      if (
@@ -597,15 +581,25 @@ export const sseHandlers: Record<string, SSEHandler> = {
            ...existing,
            name: toolName,
            state: initialState,
+            ui: uiMetadata || existing.ui,
+            execution: executionMetadata || existing.execution,
            ...(args ? { params: args } : {}),
-            display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
+            display: resolveDisplayFromServerUi(
+              toolName,
+              initialState,
+              id,
+              args || existing.params,
+              uiMetadata || existing.ui
+            ),
          }
        : {
            id,
            name: toolName,
            state: initialState,
+            ui: uiMetadata,
+            execution: executionMetadata,
            ...(args ? { params: args } : {}),
-            display: resolveToolDisplay(toolName, initialState, id, args),
+            display: resolveDisplayFromServerUi(toolName, initialState, id, args, uiMetadata),
          }
      const updated = { ...toolCallsById, [id]: next }
      set({ toolCallsById: updated })
@@ -618,20 +612,12 @@ export const sseHandlers: Record<string, SSEHandler> = {
        return
      }

-      // Auto-allowed tools: send confirmation to the server so it can proceed
-      // without waiting for the user to click "Allow".
-      if (isAutoAllowed) {
-        sendAutoAcceptConfirmation(id)
-      }
-
-      // Client-executable run tools: execute on the client for real-time feedback
-      // (block pulsing, console logs, stop button). The server defers execution
-      // for these tools in interactive mode; the client reports back via mark-complete.
-      if (
-        CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName) &&
-        initialState === ClientToolCallState.executing
-      ) {
-        executeRunToolOnClient(id, toolName, args || existing?.params || {})
+      const shouldInterrupt = next.ui?.showInterrupt === true
+
+      // Client-run capability: execution is delegated to the browser.
+      // We run immediately only when no interrupt is required.
+      if (isClientRunCapability(next) && !shouldInterrupt) {
+        executeRunToolOnClient(id, toolName, args || next.params || {})
      }

      // OAuth: dispatch event to open the OAuth connect modal
@@ -15,10 +15,7 @@ const logger = createLogger('CopilotRunToolExecution')
 * (block pulsing, logs, stop button, etc.).
 */
export const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
-  'run_workflow',
-  'run_workflow_until_block',
-  'run_from_block',
-  'run_block',
+  'workflow_run',
])

/**
@@ -74,21 +71,44 @@ async function doExecuteRunTool(
    | Record<string, unknown>
    | undefined

+  const runMode =
+    toolName === 'workflow_run' ? ((params.mode as string | undefined) || 'full').toLowerCase() : undefined
+
+  if (
+    toolName === 'workflow_run' &&
+    runMode !== 'full' &&
+    runMode !== 'until_block' &&
+    runMode !== 'from_block' &&
+    runMode !== 'block'
+  ) {
+    const error = `Unsupported workflow_run mode: ${String(params.mode)}`
+    logger.warn('[RunTool] Execution prevented: unsupported workflow_run mode', {
+      toolCallId,
+      mode: params.mode,
+    })
+    setToolState(toolCallId, ClientToolCallState.error)
+    await reportCompletion(toolCallId, false, error)
+    return
+  }
+
  const stopAfterBlockId = (() => {
-    if (toolName === 'run_workflow_until_block')
+    if (toolName === 'workflow_run' && runMode === 'until_block') {
      return params.stopAfterBlockId as string | undefined
-    if (toolName === 'run_block') return params.blockId as string | undefined
+    }
+    if (toolName === 'workflow_run' && runMode === 'block') {
+      return params.blockId as string | undefined
+    }
    return undefined
  })()

  const runFromBlock = (() => {
-    if (toolName === 'run_from_block' && params.startBlockId) {
+    if (toolName === 'workflow_run' && runMode === 'from_block' && params.startBlockId) {
      return {
        startBlockId: params.startBlockId as string,
        executionId: (params.executionId as string | undefined) || 'latest',
      }
    }
-    if (toolName === 'run_block' && params.blockId) {
+    if (toolName === 'workflow_run' && runMode === 'block' && params.blockId) {
      return {
        startBlockId: params.blockId as string,
        executionId: (params.executionId as string | undefined) || 'latest',
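For orientation, here is a rough sketch of what the consolidated workflow_run tool's params could look like in each mode, based only on the fields the hunk above reads (mode, stopAfterBlockId, startBlockId, blockId, executionId); the concrete block IDs are hypothetical examples, not values taken from this change.

// Hypothetical workflow_run params per mode, inferred from doExecuteRunTool above.
const exampleParams: Record<string, Record<string, unknown>> = {
  full: { mode: 'full' },
  until_block: { mode: 'until_block', stopAfterBlockId: 'block-123' },
  from_block: { mode: 'from_block', startBlockId: 'block-123', executionId: 'latest' },
  block: { mode: 'block', blockId: 'block-123', executionId: 'latest' },
}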
@@ -6,16 +6,23 @@ import {
  shouldSkipToolResultEvent,
} from '@/lib/copilot/orchestrator/sse-utils'
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
-import { resolveToolDisplay } from '@/lib/copilot/store-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
import {
  type SSEHandler,
-  sendAutoAcceptConfirmation,
  sseHandlers,
  updateStreamingMessage,
} from './handlers'
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
+import {
+  extractOperationListFromResultPayload,
+  extractToolExecutionMetadata,
+  extractToolUiMetadata,
+  isWorkflowChangeApplyCall,
+  mapServerStateToClientState,
+  resolveDisplayFromServerUi,
+} from './tool-call-helpers'
+import { applyToolEffects } from './tool-effects'
import type { ClientStreamingContext } from './types'

const logger = createLogger('CopilotClientSubagentHandlers')
@@ -24,6 +31,13 @@ type StoreSet = (
  partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
) => void

+function isClientRunCapability(toolCall: CopilotToolCall): boolean {
+  if (toolCall.execution?.target === 'sim_client_capability') {
+    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
+  }
+  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
+}
+
export function appendSubAgentContent(
  context: ClientStreamingContext,
  parentToolCallId: string,
@@ -164,6 +178,8 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
    const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
    if (!id || !name) return
    const isPartial = toolData.partial === true
+    const uiMetadata = extractToolUiMetadata(toolData)
+    const executionMetadata = extractToolExecutionMetadata(toolData)

    let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
      | Record<string, unknown>
@@ -199,9 +215,10 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
    const existingToolCall =
      existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined

-    // Auto-allowed tools skip pending state to avoid flashing interrupt buttons
-    const isAutoAllowed = get().isToolAutoAllowed(name)
-    let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
+    const serverState = toolData.state
+    let initialState = serverState
+      ? mapServerStateToClientState(serverState)
+      : ClientToolCallState.pending

    // Avoid flickering back to pending on partial/duplicate events once a tool is executing.
    if (
@@ -215,8 +232,10 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
      id,
      name,
      state: initialState,
+      ui: uiMetadata,
+      execution: executionMetadata,
      ...(args ? { params: args } : {}),
-      display: resolveToolDisplay(name, initialState, id, args),
+      display: resolveDisplayFromServerUi(name, initialState, id, args, uiMetadata),
    }

    if (existingIndex >= 0) {
@@ -241,16 +260,11 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
      return
    }

-    // Auto-allowed tools: send confirmation to the server so it can proceed
-    // without waiting for the user to click "Allow".
-    if (isAutoAllowed) {
-      sendAutoAcceptConfirmation(id)
-    }
-
-    // Client-executable run tools: if auto-allowed, execute immediately for
-    // real-time feedback. For non-auto-allowed, the user must click "Allow"
-    // first — handleRun in tool-call.tsx triggers executeRunToolOnClient.
-    if (CLIENT_EXECUTABLE_RUN_TOOLS.has(name) && isAutoAllowed) {
+    const shouldInterrupt = subAgentToolCall.ui?.showInterrupt === true
+
+    // Client-run capability: execution is delegated to the browser.
+    // Execute immediately only for non-interrupting calls.
+    if (isClientRunCapability(subAgentToolCall) && !shouldInterrupt) {
      executeRunToolOnClient(id, name, args || {})
    }
  },
@@ -275,17 +289,51 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
    if (!context.subAgentToolCalls[parentToolCallId]) return
    if (!context.subAgentBlocks[parentToolCallId]) return

-    const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
+    const serverState = resultData.state
+    const targetState = serverState
+      ? mapServerStateToClientState(serverState)
+      : success
+        ? ClientToolCallState.success
+        : ClientToolCallState.error
+    const uiMetadata = extractToolUiMetadata(resultData)
+    const executionMetadata = extractToolExecutionMetadata(resultData)
    const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
      (tc: CopilotToolCall) => tc.id === toolCallId
    )

    if (existingIndex >= 0) {
      const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
+      let nextParams = existing.params
+      const resultPayload = asRecord(
+        data?.result || resultData.result || resultData.data || data?.data
+      )
+      if (
+        targetState === ClientToolCallState.success &&
+        isWorkflowChangeApplyCall(existing.name, existing.params as Record<string, unknown>) &&
+        resultPayload
+      ) {
+        const operations = extractOperationListFromResultPayload(resultPayload)
+        if (operations && operations.length > 0) {
+          nextParams = {
+            ...(existing.params || {}),
+            operations,
+          }
+        }
+      }
+
      const updatedSubAgentToolCall = {
        ...existing,
+        params: nextParams,
+        ui: uiMetadata || existing.ui,
+        execution: executionMetadata || existing.execution,
        state: targetState,
-        display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
+        display: resolveDisplayFromServerUi(
+          existing.name,
+          targetState,
+          toolCallId,
+          nextParams,
+          uiMetadata || existing.ui
+        ),
      }
      context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall

@@ -309,6 +357,12 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
        state: targetState,
      })
    }

+    applyToolEffects({
+      effectsRaw: resultData.effects,
+      toolCall: updatedSubAgentToolCall,
+      resultPayload,
+    })
    }

    updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
apps/sim/lib/copilot/client-sse/tool-call-helpers.ts (new file, 134 lines)
@@ -0,0 +1,134 @@
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
import { humanizedFallback, resolveToolDisplay } from '@/lib/copilot/store-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { CopilotToolCall } from '@/stores/panel/copilot/types'

export function mapServerStateToClientState(state: unknown): ClientToolCallState {
  switch (String(state || '')) {
    case 'generating':
      return ClientToolCallState.generating
    case 'pending':
    case 'awaiting_approval':
      return ClientToolCallState.pending
    case 'executing':
      return ClientToolCallState.executing
    case 'success':
      return ClientToolCallState.success
    case 'rejected':
    case 'skipped':
      return ClientToolCallState.rejected
    case 'aborted':
      return ClientToolCallState.aborted
    case 'error':
    case 'failed':
      return ClientToolCallState.error
    default:
      return ClientToolCallState.pending
  }
}

export function extractToolUiMetadata(
  data: Record<string, unknown>
): CopilotToolCall['ui'] | undefined {
  const ui = asRecord(data.ui)
  if (!ui || Object.keys(ui).length === 0) return undefined
  const autoAllowedFromUi = ui.autoAllowed === true
  const autoAllowedFromData = data.autoAllowed === true
  return {
    title: typeof ui.title === 'string' ? ui.title : undefined,
    phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
    icon: typeof ui.icon === 'string' ? ui.icon : undefined,
    showInterrupt: ui.showInterrupt === true,
    showRemember: ui.showRemember === true,
    autoAllowed: autoAllowedFromUi || autoAllowedFromData,
    actions: Array.isArray(ui.actions)
      ? ui.actions
          .map((action) => {
            const a = asRecord(action)
            const id = typeof a.id === 'string' ? a.id : undefined
            const label = typeof a.label === 'string' ? a.label : undefined
            const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
            if (!id || !label) return null
            return {
              id,
              label,
              kind,
              remember: a.remember === true,
            }
          })
          .filter((a): a is NonNullable<typeof a> => !!a)
      : undefined,
  }
}

export function extractToolExecutionMetadata(
  data: Record<string, unknown>
): CopilotToolCall['execution'] | undefined {
  const execution = asRecord(data.execution)
  if (!execution || Object.keys(execution).length === 0) return undefined
  return {
    target: typeof execution.target === 'string' ? execution.target : undefined,
    capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
  }
}

function displayVerb(state: ClientToolCallState): string {
  switch (state) {
    case ClientToolCallState.success:
      return 'Completed'
    case ClientToolCallState.error:
      return 'Failed'
    case ClientToolCallState.rejected:
      return 'Skipped'
    case ClientToolCallState.aborted:
      return 'Aborted'
    case ClientToolCallState.generating:
      return 'Preparing'
    case ClientToolCallState.pending:
      return 'Waiting'
    default:
      return 'Running'
  }
}

export function resolveDisplayFromServerUi(
  toolName: string,
  state: ClientToolCallState,
  toolCallId: string,
  params: Record<string, unknown> | undefined,
  ui?: CopilotToolCall['ui']
) {
  const fallback =
    resolveToolDisplay(toolName, state, toolCallId, params) ||
    humanizedFallback(toolName, state)
  if (!fallback) return undefined
  if (ui?.phaseLabel) {
    return { text: ui.phaseLabel, icon: fallback.icon }
  }
  if (ui?.title) {
    return { text: `${displayVerb(state)} ${ui.title}`, icon: fallback.icon }
  }
  return fallback
}

export function isWorkflowChangeApplyCall(
  toolName?: string,
  params?: Record<string, unknown>
): boolean {
  if (toolName !== 'workflow_change') return false
  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
  if (mode === 'apply') return true
  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
}

export function extractOperationListFromResultPayload(
  resultPayload: Record<string, unknown>
): Array<Record<string, unknown>> | undefined {
  const operations = resultPayload.operations
  if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>

  const compiled = resultPayload.compiledOperations
  if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>

  return undefined
}
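For reference, a hypothetical tool event payload that these extractors would accept is sketched below. The field names follow what extractToolUiMetadata, extractToolExecutionMetadata, and mapServerStateToClientState read above; the concrete values are illustrative only and not taken from this change.

// Illustrative event data; field names mirror the extractors above, values are made up.
const exampleEventData = {
  id: 'call_123',
  name: 'workflow_run',
  state: 'awaiting_approval',
  ui: {
    title: 'Run workflow',
    phaseLabel: 'Waiting for approval',
    showInterrupt: true,
    showRemember: true,
    actions: [
      { id: 'accept', label: 'Run', kind: 'accept' },
      { id: 'reject', label: 'Skip', kind: 'reject', remember: true },
    ],
  },
  execution: { target: 'sim_client_capability', capabilityId: 'workflow.run' },
}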
apps/sim/lib/copilot/client-sse/tool-effects.ts (new file, 175 lines)
@@ -0,0 +1,175 @@
import { createLogger } from '@sim/logger'
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
import type { CopilotToolCall } from '@/stores/panel/copilot/types'
import { useVariablesStore } from '@/stores/panel/variables/store'
import { useEnvironmentStore } from '@/stores/settings/environment/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

const logger = createLogger('CopilotToolEffects')

type ParsedToolEffect = {
  kind: string
  payload: Record<string, unknown>
}

function parseToolEffects(raw: unknown): ParsedToolEffect[] {
  if (!Array.isArray(raw)) return []
  const effects: ParsedToolEffect[] = []
  for (const item of raw) {
    const effect = asRecord(item)
    const kind = typeof effect.kind === 'string' ? effect.kind : ''
    if (!kind) continue
    effects.push({
      kind,
      payload: asRecord(effect.payload) || {},
    })
  }
  return effects
}

function resolveWorkflowId(
  payload: Record<string, unknown>,
  toolCall?: CopilotToolCall
): string | undefined {
  const payloadWorkflowId = typeof payload.workflowId === 'string' ? payload.workflowId : undefined
  if (payloadWorkflowId) return payloadWorkflowId

  const params = asRecord(toolCall?.params)
  const paramWorkflowId = typeof params?.workflowId === 'string' ? params.workflowId : undefined
  if (paramWorkflowId) return paramWorkflowId

  return useWorkflowRegistry.getState().activeWorkflowId || undefined
}

function resolveWorkflowState(
  payload: Record<string, unknown>,
  resultPayload?: Record<string, unknown>
): WorkflowState | null {
  const payloadState = asRecord(payload.workflowState)
  if (payloadState) return payloadState as unknown as WorkflowState

  if (resultPayload) {
    const directState = asRecord(resultPayload.workflowState)
    if (directState) return directState as unknown as WorkflowState
    const editResult = asRecord(resultPayload.editResult)
    const nestedState = asRecord(editResult?.workflowState)
    if (nestedState) return nestedState as unknown as WorkflowState
  }

  return null
}

function applyDeploymentSyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return

  const registry = useWorkflowRegistry.getState()
  const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)

  const isDeployed =
    typeof payload.isDeployed === 'boolean'
      ? payload.isDeployed
      : (existingStatus?.isDeployed ?? true)

  const deployedAt = (() => {
    if (typeof payload.deployedAt === 'string' && payload.deployedAt) {
      const parsed = new Date(payload.deployedAt)
      if (!Number.isNaN(parsed.getTime())) return parsed
    }
    return existingStatus?.deployedAt
  })()

  const apiKey =
    typeof payload.apiKey === 'string' && payload.apiKey.length > 0
      ? payload.apiKey
      : existingStatus?.apiKey

  registry.setDeploymentStatus(workflowId, isDeployed, deployedAt, apiKey)
}

function applyApiKeySyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return

  const apiKey = typeof payload.apiKey === 'string' ? payload.apiKey : undefined
  const registry = useWorkflowRegistry.getState()
  const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)
  registry.setDeploymentStatus(
    workflowId,
    existingStatus?.isDeployed ?? false,
    existingStatus?.deployedAt,
    apiKey || existingStatus?.apiKey
  )
}

function applyWorkflowVariablesReload(
  payload: Record<string, unknown>,
  toolCall?: CopilotToolCall
): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return
  useVariablesStore.getState().loadForWorkflow(workflowId)
}

export function applyToolEffects(params: {
  effectsRaw: unknown
  toolCall?: CopilotToolCall
  resultPayload?: Record<string, unknown>
}): void {
  const effects = parseToolEffects(params.effectsRaw)
  if (effects.length === 0) {
    if (params.toolCall?.name === 'workflow_change' && params.resultPayload) {
      const workflowState = resolveWorkflowState({}, params.resultPayload)
      if (!workflowState) return
      useWorkflowDiffStore
        .getState()
        .setProposedChanges(workflowState)
        .catch((error) => {
          logger.error('Failed to apply fallback workflow diff from result payload', {
            error: error instanceof Error ? error.message : String(error),
          })
        })
    }
    return
  }

  for (const effect of effects) {
    switch (effect.kind) {
      case 'workflow.diff.proposed': {
        const workflowState = resolveWorkflowState(effect.payload, params.resultPayload)
        if (!workflowState) break
        useWorkflowDiffStore
          .getState()
          .setProposedChanges(workflowState)
          .catch((error) => {
            logger.error('Failed to apply workflow diff effect', {
              error: error instanceof Error ? error.message : String(error),
            })
          })
        break
      }

      case 'workflow.deployment.sync':
        applyDeploymentSyncEffect(effect.payload, params.toolCall)
        break

      case 'workflow.api_key.sync':
        applyApiKeySyncEffect(effect.payload, params.toolCall)
        break

      case 'environment.variables.reload':
        useEnvironmentStore.getState().loadEnvironmentVariables()
        break

      case 'workflow.variables.reload':
        applyWorkflowVariablesReload(effect.payload, params.toolCall)
        break

      default:
        logger.debug('Ignoring unknown tool effect', { kind: effect.kind })
        break
    }
  }
}
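As a rough illustration of the contract applyToolEffects expects, a server tool result might carry an effects array like the one below. The effect kinds are exactly those handled in the switch above; the payload values are hypothetical examples, not data from this repository.

// Hypothetical effects payload; kinds match the switch in applyToolEffects, values are examples.
const exampleEffects = [
  { kind: 'workflow.deployment.sync', payload: { workflowId: 'wf_123', isDeployed: true, deployedAt: '2025-01-01T00:00:00Z' } },
  { kind: 'workflow.api_key.sync', payload: { workflowId: 'wf_123', apiKey: 'sim_example_key' } },
  { kind: 'environment.variables.reload', payload: {} },
  { kind: 'workflow.variables.reload', payload: { workflowId: 'wf_123' } },
]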
@@ -101,9 +101,6 @@ export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'
/** POST — revert to a checkpoint. */
export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'

-/** GET/POST/DELETE — manage auto-allowed tools. */
-export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
-
/** GET — fetch dynamically available copilot models. */
export const COPILOT_MODELS_API_PATH = '/api/copilot/models'

@@ -1,67 +0,0 @@ (deleted file)
export const INTERRUPT_TOOL_NAMES = [
  'set_global_workflow_variables',
  'run_workflow',
  'run_workflow_until_block',
  'run_from_block',
  'run_block',
  'manage_mcp_tool',
  'manage_custom_tool',
  'deploy_mcp',
  'deploy_chat',
  'deploy_api',
  'create_workspace_mcp_server',
  'set_environment_variables',
  'make_api_request',
  'oauth_request_access',
  'navigate_ui',
  'knowledge_base',
  'generate_api_key',
] as const

export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)

export const SUBAGENT_TOOL_NAMES = [
  'debug',
  'edit',
  'build',
  'plan',
  'test',
  'deploy',
  'auth',
  'research',
  'knowledge',
  'custom_tool',
  'tour',
  'info',
  'workflow',
  'evaluate',
  'superagent',
  'discovery',
] as const

export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)

/**
 * Respond tools are internal to the copilot's subagent system.
 * They're used by subagents to signal completion and should NOT be executed by the sim side.
 * The copilot backend handles these internally.
 */
export const RESPOND_TOOL_NAMES = [
  'plan_respond',
  'edit_respond',
  'build_respond',
  'debug_respond',
  'info_respond',
  'research_respond',
  'deploy_respond',
  'superagent_respond',
  'discovery_respond',
  'tour_respond',
  'auth_respond',
  'workflow_respond',
  'knowledge_respond',
  'custom_tool_respond',
  'test_respond',
] as const

export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)
@@ -1,17 +1,12 @@
import { createLogger } from '@sim/logger'
import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
-import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
import {
  asRecord,
  getEventData,
  markToolResultSeen,
  wasToolResultSeen,
} from '@/lib/copilot/orchestrator/sse-utils'
-import {
-  isIntegrationTool,
-  isToolAvailableOnSimSide,
-  markToolComplete,
-} from '@/lib/copilot/orchestrator/tool-executor'
+import { markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
import type {
  ContentBlock,
  ExecutionContext,
@@ -22,7 +17,6 @@ import type {
} from '@/lib/copilot/orchestrator/types'
import {
  executeToolAndReport,
-  isInterruptToolName,
  waitForToolCompletion,
  waitForToolDecision,
} from './tool-execution'
@@ -35,12 +29,208 @@ const logger = createLogger('CopilotSseHandlers')
 * execution to the browser client instead of running executeWorkflow directly.
 */
const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
-  'run_workflow',
-  'run_workflow_until_block',
-  'run_from_block',
-  'run_block',
+  'workflow_run',
])

function mapServerStateToToolStatus(state: unknown): ToolCallState['status'] {
|
||||||
|
switch (String(state || '')) {
|
||||||
|
case 'generating':
|
||||||
|
case 'pending':
|
||||||
|
case 'awaiting_approval':
|
||||||
|
return 'pending'
|
||||||
|
case 'executing':
|
||||||
|
return 'executing'
|
||||||
|
case 'success':
|
||||||
|
return 'success'
|
||||||
|
case 'rejected':
|
||||||
|
case 'skipped':
|
||||||
|
return 'rejected'
|
||||||
|
case 'aborted':
|
||||||
|
return 'skipped'
|
||||||
|
case 'error':
|
||||||
|
case 'failed':
|
||||||
|
return 'error'
|
||||||
|
default:
|
||||||
|
return 'pending'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getExecutionTarget(
|
||||||
|
toolData: Record<string, unknown>,
|
||||||
|
toolName: string
|
||||||
|
): { target: string; capabilityId?: string } {
|
||||||
|
const execution = asRecord(toolData.execution)
|
||||||
|
if (typeof execution.target === 'string' && execution.target.length > 0) {
|
||||||
|
return {
|
||||||
|
target: execution.target,
|
||||||
|
capabilityId:
|
||||||
|
typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback only when metadata is missing.
|
||||||
|
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
|
||||||
|
return { target: 'sim_client_capability', capabilityId: 'workflow.run' }
|
||||||
|
}
|
||||||
|
return { target: 'sim_server' }
|
||||||
|
}
|
||||||
|
|
||||||
|
function needsApproval(toolData: Record<string, unknown>): boolean {
|
||||||
|
const ui = asRecord(toolData.ui)
|
||||||
|
return ui.showInterrupt === true
|
||||||
|
}
|
||||||
|
|
||||||
|
async function waitForClientCapabilityAndReport(
|
||||||
|
toolCall: ToolCallState,
|
||||||
|
options: OrchestratorOptions,
|
||||||
|
logScope: string
|
||||||
|
): Promise<void> {
|
||||||
|
toolCall.status = 'executing'
|
||||||
|
const completion = await waitForToolCompletion(
|
||||||
|
toolCall.id,
|
||||||
|
options.timeout || STREAM_TIMEOUT_MS,
|
||||||
|
options.abortSignal
|
||||||
|
)
|
||||||
|
|
||||||
|
if (completion?.status === 'background') {
|
||||||
|
toolCall.status = 'skipped'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
markToolComplete(
|
||||||
|
toolCall.id,
|
||||||
|
toolCall.name,
|
||||||
|
202,
|
||||||
|
completion.message || 'Tool execution moved to background',
|
||||||
|
{ background: true }
|
||||||
|
).catch((err) => {
|
||||||
|
logger.error(`markToolComplete fire-and-forget failed (${logScope} background)`, {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (completion?.status === 'rejected') {
|
||||||
|
toolCall.status = 'rejected'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
markToolComplete(toolCall.id, toolCall.name, 400, completion.message || 'Tool execution rejected')
|
||||||
|
.catch((err) => {
|
||||||
|
logger.error(`markToolComplete fire-and-forget failed (${logScope} rejected)`, {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const success = completion?.status === 'success'
|
||||||
|
toolCall.status = success ? 'success' : 'error'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
|
||||||
|
markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
|
||||||
|
logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
toolName: toolCall.name,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
    })
  })
  markToolResultSeen(toolCall.id)
}

function markToolCallAndNotify(
  toolCall: ToolCallState,
  statusCode: number,
  message: string,
  data: Record<string, unknown> | undefined,
  logScope: string
): void {
  markToolComplete(toolCall.id, toolCall.name, statusCode, message, data).catch((err) => {
    logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
      toolCallId: toolCall.id,
      error: err instanceof Error ? err.message : String(err),
    })
  })
  markToolResultSeen(toolCall.id)
}

async function executeToolCallWithPolicy(
  toolCall: ToolCallState,
  toolName: string,
  toolData: Record<string, unknown>,
  context: StreamingContext,
  execContext: ExecutionContext,
  options: OrchestratorOptions,
  logScope: string
): Promise<void> {
  const execution = getExecutionTarget(toolData, toolName)
  const isInteractive = options.interactive === true
  const requiresApproval = isInteractive && needsApproval(toolData)

  if (toolData.state) {
    toolCall.status = mapServerStateToToolStatus(toolData.state)
  }

  if (requiresApproval) {
    const decision = await waitForToolDecision(
      toolCall.id,
      options.timeout || STREAM_TIMEOUT_MS,
      options.abortSignal
    )

    if (decision?.status === 'accepted' || decision?.status === 'success') {
      // Continue below into normal execution path.
    } else if (decision?.status === 'rejected' || decision?.status === 'error') {
      toolCall.status = 'rejected'
      toolCall.endTime = Date.now()
      markToolCallAndNotify(
        toolCall,
        400,
        decision.message || 'Tool execution rejected',
        { skipped: true, reason: 'user_rejected' },
        `${logScope} rejected`
      )
      return
    } else if (decision?.status === 'background') {
      toolCall.status = 'skipped'
      toolCall.endTime = Date.now()
      markToolCallAndNotify(
        toolCall,
        202,
        decision.message || 'Tool execution moved to background',
        { background: true },
        `${logScope} background`
      )
      return
    } else {
      // Decision was null (timeout/abort).
      toolCall.status = 'rejected'
      toolCall.endTime = Date.now()
      markToolCallAndNotify(
        toolCall,
        408,
        'Tool approval timed out',
        { skipped: true, reason: 'timeout' },
        `${logScope} timeout`
      )
      return
    }
  }

  if (execution.target === 'sim_client_capability' && isInteractive) {
    await waitForClientCapabilityAndReport(toolCall, options, logScope)
    return
  }

  if (
    (execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
    options.autoExecuteTools !== false
  ) {
    await executeToolAndReport(toolCall.id, context, execContext, options)
  }
}

// Normalization + dedupe helpers live in sse-utils to keep server/client in sync.

function inferToolSuccess(data: Record<string, unknown> | undefined): {
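The approval branches above map each decision outcome to a fixed status code before notifying the Go backend. The following is a minimal standalone sketch of that mapping, written out as a helper purely for reference; the repository does not define this function, and the codes are taken only from the branches above.

// Illustrative only: decision outcome -> status code reported via markToolComplete.
type ToolDecisionOutcome = 'accepted' | 'success' | 'rejected' | 'error' | 'background' | 'timeout'

function decisionStatusCode(outcome: ToolDecisionOutcome): number | null {
  switch (outcome) {
    case 'accepted':
    case 'success':
      return null // no immediate notification: execution continues in the normal path
    case 'rejected':
    case 'error':
      return 400 // user rejected, or the decision arrived as an error
    case 'background':
      return 202 // accepted but moved to background
    case 'timeout':
      return 408 // decision never arrived (the null-decision branch above)
  }
}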
@@ -85,7 +275,11 @@ export const sseHandlers: Record<string, SSEHandler> = {

     const { success, hasResultData, hasError } = inferToolSuccess(data)

-    current.status = success ? 'success' : 'error'
+    current.status = data?.state
+      ? mapServerStateToToolStatus(data.state)
+      : success
+        ? 'success'
+        : 'error'
     current.endTime = Date.now()
     if (hasResultData) {
       current.result = {
@@ -104,7 +298,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     if (!toolCallId) return
     const current = context.toolCalls.get(toolCallId)
     if (!current) return
-    current.status = 'error'
+    current.status = data?.state ? mapServerStateToToolStatus(data.state) : 'error'
     current.error = (data?.error as string | undefined) || 'Tool execution failed'
     current.endTime = Date.now()
   },
@@ -121,7 +315,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     context.toolCalls.set(toolCallId, {
       id: toolCallId,
       name: toolName,
-      status: 'pending',
+      status: data?.state ? mapServerStateToToolStatus(data.state) : 'pending',
       startTime: Date.now(),
     })
   }
@@ -156,7 +350,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     context.toolCalls.set(toolCallId, {
       id: toolCallId,
       name: toolName,
-      status: 'pending',
+      status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
       params: args,
       startTime: Date.now(),
     })
@@ -170,147 +364,15 @@ export const sseHandlers: Record<string, SSEHandler> = {
     const toolCall = context.toolCalls.get(toolCallId)
     if (!toolCall) return

-    // Subagent tools are executed by the copilot backend, not sim side.
-    if (SUBAGENT_TOOL_SET.has(toolName)) {
-      return
-    }
-
-    // Respond tools are internal to copilot's subagent system - skip execution.
-    // The copilot backend handles these internally to signal subagent completion.
-    if (RESPOND_TOOL_SET.has(toolName)) {
-      toolCall.status = 'success'
-      toolCall.endTime = Date.now()
-      toolCall.result = {
-        success: true,
-        output: 'Internal respond tool - handled by copilot backend',
-      }
-      return
-    }
-
-    const isInterruptTool = isInterruptToolName(toolName)
-    const isInteractive = options.interactive === true
-    // Integration tools (user-installed) also require approval in interactive mode
-    const needsApproval = isInterruptTool || isIntegrationTool(toolName)
-
-    if (needsApproval && isInteractive) {
-      const decision = await waitForToolDecision(
-        toolCallId,
-        options.timeout || STREAM_TIMEOUT_MS,
-        options.abortSignal
-      )
-      if (decision?.status === 'accepted' || decision?.status === 'success') {
-        // Client-executable run tools: defer execution to the browser client.
-        // The client calls executeWorkflowWithFullLogging for real-time feedback
-        // (block pulsing, logs, stop button) and reports completion via
-        // /api/copilot/confirm with status success/error. We poll Redis for
-        // that completion signal, then fire-and-forget markToolComplete to Go.
-        if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
-          toolCall.status = 'executing'
-          const completion = await waitForToolCompletion(
-            toolCallId,
-            options.timeout || STREAM_TIMEOUT_MS,
-            options.abortSignal
-          )
-          if (completion?.status === 'background') {
-            toolCall.status = 'skipped'
-            toolCall.endTime = Date.now()
-            markToolComplete(
-              toolCall.id,
-              toolCall.name,
-              202,
-              completion.message || 'Tool execution moved to background',
-              { background: true }
-            ).catch((err) => {
-              logger.error('markToolComplete fire-and-forget failed (run tool background)', {
-                toolCallId: toolCall.id,
-                error: err instanceof Error ? err.message : String(err),
-              })
-            })
-            markToolResultSeen(toolCallId)
-            return
-          }
-          const success = completion?.status === 'success'
-          toolCall.status = success ? 'success' : 'error'
-          toolCall.endTime = Date.now()
-          const msg =
-            completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
-          // Fire-and-forget: tell Go backend the tool is done
-          // (must NOT await — see deadlock note in executeToolAndReport)
-          markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
-            logger.error('markToolComplete fire-and-forget failed (run tool)', {
-              toolCallId: toolCall.id,
-              toolName: toolCall.name,
-              error: err instanceof Error ? err.message : String(err),
-            })
-          })
-          markToolResultSeen(toolCallId)
-          return
-        }
-        await executeToolAndReport(toolCallId, context, execContext, options)
-        return
-      }
-
-      if (decision?.status === 'rejected' || decision?.status === 'error') {
-        toolCall.status = 'rejected'
-        toolCall.endTime = Date.now()
-        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
-        markToolComplete(
-          toolCall.id,
-          toolCall.name,
-          400,
-          decision.message || 'Tool execution rejected',
-          { skipped: true, reason: 'user_rejected' }
-        ).catch((err) => {
-          logger.error('markToolComplete fire-and-forget failed (rejected)', {
-            toolCallId: toolCall.id,
-            error: err instanceof Error ? err.message : String(err),
-          })
-        })
-        markToolResultSeen(toolCall.id)
-        return
-      }
-
-      if (decision?.status === 'background') {
-        toolCall.status = 'skipped'
-        toolCall.endTime = Date.now()
-        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
-        markToolComplete(
-          toolCall.id,
-          toolCall.name,
-          202,
-          decision.message || 'Tool execution moved to background',
-          { background: true }
-        ).catch((err) => {
-          logger.error('markToolComplete fire-and-forget failed (background)', {
-            toolCallId: toolCall.id,
-            error: err instanceof Error ? err.message : String(err),
-          })
-        })
-        markToolResultSeen(toolCall.id)
-        return
-      }
-
-      // Decision was null — timed out or aborted.
-      // Do NOT fall through to auto-execute. Mark the tool as timed out
-      // and notify Go so it can unblock waitForExternalTool.
-      toolCall.status = 'rejected'
-      toolCall.endTime = Date.now()
-      markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
-        skipped: true,
-        reason: 'timeout',
-      }).catch((err) => {
-        logger.error('markToolComplete fire-and-forget failed (timeout)', {
-          toolCallId: toolCall.id,
-          error: err instanceof Error ? err.message : String(err),
-        })
-      })
-      markToolResultSeen(toolCall.id)
-      return
-    }
-
-    if (options.autoExecuteTools !== false) {
-      await executeToolAndReport(toolCallId, context, execContext, options)
-    }
+    await executeToolCallWithPolicy(
+      toolCall,
+      toolName,
+      toolData,
+      context,
+      execContext,
+      options,
+      'run tool'
+    )
   },
   reasoning: (event, context) => {
     const d = asRecord(event.data)
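The removed handler above (and executeToolAndReport, which it defers to) repeatedly uses the same pattern: the completion report to the Go backend is started but deliberately never awaited, per the deadlock note it references. A minimal sketch of that fire-and-forget shape follows; the helper name and its usage comment are illustrative stand-ins, not exports of this repository.

// Hedged sketch: start a notification without blocking the SSE loop on it.
function fireAndForget(promise: Promise<unknown>, onError: (err: Error) => void): void {
  promise.catch((err) => {
    onError(err instanceof Error ? err : new Error(String(err)))
  })
}

// Usage sketch (assumed call site):
// fireAndForget(markToolComplete(id, name, 200, 'Tool completed'), (err) =>
//   logger.error('markToolComplete failed', { error: err.message })
// )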
@@ -410,7 +472,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
     const toolCall: ToolCallState = {
       id: toolCallId,
       name: toolName,
-      status: 'pending',
+      status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
       params: args,
       startTime: Date.now(),
     }
@@ -428,157 +490,15 @@ export const subAgentHandlers: Record<string, SSEHandler> = {

     if (isPartial) return

-    // Respond tools are internal to copilot's subagent system - skip execution.
-    if (RESPOND_TOOL_SET.has(toolName)) {
-      toolCall.status = 'success'
-      toolCall.endTime = Date.now()
-      toolCall.result = {
-        success: true,
-        output: 'Internal respond tool - handled by copilot backend',
-      }
-      return
-    }
-
-    // Tools that only exist on the Go backend (e.g. search_patterns,
-    // search_errors, remember_debug) should NOT be re-executed on the Sim side.
-    // The Go backend already executed them and will send its own tool_result
-    // SSE event with the real outcome. Trying to execute them here would fail
-    // with "Tool not found" and incorrectly mark the tool as failed.
-    if (!isToolAvailableOnSimSide(toolName)) {
-      return
-    }
-
-    // Interrupt tools and integration tools (user-installed) require approval
-    // in interactive mode, same as top-level handler.
-    const needsSubagentApproval = isInterruptToolName(toolName) || isIntegrationTool(toolName)
-    if (options.interactive === true && needsSubagentApproval) {
-      const decision = await waitForToolDecision(
-        toolCallId,
-        options.timeout || STREAM_TIMEOUT_MS,
-        options.abortSignal
-      )
-      if (decision?.status === 'accepted' || decision?.status === 'success') {
-        await executeToolAndReport(toolCallId, context, execContext, options)
-        return
-      }
-      if (decision?.status === 'rejected' || decision?.status === 'error') {
-        toolCall.status = 'rejected'
-        toolCall.endTime = Date.now()
-        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
-        markToolComplete(
-          toolCall.id,
-          toolCall.name,
-          400,
-          decision.message || 'Tool execution rejected',
-          { skipped: true, reason: 'user_rejected' }
-        ).catch((err) => {
-          logger.error('markToolComplete fire-and-forget failed (subagent rejected)', {
-            toolCallId: toolCall.id,
-            error: err instanceof Error ? err.message : String(err),
-          })
-        })
-        markToolResultSeen(toolCall.id)
-        return
-      }
-      if (decision?.status === 'background') {
-        toolCall.status = 'skipped'
-        toolCall.endTime = Date.now()
-        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
-        markToolComplete(
-          toolCall.id,
-          toolCall.name,
-          202,
-          decision.message || 'Tool execution moved to background',
-          { background: true }
-        ).catch((err) => {
-          logger.error('markToolComplete fire-and-forget failed (subagent background)', {
-            toolCallId: toolCall.id,
-            error: err instanceof Error ? err.message : String(err),
-          })
-        })
-        markToolResultSeen(toolCall.id)
-        return
-      }
-
-      // Decision was null — timed out or aborted.
-      // Do NOT fall through to auto-execute.
-      toolCall.status = 'rejected'
-      toolCall.endTime = Date.now()
-      markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
-        skipped: true,
-        reason: 'timeout',
-      }).catch((err) => {
-        logger.error('markToolComplete fire-and-forget failed (subagent timeout)', {
-          toolCallId: toolCall.id,
-          error: err instanceof Error ? err.message : String(err),
-        })
-      })
-      markToolResultSeen(toolCall.id)
-      return
-    }
-
-    // Client-executable run tools in interactive mode: defer to client.
-    // Same pattern as main handler: wait for client completion, then tell Go.
-    if (options.interactive === true && CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
-      toolCall.status = 'executing'
-      const completion = await waitForToolCompletion(
-        toolCallId,
-        options.timeout || STREAM_TIMEOUT_MS,
-        options.abortSignal
-      )
-      if (completion?.status === 'rejected') {
-        toolCall.status = 'rejected'
-        toolCall.endTime = Date.now()
-        markToolComplete(
-          toolCall.id,
-          toolCall.name,
-          400,
-          completion.message || 'Tool execution rejected'
-        ).catch((err) => {
-          logger.error('markToolComplete fire-and-forget failed (subagent run tool rejected)', {
-            toolCallId: toolCall.id,
-            error: err instanceof Error ? err.message : String(err),
-          })
-        })
-        markToolResultSeen(toolCallId)
-        return
-      }
-      if (completion?.status === 'background') {
-        toolCall.status = 'skipped'
-        toolCall.endTime = Date.now()
-        markToolComplete(
-          toolCall.id,
-          toolCall.name,
-          202,
-          completion.message || 'Tool execution moved to background',
-          { background: true }
-        ).catch((err) => {
-          logger.error('markToolComplete fire-and-forget failed (subagent run tool background)', {
-            toolCallId: toolCall.id,
-            error: err instanceof Error ? err.message : String(err),
-          })
-        })
-        markToolResultSeen(toolCallId)
-        return
-      }
-      const success = completion?.status === 'success'
-      toolCall.status = success ? 'success' : 'error'
-      toolCall.endTime = Date.now()
-      const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
-      markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
-        logger.error('markToolComplete fire-and-forget failed (subagent run tool)', {
-          toolCallId: toolCall.id,
-          toolName: toolCall.name,
-          error: err instanceof Error ? err.message : String(err),
-        })
-      })
-      markToolResultSeen(toolCallId)
-      return
-    }
-
-    if (options.autoExecuteTools !== false) {
-      await executeToolAndReport(toolCallId, context, execContext, options)
-    }
+    await executeToolCallWithPolicy(
+      toolCall,
+      toolName,
+      toolData,
+      context,
+      execContext,
+      options,
+      'subagent run tool'
+    )
   },
   tool_result: (event, context) => {
     const parentToolCallId = context.subAgentParentToolCallId
@@ -596,7 +516,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {

     const { success, hasResultData, hasError } = inferToolSuccess(data)

-    const status = success ? 'success' : 'error'
+    const status = data?.state ? mapServerStateToToolStatus(data.state) : success ? 'success' : 'error'
     const endTime = Date.now()
     const result = hasResultData ? { success, output: data?.result || data?.data } : undefined
@@ -4,7 +4,6 @@ import {
   TOOL_DECISION_MAX_POLL_MS,
   TOOL_DECISION_POLL_BACKOFF,
 } from '@/lib/copilot/constants'
-import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
 import {
   asRecord,
@@ -21,10 +20,6 @@ import type {

 const logger = createLogger('CopilotSseToolExecution')

-export function isInterruptToolName(toolName: string): boolean {
-  return INTERRUPT_TOOL_SET.has(toolName)
-}
-
 export async function executeToolAndReport(
   toolCallId: string,
   context: StreamingContext,
@@ -34,9 +29,11 @@ export async function executeToolAndReport(
   const toolCall = context.toolCalls.get(toolCallId)
   if (!toolCall) return

-  if (toolCall.status === 'executing') return
+  const lockable = toolCall as typeof toolCall & { __simExecuting?: boolean }
+  if (lockable.__simExecuting) return
   if (wasToolResultSeen(toolCall.id)) return

+  lockable.__simExecuting = true
   toolCall.status = 'executing'
   try {
     const result = await executeToolServerSide(toolCall, execContext)
@@ -122,6 +119,8 @@ export async function executeToolAndReport(
       },
     }
     await options?.onEvent?.(errorEvent)
+  } finally {
+    delete lockable.__simExecuting
   }
 }
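The two hunks above replace the old status-based duplicate check with a private in-flight flag that is always cleared in a finally block. A minimal standalone sketch of that re-entrancy guard follows, with simplified stand-in types; it is illustrative only and not an export of the repository.

// Hedged sketch: run work at most once at a time per target object.
type Lockable = { __simExecuting?: boolean }

async function runOnce<T extends Lockable>(target: T, work: () => Promise<void>): Promise<void> {
  if (target.__simExecuting) return // duplicate trigger while already running: drop it
  target.__simExecuting = true
  try {
    await work()
  } finally {
    delete target.__simExecuting // always release, even if work() throws
  }
}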
@@ -14,7 +14,7 @@ describe('sse-utils', () => {
       type: 'tool_result',
       data: JSON.stringify({
         id: 'tool_1',
-        name: 'edit_workflow',
+        name: 'workflow_change',
         success: true,
         result: { ok: true },
       }),
@@ -23,7 +23,7 @@ describe('sse-utils', () => {
     const normalized = normalizeSseEvent(event as any)

     expect(normalized.toolCallId).toBe('tool_1')
-    expect(normalized.toolName).toBe('edit_workflow')
+    expect(normalized.toolName).toBe('workflow_change')
     expect(normalized.success).toBe(true)
     expect(normalized.result).toEqual({ ok: true })
   })
@@ -220,7 +220,8 @@ export async function executeDeployMcp(
   if (!workflowRecord.isDeployed) {
     return {
       success: false,
-      error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
+      error:
+        'Workflow must be deployed before adding as an MCP tool. Use workflow_deploy(mode: "api") first.',
     }
   }
@@ -50,6 +50,8 @@ import type {
   RunWorkflowParams,
   RunWorkflowUntilBlockParams,
   SetGlobalWorkflowVariablesParams,
+  WorkflowDeployParams,
+  WorkflowRunParams,
 } from './param-types'
 import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
 import {
@@ -318,13 +320,91 @@ async function executeManageCustomTool(
   }
 }

+async function executeWorkflowRunUnified(
+  rawParams: Record<string, unknown>,
+  context: ExecutionContext
+): Promise<ToolCallResult> {
+  const params = rawParams as WorkflowRunParams
+  const mode = params.mode || 'full'
+
+  switch (mode) {
+    case 'full':
+      return executeRunWorkflow(params as RunWorkflowParams, context)
+    case 'until_block':
+      if (!params.stopAfterBlockId) {
+        return { success: false, error: 'stopAfterBlockId is required for mode=until_block' }
+      }
+      return executeRunWorkflowUntilBlock(params as RunWorkflowUntilBlockParams, context)
+    case 'from_block':
+      if (!params.startBlockId) {
+        return { success: false, error: 'startBlockId is required for mode=from_block' }
+      }
+      return executeRunFromBlock(params as RunFromBlockParams, context)
+    case 'block':
+      if (!params.blockId) {
+        return { success: false, error: 'blockId is required for mode=block' }
+      }
+      return executeRunBlock(params as RunBlockParams, context)
+    default:
+      return {
+        success: false,
+        error: `Unsupported workflow_run mode: ${String(mode)}`,
+      }
+  }
+}
+
+async function executeWorkflowDeployUnified(
+  rawParams: Record<string, unknown>,
+  context: ExecutionContext
+): Promise<ToolCallResult> {
+  const params = rawParams as unknown as WorkflowDeployParams
+  const mode = params.mode
+
+  if (!mode) {
+    return { success: false, error: 'mode is required for workflow_deploy' }
+  }
+
+  const scopedContext =
+    params.workflowId && params.workflowId !== context.workflowId
+      ? { ...context, workflowId: params.workflowId }
+      : context
+
+  switch (mode) {
+    case 'status':
+      return executeCheckDeploymentStatus(params as CheckDeploymentStatusParams, scopedContext)
+    case 'redeploy':
+      return executeRedeploy(scopedContext)
+    case 'api':
+      return executeDeployApi(params as DeployApiParams, scopedContext)
+    case 'chat':
+      return executeDeployChat(params as DeployChatParams, scopedContext)
+    case 'mcp':
+      return executeDeployMcp(params as DeployMcpParams, scopedContext)
+    case 'list_mcp_servers':
+      return executeListWorkspaceMcpServers(params as ListWorkspaceMcpServersParams, scopedContext)
+    case 'create_mcp_server':
+      return executeCreateWorkspaceMcpServer(
+        params as CreateWorkspaceMcpServerParams,
+        scopedContext
+      )
+    default:
+      return {
+        success: false,
+        error: `Unsupported workflow_deploy mode: ${String(mode)}`,
+      }
+  }
+}
+
 const SERVER_TOOLS = new Set<string>([
   'get_blocks_and_tools',
   'get_blocks_metadata',
   'get_block_options',
   'get_block_config',
   'get_trigger_blocks',
-  'edit_workflow',
+  'workflow_context_get',
+  'workflow_context_expand',
+  'workflow_change',
+  'workflow_verify',
   'get_workflow_console',
   'search_documentation',
   'search_online',
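For orientation, a hedged usage sketch of the unified runner added above. In the diff it is module-private and only reached through the workflow_run handler entry, so the direct call, the wrapper function name, and the placeholder IDs here are assumptions for illustration.

// Hedged sketch: one until_block run via the unified dispatcher.
async function exampleUntilBlockRun(context: ExecutionContext): Promise<ToolCallResult> {
  return executeWorkflowRunUnified(
    {
      workflowId: 'wf_123',          // placeholder ID
      mode: 'until_block',
      stopAfterBlockId: 'block_abc', // required for this mode; omitting it yields an error result
      workflow_input: { query: 'hello' },
    },
    context
  )
}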
@@ -352,11 +432,7 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
   get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
   get_block_upstream_references: (p, c) =>
     executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
-  run_workflow: (p, c) => executeRunWorkflow(p as RunWorkflowParams, c),
-  run_workflow_until_block: (p, c) =>
-    executeRunWorkflowUntilBlock(p as unknown as RunWorkflowUntilBlockParams, c),
-  run_from_block: (p, c) => executeRunFromBlock(p as unknown as RunFromBlockParams, c),
-  run_block: (p, c) => executeRunBlock(p as unknown as RunBlockParams, c),
+  workflow_run: (p, c) => executeWorkflowRunUnified(p, c),
   get_deployed_workflow_state: (p, c) =>
     executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
   generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
@@ -367,10 +443,7 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
   }),
   set_global_workflow_variables: (p, c) =>
     executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
-  deploy_api: (p, c) => executeDeployApi(p as DeployApiParams, c),
-  deploy_chat: (p, c) => executeDeployChat(p as DeployChatParams, c),
-  deploy_mcp: (p, c) => executeDeployMcp(p as DeployMcpParams, c),
-  redeploy: (_p, c) => executeRedeploy(c),
+  workflow_deploy: (p, c) => executeWorkflowDeployUnified(p, c),
   check_deployment_status: (p, c) =>
     executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
   list_workspace_mcp_servers: (p, c) =>
@@ -93,6 +93,18 @@ export interface RunBlockParams {
   useDeployedState?: boolean
 }

+export interface WorkflowRunParams {
+  mode?: 'full' | 'until_block' | 'from_block' | 'block'
+  workflowId?: string
+  workflow_input?: unknown
+  input?: unknown
+  useDeployedState?: boolean
+  stopAfterBlockId?: string
+  startBlockId?: string
+  blockId?: string
+  executionId?: string
+}
+
 export interface GetDeployedWorkflowStateParams {
   workflowId?: string
 }
@@ -169,6 +181,39 @@ export interface CreateWorkspaceMcpServerParams {
   workflowIds?: string[]
 }

+export interface WorkflowDeployParams {
+  mode:
+    | 'status'
+    | 'redeploy'
+    | 'api'
+    | 'chat'
+    | 'mcp'
+    | 'list_mcp_servers'
+    | 'create_mcp_server'
+  workflowId?: string
+  action?: 'deploy' | 'undeploy'
+  identifier?: string
+  title?: string
+  description?: string
+  customizations?: {
+    primaryColor?: string
+    secondaryColor?: string
+    welcomeMessage?: string
+    iconUrl?: string
+  }
+  authType?: 'none' | 'password' | 'public' | 'email' | 'sso'
+  password?: string
+  allowedEmails?: string[]
+  outputConfigs?: unknown[]
+  serverId?: string
+  toolName?: string
+  toolDescription?: string
+  parameterSchema?: Record<string, unknown>
+  name?: string
+  isPublic?: boolean
+  workflowIds?: string[]
+}
+
 // === Workflow Organization Params ===

 export interface RenameWorkflowParams {
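Because WorkflowDeployParams above covers several deployment targets with one interface, two hedged example literals follow to show which optional fields matter per mode. The IDs, names, and password values are placeholders, not values from this diff.

// Hedged examples of WorkflowDeployParams values (placeholder data).
const deployChatExample: WorkflowDeployParams = {
  mode: 'chat',
  workflowId: 'wf_123',
  identifier: 'support-bot',
  title: 'Support Bot',
  authType: 'password',
  password: 'example-only',
}

const deploymentStatusExample: WorkflowDeployParams = {
  mode: 'status',
  workflowId: 'wf_123',
}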
@@ -592,16 +592,40 @@ const META_edit: ToolMetadata = {
   },
 }

-const META_edit_workflow: ToolMetadata = {
+const META_workflow_change: ToolMetadata = {
   displayNames: {
-    [ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
-    [ClientToolCallState.executing]: { text: 'Editing your workflow', icon: Loader2 },
-    [ClientToolCallState.success]: { text: 'Edited your workflow', icon: Grid2x2Check },
-    [ClientToolCallState.error]: { text: 'Failed to edit your workflow', icon: XCircle },
+    [ClientToolCallState.generating]: { text: 'Planning workflow changes', icon: Loader2 },
+    [ClientToolCallState.executing]: { text: 'Applying workflow changes', icon: Loader2 },
+    [ClientToolCallState.success]: { text: 'Updated your workflow', icon: Grid2x2Check },
+    [ClientToolCallState.error]: { text: 'Failed to update your workflow', icon: XCircle },
     [ClientToolCallState.review]: { text: 'Review your workflow changes', icon: Grid2x2 },
     [ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
-    [ClientToolCallState.aborted]: { text: 'Aborted editing your workflow', icon: MinusCircle },
-    [ClientToolCallState.pending]: { text: 'Editing your workflow', icon: Loader2 },
+    [ClientToolCallState.aborted]: { text: 'Aborted workflow changes', icon: MinusCircle },
+    [ClientToolCallState.pending]: { text: 'Planning workflow changes', icon: Loader2 },
+  },
+  getDynamicText: (params, state) => {
+    const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
+    if (mode === 'dry_run') {
+      switch (state) {
+        case ClientToolCallState.success:
+          return 'Planned workflow changes'
+        case ClientToolCallState.executing:
+        case ClientToolCallState.generating:
+        case ClientToolCallState.pending:
+          return 'Planning workflow changes'
+      }
+    }
+    if (mode === 'apply' || typeof params?.proposalId === 'string') {
+      switch (state) {
+        case ClientToolCallState.success:
+          return 'Applied workflow changes'
+        case ClientToolCallState.executing:
+        case ClientToolCallState.generating:
+        case ClientToolCallState.pending:
+          return 'Applying workflow changes'
+      }
+    }
+    return undefined
   },
   uiConfig: {
     isSpecial: true,
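A consumer of this metadata presumably prefers the mode-aware dynamic text and falls back to the static displayNames entry. A hedged sketch of such a resolver follows; the function itself is not part of this diff, and only the ToolMetadata fields used above are assumed to exist.

// Hedged sketch of a hypothetical display-text resolver for a tool call.
function resolveDisplayTextSketch(
  meta: ToolMetadata,
  params: Record<string, unknown> | undefined,
  state: ClientToolCallState
): string | undefined {
  const dynamic = meta.getDynamicText?.(params, state) // e.g. 'Planned workflow changes' for dry_run success
  return dynamic ?? meta.displayNames?.[state]?.text   // fall back to the static label
}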
@@ -609,6 +633,42 @@ const META_edit_workflow: ToolMetadata = {
   },
 }

+const META_workflow_context_get: ToolMetadata = {
+  displayNames: {
+    [ClientToolCallState.generating]: { text: 'Gathering workflow context', icon: Loader2 },
+    [ClientToolCallState.pending]: { text: 'Gathering workflow context', icon: Loader2 },
+    [ClientToolCallState.executing]: { text: 'Gathering workflow context', icon: Loader2 },
+    [ClientToolCallState.success]: { text: 'Gathered workflow context', icon: FileText },
+    [ClientToolCallState.error]: { text: 'Failed to gather workflow context', icon: XCircle },
+    [ClientToolCallState.rejected]: { text: 'Skipped workflow context', icon: MinusCircle },
+    [ClientToolCallState.aborted]: { text: 'Aborted workflow context', icon: MinusCircle },
+  },
+}
+
+const META_workflow_context_expand: ToolMetadata = {
+  displayNames: {
+    [ClientToolCallState.generating]: { text: 'Expanding workflow schemas', icon: Loader2 },
+    [ClientToolCallState.pending]: { text: 'Expanding workflow schemas', icon: Loader2 },
+    [ClientToolCallState.executing]: { text: 'Expanding workflow schemas', icon: Loader2 },
+    [ClientToolCallState.success]: { text: 'Expanded workflow schemas', icon: FileText },
+    [ClientToolCallState.error]: { text: 'Failed to expand workflow schemas', icon: XCircle },
+    [ClientToolCallState.rejected]: { text: 'Skipped schema expansion', icon: MinusCircle },
+    [ClientToolCallState.aborted]: { text: 'Aborted schema expansion', icon: MinusCircle },
+  },
+}
+
+const META_workflow_verify: ToolMetadata = {
+  displayNames: {
+    [ClientToolCallState.generating]: { text: 'Verifying workflow', icon: Loader2 },
+    [ClientToolCallState.pending]: { text: 'Verifying workflow', icon: Loader2 },
+    [ClientToolCallState.executing]: { text: 'Verifying workflow', icon: Loader2 },
+    [ClientToolCallState.success]: { text: 'Verified workflow', icon: CheckCircle2 },
+    [ClientToolCallState.error]: { text: 'Workflow verification failed', icon: XCircle },
+    [ClientToolCallState.rejected]: { text: 'Skipped workflow verification', icon: MinusCircle },
+    [ClientToolCallState.aborted]: { text: 'Aborted workflow verification', icon: MinusCircle },
+  },
+}
+
 const META_evaluate: ToolMetadata = {
   displayNames: {
     [ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
@@ -2541,7 +2601,12 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
   deploy_chat: META_deploy_chat,
   deploy_mcp: META_deploy_mcp,
   edit: META_edit,
-  edit_workflow: META_edit_workflow,
+  workflow_context_get: META_workflow_context_get,
+  workflow_context_expand: META_workflow_context_expand,
+  workflow_change: META_workflow_change,
+  workflow_verify: META_workflow_verify,
+  workflow_run: META_run_workflow,
+  workflow_deploy: META_deploy_api,
   evaluate: META_evaluate,
   get_block_config: META_get_block_config,
   get_block_options: META_get_block_options,
@@ -191,10 +191,10 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
     },
   },
   {
-    name: 'run_workflow',
-    toolId: 'run_workflow',
+    name: 'workflow_run',
+    toolId: 'workflow_run',
     description:
-      'Run a workflow and return its output. Works on both draft and deployed states. By default runs the draft (live) state.',
+      'Run a workflow using one unified interface. Supports full runs and partial execution modes.',
     inputSchema: {
       type: 'object',
       properties: {
@@ -202,112 +202,38 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
           type: 'string',
           description: 'REQUIRED. The workflow ID to run.',
         },
+        mode: {
+          type: 'string',
+          description: 'Execution mode: full, until_block, from_block, or block. Default: full.',
+          enum: ['full', 'until_block', 'from_block', 'block'],
+        },
         workflow_input: {
           type: 'object',
           description:
-            'JSON object with input values. Keys should match the workflow start block input field names.',
+            'JSON object with input values. Keys should match workflow start block input names.',
-        },
-        useDeployedState: {
-          type: 'boolean',
-          description: 'When true, runs the deployed version instead of the draft. Default: false.',
-        },
-      },
-      required: ['workflowId'],
-    },
-  },
-  {
-    name: 'run_workflow_until_block',
-    toolId: 'run_workflow_until_block',
-    description:
-      'Run a workflow and stop after a specific block completes. Useful for testing partial execution or debugging specific blocks.',
-    inputSchema: {
-      type: 'object',
-      properties: {
-        workflowId: {
-          type: 'string',
-          description: 'REQUIRED. The workflow ID to run.',
         },
         stopAfterBlockId: {
           type: 'string',
-          description:
-            'REQUIRED. The block ID to stop after. Execution halts once this block completes.',
+          description: 'Required when mode is until_block.',
-        },
-        workflow_input: {
-          type: 'object',
-          description: 'JSON object with input values for the workflow.',
-        },
-        useDeployedState: {
-          type: 'boolean',
-          description: 'When true, runs the deployed version instead of the draft. Default: false.',
-        },
-      },
-      required: ['workflowId', 'stopAfterBlockId'],
-    },
-  },
-  {
-    name: 'run_from_block',
-    toolId: 'run_from_block',
-    description:
-      'Run a workflow starting from a specific block, using cached outputs from a prior execution for upstream blocks. The workflow must have been run at least once first.',
-    inputSchema: {
-      type: 'object',
-      properties: {
-        workflowId: {
-          type: 'string',
-          description: 'REQUIRED. The workflow ID to run.',
         },
         startBlockId: {
           type: 'string',
-          description: 'REQUIRED. The block ID to start execution from.',
+          description: 'Required when mode is from_block.',
-        },
-        executionId: {
-          type: 'string',
-          description:
-            'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
-        },
-        workflow_input: {
-          type: 'object',
-          description: 'Optional input values for the workflow.',
-        },
-        useDeployedState: {
-          type: 'boolean',
-          description: 'When true, runs the deployed version instead of the draft. Default: false.',
-        },
-      },
-      required: ['workflowId', 'startBlockId'],
-    },
-  },
-  {
-    name: 'run_block',
-    toolId: 'run_block',
-    description:
-      'Run a single block in isolation using cached outputs from a prior execution. Only the specified block executes — nothing upstream or downstream. The workflow must have been run at least once first.',
-    inputSchema: {
-      type: 'object',
-      properties: {
-        workflowId: {
-          type: 'string',
-          description: 'REQUIRED. The workflow ID.',
         },
         blockId: {
           type: 'string',
-          description: 'REQUIRED. The block ID to run in isolation.',
+          description: 'Required when mode is block.',
         },
         executionId: {
           type: 'string',
-          description:
+          description: 'Optional execution snapshot ID for from_block or block modes.',
-            'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
-        },
-        workflow_input: {
-          type: 'object',
-          description: 'Optional input values for the workflow.',
         },
         useDeployedState: {
           type: 'boolean',
-          description: 'When true, runs the deployed version instead of the draft. Default: false.',
+          description: 'When true, runs deployed state instead of draft. Default: false.',
         },
       },
-      required: ['workflowId', 'blockId'],
+      required: ['workflowId'],
     },
   },
   {
@@ -531,10 +457,10 @@ ALSO CAN:
     description: `Run a workflow and verify its outputs. Works on both deployed and undeployed (draft) workflows. Use after building to verify correctness.

 Supports full and partial execution:
-- Full run with test inputs
-- Stop after a specific block (run_workflow_until_block)
-- Run a single block in isolation (run_block)
-- Resume from a specific block (run_from_block)`,
+- Full run with test inputs using workflow_run mode "full"
+- Stop after a specific block using workflow_run mode "until_block"
+- Run a single block in isolation using workflow_run mode "block"
+- Resume from a specific block using workflow_run mode "from_block"`,
     inputSchema: {
       type: 'object',
       properties: {
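The prompt text above names the four workflow_run modes without showing argument shapes. Hedged example argument payloads follow, one per mode; all IDs are placeholders introduced only for illustration.

// Hedged example workflow_run arguments (placeholder IDs).
const workflowRunArgExamples = [
  { workflowId: 'wf_123' },                                                   // mode defaults to "full"
  { workflowId: 'wf_123', mode: 'until_block', stopAfterBlockId: 'block_a' }, // stop after one block
  { workflowId: 'wf_123', mode: 'block', blockId: 'block_a' },                // single block in isolation
  { workflowId: 'wf_123', mode: 'from_block', startBlockId: 'block_b', executionId: 'exec_9' },
]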
@@ -109,7 +109,7 @@ function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
     return undefined
   }

-  // Return the actual option ID/value that edit_workflow expects, not the display label
+  // Return canonical option IDs/values expected by workflow_change compilation and apply
   return rawOptions
     .map((opt: any) => {
       if (!opt) return undefined
@@ -11,8 +11,13 @@ import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-
 import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
 import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
 import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
-import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
 import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
+import { workflowChangeServerTool } from '@/lib/copilot/tools/server/workflow/workflow-change'
+import {
+  workflowContextExpandServerTool,
+  workflowContextGetServerTool,
+} from '@/lib/copilot/tools/server/workflow/workflow-context'
+import { workflowVerifyServerTool } from '@/lib/copilot/tools/server/workflow/workflow-verify'
 import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

 export { ExecuteResponseSuccessSchema }
@@ -27,7 +32,6 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
   [getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
   [getBlockConfigServerTool.name]: getBlockConfigServerTool,
   [getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
-  [editWorkflowServerTool.name]: editWorkflowServerTool,
   [getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
   [searchDocumentationServerTool.name]: searchDocumentationServerTool,
   [searchOnlineServerTool.name]: searchOnlineServerTool,
@@ -35,6 +39,10 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
   [getCredentialsServerTool.name]: getCredentialsServerTool,
   [makeApiRequestServerTool.name]: makeApiRequestServerTool,
   [knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
+  [workflowContextGetServerTool.name]: workflowContextGetServerTool,
+  [workflowContextExpandServerTool.name]: workflowContextExpandServerTool,
+  [workflowChangeServerTool.name]: workflowChangeServerTool,
+  [workflowVerifyServerTool.name]: workflowVerifyServerTool,
 }

 /**

apps/sim/lib/copilot/tools/server/workflow/change-store.ts (new file, 185 lines)
@@ -0,0 +1,185 @@
import crypto from 'crypto'
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'

type StoreEntry<T> = {
  value: T
  expiresAt: number
}

const DEFAULT_TTL_MS = 30 * 60 * 1000
const MAX_ENTRIES = 500
const DEFAULT_TTL_SECONDS = Math.floor(DEFAULT_TTL_MS / 1000)
const CONTEXT_PREFIX = 'copilot:workflow_change:context'
const PROPOSAL_PREFIX = 'copilot:workflow_change:proposal'

const logger = createLogger('WorkflowChangeStore')

class TTLStore<T> {
  private readonly data = new Map<string, StoreEntry<T>>()

  constructor(private readonly ttlMs = DEFAULT_TTL_MS) {}

  set(value: T): string {
    this.gc()
    if (this.data.size >= MAX_ENTRIES) {
      const firstKey = this.data.keys().next().value as string | undefined
      if (firstKey) {
        this.data.delete(firstKey)
      }
    }
    const id = crypto.randomUUID()
    this.data.set(id, {
      value,
      expiresAt: Date.now() + this.ttlMs,
    })
    return id
  }

  get(id: string): T | null {
    const entry = this.data.get(id)
    if (!entry) return null
    if (entry.expiresAt <= Date.now()) {
      this.data.delete(id)
      return null
    }
    return entry.value
  }

  private gc(): void {
    const now = Date.now()
    for (const [key, entry] of this.data.entries()) {
      if (entry.expiresAt <= now) {
        this.data.delete(key)
      }
    }
  }
}

export type WorkflowContextPack = {
  workflowId: string
  snapshotHash: string
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
    loops: Record<string, any>
    parallels: Record<string, any>
  }
  schemasByType: Record<string, any>
  schemaRefsByType: Record<string, string>
  summary: Record<string, any>
}

export type WorkflowChangeProposal = {
  workflowId: string
  baseSnapshotHash: string
  compiledOperations: Array<Record<string, any>>
  diffSummary: Record<string, any>
  warnings: string[]
  diagnostics: string[]
  touchedBlocks: string[]
  acceptanceAssertions: string[]
  postApply?: {
    verify?: boolean
    run?: Record<string, any>
    evaluator?: Record<string, any>
  }
  handoff?: {
    objective?: string
    constraints?: string[]
    resolvedIds?: Record<string, string>
    assumptions?: string[]
    unresolvedRisks?: string[]
  }
}

const contextPackStore = new TTLStore<WorkflowContextPack>()
const proposalStore = new TTLStore<WorkflowChangeProposal>()

function getContextRedisKey(id: string): string {
  return `${CONTEXT_PREFIX}:${id}`
}

function getProposalRedisKey(id: string): string {
  return `${PROPOSAL_PREFIX}:${id}`
}

async function writeRedisJson(key: string, value: unknown): Promise<void> {
  const redis = getRedisClient()!
  await redis.set(key, JSON.stringify(value), 'EX', DEFAULT_TTL_SECONDS)
}

async function readRedisJson<T>(key: string): Promise<T | null> {
  const redis = getRedisClient()!

  const raw = await redis.get(key)
  if (!raw) {
    return null
  }

  try {
    return JSON.parse(raw) as T
  } catch (error) {
    logger.warn('Failed parsing workflow change store JSON payload', { key, error })
    await redis.del(key).catch(() => {})
    return null
  }
}

export async function saveContextPack(pack: WorkflowContextPack): Promise<string> {
  if (!getRedisClient()) {
    return contextPackStore.set(pack)
  }
  const id = crypto.randomUUID()
  try {
    await writeRedisJson(getContextRedisKey(id), pack)
    return id
  } catch (error) {
    logger.warn('Redis write failed for workflow context pack, using memory fallback', { error })
    return contextPackStore.set(pack)
  }
}

export async function getContextPack(id: string): Promise<WorkflowContextPack | null> {
  if (!getRedisClient()) {
    return contextPackStore.get(id)
  }
  try {
    const redisPayload = await readRedisJson<WorkflowContextPack>(getContextRedisKey(id))
    if (redisPayload) {
      return redisPayload
    }
  } catch (error) {
    logger.warn('Redis read failed for workflow context pack, using memory fallback', { error })
  }
  return contextPackStore.get(id)
}

export async function saveProposal(proposal: WorkflowChangeProposal): Promise<string> {
  if (!getRedisClient()) {
    return proposalStore.set(proposal)
  }
  const id = crypto.randomUUID()
  try {
    await writeRedisJson(getProposalRedisKey(id), proposal)
    return id
  } catch (error) {
    logger.warn('Redis write failed for workflow proposal, using memory fallback', { error })
    return proposalStore.set(proposal)
  }
}

export async function getProposal(id: string): Promise<WorkflowChangeProposal | null> {
  if (!getRedisClient()) {
    return proposalStore.get(id)
  }
  try {
    const redisPayload = await readRedisJson<WorkflowChangeProposal>(getProposalRedisKey(id))
    if (redisPayload) {
      return redisPayload
    }
  } catch (error) {
    logger.warn('Redis read failed for workflow proposal, using memory fallback', { error })
  }
  return proposalStore.get(id)
}
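The store above keeps context packs and change proposals in Redis when available and falls back to the in-memory TTL map otherwise. A hedged round-trip sketch follows; the field values are placeholders shaped like WorkflowChangeProposal, and the surrounding function is illustrative rather than part of the diff.

// Hedged sketch: save a proposal during a dry run, reload it when applying.
async function proposalRoundTripSketch(): Promise<void> {
  const proposalId = await saveProposal({
    workflowId: 'wf_123',        // placeholder
    baseSnapshotHash: 'hash_abc',
    compiledOperations: [],
    diffSummary: {},
    warnings: [],
    diagnostics: [],
    touchedBlocks: [],
    acceptanceAssertions: [],
  })

  // Later (e.g. a workflow_change apply step): null means the TTL expired or the entry is gone.
  const proposal = await getProposal(proposalId)
  if (!proposal) {
    throw new Error('Proposal expired or not found; re-run the dry run')
  }
}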
@@ -68,8 +68,8 @@ async function getCurrentWorkflowStateFromDb(
|
|||||||
return { workflowState, subBlockValues }
|
return { workflowState, subBlockValues }
|
||||||
}
|
}
|
||||||
|
|
||||||
export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
|
export const applyWorkflowOperationsServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
|
||||||
name: 'edit_workflow',
|
name: '__internal_apply_workflow_operations',
|
||||||
async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<unknown> {
|
async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<unknown> {
|
||||||
const logger = createLogger('EditWorkflowServerTool')
|
const logger = createLogger('EditWorkflowServerTool')
|
||||||
const { operations, workflowId, currentUserWorkflow } = params
|
const { operations, workflowId, currentUserWorkflow } = params
|
||||||
@@ -90,7 +90,7 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown>
|
|||||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
throw new Error(authorization.message || 'Unauthorized workflow access')
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('Executing edit_workflow', {
|
logger.info('Executing internal workflow operation apply', {
|
||||||
operationCount: operations.length,
|
operationCount: operations.length,
|
||||||
workflowId,
|
workflowId,
|
||||||
hasCurrentUserWorkflow: !!currentUserWorkflow,
|
hasCurrentUserWorkflow: !!currentUserWorkflow,
|
||||||
@@ -210,7 +210,7 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown>
|
|||||||
logger.warn('No userId in context - skipping custom tools persistence', { workflowId })
|
logger.warn('No userId in context - skipping custom tools persistence', { workflowId })
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('edit_workflow successfully applied operations', {
|
logger.info('Internal workflow operation apply succeeded', {
|
||||||
operationCount: operations.length,
|
operationCount: operations.length,
|
||||||
blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
|
blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
|
||||||
edgesCount: modifiedWorkflowState.edges.length,
|
edgesCount: modifiedWorkflowState.edges.length,
|
||||||
|
|||||||
1367
apps/sim/lib/copilot/tools/server/workflow/workflow-change.ts
Normal file
1367
apps/sim/lib/copilot/tools/server/workflow/workflow-change.ts
Normal file
File diff suppressed because it is too large
Load Diff
166
apps/sim/lib/copilot/tools/server/workflow/workflow-context.ts
Normal file
166
apps/sim/lib/copilot/tools/server/workflow/workflow-context.ts
Normal file
@@ -0,0 +1,166 @@
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { getContextPack, saveContextPack } from './change-store'
import {
  buildSchemasByType,
  getAllKnownBlockTypes,
  hashWorkflowState,
  loadWorkflowStateFromDb,
  summarizeWorkflowState,
} from './workflow-state'

const logger = createLogger('WorkflowContextServerTool')

const WorkflowContextGetInputSchema = z.object({
  workflowId: z.string(),
  objective: z.string().optional(),
  includeBlockTypes: z.array(z.string()).optional(),
  includeAllSchemas: z.boolean().optional(),
  schemaMode: z.enum(['minimal', 'workflow', 'all']).optional(),
})

type WorkflowContextGetParams = z.infer<typeof WorkflowContextGetInputSchema>

const WorkflowContextExpandInputSchema = z.object({
  contextPackId: z.string(),
  blockTypes: z.array(z.string()).optional(),
  schemaRefs: z.array(z.string()).optional(),
})

type WorkflowContextExpandParams = z.infer<typeof WorkflowContextExpandInputSchema>

function parseSchemaRefToBlockType(schemaRef: string): string | null {
  if (!schemaRef) return null
  const [blockType] = schemaRef.split('@')
  return blockType || null
}

function buildAvailableBlockCatalog(
  schemaRefsByType: Record<string, string>
): Array<Record<string, any>> {
  return Object.entries(schemaRefsByType)
    .sort((a, b) => a[0].localeCompare(b[0]))
    .map(([blockType, schemaRef]) => ({
      blockType,
      schemaRef,
    }))
}

export const workflowContextGetServerTool: BaseServerTool<WorkflowContextGetParams, any> = {
  name: 'workflow_context_get',
  inputSchema: WorkflowContextGetInputSchema,
  async execute(params: WorkflowContextGetParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: params.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
    const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)

    const blockTypesInWorkflow = Object.values(workflowState.blocks || {}).map((block: any) =>
      String(block?.type || '')
    )
    const requestedTypes = params.includeBlockTypes || []
    const schemaMode =
      params.includeAllSchemas === true ? 'all' : (params.schemaMode || 'minimal')
    const candidateTypes =
      schemaMode === 'all'
        ? getAllKnownBlockTypes()
        : schemaMode === 'workflow'
          ? [...blockTypesInWorkflow, ...requestedTypes]
          : [...requestedTypes]
    const { schemasByType, schemaRefsByType } = buildSchemasByType(candidateTypes)
    const suggestedSchemaTypes = [...new Set(blockTypesInWorkflow.filter(Boolean))]

    const summary = summarizeWorkflowState(workflowState)
    const packId = await saveContextPack({
      workflowId: params.workflowId,
      snapshotHash,
      workflowState,
      schemasByType,
      schemaRefsByType,
      summary: {
        ...summary,
        objective: params.objective || null,
      },
    })

    logger.info('Generated workflow context pack', {
      workflowId: params.workflowId,
      contextPackId: packId,
      schemaCount: Object.keys(schemaRefsByType).length,
    })

    return {
      success: true,
      contextPackId: packId,
      workflowId: params.workflowId,
      snapshotHash,
      schemaMode,
      summary: {
        ...summary,
        objective: params.objective || null,
      },
      schemaRefsByType,
      availableBlockCatalog: buildAvailableBlockCatalog(schemaRefsByType),
      suggestedSchemaTypes,
      inScopeSchemas: schemasByType,
    }
  },
}

export const workflowContextExpandServerTool: BaseServerTool<WorkflowContextExpandParams, any> = {
  name: 'workflow_context_expand',
  inputSchema: WorkflowContextExpandInputSchema,
  async execute(params: WorkflowContextExpandParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const contextPack = await getContextPack(params.contextPackId)
    if (!contextPack) {
      throw new Error(`Context pack not found or expired: ${params.contextPackId}`)
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: contextPack.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const requestedBlockTypes = new Set<string>()
    for (const blockType of params.blockTypes || []) {
      if (blockType) requestedBlockTypes.add(blockType)
    }
    for (const schemaRef of params.schemaRefs || []) {
      const blockType = parseSchemaRefToBlockType(schemaRef)
      if (blockType) requestedBlockTypes.add(blockType)
    }

    const typesToExpand = [...requestedBlockTypes]
    const { schemasByType, schemaRefsByType } = buildSchemasByType(typesToExpand)

    return {
      success: true,
      contextPackId: params.contextPackId,
      workflowId: contextPack.workflowId,
      snapshotHash: contextPack.snapshotHash,
      schemasByType,
      schemaRefsByType,
    }
  },
}
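Read together, the two tools above form a two-step protocol: workflow_context_get snapshots the workflow, saves a context pack, and returns lightweight schema refs plus a block catalog; workflow_context_expand later trades those refs (or raw block types) for the full schemas. A minimal sketch of how a caller might chain them, assuming the import path below, a caller-supplied userId with read access, and an illustrative 'slack' block type (none of these are confirmed by this diff):

import {
  workflowContextExpandServerTool,
  workflowContextGetServerTool,
} from '@/lib/copilot/tools/server/workflow/workflow-context' // assumed path

async function fetchSlackSchemas(workflowId: string, userId: string) {
  // Step 1: minimal context pack, no schemas inlined yet.
  const pack = await workflowContextGetServerTool.execute(
    { workflowId, schemaMode: 'minimal', objective: 'add a Slack notification' },
    { userId }
  )

  // Step 2: expand only the block types that are actually needed.
  const expanded = await workflowContextExpandServerTool.execute(
    { contextPackId: pack.contextPackId, blockTypes: ['slack'] }, // 'slack' is an assumed block type
    { userId }
  )
  return expanded.schemasByType
}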
apps/sim/lib/copilot/tools/server/workflow/workflow-state.ts (226 lines, new file)
@@ -0,0 +1,226 @@
|
|||||||
|
import crypto from 'crypto'
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { workflow as workflowTable } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||||
|
import { getAllBlockTypes, getBlock } from '@/blocks/registry'
|
||||||
|
import type { SubBlockConfig } from '@/blocks/types'
|
||||||
|
|
||||||
|
const logger = createLogger('WorkflowContextState')
|
||||||
|
|
||||||
|
function stableSortValue(value: any): any {
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value.map(stableSortValue)
|
||||||
|
}
|
||||||
|
if (value && typeof value === 'object') {
|
||||||
|
const sorted: Record<string, any> = {}
|
||||||
|
for (const key of Object.keys(value).sort()) {
|
||||||
|
sorted[key] = stableSortValue(value[key])
|
||||||
|
}
|
||||||
|
return sorted
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
export function hashWorkflowState(state: Record<string, unknown>): string {
|
||||||
|
const stable = stableSortValue(state)
|
||||||
|
const payload = JSON.stringify(stable)
|
||||||
|
return `sha256:${crypto.createHash('sha256').update(payload).digest('hex')}`
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeOptions(options: unknown): string[] | null {
|
||||||
|
if (!Array.isArray(options)) return null
|
||||||
|
const normalized = options
|
||||||
|
.map((option) => {
|
||||||
|
if (option == null) return null
|
||||||
|
if (typeof option === 'object') {
|
||||||
|
const optionRecord = option as Record<string, unknown>
|
||||||
|
const id = optionRecord.id
|
||||||
|
if (typeof id === 'string') return id
|
||||||
|
const label = optionRecord.label
|
||||||
|
if (typeof label === 'string') return label
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return String(option)
|
||||||
|
})
|
||||||
|
.filter((value): value is string => Boolean(value))
|
||||||
|
return normalized.length > 0 ? normalized : null
|
||||||
|
}
|
||||||
|
|
||||||
|
function serializeRequired(required: SubBlockConfig['required']): boolean | Record<string, any> {
|
||||||
|
if (typeof required === 'boolean') return required
|
||||||
|
if (!required) return false
|
||||||
|
if (typeof required === 'object') {
|
||||||
|
const out: Record<string, any> = {}
|
||||||
|
const record = required as Record<string, unknown>
|
||||||
|
for (const key of ['field', 'operator', 'value']) {
|
||||||
|
if (record[key] !== undefined) {
|
||||||
|
out[key] = record[key]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
function serializeSubBlock(subBlock: SubBlockConfig): Record<string, unknown> {
|
||||||
|
const staticOptions =
|
||||||
|
typeof subBlock.options === 'function' ? null : normalizeOptions(subBlock.options)
|
||||||
|
return {
|
||||||
|
id: subBlock.id,
|
||||||
|
type: subBlock.type,
|
||||||
|
title: subBlock.title,
|
||||||
|
description: subBlock.description || null,
|
||||||
|
mode: subBlock.mode || null,
|
||||||
|
placeholder: subBlock.placeholder || null,
|
||||||
|
hidden: Boolean(subBlock.hidden),
|
||||||
|
multiSelect: Boolean(subBlock.multiSelect),
|
||||||
|
required: serializeRequired(subBlock.required),
|
||||||
|
hasDynamicOptions: typeof subBlock.options === 'function',
|
||||||
|
options: staticOptions,
|
||||||
|
defaultValue: subBlock.defaultValue ?? null,
|
||||||
|
min: subBlock.min ?? null,
|
||||||
|
max: subBlock.max ?? null,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function serializeBlockSchema(blockType: string): Record<string, unknown> | null {
|
||||||
|
const blockConfig = getBlock(blockType)
|
||||||
|
if (!blockConfig) return null
|
||||||
|
|
||||||
|
const subBlocks = Array.isArray(blockConfig.subBlocks)
|
||||||
|
? blockConfig.subBlocks.map(serializeSubBlock)
|
||||||
|
: []
|
||||||
|
const outputs = blockConfig.outputs || {}
|
||||||
|
const outputKeys = Object.keys(outputs)
|
||||||
|
|
||||||
|
return {
|
||||||
|
blockType,
|
||||||
|
blockName: blockConfig.name || blockType,
|
||||||
|
category: blockConfig.category,
|
||||||
|
triggerAllowed: Boolean(blockConfig.triggerAllowed || blockConfig.triggers?.enabled),
|
||||||
|
hasTriggersConfig: Boolean(blockConfig.triggers?.enabled),
|
||||||
|
subBlocks,
|
||||||
|
outputKeys,
|
||||||
|
longDescription: blockConfig.longDescription || null,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildSchemasByType(blockTypes: string[]): {
|
||||||
|
schemasByType: Record<string, any>
|
||||||
|
schemaRefsByType: Record<string, string>
|
||||||
|
} {
|
||||||
|
const schemasByType: Record<string, any> = {}
|
||||||
|
const schemaRefsByType: Record<string, string> = {}
|
||||||
|
|
||||||
|
const uniqueTypes = [...new Set(blockTypes.filter(Boolean))]
|
||||||
|
for (const blockType of uniqueTypes) {
|
||||||
|
const schema = serializeBlockSchema(blockType)
|
||||||
|
if (!schema) continue
|
||||||
|
const stableSchema = stableSortValue(schema)
|
||||||
|
const schemaHash = crypto
|
||||||
|
.createHash('sha256')
|
||||||
|
.update(JSON.stringify(stableSchema))
|
||||||
|
.digest('hex')
|
||||||
|
schemasByType[blockType] = stableSchema
|
||||||
|
schemaRefsByType[blockType] = `${blockType}@sha256:${schemaHash}`
|
||||||
|
}
|
||||||
|
|
||||||
|
return { schemasByType, schemaRefsByType }
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function loadWorkflowStateFromDb(workflowId: string): Promise<{
|
||||||
|
workflowState: {
|
||||||
|
blocks: Record<string, any>
|
||||||
|
edges: Array<Record<string, any>>
|
||||||
|
loops: Record<string, any>
|
||||||
|
parallels: Record<string, any>
|
||||||
|
}
|
||||||
|
workspaceId?: string
|
||||||
|
}> {
|
||||||
|
const [workflowRecord] = await db
|
||||||
|
.select({ workspaceId: workflowTable.workspaceId })
|
||||||
|
.from(workflowTable)
|
||||||
|
.where(eq(workflowTable.id, workflowId))
|
||||||
|
.limit(1)
|
||||||
|
if (!workflowRecord) {
|
||||||
|
throw new Error(`Workflow ${workflowId} not found`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||||
|
if (!normalized) {
|
||||||
|
throw new Error(`Workflow ${workflowId} has no normalized data`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const blocks = { ...normalized.blocks }
|
||||||
|
const invalidBlockIds: string[] = []
|
||||||
|
for (const [blockId, block] of Object.entries(blocks)) {
|
||||||
|
if (!(block as { type?: unknown })?.type) {
|
||||||
|
invalidBlockIds.push(blockId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const blockId of invalidBlockIds) {
|
||||||
|
delete blocks[blockId]
|
||||||
|
}
|
||||||
|
|
||||||
|
const invalidSet = new Set(invalidBlockIds)
|
||||||
|
const edges = (normalized.edges || []).filter(
|
||||||
|
(edge: any) => !invalidSet.has(edge.source) && !invalidSet.has(edge.target)
|
||||||
|
)
|
||||||
|
|
||||||
|
if (invalidBlockIds.length > 0) {
|
||||||
|
logger.warn('Dropped blocks without type while loading workflow state', {
|
||||||
|
workflowId,
|
||||||
|
dropped: invalidBlockIds,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
workflowState: {
|
||||||
|
blocks,
|
||||||
|
edges,
|
||||||
|
loops: normalized.loops || {},
|
||||||
|
parallels: normalized.parallels || {},
|
||||||
|
},
|
||||||
|
workspaceId: workflowRecord.workspaceId || undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function summarizeWorkflowState(workflowState: {
|
||||||
|
blocks: Record<string, any>
|
||||||
|
edges: Array<Record<string, any>>
|
||||||
|
loops: Record<string, any>
|
||||||
|
parallels: Record<string, any>
|
||||||
|
}): Record<string, unknown> {
|
||||||
|
const blocks = workflowState.blocks || {}
|
||||||
|
const edges = workflowState.edges || []
|
||||||
|
const blockTypes: Record<string, number> = {}
|
||||||
|
const triggerBlocks: Array<{ id: string; name: string; type: string }> = []
|
||||||
|
|
||||||
|
for (const [blockId, block] of Object.entries(blocks)) {
|
||||||
|
const blockType = String((block as Record<string, unknown>).type || 'unknown')
|
||||||
|
blockTypes[blockType] = (blockTypes[blockType] || 0) + 1
|
||||||
|
if ((block as Record<string, unknown>).triggerMode === true) {
|
||||||
|
triggerBlocks.push({
|
||||||
|
id: blockId,
|
||||||
|
name: String((block as Record<string, unknown>).name || blockType),
|
||||||
|
type: blockType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
blockCount: Object.keys(blocks).length,
|
||||||
|
edgeCount: edges.length,
|
||||||
|
loopCount: Object.keys(workflowState.loops || {}).length,
|
||||||
|
parallelCount: Object.keys(workflowState.parallels || {}).length,
|
||||||
|
blockTypes,
|
||||||
|
triggerBlocks,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getAllKnownBlockTypes(): string[] {
|
||||||
|
return getAllBlockTypes()
|
||||||
|
}
|
||||||
apps/sim/lib/copilot/tools/server/workflow/workflow-verify.ts (194 lines, new file)
@@ -0,0 +1,194 @@
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'

const logger = createLogger('WorkflowVerifyServerTool')

const AcceptanceItemSchema = z.union([
  z.string(),
  z.object({
    kind: z.string().optional(),
    assert: z.string(),
  }),
])

const WorkflowVerifyInputSchema = z
  .object({
    workflowId: z.string(),
    acceptance: z.array(AcceptanceItemSchema).optional(),
    baseSnapshotHash: z.string().optional(),
  })
  .strict()

type WorkflowVerifyParams = z.infer<typeof WorkflowVerifyInputSchema>

function normalizeName(value: string): string {
  return value.trim().toLowerCase()
}

function resolveBlockToken(
  workflowState: { blocks: Record<string, any> },
  token: string
): string | null {
  if (!token) return null
  if (workflowState.blocks[token]) return token
  const normalized = normalizeName(token)
  for (const [blockId, block] of Object.entries(workflowState.blocks || {})) {
    const blockName = normalizeName(String((block as Record<string, unknown>).name || ''))
    if (blockName === normalized) return blockId
  }
  return null
}

function hasPath(
  workflowState: { edges: Array<Record<string, any>> },
  blockPath: string[]
): boolean {
  if (blockPath.length < 2) return true
  const adjacency = new Map<string, string[]>()
  for (const edge of workflowState.edges || []) {
    const source = String(edge.source || '')
    const target = String(edge.target || '')
    if (!source || !target) continue
    const existing = adjacency.get(source) || []
    existing.push(target)
    adjacency.set(source, existing)
  }

  for (let i = 0; i < blockPath.length - 1; i++) {
    const from = blockPath[i]
    const to = blockPath[i + 1]
    const next = adjacency.get(from) || []
    if (!next.includes(to)) return false
  }
  return true
}

function evaluateAssertions(params: {
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
  }
  assertions: string[]
}): { failures: string[]; checks: Array<Record<string, any>> } {
  const failures: string[] = []
  const checks: Array<Record<string, any>> = []

  for (const assertion of params.assertions) {
    if (assertion.startsWith('block_exists:')) {
      const token = assertion.slice('block_exists:'.length).trim()
      const blockId = resolveBlockToken(params.workflowState, token)
      const passed = Boolean(blockId)
      checks.push({ assert: assertion, passed, resolvedBlockId: blockId || null })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    if (assertion.startsWith('trigger_exists:')) {
      const triggerType = normalizeName(assertion.slice('trigger_exists:'.length))
      const triggerBlock = Object.values(params.workflowState.blocks || {}).find((block: any) => {
        if (block?.triggerMode !== true) return false
        return normalizeName(String(block?.type || '')) === triggerType
      })
      const passed = Boolean(triggerBlock)
      checks.push({ assert: assertion, passed })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    if (assertion.startsWith('path_exists:')) {
      const rawPath = assertion.slice('path_exists:'.length).trim()
      const tokens = rawPath
        .split('->')
        .map((token) => token.trim())
        .filter(Boolean)
      const resolvedPath = tokens
        .map((token) => resolveBlockToken(params.workflowState, token))
        .filter((value): value is string => Boolean(value))

      const resolvedAll = resolvedPath.length === tokens.length
      const passed = resolvedAll && hasPath(params.workflowState, resolvedPath)
      checks.push({
        assert: assertion,
        passed,
        resolvedPath,
      })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    // Unknown assertion format - mark as warning failure for explicit visibility.
    checks.push({ assert: assertion, passed: false, reason: 'unknown_assertion_type' })
    failures.push(`Unknown assertion format: ${assertion}`)
  }

  return { failures, checks }
}

export const workflowVerifyServerTool: BaseServerTool<WorkflowVerifyParams, any> = {
  name: 'workflow_verify',
  inputSchema: WorkflowVerifyInputSchema,
  async execute(params: WorkflowVerifyParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: params.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
    const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
    if (params.baseSnapshotHash && params.baseSnapshotHash !== snapshotHash) {
      return {
        success: false,
        verified: false,
        reason: 'snapshot_mismatch',
        expected: params.baseSnapshotHash,
        current: snapshotHash,
      }
    }

    const validation = validateWorkflowState(workflowState as any, { sanitize: false })

    const assertions = (params.acceptance || []).map((item) =>
      typeof item === 'string' ? item : item.assert
    )
    const assertionResults = evaluateAssertions({
      workflowState,
      assertions,
    })

    const verified =
      validation.valid && assertionResults.failures.length === 0 && validation.errors.length === 0

    logger.info('Workflow verification complete', {
      workflowId: params.workflowId,
      verified,
      errorCount: validation.errors.length,
      warningCount: validation.warnings.length,
      assertionFailures: assertionResults.failures.length,
    })

    return {
      success: true,
      verified,
      snapshotHash,
      validation: {
        valid: validation.valid,
        errors: validation.errors,
        warnings: validation.warnings,
      },
      assertions: assertionResults.checks,
      failures: assertionResults.failures,
    }
  },
}
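The acceptance items accepted by workflow_verify are plain assertion strings with three recognized prefixes (block_exists:, trigger_exists:, path_exists:); anything else is reported as an unknown assertion. A hedged sketch of a call, assuming the import path below, a hypothetical workflow and user id, and block names/types that exist in the target workflow:

import { workflowVerifyServerTool } from '@/lib/copilot/tools/server/workflow/workflow-verify' // assumed path

async function verifySlackWorkflow() {
  const result = await workflowVerifyServerTool.execute(
    {
      workflowId: 'wf_123', // hypothetical id
      acceptance: [
        'block_exists:Send Slack Message', // matched by block id or case-insensitive block name
        { kind: 'trigger', assert: 'trigger_exists:slack_webhook' },
        'path_exists:Start -> Send Slack Message', // every hop must be a direct edge
      ],
      baseSnapshotHash: 'sha256:<previous hash>', // optional stale-snapshot guard
    },
    { userId: 'user_123' } // hypothetical id
  )
  // result.verified is true only when validation passes and every assertion holds.
  return result.verified
}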
@@ -1,246 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { getRedisClient } from '@/lib/core/config/redis'
|
|
||||||
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
|
|
||||||
|
|
||||||
const logger = createLogger('ExecutionEventBuffer')
|
|
||||||
|
|
||||||
const REDIS_PREFIX = 'execution:stream:'
|
|
||||||
const TTL_SECONDS = 60 * 60 // 1 hour
|
|
||||||
const EVENT_LIMIT = 1000
|
|
||||||
const RESERVE_BATCH = 100
|
|
||||||
const FLUSH_INTERVAL_MS = 15
|
|
||||||
const FLUSH_MAX_BATCH = 200
|
|
||||||
|
|
||||||
function getEventsKey(executionId: string) {
|
|
||||||
return `${REDIS_PREFIX}${executionId}:events`
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSeqKey(executionId: string) {
|
|
||||||
return `${REDIS_PREFIX}${executionId}:seq`
|
|
||||||
}
|
|
||||||
|
|
||||||
function getMetaKey(executionId: string) {
|
|
||||||
return `${REDIS_PREFIX}${executionId}:meta`
|
|
||||||
}
|
|
||||||
|
|
||||||
export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'
|
|
||||||
|
|
||||||
export interface ExecutionStreamMeta {
|
|
||||||
status: ExecutionStreamStatus
|
|
||||||
userId?: string
|
|
||||||
workflowId?: string
|
|
||||||
updatedAt?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ExecutionEventEntry {
|
|
||||||
eventId: number
|
|
||||||
executionId: string
|
|
||||||
event: ExecutionEvent
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ExecutionEventWriter {
|
|
||||||
write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
|
|
||||||
flush: () => Promise<void>
|
|
||||||
close: () => Promise<void>
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function setExecutionMeta(
|
|
||||||
executionId: string,
|
|
||||||
meta: Partial<ExecutionStreamMeta>
|
|
||||||
): Promise<void> {
|
|
||||||
const redis = getRedisClient()
|
|
||||||
if (!redis) {
|
|
||||||
logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const key = getMetaKey(executionId)
|
|
||||||
const payload: Record<string, string> = {
|
|
||||||
updatedAt: new Date().toISOString(),
|
|
||||||
}
|
|
||||||
if (meta.status) payload.status = meta.status
|
|
||||||
if (meta.userId) payload.userId = meta.userId
|
|
||||||
if (meta.workflowId) payload.workflowId = meta.workflowId
|
|
||||||
await redis.hset(key, payload)
|
|
||||||
await redis.expire(key, TTL_SECONDS)
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Failed to update execution meta', {
|
|
||||||
executionId,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
|
|
||||||
const redis = getRedisClient()
|
|
||||||
if (!redis) {
|
|
||||||
logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const key = getMetaKey(executionId)
|
|
||||||
const meta = await redis.hgetall(key)
|
|
||||||
if (!meta || Object.keys(meta).length === 0) return null
|
|
||||||
return meta as unknown as ExecutionStreamMeta
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Failed to read execution meta', {
|
|
||||||
executionId,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function readExecutionEvents(
|
|
||||||
executionId: string,
|
|
||||||
afterEventId: number
|
|
||||||
): Promise<ExecutionEventEntry[]> {
|
|
||||||
const redis = getRedisClient()
|
|
||||||
if (!redis) return []
|
|
||||||
try {
|
|
||||||
const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
|
|
||||||
return raw
|
|
||||||
.map((entry) => {
|
|
||||||
try {
|
|
||||||
return JSON.parse(entry) as ExecutionEventEntry
|
|
||||||
} catch {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.filter((entry): entry is ExecutionEventEntry => Boolean(entry))
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Failed to read execution events', {
|
|
||||||
executionId,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
|
|
||||||
const redis = getRedisClient()
|
|
||||||
if (!redis) {
|
|
||||||
logger.warn(
|
|
||||||
'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
|
|
||||||
{
|
|
||||||
executionId,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
write: async (event) => ({ eventId: 0, executionId, event }),
|
|
||||||
flush: async () => {},
|
|
||||||
close: async () => {},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let pending: ExecutionEventEntry[] = []
|
|
||||||
let nextEventId = 0
|
|
||||||
let maxReservedId = 0
|
|
||||||
let flushTimer: ReturnType<typeof setTimeout> | null = null
|
|
||||||
|
|
||||||
const scheduleFlush = () => {
|
|
||||||
if (flushTimer) return
|
|
||||||
flushTimer = setTimeout(() => {
|
|
||||||
flushTimer = null
|
|
||||||
void flush()
|
|
||||||
}, FLUSH_INTERVAL_MS)
|
|
||||||
}
|
|
||||||
|
|
||||||
const reserveIds = async (minCount: number) => {
|
|
||||||
const reserveCount = Math.max(RESERVE_BATCH, minCount)
|
|
||||||
const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
|
|
||||||
const startId = newMax - reserveCount + 1
|
|
||||||
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
|
||||||
nextEventId = startId
|
|
||||||
maxReservedId = newMax
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let flushPromise: Promise<void> | null = null
|
|
||||||
let closed = false
|
|
||||||
const inflightWrites = new Set<Promise<ExecutionEventEntry>>()
|
|
||||||
|
|
||||||
const doFlush = async () => {
|
|
||||||
if (pending.length === 0) return
|
|
||||||
const batch = pending
|
|
||||||
pending = []
|
|
||||||
try {
|
|
||||||
const key = getEventsKey(executionId)
|
|
||||||
const zaddArgs: (string | number)[] = []
|
|
||||||
for (const entry of batch) {
|
|
||||||
zaddArgs.push(entry.eventId, JSON.stringify(entry))
|
|
||||||
}
|
|
||||||
const pipeline = redis.pipeline()
|
|
||||||
pipeline.zadd(key, ...zaddArgs)
|
|
||||||
pipeline.expire(key, TTL_SECONDS)
|
|
||||||
pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
|
|
||||||
pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
|
|
||||||
await pipeline.exec()
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Failed to flush execution events', {
|
|
||||||
executionId,
|
|
||||||
batchSize: batch.length,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
pending = batch.concat(pending)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const flush = async () => {
|
|
||||||
if (flushPromise) {
|
|
||||||
await flushPromise
|
|
||||||
return
|
|
||||||
}
|
|
||||||
flushPromise = doFlush()
|
|
||||||
try {
|
|
||||||
await flushPromise
|
|
||||||
} finally {
|
|
||||||
flushPromise = null
|
|
||||||
if (pending.length > 0) scheduleFlush()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
|
|
||||||
if (closed) return { eventId: 0, executionId, event }
|
|
||||||
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
|
||||||
await reserveIds(1)
|
|
||||||
}
|
|
||||||
const eventId = nextEventId++
|
|
||||||
const entry: ExecutionEventEntry = { eventId, executionId, event }
|
|
||||||
pending.push(entry)
|
|
||||||
if (pending.length >= FLUSH_MAX_BATCH) {
|
|
||||||
await flush()
|
|
||||||
} else {
|
|
||||||
scheduleFlush()
|
|
||||||
}
|
|
||||||
return entry
|
|
||||||
}
|
|
||||||
|
|
||||||
const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
|
|
||||||
const p = writeCore(event)
|
|
||||||
inflightWrites.add(p)
|
|
||||||
const remove = () => inflightWrites.delete(p)
|
|
||||||
p.then(remove, remove)
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
const close = async () => {
|
|
||||||
closed = true
|
|
||||||
if (flushTimer) {
|
|
||||||
clearTimeout(flushTimer)
|
|
||||||
flushTimer = null
|
|
||||||
}
|
|
||||||
if (inflightWrites.size > 0) {
|
|
||||||
await Promise.allSettled(inflightWrites)
|
|
||||||
}
|
|
||||||
if (flushPromise) {
|
|
||||||
await flushPromise
|
|
||||||
}
|
|
||||||
if (pending.length > 0) {
|
|
||||||
await doFlush()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { write, flush, close }
|
|
||||||
}
|
|
||||||
@@ -2364,261 +2364,6 @@ describe('hasWorkflowChanged', () => {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('Trigger Config Normalization (False Positive Prevention)', () => {
|
|
||||||
it.concurrent(
|
|
||||||
'should not detect change when deployed has null fields but current has values from triggerConfig',
|
|
||||||
() => {
|
|
||||||
// Core scenario: deployed state has null individual fields, current state has
|
|
||||||
// values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should detect change when user edits a trigger field to a different value',
|
|
||||||
() => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'old-secret' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'old-secret' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent('should not detect change when both sides have no triggerConfig', () => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should not detect change when deployed has empty fields and triggerConfig populates them',
|
|
||||||
() => {
|
|
||||||
// Empty string is also treated as "empty" by normalizeTriggerConfigValues
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent('should not detect change when triggerId differs', () => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
triggerId: { value: null },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
triggerId: { value: 'slack_webhook' },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
|
|
||||||
() => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
samplePayload_slack_webhook: { value: 'old payload' },
|
|
||||||
triggerInstructions_slack_webhook: { value: 'old instructions' },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
samplePayload_slack_webhook: { value: 'new payload' },
|
|
||||||
triggerInstructions_slack_webhook: { value: 'new instructions' },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should handle mixed scenario: some fields from triggerConfig, some user-edited',
|
|
||||||
() => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
includeFiles: { id: 'includeFiles', type: 'switch', value: false },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
|
||||||
includeFiles: { id: 'includeFiles', type: 'switch', value: true },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
// includeFiles changed from false to true — this IS a real change
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
|
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
|
||||||
it.concurrent('should not detect change when webhookId differs', () => {
|
it.concurrent('should not detect change when webhookId differs', () => {
|
||||||
const deployedState = createWorkflowState({
|
const deployedState = createWorkflowState({
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ import {
|
|||||||
normalizeLoop,
|
normalizeLoop,
|
||||||
normalizeParallel,
|
normalizeParallel,
|
||||||
normalizeSubBlockValue,
|
normalizeSubBlockValue,
|
||||||
normalizeTriggerConfigValues,
|
|
||||||
normalizeValue,
|
normalizeValue,
|
||||||
normalizeVariables,
|
normalizeVariables,
|
||||||
sanitizeVariable,
|
sanitizeVariable,
|
||||||
@@ -173,18 +172,14 @@ export function generateWorkflowDiffSummary(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Normalize trigger config values for both states before comparison
|
|
||||||
const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
|
|
||||||
const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
|
|
||||||
|
|
||||||
// Compare subBlocks using shared helper for filtering (single source of truth)
|
// Compare subBlocks using shared helper for filtering (single source of truth)
|
||||||
const allSubBlockIds = filterSubBlockIds([
|
const allSubBlockIds = filterSubBlockIds([
|
||||||
...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
|
...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
|
||||||
])
|
])
|
||||||
|
|
||||||
for (const subId of allSubBlockIds) {
|
for (const subId of allSubBlockIds) {
|
||||||
const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
|
const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
|
||||||
const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined
|
const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
|
||||||
|
|
||||||
if (!currentSub || !previousSub) {
|
if (!currentSub || !previousSub) {
|
||||||
changes.push({
|
changes.push({
|
||||||
|
|||||||
@@ -4,12 +4,10 @@
|
|||||||
import { describe, expect, it } from 'vitest'
|
import { describe, expect, it } from 'vitest'
|
||||||
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||||
import {
|
import {
|
||||||
filterSubBlockIds,
|
|
||||||
normalizedStringify,
|
normalizedStringify,
|
||||||
normalizeEdge,
|
normalizeEdge,
|
||||||
normalizeLoop,
|
normalizeLoop,
|
||||||
normalizeParallel,
|
normalizeParallel,
|
||||||
normalizeTriggerConfigValues,
|
|
||||||
normalizeValue,
|
normalizeValue,
|
||||||
sanitizeInputFormat,
|
sanitizeInputFormat,
|
||||||
sanitizeTools,
|
sanitizeTools,
|
||||||
@@ -586,214 +584,4 @@ describe('Workflow Normalization Utilities', () => {
|
|||||||
expect(result2).toBe(result3)
|
expect(result2).toBe(result3)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('filterSubBlockIds', () => {
|
|
||||||
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
|
|
||||||
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['botToken', 'signingSecret'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
|
|
||||||
const ids = [
|
|
||||||
'signingSecret',
|
|
||||||
'samplePayload_slack_webhook',
|
|
||||||
'triggerInstructions_slack_webhook',
|
|
||||||
'webhookUrlDisplay_slack_webhook',
|
|
||||||
'botToken',
|
|
||||||
]
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['botToken', 'signingSecret'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
|
|
||||||
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['signingSecret'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
|
|
||||||
const ids = ['mySamplePayload', 'notSamplePayload']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should return sorted results', () => {
|
|
||||||
const ids = ['zebra', 'alpha', 'middle']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['alpha', 'middle', 'zebra'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should handle empty array', () => {
|
|
||||||
expect(filterSubBlockIds([])).toEqual([])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should handle all IDs being excluded', () => {
|
|
||||||
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual([])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
|
|
||||||
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['realField'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
|
|
||||||
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['signingSecret'])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('normalizeTriggerConfigValues', () => {
|
|
||||||
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result).toEqual(subBlocks)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result).toEqual(subBlocks)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should return subBlocks unchanged when triggerConfig value is not an object',
|
|
||||||
() => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result).toEqual(subBlocks)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent('should populate null individual fields from triggerConfig', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
|
||||||
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'old-secret' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: null, botToken: undefined },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
|
||||||
expect((result.botToken as Record<string, unknown>).value).toBe(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { nonExistentField: 'value123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result.nonExistentField).toBeUndefined()
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should not mutate the original subBlocks object', () => {
|
|
||||||
const original = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
normalizeTriggerConfigValues(original)
|
|
||||||
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should preserve other subBlock properties when populating value', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: {
|
|
||||||
id: 'signingSecret',
|
|
||||||
type: 'short-input',
|
|
||||||
value: null,
|
|
||||||
placeholder: 'Enter signing secret',
|
|
||||||
},
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
const normalized = result.signingSecret as Record<string, unknown>
|
|
||||||
expect(normalized.value).toBe('secret123')
|
|
||||||
expect(normalized.id).toBe('signingSecret')
|
|
||||||
expect(normalized.type).toBe('short-input')
|
|
||||||
expect(normalized.placeholder).toBe('Enter signing secret')
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -418,48 +418,10 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
 */
export function filterSubBlockIds(subBlockIds: string[]): string[] {
  return subBlockIds
-    .filter((id) => {
-      if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
-      if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
-        return false
-      return true
-    })
+    .filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
    .sort()
}

-/**
- * Normalizes trigger block subBlocks by populating null/empty individual fields
- * from the triggerConfig aggregate subBlock. This compensates for the runtime
- * population done by populateTriggerFieldsFromConfig, ensuring consistent
- * comparison between client state (with populated values) and deployed state
- * (with null values from DB).
- */
-export function normalizeTriggerConfigValues(
-  subBlocks: Record<string, unknown>
-): Record<string, unknown> {
-  const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
-  const triggerConfigValue = triggerConfigSub?.value
-  if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
-    return subBlocks
-  }
-
-  const result = { ...subBlocks }
-  for (const [fieldId, configValue] of Object.entries(
-    triggerConfigValue as Record<string, unknown>
-  )) {
-    if (configValue === null || configValue === undefined) continue
-    const existingSub = result[fieldId] as Record<string, unknown> | undefined
-    if (
-      existingSub &&
-      (existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
-    ) {
-      result[fieldId] = { ...existingSub, value: configValue }
-    }
-  }
-
-  return result
-}
-
/**
 * Normalizes a subBlock value with sanitization for specific subBlock types.
 * Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)
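For readers of the tests earlier in this diff, the removed normalizeTriggerConfigValues helper only ever filled empty individual fields from the triggerConfig aggregate and never overwrote user edits. A small illustration of that removed behavior on a sample input, given purely as a restatement of the deleted code, not anything still in the tree:

// Sketch of the removed helper's observable behavior (illustrative data only).
const subBlocks = {
  triggerConfig: { id: 'triggerConfig', type: 'short-input', value: { signingSecret: 'secret123' } },
  signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
  botToken: { id: 'botToken', type: 'short-input', value: 'user-edited-token' },
}

// normalizeTriggerConfigValues(subBlocks) returned a copy where:
//   signingSecret.value === 'secret123'         (null filled from triggerConfig)
//   botToken.value      === 'user-edited-token' (non-empty values left alone)
//   and the input object itself was never mutated.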
@@ -137,37 +137,6 @@ function handleSecurityFiltering(request: NextRequest): NextResponse | null {
   return null
 }
-
-const UTM_KEYS = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_content'] as const
-const UTM_COOKIE_NAME = 'sim_utm'
-const UTM_COOKIE_MAX_AGE = 3600
-
-/**
- * Sets a `sim_utm` cookie when UTM params are present on auth pages.
- * Captures UTM values, the HTTP Referer, landing page, and a timestamp
- * used by the attribution API to verify the user signed up after visiting the link.
- */
-function setUtmCookie(request: NextRequest, response: NextResponse): void {
-  const { searchParams, pathname } = request.nextUrl
-  const hasUtm = UTM_KEYS.some((key) => searchParams.get(key))
-  if (!hasUtm) return
-
-  const utmData: Record<string, string> = {}
-  for (const key of UTM_KEYS) {
-    const value = searchParams.get(key)
-    if (value) utmData[key] = value
-  }
-  utmData.referrer_url = request.headers.get('referer') || ''
-  utmData.landing_page = pathname
-  utmData.created_at = Date.now().toString()
-
-  response.cookies.set(UTM_COOKIE_NAME, JSON.stringify(utmData), {
-    path: '/',
-    maxAge: UTM_COOKIE_MAX_AGE,
-    sameSite: 'lax',
-    httpOnly: false, // Client-side hook needs to detect cookie presence
-  })
-}
-
 export async function proxy(request: NextRequest) {
   const url = request.nextUrl
 
@@ -183,7 +152,6 @@ export async function proxy(request: NextRequest) {
   }
   const response = NextResponse.next()
   response.headers.set('Content-Security-Policy', generateRuntimeCSP())
-  setUtmCookie(request, response)
   return response
 }
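For reference, a sketch of the payload the removed helper wrote — the keys come from the code above, while the concrete values are hypothetical (a visit to /signup?utm_source=twitter&utm_medium=social):

// Decoded `sim_utm` cookie value (stored as a JSON string, 1-hour maxAge, readable client-side):
const exampleSimUtmCookie = {
  utm_source: 'twitter',
  utm_medium: 'social',
  referrer_url: 'https://t.co/abc123', // HTTP Referer, hypothetical
  landing_page: '/signup',
  created_at: '1770000000000', // Date.now() captured as a string
}
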
@@ -129,18 +129,6 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
     })
   },
 
-  setCurrentExecutionId: (workflowId, executionId) => {
-    set({
-      workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
-        currentExecutionId: executionId,
-      }),
-    })
-  },
-
-  getCurrentExecutionId: (workflowId) => {
-    return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
-  },
-
   clearRunPath: (workflowId) => {
     set({
       workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
@@ -35,8 +35,6 @@ export interface WorkflowExecutionState {
   lastRunPath: Map<string, BlockRunStatus>
   /** Maps edge IDs to their run result from the last execution */
   lastRunEdges: Map<string, EdgeRunStatus>
-  /** The execution ID of the currently running execution */
-  currentExecutionId: string | null
 }
 
 /**
@@ -56,7 +54,6 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
   debugContext: null,
   lastRunPath: new Map(),
   lastRunEdges: new Map(),
-  currentExecutionId: null,
 }
 
 /**
@@ -99,10 +96,6 @@ export interface ExecutionActions {
   setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
   /** Clears the run path and run edges for a workflow */
   clearRunPath: (workflowId: string) => void
-  /** Stores the current execution ID for a workflow */
-  setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
-  /** Returns the current execution ID for a workflow */
-  getCurrentExecutionId: (workflowId: string) => string | null
   /** Resets the entire store to its initial empty state */
   reset: () => void
   /** Stores a serializable execution snapshot for a workflow */
@@ -18,7 +18,6 @@ import {
 import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers'
 import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types'
 import {
-  COPILOT_AUTO_ALLOWED_TOOLS_API_PATH,
   COPILOT_CHAT_API_PATH,
   COPILOT_CHAT_STREAM_API_PATH,
   COPILOT_CHECKPOINTS_API_PATH,
@@ -84,6 +83,14 @@ function isPageUnloading(): boolean {
   return _isPageUnloading
 }
 
+function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
+  if (name !== 'workflow_change') return false
+
+  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
+  if (mode === 'apply') return true
+  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
+}
+
 function readActiveStreamFromStorage(): CopilotStreamInfo | null {
   if (typeof window === 'undefined') return null
   try {
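A few illustrative calls against the helper added above (argument values are hypothetical):

isWorkflowEditToolCall('workflow_change', { mode: 'apply' })        // true — explicit apply
isWorkflowEditToolCall('workflow_change', { proposalId: 'p_123' })  // true — apply calls carry a proposalId
isWorkflowEditToolCall('workflow_change', { mode: 'propose' })      // false — not an apply, no proposalId
isWorkflowEditToolCall('edit_workflow', { mode: 'apply' })          // false — only 'workflow_change' qualifies
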
@@ -140,41 +147,6 @@ function updateActiveStreamEventId(
   writeActiveStreamToStorage(next)
 }
 
-const AUTO_ALLOWED_TOOLS_STORAGE_KEY = 'copilot_auto_allowed_tools'
-
-function readAutoAllowedToolsFromStorage(): string[] | null {
-  if (typeof window === 'undefined') return null
-  try {
-    const raw = window.localStorage.getItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY)
-    if (!raw) return null
-    const parsed = JSON.parse(raw)
-    if (!Array.isArray(parsed)) return null
-    return parsed.filter((item): item is string => typeof item === 'string')
-  } catch (error) {
-    logger.warn('[AutoAllowedTools] Failed to read local cache', {
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return null
-  }
-}
-
-function writeAutoAllowedToolsToStorage(tools: string[]): void {
-  if (typeof window === 'undefined') return
-  try {
-    window.localStorage.setItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY, JSON.stringify(tools))
-  } catch (error) {
-    logger.warn('[AutoAllowedTools] Failed to write local cache', {
-      error: error instanceof Error ? error.message : String(error),
-    })
-  }
-}
-
-function isToolAutoAllowedByList(toolId: string, autoAllowedTools: string[]): boolean {
-  if (!toolId) return false
-  const normalizedTarget = toolId.trim()
-  return autoAllowedTools.some((allowed) => allowed?.trim() === normalizedTarget)
-}
-
 /**
  * Clear any lingering diff preview from a previous session.
  * Called lazily when the store is first activated (setWorkflowId).
@@ -480,11 +452,6 @@ function prepareSendContext(
       .catch((err) => {
         logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
       })
-    get()
-      .loadAutoAllowedTools()
-      .catch((err) => {
-        logger.warn('[Copilot] Failed to load auto-allowed tools', err)
-      })
 
   let newMessages: CopilotMessage[]
   if (revertState) {
@@ -1037,8 +1004,6 @@ async function resumeFromLiveStream(
   return false
 }
 
-const cachedAutoAllowedTools = readAutoAllowedToolsFromStorage()
-
 // Initial state (subset required for UI/streaming)
 const initialState = {
   mode: 'build' as const,
@@ -1073,8 +1038,6 @@ const initialState = {
   streamingPlanContent: '',
   toolCallsById: {} as Record<string, CopilotToolCall>,
   suppressAutoSelect: false,
-  autoAllowedTools: cachedAutoAllowedTools ?? ([] as string[]),
-  autoAllowedToolsLoaded: cachedAutoAllowedTools !== null,
   activeStream: null as CopilotStreamInfo | null,
   messageQueue: [] as import('./types').QueuedMessage[],
   suppressAbortContinueOption: false,
@@ -1113,8 +1076,6 @@ export const useCopilotStore = create<CopilotStore>()(
         agentPrefetch: get().agentPrefetch,
         availableModels: get().availableModels,
         isLoadingModels: get().isLoadingModels,
-        autoAllowedTools: get().autoAllowedTools,
-        autoAllowedToolsLoaded: get().autoAllowedToolsLoaded,
       })
     },
 
@@ -1429,16 +1390,6 @@ export const useCopilotStore = create<CopilotStore>()(
 
     // Send a message (streaming only)
     sendMessage: async (message: string, options = {}) => {
-      if (!get().autoAllowedToolsLoaded) {
-        try {
-          await get().loadAutoAllowedTools()
-        } catch (error) {
-          logger.warn('[Copilot] Failed to preload auto-allowed tools before send', {
-            error: error instanceof Error ? error.message : String(error),
-          })
-        }
-      }
-
       const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput)
       if (!prepared) return
 
@@ -1705,7 +1656,7 @@ export const useCopilotStore = create<CopilotStore>()(
             const b = blocks[bi]
             if (b?.type === 'tool_call') {
               const tn = b.toolCall?.name
-              if (tn === 'edit_workflow') {
+              if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
                 id = b.toolCall?.id
                 break outer
               }
@@ -1714,7 +1665,9 @@ export const useCopilotStore = create<CopilotStore>()(
         }
         // Fallback to map if not found in messages
         if (!id) {
-          const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
+          const candidates = Object.values(toolCallsById).filter((t) =>
+            isWorkflowEditToolCall(t.name, t.params)
+          )
           id = candidates.length ? candidates[candidates.length - 1].id : undefined
         }
       }
@@ -2407,74 +2360,6 @@ export const useCopilotStore = create<CopilotStore>()(
       }
     },
 
-    loadAutoAllowedTools: async () => {
-      try {
-        logger.debug('[AutoAllowedTools] Loading from API...')
-        const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH)
-        logger.debug('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
-        if (res.ok) {
-          const data = await res.json()
-          const tools = data.autoAllowedTools ?? []
-          set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
-          writeAutoAllowedToolsToStorage(tools)
-          logger.debug('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
-        } else {
-          set({ autoAllowedToolsLoaded: true })
-          logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status })
-        }
-      } catch (err) {
-        set({ autoAllowedToolsLoaded: true })
-        logger.error('[AutoAllowedTools] Failed to load', { error: err })
-      }
-    },
-
-    addAutoAllowedTool: async (toolId: string) => {
-      try {
-        logger.debug('[AutoAllowedTools] Adding tool...', { toolId })
-        const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({ toolId }),
-        })
-        logger.debug('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
-        if (res.ok) {
-          const data = await res.json()
-          logger.debug('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
-          const tools = data.autoAllowedTools ?? []
-          set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
-          writeAutoAllowedToolsToStorage(tools)
-          logger.debug('[AutoAllowedTools] Added tool to store', { toolId })
-        }
-      } catch (err) {
-        logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err })
-      }
-    },
-
-    removeAutoAllowedTool: async (toolId: string) => {
-      try {
-        const res = await fetch(
-          `${COPILOT_AUTO_ALLOWED_TOOLS_API_PATH}?toolId=${encodeURIComponent(toolId)}`,
-          {
-            method: 'DELETE',
-          }
-        )
-        if (res.ok) {
-          const data = await res.json()
-          const tools = data.autoAllowedTools ?? []
-          set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
-          writeAutoAllowedToolsToStorage(tools)
-          logger.debug('[AutoAllowedTools] Removed tool', { toolId })
-        }
-      } catch (err) {
-        logger.error('[AutoAllowedTools] Failed to remove tool', { toolId, error: err })
-      }
-    },
-
-    isToolAutoAllowed: (toolId: string) => {
-      const { autoAllowedTools } = get()
-      return isToolAutoAllowedByList(toolId, autoAllowedTools)
-    },
-
     // Credential masking
     loadSensitiveCredentialIds: async () => {
       try {
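With the client-side allow-list, its localStorage cache, and these REST calls gone, the replacement appears to be the server-driven `ui.autoAllowed` flag added to the tool-call type in the next file; a renderer would presumably consult that flag instead of the removed isToolAutoAllowed action. A minimal sketch under that assumption (the consuming code is hypothetical):

// Skip the confirmation prompt when the server marks the call as auto-allowed.
const skipConfirmation = toolCall.ui?.autoAllowed === true
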
@@ -26,6 +26,26 @@ export interface CopilotToolCall {
   params?: Record<string, unknown>
   input?: Record<string, unknown>
   display?: ClientToolDisplay
+  /** Server-provided UI contract for this tool call phase */
+  ui?: {
+    title?: string
+    phaseLabel?: string
+    icon?: string
+    showInterrupt?: boolean
+    showRemember?: boolean
+    autoAllowed?: boolean
+    actions?: Array<{
+      id: string
+      label: string
+      kind: 'accept' | 'reject'
+      remember?: boolean
+    }>
+  }
+  /** Server-provided execution routing contract */
+  execution?: {
+    target?: 'go' | 'go_subagent' | 'sim_server' | 'sim_client_capability' | string
+    capabilityId?: string
+  }
   /** Content streamed from a subagent (e.g., debug agent) */
   subAgentContent?: string
   /** Tool calls made by the subagent */
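For orientation, a hypothetical tool call exercising the two new optional contracts — only the new fields plus an assumed id/name are shown, and every value is invented for illustration:

const exampleToolCall = {
  id: 'tc_1',
  name: 'workflow_change',
  ui: {
    title: 'Apply workflow edit',
    phaseLabel: 'Awaiting confirmation',
    showInterrupt: true,
    showRemember: true,
    actions: [
      { id: 'accept', label: 'Allow', kind: 'accept' as const, remember: true },
      { id: 'reject', label: 'Deny', kind: 'reject' as const },
    ],
  },
  execution: { target: 'sim_server' as const },
}
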
@@ -167,10 +187,6 @@ export interface CopilotState {
 
   // Per-message metadata captured at send-time for reliable stats
 
-  // Auto-allowed integration tools (tools that can run without confirmation)
-  autoAllowedTools: string[]
-  autoAllowedToolsLoaded: boolean
-
   // Active stream metadata for reconnect/replay
   activeStream: CopilotStreamInfo | null
 
@@ -247,11 +263,6 @@ export interface CopilotActions {
     abortSignal?: AbortSignal
   ) => Promise<void>
   handleNewChatCreation: (newChatId: string) => Promise<void>
-  loadAutoAllowedTools: () => Promise<void>
-  addAutoAllowedTool: (toolId: string) => Promise<void>
-  removeAutoAllowedTool: (toolId: string) => Promise<void>
-  isToolAutoAllowed: (toolId: string) => boolean
-
   // Credential masking
   loadSensitiveCredentialIds: () => Promise<void>
   maskCredentialValue: (value: string) => string
@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
 
         const newEntry = get().entries[0]
 
-        if (newEntry?.error && newEntry.blockType !== 'cancelled') {
+        if (newEntry?.error) {
           notifyBlockError({
             error: newEntry.error,
             blockName: newEntry.blockName || 'Unknown Block',
@@ -243,11 +243,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
         useExecutionStore.getState().clearRunPath(workflowId)
       },
 
-      clearExecutionEntries: (executionId: string) =>
-        set((state) => ({
-          entries: state.entries.filter((e) => e.executionId !== executionId),
-        })),
-
       exportConsoleCSV: (workflowId: string) => {
         const entries = get().entries.filter((entry) => entry.workflowId === workflowId)
 
@@ -475,24 +470,12 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
       },
       merge: (persistedState, currentState) => {
         const persisted = persistedState as Partial<ConsoleStore> | undefined
-        const rawEntries = persisted?.entries ?? currentState.entries
-        const oneHourAgo = Date.now() - 60 * 60 * 1000
-
-        const entries = rawEntries.map((entry, index) => {
-          let updated = entry
+        const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
           if (entry.executionOrder === undefined) {
-            updated = { ...updated, executionOrder: index + 1 }
+            return { ...entry, executionOrder: index + 1 }
           }
-          if (
-            entry.isRunning &&
-            entry.startedAt &&
-            new Date(entry.startedAt).getTime() < oneHourAgo
-          ) {
-            updated = { ...updated, isRunning: false }
-          }
-          return updated
+          return entry
         })
 
         return {
           ...currentState,
           entries,
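The simplified merge now only back-fills a missing executionOrder; the previous pass that also reset isRunning on entries started more than an hour ago is dropped. A small sketch of the remaining behavior (entry objects are hypothetical and trimmed to the relevant fields):

const persistedEntries = [
  { id: 'a', executionOrder: 7 }, // already ordered — kept as-is
  { id: 'b' },                    // no persisted order — receives index + 1
]
const merged = persistedEntries.map((entry, index) =>
  entry.executionOrder === undefined ? { ...entry, executionOrder: index + 1 } : entry
)
// merged[0].executionOrder === 7, merged[1].executionOrder === 2
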
@@ -51,7 +51,6 @@ export interface ConsoleStore {
   isOpen: boolean
   addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
   clearWorkflowConsole: (workflowId: string) => void
-  clearExecutionEntries: (executionId: string) => void
   exportConsoleCSV: (workflowId: string) => void
   getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
   toggleConsole: () => void
@@ -15,7 +15,7 @@ import {
   captureBaselineSnapshot,
   cloneWorkflowState,
   createBatchedUpdater,
-  findLatestEditWorkflowToolCallId,
+  findLatestWorkflowEditToolCallId,
   getLatestUserMessageId,
   persistWorkflowStateToServer,
 } from './utils'
@@ -334,7 +334,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
         })
       }
 
-      findLatestEditWorkflowToolCallId().then((toolCallId) => {
+      findLatestWorkflowEditToolCallId().then((toolCallId) => {
         if (toolCallId) {
           import('@/stores/panel/copilot/store')
             .then(({ useCopilotStore }) => {
@@ -439,7 +439,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
         })
       }
 
-      findLatestEditWorkflowToolCallId().then((toolCallId) => {
+      findLatestWorkflowEditToolCallId().then((toolCallId) => {
        if (toolCallId) {
           import('@/stores/panel/copilot/store')
             .then(({ useCopilotStore }) => {
@@ -126,6 +126,20 @@ export async function getLatestUserMessageId(): Promise<string | null> {
 }
 
 export async function findLatestEditWorkflowToolCallId(): Promise<string | undefined> {
+  return findLatestWorkflowEditToolCallId()
+}
+
+function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
+  if (name !== 'workflow_change') return false
+
+  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
+  if (mode === 'apply') return true
+
+  // Be permissive for incomplete events: apply calls always include proposalId.
+  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
+}
+
+export async function findLatestWorkflowEditToolCallId(): Promise<string | undefined> {
   try {
     const { useCopilotStore } = await import('@/stores/panel/copilot/store')
     const { messages, toolCallsById } = useCopilotStore.getState()
@@ -134,17 +148,22 @@ export async function findLatestEditWorkflowToolCallId(): Promise<string | undef
       const message = messages[mi]
       if (message.role !== 'assistant' || !message.contentBlocks) continue
       for (const block of message.contentBlocks) {
-        if (block?.type === 'tool_call' && block.toolCall?.name === 'edit_workflow') {
+        if (
+          block?.type === 'tool_call' &&
+          isWorkflowEditToolCall(block.toolCall?.name, block.toolCall?.params)
+        ) {
           return block.toolCall?.id
         }
       }
     }
 
-    const fallback = Object.values(toolCallsById).filter((call) => call.name === 'edit_workflow')
+    const fallback = Object.values(toolCallsById).filter((call) =>
+      isWorkflowEditToolCall(call.name, call.params)
+    )
 
     return fallback.length ? fallback[fallback.length - 1].id : undefined
   } catch (error) {
-    logger.warn('Failed to resolve edit_workflow tool call id', { error })
+    logger.warn('Failed to resolve workflow edit tool call id', { error })
     return undefined
   }
 }
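Callers that still import the old name keep working, since it now simply delegates to the renamed function; a quick sketch (the resolved id depends on the copilot store's current messages):

const viaNewName = await findLatestWorkflowEditToolCallId()
const viaOldName = await findLatestEditWorkflowToolCallId() // compatibility wrapper, same result
// viaNewName === viaOldName
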
@@ -23,12 +23,7 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
  * with default values from the trigger definition on load, which aren't present in
  * the deployed state, causing false positive change detection.
  */
-export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = [
-  'webhookId',
-  'triggerPath',
-  'triggerConfig',
-  'triggerId',
-]
+export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath', 'triggerConfig']
 
 /**
  * Maximum number of consecutive failures before a trigger (schedule/webhook) is auto-disabled.
@@ -1,41 +0,0 @@
-CREATE TABLE "referral_attribution" (
-  "id" text PRIMARY KEY NOT NULL,
-  "user_id" text NOT NULL,
-  "organization_id" text,
-  "campaign_id" text,
-  "utm_source" text,
-  "utm_medium" text,
-  "utm_campaign" text,
-  "utm_content" text,
-  "referrer_url" text,
-  "landing_page" text,
-  "bonus_credit_amount" numeric DEFAULT '0' NOT NULL,
-  "created_at" timestamp DEFAULT now() NOT NULL,
-  CONSTRAINT "referral_attribution_user_id_unique" UNIQUE("user_id")
-);
---> statement-breakpoint
-CREATE TABLE "referral_campaigns" (
-  "id" text PRIMARY KEY NOT NULL,
-  "name" text NOT NULL,
-  "code" text,
-  "utm_source" text,
-  "utm_medium" text,
-  "utm_campaign" text,
-  "utm_content" text,
-  "bonus_credit_amount" numeric NOT NULL,
-  "is_active" boolean DEFAULT true NOT NULL,
-  "created_at" timestamp DEFAULT now() NOT NULL,
-  "updated_at" timestamp DEFAULT now() NOT NULL,
-  CONSTRAINT "referral_campaigns_code_unique" UNIQUE("code")
-);
---> statement-breakpoint
-ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
-ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_organization_id_organization_id_fk" FOREIGN KEY ("organization_id") REFERENCES "public"."organization"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
-ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_campaign_id_referral_campaigns_id_fk" FOREIGN KEY ("campaign_id") REFERENCES "public"."referral_campaigns"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
-CREATE INDEX "referral_attribution_user_id_idx" ON "referral_attribution" USING btree ("user_id");--> statement-breakpoint
-CREATE UNIQUE INDEX "referral_attribution_org_unique_idx" ON "referral_attribution" USING btree ("organization_id") WHERE "referral_attribution"."organization_id" IS NOT NULL;--> statement-breakpoint
-CREATE INDEX "referral_attribution_campaign_id_idx" ON "referral_attribution" USING btree ("campaign_id");--> statement-breakpoint
-CREATE INDEX "referral_attribution_utm_campaign_idx" ON "referral_attribution" USING btree ("utm_campaign");--> statement-breakpoint
-CREATE INDEX "referral_attribution_utm_content_idx" ON "referral_attribution" USING btree ("utm_content");--> statement-breakpoint
-CREATE INDEX "referral_campaigns_active_idx" ON "referral_campaigns" USING btree ("is_active");
File diff suppressed because it is too large
@@ -1072,13 +1072,6 @@
       "when": 1770410282842,
       "tag": "0153_complete_arclight",
       "breakpoints": true
-    },
-    {
-      "idx": 154,
-      "version": "7",
-      "when": 1770869658697,
-      "tag": "0154_bumpy_living_mummy",
-      "breakpoints": true
     }
   ]
 }
@@ -726,61 +726,6 @@ export const userStats = pgTable('user_stats', {
   billingBlockedReason: billingBlockedReasonEnum('billing_blocked_reason'),
 })
 
-export const referralCampaigns = pgTable(
-  'referral_campaigns',
-  {
-    id: text('id').primaryKey(),
-    name: text('name').notNull(),
-    code: text('code').unique(),
-    utmSource: text('utm_source'),
-    utmMedium: text('utm_medium'),
-    utmCampaign: text('utm_campaign'),
-    utmContent: text('utm_content'),
-    bonusCreditAmount: decimal('bonus_credit_amount').notNull(),
-    isActive: boolean('is_active').notNull().default(true),
-    createdAt: timestamp('created_at').notNull().defaultNow(),
-    updatedAt: timestamp('updated_at').notNull().defaultNow(),
-  },
-  (table) => ({
-    activeIdx: index('referral_campaigns_active_idx').on(table.isActive),
-  })
-)
-
-export const referralAttribution = pgTable(
-  'referral_attribution',
-  {
-    id: text('id').primaryKey(),
-    userId: text('user_id')
-      .notNull()
-      .references(() => user.id, { onDelete: 'cascade' })
-      .unique(),
-    organizationId: text('organization_id').references(() => organization.id, {
-      onDelete: 'set null',
-    }),
-    campaignId: text('campaign_id').references(() => referralCampaigns.id, {
-      onDelete: 'set null',
-    }),
-    utmSource: text('utm_source'),
-    utmMedium: text('utm_medium'),
-    utmCampaign: text('utm_campaign'),
-    utmContent: text('utm_content'),
-    referrerUrl: text('referrer_url'),
-    landingPage: text('landing_page'),
-    bonusCreditAmount: decimal('bonus_credit_amount').notNull().default('0'),
-    createdAt: timestamp('created_at').notNull().defaultNow(),
-  },
-  (table) => ({
-    userIdIdx: index('referral_attribution_user_id_idx').on(table.userId),
-    orgUniqueIdx: uniqueIndex('referral_attribution_org_unique_idx')
-      .on(table.organizationId)
-      .where(sql`${table.organizationId} IS NOT NULL`),
-    campaignIdIdx: index('referral_attribution_campaign_id_idx').on(table.campaignId),
-    utmCampaignIdx: index('referral_attribution_utm_campaign_idx').on(table.utmCampaign),
-    utmContentIdx: index('referral_attribution_utm_content_idx').on(table.utmContent),
-    createdAtIdx: index('referral_attribution_created_at_idx').on(table.createdAt),
-  })
-)
-
 export const customTools = pgTable(
   'custom_tools',
   {
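For context on what the dropped schema supported: a minimal drizzle sketch of the kind of read the is_active index was laid out for. The query is illustrative only and is not part of this diff.

const activeCampaigns = await db
  .select()
  .from(referralCampaigns)
  .where(eq(referralCampaigns.isActive, true))
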