Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-13 07:55:09 -05:00)

Compare commits: feat/smart...main (70 commits)
Commit SHAs in this comparison (author, date, and message columns were not captured):

b45f3962fc, 7fbbc7ba7a, a337aa7dfe, 022e84c4b1, 602e371a7a, 9a06cae591, dce47a101c, 1130f8ddb2, ebc2ffa1c5, fc97ce007d, 6c006cdfec, c380e59cb3, 2944579d21, 81dfeb0bb0, 01577a18b4, 07d50f8fe1, 27973953f6, 50585273ce, 654cb2b407, 6c66521d64, 479cd347ad, a3a99eda19, 1a66d48add, 46822e91f3, 2bb68335ee, 8528fbe2d2, 31fdd2be13, 028bc652c2, c6bf5cd58c, 11dc18a80d, ab4e9dc72f, 1c58c35bd8, d63a5cb504, 8bd5d41723, c12931bc50, e9c4251c1c, cc2be33d6b, 45371e521e, 0ce0f98aa5, dff1c9d083, b09f683072, a8bb0db660, af82820a28, 4372841797, 5e8c843241, 7bf3d73ee6, 7ffc11a738, be578e2ed7, f415e5edc4, 13a6e6c3fa, f5ab7f21ae, bfb6fffe38, 4fbec0a43f, 585f5e365b, 3792bdd252, eb5d1f3e5b, 54ab82c8dd, f895bf469b, dd3209af06, b6ba3b50a7, b304233062, 57e4b49bd6, e12dd204ed, 3d9d9cbc54, 0f4ec962ad, 4827866f9a, 3e697d9ed9, 4431a1a484, 4d1a9a3f22, eb07a080fb
```diff
@@ -1157,6 +1157,21 @@ export function AirweaveIcon(props: SVGProps<SVGSVGElement>) {
   )
 }
 
+export function GoogleBooksIcon(props: SVGProps<SVGSVGElement>) {
+  return (
+    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 478.633 540.068'>
+      <path
+        fill='#1C51A4'
+        d='M449.059,218.231L245.519,99.538l-0.061,193.23c0.031,1.504-0.368,2.977-1.166,4.204c-0.798,1.258-1.565,1.995-2.915,2.547c-1.35,0.552-2.792,0.706-4.204,0.399c-1.412-0.307-2.7-1.043-3.713-2.117l-69.166-70.609l-69.381,70.179c-1.013,0.982-2.301,1.657-3.652,1.903c-1.381,0.246-2.792,0.092-4.081-0.491c-1.289-0.583-1.626-0.522-2.394-1.749c-0.767-1.197-1.197-2.608-1.197-4.081L85.031,6.007l-2.915-1.289C43.973-11.638,0,16.409,0,59.891v420.306c0,46.029,49.312,74.782,88.775,51.767l360.285-210.138C488.491,298.782,488.491,241.246,449.059,218.231z'
+      />
+      <path
+        fill='#80D7FB'
+        d='M88.805,8.124c-2.179-1.289-4.419-2.363-6.659-3.345l0.123,288.663c0,1.442,0.43,2.854,1.197,4.081c0.767,1.197,1.872,2.148,3.161,2.731c1.289,0.583,2.7,0.736,4.081,0.491c1.381-0.246,2.639-0.921,3.652-1.903l69.749-69.688l69.811,69.749c1.013,1.074,2.301,1.81,3.713,2.117c1.412,0.307,2.884,0.153,4.204-0.399c1.319-0.552,2.455-1.565,3.253-2.792c0.798-1.258,1.197-2.731,1.166-4.204V99.998L88.805,8.124z'
+      />
+    </svg>
+  )
+}
+
 export function GoogleDocsIcon(props: SVGProps<SVGSVGElement>) {
   return (
     <svg
```
```diff
@@ -38,6 +38,7 @@ import {
   GithubIcon,
   GitLabIcon,
   GmailIcon,
+  GoogleBooksIcon,
   GoogleCalendarIcon,
   GoogleDocsIcon,
   GoogleDriveIcon,
@@ -172,6 +173,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   github_v2: GithubIcon,
   gitlab: GitLabIcon,
   gmail_v2: GmailIcon,
+  google_books: GoogleBooksIcon,
   google_calendar_v2: GoogleCalendarIcon,
   google_docs: GoogleDocsIcon,
   google_drive: GoogleDriveIcon,
```
apps/docs/content/docs/en/tools/google_books.mdx (new file, 96 lines)

```mdx
---
title: Google Books
description: Search and retrieve book information
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="google_books"
  color="#FFFFFF"
/>

## Usage Instructions

Search for books using the Google Books API. Find volumes by title, author, ISBN, or keywords, and retrieve detailed information about specific books including descriptions, ratings, and publication details.

## Tools

### `google_books_volume_search`

Search for books using the Google Books API

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Google Books API key |
| `query` | string | Yes | Search query. Supports special keywords: intitle:, inauthor:, inpublisher:, subject:, isbn: |
| `filter` | string | No | Filter results by availability \(partial, full, free-ebooks, paid-ebooks, ebooks\) |
| `printType` | string | No | Restrict to print type \(all, books, magazines\) |
| `orderBy` | string | No | Sort order \(relevance, newest\) |
| `startIndex` | number | No | Index of the first result to return \(for pagination\) |
| `maxResults` | number | No | Maximum number of results to return \(1-40\) |
| `langRestrict` | string | No | Restrict results to a specific language \(ISO 639-1 code\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `totalItems` | number | Total number of matching results |
| `volumes` | array | List of matching volumes |
| ↳ `id` | string | Volume ID |
| ↳ `title` | string | Book title |
| ↳ `subtitle` | string | Book subtitle |
| ↳ `authors` | array | List of authors |
| ↳ `publisher` | string | Publisher name |
| ↳ `publishedDate` | string | Publication date |
| ↳ `description` | string | Book description |
| ↳ `pageCount` | number | Number of pages |
| ↳ `categories` | array | Book categories |
| ↳ `averageRating` | number | Average rating \(1-5\) |
| ↳ `ratingsCount` | number | Number of ratings |
| ↳ `language` | string | Language code |
| ↳ `previewLink` | string | Link to preview on Google Books |
| ↳ `infoLink` | string | Link to info page |
| ↳ `thumbnailUrl` | string | Book cover thumbnail URL |
| ↳ `isbn10` | string | ISBN-10 identifier |
| ↳ `isbn13` | string | ISBN-13 identifier |

### `google_books_volume_details`

Get detailed information about a specific book volume

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Google Books API key |
| `volumeId` | string | Yes | The ID of the volume to retrieve |
| `projection` | string | No | Projection level \(full, lite\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Volume ID |
| `title` | string | Book title |
| `subtitle` | string | Book subtitle |
| `authors` | array | List of authors |
| `publisher` | string | Publisher name |
| `publishedDate` | string | Publication date |
| `description` | string | Book description |
| `pageCount` | number | Number of pages |
| `categories` | array | Book categories |
| `averageRating` | number | Average rating \(1-5\) |
| `ratingsCount` | number | Number of ratings |
| `language` | string | Language code |
| `previewLink` | string | Link to preview on Google Books |
| `infoLink` | string | Link to info page |
| `thumbnailUrl` | string | Book cover thumbnail URL |
| `isbn10` | string | ISBN-10 identifier |
| `isbn13` | string | ISBN-13 identifier |
```
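The documented `google_books_volume_search` parameters map directly onto the public Google Books `volumes` endpoint. The sketch below is illustrative only: it shows roughly how such a search could be issued against that endpoint. The Sim tool's own implementation is not part of this diff, so the wrapper function and its types here are assumptions.

```ts
// Illustrative sketch only: maps the documented search parameters onto the
// public Google Books API (https://www.googleapis.com/books/v1/volumes).
// The actual Sim tool implementation is not shown in this diff.
interface VolumeSearchParams {
  apiKey: string
  query: string // supports intitle:, inauthor:, inpublisher:, subject:, isbn:
  filter?: 'partial' | 'full' | 'free-ebooks' | 'paid-ebooks' | 'ebooks'
  printType?: 'all' | 'books' | 'magazines'
  orderBy?: 'relevance' | 'newest'
  startIndex?: number
  maxResults?: number // 1-40
  langRestrict?: string // ISO 639-1 code
}

async function searchVolumes(params: VolumeSearchParams) {
  const url = new URL('https://www.googleapis.com/books/v1/volumes')
  url.searchParams.set('q', params.query)
  url.searchParams.set('key', params.apiKey)
  if (params.filter) url.searchParams.set('filter', params.filter)
  if (params.printType) url.searchParams.set('printType', params.printType)
  if (params.orderBy) url.searchParams.set('orderBy', params.orderBy)
  if (params.startIndex) url.searchParams.set('startIndex', String(params.startIndex))
  if (params.maxResults) url.searchParams.set('maxResults', String(params.maxResults))
  if (params.langRestrict) url.searchParams.set('langRestrict', params.langRestrict)

  const res = await fetch(url)
  if (!res.ok) throw new Error(`Google Books request failed: ${res.status}`)
  // The response's totalItems and items[] fields correspond roughly to the
  // totalItems / volumes output documented above.
  return res.json()
}

// Example: search by title keyword, books only, newest first.
// searchVolumes({ apiKey: 'YOUR_KEY', query: 'intitle:dune', printType: 'books', orderBy: 'newest' })
```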
"github",
|
"github",
|
||||||
"gitlab",
|
"gitlab",
|
||||||
"gmail",
|
"gmail",
|
||||||
|
"google_books",
|
||||||
"google_calendar",
|
"google_calendar",
|
||||||
"google_docs",
|
"google_docs",
|
||||||
"google_drive",
|
"google_drive",
|
||||||
|
|||||||
```diff
@@ -13,6 +13,7 @@ BETTER_AUTH_URL=http://localhost:3000
 
 # NextJS (Required)
 NEXT_PUBLIC_APP_URL=http://localhost:3000
+# INTERNAL_API_BASE_URL=http://sim-app.default.svc.cluster.local:3000 # Optional: internal URL for server-side /api self-calls; defaults to NEXT_PUBLIC_APP_URL
 
 # Security (Required)
 ENCRYPTION_KEY=your_encryption_key # Use `openssl rand -hex 32` to generate, used to encrypt environment variables
```
```diff
@@ -1,7 +1,7 @@
 import type { Artifact, Message, PushNotificationConfig, Task, TaskState } from '@a2a-js/sdk'
 import { v4 as uuidv4 } from 'uuid'
 import { generateInternalToken } from '@/lib/auth/internal'
-import { getBaseUrl } from '@/lib/core/utils/urls'
+import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
 
 /** A2A v0.3 JSON-RPC method names */
 export const A2A_METHODS = {
@@ -118,7 +118,7 @@ export interface ExecuteRequestResult {
 export async function buildExecuteRequest(
   config: ExecuteRequestConfig
 ): Promise<ExecuteRequestResult> {
-  const url = `${getBaseUrl()}/api/workflows/${config.workflowId}/execute`
+  const url = `${getInternalApiBaseUrl()}/api/workflows/${config.workflowId}/execute`
   const headers: Record<string, string> = { 'Content-Type': 'application/json' }
   let useInternalAuth = false
 
```
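Several changes in this comparison swap `getBaseUrl()` for `getInternalApiBaseUrl()` and introduce the optional `INTERNAL_API_BASE_URL` variable, which the `.env.example` comment says defaults to `NEXT_PUBLIC_APP_URL`. The helper itself is not included in the diff, so the following is only a hedged guess at its shape under that assumption.

```ts
// Hedged sketch of what getInternalApiBaseUrl() plausibly does, based only on
// the .env.example comment ("defaults to NEXT_PUBLIC_APP_URL"). The real helper
// in '@/lib/core/utils/urls' is not shown in this diff; names and fallback
// order here are assumptions.
export function getInternalApiBaseUrlSketch(): string {
  // Prefer the cluster-internal URL for server-side /api self-calls so the
  // request does not leave the cluster or pass through the public ingress.
  const internal = process.env.INTERNAL_API_BASE_URL
  if (internal && internal.length > 0) return internal.replace(/\/$/, '')

  // Fall back to the public app URL when no internal URL is configured.
  const publicUrl = process.env.NEXT_PUBLIC_APP_URL ?? 'http://localhost:3000'
  return publicUrl.replace(/\/$/, '')
}

// Usage mirrors the A2A change above:
// const url = `${getInternalApiBaseUrlSketch()}/api/workflows/${workflowId}/execute`
```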
apps/sim/app/api/attribution/route.ts (new file, 187 lines)

```ts
/**
 * POST /api/attribution
 *
 * Automatic UTM-based referral attribution.
 *
 * Reads the `sim_utm` cookie (set by proxy on auth pages), matches a campaign
 * by UTM specificity, and atomically inserts an attribution record + applies
 * bonus credits.
 *
 * Idempotent — the unique constraint on `userId` prevents double-attribution.
 */

import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { cookies } from 'next/headers'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'

const logger = createLogger('AttributionAPI')

const COOKIE_NAME = 'sim_utm'

const UtmCookieSchema = z.object({
  utm_source: z.string().optional(),
  utm_medium: z.string().optional(),
  utm_campaign: z.string().optional(),
  utm_content: z.string().optional(),
  referrer_url: z.string().optional(),
  landing_page: z.string().optional(),
  created_at: z.string().optional(),
})

/**
 * Finds the most specific active campaign matching the given UTM params.
 * Null fields on a campaign act as wildcards. Ties broken by newest campaign.
 */
async function findMatchingCampaign(utmData: z.infer<typeof UtmCookieSchema>) {
  const campaigns = await db
    .select()
    .from(referralCampaigns)
    .where(eq(referralCampaigns.isActive, true))

  let bestMatch: (typeof campaigns)[number] | null = null
  let bestScore = -1

  for (const campaign of campaigns) {
    let score = 0
    let mismatch = false

    const fields = [
      { campaignVal: campaign.utmSource, utmVal: utmData.utm_source },
      { campaignVal: campaign.utmMedium, utmVal: utmData.utm_medium },
      { campaignVal: campaign.utmCampaign, utmVal: utmData.utm_campaign },
      { campaignVal: campaign.utmContent, utmVal: utmData.utm_content },
    ] as const

    for (const { campaignVal, utmVal } of fields) {
      if (campaignVal === null) continue
      if (campaignVal === utmVal) {
        score++
      } else {
        mismatch = true
        break
      }
    }

    if (!mismatch && score > 0) {
      if (
        score > bestScore ||
        (score === bestScore &&
          bestMatch &&
          campaign.createdAt.getTime() > bestMatch.createdAt.getTime())
      ) {
        bestScore = score
        bestMatch = campaign
      }
    }
  }

  return bestMatch
}

export async function POST() {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const cookieStore = await cookies()
    const utmCookie = cookieStore.get(COOKIE_NAME)
    if (!utmCookie?.value) {
      return NextResponse.json({ attributed: false, reason: 'no_utm_cookie' })
    }

    let utmData: z.infer<typeof UtmCookieSchema>
    try {
      let decoded: string
      try {
        decoded = decodeURIComponent(utmCookie.value)
      } catch {
        decoded = utmCookie.value
      }
      utmData = UtmCookieSchema.parse(JSON.parse(decoded))
    } catch {
      logger.warn('Failed to parse UTM cookie', { userId: session.user.id })
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
    }

    const matchedCampaign = await findMatchingCampaign(utmData)
    if (!matchedCampaign) {
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'no_matching_campaign' })
    }

    const bonusAmount = Number(matchedCampaign.bonusCreditAmount)

    let attributed = false
    await db.transaction(async (tx) => {
      const [existingStats] = await tx
        .select({ id: userStats.id })
        .from(userStats)
        .where(eq(userStats.userId, session.user.id))
        .limit(1)

      if (!existingStats) {
        await tx.insert(userStats).values({
          id: nanoid(),
          userId: session.user.id,
        })
      }

      const result = await tx
        .insert(referralAttribution)
        .values({
          id: nanoid(),
          userId: session.user.id,
          campaignId: matchedCampaign.id,
          utmSource: utmData.utm_source || null,
          utmMedium: utmData.utm_medium || null,
          utmCampaign: utmData.utm_campaign || null,
          utmContent: utmData.utm_content || null,
          referrerUrl: utmData.referrer_url || null,
          landingPage: utmData.landing_page || null,
          bonusCreditAmount: bonusAmount.toString(),
        })
        .onConflictDoNothing({ target: referralAttribution.userId })
        .returning({ id: referralAttribution.id })

      if (result.length > 0) {
        await applyBonusCredits(session.user.id, bonusAmount, tx)
        attributed = true
      }
    })

    if (attributed) {
      logger.info('Referral attribution created and bonus credits applied', {
        userId: session.user.id,
        campaignId: matchedCampaign.id,
        campaignName: matchedCampaign.name,
        utmSource: utmData.utm_source,
        utmCampaign: utmData.utm_campaign,
        utmContent: utmData.utm_content,
        bonusAmount,
      })
    } else {
      logger.info('User already attributed, skipping', { userId: session.user.id })
    }

    cookieStore.delete(COOKIE_NAME)

    return NextResponse.json({
      attributed,
      bonusAmount: attributed ? bonusAmount : undefined,
      reason: attributed ? undefined : 'already_attributed',
    })
  } catch (error) {
    logger.error('Attribution error', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
```
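Because the attribution route is cookie-driven and takes no request body, a client only needs to issue a single POST after sign-in. A minimal sketch, assuming it runs in the browser where the `sim_utm` cookie is sent automatically:

```ts
// Minimal client-side sketch: call the new attribution endpoint once after
// sign-in. The route reads the sim_utm cookie itself, so no body is needed.
// Endpoint and response shape come from the route above; where this runs and
// how errors are surfaced are assumptions.
async function claimReferralAttribution(): Promise<void> {
  const res = await fetch('/api/attribution', { method: 'POST' })
  if (!res.ok) return // 401 before sign-in, 500 on server error

  const data: { attributed: boolean; bonusAmount?: number; reason?: string } = await res.json()
  if (data.attributed) {
    console.log(`Referral bonus applied: ${data.bonusAmount} credits`)
  } else {
    // e.g. 'no_utm_cookie', 'no_matching_campaign', 'already_attributed'
    console.log(`No attribution recorded: ${data.reason}`)
  }
}
```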
```diff
@@ -4,20 +4,10 @@
  * @vitest-environment node
  */
 
-import { loggerMock } from '@sim/testing'
+import { databaseMock, loggerMock } from '@sim/testing'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-vi.mock('@sim/db', () => ({
-  db: {
-    select: vi.fn().mockReturnThis(),
-    from: vi.fn().mockReturnThis(),
-    where: vi.fn().mockReturnThis(),
-    limit: vi.fn().mockReturnValue([]),
-    update: vi.fn().mockReturnThis(),
-    set: vi.fn().mockReturnThis(),
-    orderBy: vi.fn().mockReturnThis(),
-  },
-}))
+vi.mock('@sim/db', () => databaseMock)
 
 vi.mock('@/lib/oauth/oauth', () => ({
   refreshOAuthToken: vi.fn(),
@@ -34,13 +24,36 @@ import {
   refreshTokenIfNeeded,
 } from '@/app/api/auth/oauth/utils'
 
-const mockDbTyped = db as any
+const mockDb = db as any
 const mockRefreshOAuthToken = refreshOAuthToken as any
 
+/**
+ * Creates a chainable mock for db.select() calls.
+ * Returns a nested chain: select() -> from() -> where() -> limit() / orderBy()
+ */
+function mockSelectChain(limitResult: unknown[]) {
+  const mockLimit = vi.fn().mockReturnValue(limitResult)
+  const mockOrderBy = vi.fn().mockReturnValue(limitResult)
+  const mockWhere = vi.fn().mockReturnValue({ limit: mockLimit, orderBy: mockOrderBy })
+  const mockFrom = vi.fn().mockReturnValue({ where: mockWhere })
+  mockDb.select.mockReturnValueOnce({ from: mockFrom })
+  return { mockFrom, mockWhere, mockLimit }
+}
+
+/**
+ * Creates a chainable mock for db.update() calls.
+ * Returns a nested chain: update() -> set() -> where()
+ */
+function mockUpdateChain() {
+  const mockWhere = vi.fn().mockResolvedValue({})
+  const mockSet = vi.fn().mockReturnValue({ where: mockWhere })
+  mockDb.update.mockReturnValueOnce({ set: mockSet })
+  return { mockSet, mockWhere }
+}
+
 describe('OAuth Utils', () => {
   beforeEach(() => {
     vi.clearAllMocks()
-    mockDbTyped.limit.mockReturnValue([])
   })
 
   afterEach(() => {
@@ -50,20 +63,20 @@ describe('OAuth Utils', () => {
   describe('getCredential', () => {
     it('should return credential when found', async () => {
       const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      const { mockFrom, mockWhere, mockLimit } = mockSelectChain([mockCredential])
 
       const credential = await getCredential('request-id', 'credential-id', 'test-user-id')
 
-      expect(mockDbTyped.select).toHaveBeenCalled()
-      expect(mockDbTyped.from).toHaveBeenCalled()
-      expect(mockDbTyped.where).toHaveBeenCalled()
-      expect(mockDbTyped.limit).toHaveBeenCalledWith(1)
+      expect(mockDb.select).toHaveBeenCalled()
+      expect(mockFrom).toHaveBeenCalled()
+      expect(mockWhere).toHaveBeenCalled()
+      expect(mockLimit).toHaveBeenCalledWith(1)
 
       expect(credential).toEqual(mockCredential)
     })
 
     it('should return undefined when credential is not found', async () => {
-      mockDbTyped.limit.mockReturnValueOnce([])
+      mockSelectChain([])
 
       const credential = await getCredential('request-id', 'nonexistent-id', 'test-user-id')
 
@@ -102,11 +115,12 @@ describe('OAuth Utils', () => {
         refreshToken: 'new-refresh-token',
       })
 
+      mockUpdateChain()
+
      const result = await refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
 
      expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
-      expect(mockDbTyped.update).toHaveBeenCalled()
-      expect(mockDbTyped.set).toHaveBeenCalled()
+      expect(mockDb.update).toHaveBeenCalled()
      expect(result).toEqual({ accessToken: 'new-token', refreshed: true })
    })
 
@@ -152,7 +166,7 @@ describe('OAuth Utils', () => {
        providerId: 'google',
        userId: 'test-user-id',
      }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      mockSelectChain([mockCredential])
 
      const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
 
@@ -169,7 +183,8 @@ describe('OAuth Utils', () => {
        providerId: 'google',
        userId: 'test-user-id',
      }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      mockSelectChain([mockCredential])
+      mockUpdateChain()
 
      mockRefreshOAuthToken.mockResolvedValueOnce({
        accessToken: 'new-token',
@@ -180,13 +195,12 @@ describe('OAuth Utils', () => {
      const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
 
      expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
-      expect(mockDbTyped.update).toHaveBeenCalled()
-      expect(mockDbTyped.set).toHaveBeenCalled()
+      expect(mockDb.update).toHaveBeenCalled()
      expect(token).toBe('new-token')
    })
 
    it('should return null if credential not found', async () => {
-      mockDbTyped.limit.mockReturnValueOnce([])
+      mockSelectChain([])
 
      const token = await refreshAccessTokenIfNeeded('nonexistent-id', 'test-user-id', 'request-id')
 
@@ -202,7 +216,7 @@ describe('OAuth Utils', () => {
        providerId: 'google',
        userId: 'test-user-id',
      }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      mockSelectChain([mockCredential])
 
      mockRefreshOAuthToken.mockResolvedValueOnce(null)
 
```
```diff
@@ -1,81 +1,145 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
-import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
-import { env } from '@/lib/core/config/env'
-
-const logger = createLogger('CopilotAutoAllowedToolsAPI')
-
-function copilotHeaders(): HeadersInit {
-  const headers: Record<string, string> = {
-    'Content-Type': 'application/json',
-  }
-  if (env.COPILOT_API_KEY) {
-    headers['x-api-key'] = env.COPILOT_API_KEY
-  }
-  return headers
-}
-
-export async function DELETE(request: NextRequest) {
-  const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
-  if (!isAuthenticated || !userId) {
-    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-  }
-
-  const toolIdFromQuery = new URL(request.url).searchParams.get('toolId') || undefined
-  const toolIdFromBody = await request
-    .json()
-    .then((body) => (typeof body?.toolId === 'string' ? body.toolId : undefined))
-    .catch(() => undefined)
-  const toolId = toolIdFromBody || toolIdFromQuery
-  if (!toolId) {
-    return NextResponse.json({ error: 'toolId is required' }, { status: 400 })
-  }
-
-  try {
-    const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
-      method: 'DELETE',
-      headers: copilotHeaders(),
-      body: JSON.stringify({
-        userId,
-        toolId,
-      }),
-    })
-
-    const payload = await res.json().catch(() => ({}))
-    if (!res.ok) {
-      logger.warn('Failed to remove auto-allowed tool via copilot backend', {
-        status: res.status,
-        userId,
-        toolId,
-      })
-      return NextResponse.json(
-        {
-          success: false,
-          error: payload?.error || 'Failed to remove auto-allowed tool',
-          autoAllowedTools: [],
-        },
-        { status: res.status }
-      )
-    }
-
-    return NextResponse.json({
-      success: true,
-      autoAllowedTools: Array.isArray(payload?.autoAllowedTools) ? payload.autoAllowedTools : [],
-    })
-  } catch (error) {
-    logger.error('Error removing auto-allowed tool', {
-      userId,
-      toolId,
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return NextResponse.json(
-      {
-        success: false,
-        error: 'Failed to remove auto-allowed tool',
-        autoAllowedTools: [],
-      },
-      { status: 500 }
-    )
-  }
-}
+import { db } from '@sim/db'
+import { settings } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { eq } from 'drizzle-orm'
+import { type NextRequest, NextResponse } from 'next/server'
+import { getSession } from '@/lib/auth'
+
+const logger = createLogger('CopilotAutoAllowedToolsAPI')
+
+/**
+ * GET - Fetch user's auto-allowed integration tools
+ */
+export async function GET() {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+
+    const [userSettings] = await db
+      .select()
+      .from(settings)
+      .where(eq(settings.userId, userId))
+      .limit(1)
+
+    if (userSettings) {
+      const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
+      return NextResponse.json({ autoAllowedTools })
+    }
+
+    await db.insert(settings).values({
+      id: userId,
+      userId,
+      copilotAutoAllowedTools: [],
+    })
+
+    return NextResponse.json({ autoAllowedTools: [] })
+  } catch (error) {
+    logger.error('Failed to fetch auto-allowed tools', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * POST - Add a tool to the auto-allowed list
+ */
+export async function POST(request: NextRequest) {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+    const body = await request.json()
+
+    if (!body.toolId || typeof body.toolId !== 'string') {
+      return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
+    }
+
+    const toolId = body.toolId
+
+    const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
+
+    if (existing) {
+      const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
+
+      if (!currentTools.includes(toolId)) {
+        const updatedTools = [...currentTools, toolId]
+        await db
+          .update(settings)
+          .set({
+            copilotAutoAllowedTools: updatedTools,
+            updatedAt: new Date(),
+          })
+          .where(eq(settings.userId, userId))
+
+        logger.info('Added tool to auto-allowed list', { userId, toolId })
+        return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
+      }
+
+      return NextResponse.json({ success: true, autoAllowedTools: currentTools })
+    }
+
+    await db.insert(settings).values({
+      id: userId,
+      userId,
+      copilotAutoAllowedTools: [toolId],
+    })
+
+    logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
+    return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
+  } catch (error) {
+    logger.error('Failed to add auto-allowed tool', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * DELETE - Remove a tool from the auto-allowed list
+ */
+export async function DELETE(request: NextRequest) {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+    const { searchParams } = new URL(request.url)
+    const toolId = searchParams.get('toolId')
+
+    if (!toolId) {
+      return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
+    }
+
+    const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
+
+    if (existing) {
+      const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
+      const updatedTools = currentTools.filter((t) => t !== toolId)
+
+      await db
+        .update(settings)
+        .set({
+          copilotAutoAllowedTools: updatedTools,
+          updatedAt: new Date(),
+        })
+        .where(eq(settings.userId, userId))
+
+      logger.info('Removed tool from auto-allowed list', { userId, toolId })
+      return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
+    }
+
+    return NextResponse.json({ success: true, autoAllowedTools: [] })
+  } catch (error) {
+    logger.error('Failed to remove auto-allowed tool', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
```
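The rewritten route persists the auto-allowed list in the user's `settings` row instead of proxying to the copilot backend. A hedged sketch of calling its three handlers from the client follows; the route path is an assumption inferred from the handler names, not something stated in the diff.

```ts
// Hedged sketch of calling the rewritten auto-allowed-tools route from the
// browser. The GET/POST/DELETE handlers come from the file shown above; the
// exact URL is an assumption.
const BASE = '/api/copilot/auto-allowed-tools' // assumed path for this route file

export async function listAutoAllowedTools(): Promise<string[]> {
  const res = await fetch(BASE)
  const { autoAllowedTools } = await res.json()
  return autoAllowedTools ?? []
}

export async function addAutoAllowedTool(toolId: string): Promise<string[]> {
  const res = await fetch(BASE, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolId }),
  })
  const { autoAllowedTools } = await res.json()
  return autoAllowedTools ?? []
}

export async function removeAutoAllowedTool(toolId: string): Promise<string[]> {
  // The DELETE handler reads toolId from the query string.
  const res = await fetch(`${BASE}?toolId=${encodeURIComponent(toolId)}`, { method: 'DELETE' })
  const { autoAllowedTools } = await res.json()
  return autoAllowedTools ?? []
}
```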
```diff
@@ -28,24 +28,13 @@ import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
 
 const logger = createLogger('CopilotChatAPI')
 
-function truncateForLog(value: string, maxLength = 120): string {
-  if (!value || maxLength <= 0) return ''
-  return value.length <= maxLength ? value : `${value.slice(0, maxLength)}...`
-}
-
 async function requestChatTitleFromCopilot(params: {
   message: string
   model: string
   provider?: string
 }): Promise<string | null> {
   const { message, model, provider } = params
-  if (!message || !model) {
-    logger.warn('Skipping chat title request because message/model is missing', {
-      hasMessage: !!message,
-      hasModel: !!model,
-    })
-    return null
-  }
+  if (!message || !model) return null
 
   const headers: Record<string, string> = {
     'Content-Type': 'application/json',
@@ -55,13 +44,6 @@ async function requestChatTitleFromCopilot(params: {
   }
 
   try {
-    logger.info('Requesting chat title from copilot backend', {
-      model,
-      provider: provider || null,
-      messageLength: message.length,
-      messagePreview: truncateForLog(message),
-    })
-
     const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
       method: 'POST',
       headers,
@@ -81,32 +63,10 @@ async function requestChatTitleFromCopilot(params: {
       return null
     }
 
-    const rawTitle = typeof payload?.title === 'string' ? payload.title : ''
-    const title = rawTitle.trim()
-    logger.info('Received chat title response from copilot backend', {
-      status: response.status,
-      hasRawTitle: !!rawTitle,
-      rawTitle,
-      normalizedTitle: title,
-      messagePreview: truncateForLog(message),
-    })
-
-    if (!title) {
-      logger.warn('Copilot backend returned empty chat title', {
-        payload,
-        model,
-        provider: provider || null,
-      })
-    }
-
+    const title = typeof payload?.title === 'string' ? payload.title.trim() : ''
     return title || null
   } catch (error) {
-    logger.error('Error generating chat title:', {
-      error,
-      model,
-      provider: provider || null,
-      messagePreview: truncateForLog(message),
-    })
+    logger.error('Error generating chat title:', error)
     return null
   }
 }
@@ -125,7 +85,7 @@ const ChatMessageSchema = z.object({
   chatId: z.string().optional(),
   workflowId: z.string().optional(),
   workflowName: z.string().optional(),
-  model: z.string().optional().default('claude-opus-4-6'),
+  model: z.string().optional().default('claude-opus-4-5'),
   mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
   prefetch: z.boolean().optional(),
   createNewChat: z.boolean().optional().default(false),
@@ -278,8 +238,7 @@ export async function POST(req: NextRequest) {
     let currentChat: any = null
     let conversationHistory: any[] = []
     let actualChatId = chatId
-    let chatWasCreatedForRequest = false
-    const selectedModel = model || 'claude-opus-4-6'
+    const selectedModel = model || 'claude-opus-4-5'
 
     if (chatId || createNewChat) {
       const chatResult = await resolveOrCreateChat({
@@ -290,7 +249,6 @@ export async function POST(req: NextRequest) {
       })
       currentChat = chatResult.chat
      actualChatId = chatResult.chatId || chatId
-      chatWasCreatedForRequest = chatResult.isNew
      const history = buildConversationHistory(
        chatResult.conversationHistory,
        (chatResult.chat?.conversationId as string | undefined) || conversationId
@@ -298,18 +256,6 @@ export async function POST(req: NextRequest) {
      conversationHistory = history.history
    }
 
-    const shouldGenerateTitleForRequest =
-      !!actualChatId &&
-      chatWasCreatedForRequest &&
-      !currentChat?.title &&
-      conversationHistory.length === 0
-
-    const titleGenerationParams = {
-      message,
-      model: selectedModel,
-      provider,
-    }
-
    const effectiveMode = mode === 'agent' ? 'build' : mode
    const effectiveConversationId =
      (currentChat?.conversationId as string | undefined) || conversationId
@@ -402,22 +348,10 @@ export async function POST(req: NextRequest) {
          await pushEvent({ type: 'chat_id', chatId: actualChatId })
        }
 
-        if (shouldGenerateTitleForRequest) {
-          logger.info(`[${tracker.requestId}] Starting title generation for streaming response`, {
-            chatId: actualChatId,
-            model: titleGenerationParams.model,
-            provider: provider || null,
-            messageLength: message.length,
-            messagePreview: truncateForLog(message),
-            chatWasCreatedForRequest,
-          })
-          requestChatTitleFromCopilot(titleGenerationParams)
+        if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
+          requestChatTitleFromCopilot({ message, model: selectedModel, provider })
            .then(async (title) => {
              if (title) {
-                logger.info(`[${tracker.requestId}] Generated title for streaming response`, {
-                  chatId: actualChatId,
-                  title,
-                })
                await db
                  .update(copilotChats)
                  .set({
@@ -425,30 +359,12 @@ export async function POST(req: NextRequest) {
                    updatedAt: new Date(),
                  })
                  .where(eq(copilotChats.id, actualChatId!))
-                await pushEvent({ type: 'title_updated', title, chatId: actualChatId })
-                logger.info(`[${tracker.requestId}] Emitted title_updated SSE event`, {
-                  chatId: actualChatId,
-                  title,
-                })
-              } else {
-                logger.warn(`[${tracker.requestId}] No title returned for streaming response`, {
-                  chatId: actualChatId,
-                  model: selectedModel,
-                })
+                await pushEvent({ type: 'title_updated', title })
              }
            })
            .catch((error) => {
              logger.error(`[${tracker.requestId}] Title generation failed:`, error)
            })
-        } else if (actualChatId && !chatWasCreatedForRequest) {
-          logger.info(
-            `[${tracker.requestId}] Skipping title generation because chat already exists`,
-            {
-              chatId: actualChatId,
-              model: titleGenerationParams.model,
-              provider: provider || null,
-            }
-          )
        }
 
        try {
@@ -563,9 +479,9 @@ export async function POST(req: NextRequest) {
      const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
 
      // Start title generation in parallel if this is first message (non-streaming)
-      if (shouldGenerateTitleForRequest) {
+      if (actualChatId && !currentChat.title && conversationHistory.length === 0) {
        logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`)
-        requestChatTitleFromCopilot(titleGenerationParams)
+        requestChatTitleFromCopilot({ message, model: selectedModel, provider })
          .then(async (title) => {
            if (title) {
              await db
@@ -576,22 +492,11 @@ export async function POST(req: NextRequest) {
                })
                .where(eq(copilotChats.id, actualChatId!))
              logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
-            } else {
-              logger.warn(`[${tracker.requestId}] No title returned for non-streaming response`, {
-                chatId: actualChatId,
-                model: selectedModel,
-              })
            }
          })
          .catch((error) => {
            logger.error(`[${tracker.requestId}] Title generation failed:`, error)
          })
-      } else if (actualChatId && !chatWasCreatedForRequest) {
-        logger.info(`[${tracker.requestId}] Skipping title generation because chat already exists`, {
-          chatId: actualChatId,
-          model: titleGenerationParams.model,
-          provider: provider || null,
-        })
      }
 
      // Update chat in database immediately (without blocking for title)
```
```diff
@@ -18,9 +18,9 @@ describe('Copilot Checkpoints Revert API Route', () => {
     setupCommonApiMocks()
     mockCryptoUuid()
 
-    // Mock getBaseUrl to return localhost for tests
     vi.doMock('@/lib/core/utils/urls', () => ({
       getBaseUrl: vi.fn(() => 'http://localhost:3000'),
+      getInternalApiBaseUrl: vi.fn(() => 'http://localhost:3000'),
       getBaseDomain: vi.fn(() => 'localhost:3000'),
       getEmailDomain: vi.fn(() => 'localhost:3000'),
     }))
```
```diff
@@ -11,7 +11,7 @@ import {
   createRequestTracker,
   createUnauthorizedResponse,
 } from '@/lib/copilot/request-helpers'
-import { getBaseUrl } from '@/lib/core/utils/urls'
+import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
 import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
 import { isUuidV4 } from '@/executor/constants'
 
@@ -99,7 +99,7 @@ export async function POST(request: NextRequest) {
     }
 
     const stateResponse = await fetch(
-      `${getBaseUrl()}/api/workflows/${checkpoint.workflowId}/state`,
+      `${getInternalApiBaseUrl()}/api/workflows/${checkpoint.workflowId}/state`,
       {
         method: 'PUT',
         headers: {
```
```diff
@@ -1,11 +1,7 @@
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
-import {
-  REDIS_TOOL_CALL_PREFIX,
-  REDIS_TOOL_CALL_TTL_SECONDS,
-  SIM_AGENT_API_URL,
-} from '@/lib/copilot/constants'
+import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
 import {
   authenticateCopilotRequestSessionOnly,
   createBadRequestResponse,
@@ -14,7 +10,6 @@ import {
   createUnauthorizedResponse,
   type NotificationStatus,
 } from '@/lib/copilot/request-helpers'
-import { env } from '@/lib/core/config/env'
 import { getRedisClient } from '@/lib/core/config/redis'
 
 const logger = createLogger('CopilotConfirmAPI')
@@ -26,8 +21,6 @@ const ConfirmationSchema = z.object({
     errorMap: () => ({ message: 'Invalid notification status' }),
   }),
   message: z.string().optional(), // Optional message for background moves or additional context
-  toolName: z.string().optional(),
-  remember: z.boolean().optional(),
 })
 
 /**
@@ -64,44 +57,6 @@ async function updateToolCallStatus(
   }
 }
 
-async function saveAutoAllowedToolPreference(userId: string, toolName: string): Promise<boolean> {
-  const headers: Record<string, string> = {
-    'Content-Type': 'application/json',
-  }
-  if (env.COPILOT_API_KEY) {
-    headers['x-api-key'] = env.COPILOT_API_KEY
-  }
-
-  try {
-    const response = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
-      method: 'POST',
-      headers,
-      body: JSON.stringify({
-        userId,
-        toolId: toolName,
-      }),
-    })
-
-    if (!response.ok) {
-      logger.warn('Failed to persist auto-allowed tool preference', {
-        userId,
-        toolName,
-        status: response.status,
-      })
-      return false
-    }
-
-    return true
-  } catch (error) {
-    logger.error('Error persisting auto-allowed tool preference', {
-      userId,
-      toolName,
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return false
-  }
-}
-
 /**
  * POST /api/copilot/confirm
  * Update tool call status (Accept/Reject)
@@ -119,7 +74,7 @@ export async function POST(req: NextRequest) {
     }
 
     const body = await req.json()
-    const { toolCallId, status, message, toolName, remember } = ConfirmationSchema.parse(body)
+    const { toolCallId, status, message } = ConfirmationSchema.parse(body)
 
     // Update the tool call status in Redis
     const updated = await updateToolCallStatus(toolCallId, status, message)
@@ -135,22 +90,14 @@ export async function POST(req: NextRequest) {
       return createBadRequestResponse('Failed to update tool call status or tool call not found')
     }
 
-    let rememberSaved = false
-    if (status === 'accepted' && remember === true && toolName && authenticatedUserId) {
-      rememberSaved = await saveAutoAllowedToolPreference(authenticatedUserId, toolName)
-    }
+    const duration = tracker.getDuration()
 
-    const response: Record<string, unknown> = {
+    return NextResponse.json({
       success: true,
       message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
       toolCallId,
       status,
-    }
-    if (remember === true) {
-      response.rememberSaved = rememberSaved
-    }
-
-    return NextResponse.json(response)
+    })
   } catch (error) {
     const duration = tracker.getDuration()
 
```

```diff
@@ -4,16 +4,12 @@
  *
  * @vitest-environment node
  */
-import { createEnvMock, createMockLogger } from '@sim/testing'
+import { createEnvMock, databaseMock, loggerMock } from '@sim/testing'
 import { beforeEach, describe, expect, it, vi } from 'vitest'
 
-const loggerMock = vi.hoisted(() => ({
-  createLogger: () => createMockLogger(),
-}))
-
 vi.mock('drizzle-orm')
 vi.mock('@sim/logger', () => loggerMock)
-vi.mock('@sim/db')
+vi.mock('@sim/db', () => databaseMock)
 vi.mock('@/lib/knowledge/documents/utils', () => ({
   retryWithExponentialBackoff: (fn: any) => fn(),
 }))
```

```diff
@@ -1,89 +0,0 @@
-/**
- * @vitest-environment node
- */
-
-import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
-
-describe('mcp copilot route manifest contract', () => {
-  const previousInternalSecret = process.env.INTERNAL_API_SECRET
-  const previousAgentUrl = process.env.SIM_AGENT_API_URL
-  const previousFetch = global.fetch
-
-  beforeEach(() => {
-    vi.resetModules()
-    process.env.INTERNAL_API_SECRET = 'x'.repeat(32)
-    process.env.SIM_AGENT_API_URL = 'https://copilot.sim.ai'
-  })
-
-  afterEach(() => {
-    vi.restoreAllMocks()
-    global.fetch = previousFetch
-    if (previousInternalSecret === undefined) {
-      delete process.env.INTERNAL_API_SECRET
-    } else {
-      process.env.INTERNAL_API_SECRET = previousInternalSecret
-    }
-    if (previousAgentUrl === undefined) {
-      delete process.env.SIM_AGENT_API_URL
-    } else {
-      process.env.SIM_AGENT_API_URL = previousAgentUrl
-    }
-  })
-
-  it('loads and caches tool manifest from copilot backend', async () => {
-    const payload = {
-      directTools: [
-        {
-          name: 'list_workspaces',
-          description: 'List workspaces',
-          inputSchema: { type: 'object', properties: {} },
-          toolId: 'list_user_workspaces',
-        },
-      ],
-      subagentTools: [
-        {
-          name: 'sim_build',
-          description: 'Build workflows',
-          inputSchema: { type: 'object', properties: {} },
-          agentId: 'build',
-        },
-      ],
-      generatedAt: '2026-02-12T00:00:00Z',
-    }
-
-    const fetchSpy = vi.spyOn(global, 'fetch').mockResolvedValue(
-      new Response(JSON.stringify(payload), {
-        status: 200,
-        headers: { 'Content-Type': 'application/json' },
-      })
-    )
-
-    const mod = await import('./route')
-    mod.clearMcpToolManifestCacheForTests()
-
-    const first = await mod.getMcpToolManifest()
-    const second = await mod.getMcpToolManifest()
-
-    expect(first).toEqual(payload)
-    expect(second).toEqual(payload)
-    expect(fetchSpy).toHaveBeenCalledTimes(1)
-    expect(fetchSpy.mock.calls[0]?.[0]).toBe('https://copilot.sim.ai/api/mcp/tools/manifest')
-  })
-
-  it('rejects invalid manifest payloads from copilot backend', async () => {
-    const fetchSpy = vi.spyOn(global, 'fetch').mockResolvedValue(
-      new Response(JSON.stringify({ tools: [] }), {
-        status: 200,
-        headers: { 'Content-Type': 'application/json' },
-      })
-    )
-
-    const mod = await import('./route')
-    mod.clearMcpToolManifestCacheForTests()
-
-    await expect(mod.fetchMcpToolManifestFromCopilot()).rejects.toThrow(
-      'invalid manifest payload from copilot'
-    )
-    expect(fetchSpy).toHaveBeenCalledTimes(1)
-  })
-})
```
@@ -28,6 +28,7 @@ import {
   executeToolServerSide,
   prepareExecutionContext,
 } from '@/lib/copilot/orchestrator/tool-executor'
+import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
 import { env } from '@/lib/core/config/env'
 import { RateLimiter } from '@/lib/core/rate-limiter'
 import {
@@ -37,33 +38,7 @@ import {
 
 const logger = createLogger('CopilotMcpAPI')
 const mcpRateLimiter = new RateLimiter()
-const DEFAULT_COPILOT_MODEL = 'claude-opus-4-6'
+const DEFAULT_COPILOT_MODEL = 'claude-opus-4-5'
-const MCP_TOOL_MANIFEST_CACHE_TTL_MS = 60_000
-
-type McpDirectToolDef = {
-  name: string
-  description: string
-  inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
-  toolId: string
-}
-
-type McpSubagentToolDef = {
-  name: string
-  description: string
-  inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
-  agentId: string
-}
-
-type McpToolManifest = {
-  directTools: McpDirectToolDef[]
-  subagentTools: McpSubagentToolDef[]
-  generatedAt?: string
-}
-
-let cachedMcpToolManifest: {
-  value: McpToolManifest
-  expiresAt: number
-} | null = null
 
 export const dynamic = 'force-dynamic'
 export const runtime = 'nodejs'
@@ -137,58 +112,6 @@ async function authenticateCopilotApiKey(apiKey: string): Promise<CopilotKeyAuth
   }
 }
 
-export function isMcpToolManifest(value: unknown): value is McpToolManifest {
-  if (!value || typeof value !== 'object') return false
-  const payload = value as Record<string, unknown>
-  return Array.isArray(payload.directTools) && Array.isArray(payload.subagentTools)
-}
-
-export async function fetchMcpToolManifestFromCopilot(): Promise<McpToolManifest> {
-  const internalSecret = env.INTERNAL_API_SECRET
-  if (!internalSecret) {
-    throw new Error('INTERNAL_API_SECRET not configured')
-  }
-
-  const res = await fetch(`${SIM_AGENT_API_URL}/api/mcp/tools/manifest`, {
-    method: 'GET',
-    headers: {
-      'Content-Type': 'application/json',
-      'x-api-key': internalSecret,
-    },
-    signal: AbortSignal.timeout(10_000),
-  })
-
-  if (!res.ok) {
-    const bodyText = await res.text().catch(() => '')
-    throw new Error(`manifest fetch failed (${res.status}): ${bodyText || res.statusText}`)
-  }
-
-  const payload: unknown = await res.json()
-  if (!isMcpToolManifest(payload)) {
-    throw new Error('invalid manifest payload from copilot')
-  }
-
-  return payload
-}
-
-export async function getMcpToolManifest(): Promise<McpToolManifest> {
-  const now = Date.now()
-  if (cachedMcpToolManifest && cachedMcpToolManifest.expiresAt > now) {
-    return cachedMcpToolManifest.value
-  }
-
-  const manifest = await fetchMcpToolManifestFromCopilot()
-  cachedMcpToolManifest = {
-    value: manifest,
-    expiresAt: now + MCP_TOOL_MANIFEST_CACHE_TTL_MS,
-  }
-  return manifest
-}
-
-export function clearMcpToolManifestCacheForTests(): void {
-  cachedMcpToolManifest = null
-}
-
 /**
  * MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
  * This is included in the initialize response to help external LLMs understand
@@ -457,15 +380,13 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
   )
 
   server.setRequestHandler(ListToolsRequestSchema, async () => {
-    const manifest = await getMcpToolManifest()
-
-    const directTools = manifest.directTools.map((tool) => ({
+    const directTools = DIRECT_TOOL_DEFS.map((tool) => ({
       name: tool.name,
       description: tool.description,
       inputSchema: tool.inputSchema,
    }))
 
-    const subagentTools = manifest.subagentTools.map((tool) => ({
+    const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
       name: tool.name,
       description: tool.description,
       inputSchema: tool.inputSchema,
@@ -534,15 +455,12 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
       throw new McpError(ErrorCode.InvalidParams, 'Tool name required')
     }
 
-    const manifest = await getMcpToolManifest()
-
     const result = await handleToolsCall(
       {
         name: params.name,
         arguments: params.arguments,
       },
       authResult.userId,
-      manifest,
       abortSignal
     )
 
@@ -638,17 +556,16 @@ function trackMcpCopilotCall(userId: string): void {
 async function handleToolsCall(
   params: { name: string; arguments?: Record<string, unknown> },
   userId: string,
-  manifest: McpToolManifest,
   abortSignal?: AbortSignal
 ): Promise<CallToolResult> {
   const args = params.arguments || {}
 
-  const directTool = manifest.directTools.find((tool) => tool.name === params.name)
+  const directTool = DIRECT_TOOL_DEFS.find((tool) => tool.name === params.name)
   if (directTool) {
     return handleDirectToolCall(directTool, args, userId)
   }
 
-  const subagentTool = manifest.subagentTools.find((tool) => tool.name === params.name)
+  const subagentTool = SUBAGENT_TOOL_DEFS.find((tool) => tool.name === params.name)
   if (subagentTool) {
     return handleSubagentToolCall(subagentTool, args, userId, abortSignal)
   }
@@ -657,7 +574,7 @@ async function handleToolsCall(
 }
 
 async function handleDirectToolCall(
-  toolDef: McpDirectToolDef,
+  toolDef: (typeof DIRECT_TOOL_DEFS)[number],
   args: Record<string, unknown>,
   userId: string
 ): Promise<CallToolResult> {
@@ -794,7 +711,7 @@ async function handleBuildToolCall(
 }
 
 async function handleSubagentToolCall(
-  toolDef: McpSubagentToolDef,
+  toolDef: (typeof SUBAGENT_TOOL_DEFS)[number],
   args: Record<string, unknown>,
   userId: string,
   abortSignal?: AbortSignal
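Note on the hunks above: the route now reads its tool lists from the statically imported DIRECT_TOOL_DEFS and SUBAGENT_TOOL_DEFS instead of fetching and caching a manifest from the copilot backend. The definitions module itself is not part of this diff, so the following is only a sketch of the entry shape the call sites imply (name, description, inputSchema, plus toolId or agentId, mirroring the removed manifest types):

// Sketch only: assumed shape of @/lib/copilot/tools/mcp/definitions entries,
// inferred from the deleted McpDirectToolDef/McpSubagentToolDef types, not from the module itself.
export const DIRECT_TOOL_DEFS = [
  {
    name: 'list_workspaces',
    description: 'List workspaces',
    inputSchema: { type: 'object' as const, properties: {} },
    toolId: 'list_user_workspaces',
  },
] as const

export const SUBAGENT_TOOL_DEFS = [
  {
    name: 'sim_build',
    description: 'Build workflows',
    inputSchema: { type: 'object' as const, properties: {} },
    agentId: 'build',
  },
] as const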
@@ -72,6 +72,7 @@ describe('MCP Serve Route', () => {
     }))
     vi.doMock('@/lib/core/utils/urls', () => ({
       getBaseUrl: () => 'http://localhost:3000',
+      getInternalApiBaseUrl: () => 'http://localhost:3000',
     }))
     vi.doMock('@/lib/core/execution-limits', () => ({
       getMaxExecutionTimeout: () => 10_000,
@@ -22,7 +22,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { type AuthResult, checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateInternalToken } from '@/lib/auth/internal'
 import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
-import { getBaseUrl } from '@/lib/core/utils/urls'
+import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
 import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
 
 const logger = createLogger('WorkflowMcpServeAPI')
@@ -285,7 +285,7 @@ async function handleToolsCall(
     )
   }
 
-  const executeUrl = `${getBaseUrl()}/api/workflows/${tool.workflowId}/execute`
+  const executeUrl = `${getInternalApiBaseUrl()}/api/workflows/${tool.workflowId}/execute`
   const headers: Record<string, string> = { 'Content-Type': 'application/json' }
 
   if (publicServerOwnerId) {
apps/sim/app/api/referral-code/redeem/route.ts (new file, 170 lines)
@@ -0,0 +1,170 @@
+/**
+ * POST /api/referral-code/redeem
+ *
+ * Redeem a referral/promo code to receive bonus credits.
+ *
+ * Body:
+ * - code: string — The referral code to redeem
+ *
+ * Response: { redeemed: boolean, bonusAmount?: number, error?: string }
+ *
+ * Constraints:
+ * - Enterprise users cannot redeem codes
+ * - One redemption per user, ever (unique constraint on userId)
+ * - One redemption per organization for team users (partial unique on organizationId)
+ */
+
+import { db } from '@sim/db'
+import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { and, eq } from 'drizzle-orm'
+import { nanoid } from 'nanoid'
+import { NextResponse } from 'next/server'
+import { z } from 'zod'
+import { getSession } from '@/lib/auth'
+import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
+import { applyBonusCredits } from '@/lib/billing/credits/bonus'
+
+const logger = createLogger('ReferralCodeRedemption')
+
+const RedeemCodeSchema = z.object({
+  code: z.string().min(1, 'Code is required'),
+})
+
+export async function POST(request: Request) {
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const body = await request.json()
+    const { code } = RedeemCodeSchema.parse(body)
+
+    const subscription = await getHighestPrioritySubscription(session.user.id)
+
+    if (subscription?.plan === 'enterprise') {
+      return NextResponse.json({
+        redeemed: false,
+        error: 'Enterprise accounts cannot redeem referral codes',
+      })
+    }
+
+    const isTeam = subscription?.plan === 'team'
+    const orgId = isTeam ? subscription.referenceId : null
+
+    const normalizedCode = code.trim().toUpperCase()
+
+    const [campaign] = await db
+      .select()
+      .from(referralCampaigns)
+      .where(and(eq(referralCampaigns.code, normalizedCode), eq(referralCampaigns.isActive, true)))
+      .limit(1)
+
+    if (!campaign) {
+      logger.info('Invalid code redemption attempt', {
+        userId: session.user.id,
+        code: normalizedCode,
+      })
+      return NextResponse.json({ error: 'Invalid or expired code' }, { status: 404 })
+    }
+
+    const [existingUserAttribution] = await db
+      .select({ id: referralAttribution.id })
+      .from(referralAttribution)
+      .where(eq(referralAttribution.userId, session.user.id))
+      .limit(1)
+
+    if (existingUserAttribution) {
+      return NextResponse.json({
+        redeemed: false,
+        error: 'You have already redeemed a code',
+      })
+    }
+
+    if (orgId) {
+      const [existingOrgAttribution] = await db
+        .select({ id: referralAttribution.id })
+        .from(referralAttribution)
+        .where(eq(referralAttribution.organizationId, orgId))
+        .limit(1)
+
+      if (existingOrgAttribution) {
+        return NextResponse.json({
+          redeemed: false,
+          error: 'A code has already been redeemed for your organization',
+        })
+      }
+    }
+
+    const bonusAmount = Number(campaign.bonusCreditAmount)
+
+    let redeemed = false
+    await db.transaction(async (tx) => {
+      const [existingStats] = await tx
+        .select({ id: userStats.id })
+        .from(userStats)
+        .where(eq(userStats.userId, session.user.id))
+        .limit(1)
+
+      if (!existingStats) {
+        await tx.insert(userStats).values({
+          id: nanoid(),
+          userId: session.user.id,
+        })
+      }
+
+      const result = await tx
+        .insert(referralAttribution)
+        .values({
+          id: nanoid(),
+          userId: session.user.id,
+          organizationId: orgId,
+          campaignId: campaign.id,
+          utmSource: null,
+          utmMedium: null,
+          utmCampaign: null,
+          utmContent: null,
+          referrerUrl: null,
+          landingPage: null,
+          bonusCreditAmount: bonusAmount.toString(),
+        })
+        .onConflictDoNothing()
+        .returning({ id: referralAttribution.id })
+
+      if (result.length > 0) {
+        await applyBonusCredits(session.user.id, bonusAmount, tx)
+        redeemed = true
+      }
+    })
+
+    if (redeemed) {
+      logger.info('Referral code redeemed', {
+        userId: session.user.id,
+        organizationId: orgId,
+        code: normalizedCode,
+        campaignId: campaign.id,
+        campaignName: campaign.name,
+        bonusAmount,
+      })
+    }
+
+    if (!redeemed) {
+      return NextResponse.json({
+        redeemed: false,
+        error: 'You have already redeemed a code',
+      })
+    }
+
+    return NextResponse.json({
+      redeemed: true,
+      bonusAmount,
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
+    }
+    logger.error('Referral code redemption error', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
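For reference, a hedged sketch of calling the new redeem endpoint from the app. The path, body, and response shape come from the route above; the authenticated session cookie is assumed to be attached automatically by the browser:

// Sketch: redeem a referral code from the client; assumes a logged-in session.
async function redeemReferralCode(
  code: string
): Promise<{ redeemed?: boolean; bonusAmount?: number; error?: string }> {
  const res = await fetch('/api/referral-code/redeem', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ code }),
  })
  // 400 (validation), 401 (no session), 404 (unknown code) and 500 also return JSON bodies.
  return res.json()
}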
@@ -3,17 +3,14 @@
  *
  * @vitest-environment node
  */
-import { loggerMock } from '@sim/testing'
+import { databaseMock, loggerMock } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission, mockDbSelect, mockDbUpdate } =
-  vi.hoisted(() => ({
-    mockGetSession: vi.fn(),
-    mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
-    mockDbSelect: vi.fn(),
-    mockDbUpdate: vi.fn(),
-  }))
+const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission } = vi.hoisted(() => ({
+  mockGetSession: vi.fn(),
+  mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
+}))
 
 vi.mock('@/lib/auth', () => ({
   getSession: mockGetSession,
@@ -23,12 +20,7 @@ vi.mock('@/lib/workflows/utils', () => ({
   authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
 }))
 
-vi.mock('@sim/db', () => ({
-  db: {
-    select: mockDbSelect,
-    update: mockDbUpdate,
-  },
-}))
+vi.mock('@sim/db', () => databaseMock)
 
 vi.mock('@sim/db/schema', () => ({
   workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
@@ -59,6 +51,9 @@ function createParams(id: string): { params: Promise<{ id: string }> } {
   return { params: Promise.resolve({ id }) }
 }
 
+const mockDbSelect = databaseMock.db.select as ReturnType<typeof vi.fn>
+const mockDbUpdate = databaseMock.db.update as ReturnType<typeof vi.fn>
+
 function mockDbChain(selectResults: unknown[][]) {
   let selectCallIndex = 0
   mockDbSelect.mockImplementation(() => ({
@@ -3,17 +3,14 @@
  *
  * @vitest-environment node
  */
-import { loggerMock } from '@sim/testing'
+import { databaseMock, loggerMock } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission, mockDbSelect } = vi.hoisted(
-  () => ({
-    mockGetSession: vi.fn(),
-    mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
-    mockDbSelect: vi.fn(),
-  })
-)
+const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission } = vi.hoisted(() => ({
+  mockGetSession: vi.fn(),
+  mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
+}))
 
 vi.mock('@/lib/auth', () => ({
   getSession: mockGetSession,
@@ -23,11 +20,7 @@ vi.mock('@/lib/workflows/utils', () => ({
   authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
 }))
 
-vi.mock('@sim/db', () => ({
-  db: {
-    select: mockDbSelect,
-  },
-}))
+vi.mock('@sim/db', () => databaseMock)
 
 vi.mock('@sim/db/schema', () => ({
   workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
@@ -62,6 +55,8 @@ function createRequest(url: string): NextRequest {
   return new NextRequest(new URL(url), { method: 'GET' })
 }
 
+const mockDbSelect = databaseMock.db.select as ReturnType<typeof vi.fn>
+
 function mockDbChain(results: any[]) {
   let callIndex = 0
   mockDbSelect.mockImplementation(() => ({
@@ -6,7 +6,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { v4 as uuidv4 } from 'uuid'
 import { getSession } from '@/lib/auth'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
+import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
 import {
   type RegenerateStateInput,
   regenerateWorkflowStateIds,
@@ -115,15 +115,18 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
     // Step 3: Save the workflow state using the existing state endpoint (like imports do)
     // Ensure variables in state are remapped for the new workflow as well
     const workflowStateWithVariables = { ...workflowState, variables: remappedVariables }
-    const stateResponse = await fetch(`${getBaseUrl()}/api/workflows/${newWorkflowId}/state`, {
-      method: 'PUT',
-      headers: {
-        'Content-Type': 'application/json',
-        // Forward the session cookie for authentication
-        cookie: request.headers.get('cookie') || '',
-      },
-      body: JSON.stringify(workflowStateWithVariables),
-    })
+    const stateResponse = await fetch(
+      `${getInternalApiBaseUrl()}/api/workflows/${newWorkflowId}/state`,
+      {
+        method: 'PUT',
+        headers: {
+          'Content-Type': 'application/json',
+          // Forward the session cookie for authentication
+          cookie: request.headers.get('cookie') || '',
+        },
+        body: JSON.stringify(workflowStateWithVariables),
+      }
+    )
 
     if (!stateResponse.ok) {
       logger.error(`[${requestId}] Failed to save workflow state for template use`)
@@ -66,6 +66,12 @@
  * Credits:
  * POST /api/v1/admin/credits - Issue credits to user (by userId or email)
  *
+ * Referral Campaigns:
+ * GET /api/v1/admin/referral-campaigns - List campaigns (?active=true/false)
+ * POST /api/v1/admin/referral-campaigns - Create campaign
+ * GET /api/v1/admin/referral-campaigns/:id - Get campaign details
+ * PATCH /api/v1/admin/referral-campaigns/:id - Update campaign fields
+ *
  * Access Control (Permission Groups):
  * GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
  * DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
@@ -97,6 +103,7 @@ export type {
   AdminOrganization,
   AdminOrganizationBillingSummary,
   AdminOrganizationDetail,
+  AdminReferralCampaign,
   AdminSeatAnalytics,
   AdminSingleResponse,
   AdminSubscription,
@@ -111,6 +118,7 @@ export type {
   AdminWorkspaceMember,
   DbMember,
   DbOrganization,
+  DbReferralCampaign,
   DbSubscription,
   DbUser,
   DbUserStats,
@@ -139,6 +147,7 @@ export {
   parseWorkflowVariables,
   toAdminFolder,
   toAdminOrganization,
+  toAdminReferralCampaign,
   toAdminSubscription,
   toAdminUser,
   toAdminWorkflow,
apps/sim/app/api/v1/admin/referral-campaigns/[id]/route.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
+/**
+ * GET /api/v1/admin/referral-campaigns/:id
+ *
+ * Get a single referral campaign by ID.
+ *
+ * PATCH /api/v1/admin/referral-campaigns/:id
+ *
+ * Update campaign fields. All fields are optional.
+ *
+ * Body:
+ * - name: string (non-empty) - Campaign name
+ * - bonusCreditAmount: number (> 0) - Bonus credits in dollars
+ * - isActive: boolean - Enable/disable the campaign
+ * - code: string | null (min 6 chars, auto-uppercased, null to remove) - Redeemable code
+ * - utmSource: string | null - UTM source match (null = wildcard)
+ * - utmMedium: string | null - UTM medium match (null = wildcard)
+ * - utmCampaign: string | null - UTM campaign match (null = wildcard)
+ * - utmContent: string | null - UTM content match (null = wildcard)
+ */
+
+import { db } from '@sim/db'
+import { referralCampaigns } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { eq } from 'drizzle-orm'
+import { getBaseUrl } from '@/lib/core/utils/urls'
+import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
+import {
+  badRequestResponse,
+  internalErrorResponse,
+  notFoundResponse,
+  singleResponse,
+} from '@/app/api/v1/admin/responses'
+import { toAdminReferralCampaign } from '@/app/api/v1/admin/types'
+
+const logger = createLogger('AdminReferralCampaignDetailAPI')
+
+interface RouteParams {
+  id: string
+}
+
+export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
+  try {
+    const { id: campaignId } = await context.params
+
+    const [campaign] = await db
+      .select()
+      .from(referralCampaigns)
+      .where(eq(referralCampaigns.id, campaignId))
+      .limit(1)
+
+    if (!campaign) {
+      return notFoundResponse('Campaign')
+    }
+
+    logger.info(`Admin API: Retrieved referral campaign ${campaignId}`)
+
+    return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
+  } catch (error) {
+    logger.error('Admin API: Failed to get referral campaign', { error })
+    return internalErrorResponse('Failed to get referral campaign')
+  }
+})
+
+export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
+  try {
+    const { id: campaignId } = await context.params
+    const body = await request.json()
+
+    const [existing] = await db
+      .select()
+      .from(referralCampaigns)
+      .where(eq(referralCampaigns.id, campaignId))
+      .limit(1)
+
+    if (!existing) {
+      return notFoundResponse('Campaign')
+    }
+
+    const updateData: Record<string, unknown> = { updatedAt: new Date() }
+
+    if (body.name !== undefined) {
+      if (typeof body.name !== 'string' || body.name.trim().length === 0) {
+        return badRequestResponse('name must be a non-empty string')
+      }
+      updateData.name = body.name.trim()
+    }
+
+    if (body.bonusCreditAmount !== undefined) {
+      if (
+        typeof body.bonusCreditAmount !== 'number' ||
+        !Number.isFinite(body.bonusCreditAmount) ||
+        body.bonusCreditAmount <= 0
+      ) {
+        return badRequestResponse('bonusCreditAmount must be a positive number')
+      }
+      updateData.bonusCreditAmount = body.bonusCreditAmount.toString()
+    }
+
+    if (body.isActive !== undefined) {
+      if (typeof body.isActive !== 'boolean') {
+        return badRequestResponse('isActive must be a boolean')
+      }
+      updateData.isActive = body.isActive
+    }
+
+    if (body.code !== undefined) {
+      if (body.code !== null) {
+        if (typeof body.code !== 'string') {
+          return badRequestResponse('code must be a string or null')
+        }
+        if (body.code.trim().length < 6) {
+          return badRequestResponse('code must be at least 6 characters')
+        }
+      }
+      updateData.code = body.code ? body.code.trim().toUpperCase() : null
+    }
+
+    for (const field of ['utmSource', 'utmMedium', 'utmCampaign', 'utmContent'] as const) {
+      if (body[field] !== undefined) {
+        if (body[field] !== null && typeof body[field] !== 'string') {
+          return badRequestResponse(`${field} must be a string or null`)
+        }
+        updateData[field] = body[field] || null
+      }
+    }
+
+    const [updated] = await db
+      .update(referralCampaigns)
+      .set(updateData)
+      .where(eq(referralCampaigns.id, campaignId))
+      .returning()
+
+    logger.info(`Admin API: Updated referral campaign ${campaignId}`, {
+      fields: Object.keys(updateData).filter((k) => k !== 'updatedAt'),
+    })
+
+    return singleResponse(toAdminReferralCampaign(updated, getBaseUrl()))
+  } catch (error) {
+    logger.error('Admin API: Failed to update referral campaign', { error })
+    return internalErrorResponse('Failed to update referral campaign')
+  }
+})
apps/sim/app/api/v1/admin/referral-campaigns/route.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
+/**
+ * GET /api/v1/admin/referral-campaigns
+ *
+ * List referral campaigns with optional filtering and pagination.
+ *
+ * Query Parameters:
+ * - active: string (optional) - Filter by active status ('true' or 'false')
+ * - limit: number (default: 50, max: 250)
+ * - offset: number (default: 0)
+ *
+ * POST /api/v1/admin/referral-campaigns
+ *
+ * Create a new referral campaign.
+ *
+ * Body:
+ * - name: string (required) - Campaign name
+ * - bonusCreditAmount: number (required, > 0) - Bonus credits in dollars
+ * - code: string | null (optional, min 6 chars, auto-uppercased) - Redeemable code
+ * - utmSource: string | null (optional) - UTM source match (null = wildcard)
+ * - utmMedium: string | null (optional) - UTM medium match (null = wildcard)
+ * - utmCampaign: string | null (optional) - UTM campaign match (null = wildcard)
+ * - utmContent: string | null (optional) - UTM content match (null = wildcard)
+ */
+
+import { db } from '@sim/db'
+import { referralCampaigns } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { count, eq, type SQL } from 'drizzle-orm'
+import { nanoid } from 'nanoid'
+import { getBaseUrl } from '@/lib/core/utils/urls'
+import { withAdminAuth } from '@/app/api/v1/admin/middleware'
+import {
+  badRequestResponse,
+  internalErrorResponse,
+  listResponse,
+  singleResponse,
+} from '@/app/api/v1/admin/responses'
+import {
+  type AdminReferralCampaign,
+  createPaginationMeta,
+  parsePaginationParams,
+  toAdminReferralCampaign,
+} from '@/app/api/v1/admin/types'
+
+const logger = createLogger('AdminReferralCampaignsAPI')
+
+export const GET = withAdminAuth(async (request) => {
+  const url = new URL(request.url)
+  const { limit, offset } = parsePaginationParams(url)
+  const activeFilter = url.searchParams.get('active')
+
+  try {
+    const conditions: SQL<unknown>[] = []
+    if (activeFilter === 'true') {
+      conditions.push(eq(referralCampaigns.isActive, true))
+    } else if (activeFilter === 'false') {
+      conditions.push(eq(referralCampaigns.isActive, false))
+    }
+
+    const whereClause = conditions.length > 0 ? conditions[0] : undefined
+    const baseUrl = getBaseUrl()
+
+    const [countResult, campaigns] = await Promise.all([
+      db.select({ total: count() }).from(referralCampaigns).where(whereClause),
+      db
+        .select()
+        .from(referralCampaigns)
+        .where(whereClause)
+        .orderBy(referralCampaigns.createdAt)
+        .limit(limit)
+        .offset(offset),
+    ])
+
+    const total = countResult[0].total
+    const data: AdminReferralCampaign[] = campaigns.map((c) => toAdminReferralCampaign(c, baseUrl))
+    const pagination = createPaginationMeta(total, limit, offset)
+
+    logger.info(`Admin API: Listed ${data.length} referral campaigns (total: ${total})`)
+
+    return listResponse(data, pagination)
+  } catch (error) {
+    logger.error('Admin API: Failed to list referral campaigns', { error })
+    return internalErrorResponse('Failed to list referral campaigns')
+  }
+})
+
+export const POST = withAdminAuth(async (request) => {
+  try {
+    const body = await request.json()
+    const { name, code, utmSource, utmMedium, utmCampaign, utmContent, bonusCreditAmount } = body
+
+    if (!name || typeof name !== 'string') {
+      return badRequestResponse('name is required and must be a string')
+    }
+
+    if (
+      typeof bonusCreditAmount !== 'number' ||
+      !Number.isFinite(bonusCreditAmount) ||
+      bonusCreditAmount <= 0
+    ) {
+      return badRequestResponse('bonusCreditAmount must be a positive number')
+    }
+
+    if (code !== undefined && code !== null) {
+      if (typeof code !== 'string') {
+        return badRequestResponse('code must be a string or null')
+      }
+      if (code.trim().length < 6) {
+        return badRequestResponse('code must be at least 6 characters')
+      }
+    }
+
+    const id = nanoid()
+
+    const [campaign] = await db
+      .insert(referralCampaigns)
+      .values({
+        id,
+        name,
+        code: code ? code.trim().toUpperCase() : null,
+        utmSource: utmSource || null,
+        utmMedium: utmMedium || null,
+        utmCampaign: utmCampaign || null,
+        utmContent: utmContent || null,
+        bonusCreditAmount: bonusCreditAmount.toString(),
+      })
+      .returning()
+
+    logger.info(`Admin API: Created referral campaign ${id}`, {
+      name,
+      code: campaign.code,
+      bonusCreditAmount,
+    })
+
+    return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
+  } catch (error) {
+    logger.error('Admin API: Failed to create referral campaign', { error })
+    return internalErrorResponse('Failed to create referral campaign')
+  }
+})
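A hedged usage sketch for the admin campaign endpoints above. How withAdminAuth expects credentials is not shown in this diff, so the adminHeaders argument below is a placeholder for whatever that middleware requires:

// Sketch: create a campaign, then list only active ones.
async function createAndListCampaigns(baseUrl: string, adminHeaders: Record<string, string>) {
  const createRes = await fetch(`${baseUrl}/api/v1/admin/referral-campaigns`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', ...adminHeaders },
    body: JSON.stringify({
      name: 'Launch promo',
      bonusCreditAmount: 10, // dollars, must be > 0
      code: 'LAUNCH10', // optional, at least 6 chars, stored uppercased
      utmSource: 'newsletter', // optional UTM matcher; null or omitted acts as a wildcard
    }),
  })
  const listRes = await fetch(`${baseUrl}/api/v1/admin/referral-campaigns?active=true&limit=50`, {
    headers: adminHeaders,
  })
  return { created: await createRes.json(), active: await listRes.json() }
}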
@@ -8,6 +8,7 @@
 import type {
   member,
   organization,
+  referralCampaigns,
   subscription,
   user,
   userStats,
@@ -31,6 +32,7 @@ export type DbOrganization = InferSelectModel<typeof organization>
 export type DbSubscription = InferSelectModel<typeof subscription>
 export type DbMember = InferSelectModel<typeof member>
 export type DbUserStats = InferSelectModel<typeof userStats>
+export type DbReferralCampaign = InferSelectModel<typeof referralCampaigns>
 
 // =============================================================================
 // Pagination
@@ -646,3 +648,49 @@ export interface AdminDeployResult {
 export interface AdminUndeployResult {
   isDeployed: boolean
 }
+
+// =============================================================================
+// Referral Campaign Types
+// =============================================================================
+
+export interface AdminReferralCampaign {
+  id: string
+  name: string
+  code: string | null
+  utmSource: string | null
+  utmMedium: string | null
+  utmCampaign: string | null
+  utmContent: string | null
+  bonusCreditAmount: string
+  isActive: boolean
+  signupUrl: string | null
+  createdAt: string
+  updatedAt: string
+}
+
+export function toAdminReferralCampaign(
+  dbCampaign: DbReferralCampaign,
+  baseUrl: string
+): AdminReferralCampaign {
+  const utmParams = new URLSearchParams()
+  if (dbCampaign.utmSource) utmParams.set('utm_source', dbCampaign.utmSource)
+  if (dbCampaign.utmMedium) utmParams.set('utm_medium', dbCampaign.utmMedium)
+  if (dbCampaign.utmCampaign) utmParams.set('utm_campaign', dbCampaign.utmCampaign)
+  if (dbCampaign.utmContent) utmParams.set('utm_content', dbCampaign.utmContent)
+  const query = utmParams.toString()
+
+  return {
+    id: dbCampaign.id,
+    name: dbCampaign.name,
+    code: dbCampaign.code,
+    utmSource: dbCampaign.utmSource,
+    utmMedium: dbCampaign.utmMedium,
+    utmCampaign: dbCampaign.utmCampaign,
+    utmContent: dbCampaign.utmContent,
+    bonusCreditAmount: dbCampaign.bonusCreditAmount,
+    isActive: dbCampaign.isActive,
+    signupUrl: query ? `${baseUrl}/signup?${query}` : null,
+    createdAt: dbCampaign.createdAt.toISOString(),
+    updatedAt: dbCampaign.updatedAt.toISOString(),
+  }
+}
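To make the signupUrl mapping above concrete, an illustrative pair of inputs and outputs (values invented for the example):

// { utmSource: 'newsletter', utmCampaign: 'spring', utmMedium: null, utmContent: null }
//   -> signupUrl: `${baseUrl}/signup?utm_source=newsletter&utm_campaign=spring`
// { utmSource: null, utmMedium: null, utmCampaign: null, utmContent: null, code: 'LAUNCH10' }
//   -> signupUrl: null (code-only campaigns are redeemed via /api/referral-code/redeem instead)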
@@ -8,7 +8,7 @@ import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
 import { authenticateV1Request } from '@/app/api/v1/auth'
 
 const logger = createLogger('CopilotHeadlessAPI')
-const DEFAULT_COPILOT_MODEL = 'claude-opus-4-6'
+const DEFAULT_COPILOT_MODEL = 'claude-opus-4-5'
 
 const RequestSchema = z.object({
   message: z.string().min(1, 'message is required'),
@@ -29,7 +29,7 @@ const patchBodySchema = z
     description: z
       .string()
      .trim()
-      .max(500, 'Description must be 500 characters or less')
+      .max(2000, 'Description must be 2000 characters or less')
      .nullable()
      .optional(),
    isActive: z.literal(true).optional(), // Set to true to activate this version
@@ -12,7 +12,7 @@ import {
 import { generateRequestId } from '@/lib/core/utils/request'
 import { SSE_HEADERS } from '@/lib/core/utils/sse'
 import { getBaseUrl } from '@/lib/core/utils/urls'
-import { markExecutionCancelled } from '@/lib/execution/cancellation'
+import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
 import { processInputFileFields } from '@/lib/execution/files'
 import { preprocessExecution } from '@/lib/execution/preprocessing'
 import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,15 +700,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
   const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
   let isStreamClosed = false
 
+  const eventWriter = createExecutionEventWriter(executionId)
+  setExecutionMeta(executionId, {
+    status: 'active',
+    userId: actorUserId,
+    workflowId,
+  }).catch(() => {})
+
   const stream = new ReadableStream<Uint8Array>({
     async start(controller) {
-      const sendEvent = (event: ExecutionEvent) => {
-        if (isStreamClosed) return
-        try {
-          controller.enqueue(encodeSSEEvent(event))
-        } catch {
-          isStreamClosed = true
+      let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null
+
+      const sendEvent = (event: ExecutionEvent) => {
+        if (!isStreamClosed) {
+          try {
+            controller.enqueue(encodeSSEEvent(event))
+          } catch {
+            isStreamClosed = true
+          }
+        }
+        if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
+          eventWriter.write(event).catch(() => {})
         }
       }
 
@@ -829,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
 
       const reader = streamingExec.stream.getReader()
      const decoder = new TextDecoder()
-      let chunkCount = 0
 
      try {
        while (true) {
          const { done, value } = await reader.read()
          if (done) break
 
-          chunkCount++
          const chunk = decoder.decode(value, { stream: true })
          sendEvent({
            type: 'stream:chunk',
@@ -951,6 +961,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
              duration: result.metadata?.duration || 0,
            },
          })
+          finalMetaStatus = 'error'
        } else {
          logger.info(`[${requestId}] Workflow execution was cancelled`)
 
@@ -963,6 +974,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
              duration: result.metadata?.duration || 0,
            },
          })
+          finalMetaStatus = 'cancelled'
        }
        return
      }
@@ -986,6 +998,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            endTime: result.metadata?.endTime || new Date().toISOString(),
          },
        })
+        finalMetaStatus = 'complete'
      } catch (error: unknown) {
        const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
        const errorMessage = isTimeout
@@ -1017,7 +1030,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            duration: executionResult?.metadata?.duration || 0,
          },
        })
+        finalMetaStatus = 'error'
      } finally {
+        try {
+          await eventWriter.close()
+        } catch (closeError) {
+          logger.warn(`[${requestId}] Failed to close event writer`, {
+            error: closeError instanceof Error ? closeError.message : String(closeError),
+          })
+        }
+        if (finalMetaStatus) {
+          setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
+        }
        timeoutController.cleanup()
        if (executionId) {
          await cleanupExecutionBase64Cache(executionId)
@@ -1032,10 +1056,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
     },
     cancel() {
       isStreamClosed = true
-      timeoutController.cleanup()
-      logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
-      timeoutController.abort()
-      markExecutionCancelled(executionId).catch(() => {})
+      logger.info(`[${requestId}] Client disconnected from SSE stream`)
     },
   })
 
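The hunks above write non-chunk events into a per-execution event buffer and record a terminal status in the execution metadata; the new route in the next file replays that buffer to reconnecting clients. The event-buffer module itself is not in this diff, so the following declarations are only an assumed contract inferred from the call sites here and below:

// Assumed contract of '@/lib/execution/event-buffer', inferred from usage; not the module's source.
type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'
interface ExecutionEventWriter {
  write(event: unknown): Promise<void> // append an event under an increasing eventId
  close(): Promise<void>
}
declare function createExecutionEventWriter(executionId: string): ExecutionEventWriter
declare function setExecutionMeta(
  executionId: string,
  meta: { status: ExecutionStreamStatus; userId?: string; workflowId?: string }
): Promise<void>
declare function getExecutionMeta(
  executionId: string
): Promise<{ status: ExecutionStreamStatus; workflowId?: string } | null>
declare function readExecutionEvents(
  executionId: string,
  afterEventId: number
): Promise<Array<{ eventId: number; event: unknown }>>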
@@ -0,0 +1,170 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { SSE_HEADERS } from '@/lib/core/utils/sse'
+import {
+  type ExecutionStreamStatus,
+  getExecutionMeta,
+  readExecutionEvents,
+} from '@/lib/execution/event-buffer'
+import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
+import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
+
+const logger = createLogger('ExecutionStreamReconnectAPI')
+
+const POLL_INTERVAL_MS = 500
+const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes
+
+function isTerminalStatus(status: ExecutionStreamStatus): boolean {
+  return status === 'complete' || status === 'error' || status === 'cancelled'
+}
+
+export const runtime = 'nodejs'
+export const dynamic = 'force-dynamic'
+
+export async function GET(
+  req: NextRequest,
+  { params }: { params: Promise<{ id: string; executionId: string }> }
+) {
+  const { id: workflowId, executionId } = await params
+
+  try {
+    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
+    if (!auth.success || !auth.userId) {
+      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
+    }
+
+    const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
+      workflowId,
+      userId: auth.userId,
+      action: 'read',
+    })
+    if (!workflowAuthorization.allowed) {
+      return NextResponse.json(
+        { error: workflowAuthorization.message || 'Access denied' },
+        { status: workflowAuthorization.status }
+      )
+    }
+
+    const meta = await getExecutionMeta(executionId)
+    if (!meta) {
+      return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
+    }
+
+    if (meta.workflowId && meta.workflowId !== workflowId) {
+      return NextResponse.json(
+        { error: 'Execution does not belong to this workflow' },
+        { status: 403 }
+      )
+    }
+
+    const fromParam = req.nextUrl.searchParams.get('from')
+    const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
+    const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0
+
+    logger.info('Reconnection stream requested', {
+      workflowId,
+      executionId,
+      fromEventId,
+      metaStatus: meta.status,
+    })
+
+    const encoder = new TextEncoder()
+
+    let closed = false
+
+    const stream = new ReadableStream<Uint8Array>({
+      async start(controller) {
+        let lastEventId = fromEventId
+        const pollDeadline = Date.now() + MAX_POLL_DURATION_MS
+
+        const enqueue = (text: string) => {
+          if (closed) return
+          try {
+            controller.enqueue(encoder.encode(text))
+          } catch {
+            closed = true
+          }
+        }
+
+        try {
+          const events = await readExecutionEvents(executionId, lastEventId)
+          for (const entry of events) {
+            if (closed) return
+            enqueue(formatSSEEvent(entry.event))
+            lastEventId = entry.eventId
+          }
+
+          const currentMeta = await getExecutionMeta(executionId)
+          if (!currentMeta || isTerminalStatus(currentMeta.status)) {
+            enqueue('data: [DONE]\n\n')
+            if (!closed) controller.close()
+            return
+          }
+
+          while (!closed && Date.now() < pollDeadline) {
+            await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
+            if (closed) return
+
+            const newEvents = await readExecutionEvents(executionId, lastEventId)
+            for (const entry of newEvents) {
+              if (closed) return
+              enqueue(formatSSEEvent(entry.event))
+              lastEventId = entry.eventId
+            }
+
+            const polledMeta = await getExecutionMeta(executionId)
+            if (!polledMeta || isTerminalStatus(polledMeta.status)) {
+              const finalEvents = await readExecutionEvents(executionId, lastEventId)
+              for (const entry of finalEvents) {
+                if (closed) return
+                enqueue(formatSSEEvent(entry.event))
+                lastEventId = entry.eventId
+              }
+              enqueue('data: [DONE]\n\n')
+              if (!closed) controller.close()
+              return
+            }
+          }
+
+          if (!closed) {
+            logger.warn('Reconnection stream poll deadline reached', { executionId })
+            enqueue('data: [DONE]\n\n')
+            controller.close()
+          }
+        } catch (error) {
+          logger.error('Error in reconnection stream', {
+            executionId,
+            error: error instanceof Error ? error.message : String(error),
+          })
+          if (!closed) {
+            try {
+              controller.close()
+            } catch {}
+          }
+        }
+      },
+      cancel() {
+        closed = true
+        logger.info('Client disconnected from reconnection stream', { executionId })
+      },
+    })
+
+    return new NextResponse(stream, {
+      headers: {
+        ...SSE_HEADERS,
+        'X-Execution-Id': executionId,
+      },
+    })
+  } catch (error: any) {
+    logger.error('Failed to start reconnection stream', {
+      workflowId,
+      executionId,
+      error: error.message,
+    })
+    return NextResponse.json(
+      { error: error.message || 'Failed to start reconnection stream' },
+      { status: 500 }
+    )
+  }
+}
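A hedged client-side sketch of resuming a dropped stream through the route above. The file path (and therefore the exact URL) is not shown in this diff, so reconnectUrl is left as a parameter; `from` is the last event id the client processed before disconnecting, and the route ends the replay with a `data: [DONE]` frame:

// Sketch: replay missed events after a disconnect, then stop at the [DONE] marker.
async function resumeExecutionStream(reconnectUrl: string, lastEventId: number): Promise<void> {
  const res = await fetch(`${reconnectUrl}?from=${lastEventId}`, {
    headers: { Accept: 'text/event-stream' },
  })
  if (!res.ok || !res.body) throw new Error(`reconnect failed: ${res.status}`)
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffered = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffered += decoder.decode(value, { stream: true })
    if (buffered.includes('data: [DONE]')) break
    // ...split buffered on '\n\n' and hand each SSE frame to the app's event handler
  }
}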
@@ -5,7 +5,7 @@
  * @vitest-environment node
  */
 
-import { loggerMock } from '@sim/testing'
+import { loggerMock, setupGlobalFetchMock } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
@@ -284,9 +284,7 @@ describe('Workflow By ID API Route', () => {
         where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]),
       })
 
-      global.fetch = vi.fn().mockResolvedValue({
-        ok: true,
-      })
+      setupGlobalFetchMock({ ok: true })
 
      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
        method: 'DELETE',
@@ -331,9 +329,7 @@ describe('Workflow By ID API Route', () => {
        where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]),
      })
 
-      global.fetch = vi.fn().mockResolvedValue({
-        ok: true,
-      })
+      setupGlobalFetchMock({ ok: true })
 
      const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
        method: 'DELETE',
@@ -14,14 +14,6 @@ const logger = createLogger('DiffControls')
 const NOTIFICATION_WIDTH = 240
 const NOTIFICATION_GAP = 16
 
-function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
-  if (name !== 'workflow_change') return false
-
-  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
-}
-
 export const DiffControls = memo(function DiffControls() {
   const isTerminalResizing = useTerminalStore((state) => state.isResizing)
   const isPanelResizing = usePanelStore((state) => state.isResizing)
@@ -72,7 +64,7 @@ export const DiffControls = memo(function DiffControls() {
         const b = blocks[bi]
         if (b?.type === 'tool_call') {
           const tn = b.toolCall?.name
-          if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
+          if (tn === 'edit_workflow') {
             id = b.toolCall?.id
             break outer
           }
@@ -80,9 +72,7 @@ export const DiffControls = memo(function DiffControls() {
       }
     }
     if (!id) {
-      const candidates = Object.values(toolCallsById).filter((t) =>
-        isWorkflowEditToolCall(t.name, t.params)
-      )
+      const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
       id = candidates.length ? candidates[candidates.length - 1].id : undefined
     }
     if (id) updatePreviewToolCallState('accepted', id)
@@ -112,7 +102,7 @@ export const DiffControls = memo(function DiffControls() {
         const b = blocks[bi]
         if (b?.type === 'tool_call') {
           const tn = b.toolCall?.name
-          if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
+          if (tn === 'edit_workflow') {
             id = b.toolCall?.id
             break outer
           }
@@ -120,9 +110,7 @@ export const DiffControls = memo(function DiffControls() {
       }
     }
     if (!id) {
-      const candidates = Object.values(toolCallsById).filter((t) =>
-        isWorkflowEditToolCall(t.name, t.params)
-      )
+      const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
      id = candidates.length ? candidates[candidates.length - 1].id : undefined
    }
    if (id) updatePreviewToolCallState('rejected', id)
@@ -47,27 +47,6 @@ interface ParsedTags {
   cleanContent: string
 }

-function getToolCallParams(toolCall?: CopilotToolCall): Record<string, unknown> {
-  const candidate = ((toolCall as any)?.parameters ||
-    (toolCall as any)?.input ||
-    (toolCall as any)?.params ||
-    {}) as Record<string, unknown>
-  return candidate && typeof candidate === 'object' ? candidate : {}
-}
-
-function isWorkflowChangeApplyMode(toolCall?: CopilotToolCall): boolean {
-  if (!toolCall || toolCall.name !== 'workflow_change') return false
-  const params = getToolCallParams(toolCall)
-  const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-  return typeof params.proposalId === 'string' && params.proposalId.length > 0
-}
-
-function isWorkflowEditSummaryTool(toolCall?: CopilotToolCall): boolean {
-  if (!toolCall) return false
-  return isWorkflowChangeApplyMode(toolCall)
-}
-
 /**
  * Extracts plan steps from plan_respond tool calls in subagent blocks.
  * @param blocks - The subagent content blocks to search
@@ -892,10 +871,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
       )
     }
     if (segment.type === 'tool' && segment.block.toolCall) {
-      if (
-        (toolCall.name === 'edit' || toolCall.name === 'build') &&
-        isWorkflowEditSummaryTool(segment.block.toolCall)
-      ) {
+      if (toolCall.name === 'edit' && segment.block.toolCall.name === 'edit_workflow') {
         return (
           <div key={`tool-${segment.block.toolCall.id || index}`}>
             <WorkflowEditSummary toolCall={segment.block.toolCall} />
@@ -992,11 +968,12 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
     }
   }, [blocks])

-  if (!isWorkflowEditSummaryTool(toolCall)) {
+  if (toolCall.name !== 'edit_workflow') {
     return null
   }

-  const params = getToolCallParams(toolCall)
+  const params =
+    (toolCall as any).parameters || (toolCall as any).input || (toolCall as any).params || {}
   let operations = Array.isArray(params.operations) ? params.operations : []

   if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
@@ -1242,6 +1219,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
   )
 })

+/** Checks if a tool is server-side executed (not a client tool) */
+function isIntegrationTool(toolName: string): boolean {
+  return !TOOL_DISPLAY_REGISTRY[toolName]
+}
+
 function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
   if (!toolCall.name || toolCall.name === 'unknown_tool') {
     return false
@@ -1251,96 +1233,59 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
     return false
   }

-  if (toolCall.ui?.showInterrupt !== true) {
+  // Never show buttons for tools the user has marked as always-allowed
+  if (useCopilotStore.getState().isToolAutoAllowed(toolCall.name)) {
     return false
   }

-  return true
+  const hasInterrupt = !!TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt
+  if (hasInterrupt) {
+    return true
+  }
+
+  // Integration tools (user-installed) always require approval
+  if (isIntegrationTool(toolCall.name)) {
+    return true
+  }
+
+  return false
 }

 const toolCallLogger = createLogger('CopilotToolCall')

 async function sendToolDecision(
   toolCallId: string,
-  status: 'accepted' | 'rejected' | 'background',
-  options?: {
-    toolName?: string
-    remember?: boolean
-  }
+  status: 'accepted' | 'rejected' | 'background'
 ) {
   try {
     await fetch('/api/copilot/confirm', {
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify({
-        toolCallId,
-        status,
-        ...(options?.toolName ? { toolName: options.toolName } : {}),
-        ...(options?.remember ? { remember: true } : {}),
-      }),
+      body: JSON.stringify({ toolCallId, status }),
     })
   } catch (error) {
     toolCallLogger.warn('Failed to send tool decision', {
       toolCallId,
       status,
-      remember: options?.remember === true,
-      toolName: options?.toolName,
       error: error instanceof Error ? error.message : String(error),
     })
   }
 }

-async function removeAutoAllowedToolPreference(toolName: string): Promise<boolean> {
-  try {
-    const response = await fetch(`/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolName)}`, {
-      method: 'DELETE',
-    })
-    return response.ok
-  } catch (error) {
-    toolCallLogger.warn('Failed to remove auto-allowed tool preference', {
-      toolName,
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return false
-  }
-}
-
-type ToolUiAction = NonNullable<NonNullable<CopilotToolCall['ui']>['actions']>[number]
-
-function actionDecision(action: ToolUiAction): 'accepted' | 'rejected' | 'background' {
-  const id = action.id.toLowerCase()
-  if (id.includes('background')) return 'background'
-  if (action.kind === 'reject') return 'rejected'
-  return 'accepted'
-}
-
-function isClientRunCapability(toolCall: CopilotToolCall): boolean {
-  if (toolCall.execution?.target === 'sim_client_capability') {
-    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
-  }
-  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
-}
-
 async function handleRun(
   toolCall: CopilotToolCall,
   setToolCallState: any,
   onStateChange?: any,
-  editedParams?: any,
-  options?: {
-    remember?: boolean
-  }
+  editedParams?: any
 ) {
   setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
   onStateChange?.('executing')
-  await sendToolDecision(toolCall.id, 'accepted', {
-    toolName: toolCall.name,
-    remember: options?.remember === true,
-  })
+  await sendToolDecision(toolCall.id, 'accepted')

   // Client-executable run tools: execute on the client for real-time feedback
   // (block pulsing, console logs, stop button). The server defers execution
   // for these tools; the client reports back via mark-complete.
-  if (isClientRunCapability(toolCall)) {
+  if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)) {
     const params = editedParams || toolCall.params || {}
     executeRunToolOnClient(toolCall.id, toolCall.name, params)
   }
@@ -1353,9 +1298,6 @@ async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onSt
 }

 function getDisplayName(toolCall: CopilotToolCall): string {
-  if (toolCall.ui?.phaseLabel) return toolCall.ui.phaseLabel
-  if (toolCall.ui?.title) return `${getStateVerb(toolCall.state)} ${toolCall.ui.title}`
-
   const fromStore = (toolCall as any).display?.text
   if (fromStore) return fromStore
   const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
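
After this change the confirm request carries only the tool call id and the decision; remembering a tool is persisted separately through the store's `addAutoAllowedTool`. A minimal sketch of the call as the new `sendToolDecision` issues it (the endpoint path and payload come from the diff, the wrapper name is illustrative):

// Mirrors the simplified payload used by sendToolDecision above.
async function confirmToolCall(
  toolCallId: string,
  status: 'accepted' | 'rejected' | 'background'
): Promise<void> {
  await fetch('/api/copilot/confirm', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolCallId, status }),
  })
}
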
@@ -1400,37 +1342,53 @@ function RunSkipButtons({
   toolCall,
   onStateChange,
   editedParams,
-  actions,
 }: {
   toolCall: CopilotToolCall
   onStateChange?: (state: any) => void
   editedParams?: any
-  actions: ToolUiAction[]
 }) {
   const [isProcessing, setIsProcessing] = useState(false)
   const [buttonsHidden, setButtonsHidden] = useState(false)
   const actionInProgressRef = useRef(false)
-  const { setToolCallState } = useCopilotStore()
+  const { setToolCallState, addAutoAllowedTool } = useCopilotStore()

-  const onAction = async (action: ToolUiAction) => {
+  const onRun = async () => {
     // Prevent race condition - check ref synchronously
     if (actionInProgressRef.current) return
     actionInProgressRef.current = true
     setIsProcessing(true)
     setButtonsHidden(true)
     try {
-      const decision = actionDecision(action)
-      if (decision === 'accepted') {
-        await handleRun(toolCall, setToolCallState, onStateChange, editedParams, {
-          remember: action.remember === true,
-        })
-      } else if (decision === 'rejected') {
-        await handleSkip(toolCall, setToolCallState, onStateChange)
-      } else {
-        setToolCallState(toolCall, ClientToolCallState.background)
-        onStateChange?.('background')
-        await sendToolDecision(toolCall.id, 'background')
-      }
+      await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
+    } finally {
+      setIsProcessing(false)
+      actionInProgressRef.current = false
+    }
+  }
+
+  const onAlwaysAllow = async () => {
+    // Prevent race condition - check ref synchronously
+    if (actionInProgressRef.current) return
+    actionInProgressRef.current = true
+    setIsProcessing(true)
+    setButtonsHidden(true)
+    try {
+      await addAutoAllowedTool(toolCall.name)
+      await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
+    } finally {
+      setIsProcessing(false)
+      actionInProgressRef.current = false
+    }
+  }
+
+  const onSkip = async () => {
+    // Prevent race condition - check ref synchronously
+    if (actionInProgressRef.current) return
+    actionInProgressRef.current = true
+    setIsProcessing(true)
+    setButtonsHidden(true)
+    try {
+      await handleSkip(toolCall, setToolCallState, onStateChange)
     } finally {
       setIsProcessing(false)
       actionInProgressRef.current = false
@@ -1439,22 +1397,23 @@ function RunSkipButtons({

   if (buttonsHidden) return null

+  // Show "Always Allow" for all tools that require confirmation
+  const showAlwaysAllow = true
+
+  // Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
   return (
     <div className='mt-[10px] flex gap-[6px]'>
-      {actions.map((action, index) => {
-        const variant =
-          action.kind === 'reject' ? 'default' : action.remember ? 'default' : 'tertiary'
-        return (
-          <Button
-            key={action.id}
-            onClick={() => onAction(action)}
-            disabled={isProcessing}
-            variant={variant}
-          >
-            {isProcessing && index === 0 ? 'Working...' : action.label}
-          </Button>
-        )
-      })}
+      <Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
+        {isProcessing ? 'Allowing...' : 'Allow'}
+      </Button>
+      {showAlwaysAllow && (
+        <Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
+          {isProcessing ? 'Allowing...' : 'Always Allow'}
+        </Button>
+      )}
+      <Button onClick={onSkip} disabled={isProcessing} variant='default'>
+        Skip
+      </Button>
     </div>
   )
 }
@@ -1471,16 +1430,10 @@ export function ToolCall({
   const liveToolCall = useCopilotStore((s) =>
     effectiveId ? s.toolCallsById[effectiveId] : undefined
   )
-  const rawToolCall = liveToolCall || toolCallProp
-  const hasRealToolCall = !!rawToolCall
-  const toolCall: CopilotToolCall =
-    rawToolCall ||
-    ({
-      id: effectiveId || '',
-      name: '',
-      state: ClientToolCallState.generating,
-      params: {},
-    } as CopilotToolCall)
+  const toolCall = liveToolCall || toolCallProp
+
+  // Guard: nothing to render without a toolCall
+  if (!toolCall) return null

   const isExpandablePending =
     toolCall?.state === 'pending' &&
@@ -1488,15 +1441,17 @@ export function ToolCall({

   const [expanded, setExpanded] = useState(isExpandablePending)
   const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
-  const [autoAllowRemovedForCall, setAutoAllowRemovedForCall] = useState(false)

   // State for editable parameters
   const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
   const [editedParams, setEditedParams] = useState(params)
   const paramsRef = useRef(params)

-  const { setToolCallState } = useCopilotStore()
-  const isAutoAllowed = toolCall.ui?.autoAllowed === true && !autoAllowRemovedForCall
+  // Check if this integration tool is auto-allowed
+  const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
+  const isAutoAllowed = useCopilotStore(
+    (s) => isIntegrationTool(toolCall.name) && s.isToolAutoAllowed(toolCall.name)
+  )

   // Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
   useEffect(() => {
@@ -1506,14 +1461,6 @@ export function ToolCall({
     }
   }, [params])

-  useEffect(() => {
-    setAutoAllowRemovedForCall(false)
-    setShowRemoveAutoAllow(false)
-  }, [toolCall.id])
-
-  // Guard: nothing to render without a toolCall
-  if (!hasRealToolCall) return null
-
   // Skip rendering some internal tools
   if (
     toolCall.name === 'checkoff_todo' ||
@@ -1525,9 +1472,7 @@ export function ToolCall({
     return null

   // Special rendering for subagent tools - show as thinking text with tool calls at top level
-  const isSubagentTool =
-    toolCall.execution?.target === 'go_subagent' ||
-    TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
+  const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true

   // For ALL subagent tools, don't show anything until we have blocks with content
   if (isSubagentTool) {
@@ -1554,6 +1499,28 @@ export function ToolCall({
     )
   }

+  // Get current mode from store to determine if we should render integration tools
+  const mode = useCopilotStore.getState().mode
+
+  // Check if this is a completed/historical tool call (not pending/executing)
+  // Use string comparison to handle both enum values and string values from DB
+  const stateStr = String(toolCall.state)
+  const isCompletedToolCall =
+    stateStr === 'success' ||
+    stateStr === 'error' ||
+    stateStr === 'rejected' ||
+    stateStr === 'aborted'
+
+  // Allow rendering if:
+  // 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
+  // 2. We're in build mode (integration tools are executed server-side), OR
+  // 3. Tool call is already completed (historical - should always render)
+  const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
+  const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
+
+  if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
+    return null
+  }
   const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
   // Check if tool has params table config (meaning it's expandable)
   const hasParamsTable = !!toolUIConfig?.paramsTable
@@ -1563,14 +1530,6 @@ export function ToolCall({
     toolCall.name === 'make_api_request' ||
     toolCall.name === 'set_global_workflow_variables'

-  const interruptActions =
-    (toolCall.ui?.actions && toolCall.ui.actions.length > 0
-      ? toolCall.ui.actions
-      : [
-          { id: 'allow_once', label: 'Allow', kind: 'accept' as const },
-          { id: 'allow_always', label: 'Always Allow', kind: 'accept' as const, remember: true },
-          { id: 'reject', label: 'Skip', kind: 'reject' as const },
-        ]) as ToolUiAction[]
   const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)

   // Check UI config for secondary action - only show for current message tool calls
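
The new rendering guard reduces to a small predicate; restated here as a standalone sketch for clarity. This is a distillation of the logic added above, not an export of the actual component file, and the registry parameter stands in for TOOL_DISPLAY_REGISTRY.

// Distilled from the rendering guard above; illustrative only.
function shouldRenderToolCall(
  toolName: string,
  mode: string,
  state: unknown,
  registry: Record<string, unknown>
): boolean {
  const isClientTool = !!registry[toolName]
  const isCompleted = ['success', 'error', 'rejected', 'aborted'].includes(String(state))
  const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
  return isClientTool || isIntegrationToolInBuildMode || isCompleted
}
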
@@ -2028,12 +1987,9 @@ export function ToolCall({
               <div className='mt-[10px]'>
                 <Button
                   onClick={async () => {
-                    const removed = await removeAutoAllowedToolPreference(toolCall.name)
-                    if (removed) {
-                      setAutoAllowRemovedForCall(true)
-                      setShowRemoveAutoAllow(false)
-                      forceUpdate({})
-                    }
+                    await removeAutoAllowedTool(toolCall.name)
+                    setShowRemoveAutoAllow(false)
+                    forceUpdate({})
                   }}
                   variant='default'
                   className='text-xs'
@@ -2047,7 +2003,6 @@ export function ToolCall({
                 toolCall={toolCall}
                 onStateChange={handleStateChange}
                 editedParams={editedParams}
-                actions={interruptActions}
               />
             )}
             {/* Render subagent content as thinking text */}
@@ -2093,12 +2048,9 @@ export function ToolCall({
               <div className='mt-[10px]'>
                 <Button
                   onClick={async () => {
-                    const removed = await removeAutoAllowedToolPreference(toolCall.name)
-                    if (removed) {
-                      setAutoAllowRemovedForCall(true)
-                      setShowRemoveAutoAllow(false)
-                      forceUpdate({})
-                    }
+                    await removeAutoAllowedTool(toolCall.name)
+                    setShowRemoveAutoAllow(false)
+                    forceUpdate({})
                   }}
                   variant='default'
                   className='text-xs'
@@ -2112,7 +2064,6 @@ export function ToolCall({
                 toolCall={toolCall}
                 onStateChange={handleStateChange}
                 editedParams={editedParams}
-                actions={interruptActions}
               />
             )}
             {/* Render subagent content as thinking text */}
@@ -2136,7 +2087,7 @@ export function ToolCall({
     }
   }

-  const isEditWorkflow = isWorkflowEditSummaryTool(toolCall)
+  const isEditWorkflow = toolCall.name === 'edit_workflow'
   const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
   const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
   const hideTextForEditWorkflow = isEditWorkflow && hasOperations
@@ -2158,12 +2109,9 @@ export function ToolCall({
               <div className='mt-[10px]'>
                 <Button
                   onClick={async () => {
-                    const removed = await removeAutoAllowedToolPreference(toolCall.name)
-                    if (removed) {
-                      setAutoAllowRemovedForCall(true)
-                      setShowRemoveAutoAllow(false)
-                      forceUpdate({})
-                    }
+                    await removeAutoAllowedTool(toolCall.name)
+                    setShowRemoveAutoAllow(false)
+                    forceUpdate({})
                   }}
                   variant='default'
                   className='text-xs'
@@ -2177,7 +2125,6 @@ export function ToolCall({
               toolCall={toolCall}
              onStateChange={handleStateChange}
               editedParams={editedParams}
-              actions={interruptActions}
             />
           ) : showMoveToBackground ? (
             <div className='mt-[10px]'>
@@ -2208,7 +2155,7 @@ export function ToolCall({
             </Button>
           </div>
         ) : null}
-        {/* Workflow edit summary - shows block changes after workflow_change(apply) */}
+        {/* Workflow edit summary - shows block changes after edit_workflow completes */}
         <WorkflowEditSummary toolCall={toolCall} />

         {/* Render subagent content as thinking text */}
@@ -113,6 +113,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     clearPlanArtifact,
     savePlanArtifact,
     loadAvailableModels,
+    loadAutoAllowedTools,
     resumeActiveStream,
   } = useCopilotStore()

@@ -124,6 +125,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     setCopilotWorkflowId,
     loadChats,
     loadAvailableModels,
+    loadAutoAllowedTools,
+    currentChat,
     isSendingMessage,
     resumeActiveStream,
   })
@@ -151,8 +154,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     planTodos,
   })

-  const renderedChatTitle = currentChat?.title || 'New Chat'
-
   /** Gets markdown content for design document section (available in all modes once created) */
   const designDocumentContent = useMemo(() => {
     if (streamingPlanContent) {
@@ -165,14 +166,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     return ''
   }, [streamingPlanContent])

-  useEffect(() => {
-    logger.info('[TitleRender] Copilot header title changed', {
-      currentChatId: currentChat?.id || null,
-      currentChatTitle: currentChat?.title || null,
-      renderedTitle: renderedChatTitle,
-    })
-  }, [currentChat?.id, currentChat?.title, renderedChatTitle])
-
   /** Focuses the copilot input */
   const focusInput = useCallback(() => {
     userInputRef.current?.focus()
@@ -355,7 +348,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
       {/* Header */}
       <div className='mx-[-1px] flex flex-shrink-0 items-center justify-between gap-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] px-[12px] py-[6px]'>
         <h2 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
-          {renderedChatTitle}
+          {currentChat?.title || 'New Chat'}
         </h2>
         <div className='flex items-center gap-[8px]'>
           <Button variant='ghost' className='p-0' onClick={handleStartNewChat}>
@@ -12,6 +12,8 @@ interface UseCopilotInitializationProps {
   setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
   loadChats: (forceRefresh?: boolean) => Promise<void>
   loadAvailableModels: () => Promise<void>
+  loadAutoAllowedTools: () => Promise<void>
+  currentChat: any
   isSendingMessage: boolean
   resumeActiveStream: () => Promise<boolean>
 }
@@ -30,6 +32,8 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     setCopilotWorkflowId,
     loadChats,
     loadAvailableModels,
+    loadAutoAllowedTools,
+    currentChat,
     isSendingMessage,
     resumeActiveStream,
   } = props
@@ -116,6 +120,17 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     })
   }, [isSendingMessage, resumeActiveStream])

+  /** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
+  const hasLoadedAutoAllowedToolsRef = useRef(false)
+  useEffect(() => {
+    if (!hasLoadedAutoAllowedToolsRef.current) {
+      hasLoadedAutoAllowedToolsRef.current = true
+      loadAutoAllowedTools().catch((err) => {
+        logger.warn('[Copilot] Failed to load auto-allowed tools', err)
+      })
+    }
+  }, [loadAutoAllowedTools])
+
   /** Load available models once on mount */
   const hasLoadedModelsRef = useRef(false)
   useEffect(() => {
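
The auto-allowed-tools loader follows the same ref-guarded load-once-on-mount pattern already used for models. A generic sketch of that pattern, assuming nothing beyond React:

import { useEffect, useRef } from 'react'

// Generic once-on-mount loader; mirrors the ref-guarded effect added above.
function useLoadOnce(load: () => Promise<void>, onError?: (err: unknown) => void) {
  const hasLoadedRef = useRef(false)
  useEffect(() => {
    if (hasLoadedRef.current) return
    hasLoadedRef.current = true
    load().catch((err) => onError?.(err))
  }, [load, onError])
}
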
@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
             className='min-h-[120px] resize-none'
             value={description}
             onChange={(e) => setDescription(e.target.value)}
-            maxLength={500}
+            maxLength={2000}
             disabled={isGenerating}
           />
           <div className='flex items-center justify-between'>
@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
               </p>
             )}
             {!updateMutation.error && !generateMutation.error && <div />}
-            <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
+            <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
           </div>
         </ModalBody>
         <ModalFooter>
@@ -57,6 +57,21 @@ export function useChangeDetection({
       }
     }

+    if (block.triggerMode) {
+      const triggerConfigValue = blockSubValues?.triggerConfig
+      if (
+        triggerConfigValue &&
+        typeof triggerConfigValue === 'object' &&
+        !subBlocks.triggerConfig
+      ) {
+        subBlocks.triggerConfig = {
+          id: 'triggerConfig',
+          type: 'short-input',
+          value: triggerConfigValue,
+        }
+      }
+    }
+
     blocksWithSubBlocks[blockId] = {
       ...block,
       subBlocks,
@@ -1,7 +1,10 @@
 import { useCallback, useState } from 'react'
 import { createLogger } from '@sim/logger'
+import { runPreDeployChecks } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-predeploy-checks'
 import { useNotificationStore } from '@/stores/notifications'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
+import { mergeSubblockState } from '@/stores/workflows/utils'
+import { useWorkflowStore } from '@/stores/workflows/workflow/store'

 const logger = createLogger('useDeployment')

@@ -35,6 +38,24 @@ export function useDeployment({
       return { success: true, shouldOpenModal: true }
     }

+    const { blocks, edges, loops, parallels } = useWorkflowStore.getState()
+    const liveBlocks = mergeSubblockState(blocks, workflowId)
+    const checkResult = runPreDeployChecks({
+      blocks: liveBlocks,
+      edges,
+      loops,
+      parallels,
+      workflowId,
+    })
+    if (!checkResult.passed) {
+      addNotification({
+        level: 'error',
+        message: checkResult.error || 'Pre-deploy validation failed',
+        workflowId,
+      })
+      return { success: false, shouldOpenModal: false }
+    }
+
     setIsDeploying(true)
     try {
       const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
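
The hook only consumes `checkResult.passed` and `checkResult.error`, so the result shape can be sketched from usage alone. This is an inference from the call site above, not the actual type exported by use-predeploy-checks:

// Assumed result shape, inferred from how useDeployment consumes it in this diff.
interface PreDeployCheckResult {
  passed: boolean
  error?: string
}

// Illustrative caller: surface the failing check and abort, as the hook does above.
function gateDeploy(checkResult: PreDeployCheckResult): boolean {
  if (!checkResult.passed) {
    console.error(checkResult.error || 'Pre-deploy validation failed')
    return false
  }
  return true
}
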
@@ -4,6 +4,7 @@ import { Button, Combobox } from '@/components/emcn/components'
 import {
   getCanonicalScopesForProvider,
   getProviderIdFromServiceId,
+  getServiceConfigByProviderId,
   OAUTH_PROVIDERS,
   type OAuthProvider,
   type OAuthService,
@@ -26,6 +27,11 @@ const getProviderIcon = (providerName: OAuthProvider) => {
 }

 const getProviderName = (providerName: OAuthProvider) => {
+  const serviceConfig = getServiceConfigByProviderId(providerName)
+  if (serviceConfig) {
+    return serviceConfig.name
+  }
+
   const { baseProvider } = parseProvider(providerName)
   const baseProviderConfig = OAUTH_PROVIDERS[baseProvider]

@@ -54,7 +60,7 @@ export function ToolCredentialSelector({
   onChange,
   provider,
   requiredScopes = [],
-  label = 'Select account',
+  label,
   serviceId,
   disabled = false,
 }: ToolCredentialSelectorProps) {
@@ -64,6 +70,7 @@ export function ToolCredentialSelector({
   const { activeWorkflowId } = useWorkflowRegistry()

   const selectedId = value || ''
+  const effectiveLabel = label || `Select ${getProviderName(provider)} account`

   const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])

@@ -203,7 +210,7 @@ export function ToolCredentialSelector({
       selectedValue={selectedId}
       onChange={handleComboboxChange}
       onOpenChange={handleOpenChange}
-      placeholder={label}
+      placeholder={effectiveLabel}
       disabled={disabled}
       editable={true}
       filterOptions={!isForeign}
@@ -0,0 +1,186 @@
+'use client'
+
+import type React from 'react'
+import { useRef, useState } from 'react'
+import { ArrowLeftRight, ArrowUp } from 'lucide-react'
+import { Button, Input, Label, Tooltip } from '@/components/emcn'
+import { cn } from '@/lib/core/utils/cn'
+import type { WandControlHandlers } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
+
+/**
+ * Props for a generic parameter with label component
+ */
+export interface ParameterWithLabelProps {
+  paramId: string
+  title: string
+  isRequired: boolean
+  visibility: string
+  wandConfig?: {
+    enabled: boolean
+    prompt?: string
+    placeholder?: string
+  }
+  canonicalToggle?: {
+    mode: 'basic' | 'advanced'
+    disabled?: boolean
+    onToggle?: () => void
+  }
+  disabled: boolean
+  isPreview: boolean
+  children: (wandControlRef: React.MutableRefObject<WandControlHandlers | null>) => React.ReactNode
+}
+
+/**
+ * Generic wrapper component for parameters that manages wand state and renders label + input
+ */
+export function ParameterWithLabel({
+  paramId,
+  title,
+  isRequired,
+  visibility,
+  wandConfig,
+  canonicalToggle,
+  disabled,
+  isPreview,
+  children,
+}: ParameterWithLabelProps) {
+  const [isSearchActive, setIsSearchActive] = useState(false)
+  const [searchQuery, setSearchQuery] = useState('')
+  const searchInputRef = useRef<HTMLInputElement>(null)
+  const wandControlRef = useRef<WandControlHandlers | null>(null)
+
+  const isWandEnabled = wandConfig?.enabled ?? false
+  const showWand = isWandEnabled && !isPreview && !disabled
+
+  const handleSearchClick = (): void => {
+    setIsSearchActive(true)
+    setTimeout(() => {
+      searchInputRef.current?.focus()
+    }, 0)
+  }
+
+  const handleSearchBlur = (): void => {
+    if (!searchQuery.trim() && !wandControlRef.current?.isWandStreaming) {
+      setIsSearchActive(false)
+    }
+  }
+
+  const handleSearchChange = (value: string): void => {
+    setSearchQuery(value)
+  }
+
+  const handleSearchSubmit = (): void => {
+    if (searchQuery.trim() && wandControlRef.current) {
+      wandControlRef.current.onWandTrigger(searchQuery)
+      setSearchQuery('')
+      setIsSearchActive(false)
+    }
+  }
+
+  const handleSearchCancel = (): void => {
+    setSearchQuery('')
+    setIsSearchActive(false)
+  }
+
+  const isStreaming = wandControlRef.current?.isWandStreaming ?? false
+
+  return (
+    <div key={paramId} className='relative min-w-0 space-y-[6px]'>
+      <div className='flex items-center justify-between gap-[6px] pl-[2px]'>
+        <Label className='flex items-baseline gap-[6px] whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
+          {title}
+          {isRequired && visibility === 'user-only' && <span className='ml-0.5'>*</span>}
+        </Label>
+        <div className='flex min-w-0 flex-1 items-center justify-end gap-[6px]'>
+          {showWand &&
+            (!isSearchActive ? (
+              <Button
+                variant='active'
+                className='-my-1 h-5 px-2 py-0 text-[11px]'
+                onClick={handleSearchClick}
+              >
+                Generate
+              </Button>
+            ) : (
+              <div className='-my-1 flex min-w-[120px] max-w-[280px] flex-1 items-center gap-[4px]'>
+                <Input
+                  ref={searchInputRef}
+                  value={isStreaming ? 'Generating...' : searchQuery}
+                  onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
+                    handleSearchChange(e.target.value)
+                  }
+                  onBlur={(e: React.FocusEvent<HTMLInputElement>) => {
+                    const relatedTarget = e.relatedTarget as HTMLElement | null
+                    if (relatedTarget?.closest('button')) return
+                    handleSearchBlur()
+                  }}
+                  onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => {
+                    if (e.key === 'Enter' && searchQuery.trim() && !isStreaming) {
+                      handleSearchSubmit()
+                    } else if (e.key === 'Escape') {
+                      handleSearchCancel()
+                    }
+                  }}
+                  disabled={isStreaming}
+                  className={cn(
+                    'h-5 min-w-[80px] flex-1 text-[11px]',
+                    isStreaming && 'text-muted-foreground'
+                  )}
+                  placeholder='Generate with AI...'
+                />
+                <Button
+                  variant='tertiary'
+                  disabled={!searchQuery.trim() || isStreaming}
+                  onMouseDown={(e: React.MouseEvent) => {
+                    e.preventDefault()
+                    e.stopPropagation()
+                  }}
+                  onClick={(e: React.MouseEvent) => {
+                    e.stopPropagation()
+                    handleSearchSubmit()
+                  }}
+                  className='h-[20px] w-[20px] flex-shrink-0 p-0'
+                >
+                  <ArrowUp className='h-[12px] w-[12px]' />
+                </Button>
+              </div>
+            ))}
+          {canonicalToggle && !isPreview && (
+            <Tooltip.Root>
+              <Tooltip.Trigger asChild>
+                <button
+                  type='button'
+                  className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
+                  onClick={canonicalToggle.onToggle}
+                  disabled={canonicalToggle.disabled || disabled}
+                  aria-label={
+                    canonicalToggle.mode === 'advanced'
+                      ? 'Switch to selector'
+                      : 'Switch to manual ID'
+                  }
+                >
+                  <ArrowLeftRight
+                    className={cn(
+                      '!h-[12px] !w-[12px]',
+                      canonicalToggle.mode === 'advanced'
+                        ? 'text-[var(--text-primary)]'
+                        : 'text-[var(--text-secondary)]'
+                    )}
+                  />
+                </button>
+              </Tooltip.Trigger>
+              <Tooltip.Content side='top'>
+                <p>
+                  {canonicalToggle.mode === 'advanced'
+                    ? 'Switch to selector'
+                    : 'Switch to manual ID'}
+                </p>
+              </Tooltip.Content>
+            </Tooltip.Root>
+          )}
+        </div>
+      </div>
+      <div className='relative w-full min-w-0'>{children(wandControlRef)}</div>
+    </div>
+  )
+}
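
A minimal usage sketch of the new wrapper. Only the prop names come from the interface above; the import path and the plain input rendered inside the render prop are illustrative stand-ins.

import { ParameterWithLabel } from './parameter-with-label' // path assumed for illustration

function ApiKeyParam({ disabled }: { disabled: boolean }) {
  return (
    <ParameterWithLabel
      paramId='apiKey'
      title='API Key'
      isRequired={true}
      visibility='user-only'
      wandConfig={{ enabled: false }}
      disabled={disabled}
      isPreview={false}
    >
      {(_wandControlRef) => (
        // Real callers wire the wand ref into a sub-block input so AI generation
        // can stream into it; a plain input stands in here and ignores the ref.
        <input id='apiKey' disabled={disabled} />
      )}
    </ParameterWithLabel>
  )
}
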
@@ -0,0 +1,114 @@
+'use client'
+
+import { useEffect, useRef } from 'react'
+import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
+import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
+import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
+
+interface ToolSubBlockRendererProps {
+  blockId: string
+  subBlockId: string
+  toolIndex: number
+  subBlock: BlockSubBlockConfig
+  effectiveParamId: string
+  toolParams: Record<string, string> | undefined
+  onParamChange: (toolIndex: number, paramId: string, value: string) => void
+  disabled: boolean
+  canonicalToggle?: {
+    mode: 'basic' | 'advanced'
+    disabled?: boolean
+    onToggle?: () => void
+  }
+}
+
+/**
+ * SubBlock types whose store values are objects/arrays/non-strings.
+ * tool.params stores strings (via JSON.stringify), so when syncing
+ * back to the store we parse them to restore the native shape.
+ */
+const OBJECT_SUBBLOCK_TYPES = new Set(['file-upload', 'table', 'grouped-checkbox-list'])
+
+/**
+ * Bridges the subblock store with StoredTool.params via a synthetic store key,
+ * then delegates all rendering to SubBlock for full parity.
+ */
+export function ToolSubBlockRenderer({
+  blockId,
+  subBlockId,
+  toolIndex,
+  subBlock,
+  effectiveParamId,
+  toolParams,
+  onParamChange,
+  disabled,
+  canonicalToggle,
+}: ToolSubBlockRendererProps) {
+  const syntheticId = `${subBlockId}-tool-${toolIndex}-${effectiveParamId}`
+  const [storeValue, setStoreValue] = useSubBlockValue(blockId, syntheticId)
+
+  const toolParamValue = toolParams?.[effectiveParamId] ?? ''
+  const isObjectType = OBJECT_SUBBLOCK_TYPES.has(subBlock.type)
+
+  const lastPushedToStoreRef = useRef<string | null>(null)
+  const lastPushedToParamsRef = useRef<string | null>(null)
+
+  useEffect(() => {
+    if (!toolParamValue && lastPushedToStoreRef.current === null) {
+      lastPushedToStoreRef.current = toolParamValue
+      lastPushedToParamsRef.current = toolParamValue
+      return
+    }
+    if (toolParamValue !== lastPushedToStoreRef.current) {
+      lastPushedToStoreRef.current = toolParamValue
+      lastPushedToParamsRef.current = toolParamValue
+
+      if (isObjectType && typeof toolParamValue === 'string' && toolParamValue) {
+        try {
+          const parsed = JSON.parse(toolParamValue)
+          if (typeof parsed === 'object' && parsed !== null) {
+            setStoreValue(parsed)
+            return
+          }
+        } catch {
+          // Not valid JSON — fall through to set as string
+        }
+      }
+      setStoreValue(toolParamValue)
+    }
+  }, [toolParamValue, setStoreValue, isObjectType])
+
+  useEffect(() => {
+    if (storeValue == null && lastPushedToParamsRef.current === null) return
+    const stringValue =
+      storeValue == null
+        ? ''
+        : typeof storeValue === 'string'
+          ? storeValue
+          : JSON.stringify(storeValue)
+    if (stringValue !== lastPushedToParamsRef.current) {
+      lastPushedToParamsRef.current = stringValue
+      lastPushedToStoreRef.current = stringValue
+      onParamChange(toolIndex, effectiveParamId, stringValue)
+    }
+  }, [storeValue, toolIndex, effectiveParamId, onParamChange])
+
+  const visibility = subBlock.paramVisibility ?? 'user-or-llm'
+  const isOptionalForUser = visibility !== 'user-only'
+
+  const config = {
+    ...subBlock,
+    id: syntheticId,
+    ...(isOptionalForUser && { required: false }),
+  }
+
+  return (
+    <SubBlock
+      blockId={blockId}
+      config={config}
+      isPreview={false}
+      disabled={disabled}
+      canonicalToggle={canonicalToggle}
+      dependencyContext={toolParams}
+    />
+  )
+}
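
A sketch of how a tool-input list might mount the bridge component for one parameter. The surrounding state (the tool object and its update callback) is an assumption here; the prop names match ToolSubBlockRendererProps above.

// Illustrative only: paramConfig is cast loosely because the full SubBlockConfig
// shape is defined elsewhere in @/blocks/types.
function renderToolParam(
  blockId: string,
  subBlockId: string,
  toolIndex: number,
  tool: { params?: Record<string, string> },
  paramConfig: { id: string; type: string },
  updateToolParam: (toolIndex: number, paramId: string, value: string) => void
) {
  return (
    <ToolSubBlockRenderer
      blockId={blockId}
      subBlockId={subBlockId}
      toolIndex={toolIndex}
      subBlock={paramConfig as any}
      effectiveParamId={paramConfig.id}
      toolParams={tool.params}
      onParamChange={updateToolParam}
      disabled={false}
    />
  )
}
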
@@ -2,37 +2,12 @@
  * @vitest-environment node
  */
 import { describe, expect, it } from 'vitest'
-interface StoredTool {
-  type: string
-  title?: string
-  toolId?: string
-  params?: Record<string, string>
-  customToolId?: string
-  schema?: any
-  code?: string
-  operation?: string
-  usageControl?: 'auto' | 'force' | 'none'
-}
-
-const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
-  return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
-}
-
-const isCustomToolAlreadySelected = (
-  selectedTools: StoredTool[],
-  customToolId: string
-): boolean => {
-  return selectedTools.some(
-    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
-  )
-}
-
-const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
-  return selectedTools.some(
-    (tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
-  )
-}
+import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
+import {
+  isCustomToolAlreadySelected,
+  isMcpToolAlreadySelected,
+  isWorkflowAlreadySelected,
+} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/utils'

 describe('isMcpToolAlreadySelected', () => {
   describe('basic functionality', () => {
File diff suppressed because it is too large
@@ -0,0 +1,31 @@
+/**
+ * Represents a tool selected and configured in the workflow
+ *
+ * @remarks
+ * For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded.
+ * Everything else (title, schema, code) is loaded dynamically from the database.
+ * Legacy custom tools with inline schema/code are still supported for backwards compatibility.
+ */
+export interface StoredTool {
+  /** Block type identifier */
+  type: string
+  /** Display title for the tool (optional for new custom tool format) */
+  title?: string
+  /** Direct tool ID for execution (optional for new custom tool format) */
+  toolId?: string
+  /** Parameter values configured by the user (optional for new custom tool format) */
+  params?: Record<string, string>
+  /** Whether the tool details are expanded in UI */
+  isExpanded?: boolean
+  /** Database ID for custom tools (new format - reference only) */
+  customToolId?: string
+  /** Tool schema for custom tools (legacy format - inline JSON schema) */
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  schema?: Record<string, any>
+  /** Implementation code for custom tools (legacy format - inline) */
+  code?: string
+  /** Selected operation for multi-operation tools */
+  operation?: string
+  /** Tool usage control mode for LLM */
+  usageControl?: 'auto' | 'force' | 'none'
+}
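
Two example values matching the interface, one new-format custom-tool reference and one ordinary tool entry. The ids, titles, and parameter values are placeholders; only the field names follow StoredTool above.

import type { StoredTool } from './types' // path shortened for illustration

// New format: only the database reference plus usage flags are stored.
const customToolRef: StoredTool = {
  type: 'custom-tool',
  customToolId: 'db-id-123',
  usageControl: 'auto',
  isExpanded: false,
}

// Ordinary tool entry with inline params (all values hypothetical).
const exampleTool: StoredTool = {
  type: 'gmail',
  title: 'Gmail',
  toolId: 'gmail_send',
  operation: 'send',
  params: { to: 'someone@example.com' },
  usageControl: 'force',
}
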
@@ -0,0 +1,32 @@
+import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
+
+/**
+ * Checks if an MCP tool is already selected.
+ */
+export function isMcpToolAlreadySelected(selectedTools: StoredTool[], mcpToolId: string): boolean {
+  return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
+}
+
+/**
+ * Checks if a custom tool is already selected.
+ */
+export function isCustomToolAlreadySelected(
+  selectedTools: StoredTool[],
+  customToolId: string
+): boolean {
+  return selectedTools.some(
+    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
+  )
+}
+
+/**
+ * Checks if a workflow is already selected.
+ */
+export function isWorkflowAlreadySelected(
+  selectedTools: StoredTool[],
+  workflowId: string
+): boolean {
+  return selectedTools.some(
+    (tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
+  )
+}
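
Example of guarding against duplicate selection with the extracted helpers; the tool list and ids are illustrative.

import type { StoredTool } from './types' // paths shortened for illustration
import {
  isCustomToolAlreadySelected,
  isMcpToolAlreadySelected,
  isWorkflowAlreadySelected,
} from './utils'

const selectedTools: StoredTool[] = [
  { type: 'mcp', toolId: 'mcp-search' },
  { type: 'custom-tool', customToolId: 'ct-1' },
  { type: 'workflow_input', params: { workflowId: 'wf-42' } },
]

isMcpToolAlreadySelected(selectedTools, 'mcp-search') // true
isCustomToolAlreadySelected(selectedTools, 'ct-1') // true
isWorkflowAlreadySelected(selectedTools, 'wf-999') // false
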
@@ -3,7 +3,6 @@ import { isEqual } from 'lodash'
 import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
 import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
 import { cn } from '@/lib/core/utils/cn'
-import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
 import {
   CheckboxList,
   Code,
@@ -69,13 +68,15 @@ interface SubBlockProps {
   isPreview?: boolean
   subBlockValues?: Record<string, any>
   disabled?: boolean
-  fieldDiffStatus?: FieldDiffStatus
   allowExpandInPreview?: boolean
   canonicalToggle?: {
     mode: 'basic' | 'advanced'
     disabled?: boolean
     onToggle?: () => void
   }
+  labelSuffix?: React.ReactNode
+  /** Provides sibling values for dependency resolution in non-preview contexts (e.g. tool-input) */
+  dependencyContext?: Record<string, unknown>
 }
 
 /**
@@ -162,16 +163,14 @@ const getPreviewValue = (
 /**
  * Renders the label with optional validation and description tooltips.
  *
- * @remarks
- * Handles JSON validation indicators for code blocks and required field markers.
- * Includes inline AI generate button when wand is enabled.
- *
  * @param config - The sub-block configuration defining the label content
  * @param isValidJson - Whether the JSON content is valid (for code blocks)
  * @param subBlockValues - Current values of all subblocks for evaluating conditional requirements
- * @param wandState - Optional state and handlers for the AI wand feature
- * @param canonicalToggle - Optional canonical toggle metadata and handlers
- * @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled
+ * @param wandState - State and handlers for the inline AI generate feature
+ * @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
+ * @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled (includes dependsOn gating)
+ * @param copyState - State and handler for the copy-to-clipboard button
+ * @param labelSuffix - Additional content rendered after the label text
  * @returns The label JSX element, or `null` for switch types or when no title is defined
  */
 const renderLabel = (
@@ -202,7 +201,8 @@ const renderLabel = (
     showCopyButton: boolean
     copied: boolean
     onCopy: () => void
-  }
+  },
+  labelSuffix?: React.ReactNode
 ): JSX.Element | null => {
   if (config.type === 'switch') return null
   if (!config.title) return null
@@ -215,9 +215,10 @@ const renderLabel = (
 
   return (
     <div className='flex items-center justify-between gap-[6px] pl-[2px]'>
-      <Label className='flex items-center gap-[6px] whitespace-nowrap'>
+      <Label className='flex items-baseline gap-[6px] whitespace-nowrap'>
         {config.title}
         {required && <span className='ml-0.5'>*</span>}
+        {labelSuffix}
         {config.type === 'code' &&
           config.language === 'json' &&
           !isValidJson &&
@@ -383,28 +384,25 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
     prevProps.isPreview === nextProps.isPreview &&
     valueEqual &&
     prevProps.disabled === nextProps.disabled &&
-    prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
     prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    canonicalToggleEqual
+    canonicalToggleEqual &&
+    prevProps.labelSuffix === nextProps.labelSuffix &&
+    prevProps.dependencyContext === nextProps.dependencyContext
   )
 }
 
 /**
  * Renders a single workflow sub-block input based on config.type.
  *
- * @remarks
- * Supports multiple input types including short-input, long-input, dropdown,
- * combobox, slider, table, code, switch, tool-input, and many more.
- * Handles preview mode, disabled states, and AI wand generation.
- *
  * @param blockId - The parent block identifier
 * @param config - Configuration defining the input type and properties
 * @param isPreview - Whether to render in preview mode
 * @param subBlockValues - Current values of all subblocks
 * @param disabled - Whether the input is disabled
- * @param fieldDiffStatus - Optional diff status for visual indicators
 * @param allowExpandInPreview - Whether to allow expanding in preview mode
- * @returns The rendered sub-block input component
+ * @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
+ * @param labelSuffix - Additional content rendered after the label text
+ * @param dependencyContext - Sibling values for dependency resolution in non-preview contexts (e.g. tool-input)
  */
 function SubBlockComponent({
   blockId,
@@ -412,9 +410,10 @@ function SubBlockComponent({
   isPreview = false,
   subBlockValues,
   disabled = false,
-  fieldDiffStatus,
   allowExpandInPreview,
   canonicalToggle,
+  labelSuffix,
+  dependencyContext,
 }: SubBlockProps): JSX.Element {
   const [isValidJson, setIsValidJson] = useState(true)
   const [isSearchActive, setIsSearchActive] = useState(false)
@@ -423,7 +422,6 @@ function SubBlockComponent({
   const searchInputRef = useRef<HTMLInputElement>(null)
   const wandControlRef = useRef<WandControlHandlers | null>(null)
 
-  // Use webhook management hook when config has useWebhookUrl enabled
   const webhookManagement = useWebhookManagement({
     blockId,
     triggerId: undefined,
@@ -510,10 +508,12 @@ function SubBlockComponent({
     | null
     | undefined
 
+  const contextValues = dependencyContext ?? (isPreview ? subBlockValues : undefined)
+
   const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, {
     disabled,
     isPreview,
-    previewContextValues: isPreview ? subBlockValues : undefined,
+    previewContextValues: contextValues,
   })
 
   const isDisabled = gatedDisabled
@@ -797,7 +797,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -809,7 +809,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -821,7 +821,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -833,7 +833,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -845,7 +845,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -868,7 +868,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue as any}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -880,7 +880,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue as any}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -892,7 +892,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue as any}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -917,7 +917,7 @@ function SubBlockComponent({
            isPreview={isPreview}
            previewValue={previewValue as any}
            disabled={isDisabled}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -953,7 +953,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -987,7 +987,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue as any}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -999,7 +999,7 @@ function SubBlockComponent({
            disabled={isDisabled}
            isPreview={isPreview}
            previewValue={previewValue}
-            previewContextValues={isPreview ? subBlockValues : undefined}
+            previewContextValues={contextValues}
          />
        )
 
@@ -1059,7 +1059,8 @@ function SubBlockComponent({
          showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
          copied,
          onCopy: handleCopy,
-        }
+        },
+        labelSuffix
      )}
      {renderInput()}
    </div>
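
A rough sketch of how the new dependencyContext and labelSuffix props could be passed by a caller such as tool-input, assuming the memoized export of SubBlockComponent is rendered as <SubBlock>; the prop values and surrounding identifiers are illustrative, not taken from this diff:

<SubBlock
  blockId={blockId}
  config={paramConfig}
  isPreview={false}
  disabled={!canEdit}
  dependencyContext={{ operation: currentOperation, apiKey: currentApiKey }}
  labelSuffix={<span className='text-xs'>optional</span>}
/>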
@@ -571,7 +571,6 @@ export function Editor() {
                     isPreview={false}
                     subBlockValues={subBlockState}
                     disabled={!canEditBlock}
-                    fieldDiffStatus={undefined}
                     allowExpandInPreview={false}
                     canonicalToggle={
                       isCanonicalSwap && canonicalMode && canonicalId
@@ -635,7 +634,6 @@ export function Editor() {
                     isPreview={false}
                     subBlockValues={subBlockState}
                     disabled={!canEditBlock}
-                    fieldDiffStatus={undefined}
                     allowExpandInPreview={false}
                   />
                   {index < advancedOnlySubBlocks.length - 1 && (
@@ -1,4 +1,4 @@
-import { useCallback, useRef, useState } from 'react'
+import { useCallback, useEffect, useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
 import { useQueryClient } from '@tanstack/react-query'
 import { v4 as uuidv4 } from 'uuid'
@@ -46,7 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
 
 const logger = createLogger('useWorkflowExecution')
 
-// Debug state validation result
+/**
+ * Module-level Set tracking which workflows have an active reconnection effect.
+ * Prevents multiple hook instances (from different components) from starting
+ * concurrent reconnection streams for the same workflow during the same mount cycle.
+ */
+const activeReconnections = new Set<string>()
+
 interface DebugValidationResult {
   isValid: boolean
   error?: string
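
The module-level Set documented above is a simple concurrency guard; a self-contained sketch of the pattern (simplified, not the hook itself):

const activeReconnections = new Set<string>()

async function reconnectOnce(workflowId: string, reconnect: () => Promise<void>) {
  if (activeReconnections.has(workflowId)) return // another hook instance got there first
  activeReconnections.add(workflowId)
  try {
    await reconnect()
  } finally {
    activeReconnections.delete(workflowId) // the real effect also deletes the id in its cleanup
  }
}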
@@ -54,7 +60,7 @@ interface DebugValidationResult {
 
 interface BlockEventHandlerConfig {
   workflowId?: string
-  executionId?: string
+  executionIdRef: { current: string }
   workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
   activeBlocksSet: Set<string>
   accumulatedBlockLogs: BlockLog[]
@@ -108,12 +114,15 @@ export function useWorkflowExecution() {
   const queryClient = useQueryClient()
   const currentWorkflow = useCurrentWorkflow()
   const { activeWorkflowId, workflows } = useWorkflowRegistry()
-  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
+  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
     useTerminalConsoleStore()
+  const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
   const { getAllVariables } = useEnvironmentStore()
   const { getVariablesByWorkflowId, variables } = useVariablesStore()
   const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
     useCurrentWorkflowExecution()
+  const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
+  const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
   const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
   const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
   const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -297,7 +306,7 @@ export function useWorkflowExecution() {
     (config: BlockEventHandlerConfig) => {
       const {
         workflowId,
-        executionId,
+        executionIdRef,
         workflowEdges,
         activeBlocksSet,
         accumulatedBlockLogs,
@@ -308,6 +317,14 @@ export function useWorkflowExecution() {
         onBlockCompleteCallback,
       } = config
 
+      /** Returns true if this execution was cancelled or superseded by another run. */
+      const isStaleExecution = () =>
+        !!(
+          workflowId &&
+          executionIdRef.current &&
+          useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
+        )
+
       const updateActiveBlocks = (blockId: string, isActive: boolean) => {
         if (!workflowId) return
         if (isActive) {
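
The executionIdRef plus isStaleExecution pairing above drops events from superseded runs; a self-contained sketch of the same idea with plain objects (no store imports, names simplified):

type Ref = { current: string }

function makeHandlers(executionIdRef: Ref, getCurrentId: () => string | null) {
  const isStale = () =>
    executionIdRef.current !== '' && getCurrentId() !== executionIdRef.current
  return {
    onBlockStarted(blockId: string) {
      if (isStale()) return // event belongs to a cancelled or superseded run
      console.log('block started', blockId)
    },
  }
}

// The ref starts empty and is filled once the server assigns an execution id.
let currentId: string | null = null
const ref: Ref = { current: '' }
const handlers = makeHandlers(ref, () => currentId)
ref.current = 'exec-1'
currentId = 'exec-1'
handlers.onBlockStarted('a') // handled
currentId = 'exec-2' // a newer run took over
handlers.onBlockStarted('b') // dropped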
@@ -360,7 +377,7 @@ export function useWorkflowExecution() {
           endedAt: data.endedAt,
           workflowId,
           blockId: data.blockId,
-          executionId,
+          executionId: executionIdRef.current,
           blockName: data.blockName || 'Unknown Block',
           blockType: data.blockType || 'unknown',
           iterationCurrent: data.iterationCurrent,
@@ -383,7 +400,7 @@ export function useWorkflowExecution() {
           endedAt: data.endedAt,
           workflowId,
           blockId: data.blockId,
-          executionId,
+          executionId: executionIdRef.current,
           blockName: data.blockName || 'Unknown Block',
           blockType: data.blockType || 'unknown',
           iterationCurrent: data.iterationCurrent,
@@ -410,7 +427,7 @@ export function useWorkflowExecution() {
             iterationType: data.iterationType,
             iterationContainerId: data.iterationContainerId,
           },
-          executionId
+          executionIdRef.current
         )
       }
 
@@ -432,11 +449,12 @@ export function useWorkflowExecution() {
           iterationType: data.iterationType,
           iterationContainerId: data.iterationContainerId,
         },
-        executionId
+        executionIdRef.current
       )
     }
 
     const onBlockStarted = (data: BlockStartedData) => {
+      if (isStaleExecution()) return
       updateActiveBlocks(data.blockId, true)
       markIncomingEdges(data.blockId)
 
@@ -453,7 +471,7 @@ export function useWorkflowExecution() {
         endedAt: undefined,
         workflowId,
         blockId: data.blockId,
-        executionId,
+        executionId: executionIdRef.current,
         blockName: data.blockName || 'Unknown Block',
         blockType: data.blockType || 'unknown',
         isRunning: true,
@@ -465,6 +483,7 @@ export function useWorkflowExecution() {
     }
 
     const onBlockCompleted = (data: BlockCompletedData) => {
+      if (isStaleExecution()) return
       updateActiveBlocks(data.blockId, false)
       if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
 
@@ -495,6 +514,7 @@ export function useWorkflowExecution() {
     }
 
     const onBlockError = (data: BlockErrorData) => {
+      if (isStaleExecution()) return
      updateActiveBlocks(data.blockId, false)
      if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
 
@@ -902,10 +922,6 @@ export function useWorkflowExecution() {
 
      // Update block logs with actual stream completion times
      if (result.logs && streamCompletionTimes.size > 0) {
-        const streamCompletionEndTime = new Date(
-          Math.max(...Array.from(streamCompletionTimes.values()))
-        ).toISOString()
-
        result.logs.forEach((log: BlockLog) => {
          if (streamCompletionTimes.has(log.blockId)) {
            const completionTime = streamCompletionTimes.get(log.blockId)!
@@ -987,7 +1003,6 @@ export function useWorkflowExecution() {
        return { success: true, stream }
      }
 
-      // For manual (non-chat) execution
      const manualExecutionId = uuidv4()
      try {
        const result = await executeWorkflow(
@@ -1002,29 +1017,10 @@ export function useWorkflowExecution() {
          if (result.metadata.pendingBlocks) {
            setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
          }
-        } else if (result && 'success' in result) {
-          setExecutionResult(result)
-          // Reset execution state after successful non-debug execution
-          setIsExecuting(activeWorkflowId, false)
-          setIsDebugging(activeWorkflowId, false)
-          setActiveBlocks(activeWorkflowId, new Set())
-
-          if (isChatExecution) {
-            if (!result.metadata) {
-              result.metadata = { duration: 0, startTime: new Date().toISOString() }
-            }
-            ;(result.metadata as any).source = 'chat'
-          }
-
-          // Invalidate subscription queries to update usage
-          setTimeout(() => {
-            queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
-          }, 1000)
        }
        return result
      } catch (error: any) {
        const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
-        // Note: Error logs are already persisted server-side via execution-core.ts
        return errorResult
      }
    },
@@ -1275,7 +1271,7 @@ export function useWorkflowExecution() {
      if (activeWorkflowId) {
        logger.info('Using server-side executor')
 
-        const executionId = uuidv4()
+        const executionIdRef = { current: '' }
 
        let executionResult: ExecutionResult = {
          success: false,
@@ -1293,7 +1289,7 @@ export function useWorkflowExecution() {
        try {
          const blockHandlers = buildBlockEventHandlers({
            workflowId: activeWorkflowId,
-            executionId,
+            executionIdRef,
            workflowEdges,
            activeBlocksSet,
            accumulatedBlockLogs,
@@ -1326,6 +1322,10 @@ export function useWorkflowExecution() {
              loops: clientWorkflowState.loops,
              parallels: clientWorkflowState.parallels,
            },
+            onExecutionId: (id) => {
+              executionIdRef.current = id
+              setCurrentExecutionId(activeWorkflowId, id)
+            },
            callbacks: {
              onExecutionStarted: (data) => {
                logger.info('Server execution started:', data)
@@ -1368,6 +1368,18 @@ export function useWorkflowExecution() {
              },
 
              onExecutionCompleted: (data) => {
+                if (
+                  activeWorkflowId &&
+                  executionIdRef.current &&
+                  useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                    executionIdRef.current
+                )
+                  return
+
+                if (activeWorkflowId) {
+                  setCurrentExecutionId(activeWorkflowId, null)
+                }
+
                executionResult = {
                  success: data.success,
                  output: data.output,
@@ -1425,9 +1437,33 @@ export function useWorkflowExecution() {
                    })
                  }
                }
+
+                const workflowExecState = activeWorkflowId
+                  ? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
+                  : null
+                if (activeWorkflowId && !workflowExecState?.isDebugging) {
+                  setExecutionResult(executionResult)
+                  setIsExecuting(activeWorkflowId, false)
+                  setActiveBlocks(activeWorkflowId, new Set())
+                  setTimeout(() => {
+                    queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
+                  }, 1000)
+                }
              },
 
              onExecutionError: (data) => {
+                if (
+                  activeWorkflowId &&
+                  executionIdRef.current &&
+                  useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                    executionIdRef.current
+                )
+                  return
+
+                if (activeWorkflowId) {
+                  setCurrentExecutionId(activeWorkflowId, null)
+                }
+
                executionResult = {
                  success: false,
                  output: {},
@@ -1441,43 +1477,53 @@ export function useWorkflowExecution() {
                const isPreExecutionError = accumulatedBlockLogs.length === 0
                handleExecutionErrorConsole({
                  workflowId: activeWorkflowId,
-                  executionId,
+                  executionId: executionIdRef.current,
                  error: data.error,
                  durationMs: data.duration,
                  blockLogs: accumulatedBlockLogs,
                  isPreExecutionError,
                })
 
+                if (activeWorkflowId) {
+                  setIsExecuting(activeWorkflowId, false)
+                  setIsDebugging(activeWorkflowId, false)
+                  setActiveBlocks(activeWorkflowId, new Set())
+                }
              },
 
              onExecutionCancelled: (data) => {
+                if (
+                  activeWorkflowId &&
+                  executionIdRef.current &&
+                  useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                    executionIdRef.current
+                )
+                  return
+
+                if (activeWorkflowId) {
+                  setCurrentExecutionId(activeWorkflowId, null)
+                }
+
                handleExecutionCancelledConsole({
                  workflowId: activeWorkflowId,
-                  executionId,
+                  executionId: executionIdRef.current,
                  durationMs: data?.duration,
                })
 
+                if (activeWorkflowId) {
+                  setIsExecuting(activeWorkflowId, false)
+                  setIsDebugging(activeWorkflowId, false)
+                  setActiveBlocks(activeWorkflowId, new Set())
+                }
              },
            },
          })
 
          return executionResult
        } catch (error: any) {
-          // Don't log abort errors - they're intentional user actions
          if (error.name === 'AbortError' || error.message?.includes('aborted')) {
            logger.info('Execution aborted by user')
+            return executionResult
-            // Reset execution state
-            if (activeWorkflowId) {
-              setIsExecuting(activeWorkflowId, false)
-              setActiveBlocks(activeWorkflowId, new Set())
-            }
-
-            // Return gracefully without error
-            return {
-              success: false,
-              output: {},
-              metadata: { duration: 0 },
-              logs: [],
-            }
          }
 
          logger.error('Server-side execution failed:', error)
@@ -1485,7 +1531,6 @@ export function useWorkflowExecution() {
          }
        }
 
-        // Fallback: should never reach here
        throw new Error('Server-side execution is required')
      }
 
@@ -1717,25 +1762,28 @@ export function useWorkflowExecution() {
    * Handles cancelling the current workflow execution
    */
   const handleCancelExecution = useCallback(() => {
+    if (!activeWorkflowId) return
     logger.info('Workflow execution cancellation requested')
 
-    // Cancel the execution stream for this workflow (server-side)
-    executionStream.cancel(activeWorkflowId ?? undefined)
-
-    // Mark current chat execution as superseded so its cleanup won't affect new executions
-    currentChatExecutionIdRef.current = null
-
-    // Mark all running entries as canceled in the terminal
-    if (activeWorkflowId) {
-      cancelRunningEntries(activeWorkflowId)
-
-      // Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
-      setIsExecuting(activeWorkflowId, false)
-      setIsDebugging(activeWorkflowId, false)
-      setActiveBlocks(activeWorkflowId, new Set())
+    const storedExecutionId = getCurrentExecutionId(activeWorkflowId)
+
+    if (storedExecutionId) {
+      setCurrentExecutionId(activeWorkflowId, null)
+      fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
+        method: 'POST',
+      }).catch(() => {})
+      handleExecutionCancelledConsole({
+        workflowId: activeWorkflowId,
+        executionId: storedExecutionId,
+      })
     }
 
-    // If in debug mode, also reset debug state
+    executionStream.cancel(activeWorkflowId)
+    currentChatExecutionIdRef.current = null
+    setIsExecuting(activeWorkflowId, false)
+    setIsDebugging(activeWorkflowId, false)
+    setActiveBlocks(activeWorkflowId, new Set())
+
     if (isDebugging) {
       resetDebugState()
     }
@@ -1747,7 +1795,9 @@ export function useWorkflowExecution() {
     setIsDebugging,
     setActiveBlocks,
     activeWorkflowId,
-    cancelRunningEntries,
+    getCurrentExecutionId,
+    setCurrentExecutionId,
+    handleExecutionCancelledConsole,
   ])
 
   /**
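
The reworked handleCancelExecution above clears the stored execution id and fires a best-effort POST to the cancel endpoint before resetting local state; stripped of React, that call is roughly:

async function cancelExecution(workflowId: string, executionId: string): Promise<void> {
  await fetch(`/api/workflows/${workflowId}/executions/${executionId}/cancel`, {
    method: 'POST',
  }).catch(() => {
    // best effort: the UI has already cleared its local execution state
  })
}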
@@ -1847,7 +1897,7 @@ export function useWorkflowExecution() {
      }
 
      setIsExecuting(workflowId, true)
-      const executionId = uuidv4()
+      const executionIdRef = { current: '' }
      const accumulatedBlockLogs: BlockLog[] = []
      const accumulatedBlockStates = new Map<string, BlockState>()
      const executedBlockIds = new Set<string>()
@@ -1856,7 +1906,7 @@ export function useWorkflowExecution() {
      try {
        const blockHandlers = buildBlockEventHandlers({
          workflowId,
-          executionId,
+          executionIdRef,
          workflowEdges,
          activeBlocksSet,
          accumulatedBlockLogs,
@@ -1871,6 +1921,10 @@ export function useWorkflowExecution() {
          startBlockId: blockId,
          sourceSnapshot: effectiveSnapshot,
          input: workflowInput,
+          onExecutionId: (id) => {
+            executionIdRef.current = id
+            setCurrentExecutionId(workflowId, id)
+          },
          callbacks: {
            onBlockStarted: blockHandlers.onBlockStarted,
            onBlockCompleted: blockHandlers.onBlockCompleted,
@@ -1878,7 +1932,6 @@ export function useWorkflowExecution() {
 
            onExecutionCompleted: (data) => {
              if (data.success) {
-                // Add the start block (trigger) to executed blocks
                executedBlockIds.add(blockId)
 
                const mergedBlockStates: Record<string, BlockState> = {
@@ -1902,6 +1955,10 @@ export function useWorkflowExecution() {
                }
                setLastExecutionSnapshot(workflowId, updatedSnapshot)
              }
+
+              setCurrentExecutionId(workflowId, null)
+              setIsExecuting(workflowId, false)
+              setActiveBlocks(workflowId, new Set())
            },
 
            onExecutionError: (data) => {
@@ -1921,19 +1978,27 @@ export function useWorkflowExecution() {
 
              handleExecutionErrorConsole({
                workflowId,
-                executionId,
+                executionId: executionIdRef.current,
                error: data.error,
                durationMs: data.duration,
                blockLogs: accumulatedBlockLogs,
              })
+
+              setCurrentExecutionId(workflowId, null)
+              setIsExecuting(workflowId, false)
+              setActiveBlocks(workflowId, new Set())
            },
 
            onExecutionCancelled: (data) => {
              handleExecutionCancelledConsole({
                workflowId,
-                executionId,
+                executionId: executionIdRef.current,
                durationMs: data?.duration,
              })
+
+              setCurrentExecutionId(workflowId, null)
+              setIsExecuting(workflowId, false)
+              setActiveBlocks(workflowId, new Set())
            },
          },
        })
@@ -1942,14 +2007,20 @@ export function useWorkflowExecution() {
          logger.error('Run-from-block failed:', error)
        }
      } finally {
-        setIsExecuting(workflowId, false)
-        setActiveBlocks(workflowId, new Set())
+        const currentId = getCurrentExecutionId(workflowId)
+        if (currentId === null || currentId === executionIdRef.current) {
+          setCurrentExecutionId(workflowId, null)
+          setIsExecuting(workflowId, false)
+          setActiveBlocks(workflowId, new Set())
+        }
      }
    },
    [
      getLastExecutionSnapshot,
      setLastExecutionSnapshot,
      clearLastExecutionSnapshot,
+      getCurrentExecutionId,
+      setCurrentExecutionId,
      setIsExecuting,
      setActiveBlocks,
      setBlockRunStatus,
@@ -1979,29 +2050,213 @@ export function useWorkflowExecution() {
 
      const executionId = uuidv4()
      try {
-        const result = await executeWorkflow(
-          undefined,
-          undefined,
-          executionId,
-          undefined,
-          'manual',
-          blockId
-        )
-        if (result && 'success' in result) {
-          setExecutionResult(result)
-        }
+        await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
      } catch (error) {
        const errorResult = handleExecutionError(error, { executionId })
        return errorResult
      } finally {
+        setCurrentExecutionId(workflowId, null)
        setIsExecuting(workflowId, false)
        setIsDebugging(workflowId, false)
        setActiveBlocks(workflowId, new Set())
      }
    },
-    [activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
+    [
+      activeWorkflowId,
+      setCurrentExecutionId,
+      setExecutionResult,
+      setIsExecuting,
+      setIsDebugging,
+      setActiveBlocks,
+    ]
  )
 
+  useEffect(() => {
+    if (!activeWorkflowId || !hasHydrated) return
+
+    const entries = useTerminalConsoleStore.getState().entries
+    const runningEntries = entries.filter(
+      (e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
+    )
+    if (runningEntries.length === 0) return
+
+    if (activeReconnections.has(activeWorkflowId)) return
+    activeReconnections.add(activeWorkflowId)
+
+    executionStream.cancel(activeWorkflowId)
+
+    const sorted = [...runningEntries].sort((a, b) => {
+      const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
+      const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
+      return bTime - aTime
+    })
+    const executionId = sorted[0].executionId!
+
+    const otherExecutionIds = new Set(
+      sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
+    )
+    if (otherExecutionIds.size > 0) {
+      cancelRunningEntries(activeWorkflowId)
+    }
+
+    setCurrentExecutionId(activeWorkflowId, executionId)
+    setIsExecuting(activeWorkflowId, true)
+
+    const workflowEdges = useWorkflowStore.getState().edges
+    const activeBlocksSet = new Set<string>()
+    const accumulatedBlockLogs: BlockLog[] = []
+    const accumulatedBlockStates = new Map<string, BlockState>()
+    const executedBlockIds = new Set<string>()
+
+    const executionIdRef = { current: executionId }
+
+    const handlers = buildBlockEventHandlers({
+      workflowId: activeWorkflowId,
+      executionIdRef,
+      workflowEdges,
+      activeBlocksSet,
+      accumulatedBlockLogs,
+      accumulatedBlockStates,
+      executedBlockIds,
+      consoleMode: 'update',
+      includeStartConsoleEntry: true,
+    })
+
+    const originalEntries = entries
+      .filter((e) => e.executionId === executionId)
+      .map((e) => ({ ...e }))
+
+    let cleared = false
+    let reconnectionComplete = false
+    let cleanupRan = false
+    const clearOnce = () => {
+      if (!cleared) {
+        cleared = true
+        clearExecutionEntries(executionId)
+      }
+    }
+
+    const reconnectWorkflowId = activeWorkflowId
+
+    executionStream
+      .reconnect({
+        workflowId: reconnectWorkflowId,
+        executionId,
+        callbacks: {
+          onBlockStarted: (data) => {
+            clearOnce()
+            handlers.onBlockStarted(data)
+          },
+          onBlockCompleted: (data) => {
+            clearOnce()
+            handlers.onBlockCompleted(data)
+          },
+          onBlockError: (data) => {
+            clearOnce()
+            handlers.onBlockError(data)
+          },
+          onExecutionCompleted: () => {
+            const currentId = useExecutionStore
+              .getState()
+              .getCurrentExecutionId(reconnectWorkflowId)
+            if (currentId !== executionId) {
+              reconnectionComplete = true
+              activeReconnections.delete(reconnectWorkflowId)
+              return
+            }
+            clearOnce()
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            setCurrentExecutionId(reconnectWorkflowId, null)
+            setIsExecuting(reconnectWorkflowId, false)
+            setActiveBlocks(reconnectWorkflowId, new Set())
+          },
+          onExecutionError: (data) => {
+            const currentId = useExecutionStore
+              .getState()
+              .getCurrentExecutionId(reconnectWorkflowId)
+            if (currentId !== executionId) {
+              reconnectionComplete = true
+              activeReconnections.delete(reconnectWorkflowId)
+              return
+            }
+            clearOnce()
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            setCurrentExecutionId(reconnectWorkflowId, null)
+            setIsExecuting(reconnectWorkflowId, false)
+            setActiveBlocks(reconnectWorkflowId, new Set())
+            handleExecutionErrorConsole({
+              workflowId: reconnectWorkflowId,
+              executionId,
+              error: data.error,
+              blockLogs: accumulatedBlockLogs,
+            })
+          },
+          onExecutionCancelled: () => {
+            const currentId = useExecutionStore
+              .getState()
+              .getCurrentExecutionId(reconnectWorkflowId)
+            if (currentId !== executionId) {
+              reconnectionComplete = true
+              activeReconnections.delete(reconnectWorkflowId)
+              return
+            }
+            clearOnce()
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            setCurrentExecutionId(reconnectWorkflowId, null)
+            setIsExecuting(reconnectWorkflowId, false)
+            setActiveBlocks(reconnectWorkflowId, new Set())
+            handleExecutionCancelledConsole({
+              workflowId: reconnectWorkflowId,
+              executionId,
+            })
+          },
+        },
+      })
+      .catch((error) => {
+        logger.warn('Execution reconnection failed', { executionId, error })
+      })
+      .finally(() => {
+        if (reconnectionComplete || cleanupRan) return
+        const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
+        if (currentId !== executionId) return
+        reconnectionComplete = true
+        activeReconnections.delete(reconnectWorkflowId)
+        clearExecutionEntries(executionId)
+        for (const entry of originalEntries) {
+          addConsole({
+            workflowId: entry.workflowId,
+            blockId: entry.blockId,
+            blockName: entry.blockName,
+            blockType: entry.blockType,
+            executionId: entry.executionId,
+            executionOrder: entry.executionOrder,
+            isRunning: false,
+            warning: 'Execution result unavailable — check the logs page',
+          })
+        }
+        setCurrentExecutionId(reconnectWorkflowId, null)
+        setIsExecuting(reconnectWorkflowId, false)
+        setActiveBlocks(reconnectWorkflowId, new Set())
+      })
+
+    return () => {
+      cleanupRan = true
+      executionStream.cancel(reconnectWorkflowId)
+      activeReconnections.delete(reconnectWorkflowId)
+
+      if (cleared && !reconnectionComplete) {
+        clearExecutionEntries(executionId)
+        for (const entry of originalEntries) {
+          addConsole(entry)
+        }
+      }
+    }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [activeWorkflowId, hasHydrated])
 
  return {
    isExecuting,
    isDebugging,

@@ -1,3 +1,4 @@
 export { CancelSubscription } from './cancel-subscription'
 export { CreditBalance } from './credit-balance'
 export { PlanCard, type PlanCardProps, type PlanFeature } from './plan-card'
+export { ReferralCode } from './referral-code'

@@ -0,0 +1 @@
+export { ReferralCode } from './referral-code'

@@ -0,0 +1,103 @@
+'use client'
+
+import { useState } from 'react'
+import { createLogger } from '@sim/logger'
+import { Button, Input, Label } from '@/components/emcn'
+
+const logger = createLogger('ReferralCode')
+
+interface ReferralCodeProps {
+  onRedeemComplete?: () => void
+}
+
+/**
+ * Inline referral/promo code entry field with redeem button.
+ * One-time use per account — shows success or "already redeemed" state.
+ */
+export function ReferralCode({ onRedeemComplete }: ReferralCodeProps) {
+  const [code, setCode] = useState('')
+  const [isRedeeming, setIsRedeeming] = useState(false)
+  const [error, setError] = useState<string | null>(null)
+  const [success, setSuccess] = useState<{ bonusAmount: number } | null>(null)
+
+  const handleRedeem = async () => {
+    const trimmed = code.trim()
+    if (!trimmed || isRedeeming) return
+
+    setIsRedeeming(true)
+    setError(null)
+
+    try {
+      const response = await fetch('/api/referral-code/redeem', {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify({ code: trimmed }),
+      })
+
+      const data = await response.json()
+
+      if (!response.ok) {
+        throw new Error(data.error || 'Failed to redeem code')
+      }
+
+      if (data.redeemed) {
+        setSuccess({ bonusAmount: data.bonusAmount })
+        setCode('')
+        onRedeemComplete?.()
+      } else {
+        setError(data.error || 'Code could not be redeemed')
+      }
+    } catch (err) {
+      logger.error('Referral code redemption failed', { error: err })
+      setError(err instanceof Error ? err.message : 'Failed to redeem code')
+    } finally {
+      setIsRedeeming(false)
+    }
+  }
+
+  if (success) {
+    return (
+      <div className='flex items-center justify-between'>
+        <Label>Referral Code</Label>
+        <span className='text-[12px] text-[var(--text-secondary)]'>
+          +${success.bonusAmount} credits applied
+        </span>
+      </div>
+    )
+  }
+
+  return (
+    <div className='flex flex-col'>
+      <div className='flex items-center justify-between gap-[12px]'>
+        <Label className='shrink-0'>Referral Code</Label>
+        <div className='flex items-center gap-[8px]'>
+          <Input
+            type='text'
+            value={code}
+            onChange={(e) => {
+              setCode(e.target.value)
+              setError(null)
+            }}
+            onKeyDown={(e) => {
+              if (e.key === 'Enter') handleRedeem()
+            }}
+            placeholder='Enter code'
+            className='h-[32px] w-[140px] text-[12px]'
+            disabled={isRedeeming}
+          />
+          <Button
+            variant='active'
+            className='h-[32px] shrink-0 rounded-[6px] text-[12px]'
+            onClick={handleRedeem}
+            disabled={isRedeeming || !code.trim()}
+          >
+            {isRedeeming ? 'Redeeming...' : 'Redeem'}
+          </Button>
+        </div>
+      </div>
+      <div className='mt-[4px] min-h-[18px] text-right'>
+        {error && <span className='text-[11px] text-[var(--text-error)]'>{error}</span>}
+      </div>
+    </div>
+  )
+}
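
For reference, the request/response contract the ReferralCode component above relies on, written as a standalone sketch; the response type is inferred from how the component reads the JSON, not from a server-side definition in this diff:

interface RedeemResponse {
  redeemed?: boolean
  bonusAmount?: number
  error?: string
}

async function redeemReferralCode(code: string): Promise<RedeemResponse> {
  const response = await fetch('/api/referral-code/redeem', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ code: code.trim() }),
  })
  const data: RedeemResponse = await response.json()
  if (!response.ok) throw new Error(data.error || 'Failed to redeem code')
  return data
}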
@@ -17,6 +17,7 @@ import {
   CancelSubscription,
   CreditBalance,
   PlanCard,
+  ReferralCode,
 } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components'
 import {
   ENTERPRISE_PLAN_FEATURES,
@@ -549,6 +550,10 @@ export function Subscription() {
             />
           )}
 
+          {!subscription.isEnterprise && (
+            <ReferralCode onRedeemComplete={() => refetchSubscription()} />
+          )}
+
           {/* Next Billing Date - hidden from team members */}
           {subscription.isPaid &&
             subscriptionData?.data?.periodEnd &&

@@ -4,12 +4,14 @@ import { useEffect } from 'react'
 import { createLogger } from '@sim/logger'
 import { useRouter } from 'next/navigation'
 import { useSession } from '@/lib/auth/auth-client'
+import { useReferralAttribution } from '@/hooks/use-referral-attribution'
 
 const logger = createLogger('WorkspacePage')
 
 export default function WorkspacePage() {
   const router = useRouter()
   const { data: session, isPending } = useSession()
+  useReferralAttribution()
 
   useEffect(() => {
     const redirectToFirstWorkspace = async () => {

apps/sim/blocks/blocks/google_books.ts (Normal file, 201 lines)
@@ -0,0 +1,201 @@
+import { GoogleBooksIcon } from '@/components/icons'
+import type { BlockConfig } from '@/blocks/types'
+import { AuthMode } from '@/blocks/types'
+
+export const GoogleBooksBlock: BlockConfig = {
+  type: 'google_books',
+  name: 'Google Books',
+  description: 'Search and retrieve book information',
+  authMode: AuthMode.ApiKey,
+  longDescription:
+    'Search for books using the Google Books API. Find volumes by title, author, ISBN, or keywords, and retrieve detailed information about specific books including descriptions, ratings, and publication details.',
+  docsLink: 'https://docs.sim.ai/tools/google_books',
+  category: 'tools',
+  bgColor: '#E0E0E0',
+  icon: GoogleBooksIcon,
+
+  subBlocks: [
+    {
+      id: 'operation',
+      title: 'Operation',
+      type: 'dropdown',
+      options: [
+        { label: 'Search Volumes', id: 'volume_search' },
+        { label: 'Get Volume Details', id: 'volume_details' },
+      ],
+      value: () => 'volume_search',
+    },
+    {
+      id: 'apiKey',
+      title: 'API Key',
+      type: 'short-input',
+      password: true,
+      placeholder: 'Enter your Google Books API key',
+      required: true,
+    },
+    {
+      id: 'query',
+      title: 'Search Query',
+      type: 'short-input',
+      placeholder: 'e.g., intitle:harry potter inauthor:rowling',
+      condition: { field: 'operation', value: 'volume_search' },
+      required: { field: 'operation', value: 'volume_search' },
+    },
+    {
+      id: 'filter',
+      title: 'Filter',
+      type: 'dropdown',
+      options: [
+        { label: 'None', id: '' },
+        { label: 'Partial Preview', id: 'partial' },
+        { label: 'Full Preview', id: 'full' },
+        { label: 'Free eBooks', id: 'free-ebooks' },
+        { label: 'Paid eBooks', id: 'paid-ebooks' },
+        { label: 'All eBooks', id: 'ebooks' },
+      ],
+      condition: { field: 'operation', value: 'volume_search' },
+      mode: 'advanced',
+    },
+    {
+      id: 'printType',
+      title: 'Print Type',
+      type: 'dropdown',
+      options: [
+        { label: 'All', id: 'all' },
+        { label: 'Books', id: 'books' },
+        { label: 'Magazines', id: 'magazines' },
+      ],
+      value: () => 'all',
+      condition: { field: 'operation', value: 'volume_search' },
+      mode: 'advanced',
+    },
+    {
+      id: 'orderBy',
+      title: 'Order By',
+      type: 'dropdown',
+      options: [
+        { label: 'Relevance', id: 'relevance' },
+        { label: 'Newest', id: 'newest' },
+      ],
+      value: () => 'relevance',
+      condition: { field: 'operation', value: 'volume_search' },
+      mode: 'advanced',
+    },
+    {
+      id: 'maxResults',
+      title: 'Max Results',
+      type: 'short-input',
+      placeholder: 'Number of results (1-40)',
+      condition: { field: 'operation', value: 'volume_search' },
+      mode: 'advanced',
+    },
+    {
+      id: 'startIndex',
+      title: 'Start Index',
+      type: 'short-input',
+      placeholder: 'Starting index for pagination',
+      condition: { field: 'operation', value: 'volume_search' },
+      mode: 'advanced',
+    },
+    {
+      id: 'langRestrict',
+      title: 'Language',
+      type: 'short-input',
+      placeholder: 'ISO 639-1 code (e.g., en, es, fr)',
+      condition: { field: 'operation', value: 'volume_search' },
+      mode: 'advanced',
+    },
+    {
+      id: 'volumeId',
+      title: 'Volume ID',
+      type: 'short-input',
+      placeholder: 'Google Books volume ID',
+      condition: { field: 'operation', value: 'volume_details' },
+      required: { field: 'operation', value: 'volume_details' },
+    },
+    {
+      id: 'projection',
+      title: 'Projection',
+      type: 'dropdown',
+      options: [
+        { label: 'Full', id: 'full' },
+        { label: 'Lite', id: 'lite' },
+      ],
+      value: () => 'full',
+      condition: { field: 'operation', value: 'volume_details' },
+      mode: 'advanced',
+    },
+  ],
+
+  tools: {
+    access: ['google_books_volume_search', 'google_books_volume_details'],
|
||||||
|
config: {
|
||||||
|
tool: (params) => `google_books_${params.operation}`,
|
||||||
|
params: (params) => {
|
||||||
|
const { operation, ...rest } = params
|
||||||
|
|
||||||
|
let maxResults: number | undefined
|
||||||
|
if (params.maxResults) {
|
||||||
|
maxResults = Number.parseInt(params.maxResults, 10)
|
||||||
|
if (Number.isNaN(maxResults)) {
|
||||||
|
maxResults = undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let startIndex: number | undefined
|
||||||
|
if (params.startIndex) {
|
||||||
|
startIndex = Number.parseInt(params.startIndex, 10)
|
||||||
|
if (Number.isNaN(startIndex)) {
|
||||||
|
startIndex = undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...rest,
|
||||||
|
maxResults,
|
||||||
|
startIndex,
|
||||||
|
filter: params.filter || undefined,
|
||||||
|
printType: params.printType || undefined,
|
||||||
|
orderBy: params.orderBy || undefined,
|
||||||
|
projection: params.projection || undefined,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
inputs: {
|
||||||
|
operation: { type: 'string', description: 'Operation to perform' },
|
||||||
|
apiKey: { type: 'string', description: 'Google Books API key' },
|
||||||
|
query: { type: 'string', description: 'Search query' },
|
||||||
|
filter: { type: 'string', description: 'Filter by availability' },
|
||||||
|
printType: { type: 'string', description: 'Print type filter' },
|
||||||
|
orderBy: { type: 'string', description: 'Sort order' },
|
||||||
|
maxResults: { type: 'string', description: 'Maximum number of results' },
|
||||||
|
startIndex: { type: 'string', description: 'Starting index for pagination' },
|
||||||
|
langRestrict: { type: 'string', description: 'Language restriction' },
|
||||||
|
volumeId: { type: 'string', description: 'Volume ID for details' },
|
||||||
|
projection: { type: 'string', description: 'Projection level' },
|
||||||
|
},
|
||||||
|
|
||||||
|
outputs: {
|
||||||
|
totalItems: { type: 'number', description: 'Total number of matching results' },
|
||||||
|
volumes: { type: 'json', description: 'List of matching volumes' },
|
||||||
|
id: { type: 'string', description: 'Volume ID' },
|
||||||
|
title: { type: 'string', description: 'Book title' },
|
||||||
|
subtitle: { type: 'string', description: 'Book subtitle' },
|
||||||
|
authors: { type: 'json', description: 'List of authors' },
|
||||||
|
publisher: { type: 'string', description: 'Publisher name' },
|
||||||
|
publishedDate: { type: 'string', description: 'Publication date' },
|
||||||
|
description: { type: 'string', description: 'Book description' },
|
||||||
|
pageCount: { type: 'number', description: 'Number of pages' },
|
||||||
|
categories: { type: 'json', description: 'Book categories' },
|
||||||
|
averageRating: { type: 'number', description: 'Average rating (1-5)' },
|
||||||
|
ratingsCount: { type: 'number', description: 'Number of ratings' },
|
||||||
|
language: { type: 'string', description: 'Language code' },
|
||||||
|
previewLink: { type: 'string', description: 'Link to preview on Google Books' },
|
||||||
|
infoLink: { type: 'string', description: 'Link to info page' },
|
||||||
|
thumbnailUrl: { type: 'string', description: 'Book cover thumbnail URL' },
|
||||||
|
isbn10: { type: 'string', description: 'ISBN-10 identifier' },
|
||||||
|
isbn13: { type: 'string', description: 'ISBN-13 identifier' },
|
||||||
|
},
|
||||||
|
}
|
||||||
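For illustration, a minimal sketch of what the `tools.config` mappers above produce for a search call. The parameter values are hypothetical and the snippet is not part of the changeset; only the mapping logic mirrors the block definition.

// Minimal sketch, hypothetical inputs.
const params = { operation: 'volume_search', maxResults: '10', startIndex: 'oops' }

// tools.config.tool interpolates the operation into the tool id.
const toolId = `google_books_${params.operation}` // 'google_books_volume_search'

// tools.config.params parses the numeric string inputs and drops anything that fails to parse.
const parsedMax = Number.parseInt(params.maxResults, 10)
const maxResults = Number.isNaN(parsedMax) ? undefined : parsedMax // 10

const parsedStart = Number.parseInt(params.startIndex, 10)
const startIndex = Number.isNaN(parsedStart) ? undefined : parsedStart // undefined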
@@ -58,6 +58,16 @@ export const S3Block: BlockConfig<S3Response> = {
},
required: true,
},
{
id: 'getObjectRegion',
title: 'AWS Region',
type: 'short-input',
placeholder: 'Used when S3 URL does not include region',
condition: {
field: 'operation',
value: ['get_object'],
},
},
{
id: 'bucketName',
title: 'Bucket Name',
@@ -291,34 +301,11 @@ export const S3Block: BlockConfig<S3Response> = {
if (!params.s3Uri) {
throw new Error('S3 Object URL is required')
}
// Parse S3 URI for get_object
try {
const url = new URL(params.s3Uri)
const hostname = url.hostname
const bucketName = hostname.split('.')[0]
const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/)
const region = regionMatch ? regionMatch[1] : params.region
const objectKey = url.pathname.startsWith('/')
? url.pathname.substring(1)
: url.pathname

if (!bucketName || !objectKey) {
throw new Error('Could not parse S3 URL')
}

return {
accessKeyId: params.accessKeyId,
secretAccessKey: params.secretAccessKey,
region,
bucketName,
objectKey,
s3Uri: params.s3Uri,
}
} catch (_error) {
throw new Error(
'Invalid S3 Object URL format. Expected: https://bucket-name.s3.region.amazonaws.com/path/to/file'
)
}
return {
accessKeyId: params.accessKeyId,
secretAccessKey: params.secretAccessKey,
region: params.getObjectRegion || params.region,
s3Uri: params.s3Uri,
}
}

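The hunk above drops the client-side parsing of the virtual-hosted S3 object URL and passes `s3Uri` through directly, with the new `getObjectRegion` field covering URLs whose hostname carries no region. A minimal sketch of the fallback, with hypothetical values, not part of the changeset:

// Minimal sketch; the URL and regions are hypothetical examples.
const params = {
  s3Uri: 'https://my-bucket.s3.amazonaws.com/reports/q1.pdf', // hostname without a region
  region: 'us-east-1',
  getObjectRegion: 'eu-west-1',
}

// Mirrors the new mapping: an explicit override wins, otherwise the block-level region is used.
const effectiveRegion = params.getObjectRegion || params.region // 'eu-west-1'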
@@ -401,6 +388,7 @@ export const S3Block: BlockConfig<S3Response> = {
acl: { type: 'string', description: 'Access control list' },
// Download inputs
s3Uri: { type: 'string', description: 'S3 object URL' },
getObjectRegion: { type: 'string', description: 'Optional AWS region override for downloads' },
// List inputs
prefix: { type: 'string', description: 'Prefix filter' },
maxKeys: { type: 'number', description: 'Maximum results' },
@@ -39,6 +39,7 @@ import { GitHubBlock, GitHubV2Block } from '@/blocks/blocks/github'
import { GitLabBlock } from '@/blocks/blocks/gitlab'
import { GmailBlock, GmailV2Block } from '@/blocks/blocks/gmail'
import { GoogleSearchBlock } from '@/blocks/blocks/google'
import { GoogleBooksBlock } from '@/blocks/blocks/google_books'
import { GoogleCalendarBlock, GoogleCalendarV2Block } from '@/blocks/blocks/google_calendar'
import { GoogleDocsBlock } from '@/blocks/blocks/google_docs'
import { GoogleDriveBlock } from '@/blocks/blocks/google_drive'
@@ -214,6 +215,7 @@ export const registry: Record<string, BlockConfig> = {
gmail_v2: GmailV2Block,
google_calendar: GoogleCalendarBlock,
google_calendar_v2: GoogleCalendarV2Block,
google_books: GoogleBooksBlock,
google_docs: GoogleDocsBlock,
google_drive: GoogleDriveBlock,
google_forms: GoogleFormsBlock,
@@ -196,6 +196,8 @@ export interface SubBlockConfig {
type: SubBlockType
mode?: 'basic' | 'advanced' | 'both' | 'trigger' // Default is 'both' if not specified. 'trigger' means only shown in trigger mode
canonicalParamId?: string
/** Controls parameter visibility in agent/tool-input context */
paramVisibility?: 'user-or-llm' | 'user-only' | 'llm-only' | 'hidden'
required?:
| boolean
| {
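For illustration, a minimal sketch of a hypothetical sub-block using the new field; it is not part of the changeset, and the field names simply mirror those used elsewhere in this diff. Going by the names, 'user-only' reads as "collected from the user, never surfaced to the LLM".

// Minimal sketch, hypothetical sub-block.
const apiKeySubBlock: SubBlockConfig = {
  id: 'apiKey',
  title: 'API Key',
  type: 'short-input',
  password: true,
  paramVisibility: 'user-only',
  required: true,
}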
@@ -1,3 +1,4 @@
import { setupGlobalFetchMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { getAllBlocks } from '@/blocks'
import { BlockType, isMcpTool } from '@/executor/constants'
@@ -61,6 +62,30 @@ vi.mock('@/providers', () => ({
}),
}))

vi.mock('@/executor/utils/http', () => ({
buildAuthHeaders: vi.fn().mockResolvedValue({ 'Content-Type': 'application/json' }),
buildAPIUrl: vi.fn((path: string, params?: Record<string, string>) => {
const url = new URL(path, 'http://localhost:3000')
if (params) {
for (const [key, value] of Object.entries(params)) {
if (value !== undefined && value !== null) {
url.searchParams.set(key, value)
}
}
}
return url
}),
extractAPIErrorMessage: vi.fn(async (response: Response) => {
const defaultMessage = `API request failed with status ${response.status}`
try {
const errorData = await response.json()
return errorData.error || defaultMessage
} catch {
return defaultMessage
}
}),
}))

vi.mock('@sim/db', () => ({
db: {
select: vi.fn().mockReturnValue({
@@ -84,7 +109,7 @@ vi.mock('@sim/db/schema', () => ({
},
}))

global.fetch = Object.assign(vi.fn(), { preconnect: vi.fn() }) as typeof fetch
setupGlobalFetchMock()

const mockGetAllBlocks = getAllBlocks as Mock
const mockExecuteTool = executeTool as Mock
@@ -1901,5 +1926,301 @@ describe('AgentBlockHandler', () => {

expect(discoveryCalls[0].url).toContain('serverId=mcp-legacy-server')
})

describe('customToolId resolution - DB as source of truth', () => {
const staleInlineSchema = {
function: {
name: 'formatReport',
description: 'Formats a report',
parameters: {
type: 'object',
properties: {
title: { type: 'string', description: 'Report title' },
content: { type: 'string', description: 'Report content' },
},
required: ['title', 'content'],
},
},
}

const dbSchema = {
function: {
name: 'formatReport',
description: 'Formats a report',
parameters: {
type: 'object',
properties: {
title: { type: 'string', description: 'Report title' },
content: { type: 'string', description: 'Report content' },
format: { type: 'string', description: 'Output format' },
},
required: ['title', 'content', 'format'],
},
},
}

const staleInlineCode = 'return { title, content };'
const dbCode = 'return { title, content, format };'

function mockFetchForCustomTool(toolId: string) {
mockFetch.mockImplementation((url: string) => {
if (typeof url === 'string' && url.includes('/api/tools/custom')) {
return Promise.resolve({
ok: true,
headers: { get: () => null },
json: () =>
Promise.resolve({
data: [
{
id: toolId,
title: 'formatReport',
schema: dbSchema,
code: dbCode,
},
],
}),
})
}
return Promise.resolve({
ok: true,
headers: { get: () => null },
json: () => Promise.resolve({}),
})
})
}

function mockFetchFailure() {
mockFetch.mockImplementation((url: string) => {
if (typeof url === 'string' && url.includes('/api/tools/custom')) {
return Promise.resolve({
ok: false,
status: 500,
headers: { get: () => null },
json: () => Promise.resolve({}),
})
}
return Promise.resolve({
ok: true,
headers: { get: () => null },
json: () => Promise.resolve({}),
})
})
}

beforeEach(() => {
Object.defineProperty(global, 'window', {
value: undefined,
writable: true,
configurable: true,
})
})

it('should always fetch latest schema from DB when customToolId is present', async () => {
const toolId = 'custom-tool-123'
mockFetchForCustomTool(toolId)

const inputs = {
model: 'gpt-4o',
userPrompt: 'Format a report',
apiKey: 'test-api-key',
tools: [
{
type: 'custom-tool',
customToolId: toolId,
title: 'formatReport',
schema: staleInlineSchema,
code: staleInlineCode,
usageControl: 'auto' as const,
},
],
}

mockGetProviderFromModel.mockReturnValue('openai')

await handler.execute(mockContext, mockBlock, inputs)

expect(mockExecuteProviderRequest).toHaveBeenCalled()
const providerCall = mockExecuteProviderRequest.mock.calls[0]
const tools = providerCall[1].tools

expect(tools.length).toBe(1)
// DB schema wins over stale inline — includes format param
expect(tools[0].parameters.required).toContain('format')
expect(tools[0].parameters.properties).toHaveProperty('format')
})

it('should fetch from DB when customToolId has no inline schema', async () => {
const toolId = 'custom-tool-123'
mockFetchForCustomTool(toolId)

const inputs = {
model: 'gpt-4o',
userPrompt: 'Format a report',
apiKey: 'test-api-key',
tools: [
{
type: 'custom-tool',
customToolId: toolId,
usageControl: 'auto' as const,
},
],
}

mockGetProviderFromModel.mockReturnValue('openai')

await handler.execute(mockContext, mockBlock, inputs)

expect(mockExecuteProviderRequest).toHaveBeenCalled()
const providerCall = mockExecuteProviderRequest.mock.calls[0]
const tools = providerCall[1].tools

expect(tools.length).toBe(1)
expect(tools[0].name).toBe('formatReport')
expect(tools[0].parameters.required).toContain('format')
})

it('should fall back to inline schema when DB fetch fails and inline exists', async () => {
mockFetchFailure()

const inputs = {
model: 'gpt-4o',
userPrompt: 'Format a report',
apiKey: 'test-api-key',
tools: [
{
type: 'custom-tool',
customToolId: 'custom-tool-123',
title: 'formatReport',
schema: staleInlineSchema,
code: staleInlineCode,
usageControl: 'auto' as const,
},
],
}

mockGetProviderFromModel.mockReturnValue('openai')

await handler.execute(mockContext, mockBlock, inputs)

expect(mockExecuteProviderRequest).toHaveBeenCalled()
const providerCall = mockExecuteProviderRequest.mock.calls[0]
const tools = providerCall[1].tools

expect(tools.length).toBe(1)
expect(tools[0].name).toBe('formatReport')
expect(tools[0].parameters.required).not.toContain('format')
})

it('should return null when DB fetch fails and no inline schema exists', async () => {
mockFetchFailure()

const inputs = {
model: 'gpt-4o',
userPrompt: 'Format a report',
apiKey: 'test-api-key',
tools: [
{
type: 'custom-tool',
customToolId: 'custom-tool-123',
usageControl: 'auto' as const,
},
],
}

mockGetProviderFromModel.mockReturnValue('openai')

await handler.execute(mockContext, mockBlock, inputs)

expect(mockExecuteProviderRequest).toHaveBeenCalled()
const providerCall = mockExecuteProviderRequest.mock.calls[0]
const tools = providerCall[1].tools

expect(tools.length).toBe(0)
})

it('should use DB code for executeFunction when customToolId resolves', async () => {
const toolId = 'custom-tool-123'
mockFetchForCustomTool(toolId)

let capturedTools: any[] = []
Promise.all = vi.fn().mockImplementation((promises: Promise<any>[]) => {
const result = originalPromiseAll.call(Promise, promises)
result.then((tools: any[]) => {
if (tools?.length) {
capturedTools = tools.filter((t) => t !== null)
}
})
return result
})

const inputs = {
model: 'gpt-4o',
userPrompt: 'Format a report',
apiKey: 'test-api-key',
tools: [
{
type: 'custom-tool',
customToolId: toolId,
title: 'formatReport',
schema: staleInlineSchema,
code: staleInlineCode,
usageControl: 'auto' as const,
},
],
}

mockGetProviderFromModel.mockReturnValue('openai')

await handler.execute(mockContext, mockBlock, inputs)

expect(capturedTools.length).toBe(1)
expect(typeof capturedTools[0].executeFunction).toBe('function')

await capturedTools[0].executeFunction({ title: 'Q1', format: 'pdf' })

expect(mockExecuteTool).toHaveBeenCalledWith(
'function_execute',
expect.objectContaining({
code: dbCode,
}),
false,
expect.any(Object)
)
})

it('should not fetch from DB when no customToolId is present', async () => {
const inputs = {
model: 'gpt-4o',
userPrompt: 'Use the tool',
apiKey: 'test-api-key',
tools: [
{
type: 'custom-tool',
title: 'formatReport',
schema: staleInlineSchema,
code: staleInlineCode,
usageControl: 'auto' as const,
},
],
}

mockGetProviderFromModel.mockReturnValue('openai')

await handler.execute(mockContext, mockBlock, inputs)

const customToolFetches = mockFetch.mock.calls.filter(
(call: any[]) => typeof call[0] === 'string' && call[0].includes('/api/tools/custom')
)
expect(customToolFetches.length).toBe(0)

expect(mockExecuteProviderRequest).toHaveBeenCalled()
const providerCall = mockExecuteProviderRequest.mock.calls[0]
const tools = providerCall[1].tools

expect(tools.length).toBe(1)
expect(tools[0].name).toBe('formatReport')
expect(tools[0].parameters.required).not.toContain('format')
})
})
})
})
@@ -62,9 +62,12 @@ export class AgentBlockHandler implements BlockHandler {
await validateModelProvider(ctx.userId, model, ctx)

const providerId = getProviderFromModel(model)
const formattedTools = await this.formatTools(ctx, filteredInputs.tools || [])
const formattedTools = await this.formatTools(
ctx,
filteredInputs.tools || [],
block.canonicalModes
)

// Resolve skill metadata for progressive disclosure
const skillInputs = filteredInputs.skills ?? []
let skillMetadata: Array<{ name: string; description: string }> = []
if (skillInputs.length > 0 && ctx.workspaceId) {
@@ -221,7 +224,11 @@ export class AgentBlockHandler implements BlockHandler {
})
}

private async formatTools(ctx: ExecutionContext, inputTools: ToolInput[]): Promise<any[]> {
private async formatTools(
ctx: ExecutionContext,
inputTools: ToolInput[],
canonicalModes?: Record<string, 'basic' | 'advanced'>
): Promise<any[]> {
if (!Array.isArray(inputTools)) return []

const filtered = inputTools.filter((tool) => {
@@ -249,7 +256,7 @@ export class AgentBlockHandler implements BlockHandler {
if (tool.type === 'custom-tool' && (tool.schema || tool.customToolId)) {
return await this.createCustomTool(ctx, tool)
}
return this.transformBlockTool(ctx, tool)
return this.transformBlockTool(ctx, tool, canonicalModes)
} catch (error) {
logger.error(`[AgentHandler] Error creating tool:`, { tool, error })
return null
@@ -272,15 +279,16 @@ export class AgentBlockHandler implements BlockHandler {
let code = tool.code
let title = tool.title

if (tool.customToolId && !schema) {
if (tool.customToolId) {
const resolved = await this.fetchCustomToolById(ctx, tool.customToolId)
if (!resolved) {
if (resolved) {
schema = resolved.schema
code = resolved.code
title = resolved.title
} else if (!schema) {
logger.error(`Custom tool not found: ${tool.customToolId}`)
return null
}
schema = resolved.schema
code = resolved.code
title = resolved.title
}

if (!schema?.function) {
@@ -719,12 +727,17 @@ export class AgentBlockHandler implements BlockHandler {
}
}

private async transformBlockTool(ctx: ExecutionContext, tool: ToolInput) {
private async transformBlockTool(
ctx: ExecutionContext,
tool: ToolInput,
canonicalModes?: Record<string, 'basic' | 'advanced'>
) {
const transformedTool = await transformBlockTool(tool, {
selectedOperation: tool.operation,
getAllBlocks,
getToolAsync: (toolId: string) => getToolAsync(toolId, ctx.workflowId),
getTool,
canonicalModes,
})

if (transformedTool) {
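In effect, the `createCustomTool` change above makes the database record the source of truth whenever a `customToolId` is present, with the inline schema kept only as a fallback. A minimal sketch of that resolution order, not part of the changeset; the function and parameter names are hypothetical stand-ins for the handler's locals:

// Minimal sketch: DB result first, stale inline second, otherwise skip the tool.
function resolveCustomToolSchema(
  inline: { schema?: unknown; code?: string } | undefined,
  resolved: { schema: unknown; code: string } | null
) {
  if (resolved) return { schema: resolved.schema, code: resolved.code } // DB wins
  if (inline?.schema) return { schema: inline.schema, code: inline.code } // inline fallback
  return null // nothing usable: the tool is dropped
}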
@@ -2,7 +2,7 @@ import { db } from '@sim/db'
import { account } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
import type { BlockOutput } from '@/blocks/types'
@@ -79,7 +79,7 @@ export class RouterBlockHandler implements BlockHandler {
const providerId = getProviderFromModel(routerConfig.model)

try {
const url = new URL('/api/providers', getBaseUrl())
const url = new URL('/api/providers', getInternalApiBaseUrl())
if (ctx.userId) url.searchParams.set('userId', ctx.userId)

const messages = [{ role: 'user', content: routerConfig.prompt }]
@@ -209,7 +209,7 @@ export class RouterBlockHandler implements BlockHandler {
const providerId = getProviderFromModel(routerConfig.model)

try {
const url = new URL('/api/providers', getBaseUrl())
const url = new URL('/api/providers', getInternalApiBaseUrl())
if (ctx.userId) url.searchParams.set('userId', ctx.userId)

const messages = [{ role: 'user', content: routerConfig.context }]
@@ -1,3 +1,4 @@
import { setupGlobalFetchMock } from '@sim/testing'
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { BlockType } from '@/executor/constants'
import { WorkflowBlockHandler } from '@/executor/handlers/workflow/workflow-handler'
@@ -9,7 +10,7 @@ vi.mock('@/lib/auth/internal', () => ({
}))

// Mock fetch globally
global.fetch = vi.fn()
setupGlobalFetchMock()

describe('WorkflowBlockHandler', () => {
let handler: WorkflowBlockHandler
@@ -1,5 +1,5 @@
import { generateInternalToken } from '@/lib/auth/internal'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getBaseUrl, getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import { HTTP } from '@/executor/constants'

export async function buildAuthHeaders(): Promise<Record<string, string>> {
@@ -16,7 +16,8 @@ export async function buildAuthHeaders(): Promise<Record<string, string>> {
}

export function buildAPIUrl(path: string, params?: Record<string, string>): URL {
const url = new URL(path, getBaseUrl())
const baseUrl = path.startsWith('/api/') ? getInternalApiBaseUrl() : getBaseUrl()
const url = new URL(path, baseUrl)

if (params) {
for (const [key, value] of Object.entries(params)) {
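With the change above, `/api/...` paths resolve against the internal API base while everything else keeps the public base URL. A usage sketch, not part of the changeset; the base URLs shown are hypothetical examples of what the two helpers might return:

// Minimal sketch, hypothetical base URLs.
import { buildAPIUrl } from '@/executor/utils/http'

// If getInternalApiBaseUrl() resolved to 'http://localhost:3000' and getBaseUrl() to 'https://sim.ai':
const providersUrl = buildAPIUrl('/api/providers', { userId: 'u_123' })
// providersUrl.toString() -> 'http://localhost:3000/api/providers?userId=u_123'

const appUrl = buildAPIUrl('/workspace/abc')
// appUrl.toString() -> 'https://sim.ai/workspace/abc'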
@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {

const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.

Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.
Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.

Guidelines:
- Use the specific values provided (credential names, channel names, model names)
@@ -642,6 +642,10 @@ export function useDeployChildWorkflow() {
queryClient.invalidateQueries({
queryKey: workflowKeys.deploymentStatus(variables.workflowId),
})
// Invalidate workflow state so tool input mappings refresh
queryClient.invalidateQueries({
queryKey: workflowKeys.state(variables.workflowId),
})
// Also invalidate deployment queries
queryClient.invalidateQueries({
queryKey: deploymentKeys.info(variables.workflowId),
@@ -1,4 +1,4 @@
import { useCallback, useRef } from 'react'
import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import type {
BlockCompletedData,
@@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types'

const logger = createLogger('useExecutionStream')

/**
* Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
* These should be treated as clean disconnects, not execution errors.
*/
function isClientDisconnectError(error: any): boolean {
if (error.name === 'AbortError') return true
const msg = (error.message ?? '').toLowerCase()
return (
msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
)
}

/**
* Processes SSE events from a response body and invokes appropriate callbacks.
*/
@@ -121,6 +133,7 @@ export interface ExecuteStreamOptions {
parallels?: Record<string, any>
}
stopAfterBlockId?: string
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks
}

@@ -129,30 +142,40 @@ export interface ExecuteFromBlockOptions {
startBlockId: string
sourceSnapshot: SerializableExecutionState
input?: any
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks
}

export interface ReconnectStreamOptions {
workflowId: string
executionId: string
fromEventId?: number
callbacks?: ExecutionStreamCallbacks
}

/**
* Module-level map shared across all hook instances.
* Ensures ANY instance can cancel streams started by ANY other instance,
* which is critical for SPA navigation where the original hook instance unmounts
* but the SSE stream must be cancellable from the new instance.
*/
const sharedAbortControllers = new Map<string, AbortController>()

/**
* Hook for executing workflows via server-side SSE streaming.
* Supports concurrent executions via per-workflow AbortController maps.
*/
export function useExecutionStream() {
const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
new Map()
)

const execute = useCallback(async (options: ExecuteStreamOptions) => {
const { workflowId, callbacks = {}, ...payload } = options
const { workflowId, callbacks = {}, onExecutionId, ...payload } = options

const existing = abortControllersRef.current.get(workflowId)
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}

const abortController = new AbortController()
abortControllersRef.current.set(workflowId, abortController)
sharedAbortControllers.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)

try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -177,42 +200,48 @@ export function useExecutionStream() {
throw new Error('No response body')
}

const executionId = response.headers.get('X-Execution-Id')
if (executionId) {
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
const serverExecutionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) {
onExecutionId?.(serverExecutionId)
}

const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Execution')
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Execution stream cancelled')
callbacks.onExecutionCancelled?.({ duration: 0 })
} else {
logger.error('Execution stream error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
if (isClientDisconnectError(error)) {
logger.info('Execution stream disconnected (page unload or abort)')
return
}
logger.error('Execution stream error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
throw error
} finally {
abortControllersRef.current.delete(workflowId)
currentExecutionsRef.current.delete(workflowId)
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])

const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
const {
workflowId,
startBlockId,
sourceSnapshot,
input,
onExecutionId,
callbacks = {},
} = options

const existing = abortControllersRef.current.get(workflowId)
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}

const abortController = new AbortController()
abortControllersRef.current.set(workflowId, abortController)
sharedAbortControllers.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)

try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -246,64 +275,80 @@ export function useExecutionStream() {
throw new Error('No response body')
}

const executionId = response.headers.get('X-Execution-Id')
if (executionId) {
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
const serverExecutionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) {
onExecutionId?.(serverExecutionId)
}

const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Run-from-block')
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Run-from-block execution cancelled')
callbacks.onExecutionCancelled?.({ duration: 0 })
} else {
logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
if (isClientDisconnectError(error)) {
logger.info('Run-from-block stream disconnected (page unload or abort)')
return
}
logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
throw error
} finally {
abortControllersRef.current.delete(workflowId)
currentExecutionsRef.current.delete(workflowId)
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])

const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options

const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}

const abortController = new AbortController()
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
{ signal: abortController.signal }
)
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
if (!response.body) throw new Error('No response body')

await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
} catch (error: any) {
if (isClientDisconnectError(error)) return
logger.error('Reconnection stream error:', error)
throw error
} finally {
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])

const cancel = useCallback((workflowId?: string) => {
if (workflowId) {
const execution = currentExecutionsRef.current.get(workflowId)
if (execution) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}

const controller = abortControllersRef.current.get(workflowId)
const controller = sharedAbortControllers.get(workflowId)
if (controller) {
controller.abort()
abortControllersRef.current.delete(workflowId)
sharedAbortControllers.delete(workflowId)
}
currentExecutionsRef.current.delete(workflowId)
} else {
for (const [, execution] of currentExecutionsRef.current) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}

for (const [, controller] of abortControllersRef.current) {
for (const [, controller] of sharedAbortControllers) {
controller.abort()
}
abortControllersRef.current.clear()
sharedAbortControllers.clear()
currentExecutionsRef.current.clear()
}
}, [])

return {
execute,
executeFromBlock,
reconnect,
cancel,
}
}
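A usage sketch of the new `reconnect` path, not part of the changeset; the ids are hypothetical and the hook's import path is an assumption:

// Minimal sketch: resume an SSE stream after navigation; ids and import path are assumed.
import { useExecutionStream } from '@/hooks/use-execution-stream'

function ResumeButton({ workflowId, executionId }: { workflowId: string; executionId: string }) {
  const { reconnect, cancel } = useExecutionStream()

  const onResume = () =>
    reconnect({
      workflowId,
      executionId,
      fromEventId: 0, // replay from the first buffered event
      callbacks: {
        onExecutionError: ({ error }) => console.error('stream error', error),
      },
    })

  return null // wire onResume and (() => cancel(workflowId)) to UI controls as needed
}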
46 apps/sim/hooks/use-referral-attribution.ts Normal file
@@ -0,0 +1,46 @@
'use client'

import { useEffect, useRef } from 'react'
import { createLogger } from '@sim/logger'

const logger = createLogger('ReferralAttribution')

const COOKIE_NAME = 'sim_utm'

const TERMINAL_REASONS = new Set([
'invalid_cookie',
'no_utm_cookie',
'no_matching_campaign',
'already_attributed',
])

/**
* Fires a one-shot `POST /api/attribution` when a `sim_utm` cookie is present.
* Retries on transient failures; stops on terminal outcomes.
*/
export function useReferralAttribution() {
const calledRef = useRef(false)

useEffect(() => {
if (calledRef.current) return
if (!document.cookie.includes(COOKIE_NAME)) return

calledRef.current = true

fetch('/api/attribution', { method: 'POST' })
.then((res) => res.json())
.then((data) => {
if (data.attributed) {
logger.info('Referral attribution successful', { bonusAmount: data.bonusAmount })
} else if (data.error || TERMINAL_REASONS.has(data.reason)) {
logger.info('Referral attribution skipped', { reason: data.reason || data.error })
} else {
calledRef.current = false
}
})
.catch((err) => {
logger.warn('Referral attribution failed, will retry', { error: err })
calledRef.current = false
})
}, [])
}
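For reference, a sketch of the response shape the hook above branches on, inferred from its checks and not part of the changeset; the API's actual contract may carry more fields:

// Minimal sketch, inferred shape only.
type AttributionResponse =
  | { attributed: true; bonusAmount: number } // logged once, never retried
  | { attributed: false; reason?: string; error?: string } // terminal reasons and errors are skipped;
// any other non-attributed response resets calledRef so a later mount can retry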
64 apps/sim/lib/billing/credits/bonus.ts Normal file
@@ -0,0 +1,64 @@
import { db } from '@sim/db'
import { organization, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, sql } from 'drizzle-orm'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import type { DbOrTx } from '@/lib/db/types'

const logger = createLogger('BonusCredits')

/**
* Apply bonus credits to a user (e.g. referral bonuses, promotional codes).
*
* Detects the user's current plan and routes credits accordingly:
* - Free/Pro: adds to `userStats.creditBalance` and increments `currentUsageLimit`
* - Team/Enterprise: adds to `organization.creditBalance` and increments `orgUsageLimit`
*
* Uses direct increment (not recalculation) so it works correctly for free-tier
* users where `setUsageLimitForCredits` would compute planBase=0 and skip the update.
*
* @param tx - Optional Drizzle transaction context. When provided, all DB writes
* participate in the caller's transaction for atomicity.
*/
export async function applyBonusCredits(
userId: string,
amount: number,
tx?: DbOrTx
): Promise<void> {
const dbCtx = tx ?? db
const subscription = await getHighestPrioritySubscription(userId)
const isTeamOrEnterprise = subscription?.plan === 'team' || subscription?.plan === 'enterprise'

if (isTeamOrEnterprise && subscription?.referenceId) {
const orgId = subscription.referenceId

await dbCtx
.update(organization)
.set({
creditBalance: sql`${organization.creditBalance} + ${amount}`,
orgUsageLimit: sql`COALESCE(${organization.orgUsageLimit}, '0')::decimal + ${amount}`,
})
.where(eq(organization.id, orgId))

logger.info('Applied bonus credits to organization', {
userId,
organizationId: orgId,
plan: subscription.plan,
amount,
})
} else {
await dbCtx
.update(userStats)
.set({
creditBalance: sql`${userStats.creditBalance} + ${amount}`,
currentUsageLimit: sql`COALESCE(${userStats.currentUsageLimit}, '0')::decimal + ${amount}`,
})
.where(eq(userStats.userId, userId))

logger.info('Applied bonus credits to user', {
userId,
plan: subscription?.plan || 'free',
amount,
})
}
}
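A usage sketch of the new helper, not part of the changeset; the surrounding transaction body and the bonus amount are hypothetical:

// Minimal sketch: grant a referral bonus atomically with other writes.
import { db } from '@sim/db'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'

async function grantReferralBonus(userId: string) {
  await db.transaction(async (tx) => {
    // ...record the referral redemption in the same transaction here...
    await applyBonusCredits(userId, 20, tx) // routed to the user or their org based on plan
  })
}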
@@ -1,21 +1,22 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
import { COPILOT_CONFIRM_API_PATH, STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
||||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
import { isBackgroundState, isRejectedState, isReviewState } from '@/lib/copilot/store-utils'
|
import {
|
||||||
|
isBackgroundState,
|
||||||
|
isRejectedState,
|
||||||
|
isReviewState,
|
||||||
|
resolveToolDisplay,
|
||||||
|
} from '@/lib/copilot/store-utils'
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||||
|
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
||||||
|
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||||
|
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||||
|
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||||
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
||||||
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
||||||
import {
|
|
||||||
extractOperationListFromResultPayload,
|
|
||||||
extractToolExecutionMetadata,
|
|
||||||
extractToolUiMetadata,
|
|
||||||
isWorkflowChangeApplyCall,
|
|
||||||
mapServerStateToClientState,
|
|
||||||
resolveDisplayFromServerUi,
|
|
||||||
} from './tool-call-helpers'
|
|
||||||
import { applyToolEffects } from './tool-effects'
|
|
||||||
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
||||||
|
|
||||||
const logger = createLogger('CopilotClientSseHandlers')
|
const logger = createLogger('CopilotClientSseHandlers')
|
||||||
@@ -25,11 +26,21 @@ const MAX_BATCH_INTERVAL = 50
const MIN_BATCH_INTERVAL = 16
const MAX_QUEUE_SIZE = 5

-function isClientRunCapability(toolCall: CopilotToolCall): boolean {
-  if (toolCall.execution?.target === 'sim_client_capability') {
-    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
-  }
-  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
+/**
+ * Send an auto-accept confirmation to the server for auto-allowed tools.
+ * The server-side orchestrator polls Redis for this decision.
+ */
+export function sendAutoAcceptConfirmation(toolCallId: string): void {
+  fetch(COPILOT_CONFIRM_API_PATH, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ toolCallId, status: 'accepted' }),
+  }).catch((error) => {
+    logger.warn('Failed to send auto-accept confirmation', {
+      toolCallId,
+      error: error instanceof Error ? error.message : String(error),
+    })
+  })
}

function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
@@ -219,86 +230,28 @@ export const sseHandlers: Record<string, SSEHandler> = {
    }
  },
  title_updated: (_data, _context, get, set) => {
-    const title = typeof _data.title === 'string' ? _data.title.trim() : ''
-    const eventChatId = typeof _data.chatId === 'string' ? _data.chatId : undefined
+    const title = _data.title
+    if (!title) return
    const { currentChat, chats } = get()
-    logger.info('[Title] Received title_updated SSE event', {
-      eventTitle: title,
-      eventChatId: eventChatId || null,
-      currentChatId: currentChat?.id || null,
-      currentChatTitle: currentChat?.title || null,
-      chatCount: chats.length,
-    })
-
-    if (!title) {
-      logger.warn('[Title] Ignoring title_updated event with empty title', {
-        payload: _data,
-      })
-      return
-    }
-
-    if (!currentChat) {
-      logger.warn('[Title] Received title_updated event without an active currentChat', {
-        eventChatId: eventChatId || null,
-        title,
-      })
-      return
-    }
-
-    const targetChatId = eventChatId || currentChat.id
-    if (eventChatId && eventChatId !== currentChat.id) {
-      logger.warn('[Title] title_updated event chatId does not match currentChat', {
-        eventChatId,
-        currentChatId: currentChat.id,
+    if (currentChat) {
+      set({
+        currentChat: { ...currentChat, title },
+        chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)),
      })
    }
-
-    set({
-      currentChat:
-        currentChat.id === targetChatId
-          ? {
-              ...currentChat,
-              title,
-            }
-          : currentChat,
-      chats: chats.map((c) => (c.id === targetChatId ? { ...c, title } : c)),
-    })
-
-    const updatedState = get()
-    logger.info('[Title] Applied title_updated event to copilot store', {
-      targetChatId,
-      renderedCurrentChatId: updatedState.currentChat?.id || null,
-      renderedCurrentChatTitle: updatedState.currentChat?.title || null,
-      chatListTitle: updatedState.chats.find((c) => c.id === targetChatId)?.title || null,
-    })
  },
-  'copilot.tool.result': (data, context, get, set) => {
+  tool_result: (data, context, get, set) => {
    try {
      const eventData = asRecord(data?.data)
      const toolCallId: string | undefined =
-        data?.toolCallId ||
-        (eventData.id as string | undefined) ||
-        (eventData.callId as string | undefined)
+        data?.toolCallId || (eventData.id as string | undefined)
      const success: boolean | undefined = data?.success
      const failedDependency: boolean = data?.failedDependency === true
      const resultObj = asRecord(data?.result)
      const skipped: boolean = resultObj.skipped === true
      if (!toolCallId) return
-      const uiMetadata = extractToolUiMetadata(eventData)
-      const executionMetadata = extractToolExecutionMetadata(eventData)
-      const serverState = (eventData.state as string | undefined) || undefined
-      const targetState = serverState
-        ? mapServerStateToClientState(serverState)
-        : success
-          ? ClientToolCallState.success
-          : failedDependency || skipped
-            ? ClientToolCallState.rejected
-            : ClientToolCallState.error
-      const resultPayload = asRecord(data?.result || eventData.result || eventData.data || data?.data)
      const { toolCallsById } = get()
      const current = toolCallsById[toolCallId]
-      let paramsForCurrentToolCall: Record<string, unknown> | undefined = current?.params
      if (current) {
        if (
          isRejectedState(current.state) ||
@@ -307,33 +260,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
        ) {
          return
        }
-        if (
-          targetState === ClientToolCallState.success &&
-          isWorkflowChangeApplyCall(current.name, paramsForCurrentToolCall)
-        ) {
-          const operations = extractOperationListFromResultPayload(resultPayload || {})
-          if (operations && operations.length > 0) {
-            paramsForCurrentToolCall = {
-              ...(current.params || {}),
-              operations,
-            }
-          }
-        }
-
+        const targetState = success
+          ? ClientToolCallState.success
+          : failedDependency || skipped
+            ? ClientToolCallState.rejected
+            : ClientToolCallState.error
        const updatedMap = { ...toolCallsById }
        updatedMap[toolCallId] = {
          ...current,
-          ui: uiMetadata || current.ui,
-          execution: executionMetadata || current.execution,
-          params: paramsForCurrentToolCall,
          state: targetState,
-          display: resolveDisplayFromServerUi(
-            current.name,
-            targetState,
-            current.id,
-            paramsForCurrentToolCall,
-            uiMetadata || current.ui
-          ),
+          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
        }
        set({ toolCallsById: updatedMap })

@@ -376,11 +312,138 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
applyToolEffects({
|
if (current.name === 'edit_workflow') {
|
||||||
effectsRaw: eventData.effects,
|
try {
|
||||||
toolCall: updatedMap[toolCallId],
|
const resultPayload = asRecord(
|
||||||
resultPayload,
|
data?.result || eventData.result || eventData.data || data?.data
|
||||||
})
|
)
|
||||||
|
const workflowState = asRecord(resultPayload?.workflowState)
|
||||||
|
const hasWorkflowState = !!resultPayload?.workflowState
|
||||||
|
logger.info('[SSE] edit_workflow result received', {
|
||||||
|
hasWorkflowState,
|
||||||
|
blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
|
||||||
|
edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
|
||||||
|
})
|
||||||
|
if (hasWorkflowState) {
|
||||||
|
const diffStore = useWorkflowDiffStore.getState()
|
||||||
|
diffStore
|
||||||
|
.setProposedChanges(resultPayload.workflowState as WorkflowState)
|
||||||
|
.catch((err) => {
|
||||||
|
logger.error('[SSE] Failed to apply edit_workflow diff', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[SSE] edit_workflow result handling failed', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deploy tools: update deployment status in workflow registry
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
(current.name === 'deploy_api' ||
|
||||||
|
current.name === 'deploy_chat' ||
|
||||||
|
current.name === 'deploy_mcp' ||
|
||||||
|
current.name === 'redeploy')
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
const resultPayload = asRecord(
|
||||||
|
data?.result || eventData.result || eventData.data || data?.data
|
||||||
|
)
|
||||||
|
const input = asRecord(current.params)
|
||||||
|
const workflowId =
|
||||||
|
(resultPayload?.workflowId as string) ||
|
||||||
|
(input?.workflowId as string) ||
|
||||||
|
useWorkflowRegistry.getState().activeWorkflowId
|
||||||
|
const isDeployed = resultPayload?.isDeployed !== false
|
||||||
|
if (workflowId) {
|
||||||
|
useWorkflowRegistry
|
||||||
|
.getState()
|
||||||
|
.setDeploymentStatus(workflowId, isDeployed, isDeployed ? new Date() : undefined)
|
||||||
|
logger.info('[SSE] Updated deployment status from tool result', {
|
||||||
|
toolName: current.name,
|
||||||
|
workflowId,
|
||||||
|
isDeployed,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to hydrate deployment status', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Environment variables: reload store after successful set
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
current.name === 'set_environment_variables'
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
useEnvironmentStore.getState().loadEnvironmentVariables()
|
||||||
|
logger.info('[SSE] Triggered environment variables reload')
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to reload environment variables', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Workflow variables: reload store after successful set
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
current.name === 'set_global_workflow_variables'
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
const input = asRecord(current.params)
|
||||||
|
const workflowId =
|
||||||
|
(input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
|
||||||
|
if (workflowId) {
|
||||||
|
useVariablesStore.getState().loadForWorkflow(workflowId)
|
||||||
|
logger.info('[SSE] Triggered workflow variables reload', { workflowId })
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to reload workflow variables', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate API key: update deployment status with the new key
|
||||||
|
if (targetState === ClientToolCallState.success && current.name === 'generate_api_key') {
|
||||||
|
try {
|
||||||
|
const resultPayload = asRecord(
|
||||||
|
data?.result || eventData.result || eventData.data || data?.data
|
||||||
|
)
|
||||||
|
const input = asRecord(current.params)
|
||||||
|
const workflowId =
|
||||||
|
(input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
|
||||||
|
const apiKey = (resultPayload?.apiKey || resultPayload?.key) as string | undefined
|
||||||
|
if (workflowId) {
|
||||||
|
const existingStatus = useWorkflowRegistry
|
||||||
|
.getState()
|
||||||
|
.getWorkflowDeploymentStatus(workflowId)
|
||||||
|
useWorkflowRegistry
|
||||||
|
.getState()
|
||||||
|
.setDeploymentStatus(
|
||||||
|
workflowId,
|
||||||
|
existingStatus?.isDeployed ?? false,
|
||||||
|
existingStatus?.deployedAt,
|
||||||
|
apiKey
|
||||||
|
)
|
||||||
|
logger.info('[SSE] Updated deployment status with API key', {
|
||||||
|
workflowId,
|
||||||
|
hasKey: !!apiKey,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to hydrate API key status', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
@@ -397,24 +460,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
: failedDependency || skipped
|
: failedDependency || skipped
|
||||||
? ClientToolCallState.rejected
|
? ClientToolCallState.rejected
|
||||||
: ClientToolCallState.error
|
: ClientToolCallState.error
|
||||||
const paramsForBlock =
|
|
||||||
b.toolCall?.id === toolCallId
|
|
||||||
? paramsForCurrentToolCall || b.toolCall?.params
|
|
||||||
: b.toolCall?.params
|
|
||||||
context.contentBlocks[i] = {
|
context.contentBlocks[i] = {
|
||||||
...b,
|
...b,
|
||||||
toolCall: {
|
toolCall: {
|
||||||
...b.toolCall,
|
...b.toolCall,
|
||||||
params: paramsForBlock,
|
|
||||||
ui: uiMetadata || b.toolCall?.ui,
|
|
||||||
execution: executionMetadata || b.toolCall?.execution,
|
|
||||||
state: targetState,
|
state: targetState,
|
||||||
display: resolveDisplayFromServerUi(
|
display: resolveToolDisplay(
|
||||||
b.toolCall?.name,
|
b.toolCall?.name,
|
||||||
targetState,
|
targetState,
|
||||||
toolCallId,
|
toolCallId,
|
||||||
paramsForBlock,
|
b.toolCall?.params
|
||||||
uiMetadata || b.toolCall?.ui
|
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -428,29 +483,106 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'copilot.tool.call': (data, context, get, set) => {
|
tool_error: (data, context, get, set) => {
|
||||||
|
try {
|
||||||
|
const errorData = asRecord(data?.data)
|
||||||
|
const toolCallId: string | undefined =
|
||||||
|
data?.toolCallId || (errorData.id as string | undefined)
|
||||||
|
const failedDependency: boolean = data?.failedDependency === true
|
||||||
|
if (!toolCallId) return
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const current = toolCallsById[toolCallId]
|
||||||
|
if (current) {
|
||||||
|
if (
|
||||||
|
isRejectedState(current.state) ||
|
||||||
|
isReviewState(current.state) ||
|
||||||
|
isBackgroundState(current.state)
|
||||||
|
) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const targetState = failedDependency
|
||||||
|
? ClientToolCallState.rejected
|
||||||
|
: ClientToolCallState.error
|
||||||
|
const updatedMap = { ...toolCallsById }
|
||||||
|
updatedMap[toolCallId] = {
|
||||||
|
...current,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
||||||
|
}
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
}
|
||||||
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
|
const b = context.contentBlocks[i]
|
||||||
|
if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
|
||||||
|
if (
|
||||||
|
isRejectedState(b.toolCall?.state) ||
|
||||||
|
isReviewState(b.toolCall?.state) ||
|
||||||
|
isBackgroundState(b.toolCall?.state)
|
||||||
|
)
|
||||||
|
break
|
||||||
|
const targetState = failedDependency
|
||||||
|
? ClientToolCallState.rejected
|
||||||
|
: ClientToolCallState.error
|
||||||
|
context.contentBlocks[i] = {
|
||||||
|
...b,
|
||||||
|
toolCall: {
|
||||||
|
...b.toolCall,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(
|
||||||
|
b.toolCall?.name,
|
||||||
|
targetState,
|
||||||
|
toolCallId,
|
||||||
|
b.toolCall?.params
|
||||||
|
),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to process tool_error SSE event', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tool_generating: (data, context, get, set) => {
|
||||||
|
const { toolCallId, toolName } = data
|
||||||
|
if (!toolCallId || !toolName) return
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
|
||||||
|
if (!toolCallsById[toolCallId]) {
|
||||||
|
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
||||||
|
const initialState = isAutoAllowed
|
||||||
|
? ClientToolCallState.executing
|
||||||
|
: ClientToolCallState.pending
|
||||||
|
const tc: CopilotToolCall = {
|
||||||
|
id: toolCallId,
|
||||||
|
name: toolName,
|
||||||
|
state: initialState,
|
||||||
|
display: resolveToolDisplay(toolName, initialState, toolCallId),
|
||||||
|
}
|
||||||
|
const updated = { ...toolCallsById, [toolCallId]: tc }
|
||||||
|
set({ toolCallsById: updated })
|
||||||
|
logger.info('[toolCallsById] map updated', updated)
|
||||||
|
|
||||||
|
upsertToolCallBlock(context, tc)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tool_call: (data, context, get, set) => {
|
||||||
const toolData = asRecord(data?.data)
|
const toolData = asRecord(data?.data)
|
||||||
const id: string | undefined =
|
const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
|
||||||
(toolData.id as string | undefined) ||
|
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||||
(toolData.callId as string | undefined) ||
|
|
||||||
data?.toolCallId
|
|
||||||
const name: string | undefined =
|
|
||||||
(toolData.name as string | undefined) ||
|
|
||||||
(toolData.toolName as string | undefined) ||
|
|
||||||
data?.toolName
|
|
||||||
if (!id) return
|
if (!id) return
|
||||||
const args = toolData.arguments as Record<string, unknown> | undefined
|
const args = toolData.arguments as Record<string, unknown> | undefined
|
||||||
const isPartial = toolData.partial === true
|
const isPartial = toolData.partial === true
|
||||||
const uiMetadata = extractToolUiMetadata(toolData)
|
|
||||||
const executionMetadata = extractToolExecutionMetadata(toolData)
|
|
||||||
const serverState = toolData.state
|
|
||||||
const { toolCallsById } = get()
|
const { toolCallsById } = get()
|
||||||
|
|
||||||
const existing = toolCallsById[id]
|
const existing = toolCallsById[id]
|
||||||
const toolName = name || existing?.name || 'unknown_tool'
|
const toolName = name || existing?.name || 'unknown_tool'
|
||||||
let initialState = serverState
|
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
||||||
? mapServerStateToClientState(serverState)
|
let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
|
||||||
: ClientToolCallState.pending
|
|
||||||
|
|
||||||
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
||||||
if (
|
if (
|
||||||
@@ -465,25 +597,15 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
...existing,
|
...existing,
|
||||||
name: toolName,
|
name: toolName,
|
||||||
state: initialState,
|
state: initialState,
|
||||||
ui: uiMetadata || existing.ui,
|
|
||||||
execution: executionMetadata || existing.execution,
|
|
||||||
...(args ? { params: args } : {}),
|
...(args ? { params: args } : {}),
|
||||||
display: resolveDisplayFromServerUi(
|
display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
|
||||||
toolName,
|
|
||||||
initialState,
|
|
||||||
id,
|
|
||||||
args || existing.params,
|
|
||||||
uiMetadata || existing.ui
|
|
||||||
),
|
|
||||||
}
|
}
|
||||||
: {
|
: {
|
||||||
id,
|
id,
|
||||||
name: toolName,
|
name: toolName,
|
||||||
state: initialState,
|
state: initialState,
|
||||||
ui: uiMetadata,
|
|
||||||
execution: executionMetadata,
|
|
||||||
...(args ? { params: args } : {}),
|
...(args ? { params: args } : {}),
|
||||||
display: resolveDisplayFromServerUi(toolName, initialState, id, args, uiMetadata),
|
display: resolveToolDisplay(toolName, initialState, id, args),
|
||||||
}
|
}
|
||||||
const updated = { ...toolCallsById, [id]: next }
|
const updated = { ...toolCallsById, [id]: next }
|
||||||
set({ toolCallsById: updated })
|
set({ toolCallsById: updated })
|
||||||
@@ -496,12 +618,20 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
const shouldInterrupt = next.ui?.showInterrupt === true
|
// Auto-allowed tools: send confirmation to the server so it can proceed
|
||||||
|
// without waiting for the user to click "Allow".
|
||||||
|
if (isAutoAllowed) {
|
||||||
|
sendAutoAcceptConfirmation(id)
|
||||||
|
}
|
||||||
|
|
||||||
// Client-run capability: execution is delegated to the browser.
|
// Client-executable run tools: execute on the client for real-time feedback
|
||||||
// We run immediately only when no interrupt is required.
|
// (block pulsing, console logs, stop button). The server defers execution
|
||||||
if (isClientRunCapability(next) && !shouldInterrupt) {
|
// for these tools in interactive mode; the client reports back via mark-complete.
|
||||||
executeRunToolOnClient(id, toolName, args || next.params || {})
|
if (
|
||||||
|
CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName) &&
|
||||||
|
initialState === ClientToolCallState.executing
|
||||||
|
) {
|
||||||
|
executeRunToolOnClient(id, toolName, args || existing?.params || {})
|
||||||
}
|
}
|
||||||
|
|
||||||
// OAuth: dispatch event to open the OAuth connect modal
|
// OAuth: dispatch event to open the OAuth connect modal
|
||||||
@@ -531,7 +661,7 @@ export const sseHandlers: Record<string, SSEHandler> = {

    return
  },
-  'copilot.phase.progress': (data, context, _get, set) => {
+  reasoning: (data, context, _get, set) => {
    const phase = (data && (data.phase || data?.data?.phase)) as string | undefined
    if (phase === 'start') {
      beginThinkingBlock(context)
@@ -548,7 +678,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
    appendThinkingContent(context, chunk)
    updateStreamingMessage(set, context)
  },
-  'copilot.content': (data, context, get, set) => {
+  content: (data, context, get, set) => {
    if (!data.data) return
    context.pendingContent += data.data

@@ -763,7 +893,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
      updateStreamingMessage(set, context)
    }
  },
-  'copilot.phase.completed': (_data, context) => {
+  done: (_data, context) => {
    logger.info('[SSE] DONE EVENT RECEIVED', {
      doneEventCount: context.doneEventCount,
      data: _data,
@@ -774,7 +904,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
      context.streamComplete = true
    }
  },
-  'copilot.error': (data, context, _get, set) => {
+  error: (data, context, _get, set) => {
    logger.error('Stream error:', data.error)
    set((state: CopilotStore) => ({
      messages: state.messages.map((msg) =>
@@ -789,7 +919,6 @@ export const sseHandlers: Record<string, SSEHandler> = {
    }))
    context.streamComplete = true
  },
-  'copilot.phase.started': () => {},
  stream_end: (_data, context, _get, set) => {
    if (context.pendingContent) {
      if (context.isInThinkingBlock && context.currentThinkingBlock) {
@@ -804,8 +933,3 @@ export const sseHandlers: Record<string, SSEHandler> = {
  },
  default: () => {},
}
-
-sseHandlers['copilot.tool.interrupt_required'] = sseHandlers['copilot.tool.call']
-sseHandlers['copilot.workflow.patch'] = sseHandlers['copilot.tool.result']
-sseHandlers['copilot.workflow.verify'] = sseHandlers['copilot.tool.result']
-sseHandlers['copilot.tool.interrupt_resolved'] = sseHandlers['copilot.tool.result']
|
|||||||
@@ -15,7 +15,10 @@ const logger = createLogger('CopilotRunToolExecution')
 * (block pulsing, logs, stop button, etc.).
 */
export const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
-  'workflow_run',
+  'run_workflow',
+  'run_workflow_until_block',
+  'run_from_block',
+  'run_block',
])

/**
@@ -71,44 +74,21 @@ async function doExecuteRunTool(
|
|||||||
| Record<string, unknown>
|
| Record<string, unknown>
|
||||||
| undefined
|
| undefined
|
||||||
|
|
||||||
const runMode =
|
|
||||||
toolName === 'workflow_run' ? ((params.mode as string | undefined) || 'full').toLowerCase() : undefined
|
|
||||||
|
|
||||||
if (
|
|
||||||
toolName === 'workflow_run' &&
|
|
||||||
runMode !== 'full' &&
|
|
||||||
runMode !== 'until_block' &&
|
|
||||||
runMode !== 'from_block' &&
|
|
||||||
runMode !== 'block'
|
|
||||||
) {
|
|
||||||
const error = `Unsupported workflow_run mode: ${String(params.mode)}`
|
|
||||||
logger.warn('[RunTool] Execution prevented: unsupported workflow_run mode', {
|
|
||||||
toolCallId,
|
|
||||||
mode: params.mode,
|
|
||||||
})
|
|
||||||
setToolState(toolCallId, ClientToolCallState.error)
|
|
||||||
await reportCompletion(toolCallId, false, error)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const stopAfterBlockId = (() => {
|
const stopAfterBlockId = (() => {
|
||||||
if (toolName === 'workflow_run' && runMode === 'until_block') {
|
if (toolName === 'run_workflow_until_block')
|
||||||
return params.stopAfterBlockId as string | undefined
|
return params.stopAfterBlockId as string | undefined
|
||||||
}
|
if (toolName === 'run_block') return params.blockId as string | undefined
|
||||||
if (toolName === 'workflow_run' && runMode === 'block') {
|
|
||||||
return params.blockId as string | undefined
|
|
||||||
}
|
|
||||||
return undefined
|
return undefined
|
||||||
})()
|
})()
|
||||||
|
|
||||||
const runFromBlock = (() => {
|
const runFromBlock = (() => {
|
||||||
if (toolName === 'workflow_run' && runMode === 'from_block' && params.startBlockId) {
|
if (toolName === 'run_from_block' && params.startBlockId) {
|
||||||
return {
|
return {
|
||||||
startBlockId: params.startBlockId as string,
|
startBlockId: params.startBlockId as string,
|
||||||
executionId: (params.executionId as string | undefined) || 'latest',
|
executionId: (params.executionId as string | undefined) || 'latest',
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (toolName === 'workflow_run' && runMode === 'block' && params.blockId) {
|
if (toolName === 'run_block' && params.blockId) {
|
||||||
return {
|
return {
|
||||||
startBlockId: params.blockId as string,
|
startBlockId: params.blockId as string,
|
||||||
executionId: (params.executionId as string | undefined) || 'latest',
|
executionId: (params.executionId as string | undefined) || 'latest',
|
||||||
|
|||||||
@@ -1,172 +0,0 @@
|
|||||||
/**
|
|
||||||
* @vitest-environment node
|
|
||||||
*/
|
|
||||||
import { describe, expect, it, vi } from 'vitest'
|
|
||||||
import { applySseEvent } from '@/lib/copilot/client-sse/subagent-handlers'
|
|
||||||
import type { ClientStreamingContext } from '@/lib/copilot/client-sse/types'
|
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
|
||||||
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
|
||||||
|
|
||||||
type StoreSet = (
|
|
||||||
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
|
||||||
) => void
|
|
||||||
|
|
||||||
function createTestStore(initialToolCalls: Record<string, CopilotToolCall>) {
|
|
||||||
const state: Partial<CopilotStore> = {
|
|
||||||
messages: [{ id: 'assistant-msg', role: 'assistant', content: '', timestamp: new Date().toISOString() }],
|
|
||||||
toolCallsById: { ...initialToolCalls },
|
|
||||||
currentChat: null,
|
|
||||||
chats: [],
|
|
||||||
activeStream: null,
|
|
||||||
updatePlanTodoStatus: vi.fn(),
|
|
||||||
handleNewChatCreation: vi.fn().mockResolvedValue(undefined),
|
|
||||||
}
|
|
||||||
|
|
||||||
const get = () => state as CopilotStore
|
|
||||||
const set: StoreSet = (partial) => {
|
|
||||||
const patch = typeof partial === 'function' ? partial(get()) : partial
|
|
||||||
Object.assign(state, patch)
|
|
||||||
}
|
|
||||||
|
|
||||||
return { get, set }
|
|
||||||
}
|
|
||||||
|
|
||||||
function createStreamingContext(): ClientStreamingContext {
|
|
||||||
return {
|
|
||||||
messageId: 'assistant-msg',
|
|
||||||
accumulatedContent: '',
|
|
||||||
contentBlocks: [],
|
|
||||||
currentTextBlock: null,
|
|
||||||
isInThinkingBlock: false,
|
|
||||||
currentThinkingBlock: null,
|
|
||||||
isInDesignWorkflowBlock: false,
|
|
||||||
designWorkflowContent: '',
|
|
||||||
pendingContent: '',
|
|
||||||
doneEventCount: 0,
|
|
||||||
streamComplete: false,
|
|
||||||
subAgentContent: {},
|
|
||||||
subAgentToolCalls: {},
|
|
||||||
subAgentBlocks: {},
|
|
||||||
suppressStreamingUpdates: true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('client SSE copilot.* stream smoke', () => {
|
|
||||||
it('processes main tool call/result events with copilot.* keys', async () => {
|
|
||||||
const { get, set } = createTestStore({})
|
|
||||||
const context = createStreamingContext()
|
|
||||||
|
|
||||||
await applySseEvent(
|
|
||||||
{
|
|
||||||
type: 'copilot.tool.call',
|
|
||||||
data: { id: 'main-tool-1', name: 'get_user_workflow', state: 'executing', arguments: {} },
|
|
||||||
} as any,
|
|
||||||
context,
|
|
||||||
get,
|
|
||||||
set
|
|
||||||
)
|
|
||||||
|
|
||||||
await applySseEvent(
|
|
||||||
{
|
|
||||||
type: 'copilot.tool.result',
|
|
||||||
toolCallId: 'main-tool-1',
|
|
||||||
success: true,
|
|
||||||
result: { ok: true },
|
|
||||||
data: {
|
|
||||||
id: 'main-tool-1',
|
|
||||||
name: 'get_user_workflow',
|
|
||||||
phase: 'completed',
|
|
||||||
state: 'success',
|
|
||||||
success: true,
|
|
||||||
result: { ok: true },
|
|
||||||
},
|
|
||||||
} as any,
|
|
||||||
context,
|
|
||||||
get,
|
|
||||||
set
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(get().toolCallsById['main-tool-1']).toBeDefined()
|
|
||||||
expect(get().toolCallsById['main-tool-1'].state).toBe(ClientToolCallState.success)
|
|
||||||
expect(
|
|
||||||
context.contentBlocks.some(
|
|
||||||
(block) => block.type === 'tool_call' && block.toolCall?.id === 'main-tool-1'
|
|
||||||
)
|
|
||||||
).toBe(true)
|
|
||||||
})
|
|
||||||
|
|
||||||
it('processes subagent start/tool/result/end with copilot.* keys', async () => {
|
|
||||||
const parentToolCallId = 'parent-edit-tool'
|
|
||||||
const { get, set } = createTestStore({
|
|
||||||
[parentToolCallId]: {
|
|
||||||
id: parentToolCallId,
|
|
||||||
name: 'edit',
|
|
||||||
state: ClientToolCallState.executing,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
const context = createStreamingContext()
|
|
||||||
|
|
||||||
await applySseEvent(
|
|
||||||
{
|
|
||||||
type: 'copilot.subagent.started',
|
|
||||||
subagent: 'edit',
|
|
||||||
data: { tool_call_id: parentToolCallId },
|
|
||||||
} as any,
|
|
||||||
context,
|
|
||||||
get,
|
|
||||||
set
|
|
||||||
)
|
|
||||||
|
|
||||||
await applySseEvent(
|
|
||||||
{
|
|
||||||
type: 'copilot.tool.call',
|
|
||||||
subagent: 'edit',
|
|
||||||
data: {
|
|
||||||
id: 'sub-tool-1',
|
|
||||||
name: 'workflow_context_get',
|
|
||||||
state: 'executing',
|
|
||||||
arguments: { includeSchemas: false },
|
|
||||||
},
|
|
||||||
} as any,
|
|
||||||
context,
|
|
||||||
get,
|
|
||||||
set
|
|
||||||
)
|
|
||||||
|
|
||||||
await applySseEvent(
|
|
||||||
{
|
|
||||||
type: 'copilot.tool.result',
|
|
||||||
subagent: 'edit',
|
|
||||||
data: {
|
|
||||||
id: 'sub-tool-1',
|
|
||||||
name: 'workflow_context_get',
|
|
||||||
phase: 'completed',
|
|
||||||
state: 'success',
|
|
||||||
success: true,
|
|
||||||
result: { contextPackId: 'pack-1' },
|
|
||||||
},
|
|
||||||
} as any,
|
|
||||||
context,
|
|
||||||
get,
|
|
||||||
set
|
|
||||||
)
|
|
||||||
|
|
||||||
await applySseEvent(
|
|
||||||
{
|
|
||||||
type: 'copilot.subagent.completed',
|
|
||||||
subagent: 'edit',
|
|
||||||
data: {},
|
|
||||||
} as any,
|
|
||||||
context,
|
|
||||||
get,
|
|
||||||
set
|
|
||||||
)
|
|
||||||
|
|
||||||
const parentToolCall = get().toolCallsById[parentToolCallId]
|
|
||||||
expect(parentToolCall).toBeDefined()
|
|
||||||
expect(parentToolCall.subAgentStreaming).toBe(false)
|
|
||||||
expect(parentToolCall.subAgentToolCalls?.length).toBe(1)
|
|
||||||
expect(parentToolCall.subAgentToolCalls?.[0]?.id).toBe('sub-tool-1')
|
|
||||||
expect(parentToolCall.subAgentToolCalls?.[0]?.state).toBe(ClientToolCallState.success)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
@@ -6,23 +6,16 @@ import {
|
|||||||
shouldSkipToolResultEvent,
|
shouldSkipToolResultEvent,
|
||||||
} from '@/lib/copilot/orchestrator/sse-utils'
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { resolveToolDisplay } from '@/lib/copilot/store-utils'
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
import {
|
import {
|
||||||
type SSEHandler,
|
type SSEHandler,
|
||||||
|
sendAutoAcceptConfirmation,
|
||||||
sseHandlers,
|
sseHandlers,
|
||||||
updateStreamingMessage,
|
updateStreamingMessage,
|
||||||
} from './handlers'
|
} from './handlers'
|
||||||
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
||||||
import {
|
|
||||||
extractOperationListFromResultPayload,
|
|
||||||
extractToolExecutionMetadata,
|
|
||||||
extractToolUiMetadata,
|
|
||||||
isWorkflowChangeApplyCall,
|
|
||||||
mapServerStateToClientState,
|
|
||||||
resolveDisplayFromServerUi,
|
|
||||||
} from './tool-call-helpers'
|
|
||||||
import { applyToolEffects } from './tool-effects'
|
|
||||||
import type { ClientStreamingContext } from './types'
|
import type { ClientStreamingContext } from './types'
|
||||||
|
|
||||||
const logger = createLogger('CopilotClientSubagentHandlers')
|
const logger = createLogger('CopilotClientSubagentHandlers')
|
||||||
@@ -31,13 +24,6 @@ type StoreSet = (
|
|||||||
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
||||||
) => void
|
) => void
|
||||||
|
|
||||||
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
|
|
||||||
if (toolCall.execution?.target === 'sim_client_capability') {
|
|
||||||
return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
|
|
||||||
}
|
|
||||||
return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
|
|
||||||
}
|
|
||||||
|
|
||||||
export function appendSubAgentContent(
|
export function appendSubAgentContent(
|
||||||
context: ClientStreamingContext,
|
context: ClientStreamingContext,
|
||||||
parentToolCallId: string,
|
parentToolCallId: string,
|
||||||
@@ -124,11 +110,11 @@ export function updateToolCallWithSubAgentData(
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||||
'copilot.phase.started': () => {
|
start: () => {
|
||||||
// No-op: parent subagent association is handled by copilot.subagent.started.
|
// Subagent start event - no action needed, parent is already tracked from subagent_start
|
||||||
},
|
},
|
||||||
|
|
||||||
'copilot.content': (data, context, get, set) => {
|
content: (data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
const contentStr = typeof data.data === 'string' ? data.data : data.content || ''
|
const contentStr = typeof data.data === 'string' ? data.data : data.content || ''
|
||||||
logger.info('[SubAgent] content event', {
|
logger.info('[SubAgent] content event', {
|
||||||
@@ -149,7 +135,7 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
},
|
},
|
||||||
|
|
||||||
'copilot.phase.progress': (data, context, get, set) => {
|
reasoning: (data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
const dataObj = asRecord(data?.data)
|
const dataObj = asRecord(data?.data)
|
||||||
const phase = data?.phase || (dataObj.phase as string | undefined)
|
const phase = data?.phase || (dataObj.phase as string | undefined)
|
||||||
@@ -165,7 +151,11 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
},
|
},
|
||||||
|
|
||||||
'copilot.tool.call': async (data, context, get, set) => {
|
tool_generating: () => {
|
||||||
|
// Tool generating event - no action needed, we'll handle the actual tool_call
|
||||||
|
},
|
||||||
|
|
||||||
|
tool_call: async (data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
@@ -174,8 +164,6 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||||
if (!id || !name) return
|
if (!id || !name) return
|
||||||
const isPartial = toolData.partial === true
|
const isPartial = toolData.partial === true
|
||||||
const uiMetadata = extractToolUiMetadata(toolData)
|
|
||||||
const executionMetadata = extractToolExecutionMetadata(toolData)
|
|
||||||
|
|
||||||
let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
|
let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
|
||||||
| Record<string, unknown>
|
| Record<string, unknown>
|
||||||
@@ -211,10 +199,9 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
const existingToolCall =
|
const existingToolCall =
|
||||||
existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined
|
existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined
|
||||||
|
|
||||||
const serverState = toolData.state
|
// Auto-allowed tools skip pending state to avoid flashing interrupt buttons
|
||||||
let initialState = serverState
|
const isAutoAllowed = get().isToolAutoAllowed(name)
|
||||||
? mapServerStateToClientState(serverState)
|
let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
|
||||||
: ClientToolCallState.pending
|
|
||||||
|
|
||||||
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
||||||
if (
|
if (
|
||||||
@@ -228,10 +215,8 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
id,
|
id,
|
||||||
name,
|
name,
|
||||||
state: initialState,
|
state: initialState,
|
||||||
ui: uiMetadata,
|
|
||||||
execution: executionMetadata,
|
|
||||||
...(args ? { params: args } : {}),
|
...(args ? { params: args } : {}),
|
||||||
display: resolveDisplayFromServerUi(name, initialState, id, args, uiMetadata),
|
display: resolveToolDisplay(name, initialState, id, args),
|
||||||
}
|
}
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
@@ -256,16 +241,21 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
const shouldInterrupt = subAgentToolCall.ui?.showInterrupt === true
|
// Auto-allowed tools: send confirmation to the server so it can proceed
|
||||||
|
// without waiting for the user to click "Allow".
|
||||||
|
if (isAutoAllowed) {
|
||||||
|
sendAutoAcceptConfirmation(id)
|
||||||
|
}
|
||||||
|
|
||||||
// Client-run capability: execution is delegated to the browser.
|
// Client-executable run tools: if auto-allowed, execute immediately for
|
||||||
// Execute immediately only for non-interrupting calls.
|
// real-time feedback. For non-auto-allowed, the user must click "Allow"
|
||||||
if (isClientRunCapability(subAgentToolCall) && !shouldInterrupt) {
|
// first — handleRun in tool-call.tsx triggers executeRunToolOnClient.
|
||||||
|
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(name) && isAutoAllowed) {
|
||||||
executeRunToolOnClient(id, name, args || {})
|
executeRunToolOnClient(id, name, args || {})
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
'copilot.tool.result': (data, context, get, set) => {
|
tool_result: (data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
@@ -285,51 +275,17 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
if (!context.subAgentToolCalls[parentToolCallId]) return
|
if (!context.subAgentToolCalls[parentToolCallId]) return
|
||||||
if (!context.subAgentBlocks[parentToolCallId]) return
|
if (!context.subAgentBlocks[parentToolCallId]) return
|
||||||
|
|
||||||
const serverState = resultData.state
|
const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
|
||||||
const targetState = serverState
|
|
||||||
? mapServerStateToClientState(serverState)
|
|
||||||
: success
|
|
||||||
? ClientToolCallState.success
|
|
||||||
: ClientToolCallState.error
|
|
||||||
const uiMetadata = extractToolUiMetadata(resultData)
|
|
||||||
const executionMetadata = extractToolExecutionMetadata(resultData)
|
|
||||||
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||||
(tc: CopilotToolCall) => tc.id === toolCallId
|
(tc: CopilotToolCall) => tc.id === toolCallId
|
||||||
)
|
)
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
||||||
let nextParams = existing.params
|
|
||||||
const resultPayload = asRecord(
|
|
||||||
data?.result || resultData.result || resultData.data || data?.data
|
|
||||||
)
|
|
||||||
if (
|
|
||||||
targetState === ClientToolCallState.success &&
|
|
||||||
isWorkflowChangeApplyCall(existing.name, existing.params as Record<string, unknown>) &&
|
|
||||||
resultPayload
|
|
||||||
) {
|
|
||||||
const operations = extractOperationListFromResultPayload(resultPayload)
|
|
||||||
if (operations && operations.length > 0) {
|
|
||||||
nextParams = {
|
|
||||||
...(existing.params || {}),
|
|
||||||
operations,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const updatedSubAgentToolCall = {
|
const updatedSubAgentToolCall = {
|
||||||
...existing,
|
...existing,
|
||||||
params: nextParams,
|
|
||||||
ui: uiMetadata || existing.ui,
|
|
||||||
execution: executionMetadata || existing.execution,
|
|
||||||
state: targetState,
|
state: targetState,
|
||||||
display: resolveDisplayFromServerUi(
|
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
|
||||||
existing.name,
|
|
||||||
targetState,
|
|
||||||
toolCallId,
|
|
||||||
nextParams,
|
|
||||||
uiMetadata || existing.ui
|
|
||||||
),
|
|
||||||
}
|
}
|
||||||
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
||||||
|
|
||||||
@@ -353,18 +309,12 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
state: targetState,
|
state: targetState,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
applyToolEffects({
|
|
||||||
effectsRaw: resultData.effects,
|
|
||||||
toolCall: updatedSubAgentToolCall,
|
|
||||||
resultPayload,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
},
|
},
|
||||||
|
|
||||||
'copilot.phase.completed': (_data, context, get, set) => {
|
done: (_data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
@@ -372,11 +322,6 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
subAgentSSEHandlers['copilot.tool.interrupt_required'] = subAgentSSEHandlers['copilot.tool.call']
|
|
||||||
subAgentSSEHandlers['copilot.workflow.patch'] = subAgentSSEHandlers['copilot.tool.result']
|
|
||||||
subAgentSSEHandlers['copilot.workflow.verify'] = subAgentSSEHandlers['copilot.tool.result']
|
|
||||||
subAgentSSEHandlers['copilot.tool.interrupt_resolved'] = subAgentSSEHandlers['copilot.tool.result']
|
|
||||||
|
|
||||||
export async function applySseEvent(
|
export async function applySseEvent(
|
||||||
rawData: SSEEvent,
|
rawData: SSEEvent,
|
||||||
context: ClientStreamingContext,
|
context: ClientStreamingContext,
|
||||||
@@ -389,7 +334,7 @@ export async function applySseEvent(
|
|||||||
}
|
}
|
||||||
const data = normalizedEvent
|
const data = normalizedEvent
|
||||||
|
|
||||||
if (data.type === 'copilot.subagent.started') {
|
if (data.type === 'subagent_start') {
|
||||||
const startData = asRecord(data.data)
|
const startData = asRecord(data.data)
|
||||||
const toolCallId = startData.tool_call_id as string | undefined
|
const toolCallId = startData.tool_call_id as string | undefined
|
||||||
if (toolCallId) {
|
if (toolCallId) {
|
||||||
@@ -412,7 +357,7 @@ export async function applySseEvent(
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data.type === 'copilot.subagent.completed') {
|
if (data.type === 'subagent_end') {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (parentToolCallId) {
|
if (parentToolCallId) {
|
||||||
const { toolCallsById } = get()
|
const { toolCallsById } = get()
|
||||||
|
|||||||
@@ -1,134 +0,0 @@
|
|||||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
|
||||||
import { humanizedFallback, resolveToolDisplay } from '@/lib/copilot/store-utils'
|
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
|
||||||
import type { CopilotToolCall } from '@/stores/panel/copilot/types'
|
|
||||||
|
|
||||||
export function mapServerStateToClientState(state: unknown): ClientToolCallState {
|
|
||||||
switch (String(state || '')) {
|
|
||||||
case 'generating':
|
|
||||||
return ClientToolCallState.generating
|
|
||||||
case 'pending':
|
|
||||||
case 'awaiting_approval':
|
|
||||||
return ClientToolCallState.pending
|
|
||||||
case 'executing':
|
|
||||||
return ClientToolCallState.executing
|
|
||||||
case 'success':
|
|
||||||
return ClientToolCallState.success
|
|
||||||
case 'rejected':
|
|
||||||
case 'skipped':
|
|
||||||
return ClientToolCallState.rejected
|
|
||||||
case 'aborted':
|
|
||||||
return ClientToolCallState.aborted
|
|
||||||
case 'error':
|
|
||||||
case 'failed':
|
|
||||||
return ClientToolCallState.error
|
|
||||||
default:
|
|
||||||
return ClientToolCallState.pending
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function extractToolUiMetadata(
|
|
||||||
data: Record<string, unknown>
|
|
||||||
): CopilotToolCall['ui'] | undefined {
|
|
||||||
const ui = asRecord(data.ui)
|
|
||||||
if (!ui || Object.keys(ui).length === 0) return undefined
|
|
||||||
const autoAllowedFromUi = ui.autoAllowed === true
|
|
||||||
const autoAllowedFromData = data.autoAllowed === true
|
|
||||||
return {
|
|
||||||
title: typeof ui.title === 'string' ? ui.title : undefined,
|
|
||||||
phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
|
|
||||||
icon: typeof ui.icon === 'string' ? ui.icon : undefined,
|
|
||||||
showInterrupt: ui.showInterrupt === true,
|
|
||||||
showRemember: ui.showRemember === true,
|
|
||||||
autoAllowed: autoAllowedFromUi || autoAllowedFromData,
|
|
||||||
actions: Array.isArray(ui.actions)
|
|
||||||
? ui.actions
|
|
||||||
.map((action) => {
|
|
||||||
const a = asRecord(action)
|
|
||||||
const id = typeof a.id === 'string' ? a.id : undefined
|
|
||||||
const label = typeof a.label === 'string' ? a.label : undefined
|
|
||||||
const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
|
|
||||||
if (!id || !label) return null
|
|
||||||
return {
|
|
||||||
id,
|
|
||||||
label,
|
|
||||||
kind,
|
|
||||||
remember: a.remember === true,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.filter((a): a is NonNullable<typeof a> => !!a)
|
|
||||||
: undefined,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function extractToolExecutionMetadata(
|
|
||||||
data: Record<string, unknown>
|
|
||||||
): CopilotToolCall['execution'] | undefined {
|
|
||||||
const execution = asRecord(data.execution)
|
|
||||||
if (!execution || Object.keys(execution).length === 0) return undefined
|
|
||||||
return {
|
|
||||||
target: typeof execution.target === 'string' ? execution.target : undefined,
|
|
||||||
capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function displayVerb(state: ClientToolCallState): string {
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return 'Completed'
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return 'Failed'
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return 'Skipped'
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return 'Aborted'
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
return 'Preparing'
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return 'Waiting'
|
|
||||||
default:
|
|
||||||
return 'Running'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function resolveDisplayFromServerUi(
|
|
||||||
toolName: string,
|
|
||||||
state: ClientToolCallState,
|
|
||||||
toolCallId: string,
|
|
||||||
params: Record<string, unknown> | undefined,
|
|
||||||
ui?: CopilotToolCall['ui']
|
|
||||||
) {
|
|
||||||
const fallback =
|
|
||||||
resolveToolDisplay(toolName, state, toolCallId, params) ||
|
|
||||||
humanizedFallback(toolName, state)
|
|
||||||
if (!fallback) return undefined
|
|
||||||
if (ui?.phaseLabel) {
|
|
||||||
return { text: ui.phaseLabel, icon: fallback.icon }
|
|
||||||
}
|
|
||||||
if (ui?.title) {
|
|
||||||
return { text: `${displayVerb(state)} ${ui.title}`, icon: fallback.icon }
|
|
||||||
}
|
|
||||||
return fallback
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isWorkflowChangeApplyCall(
|
|
||||||
toolName?: string,
|
|
||||||
params?: Record<string, unknown>
|
|
||||||
): boolean {
|
|
||||||
if (toolName !== 'workflow_change') return false
|
|
||||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
|
||||||
if (mode === 'apply') return true
|
|
||||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
export function extractOperationListFromResultPayload(
|
|
||||||
resultPayload: Record<string, unknown>
|
|
||||||
): Array<Record<string, unknown>> | undefined {
|
|
||||||
const operations = resultPayload.operations
|
|
||||||
if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>
|
|
||||||
|
|
||||||
const compiled = resultPayload.compiledOperations
|
|
||||||
if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>
|
|
||||||
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
@@ -1,170 +0,0 @@
/**
 * @vitest-environment node
 */

import { loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/logger', () => loggerMock)

const mocked = vi.hoisted(() => ({
  setProposedChanges: vi.fn().mockResolvedValue(undefined),
  loadEnvironmentVariables: vi.fn(),
  loadVariablesForWorkflow: vi.fn(),
  getWorkflowDeploymentStatus: vi.fn().mockReturnValue(null),
  setDeploymentStatus: vi.fn(),
  registryState: {
    activeWorkflowId: 'workflow-active',
  },
}))

vi.mock('@/stores/workflow-diff/store', () => ({
  useWorkflowDiffStore: {
    getState: () => ({
      setProposedChanges: mocked.setProposedChanges,
    }),
  },
}))

vi.mock('@/stores/settings/environment/store', () => ({
  useEnvironmentStore: {
    getState: () => ({
      loadEnvironmentVariables: mocked.loadEnvironmentVariables,
    }),
  },
}))

vi.mock('@/stores/panel/variables/store', () => ({
  useVariablesStore: {
    getState: () => ({
      loadForWorkflow: mocked.loadVariablesForWorkflow,
    }),
  },
}))

vi.mock('@/stores/workflows/registry/store', () => ({
  useWorkflowRegistry: {
    getState: () => ({
      activeWorkflowId: mocked.registryState.activeWorkflowId,
      getWorkflowDeploymentStatus: mocked.getWorkflowDeploymentStatus,
      setDeploymentStatus: mocked.setDeploymentStatus,
    }),
  },
}))

import { applyToolEffects } from '@/lib/copilot/client-sse/tool-effects'

describe('applyToolEffects', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    mocked.registryState.activeWorkflowId = 'workflow-active'
  })

  it('applies workflow_change fallback diff when effects are absent', () => {
    const workflowState = {
      blocks: {
        start: { id: 'start', metadata: { id: 'start', type: 'start' }, inputs: {}, outputs: {} },
      },
      edges: [],
      loops: {},
      parallels: {},
    }

    applyToolEffects({
      effectsRaw: [],
      toolCall: {
        id: 'tool-1',
        name: 'workflow_change',
        state: 'success',
        params: { workflowId: 'workflow-123' },
      } as any,
      resultPayload: {
        workflowState,
      },
    })

    expect(mocked.setProposedChanges).toHaveBeenCalledTimes(1)
    expect(mocked.setProposedChanges).toHaveBeenCalledWith(workflowState)
  })

  it('applies workflow_change fallback diff from nested editResult.workflowState', () => {
    const workflowState = {
      blocks: {
        start: { id: 'start', metadata: { id: 'start', type: 'start' }, inputs: {}, outputs: {} },
      },
      edges: [],
      loops: {},
      parallels: {},
    }

    applyToolEffects({
      effectsRaw: [],
      toolCall: {
        id: 'tool-2',
        name: 'workflow_change',
        state: 'success',
      } as any,
      resultPayload: {
        editResult: {
          workflowState,
        },
      },
    })

    expect(mocked.setProposedChanges).toHaveBeenCalledTimes(1)
    expect(mocked.setProposedChanges).toHaveBeenCalledWith(workflowState)
  })

  it('applies explicit workflow.diff.proposed effect', () => {
    const workflowState = {
      blocks: {
        start: { id: 'start', metadata: { id: 'start', type: 'start' }, inputs: {}, outputs: {} },
      },
      edges: [],
      loops: {},
      parallels: {},
    }

    applyToolEffects({
      effectsRaw: [
        {
          kind: 'workflow.diff.proposed',
          payload: {
            workflowState,
          },
        },
      ],
      toolCall: {
        id: 'tool-3',
        name: 'workflow_change',
        state: 'success',
      } as any,
    })

    expect(mocked.setProposedChanges).toHaveBeenCalledTimes(1)
    expect(mocked.setProposedChanges).toHaveBeenCalledWith(workflowState)
  })

  it('does not apply fallback diff for non-workflow_change tools', () => {
    const workflowState = {
      blocks: {},
      edges: [],
      loops: {},
      parallels: {},
    }

    applyToolEffects({
      effectsRaw: [],
      toolCall: {
        id: 'tool-4',
        name: 'list_workflows',
        state: 'success',
      } as any,
      resultPayload: {
        workflowState,
      },
    })

    expect(mocked.setProposedChanges).not.toHaveBeenCalled()
  })
})
@@ -1,180 +0,0 @@
import { createLogger } from '@sim/logger'
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
import type { CopilotToolCall } from '@/stores/panel/copilot/types'
import { useVariablesStore } from '@/stores/panel/variables/store'
import { useEnvironmentStore } from '@/stores/settings/environment/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

const logger = createLogger('CopilotToolEffects')

type ParsedToolEffect = {
  kind: string
  payload: Record<string, unknown>
}

function asNonEmptyRecord(value: unknown): Record<string, unknown> | null {
  const record = asRecord(value)
  return Object.keys(record).length > 0 ? record : null
}

function parseToolEffects(raw: unknown): ParsedToolEffect[] {
  if (!Array.isArray(raw)) return []
  const effects: ParsedToolEffect[] = []
  for (const item of raw) {
    const effect = asRecord(item)
    const kind = typeof effect.kind === 'string' ? effect.kind : ''
    if (!kind) continue
    effects.push({
      kind,
      payload: asRecord(effect.payload) || {},
    })
  }
  return effects
}

function resolveWorkflowId(
  payload: Record<string, unknown>,
  toolCall?: CopilotToolCall
): string | undefined {
  const payloadWorkflowId = typeof payload.workflowId === 'string' ? payload.workflowId : undefined
  if (payloadWorkflowId) return payloadWorkflowId

  const params = asRecord(toolCall?.params)
  const paramWorkflowId = typeof params?.workflowId === 'string' ? params.workflowId : undefined
  if (paramWorkflowId) return paramWorkflowId

  return useWorkflowRegistry.getState().activeWorkflowId || undefined
}

function resolveWorkflowState(
  payload: Record<string, unknown>,
  resultPayload?: Record<string, unknown>
): WorkflowState | null {
  const payloadState = asNonEmptyRecord(payload.workflowState)
  if (payloadState) return payloadState as unknown as WorkflowState

  if (resultPayload) {
    const directState = asNonEmptyRecord(resultPayload.workflowState)
    if (directState) return directState as unknown as WorkflowState
    const editResult = asRecord(resultPayload.editResult)
    const nestedState = asNonEmptyRecord(editResult?.workflowState)
    if (nestedState) return nestedState as unknown as WorkflowState
  }

  return null
}

function applyDeploymentSyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return

  const registry = useWorkflowRegistry.getState()
  const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)

  const isDeployed =
    typeof payload.isDeployed === 'boolean'
      ? payload.isDeployed
      : (existingStatus?.isDeployed ?? true)

  const deployedAt = (() => {
    if (typeof payload.deployedAt === 'string' && payload.deployedAt) {
      const parsed = new Date(payload.deployedAt)
      if (!Number.isNaN(parsed.getTime())) return parsed
    }
    return existingStatus?.deployedAt
  })()

  const apiKey =
    typeof payload.apiKey === 'string' && payload.apiKey.length > 0
      ? payload.apiKey
      : existingStatus?.apiKey

  registry.setDeploymentStatus(workflowId, isDeployed, deployedAt, apiKey)
}

function applyApiKeySyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return

  const apiKey = typeof payload.apiKey === 'string' ? payload.apiKey : undefined
  const registry = useWorkflowRegistry.getState()
  const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)
  registry.setDeploymentStatus(
    workflowId,
    existingStatus?.isDeployed ?? false,
    existingStatus?.deployedAt,
    apiKey || existingStatus?.apiKey
  )
}

function applyWorkflowVariablesReload(
  payload: Record<string, unknown>,
  toolCall?: CopilotToolCall
): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return
  useVariablesStore.getState().loadForWorkflow(workflowId)
}

export function applyToolEffects(params: {
  effectsRaw: unknown
  toolCall?: CopilotToolCall
  resultPayload?: Record<string, unknown>
}): void {
  const effects = parseToolEffects(params.effectsRaw)
  if (effects.length === 0) {
    if (params.toolCall?.name === 'workflow_change' && params.resultPayload) {
      const workflowState = resolveWorkflowState({}, params.resultPayload)
      if (!workflowState) return
      useWorkflowDiffStore
        .getState()
        .setProposedChanges(workflowState)
        .catch((error) => {
          logger.error('Failed to apply fallback workflow diff from result payload', {
            error: error instanceof Error ? error.message : String(error),
          })
        })
    }
    return
  }

  for (const effect of effects) {
    switch (effect.kind) {
      case 'workflow.diff.proposed': {
        const workflowState = resolveWorkflowState(effect.payload, params.resultPayload)
        if (!workflowState) break
        useWorkflowDiffStore
          .getState()
          .setProposedChanges(workflowState)
          .catch((error) => {
            logger.error('Failed to apply workflow diff effect', {
              error: error instanceof Error ? error.message : String(error),
            })
          })
        break
      }

      case 'workflow.deployment.sync':
        applyDeploymentSyncEffect(effect.payload, params.toolCall)
        break

      case 'workflow.api_key.sync':
        applyApiKeySyncEffect(effect.payload, params.toolCall)
        break

      case 'environment.variables.reload':
        useEnvironmentStore.getState().loadEnvironmentVariables()
        break

      case 'workflow.variables.reload':
        applyWorkflowVariablesReload(effect.payload, params.toolCall)
        break

      default:
        logger.debug('Ignoring unknown tool effect', { kind: effect.kind })
        break
    }
  }
}
@@ -101,6 +101,9 @@ export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'
 /** POST — revert to a checkpoint. */
 export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'

+/** GET/POST/DELETE — manage auto-allowed tools. */
+export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
+
 /** GET — fetch dynamically available copilot models. */
 export const COPILOT_MODELS_API_PATH = '/api/copilot/models'
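The hunk above only adds the route constant; the request and response contract of the auto-allowed-tools endpoint is not part of this diff. A hedged sketch of how a client might call it (the POST body shape is an assumption):

// Hypothetical client call; only the constant itself comes from the diff above.
async function addAutoAllowedTool(toolName: string): Promise<void> {
  await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolName }), // assumed payload shape
  })
}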
67  apps/sim/lib/copilot/orchestrator/config.ts  Normal file
@@ -0,0 +1,67 @@
export const INTERRUPT_TOOL_NAMES = [
  'set_global_workflow_variables',
  'run_workflow',
  'run_workflow_until_block',
  'run_from_block',
  'run_block',
  'manage_mcp_tool',
  'manage_custom_tool',
  'deploy_mcp',
  'deploy_chat',
  'deploy_api',
  'create_workspace_mcp_server',
  'set_environment_variables',
  'make_api_request',
  'oauth_request_access',
  'navigate_ui',
  'knowledge_base',
  'generate_api_key',
] as const

export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)

export const SUBAGENT_TOOL_NAMES = [
  'debug',
  'edit',
  'build',
  'plan',
  'test',
  'deploy',
  'auth',
  'research',
  'knowledge',
  'custom_tool',
  'tour',
  'info',
  'workflow',
  'evaluate',
  'superagent',
  'discovery',
] as const

export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)

/**
 * Respond tools are internal to the copilot's subagent system.
 * They're used by subagents to signal completion and should NOT be executed by the sim side.
 * The copilot backend handles these internally.
 */
export const RESPOND_TOOL_NAMES = [
  'plan_respond',
  'edit_respond',
  'build_respond',
  'debug_respond',
  'info_respond',
  'research_respond',
  'deploy_respond',
  'superagent_respond',
  'discovery_respond',
  'tour_respond',
  'auth_respond',
  'workflow_respond',
  'knowledge_respond',
  'custom_tool_respond',
  'test_respond',
] as const

export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)
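The three name lists are frozen with as const and mirrored into Sets so the SSE handlers can classify an incoming tool name with a constant-time lookup. A small sketch, assuming only the sets defined above; integration tools are classified separately via isIntegrationTool in tool-executor, so the 'regular' bucket here is an approximation.

// Illustrative helper, not part of the diff.
type ToolClass = 'interrupt' | 'subagent' | 'respond' | 'regular'

function classifyToolName(toolName: string): ToolClass {
  if (INTERRUPT_TOOL_SET.has(toolName)) return 'interrupt' // needs user approval in interactive mode
  if (SUBAGENT_TOOL_SET.has(toolName)) return 'subagent' // executed by the copilot backend
  if (RESPOND_TOOL_SET.has(toolName)) return 'respond' // internal completion signal, never executed sim-side
  return 'regular'
}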
@@ -54,14 +54,14 @@ describe('sse-handlers tool lifecycle', () => {
     }
   })

-  it('executes copilot.tool.call and emits copilot.tool.result + mark-complete', async () => {
+  it('executes tool_call and emits tool_result + mark-complete', async () => {
     executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
     markToolComplete.mockResolvedValueOnce(true)
     const onEvent = vi.fn()

-    await sseHandlers['copilot.tool.call'](
+    await sseHandlers.tool_call(
       {
-        type: 'copilot.tool.call',
+        type: 'tool_call',
         data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
       } as any,
       context,
@@ -73,7 +73,7 @@ describe('sse-handlers tool lifecycle', () => {
     expect(markToolComplete).toHaveBeenCalledTimes(1)
     expect(onEvent).toHaveBeenCalledWith(
       expect.objectContaining({
-        type: 'copilot.tool.result',
+        type: 'tool_result',
         toolCallId: 'tool-1',
         success: true,
       })
@@ -84,17 +84,17 @@ describe('sse-handlers tool lifecycle', () => {
     expect(updated?.result?.output).toEqual({ ok: true })
   })

-  it('skips duplicate copilot.tool.call after result', async () => {
+  it('skips duplicate tool_call after result', async () => {
     executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
     markToolComplete.mockResolvedValueOnce(true)

     const event = {
-      type: 'copilot.tool.call',
+      type: 'tool_call',
       data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
     }

-    await sseHandlers['copilot.tool.call'](event as any, context, execContext, { interactive: false })
-    await sseHandlers['copilot.tool.call'](event as any, context, execContext, { interactive: false })
+    await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
+    await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })

     expect(executeToolServerSide).toHaveBeenCalledTimes(1)
     expect(markToolComplete).toHaveBeenCalledTimes(1)
@@ -1,12 +1,17 @@
 import { createLogger } from '@sim/logger'
 import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
+import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import {
   asRecord,
   getEventData,
   markToolResultSeen,
   wasToolResultSeen,
 } from '@/lib/copilot/orchestrator/sse-utils'
-import { markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
+import {
+  isIntegrationTool,
+  isToolAvailableOnSimSide,
+  markToolComplete,
+} from '@/lib/copilot/orchestrator/tool-executor'
 import type {
   ContentBlock,
   ExecutionContext,
@@ -17,6 +22,7 @@ import type {
 } from '@/lib/copilot/orchestrator/types'
 import {
   executeToolAndReport,
+  isInterruptToolName,
   waitForToolCompletion,
   waitForToolDecision,
 } from './tool-execution'
@@ -29,208 +35,12 @@ const logger = createLogger('CopilotSseHandlers')
  * execution to the browser client instead of running executeWorkflow directly.
  */
 const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
-  'workflow_run',
+  'run_workflow',
+  'run_workflow_until_block',
+  'run_from_block',
+  'run_block',
 ])

-function mapServerStateToToolStatus(state: unknown): ToolCallState['status'] {
-  switch (String(state || '')) {
-    case 'generating':
-    case 'pending':
-    case 'awaiting_approval':
-      return 'pending'
-    case 'executing':
-      return 'executing'
-    case 'success':
-      return 'success'
-    case 'rejected':
-    case 'skipped':
-      return 'rejected'
-    case 'aborted':
-      return 'skipped'
-    case 'error':
-    case 'failed':
-      return 'error'
-    default:
-      return 'pending'
-  }
-}
-
-function getExecutionTarget(
-  toolData: Record<string, unknown>,
-  toolName: string
-): { target: string; capabilityId?: string } {
-  const execution = asRecord(toolData.execution)
-  if (typeof execution.target === 'string' && execution.target.length > 0) {
-    return {
-      target: execution.target,
-      capabilityId:
-        typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
-    }
-  }
-
-  // Fallback only when metadata is missing.
-  if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
-    return { target: 'sim_client_capability', capabilityId: 'workflow.run' }
-  }
-  return { target: 'sim_server' }
-}
-
-function needsApproval(toolData: Record<string, unknown>): boolean {
-  const ui = asRecord(toolData.ui)
-  return ui.showInterrupt === true
-}
-
-async function waitForClientCapabilityAndReport(
-  toolCall: ToolCallState,
-  options: OrchestratorOptions,
-  logScope: string
-): Promise<void> {
-  toolCall.status = 'executing'
-  const completion = await waitForToolCompletion(
-    toolCall.id,
-    options.timeout || STREAM_TIMEOUT_MS,
-    options.abortSignal
-  )
-
-  if (completion?.status === 'background') {
-    toolCall.status = 'skipped'
-    toolCall.endTime = Date.now()
-    markToolComplete(
-      toolCall.id,
-      toolCall.name,
-      202,
-      completion.message || 'Tool execution moved to background',
-      { background: true }
-    ).catch((err) => {
-      logger.error(`markToolComplete fire-and-forget failed (${logScope} background)`, {
-        toolCallId: toolCall.id,
-        error: err instanceof Error ? err.message : String(err),
-      })
-    })
-    markToolResultSeen(toolCall.id)
-    return
-  }
-
-  if (completion?.status === 'rejected') {
-    toolCall.status = 'rejected'
-    toolCall.endTime = Date.now()
-    markToolComplete(toolCall.id, toolCall.name, 400, completion.message || 'Tool execution rejected')
-      .catch((err) => {
-        logger.error(`markToolComplete fire-and-forget failed (${logScope} rejected)`, {
-          toolCallId: toolCall.id,
-          error: err instanceof Error ? err.message : String(err),
-        })
-      })
-    markToolResultSeen(toolCall.id)
-    return
-  }
-
-  const success = completion?.status === 'success'
-  toolCall.status = success ? 'success' : 'error'
-  toolCall.endTime = Date.now()
-  const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
-  markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
-    logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
-      toolCallId: toolCall.id,
-      toolName: toolCall.name,
-      error: err instanceof Error ? err.message : String(err),
-    })
-  })
-  markToolResultSeen(toolCall.id)
-}
-
-function markToolCallAndNotify(
-  toolCall: ToolCallState,
-  statusCode: number,
-  message: string,
-  data: Record<string, unknown> | undefined,
-  logScope: string
-): void {
-  markToolComplete(toolCall.id, toolCall.name, statusCode, message, data).catch((err) => {
-    logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
-      toolCallId: toolCall.id,
-      error: err instanceof Error ? err.message : String(err),
-    })
-  })
-  markToolResultSeen(toolCall.id)
-}
-
-async function executeToolCallWithPolicy(
-  toolCall: ToolCallState,
-  toolName: string,
-  toolData: Record<string, unknown>,
-  context: StreamingContext,
-  execContext: ExecutionContext,
-  options: OrchestratorOptions,
-  logScope: string
-): Promise<void> {
-  const execution = getExecutionTarget(toolData, toolName)
-  const isInteractive = options.interactive === true
-  const requiresApproval = isInteractive && needsApproval(toolData)
-
-  if (toolData.state) {
-    toolCall.status = mapServerStateToToolStatus(toolData.state)
-  }
-
-  if (requiresApproval) {
-    const decision = await waitForToolDecision(
-      toolCall.id,
-      options.timeout || STREAM_TIMEOUT_MS,
-      options.abortSignal
-    )
-
-    if (decision?.status === 'accepted' || decision?.status === 'success') {
-      // Continue below into normal execution path.
-    } else if (decision?.status === 'rejected' || decision?.status === 'error') {
-      toolCall.status = 'rejected'
-      toolCall.endTime = Date.now()
-      markToolCallAndNotify(
-        toolCall,
-        400,
-        decision.message || 'Tool execution rejected',
-        { skipped: true, reason: 'user_rejected' },
-        `${logScope} rejected`
-      )
-      return
-    } else if (decision?.status === 'background') {
-      toolCall.status = 'skipped'
-      toolCall.endTime = Date.now()
-      markToolCallAndNotify(
-        toolCall,
-        202,
-        decision.message || 'Tool execution moved to background',
-        { background: true },
-        `${logScope} background`
-      )
-      return
-    } else {
-      // Decision was null (timeout/abort).
-      toolCall.status = 'rejected'
-      toolCall.endTime = Date.now()
-      markToolCallAndNotify(
-        toolCall,
-        408,
-        'Tool approval timed out',
-        { skipped: true, reason: 'timeout' },
-        `${logScope} timeout`
-      )
-      return
-    }
-  }
-
-  if (execution.target === 'sim_client_capability' && isInteractive) {
-    await waitForClientCapabilityAndReport(toolCall, options, logScope)
-    return
-  }
-
-  if (
-    (execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
-    options.autoExecuteTools !== false
-  ) {
-    await executeToolAndReport(toolCall.id, context, execContext, options)
-  }
-}
-
 // Normalization + dedupe helpers live in sse-utils to keep server/client in sync.

 function inferToolSuccess(data: Record<string, unknown> | undefined): {
@@ -266,7 +76,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     context.chatId = asRecord(event.data).chatId as string | undefined
   },
   title_updated: () => {},
-  'copilot.tool.result': (event, context) => {
+  tool_result: (event, context) => {
     const data = getEventData(event)
     const toolCallId = event.toolCallId || (data?.id as string | undefined)
     if (!toolCallId) return
@@ -275,11 +85,7 @@ export const sseHandlers: Record<string, SSEHandler> = {

     const { success, hasResultData, hasError } = inferToolSuccess(data)

-    current.status = data?.state
-      ? mapServerStateToToolStatus(data.state)
-      : success
-        ? 'success'
-        : 'error'
+    current.status = success ? 'success' : 'error'
     current.endTime = Date.now()
     if (hasResultData) {
       current.result = {
@@ -292,7 +98,35 @@ export const sseHandlers: Record<string, SSEHandler> = {
       current.error = (data?.error || resultObj.error) as string | undefined
     }
   },
-  'copilot.tool.call': async (event, context, execContext, options) => {
+  tool_error: (event, context) => {
+    const data = getEventData(event)
+    const toolCallId = event.toolCallId || (data?.id as string | undefined)
+    if (!toolCallId) return
+    const current = context.toolCalls.get(toolCallId)
+    if (!current) return
+    current.status = 'error'
+    current.error = (data?.error as string | undefined) || 'Tool execution failed'
+    current.endTime = Date.now()
+  },
+  tool_generating: (event, context) => {
+    const data = getEventData(event)
+    const toolCallId =
+      event.toolCallId ||
+      (data?.toolCallId as string | undefined) ||
+      (data?.id as string | undefined)
+    const toolName =
+      event.toolName || (data?.toolName as string | undefined) || (data?.name as string | undefined)
+    if (!toolCallId || !toolName) return
+    if (!context.toolCalls.has(toolCallId)) {
+      context.toolCalls.set(toolCallId, {
+        id: toolCallId,
+        name: toolName,
+        status: 'pending',
+        startTime: Date.now(),
+      })
+    }
+  },
+  tool_call: async (event, context, execContext, options) => {
     const toolData = getEventData(event) || ({} as Record<string, unknown>)
     const toolCallId = (toolData.id as string | undefined) || event.toolCallId
     const toolName = (toolData.name as string | undefined) || event.toolName
@@ -322,7 +156,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
       context.toolCalls.set(toolCallId, {
         id: toolCallId,
         name: toolName,
-        status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
+        status: 'pending',
         params: args,
         startTime: Date.now(),
       })
@@ -336,17 +170,149 @@ export const sseHandlers: Record<string, SSEHandler> = {
     const toolCall = context.toolCalls.get(toolCallId)
     if (!toolCall) return

-    await executeToolCallWithPolicy(
-      toolCall,
-      toolName,
-      toolData,
-      context,
-      execContext,
-      options,
-      'run tool'
-    )
+    // Subagent tools are executed by the copilot backend, not sim side.
+    if (SUBAGENT_TOOL_SET.has(toolName)) {
+      return
+    }
+
+    // Respond tools are internal to copilot's subagent system - skip execution.
+    // The copilot backend handles these internally to signal subagent completion.
+    if (RESPOND_TOOL_SET.has(toolName)) {
+      toolCall.status = 'success'
+      toolCall.endTime = Date.now()
+      toolCall.result = {
+        success: true,
+        output: 'Internal respond tool - handled by copilot backend',
+      }
+      return
+    }
+
+    const isInterruptTool = isInterruptToolName(toolName)
+    const isInteractive = options.interactive === true
+    // Integration tools (user-installed) also require approval in interactive mode
+    const needsApproval = isInterruptTool || isIntegrationTool(toolName)
+
+    if (needsApproval && isInteractive) {
+      const decision = await waitForToolDecision(
+        toolCallId,
+        options.timeout || STREAM_TIMEOUT_MS,
+        options.abortSignal
+      )
+      if (decision?.status === 'accepted' || decision?.status === 'success') {
+        // Client-executable run tools: defer execution to the browser client.
+        // The client calls executeWorkflowWithFullLogging for real-time feedback
+        // (block pulsing, logs, stop button) and reports completion via
+        // /api/copilot/confirm with status success/error. We poll Redis for
+        // that completion signal, then fire-and-forget markToolComplete to Go.
+        if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
+          toolCall.status = 'executing'
+          const completion = await waitForToolCompletion(
+            toolCallId,
+            options.timeout || STREAM_TIMEOUT_MS,
+            options.abortSignal
+          )
+          if (completion?.status === 'background') {
+            toolCall.status = 'skipped'
+            toolCall.endTime = Date.now()
+            markToolComplete(
+              toolCall.id,
+              toolCall.name,
+              202,
+              completion.message || 'Tool execution moved to background',
+              { background: true }
+            ).catch((err) => {
+              logger.error('markToolComplete fire-and-forget failed (run tool background)', {
+                toolCallId: toolCall.id,
+                error: err instanceof Error ? err.message : String(err),
+              })
+            })
+            markToolResultSeen(toolCallId)
+            return
+          }
+          const success = completion?.status === 'success'
+          toolCall.status = success ? 'success' : 'error'
+          toolCall.endTime = Date.now()
+          const msg =
+            completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
+          // Fire-and-forget: tell Go backend the tool is done
+          // (must NOT await — see deadlock note in executeToolAndReport)
+          markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
+            logger.error('markToolComplete fire-and-forget failed (run tool)', {
+              toolCallId: toolCall.id,
+              toolName: toolCall.name,
+              error: err instanceof Error ? err.message : String(err),
+            })
+          })
+          markToolResultSeen(toolCallId)
+          return
+        }
+        await executeToolAndReport(toolCallId, context, execContext, options)
+        return
+      }
+
+      if (decision?.status === 'rejected' || decision?.status === 'error') {
+        toolCall.status = 'rejected'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          400,
+          decision.message || 'Tool execution rejected',
+          { skipped: true, reason: 'user_rejected' }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (rejected)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+
+      if (decision?.status === 'background') {
+        toolCall.status = 'skipped'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          202,
+          decision.message || 'Tool execution moved to background',
+          { background: true }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (background)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+
+      // Decision was null — timed out or aborted.
+      // Do NOT fall through to auto-execute. Mark the tool as timed out
+      // and notify Go so it can unblock waitForExternalTool.
+      toolCall.status = 'rejected'
+      toolCall.endTime = Date.now()
+      markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
+        skipped: true,
+        reason: 'timeout',
+      }).catch((err) => {
+        logger.error('markToolComplete fire-and-forget failed (timeout)', {
+          toolCallId: toolCall.id,
+          error: err instanceof Error ? err.message : String(err),
+        })
+      })
+      markToolResultSeen(toolCall.id)
+      return
+    }
+
+    if (options.autoExecuteTools !== false) {
+      await executeToolAndReport(toolCallId, context, execContext, options)
+    }
   },
-  'copilot.phase.progress': (event, context) => {
+  reasoning: (event, context) => {
     const d = asRecord(event.data)
     const phase = d.phase || asRecord(d.data).phase
     if (phase === 'start') {
@@ -370,7 +336,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     if (!chunk || !context.currentThinkingBlock) return
     context.currentThinkingBlock.content = `${context.currentThinkingBlock.content || ''}${chunk}`
   },
-  'copilot.content': (event, context) => {
+  content: (event, context) => {
     // Go backend sends content as a plain string in event.data, not wrapped in an object.
     let chunk: string | undefined
     if (typeof event.data === 'string') {
@@ -383,20 +349,20 @@ export const sseHandlers: Record<string, SSEHandler> = {
     context.accumulatedContent += chunk
     addContentBlock(context, { type: 'text', content: chunk })
   },
-  'copilot.phase.completed': (event, context) => {
+  done: (event, context) => {
     const d = asRecord(event.data)
     if (d.responseId) {
       context.conversationId = d.responseId as string
     }
     context.streamComplete = true
   },
-  'copilot.phase.started': (event, context) => {
+  start: (event, context) => {
     const d = asRecord(event.data)
     if (d.responseId) {
      context.conversationId = d.responseId as string
     }
   },
-  'copilot.error': (event, context) => {
+  error: (event, context) => {
     const d = asRecord(event.data)
     const message = (d.message || d.error || event.error) as string | undefined
     if (message) {
@@ -407,7 +373,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
 }

 export const subAgentHandlers: Record<string, SSEHandler> = {
-  'copilot.content': (event, context) => {
+  content: (event, context) => {
     const parentToolCallId = context.subAgentParentToolCallId
     if (!parentToolCallId || !event.data) return
     // Go backend sends content as a plain string in event.data
@@ -423,7 +389,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
       (context.subAgentContent[parentToolCallId] || '') + chunk
     addContentBlock(context, { type: 'subagent_text', content: chunk })
   },
-  'copilot.tool.call': async (event, context, execContext, options) => {
+  tool_call: async (event, context, execContext, options) => {
     const parentToolCallId = context.subAgentParentToolCallId
     if (!parentToolCallId) return
     const toolData = getEventData(event) || ({} as Record<string, unknown>)
@@ -444,7 +410,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
     const toolCall: ToolCallState = {
       id: toolCallId,
       name: toolName,
-      status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
+      status: 'pending',
       params: args,
       startTime: Date.now(),
     }
@@ -462,17 +428,159 @@ export const subAgentHandlers: Record<string, SSEHandler> = {

     if (isPartial) return

-    await executeToolCallWithPolicy(
-      toolCall,
-      toolName,
-      toolData,
-      context,
-      execContext,
-      options,
-      'subagent run tool'
-    )
+    // Respond tools are internal to copilot's subagent system - skip execution.
+    if (RESPOND_TOOL_SET.has(toolName)) {
+      toolCall.status = 'success'
+      toolCall.endTime = Date.now()
+      toolCall.result = {
+        success: true,
+        output: 'Internal respond tool - handled by copilot backend',
+      }
+      return
+    }
+
+    // Tools that only exist on the Go backend (e.g. search_patterns,
+    // search_errors, remember_debug) should NOT be re-executed on the Sim side.
+    // The Go backend already executed them and will send its own tool_result
+    // SSE event with the real outcome. Trying to execute them here would fail
+    // with "Tool not found" and incorrectly mark the tool as failed.
+    if (!isToolAvailableOnSimSide(toolName)) {
+      return
+    }
+
+    // Interrupt tools and integration tools (user-installed) require approval
+    // in interactive mode, same as top-level handler.
+    const needsSubagentApproval = isInterruptToolName(toolName) || isIntegrationTool(toolName)
+    if (options.interactive === true && needsSubagentApproval) {
+      const decision = await waitForToolDecision(
+        toolCallId,
+        options.timeout || STREAM_TIMEOUT_MS,
+        options.abortSignal
+      )
+      if (decision?.status === 'accepted' || decision?.status === 'success') {
+        await executeToolAndReport(toolCallId, context, execContext, options)
+        return
+      }
+      if (decision?.status === 'rejected' || decision?.status === 'error') {
+        toolCall.status = 'rejected'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          400,
+          decision.message || 'Tool execution rejected',
+          { skipped: true, reason: 'user_rejected' }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent rejected)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+      if (decision?.status === 'background') {
+        toolCall.status = 'skipped'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          202,
+          decision.message || 'Tool execution moved to background',
+          { background: true }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent background)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+
+      // Decision was null — timed out or aborted.
+      // Do NOT fall through to auto-execute.
+      toolCall.status = 'rejected'
+      toolCall.endTime = Date.now()
+      markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
+        skipped: true,
+        reason: 'timeout',
+      }).catch((err) => {
+        logger.error('markToolComplete fire-and-forget failed (subagent timeout)', {
+          toolCallId: toolCall.id,
+          error: err instanceof Error ? err.message : String(err),
+        })
+      })
+      markToolResultSeen(toolCall.id)
+      return
+    }
+
+    // Client-executable run tools in interactive mode: defer to client.
+    // Same pattern as main handler: wait for client completion, then tell Go.
+    if (options.interactive === true && CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
+      toolCall.status = 'executing'
+      const completion = await waitForToolCompletion(
+        toolCallId,
+        options.timeout || STREAM_TIMEOUT_MS,
+        options.abortSignal
+      )
+      if (completion?.status === 'rejected') {
+        toolCall.status = 'rejected'
+        toolCall.endTime = Date.now()
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          400,
+          completion.message || 'Tool execution rejected'
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent run tool rejected)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCallId)
+        return
+      }
+      if (completion?.status === 'background') {
+        toolCall.status = 'skipped'
+        toolCall.endTime = Date.now()
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          202,
+          completion.message || 'Tool execution moved to background',
+          { background: true }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent run tool background)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCallId)
+        return
+      }
+      const success = completion?.status === 'success'
+      toolCall.status = success ? 'success' : 'error'
+      toolCall.endTime = Date.now()
+      const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
+      markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
+        logger.error('markToolComplete fire-and-forget failed (subagent run tool)', {
+          toolCallId: toolCall.id,
+          toolName: toolCall.name,
+          error: err instanceof Error ? err.message : String(err),
+        })
+      })
+      markToolResultSeen(toolCallId)
+      return
+    }
+
+    if (options.autoExecuteTools !== false) {
+      await executeToolAndReport(toolCallId, context, execContext, options)
+    }
   },
-  'copilot.tool.result': (event, context) => {
+  tool_result: (event, context) => {
     const parentToolCallId = context.subAgentParentToolCallId
     if (!parentToolCallId) return
     const data = getEventData(event)
@@ -488,7 +596,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {

     const { success, hasResultData, hasError } = inferToolSuccess(data)

-    const status = data?.state ? mapServerStateToToolStatus(data.state) : success ? 'success' : 'error'
+    const status = success ? 'success' : 'error'
     const endTime = Date.now()
     const result = hasResultData ? { success, output: data?.result || data?.data } : undefined

@@ -512,22 +620,8 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
       }
     }
   },
-  'copilot.phase.progress': () => {
-    // Subagent reasoning chunks are surfaced via copilot.content.
-  },
-  'copilot.phase.completed': () => {},
 }

-sseHandlers['copilot.tool.interrupt_required'] = sseHandlers['copilot.tool.call']
-sseHandlers['copilot.workflow.patch'] = sseHandlers['copilot.tool.result']
-sseHandlers['copilot.workflow.verify'] = sseHandlers['copilot.tool.result']
-sseHandlers['copilot.tool.interrupt_resolved'] = sseHandlers['copilot.tool.result']
-
-subAgentHandlers['copilot.tool.interrupt_required'] = subAgentHandlers['copilot.tool.call']
-subAgentHandlers['copilot.workflow.patch'] = subAgentHandlers['copilot.tool.result']
-subAgentHandlers['copilot.workflow.verify'] = subAgentHandlers['copilot.tool.result']
-subAgentHandlers['copilot.tool.interrupt_resolved'] = subAgentHandlers['copilot.tool.result']
-
 export function handleSubagentRouting(event: SSEEvent, context: StreamingContext): boolean {
   if (!event.subagent) return false
   if (!context.subAgentParentToolCallId) {
@@ -4,6 +4,7 @@ import {
   TOOL_DECISION_MAX_POLL_MS,
   TOOL_DECISION_POLL_BACKOFF,
 } from '@/lib/copilot/constants'
+import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
 import {
   asRecord,
@@ -20,6 +21,10 @@ import type {

 const logger = createLogger('CopilotSseToolExecution')

+export function isInterruptToolName(toolName: string): boolean {
+  return INTERRUPT_TOOL_SET.has(toolName)
+}
+
 export async function executeToolAndReport(
   toolCallId: string,
   context: StreamingContext,
@@ -29,11 +34,9 @@ export async function executeToolAndReport(
   const toolCall = context.toolCalls.get(toolCallId)
   if (!toolCall) return

-  const lockable = toolCall as typeof toolCall & { __simExecuting?: boolean }
-  if (lockable.__simExecuting) return
+  if (toolCall.status === 'executing') return
   if (wasToolResultSeen(toolCall.id)) return

-  lockable.__simExecuting = true
   toolCall.status = 'executing'
   try {
     const result = await executeToolServerSide(toolCall, execContext)
@@ -80,7 +83,7 @@ export async function executeToolAndReport(
     })

     const resultEvent: SSEEvent = {
-      type: 'copilot.tool.result',
+      type: 'tool_result',
       toolCallId: toolCall.id,
       toolName: toolCall.name,
       success: result.success,
@@ -88,8 +91,6 @@ export async function executeToolAndReport(
       data: {
         id: toolCall.id,
         name: toolCall.name,
-        phase: 'completed',
-        state: result.success ? 'success' : 'error',
         success: result.success,
         result: result.output,
       },
@@ -112,22 +113,15 @@ export async function executeToolAndReport(
     })

     const errorEvent: SSEEvent = {
-      type: 'copilot.tool.result',
+      type: 'tool_error',
       toolCallId: toolCall.id,
-      toolName: toolCall.name,
-      success: false,
       data: {
         id: toolCall.id,
         name: toolCall.name,
-        phase: 'completed',
-        state: 'error',
-        success: false,
         error: toolCall.error,
       },
     }
     await options?.onEvent?.(errorEvent)
-  } finally {
-    delete lockable.__simExecuting
   }
 }
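The handlers above repeat one pattern: markToolComplete is called without awaiting (per the deadlock note in executeToolAndReport) and the tool call is immediately marked as seen. A sketch of that pattern pulled into a standalone helper; the helper name is an assumption, not something this diff introduces.

// Illustrative helper; markToolComplete, markToolResultSeen, and logger come from the code above.
function notifyToolCompleteInBackground(
  toolCallId: string,
  toolName: string,
  statusCode: number,
  message: string
): void {
  // Deliberately not awaited: awaiting here would block the SSE loop on the Go backend's ack.
  markToolComplete(toolCallId, toolName, statusCode, message).catch((err) => {
    logger.error('markToolComplete fire-and-forget failed', {
      toolCallId,
      error: err instanceof Error ? err.message : String(err),
    })
  })
  markToolResultSeen(toolCallId)
}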
@@ -11,10 +11,10 @@ import {
|
|||||||
describe('sse-utils', () => {
|
describe('sse-utils', () => {
|
||||||
it.concurrent('normalizes tool fields from string data', () => {
|
it.concurrent('normalizes tool fields from string data', () => {
|
||||||
const event = {
|
const event = {
|
||||||
type: 'copilot.tool.result',
|
type: 'tool_result',
|
||||||
data: JSON.stringify({
|
data: JSON.stringify({
|
||||||
id: 'tool_1',
|
id: 'tool_1',
|
||||||
name: 'workflow_change',
|
name: 'edit_workflow',
|
||||||
success: true,
|
success: true,
|
||||||
result: { ok: true },
|
result: { ok: true },
|
||||||
}),
|
}),
|
||||||
@@ -22,62 +22,21 @@ describe('sse-utils', () => {
|
|||||||
|
|
||||||
const normalized = normalizeSseEvent(event as any)
|
const normalized = normalizeSseEvent(event as any)
|
||||||
|
|
||||||
expect(normalized.type).toBe('copilot.tool.result')
|
|
||||||
expect(normalized.toolCallId).toBe('tool_1')
|
expect(normalized.toolCallId).toBe('tool_1')
|
||||||
expect(normalized.toolName).toBe('workflow_change')
|
expect(normalized.toolName).toBe('edit_workflow')
|
||||||
expect(normalized.success).toBe(true)
|
expect(normalized.success).toBe(true)
|
||||||
expect(normalized.result).toEqual({ ok: true })
|
expect(normalized.result).toEqual({ ok: true })
|
||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('maps copilot tool event aliases and preserves tool metadata', () => {
|
it.concurrent('dedupes tool_call events', () => {
|
||||||
const event = {
|
const event = { type: 'tool_call', data: { id: 'tool_call_1', name: 'plan' } }
|
||||||
type: 'copilot.tool.interrupt_required',
|
|
||||||
data: {
|
|
||||||
id: 'tool_legacy_1',
|
|
||||||
name: 'workflow_run',
|
|
||||||
state: 'pending',
|
|
||||||
ui: { showInterrupt: true },
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
const normalized = normalizeSseEvent(event as any)
|
|
||||||
|
|
||||||
expect(normalized.type).toBe('copilot.tool.interrupt_required')
|
|
||||||
expect(normalized.toolCallId).toBe('tool_legacy_1')
|
|
||||||
expect(normalized.toolName).toBe('workflow_run')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('keeps copilot content event type when payload is plain string', () => {
|
|
||||||
const event = {
|
|
||||||
type: 'copilot.content',
|
|
||||||
data: 'hello world',
|
|
||||||
}
|
|
||||||
|
|
||||||
const normalized = normalizeSseEvent(event as any)
|
|
||||||
|
|
||||||
expect(normalized.type).toBe('copilot.content')
|
|
||||||
expect(normalized.data).toBe('hello world')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('dedupes copilot tool call events', () => {
|
|
||||||
const event = { type: 'copilot.tool.call', data: { id: 'tool_call_1', name: 'plan' } }
|
|
||||||
expect(shouldSkipToolCallEvent(event as any)).toBe(false)
|
expect(shouldSkipToolCallEvent(event as any)).toBe(false)
|
||||||
expect(shouldSkipToolCallEvent(event as any)).toBe(true)
|
expect(shouldSkipToolCallEvent(event as any)).toBe(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('dedupes copilot tool result events', () => {
|
it.concurrent('dedupes tool_result events', () => {
|
||||||
const event = { type: 'copilot.tool.result', data: { id: 'tool_result_1', name: 'plan' } }
|
const event = { type: 'tool_result', data: { id: 'tool_result_1', name: 'plan' } }
|
||||||
expect(shouldSkipToolResultEvent(event as any)).toBe(false)
|
expect(shouldSkipToolResultEvent(event as any)).toBe(false)
|
||||||
expect(shouldSkipToolResultEvent(event as any)).toBe(true)
|
expect(shouldSkipToolResultEvent(event as any)).toBe(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('dedupes copilot workflow patch result events', () => {
|
|
||||||
const normalized = normalizeSseEvent({
|
|
||||||
type: 'copilot.workflow.patch',
|
|
||||||
data: { id: 'tool_result_aliased_1', name: 'workflow_change' },
|
|
||||||
} as any)
|
|
||||||
|
|
||||||
expect(shouldSkipToolResultEvent(normalized as any)).toBe(false)
|
|
||||||
expect(shouldSkipToolResultEvent(normalized as any)).toBe(true)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
@@ -101,21 +101,8 @@ export function wasToolResultSeen(toolCallId: string): boolean {
    return seenToolResults.has(toolCallId)
  }

- function isToolCallEventType(type: string): boolean {
-   return type === 'copilot.tool.call' || type === 'copilot.tool.interrupt_required'
- }
-
- function isToolResultEventType(type: string): boolean {
-   return (
-     type === 'copilot.tool.result' ||
-     type === 'copilot.workflow.patch' ||
-     type === 'copilot.workflow.verify' ||
-     type === 'copilot.tool.interrupt_resolved'
-   )
- }
-
  export function shouldSkipToolCallEvent(event: SSEEvent): boolean {
-   if (!isToolCallEventType(String(event.type || ''))) return false
+   if (event.type !== 'tool_call') return false
    const toolCallId = getToolCallIdFromEvent(event)
    if (!toolCallId) return false
    const eventData = getEventData(event)
@@ -128,7 +115,7 @@ export function shouldSkipToolCallEvent(event: SSEEvent): boolean {
  }

  export function shouldSkipToolResultEvent(event: SSEEvent): boolean {
-   if (!isToolResultEventType(String(event.type || ''))) return false
+   if (event.type !== 'tool_result') return false
    const toolCallId = getToolCallIdFromEvent(event)
    if (!toolCallId) return false
    if (wasToolResultSeen(toolCallId)) return true
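For context, a minimal consumer sketch (stream source and dispatch step are hypothetical; the helper names come from the hunk above) showing how these dedupe guards are typically applied while reading normalized SSE events:

```ts
import {
  normalizeSseEvent,
  shouldSkipToolCallEvent,
  shouldSkipToolResultEvent,
} from './sse-utils' // import path assumed for illustration

// Hypothetical consumer: drop duplicate tool_call / tool_result events
// that may be replayed after a stream resume.
export async function consumeEvents(events: AsyncIterable<unknown>) {
  for await (const raw of events) {
    const event = normalizeSseEvent(raw as any)
    if (event.type === 'tool_call' && shouldSkipToolCallEvent(event)) continue
    if (event.type === 'tool_result' && shouldSkipToolResultEvent(event)) continue
    // ...dispatch the event to the UI or store here
  }
}
```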
@@ -97,8 +97,8 @@ describe('stream-buffer', () => {
    })

    it.concurrent('replays events after a given event id', async () => {
-     await appendStreamEvent('stream-1', { type: 'copilot.content', data: 'hello' })
+     await appendStreamEvent('stream-1', { type: 'content', data: 'hello' })
-     await appendStreamEvent('stream-1', { type: 'copilot.content', data: 'world' })
+     await appendStreamEvent('stream-1', { type: 'content', data: 'world' })

      const allEvents = await readStreamEvents('stream-1', 0)
      expect(allEvents.map((entry) => entry.event.data)).toEqual(['hello', 'world'])
@@ -109,8 +109,8 @@ describe('stream-buffer', () => {

    it.concurrent('flushes buffered events for resume', async () => {
      const writer = createStreamEventWriter('stream-2')
-     await writer.write({ type: 'copilot.content', data: 'a' })
+     await writer.write({ type: 'content', data: 'a' })
-     await writer.write({ type: 'copilot.content', data: 'b' })
+     await writer.write({ type: 'content', data: 'b' })
      await writer.flush()

      const events = await readStreamEvents('stream-2', 0)
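As an orientation aid, a hedged sketch of the resume flow these tests exercise (the transport step and the exact replay semantics are assumptions; only the helper names are taken from the tests above):

```ts
import { createStreamEventWriter, readStreamEvents } from './stream-buffer' // path assumed

// Hypothetical resume: replay buffered events after the client's last-seen
// event id, then continue writing new events through the buffered writer.
async function resumeStream(streamId: string, lastEventId: number) {
  const missed = await readStreamEvents(streamId, lastEventId)
  for (const entry of missed) {
    // send entry.event to the client (transport omitted)
  }
  const writer = createStreamEventWriter(streamId)
  await writer.write({ type: 'content', data: 'resumed' })
  await writer.flush()
}
```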
@@ -127,7 +127,7 @@ export async function runStreamLoop(
    }

    // Standard subagent start/end handling.
-   if (normalizedEvent.type === 'copilot.subagent.started') {
+   if (normalizedEvent.type === 'subagent_start') {
      const eventData = normalizedEvent.data as Record<string, unknown> | undefined
      const toolCallId = eventData?.tool_call_id as string | undefined
      if (toolCallId) {
@@ -138,7 +138,7 @@ export async function runStreamLoop(
      continue
    }

-   if (normalizedEvent.type === 'copilot.subagent.completed') {
+   if (normalizedEvent.type === 'subagent_end') {
      context.subAgentParentToolCallId = undefined
      continue
    }
@@ -74,7 +74,7 @@ export async function orchestrateSubagentStream(
    }

    // For direct subagent calls, events may have the subagent field set
-   // but no copilot.subagent.started because this IS the top-level agent.
+   // but no subagent_start because this IS the top-level agent.
    // Skip subagent routing for events where the subagent field matches
    // the current agentId - these are top-level events.
    if (event.subagent === agentId && !ctx.subAgentParentToolCallId) {
@@ -220,8 +220,7 @@ export async function executeDeployMcp(
    if (!workflowRecord.isDeployed) {
      return {
        success: false,
-       error:
-         'Workflow must be deployed before adding as an MCP tool. Use workflow_deploy(mode: "api") first.',
+       error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
      }
    }

@@ -50,8 +50,6 @@ import type {
    RunWorkflowParams,
    RunWorkflowUntilBlockParams,
    SetGlobalWorkflowVariablesParams,
-   WorkflowDeployParams,
-   WorkflowRunParams,
  } from './param-types'
  import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
  import {
@@ -320,91 +318,13 @@ async function executeManageCustomTool(
    }
  }

- async function executeWorkflowRunUnified(
-   rawParams: Record<string, unknown>,
-   context: ExecutionContext
- ): Promise<ToolCallResult> {
-   const params = rawParams as WorkflowRunParams
-   const mode = params.mode || 'full'
-
-   switch (mode) {
-     case 'full':
-       return executeRunWorkflow(params as RunWorkflowParams, context)
-     case 'until_block':
-       if (!params.stopAfterBlockId) {
-         return { success: false, error: 'stopAfterBlockId is required for mode=until_block' }
-       }
-       return executeRunWorkflowUntilBlock(params as RunWorkflowUntilBlockParams, context)
-     case 'from_block':
-       if (!params.startBlockId) {
-         return { success: false, error: 'startBlockId is required for mode=from_block' }
-       }
-       return executeRunFromBlock(params as RunFromBlockParams, context)
-     case 'block':
-       if (!params.blockId) {
-         return { success: false, error: 'blockId is required for mode=block' }
-       }
-       return executeRunBlock(params as RunBlockParams, context)
-     default:
-       return {
-         success: false,
-         error: `Unsupported workflow_run mode: ${String(mode)}`,
-       }
-   }
- }
-
- async function executeWorkflowDeployUnified(
-   rawParams: Record<string, unknown>,
-   context: ExecutionContext
- ): Promise<ToolCallResult> {
-   const params = rawParams as unknown as WorkflowDeployParams
-   const mode = params.mode
-
-   if (!mode) {
-     return { success: false, error: 'mode is required for workflow_deploy' }
-   }
-
-   const scopedContext =
-     params.workflowId && params.workflowId !== context.workflowId
-       ? { ...context, workflowId: params.workflowId }
-       : context
-
-   switch (mode) {
-     case 'status':
-       return executeCheckDeploymentStatus(params as CheckDeploymentStatusParams, scopedContext)
-     case 'redeploy':
-       return executeRedeploy(scopedContext)
-     case 'api':
-       return executeDeployApi(params as DeployApiParams, scopedContext)
-     case 'chat':
-       return executeDeployChat(params as DeployChatParams, scopedContext)
-     case 'mcp':
-       return executeDeployMcp(params as DeployMcpParams, scopedContext)
-     case 'list_mcp_servers':
-       return executeListWorkspaceMcpServers(params as ListWorkspaceMcpServersParams, scopedContext)
-     case 'create_mcp_server':
-       return executeCreateWorkspaceMcpServer(
-         params as CreateWorkspaceMcpServerParams,
-         scopedContext
-       )
-     default:
-       return {
-         success: false,
-         error: `Unsupported workflow_deploy mode: ${String(mode)}`,
-       }
-   }
- }
-
  const SERVER_TOOLS = new Set<string>([
    'get_blocks_and_tools',
    'get_blocks_metadata',
    'get_block_options',
    'get_block_config',
    'get_trigger_blocks',
-   'workflow_context_get',
+   'edit_workflow',
-   'workflow_context_expand',
-   'workflow_change',
-   'workflow_verify',
    'get_workflow_console',
    'search_documentation',
    'search_online',
@@ -432,7 +352,11 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
    get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
    get_block_upstream_references: (p, c) =>
      executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
-   workflow_run: (p, c) => executeWorkflowRunUnified(p, c),
+   run_workflow: (p, c) => executeRunWorkflow(p as RunWorkflowParams, c),
+   run_workflow_until_block: (p, c) =>
+     executeRunWorkflowUntilBlock(p as unknown as RunWorkflowUntilBlockParams, c),
+   run_from_block: (p, c) => executeRunFromBlock(p as unknown as RunFromBlockParams, c),
+   run_block: (p, c) => executeRunBlock(p as unknown as RunBlockParams, c),
    get_deployed_workflow_state: (p, c) =>
      executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
    generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
@@ -443,7 +367,10 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
    }),
    set_global_workflow_variables: (p, c) =>
      executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
-   workflow_deploy: (p, c) => executeWorkflowDeployUnified(p, c),
+   deploy_api: (p, c) => executeDeployApi(p as DeployApiParams, c),
+   deploy_chat: (p, c) => executeDeployChat(p as DeployChatParams, c),
+   deploy_mcp: (p, c) => executeDeployMcp(p as DeployMcpParams, c),
+   redeploy: (_p, c) => executeRedeploy(c),
    check_deployment_status: (p, c) =>
      executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
    list_workspace_mcp_servers: (p, c) =>
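A minimal dispatch sketch over the handler map shown above (the dispatcher itself is hypothetical; only the map name and the (params, context) handler shape are taken from the hunk):

```ts
// Hypothetical dispatcher: look up a tool by name and delegate to its handler.
async function dispatchWorkflowTool(
  name: string,
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const handler = SIM_WORKFLOW_TOOL_HANDLERS[name]
  if (!handler) {
    return { success: false, error: `Unknown tool: ${name}` }
  }
  return handler(params, context)
}
```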
@@ -93,18 +93,6 @@ export interface RunBlockParams {
    useDeployedState?: boolean
  }

- export interface WorkflowRunParams {
-   mode?: 'full' | 'until_block' | 'from_block' | 'block'
-   workflowId?: string
-   workflow_input?: unknown
-   input?: unknown
-   useDeployedState?: boolean
-   stopAfterBlockId?: string
-   startBlockId?: string
-   blockId?: string
-   executionId?: string
- }
-
  export interface GetDeployedWorkflowStateParams {
    workflowId?: string
  }
@@ -181,39 +169,6 @@ export interface CreateWorkspaceMcpServerParams {
    workflowIds?: string[]
  }

- export interface WorkflowDeployParams {
-   mode:
-     | 'status'
-     | 'redeploy'
-     | 'api'
-     | 'chat'
-     | 'mcp'
-     | 'list_mcp_servers'
-     | 'create_mcp_server'
-   workflowId?: string
-   action?: 'deploy' | 'undeploy'
-   identifier?: string
-   title?: string
-   description?: string
-   customizations?: {
-     primaryColor?: string
-     secondaryColor?: string
-     welcomeMessage?: string
-     iconUrl?: string
-   }
-   authType?: 'none' | 'password' | 'public' | 'email' | 'sso'
-   password?: string
-   allowedEmails?: string[]
-   outputConfigs?: unknown[]
-   serverId?: string
-   toolName?: string
-   toolDescription?: string
-   parameterSchema?: Record<string, unknown>
-   name?: string
-   isPublic?: boolean
-   workflowIds?: string[]
- }
-
  // === Workflow Organization Params ===

  export interface RenameWorkflowParams {
@@ -1,22 +1,19 @@
  export type SSEEventType =
    | 'chat_id'
    | 'title_updated'
+   | 'content'
+   | 'reasoning'
+   | 'tool_call'
+   | 'tool_generating'
+   | 'tool_result'
+   | 'tool_error'
+   | 'subagent_start'
+   | 'subagent_end'
    | 'structured_result'
    | 'subagent_result'
-   | 'stream_end'
+   | 'done'
-   | 'copilot.phase.started'
+   | 'error'
-   | 'copilot.phase.progress'
+   | 'start'
-   | 'copilot.phase.completed'
-   | 'copilot.tool.call'
-   | 'copilot.tool.result'
-   | 'copilot.tool.interrupt_required'
-   | 'copilot.tool.interrupt_resolved'
-   | 'copilot.workflow.patch'
-   | 'copilot.workflow.verify'
-   | 'copilot.subagent.started'
-   | 'copilot.subagent.completed'
-   | 'copilot.content'
-   | 'copilot.error'

  export interface SSEEvent {
    type: SSEEventType
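For illustration, a small sketch (the consumer function is hypothetical; the event names are the ones in the flattened union above) of switching over the simplified SSEEventType:

```ts
// Hypothetical consumer switching on the simplified event names.
function describeEvent(event: SSEEvent): string {
  switch (event.type) {
    case 'tool_call':
      return 'tool call started'
    case 'tool_result':
      return 'tool call finished'
    case 'subagent_start':
    case 'subagent_end':
      return 'subagent boundary'
    case 'done':
      return 'stream finished'
    default:
      return event.type
  }
}
```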
@@ -592,40 +592,16 @@ const META_edit: ToolMetadata = {
    },
  }

- const META_workflow_change: ToolMetadata = {
+ const META_edit_workflow: ToolMetadata = {
    displayNames: {
-     [ClientToolCallState.generating]: { text: 'Planning workflow changes', icon: Loader2 },
+     [ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
-     [ClientToolCallState.executing]: { text: 'Applying workflow changes', icon: Loader2 },
+     [ClientToolCallState.executing]: { text: 'Editing your workflow', icon: Loader2 },
-     [ClientToolCallState.success]: { text: 'Updated your workflow', icon: Grid2x2Check },
+     [ClientToolCallState.success]: { text: 'Edited your workflow', icon: Grid2x2Check },
-     [ClientToolCallState.error]: { text: 'Failed to update your workflow', icon: XCircle },
+     [ClientToolCallState.error]: { text: 'Failed to edit your workflow', icon: XCircle },
      [ClientToolCallState.review]: { text: 'Review your workflow changes', icon: Grid2x2 },
      [ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
-     [ClientToolCallState.aborted]: { text: 'Aborted workflow changes', icon: MinusCircle },
+     [ClientToolCallState.aborted]: { text: 'Aborted editing your workflow', icon: MinusCircle },
-     [ClientToolCallState.pending]: { text: 'Planning workflow changes', icon: Loader2 },
+     [ClientToolCallState.pending]: { text: 'Editing your workflow', icon: Loader2 },
-   },
-   getDynamicText: (params, state) => {
-     const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-     if (mode === 'dry_run') {
-       switch (state) {
-         case ClientToolCallState.success:
-           return 'Planned workflow changes'
-         case ClientToolCallState.executing:
-         case ClientToolCallState.generating:
-         case ClientToolCallState.pending:
-           return 'Planning workflow changes'
-       }
-     }
-     if (mode === 'apply' || typeof params?.proposalId === 'string') {
-       switch (state) {
-         case ClientToolCallState.success:
-           return 'Applied workflow changes'
-         case ClientToolCallState.executing:
-         case ClientToolCallState.generating:
-         case ClientToolCallState.pending:
-           return 'Applying workflow changes'
-       }
-     }
-     return undefined
    },
    uiConfig: {
      isSpecial: true,
@@ -633,42 +609,6 @@ const META_workflow_change: ToolMetadata = {
    },
  }

- const META_workflow_context_get: ToolMetadata = {
-   displayNames: {
-     [ClientToolCallState.generating]: { text: 'Gathering workflow context', icon: Loader2 },
-     [ClientToolCallState.pending]: { text: 'Gathering workflow context', icon: Loader2 },
-     [ClientToolCallState.executing]: { text: 'Gathering workflow context', icon: Loader2 },
-     [ClientToolCallState.success]: { text: 'Gathered workflow context', icon: FileText },
-     [ClientToolCallState.error]: { text: 'Failed to gather workflow context', icon: XCircle },
-     [ClientToolCallState.rejected]: { text: 'Skipped workflow context', icon: MinusCircle },
-     [ClientToolCallState.aborted]: { text: 'Aborted workflow context', icon: MinusCircle },
-   },
- }
-
- const META_workflow_context_expand: ToolMetadata = {
-   displayNames: {
-     [ClientToolCallState.generating]: { text: 'Expanding workflow schemas', icon: Loader2 },
-     [ClientToolCallState.pending]: { text: 'Expanding workflow schemas', icon: Loader2 },
-     [ClientToolCallState.executing]: { text: 'Expanding workflow schemas', icon: Loader2 },
-     [ClientToolCallState.success]: { text: 'Expanded workflow schemas', icon: FileText },
-     [ClientToolCallState.error]: { text: 'Failed to expand workflow schemas', icon: XCircle },
-     [ClientToolCallState.rejected]: { text: 'Skipped schema expansion', icon: MinusCircle },
-     [ClientToolCallState.aborted]: { text: 'Aborted schema expansion', icon: MinusCircle },
-   },
- }
-
- const META_workflow_verify: ToolMetadata = {
-   displayNames: {
-     [ClientToolCallState.generating]: { text: 'Verifying workflow', icon: Loader2 },
-     [ClientToolCallState.pending]: { text: 'Verifying workflow', icon: Loader2 },
-     [ClientToolCallState.executing]: { text: 'Verifying workflow', icon: Loader2 },
-     [ClientToolCallState.success]: { text: 'Verified workflow', icon: CheckCircle2 },
-     [ClientToolCallState.error]: { text: 'Workflow verification failed', icon: XCircle },
-     [ClientToolCallState.rejected]: { text: 'Skipped workflow verification', icon: MinusCircle },
-     [ClientToolCallState.aborted]: { text: 'Aborted workflow verification', icon: MinusCircle },
-   },
- }
-
  const META_evaluate: ToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
@@ -2601,12 +2541,7 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
    deploy_chat: META_deploy_chat,
    deploy_mcp: META_deploy_mcp,
    edit: META_edit,
-   workflow_context_get: META_workflow_context_get,
+   edit_workflow: META_edit_workflow,
-   workflow_context_expand: META_workflow_context_expand,
-   workflow_change: META_workflow_change,
-   workflow_verify: META_workflow_verify,
-   workflow_run: META_run_workflow,
-   workflow_deploy: META_deploy_api,
    evaluate: META_evaluate,
    get_block_config: META_get_block_config,
    get_block_options: META_get_block_options,
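A brief lookup sketch (the helper is hypothetical; the table name and displayNames shape come from the hunks above) showing how this metadata is typically consumed to render a tool call's label:

```ts
// Hypothetical helper resolving the display text for a tool call state.
function getToolDisplayText(toolId: string, state: ClientToolCallState): string | undefined {
  const meta = TOOL_METADATA_BY_ID[toolId]
  return meta?.displayNames?.[state]?.text
}

// e.g. getToolDisplayText('edit_workflow', ClientToolCallState.success) -> 'Edited your workflow'
```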
apps/sim/lib/copilot/tools/mcp/definitions.ts (new file, 680 lines)
@@ -0,0 +1,680 @@ (entire file added)
export type DirectToolDef = {
  name: string
  description: string
  inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
  toolId: string
}

export type SubagentToolDef = {
  name: string
  description: string
  inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
  agentId: string
}

/**
 * Direct tools that execute immediately without LLM orchestration.
 * These are fast database queries that don't need AI reasoning.
 */
export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
  {
    name: 'list_workspaces',
    toolId: 'list_user_workspaces',
    description:
      'List all workspaces the user has access to. Returns workspace IDs, names, and roles. Use this first to determine which workspace to operate in.',
    inputSchema: {
      type: 'object',
      properties: {},
    },
  },
  {
    name: 'list_workflows',
    toolId: 'list_user_workflows',
    description:
      'List all workflows the user has access to. Returns workflow IDs, names, workspace, and folder info. Use workspaceId/folderId to scope results.',
    inputSchema: {
      type: 'object',
      properties: {
        workspaceId: {
          type: 'string',
          description: 'Optional workspace ID to filter workflows.',
        },
        folderId: {
          type: 'string',
          description: 'Optional folder ID to filter workflows.',
        },
      },
    },
  },
  {
    name: 'list_folders',
    toolId: 'list_folders',
    description:
      'List all folders in a workspace. Returns folder IDs, names, and parent relationships for organizing workflows.',
    inputSchema: {
      type: 'object',
      properties: {
        workspaceId: {
          type: 'string',
          description: 'Workspace ID to list folders from.',
        },
      },
      required: ['workspaceId'],
    },
  },
  {
    name: 'get_workflow',
    toolId: 'get_user_workflow',
    description:
      'Get a workflow by ID. Returns the full workflow definition including all blocks, connections, and configuration.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'Workflow ID to retrieve.',
        },
      },
      required: ['workflowId'],
    },
  },
  {
    name: 'create_workflow',
    toolId: 'create_workflow',
    description:
      'Create a new empty workflow. Returns the new workflow ID. Always call this FIRST before sim_build for new workflows. Use workspaceId to place it in a specific workspace.',
    inputSchema: {
      type: 'object',
      properties: {
        name: {
          type: 'string',
          description: 'Name for the new workflow.',
        },
        workspaceId: {
          type: 'string',
          description: 'Optional workspace ID. Uses default workspace if not provided.',
        },
        folderId: {
          type: 'string',
          description: 'Optional folder ID to place the workflow in.',
        },
        description: {
          type: 'string',
          description: 'Optional description for the workflow.',
        },
      },
      required: ['name'],
    },
  },
  {
    name: 'create_folder',
    toolId: 'create_folder',
    description:
      'Create a new folder for organizing workflows. Use parentId to create nested folder hierarchies.',
    inputSchema: {
      type: 'object',
      properties: {
        name: {
          type: 'string',
          description: 'Name for the new folder.',
        },
        workspaceId: {
          type: 'string',
          description: 'Optional workspace ID. Uses default workspace if not provided.',
        },
        parentId: {
          type: 'string',
          description: 'Optional parent folder ID for nested folders.',
        },
      },
      required: ['name'],
    },
  },
  {
    name: 'rename_workflow',
    toolId: 'rename_workflow',
    description: 'Rename an existing workflow.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'The workflow ID to rename.',
        },
        name: {
          type: 'string',
          description: 'The new name for the workflow.',
        },
      },
      required: ['workflowId', 'name'],
    },
  },
  {
    name: 'move_workflow',
    toolId: 'move_workflow',
    description:
      'Move a workflow into a different folder. Omit folderId or pass empty string to move to workspace root.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'The workflow ID to move.',
        },
        folderId: {
          type: 'string',
          description: 'Target folder ID. Omit or pass empty string to move to workspace root.',
        },
      },
      required: ['workflowId'],
    },
  },
  {
    name: 'move_folder',
    toolId: 'move_folder',
    description:
      'Move a folder into another folder. Omit parentId or pass empty string to move to workspace root.',
    inputSchema: {
      type: 'object',
      properties: {
        folderId: {
          type: 'string',
          description: 'The folder ID to move.',
        },
        parentId: {
          type: 'string',
          description:
            'Target parent folder ID. Omit or pass empty string to move to workspace root.',
        },
      },
      required: ['folderId'],
    },
  },
  {
    name: 'run_workflow',
    toolId: 'run_workflow',
    description:
      'Run a workflow and return its output. Works on both draft and deployed states. By default runs the draft (live) state.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'REQUIRED. The workflow ID to run.',
        },
        workflow_input: {
          type: 'object',
          description:
            'JSON object with input values. Keys should match the workflow start block input field names.',
        },
        useDeployedState: {
          type: 'boolean',
          description: 'When true, runs the deployed version instead of the draft. Default: false.',
        },
      },
      required: ['workflowId'],
    },
  },
  {
    name: 'run_workflow_until_block',
    toolId: 'run_workflow_until_block',
    description:
      'Run a workflow and stop after a specific block completes. Useful for testing partial execution or debugging specific blocks.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'REQUIRED. The workflow ID to run.',
        },
        stopAfterBlockId: {
          type: 'string',
          description:
            'REQUIRED. The block ID to stop after. Execution halts once this block completes.',
        },
        workflow_input: {
          type: 'object',
          description: 'JSON object with input values for the workflow.',
        },
        useDeployedState: {
          type: 'boolean',
          description: 'When true, runs the deployed version instead of the draft. Default: false.',
        },
      },
      required: ['workflowId', 'stopAfterBlockId'],
    },
  },
  {
    name: 'run_from_block',
    toolId: 'run_from_block',
    description:
      'Run a workflow starting from a specific block, using cached outputs from a prior execution for upstream blocks. The workflow must have been run at least once first.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'REQUIRED. The workflow ID to run.',
        },
        startBlockId: {
          type: 'string',
          description: 'REQUIRED. The block ID to start execution from.',
        },
        executionId: {
          type: 'string',
          description:
            'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
        },
        workflow_input: {
          type: 'object',
          description: 'Optional input values for the workflow.',
        },
        useDeployedState: {
          type: 'boolean',
          description: 'When true, runs the deployed version instead of the draft. Default: false.',
        },
      },
      required: ['workflowId', 'startBlockId'],
    },
  },
  {
    name: 'run_block',
    toolId: 'run_block',
    description:
      'Run a single block in isolation using cached outputs from a prior execution. Only the specified block executes — nothing upstream or downstream. The workflow must have been run at least once first.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'REQUIRED. The workflow ID.',
        },
        blockId: {
          type: 'string',
          description: 'REQUIRED. The block ID to run in isolation.',
        },
        executionId: {
          type: 'string',
          description:
            'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
        },
        workflow_input: {
          type: 'object',
          description: 'Optional input values for the workflow.',
        },
        useDeployedState: {
          type: 'boolean',
          description: 'When true, runs the deployed version instead of the draft. Default: false.',
        },
      },
      required: ['workflowId', 'blockId'],
    },
  },
  {
    name: 'get_deployed_workflow_state',
    toolId: 'get_deployed_workflow_state',
    description:
      'Get the deployed (production) state of a workflow. Returns the full workflow definition as deployed, or indicates if the workflow is not yet deployed.',
    inputSchema: {
      type: 'object',
      properties: {
        workflowId: {
          type: 'string',
          description: 'REQUIRED. The workflow ID to get the deployed state for.',
        },
      },
      required: ['workflowId'],
    },
  },
  {
    name: 'generate_api_key',
    toolId: 'generate_api_key',
    description:
      'Generate a new workspace API key for calling workflow API endpoints. The key is only shown once — tell the user to save it immediately.',
    inputSchema: {
      type: 'object',
      properties: {
        name: {
          type: 'string',
          description:
            'A descriptive name for the API key (e.g., "production-key", "dev-testing").',
        },
        workspaceId: {
          type: 'string',
          description: "Optional workspace ID. Defaults to user's default workspace.",
        },
      },
      required: ['name'],
    },
  },
]

export const SUBAGENT_TOOL_DEFS: SubagentToolDef[] = [
  {
    name: 'sim_build',
    agentId: 'build',
    description: `Build a workflow end-to-end in a single step. This is the fast mode equivalent for headless/MCP usage.

USE THIS WHEN:
- Building a new workflow from scratch
- Modifying an existing workflow
- You want to gather information and build in one pass without separate plan→edit steps

WORKFLOW ID (REQUIRED):
- For NEW workflows: First call create_workflow to get a workflowId, then pass it here
- For EXISTING workflows: Always pass the workflowId parameter

CAN DO:
- Gather information about blocks, credentials, patterns
- Search documentation and patterns for best practices
- Add, modify, or remove blocks
- Configure block settings and connections
- Set environment variables and workflow variables

CANNOT DO:
- Run or test workflows (use sim_test separately)
- Deploy workflows (use sim_deploy separately)

WORKFLOW:
1. Call create_workflow to get a workflowId (for new workflows)
2. Call sim_build with the request and workflowId
3. Build agent gathers info and builds in one pass
4. Call sim_test to verify it works
5. Optionally call sim_deploy to make it externally accessible`,
    inputSchema: {
      type: 'object',
      properties: {
        request: {
          type: 'string',
          description: 'What you want to build or modify in the workflow.',
        },
        workflowId: {
          type: 'string',
          description:
            'REQUIRED. The workflow ID. For new workflows, call create_workflow first to get this.',
        },
        context: { type: 'object' },
      },
      required: ['request', 'workflowId'],
    },
  },
  {
    name: 'sim_discovery',
    agentId: 'discovery',
    description: `Find workflows by their contents or functionality when the user doesn't know the exact name or ID.

USE THIS WHEN:
- User describes a workflow by what it does: "the one that sends emails", "my Slack notification workflow"
- User refers to workflow contents: "the workflow with the OpenAI block"
- User needs to search/match workflows by functionality or description

DO NOT USE (use direct tools instead):
- User knows the workflow name → use get_workflow
- User wants to list all workflows → use list_workflows
- User wants to list workspaces → use list_workspaces
- User wants to list folders → use list_folders`,
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        workspaceId: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_plan',
    agentId: 'plan',
    description: `Plan workflow changes by gathering required information. For most cases, prefer sim_build which combines planning and editing in one step.

USE THIS WHEN:
- You need fine-grained control over the build process
- You want to inspect the plan before executing it

WORKFLOW ID (REQUIRED):
- For NEW workflows: First call create_workflow to get a workflowId, then pass it here
- For EXISTING workflows: Always pass the workflowId parameter

This tool gathers information about available blocks, credentials, and the current workflow state.

RETURNS: A plan object containing block configurations, connections, and technical details.
IMPORTANT: Pass the returned plan EXACTLY to sim_edit - do not modify or summarize it.`,
    inputSchema: {
      type: 'object',
      properties: {
        request: {
          type: 'string',
          description: 'What you want to build or modify in the workflow.',
        },
        workflowId: {
          type: 'string',
          description:
            'REQUIRED. The workflow ID. For new workflows, call create_workflow first to get this.',
        },
        context: { type: 'object' },
      },
      required: ['request', 'workflowId'],
    },
  },
  {
    name: 'sim_edit',
    agentId: 'edit',
    description: `Execute a workflow plan from sim_plan. For most cases, prefer sim_build which combines planning and editing in one step.

WORKFLOW ID (REQUIRED):
- You MUST provide the workflowId parameter

PLAN (REQUIRED):
- Pass the EXACT plan object from sim_plan in the context.plan field
- Do NOT modify, summarize, or interpret the plan - pass it verbatim

After sim_edit completes, you can test immediately with sim_test, or deploy with sim_deploy to make it accessible externally.`,
    inputSchema: {
      type: 'object',
      properties: {
        message: { type: 'string', description: 'Optional additional instructions for the edit.' },
        workflowId: {
          type: 'string',
          description:
            'REQUIRED. The workflow ID to edit. Get this from create_workflow for new workflows.',
        },
        plan: {
          type: 'object',
          description: 'The plan object from sim_plan. Pass it EXACTLY as returned, do not modify.',
        },
        context: {
          type: 'object',
          description:
            'Additional context. Put the plan in context.plan if not using the plan field directly.',
        },
      },
      required: ['workflowId'],
    },
  },
  {
    name: 'sim_deploy',
    agentId: 'deploy',
    description: `Deploy a workflow to make it accessible externally. Workflows can be tested without deploying, but deployment is needed for API access, chat UIs, or MCP exposure.

DEPLOYMENT TYPES:
- "deploy as api" - REST API endpoint for programmatic access
- "deploy as chat" - Managed chat UI with auth options
- "deploy as mcp" - Expose as MCP tool on an MCP server for AI agents to call

MCP DEPLOYMENT FLOW:
The deploy subagent will automatically: list available MCP servers → create one if needed → deploy the workflow as an MCP tool to that server. You can specify server name, tool name, and tool description.

ALSO CAN:
- Get the deployed (production) state to compare with draft
- Generate workspace API keys for calling deployed workflows
- List and create MCP servers in the workspace`,
    inputSchema: {
      type: 'object',
      properties: {
        request: {
          type: 'string',
          description: 'The deployment request, e.g. "deploy as api" or "deploy as chat"',
        },
        workflowId: {
          type: 'string',
          description: 'REQUIRED. The workflow ID to deploy.',
        },
        context: { type: 'object' },
      },
      required: ['request', 'workflowId'],
    },
  },
  {
    name: 'sim_test',
    agentId: 'test',
    description: `Run a workflow and verify its outputs. Works on both deployed and undeployed (draft) workflows. Use after building to verify correctness.

Supports full and partial execution:
- Full run with test inputs
- Stop after a specific block (run_workflow_until_block)
- Run a single block in isolation (run_block)
- Resume from a specific block (run_from_block)`,
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        workflowId: {
          type: 'string',
          description: 'REQUIRED. The workflow ID to test.',
        },
        context: { type: 'object' },
      },
      required: ['request', 'workflowId'],
    },
  },
  {
    name: 'sim_debug',
    agentId: 'debug',
    description:
      'Diagnose errors or unexpected workflow behavior. Provide the error message and workflowId. Returns root cause analysis and fix suggestions.',
    inputSchema: {
      type: 'object',
      properties: {
        error: { type: 'string', description: 'The error message or description of the issue.' },
        workflowId: { type: 'string', description: 'REQUIRED. The workflow ID to debug.' },
        context: { type: 'object' },
      },
      required: ['error', 'workflowId'],
    },
  },
  {
    name: 'sim_auth',
    agentId: 'auth',
    description:
      'Check OAuth connection status, list connected services, and initiate new OAuth connections. Use when a workflow needs third-party service access (Google, Slack, GitHub, etc.). In MCP/headless mode, returns an authorization URL the user must open in their browser to complete the OAuth flow.',
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_knowledge',
    agentId: 'knowledge',
    description:
      'Manage knowledge bases for RAG-powered document retrieval. Supports listing, creating, updating, and deleting knowledge bases. Knowledge bases can be attached to agent blocks for context-aware responses.',
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_custom_tool',
    agentId: 'custom_tool',
    description:
      'Manage custom tools (reusable API integrations). Supports listing, creating, updating, and deleting custom tools. Custom tools can be added to agent blocks as callable functions.',
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_info',
    agentId: 'info',
    description:
      "Inspect a workflow's blocks, connections, outputs, variables, and metadata. Use for questions about the Sim platform itself — how blocks work, what integrations are available, platform concepts, etc. Always provide workflowId to scope results to a specific workflow.",
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        workflowId: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_workflow',
    agentId: 'workflow',
    description:
      'Manage workflow-level configuration: environment variables, settings, scheduling, and deployment status. Use for any data about a specific workflow — its settings, credentials, variables, or deployment state.',
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        workflowId: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_research',
    agentId: 'research',
    description:
      'Research external APIs and documentation. Use when you need to understand third-party services, external APIs, authentication flows, or data formats OUTSIDE of Sim. For questions about Sim itself, use sim_info instead.',
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_superagent',
    agentId: 'superagent',
    description:
      'Execute direct actions NOW: send an email, post to Slack, make an API call, etc. Use when the user wants to DO something immediately rather than build a workflow for it.',
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
  {
    name: 'sim_platform',
    agentId: 'tour',
    description:
      'Get help with Sim platform navigation, keyboard shortcuts, and UI actions. Use when the user asks "how do I..." about the Sim editor, wants keyboard shortcuts, or needs to know what actions are available in the UI.',
    inputSchema: {
      type: 'object',
      properties: {
        request: { type: 'string' },
        context: { type: 'object' },
      },
      required: ['request'],
    },
  },
]
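For orientation, a hedged sketch of the call sequence described in the sim_build definition above, written against a generic MCP client. The client object, its callTool shape, and the response key are assumptions for illustration; only the tool names and required arguments come from this file.

```ts
// Hypothetical MCP client session; assumes a callTool method in the usual
// MCP SDK shape and a server exposing the tools defined above.
type McpClient = {
  callTool: (args: { name: string; arguments: Record<string, unknown> }) => Promise<unknown>
}

async function buildAndTest(client: McpClient) {
  // 1. Create an empty workflow to obtain a workflowId (response shape assumed).
  const created = (await client.callTool({
    name: 'create_workflow',
    arguments: { name: 'Email digest' },
  })) as { workflowId?: string }

  // 2. Build it end-to-end in one pass.
  await client.callTool({
    name: 'sim_build',
    arguments: { request: 'Summarize unread emails every morning', workflowId: created.workflowId },
  })

  // 3. Verify it works before optionally deploying with sim_deploy.
  await client.callTool({
    name: 'sim_test',
    arguments: { request: 'Run once with sample input', workflowId: created.workflowId },
  })
}
```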
@@ -109,7 +109,7 @@ function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
    return undefined
  }

- // Return canonical option IDs/values expected by workflow_change compilation and apply
+ // Return the actual option ID/value that edit_workflow expects, not the display label
  return rawOptions
    .map((opt: any) => {
      if (!opt) return undefined
@@ -11,13 +11,8 @@ import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-
  import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
  import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
  import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
+ import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
  import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
- import { workflowChangeServerTool } from '@/lib/copilot/tools/server/workflow/workflow-change'
- import {
-   workflowContextExpandServerTool,
-   workflowContextGetServerTool,
- } from '@/lib/copilot/tools/server/workflow/workflow-context'
- import { workflowVerifyServerTool } from '@/lib/copilot/tools/server/workflow/workflow-verify'
  import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

  export { ExecuteResponseSuccessSchema }
@@ -32,6 +27,7 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
    [getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
    [getBlockConfigServerTool.name]: getBlockConfigServerTool,
    [getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
+   [editWorkflowServerTool.name]: editWorkflowServerTool,
    [getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
    [searchDocumentationServerTool.name]: searchDocumentationServerTool,
    [searchOnlineServerTool.name]: searchOnlineServerTool,
@@ -39,10 +35,6 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
    [getCredentialsServerTool.name]: getCredentialsServerTool,
    [makeApiRequestServerTool.name]: makeApiRequestServerTool,
    [knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
-   [workflowContextGetServerTool.name]: workflowContextGetServerTool,
-   [workflowContextExpandServerTool.name]: workflowContextExpandServerTool,
-   [workflowChangeServerTool.name]: workflowChangeServerTool,
-   [workflowVerifyServerTool.name]: workflowVerifyServerTool,
  }

  /**
|||||||
@@ -1,225 +0,0 @@
import crypto from 'crypto'
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'

type StoreEntry<T> = {
  value: T
  expiresAt: number
}

const DEFAULT_TTL_MS = 30 * 60 * 1000
const MAX_ENTRIES = 500
const DEFAULT_TTL_SECONDS = Math.floor(DEFAULT_TTL_MS / 1000)
const CONTEXT_PREFIX = 'copilot:workflow_change:context'
const PROPOSAL_PREFIX = 'copilot:workflow_change:proposal'

const logger = createLogger('WorkflowChangeStore')

class TTLStore<T> {
  private readonly data = new Map<string, StoreEntry<T>>()

  constructor(private readonly ttlMs = DEFAULT_TTL_MS) {}

  set(value: T): string {
    this.gc()
    if (this.data.size >= MAX_ENTRIES) {
      const firstKey = this.data.keys().next().value as string | undefined
      if (firstKey) {
        this.data.delete(firstKey)
      }
    }
    const id = crypto.randomUUID()
    this.data.set(id, {
      value,
      expiresAt: Date.now() + this.ttlMs,
    })
    return id
  }

  get(id: string): T | null {
    const entry = this.data.get(id)
    if (!entry) return null
    if (entry.expiresAt <= Date.now()) {
      this.data.delete(id)
      return null
    }
    return entry.value
  }

  upsert(id: string, value: T): void {
    this.gc()
    this.data.set(id, {
      value,
      expiresAt: Date.now() + this.ttlMs,
    })
  }

  private gc(): void {
    const now = Date.now()
    for (const [key, entry] of this.data.entries()) {
      if (entry.expiresAt <= now) {
        this.data.delete(key)
      }
    }
  }
}

export type WorkflowContextPack = {
  workflowId: string
  snapshotHash: string
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
    loops: Record<string, any>
    parallels: Record<string, any>
  }
  schemasByType: Record<string, any>
  schemaRefsByType: Record<string, string>
  summary: Record<string, any>
}

export type WorkflowChangeProposal = {
  workflowId: string
  baseSnapshotHash: string
  compiledOperations: Array<Record<string, any>>
  diffSummary: Record<string, any>
  warnings: string[]
  diagnostics: string[]
  touchedBlocks: string[]
  resolvedIds?: Record<string, string>
  acceptanceAssertions: string[]
  postApply?: {
    verify?: boolean
    run?: Record<string, any>
    evaluator?: Record<string, any>
  }
  handoff?: {
    objective?: string
    constraints?: string[]
    resolvedIds?: Record<string, string>
    assumptions?: string[]
    unresolvedRisks?: string[]
  }
}

const contextPackStore = new TTLStore<WorkflowContextPack>()
const proposalStore = new TTLStore<WorkflowChangeProposal>()

function getContextRedisKey(id: string): string {
  return `${CONTEXT_PREFIX}:${id}`
}

function getProposalRedisKey(id: string): string {
  return `${PROPOSAL_PREFIX}:${id}`
}

async function writeRedisJson(key: string, value: unknown): Promise<void> {
  const redis = getRedisClient()!
  await redis.set(key, JSON.stringify(value), 'EX', DEFAULT_TTL_SECONDS)
}

async function readRedisJson<T>(key: string): Promise<T | null> {
  const redis = getRedisClient()!

  const raw = await redis.get(key)
  if (!raw) {
    return null
  }

  try {
    return JSON.parse(raw) as T
  } catch (error) {
    logger.warn('Failed parsing workflow change store JSON payload', { key, error })
    await redis.del(key).catch(() => {})
    return null
  }
}

export async function saveContextPack(pack: WorkflowContextPack): Promise<string> {
  if (!getRedisClient()) {
    return contextPackStore.set(pack)
  }
  const id = crypto.randomUUID()
  try {
    await writeRedisJson(getContextRedisKey(id), pack)
    return id
  } catch (error) {
    logger.warn('Redis write failed for workflow context pack, using memory fallback', { error })
    return contextPackStore.set(pack)
  }
}

export async function getContextPack(id: string): Promise<WorkflowContextPack | null> {
  if (!getRedisClient()) {
    return contextPackStore.get(id)
  }
  try {
    const redisPayload = await readRedisJson<WorkflowContextPack>(getContextRedisKey(id))
    if (redisPayload) {
      return redisPayload
    }
  } catch (error) {
    logger.warn('Redis read failed for workflow context pack, using memory fallback', { error })
  }
  return contextPackStore.get(id)
}

export async function updateContextPack(
  id: string,
  patch: Partial<WorkflowContextPack>
): Promise<WorkflowContextPack | null> {
  const existing = await getContextPack(id)
  if (!existing) return null
  const merged: WorkflowContextPack = {
    ...existing,
    ...patch,
    workflowState: patch.workflowState || existing.workflowState,
    schemasByType: patch.schemasByType || existing.schemasByType,
    schemaRefsByType: patch.schemaRefsByType || existing.schemaRefsByType,
    summary: patch.summary || existing.summary,
  }

  if (!getRedisClient()) {
    contextPackStore.upsert(id, merged)
    return merged
  }

  try {
    await writeRedisJson(getContextRedisKey(id), merged)
    contextPackStore.upsert(id, merged)
    return merged
  } catch (error) {
    logger.warn('Redis update failed for workflow context pack, using memory fallback', { error })
    contextPackStore.upsert(id, merged)
    return merged
  }
}

export async function saveProposal(proposal: WorkflowChangeProposal): Promise<string> {
  if (!getRedisClient()) {
    return proposalStore.set(proposal)
  }
  const id = crypto.randomUUID()
  try {
    await writeRedisJson(getProposalRedisKey(id), proposal)
    return id
  } catch (error) {
    logger.warn('Redis write failed for workflow proposal, using memory fallback', { error })
    return proposalStore.set(proposal)
  }
}

export async function getProposal(id: string): Promise<WorkflowChangeProposal | null> {
  if (!getRedisClient()) {
    return proposalStore.get(id)
  }
  try {
    const redisPayload = await readRedisJson<WorkflowChangeProposal>(getProposalRedisKey(id))
    if (redisPayload) {
      return redisPayload
    }
  } catch (error) {
    logger.warn('Redis read failed for workflow proposal, using memory fallback', { error })
  }
  return proposalStore.get(id)
}
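For orientation, here is a minimal usage sketch of the store module removed above, assuming the exported helpers keep the signatures shown in the diff (saveContextPack, getContextPack and the WorkflowContextPack type). The import path and the sample pack values are hypothetical placeholders for illustration, not part of the repository.

// Hypothetical sketch: the import path and sample values are assumptions.
import { saveContextPack, getContextPack } from '@/lib/copilot/tools/server/workflow/store' // assumed path

async function contextPackDemo() {
  // Persist a context pack; the helper writes to Redis when a client is
  // configured and falls back to the in-process TTLStore otherwise.
  const id = await saveContextPack({
    workflowId: 'wf_demo', // hypothetical id
    snapshotHash: 'hash_demo', // hypothetical hash
    workflowState: { blocks: {}, edges: [], loops: {}, parallels: {} },
    schemasByType: {},
    schemaRefsByType: {},
    summary: {},
  })

  // Reads try Redis first and fall back to memory; entries expire after
  // DEFAULT_TTL_MS (30 minutes) in either backend.
  const pack = await getContextPack(id)
  console.log(pack?.workflowId)
}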
@@ -0,0 +1,298 @@
import { db } from '@sim/db'
import { workflow as workflowTable } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { applyAutoLayout } from '@/lib/workflows/autolayout'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import {
  loadWorkflowFromNormalizedTables,
  saveWorkflowToNormalizedTables,
} from '@/lib/workflows/persistence/utils'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { applyOperationsToWorkflowState } from './engine'
import type { EditWorkflowParams, ValidationError } from './types'
import { preValidateCredentialInputs, validateWorkflowSelectorIds } from './validation'

async function getCurrentWorkflowStateFromDb(
  workflowId: string
): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
  const logger = createLogger('EditWorkflowServerTool')
  const [workflowRecord] = await db
    .select()
    .from(workflowTable)
    .where(eq(workflowTable.id, workflowId))
    .limit(1)
  if (!workflowRecord) throw new Error(`Workflow ${workflowId} not found in database`)
  const normalized = await loadWorkflowFromNormalizedTables(workflowId)
  if (!normalized) throw new Error('Workflow has no normalized data')

  // Validate and fix blocks without types
  const blocks = { ...normalized.blocks }
  const invalidBlocks: string[] = []

  Object.entries(blocks).forEach(([id, block]: [string, any]) => {
    if (!block.type) {
      logger.warn(`Block ${id} loaded without type from database`, {
        blockKeys: Object.keys(block),
        blockName: block.name,
      })
      invalidBlocks.push(id)
    }
  })

  // Remove invalid blocks
  invalidBlocks.forEach((id) => delete blocks[id])

  // Remove edges connected to invalid blocks
  const edges = normalized.edges.filter(
    (edge: any) => !invalidBlocks.includes(edge.source) && !invalidBlocks.includes(edge.target)
  )

  const workflowState: any = {
    blocks,
    edges,
    loops: normalized.loops || {},
    parallels: normalized.parallels || {},
  }
  const subBlockValues: Record<string, Record<string, any>> = {}
  Object.entries(normalized.blocks).forEach(([blockId, block]) => {
    subBlockValues[blockId] = {}
    Object.entries((block as any).subBlocks || {}).forEach(([subId, sub]) => {
      if ((sub as any).value !== undefined) subBlockValues[blockId][subId] = (sub as any).value
    })
  })
  return { workflowState, subBlockValues }
}

export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
  name: 'edit_workflow',
  async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<unknown> {
    const logger = createLogger('EditWorkflowServerTool')
    const { operations, workflowId, currentUserWorkflow } = params
    if (!Array.isArray(operations) || operations.length === 0) {
      throw new Error('operations are required and must be an array')
    }
    if (!workflowId) throw new Error('workflowId is required')
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,
      userId: context.userId,
      action: 'write',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    logger.info('Executing edit_workflow', {
      operationCount: operations.length,
      workflowId,
      hasCurrentUserWorkflow: !!currentUserWorkflow,
    })

    // Get current workflow state
    let workflowState: any
    if (currentUserWorkflow) {
      try {
        workflowState = JSON.parse(currentUserWorkflow)
      } catch (error) {
        logger.error('Failed to parse currentUserWorkflow', error)
        throw new Error('Invalid currentUserWorkflow format')
      }
    } else {
      const fromDb = await getCurrentWorkflowStateFromDb(workflowId)
      workflowState = fromDb.workflowState
    }

    // Get permission config for the user
    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null

    // Pre-validate credential and apiKey inputs before applying operations
    // This filters out invalid credentials and apiKeys for hosted models
    let operationsToApply = operations
    const credentialErrors: ValidationError[] = []
    if (context?.userId) {
      const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
        operations,
        { userId: context.userId },
        workflowState
      )
      operationsToApply = filteredOperations
      credentialErrors.push(...credErrors)
    }

    // Apply operations directly to the workflow state
    const {
      state: modifiedWorkflowState,
      validationErrors,
      skippedItems,
    } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)

    // Add credential validation errors
    validationErrors.push(...credentialErrors)

    // Get workspaceId for selector validation
    let workspaceId: string | undefined
    try {
      const [workflowRecord] = await db
        .select({ workspaceId: workflowTable.workspaceId })
        .from(workflowTable)
        .where(eq(workflowTable.id, workflowId))
        .limit(1)
      workspaceId = workflowRecord?.workspaceId ?? undefined
    } catch (error) {
      logger.warn('Failed to get workspaceId for selector validation', { error, workflowId })
    }

    // Validate selector IDs exist in the database
    if (context?.userId) {
      try {
        const selectorErrors = await validateWorkflowSelectorIds(modifiedWorkflowState, {
          userId: context.userId,
          workspaceId,
        })
        validationErrors.push(...selectorErrors)
      } catch (error) {
        logger.warn('Selector ID validation failed', {
          error: error instanceof Error ? error.message : String(error),
        })
      }
    }

    // Validate the workflow state
    const validation = validateWorkflowState(modifiedWorkflowState, { sanitize: true })

    if (!validation.valid) {
      logger.error('Edited workflow state is invalid', {
        errors: validation.errors,
        warnings: validation.warnings,
      })
      throw new Error(`Invalid edited workflow: ${validation.errors.join('; ')}`)
    }

    if (validation.warnings.length > 0) {
      logger.warn('Edited workflow validation warnings', {
        warnings: validation.warnings,
      })
    }

    // Extract and persist custom tools to database (reuse workspaceId from selector validation)
    if (context?.userId && workspaceId) {
      try {
        const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState
        const { saved, errors } = await extractAndPersistCustomTools(
          finalWorkflowState,
          workspaceId,
          context.userId
        )

        if (saved > 0) {
          logger.info(`Persisted ${saved} custom tool(s) to database`, { workflowId })
        }

        if (errors.length > 0) {
          logger.warn('Some custom tools failed to persist', { errors, workflowId })
        }
      } catch (error) {
        logger.error('Failed to persist custom tools', { error, workflowId })
      }
    } else if (context?.userId && !workspaceId) {
      logger.warn('Workflow has no workspaceId, skipping custom tools persistence', {
        workflowId,
      })
    } else {
      logger.warn('No userId in context - skipping custom tools persistence', { workflowId })
    }

    logger.info('edit_workflow successfully applied operations', {
      operationCount: operations.length,
      blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
      edgesCount: modifiedWorkflowState.edges.length,
      inputValidationErrors: validationErrors.length,
      skippedItemsCount: skippedItems.length,
      schemaValidationErrors: validation.errors.length,
      validationWarnings: validation.warnings.length,
    })

    // Format validation errors for LLM feedback
    const inputErrors =
      validationErrors.length > 0
        ? validationErrors.map((e) => `Block "${e.blockId}" (${e.blockType}): ${e.error}`)
        : undefined

    // Format skipped items for LLM feedback
    const skippedMessages =
      skippedItems.length > 0 ? skippedItems.map((item) => item.reason) : undefined

    // Persist the workflow state to the database
    const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState

    // Apply autolayout to position blocks properly
    const layoutResult = applyAutoLayout(finalWorkflowState.blocks, finalWorkflowState.edges, {
      horizontalSpacing: 250,
      verticalSpacing: 100,
      padding: { x: 100, y: 100 },
    })

    const layoutedBlocks =
      layoutResult.success && layoutResult.blocks ? layoutResult.blocks : finalWorkflowState.blocks

    if (!layoutResult.success) {
      logger.warn('Autolayout failed, using default positions', {
        workflowId,
        error: layoutResult.error,
      })
    }

    const workflowStateForDb = {
      blocks: layoutedBlocks,
      edges: finalWorkflowState.edges,
      loops: generateLoopBlocks(layoutedBlocks as any),
      parallels: generateParallelBlocks(layoutedBlocks as any),
      lastSaved: Date.now(),
      isDeployed: false,
    }

    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowStateForDb as any)
    if (!saveResult.success) {
      logger.error('Failed to persist workflow state to database', {
        workflowId,
        error: saveResult.error,
      })
      throw new Error(`Failed to save workflow: ${saveResult.error}`)
    }

    // Update workflow's lastSynced timestamp
    await db
      .update(workflowTable)
      .set({
        lastSynced: new Date(),
        updatedAt: new Date(),
      })
      .where(eq(workflowTable.id, workflowId))

    logger.info('Workflow state persisted to database', { workflowId })

    // Return the modified workflow state with autolayout applied
    return {
      success: true,
      workflowState: { ...finalWorkflowState, blocks: layoutedBlocks },
      // Include input validation errors so the LLM can see what was rejected
      ...(inputErrors && {
        inputValidationErrors: inputErrors,
        inputValidationMessage: `${inputErrors.length} input(s) were rejected due to validation errors. The workflow was still updated with valid inputs only. Errors: ${inputErrors.join('; ')}`,
      }),
      // Include skipped items so the LLM can see what operations were skipped
      ...(skippedMessages && {
        skippedItems: skippedMessages,
        skippedItemsMessage: `${skippedItems.length} operation(s) were skipped due to invalid references. Details: ${skippedMessages.join('; ')}`,
      }),
    }
  },
}
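For context, a hedged sketch of how the tool added above might be invoked. The import path, the ids, and the operation object shape are assumptions for illustration only; the real operation schema is defined by EditWorkflowParams in ./types and is not shown in this diff.

// Hypothetical invocation sketch: path, ids, and operation shape are assumptions.
import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow' // assumed path

async function runEditDemo() {
  // execute() authorizes the user for write access, applies the operations,
  // validates and autolayouts the result, then persists it to the normalized tables.
  const result = await editWorkflowServerTool.execute(
    {
      workflowId: 'wf_demo', // hypothetical id
      operations: [
        { type: 'edit', blockId: 'block_demo', params: {} }, // hypothetical operation shape
      ],
    } as any, // cast because the real EditWorkflowParams schema lives in ./types
    { userId: 'user_demo' } // hypothetical user
  )
  console.log(result)
}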
Some files were not shown because too many files have changed in this diff.