mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-12 07:24:55 -05:00
Compare commits
8 Commits
feat/mult-
...
feat/smart
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c22bd2caaa | ||
|
|
462aa15341 | ||
|
|
52aff4d60b | ||
|
|
3a3bddd6f8 | ||
|
|
639d50d6b9 | ||
|
|
cec74e09c2 | ||
|
|
d5a756c9f2 | ||
|
|
f3e994baf0 |
@@ -41,9 +41,6 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite
|
||||
|
||||
| Tastenkombination | Aktion |
|
||||
|----------|--------|
|
||||
| `C` | Copilot-Tab fokussieren |
|
||||
| `T` | Toolbar-Tab fokussieren |
|
||||
| `E` | Editor-Tab fokussieren |
|
||||
| `Mod` + `F` | Toolbar-Suche fokussieren |
|
||||
|
||||
## Globale Navigation
|
||||
|
||||
@@ -43,9 +43,6 @@ These shortcuts switch between panel tabs on the right side of the canvas.
|
||||
|
||||
| Shortcut | Action |
|
||||
|----------|--------|
|
||||
| `C` | Focus Copilot tab |
|
||||
| `T` | Focus Toolbar tab |
|
||||
| `E` | Focus Editor tab |
|
||||
| `Mod` + `F` | Focus Toolbar search |
|
||||
|
||||
## Global Navigation
|
||||
|
||||
@@ -399,6 +399,28 @@ Create a new custom property (metadata) on a Confluence page.
|
||||
| ↳ `authorId` | string | Account ID of the version author |
|
||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
||||
|
||||
### `confluence_delete_page_property`
|
||||
|
||||
Delete a content property from a Confluence page by its property ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `pageId` | string | Yes | The ID of the page containing the property |
|
||||
| `propertyId` | string | Yes | The ID of the property to delete |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `pageId` | string | ID of the page |
|
||||
| `propertyId` | string | ID of the deleted property |
|
||||
| `deleted` | boolean | Deletion status |
|
||||
|
||||
### `confluence_search`
|
||||
|
||||
Search for content across Confluence pages, blog posts, and other content.
|
||||
@@ -872,6 +894,90 @@ Add a label to a Confluence page for organization and categorization.
|
||||
| `labelName` | string | Name of the added label |
|
||||
| `labelId` | string | ID of the added label |
|
||||
|
||||
### `confluence_delete_label`
|
||||
|
||||
Remove a label from a Confluence page.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `pageId` | string | Yes | Confluence page ID to remove the label from |
|
||||
| `labelName` | string | Yes | Name of the label to remove |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `pageId` | string | Page ID the label was removed from |
|
||||
| `labelName` | string | Name of the removed label |
|
||||
| `deleted` | boolean | Deletion status |
|
||||
|
||||
### `confluence_get_pages_by_label`
|
||||
|
||||
Retrieve all pages that have a specific label applied.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `labelId` | string | Yes | The ID of the label to get pages for |
|
||||
| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) |
|
||||
| `cursor` | string | No | Pagination cursor from previous response |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `labelId` | string | ID of the label |
|
||||
| `pages` | array | Array of pages with this label |
|
||||
| ↳ `id` | string | Unique page identifier |
|
||||
| ↳ `title` | string | Page title |
|
||||
| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) |
|
||||
| ↳ `spaceId` | string | ID of the space containing the page |
|
||||
| ↳ `parentId` | string | ID of the parent page \(null if top-level\) |
|
||||
| ↳ `authorId` | string | Account ID of the page author |
|
||||
| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
|
||||
| ↳ `version` | object | Page version information |
|
||||
| ↳ `number` | number | Version number |
|
||||
| ↳ `message` | string | Version message |
|
||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
||||
| ↳ `authorId` | string | Account ID of the version author |
|
||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||
|
||||
### `confluence_list_space_labels`
|
||||
|
||||
List all labels associated with a Confluence space.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
|
||||
| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) |
|
||||
| `cursor` | string | No | Pagination cursor from previous response |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `spaceId` | string | ID of the space |
|
||||
| `labels` | array | Array of labels on the space |
|
||||
| ↳ `id` | string | Unique label identifier |
|
||||
| ↳ `name` | string | Label name |
|
||||
| ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) |
|
||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||
|
||||
### `confluence_get_space`
|
||||
|
||||
Get details about a specific Confluence space.
|
||||
|
||||
@@ -42,9 +42,6 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo
|
||||
|
||||
| Atajo | Acción |
|
||||
|----------|--------|
|
||||
| `C` | Enfocar pestaña Copilot |
|
||||
| `T` | Enfocar pestaña Barra de herramientas |
|
||||
| `E` | Enfocar pestaña Editor |
|
||||
| `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |
|
||||
|
||||
## Navegación global
|
||||
|
||||
@@ -42,9 +42,6 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté
|
||||
|
||||
| Raccourci | Action |
|
||||
|----------|--------|
|
||||
| `C` | Activer l'onglet Copilot |
|
||||
| `T` | Activer l'onglet Barre d'outils |
|
||||
| `E` | Activer l'onglet Éditeur |
|
||||
| `Mod` + `F` | Activer la recherche dans la barre d'outils |
|
||||
|
||||
## Navigation globale
|
||||
|
||||
@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
| ショートカット | 操作 |
|
||||
|----------|--------|
|
||||
| `C` | Copilotタブにフォーカス |
|
||||
| `T` | Toolbarタブにフォーカス |
|
||||
| `E` | Editorタブにフォーカス |
|
||||
| `Mod` + `F` | Toolbar検索にフォーカス |
|
||||
|
||||
## グローバルナビゲーション
|
||||
|
||||
@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'
|
||||
|
||||
| 快捷键 | 操作 |
|
||||
|----------|--------|
|
||||
| `C` | 聚焦 Copilot 标签页 |
|
||||
| `T` | 聚焦 Toolbar 标签页 |
|
||||
| `E` | 聚焦 Editor 标签页 |
|
||||
| `Mod` + `F` | 聚焦 Toolbar 搜索 |
|
||||
|
||||
## 全局导航
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
|
||||
@@ -31,13 +31,15 @@ export async function GET(request: NextRequest) {
|
||||
})
|
||||
.from(account)
|
||||
.where(and(...whereConditions))
|
||||
.orderBy(desc(account.updatedAt))
|
||||
|
||||
// Use the user's email as the display name (consistent with credential selector)
|
||||
const userEmail = session.user.email
|
||||
|
||||
const accountsWithDisplayName = accounts.map((acc) => ({
|
||||
id: acc.id,
|
||||
accountId: acc.accountId,
|
||||
providerId: acc.providerId,
|
||||
displayName: acc.accountId || acc.providerId,
|
||||
displayName: userEmail || acc.providerId,
|
||||
}))
|
||||
|
||||
return NextResponse.json({ accounts: accountsWithDisplayName })
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember, user } from '@sim/db/schema'
|
||||
import { account, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { jwtDecode } from 'jwt-decode'
|
||||
@@ -7,10 +7,8 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
|
||||
import { evaluateScopeCoverage, type OAuthProvider, parseProvider } from '@/lib/oauth'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
@@ -20,7 +18,6 @@ const credentialsQuerySchema = z
|
||||
.object({
|
||||
provider: z.string().nullish(),
|
||||
workflowId: z.string().uuid('Workflow ID must be a valid UUID').nullish(),
|
||||
workspaceId: z.string().uuid('Workspace ID must be a valid UUID').nullish(),
|
||||
credentialId: z
|
||||
.string()
|
||||
.min(1, 'Credential ID must not be empty')
|
||||
@@ -38,79 +35,6 @@ interface GoogleIdToken {
|
||||
name?: string
|
||||
}
|
||||
|
||||
function toCredentialResponse(
|
||||
id: string,
|
||||
displayName: string,
|
||||
providerId: string,
|
||||
updatedAt: Date,
|
||||
scope: string | null
|
||||
) {
|
||||
const storedScope = scope?.trim()
|
||||
const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
|
||||
const scopeEvaluation = evaluateScopeCoverage(providerId, grantedScopes)
|
||||
const [_, featureType = 'default'] = providerId.split('-')
|
||||
|
||||
return {
|
||||
id,
|
||||
name: displayName,
|
||||
provider: providerId,
|
||||
lastUsed: updatedAt.toISOString(),
|
||||
isDefault: featureType === 'default',
|
||||
scopes: scopeEvaluation.grantedScopes,
|
||||
canonicalScopes: scopeEvaluation.canonicalScopes,
|
||||
missingScopes: scopeEvaluation.missingScopes,
|
||||
extraScopes: scopeEvaluation.extraScopes,
|
||||
requiresReauthorization: scopeEvaluation.requiresReauthorization,
|
||||
}
|
||||
}
|
||||
|
||||
async function getFallbackDisplayName(
|
||||
requestId: string,
|
||||
providerParam: string | null | undefined,
|
||||
accountRow: {
|
||||
idToken: string | null
|
||||
accountId: string
|
||||
userId: string
|
||||
}
|
||||
) {
|
||||
const providerForParse = (providerParam || 'google') as OAuthProvider
|
||||
const { baseProvider } = parseProvider(providerForParse)
|
||||
|
||||
if (accountRow.idToken) {
|
||||
try {
|
||||
const decoded = jwtDecode<GoogleIdToken>(accountRow.idToken)
|
||||
if (decoded.email) return decoded.email
|
||||
if (decoded.name) return decoded.name
|
||||
} catch (_error) {
|
||||
logger.warn(`[${requestId}] Error decoding ID token`, {
|
||||
accountId: accountRow.accountId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (baseProvider === 'github') {
|
||||
return `${accountRow.accountId} (GitHub)`
|
||||
}
|
||||
|
||||
try {
|
||||
const userRecord = await db
|
||||
.select({ email: user.email })
|
||||
.from(user)
|
||||
.where(eq(user.id, accountRow.userId))
|
||||
.limit(1)
|
||||
|
||||
if (userRecord.length > 0) {
|
||||
return userRecord[0].email
|
||||
}
|
||||
} catch (_error) {
|
||||
logger.warn(`[${requestId}] Error fetching user email`, {
|
||||
userId: accountRow.userId,
|
||||
})
|
||||
}
|
||||
|
||||
return `${accountRow.accountId} (${baseProvider})`
|
||||
}
|
||||
|
||||
/**
|
||||
* Get credentials for a specific provider
|
||||
*/
|
||||
@@ -122,7 +46,6 @@ export async function GET(request: NextRequest) {
|
||||
const rawQuery = {
|
||||
provider: searchParams.get('provider'),
|
||||
workflowId: searchParams.get('workflowId'),
|
||||
workspaceId: searchParams.get('workspaceId'),
|
||||
credentialId: searchParams.get('credentialId'),
|
||||
}
|
||||
|
||||
@@ -155,7 +78,7 @@ export async function GET(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const { provider: providerParam, workflowId, workspaceId, credentialId } = parseResult.data
|
||||
const { provider: providerParam, workflowId, credentialId } = parseResult.data
|
||||
|
||||
// Authenticate requester (supports session and internal JWT)
|
||||
const authResult = await checkSessionOrInternalAuth(request)
|
||||
@@ -165,7 +88,7 @@ export async function GET(request: NextRequest) {
|
||||
}
|
||||
const requesterUserId = authResult.userId
|
||||
|
||||
let effectiveWorkspaceId = workspaceId ?? undefined
|
||||
const effectiveUserId = requesterUserId
|
||||
if (workflowId) {
|
||||
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId,
|
||||
@@ -183,145 +106,101 @@ export async function GET(request: NextRequest) {
|
||||
{ status: workflowAuthorization.status }
|
||||
)
|
||||
}
|
||||
effectiveWorkspaceId = workflowAuthorization.workflow?.workspaceId || undefined
|
||||
}
|
||||
|
||||
if (effectiveWorkspaceId) {
|
||||
const workspaceAccess = await checkWorkspaceAccess(effectiveWorkspaceId, requesterUserId)
|
||||
if (!workspaceAccess.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
}
|
||||
// Parse the provider to get base provider and feature type (if provider is present)
|
||||
const { baseProvider } = parseProvider((providerParam || 'google') as OAuthProvider)
|
||||
|
||||
let accountsData
|
||||
|
||||
if (credentialId) {
|
||||
const [platformCredential] = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
type: credential.type,
|
||||
displayName: credential.displayName,
|
||||
providerId: credential.providerId,
|
||||
accountId: credential.accountId,
|
||||
accountProviderId: account.providerId,
|
||||
accountScope: account.scope,
|
||||
accountUpdatedAt: account.updatedAt,
|
||||
})
|
||||
.from(credential)
|
||||
.leftJoin(account, eq(credential.accountId, account.id))
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (platformCredential) {
|
||||
if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
|
||||
return NextResponse.json({ credentials: [] }, { status: 200 })
|
||||
}
|
||||
|
||||
if (workflowId) {
|
||||
if (!effectiveWorkspaceId || platformCredential.workspaceId !== effectiveWorkspaceId) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
} else {
|
||||
const [membership] = await db
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, platformCredential.id),
|
||||
eq(credentialMember.userId, requesterUserId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
}
|
||||
|
||||
if (!platformCredential.accountProviderId || !platformCredential.accountUpdatedAt) {
|
||||
return NextResponse.json({ credentials: [] }, { status: 200 })
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
credentials: [
|
||||
toCredentialResponse(
|
||||
platformCredential.id,
|
||||
platformCredential.displayName,
|
||||
platformCredential.accountProviderId,
|
||||
platformCredential.accountUpdatedAt,
|
||||
platformCredential.accountScope
|
||||
),
|
||||
],
|
||||
},
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (effectiveWorkspaceId && providerParam) {
|
||||
await syncWorkspaceOAuthCredentialsForUser({
|
||||
workspaceId: effectiveWorkspaceId,
|
||||
userId: requesterUserId,
|
||||
})
|
||||
|
||||
const credentialsData = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
displayName: credential.displayName,
|
||||
providerId: account.providerId,
|
||||
scope: account.scope,
|
||||
updatedAt: account.updatedAt,
|
||||
})
|
||||
.from(credential)
|
||||
.innerJoin(account, eq(credential.accountId, account.id))
|
||||
.innerJoin(
|
||||
credentialMember,
|
||||
and(
|
||||
eq(credentialMember.credentialId, credential.id),
|
||||
eq(credentialMember.userId, requesterUserId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, effectiveWorkspaceId),
|
||||
eq(credential.type, 'oauth'),
|
||||
eq(account.providerId, providerParam)
|
||||
)
|
||||
)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
credentials: credentialsData.map((row) =>
|
||||
toCredentialResponse(row.id, row.displayName, row.providerId, row.updatedAt, row.scope)
|
||||
),
|
||||
},
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
|
||||
if (credentialId && workflowId) {
|
||||
// When both workflowId and credentialId are provided, fetch by ID only.
|
||||
// Workspace authorization above already proves access; the credential
|
||||
// may belong to another workspace member (e.g. for display name resolution).
|
||||
accountsData = await db.select().from(account).where(eq(account.id, credentialId))
|
||||
} else if (credentialId) {
|
||||
accountsData = await db
|
||||
.select()
|
||||
.from(account)
|
||||
.where(and(eq(account.userId, requesterUserId), eq(account.id, credentialId)))
|
||||
.where(and(eq(account.userId, effectiveUserId), eq(account.id, credentialId)))
|
||||
} else {
|
||||
// Fetch all credentials for provider and effective user
|
||||
accountsData = await db
|
||||
.select()
|
||||
.from(account)
|
||||
.where(and(eq(account.userId, requesterUserId), eq(account.providerId, providerParam!)))
|
||||
.where(and(eq(account.userId, effectiveUserId), eq(account.providerId, providerParam!)))
|
||||
}
|
||||
|
||||
// Transform accounts into credentials
|
||||
const credentials = await Promise.all(
|
||||
accountsData.map(async (acc) => {
|
||||
const displayName = await getFallbackDisplayName(requestId, providerParam, acc)
|
||||
return toCredentialResponse(acc.id, displayName, acc.providerId, acc.updatedAt, acc.scope)
|
||||
// Extract the feature type from providerId (e.g., 'google-default' -> 'default')
|
||||
const [_, featureType = 'default'] = acc.providerId.split('-')
|
||||
|
||||
// Try multiple methods to get a user-friendly display name
|
||||
let displayName = ''
|
||||
|
||||
// Method 1: Try to extract email from ID token (works for Google, etc.)
|
||||
if (acc.idToken) {
|
||||
try {
|
||||
const decoded = jwtDecode<GoogleIdToken>(acc.idToken)
|
||||
if (decoded.email) {
|
||||
displayName = decoded.email
|
||||
} else if (decoded.name) {
|
||||
displayName = decoded.name
|
||||
}
|
||||
} catch (_error) {
|
||||
logger.warn(`[${requestId}] Error decoding ID token`, {
|
||||
accountId: acc.id,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Method 2: For GitHub, the accountId might be the username
|
||||
if (!displayName && baseProvider === 'github') {
|
||||
displayName = `${acc.accountId} (GitHub)`
|
||||
}
|
||||
|
||||
// Method 3: Try to get the user's email from our database
|
||||
if (!displayName) {
|
||||
try {
|
||||
const userRecord = await db
|
||||
.select({ email: user.email })
|
||||
.from(user)
|
||||
.where(eq(user.id, acc.userId))
|
||||
.limit(1)
|
||||
|
||||
if (userRecord.length > 0) {
|
||||
displayName = userRecord[0].email
|
||||
}
|
||||
} catch (_error) {
|
||||
logger.warn(`[${requestId}] Error fetching user email`, {
|
||||
userId: acc.userId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: Use accountId with provider type as context
|
||||
if (!displayName) {
|
||||
displayName = `${acc.accountId} (${baseProvider})`
|
||||
}
|
||||
|
||||
const storedScope = acc.scope?.trim()
|
||||
const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
|
||||
const scopeEvaluation = evaluateScopeCoverage(acc.providerId, grantedScopes)
|
||||
|
||||
return {
|
||||
id: acc.id,
|
||||
name: displayName,
|
||||
provider: acc.providerId,
|
||||
lastUsed: acc.updatedAt.toISOString(),
|
||||
isDefault: featureType === 'default',
|
||||
scopes: scopeEvaluation.grantedScopes,
|
||||
canonicalScopes: scopeEvaluation.canonicalScopes,
|
||||
missingScopes: scopeEvaluation.missingScopes,
|
||||
extraScopes: scopeEvaluation.extraScopes,
|
||||
requiresReauthorization: scopeEvaluation.requiresReauthorization,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
|
||||
@@ -15,7 +15,6 @@ const logger = createLogger('OAuthDisconnectAPI')
|
||||
const disconnectSchema = z.object({
|
||||
provider: z.string({ required_error: 'Provider is required' }).min(1, 'Provider is required'),
|
||||
providerId: z.string().optional(),
|
||||
accountId: z.string().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
@@ -51,20 +50,15 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const { provider, providerId, accountId } = parseResult.data
|
||||
const { provider, providerId } = parseResult.data
|
||||
|
||||
logger.info(`[${requestId}] Processing OAuth disconnect request`, {
|
||||
provider,
|
||||
hasProviderId: !!providerId,
|
||||
})
|
||||
|
||||
// If a specific account row ID is provided, delete that exact account
|
||||
if (accountId) {
|
||||
await db
|
||||
.delete(account)
|
||||
.where(and(eq(account.userId, session.user.id), eq(account.id, accountId)))
|
||||
} else if (providerId) {
|
||||
// If a specific providerId is provided, delete accounts for that provider ID
|
||||
// If a specific providerId is provided, delete only that account
|
||||
if (providerId) {
|
||||
await db
|
||||
.delete(account)
|
||||
.where(and(eq(account.userId, session.user.id), eq(account.providerId, providerId)))
|
||||
|
||||
@@ -38,18 +38,13 @@ export async function GET(request: NextRequest) {
|
||||
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
|
||||
}
|
||||
|
||||
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
|
||||
const credential = await getCredential(
|
||||
requestId,
|
||||
resolvedCredentialId,
|
||||
authz.credentialOwnerUserId
|
||||
)
|
||||
const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
|
||||
if (!credential) {
|
||||
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const accessToken = await refreshAccessTokenIfNeeded(
|
||||
resolvedCredentialId,
|
||||
credentialId,
|
||||
authz.credentialOwnerUserId,
|
||||
requestId
|
||||
)
|
||||
|
||||
@@ -37,19 +37,14 @@ export async function GET(request: NextRequest) {
|
||||
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
|
||||
}
|
||||
|
||||
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
|
||||
const credential = await getCredential(
|
||||
requestId,
|
||||
resolvedCredentialId,
|
||||
authz.credentialOwnerUserId
|
||||
)
|
||||
const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
|
||||
if (!credential) {
|
||||
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Refresh access token if needed using the utility function
|
||||
const accessToken = await refreshAccessTokenIfNeeded(
|
||||
resolvedCredentialId,
|
||||
credentialId,
|
||||
authz.credentialOwnerUserId,
|
||||
requestId
|
||||
)
|
||||
|
||||
@@ -119,23 +119,14 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
|
||||
}
|
||||
|
||||
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
|
||||
const credential = await getCredential(
|
||||
requestId,
|
||||
resolvedCredentialId,
|
||||
authz.credentialOwnerUserId
|
||||
)
|
||||
const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
|
||||
|
||||
if (!credential) {
|
||||
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
try {
|
||||
const { accessToken } = await refreshTokenIfNeeded(
|
||||
requestId,
|
||||
credential,
|
||||
resolvedCredentialId
|
||||
)
|
||||
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
|
||||
|
||||
let instanceUrl: string | undefined
|
||||
if (credential.providerId === 'salesforce' && credential.scope) {
|
||||
@@ -195,20 +186,13 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
const { credentialId } = parseResult.data
|
||||
|
||||
const authz = await authorizeCredentialUse(request, {
|
||||
credentialId,
|
||||
requireWorkflowIdForInternal: false,
|
||||
})
|
||||
if (!authz.ok || authz.authType !== 'session' || !authz.credentialOwnerUserId) {
|
||||
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
|
||||
// For GET requests, we only support session-based authentication
|
||||
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || auth.authType !== 'session' || !auth.userId) {
|
||||
return NextResponse.json({ error: 'User not authenticated' }, { status: 401 })
|
||||
}
|
||||
|
||||
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
|
||||
const credential = await getCredential(
|
||||
requestId,
|
||||
resolvedCredentialId,
|
||||
authz.credentialOwnerUserId
|
||||
)
|
||||
const credential = await getCredential(requestId, credentialId, auth.userId)
|
||||
|
||||
if (!credential) {
|
||||
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
|
||||
@@ -220,11 +204,7 @@ export async function GET(request: NextRequest) {
|
||||
}
|
||||
|
||||
try {
|
||||
const { accessToken } = await refreshTokenIfNeeded(
|
||||
requestId,
|
||||
credential,
|
||||
resolvedCredentialId
|
||||
)
|
||||
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
|
||||
|
||||
// For Salesforce, extract instanceUrl from the scope field
|
||||
let instanceUrl: string | undefined
|
||||
|
||||
@@ -50,7 +50,7 @@ describe('OAuth Utils', () => {
|
||||
describe('getCredential', () => {
|
||||
it('should return credential when found', async () => {
|
||||
const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
|
||||
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
|
||||
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
|
||||
|
||||
const credential = await getCredential('request-id', 'credential-id', 'test-user-id')
|
||||
|
||||
@@ -59,8 +59,7 @@ describe('OAuth Utils', () => {
|
||||
expect(mockDbTyped.where).toHaveBeenCalled()
|
||||
expect(mockDbTyped.limit).toHaveBeenCalledWith(1)
|
||||
|
||||
expect(credential).toMatchObject(mockCredential)
|
||||
expect(credential).toMatchObject({ resolvedCredentialId: 'credential-id' })
|
||||
expect(credential).toEqual(mockCredential)
|
||||
})
|
||||
|
||||
it('should return undefined when credential is not found', async () => {
|
||||
@@ -153,7 +152,7 @@ describe('OAuth Utils', () => {
|
||||
providerId: 'google',
|
||||
userId: 'test-user-id',
|
||||
}
|
||||
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
|
||||
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
|
||||
|
||||
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
|
||||
|
||||
@@ -170,7 +169,7 @@ describe('OAuth Utils', () => {
|
||||
providerId: 'google',
|
||||
userId: 'test-user-id',
|
||||
}
|
||||
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
|
||||
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
|
||||
|
||||
mockRefreshOAuthToken.mockResolvedValueOnce({
|
||||
accessToken: 'new-token',
|
||||
@@ -203,7 +202,7 @@ describe('OAuth Utils', () => {
|
||||
providerId: 'google',
|
||||
userId: 'test-user-id',
|
||||
}
|
||||
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
|
||||
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
|
||||
|
||||
mockRefreshOAuthToken.mockResolvedValueOnce(null)
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialSetMember } from '@sim/db/schema'
|
||||
import { account, credentialSetMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, inArray } from 'drizzle-orm'
|
||||
import { refreshOAuthToken } from '@/lib/oauth'
|
||||
@@ -25,28 +25,6 @@ interface AccountInsertData {
|
||||
accessTokenExpiresAt?: Date
|
||||
}
|
||||
|
||||
async function resolveOAuthAccountId(
|
||||
credentialId: string
|
||||
): Promise<{ accountId: string; usedCredentialTable: boolean } | null> {
|
||||
const [credentialRow] = await db
|
||||
.select({
|
||||
type: credential.type,
|
||||
accountId: credential.accountId,
|
||||
})
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (credentialRow) {
|
||||
if (credentialRow.type !== 'oauth' || !credentialRow.accountId) {
|
||||
return null
|
||||
}
|
||||
return { accountId: credentialRow.accountId, usedCredentialTable: true }
|
||||
}
|
||||
|
||||
return { accountId: credentialId, usedCredentialTable: false }
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely inserts an account record, handling duplicate constraint violations gracefully.
|
||||
* If a duplicate is detected (unique constraint violation), logs a warning and returns success.
|
||||
@@ -74,16 +52,10 @@ export async function safeAccountInsert(
|
||||
* Get a credential by ID and verify it belongs to the user
|
||||
*/
|
||||
export async function getCredential(requestId: string, credentialId: string, userId: string) {
|
||||
const resolved = await resolveOAuthAccountId(credentialId)
|
||||
if (!resolved) {
|
||||
logger.warn(`[${requestId}] Credential is not an OAuth credential`)
|
||||
return undefined
|
||||
}
|
||||
|
||||
const credentials = await db
|
||||
.select()
|
||||
.from(account)
|
||||
.where(and(eq(account.id, resolved.accountId), eq(account.userId, userId)))
|
||||
.where(and(eq(account.id, credentialId), eq(account.userId, userId)))
|
||||
.limit(1)
|
||||
|
||||
if (!credentials.length) {
|
||||
@@ -91,10 +63,7 @@ export async function getCredential(requestId: string, credentialId: string, use
|
||||
return undefined
|
||||
}
|
||||
|
||||
return {
|
||||
...credentials[0],
|
||||
resolvedCredentialId: resolved.accountId,
|
||||
}
|
||||
return credentials[0]
|
||||
}
|
||||
|
||||
export async function getOAuthToken(userId: string, providerId: string): Promise<string | null> {
|
||||
@@ -269,9 +238,7 @@ export async function refreshAccessTokenIfNeeded(
|
||||
}
|
||||
|
||||
// Update the token in the database
|
||||
const resolvedCredentialId =
|
||||
(credential as { resolvedCredentialId?: string }).resolvedCredentialId ?? credentialId
|
||||
await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))
|
||||
await db.update(account).set(updateData).where(eq(account.id, credentialId))
|
||||
|
||||
logger.info(`[${requestId}] Successfully refreshed access token for credential`)
|
||||
return refreshedToken.accessToken
|
||||
@@ -307,8 +274,6 @@ export async function refreshTokenIfNeeded(
|
||||
credential: any,
|
||||
credentialId: string
|
||||
): Promise<{ accessToken: string; refreshed: boolean }> {
|
||||
const resolvedCredentialId = credential.resolvedCredentialId ?? credentialId
|
||||
|
||||
// Decide if we should refresh: token missing OR expired
|
||||
const accessTokenExpiresAt = credential.accessTokenExpiresAt
|
||||
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
|
||||
@@ -369,7 +334,7 @@ export async function refreshTokenIfNeeded(
|
||||
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))
|
||||
await db.update(account).set(updateData).where(eq(account.id, credentialId))
|
||||
|
||||
logger.info(`[${requestId}] Successfully refreshed access token`)
|
||||
return { accessToken: refreshedToken, refreshed: true }
|
||||
@@ -378,7 +343,7 @@ export async function refreshTokenIfNeeded(
|
||||
`[${requestId}] Refresh attempt failed, checking if another concurrent request succeeded`
|
||||
)
|
||||
|
||||
const freshCredential = await getCredential(requestId, resolvedCredentialId, credential.userId)
|
||||
const freshCredential = await getCredential(requestId, credentialId, credential.userId)
|
||||
if (freshCredential?.accessToken) {
|
||||
const freshExpiresAt = freshCredential.accessTokenExpiresAt
|
||||
const stillValid = !freshExpiresAt || freshExpiresAt > new Date()
|
||||
|
||||
@@ -48,21 +48,16 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
const shopData = await shopResponse.json()
|
||||
const shopInfo = shopData.shop
|
||||
const stableAccountId = shopInfo.id?.toString() || shopDomain
|
||||
|
||||
const existing = await db.query.account.findFirst({
|
||||
where: and(
|
||||
eq(account.userId, session.user.id),
|
||||
eq(account.providerId, 'shopify'),
|
||||
eq(account.accountId, stableAccountId)
|
||||
),
|
||||
where: and(eq(account.userId, session.user.id), eq(account.providerId, 'shopify')),
|
||||
})
|
||||
|
||||
const now = new Date()
|
||||
|
||||
const accountData = {
|
||||
accessToken: accessToken,
|
||||
accountId: stableAccountId,
|
||||
accountId: shopInfo.id?.toString() || shopDomain,
|
||||
scope: scope || '',
|
||||
updatedAt: now,
|
||||
idToken: shopDomain,
|
||||
|
||||
@@ -52,11 +52,7 @@ export async function POST(request: NextRequest) {
|
||||
const trelloUser = await userResponse.json()
|
||||
|
||||
const existing = await db.query.account.findFirst({
|
||||
where: and(
|
||||
eq(account.userId, session.user.id),
|
||||
eq(account.providerId, 'trello'),
|
||||
eq(account.accountId, trelloUser.id)
|
||||
),
|
||||
where: and(eq(account.userId, session.user.id), eq(account.providerId, 'trello')),
|
||||
})
|
||||
|
||||
const now = new Date()
|
||||
|
||||
@@ -1,145 +1,81 @@
|
||||
import { db } from '@sim/db'
|
||||
import { settings } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const logger = createLogger('CopilotAutoAllowedToolsAPI')
|
||||
|
||||
/**
|
||||
* GET - Fetch user's auto-allowed integration tools
|
||||
*/
|
||||
export async function GET() {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
const [userSettings] = await db
|
||||
.select()
|
||||
.from(settings)
|
||||
.where(eq(settings.userId, userId))
|
||||
.limit(1)
|
||||
|
||||
if (userSettings) {
|
||||
const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
|
||||
return NextResponse.json({ autoAllowedTools })
|
||||
}
|
||||
|
||||
await db.insert(settings).values({
|
||||
id: userId,
|
||||
userId,
|
||||
copilotAutoAllowedTools: [],
|
||||
})
|
||||
|
||||
return NextResponse.json({ autoAllowedTools: [] })
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch auto-allowed tools', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
function copilotHeaders(): HeadersInit {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
if (env.COPILOT_API_KEY) {
|
||||
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||
}
|
||||
return headers
|
||||
}
|
||||
|
||||
/**
|
||||
* POST - Add a tool to the auto-allowed list
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const userId = session.user.id
|
||||
const body = await request.json()
|
||||
|
||||
if (!body.toolId || typeof body.toolId !== 'string') {
|
||||
return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
|
||||
}
|
||||
|
||||
const toolId = body.toolId
|
||||
|
||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
||||
|
||||
if (existing) {
|
||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
||||
|
||||
if (!currentTools.includes(toolId)) {
|
||||
const updatedTools = [...currentTools, toolId]
|
||||
await db
|
||||
.update(settings)
|
||||
.set({
|
||||
copilotAutoAllowedTools: updatedTools,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(settings.userId, userId))
|
||||
|
||||
logger.info('Added tool to auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
||||
}
|
||||
|
||||
return NextResponse.json({ success: true, autoAllowedTools: currentTools })
|
||||
}
|
||||
|
||||
await db.insert(settings).values({
|
||||
id: userId,
|
||||
userId,
|
||||
copilotAutoAllowedTools: [toolId],
|
||||
})
|
||||
|
||||
logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
|
||||
} catch (error) {
|
||||
logger.error('Failed to add auto-allowed tool', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE - Remove a tool from the auto-allowed list
|
||||
*/
|
||||
export async function DELETE(request: NextRequest) {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const toolIdFromQuery = new URL(request.url).searchParams.get('toolId') || undefined
|
||||
const toolIdFromBody = await request
|
||||
.json()
|
||||
.then((body) => (typeof body?.toolId === 'string' ? body.toolId : undefined))
|
||||
.catch(() => undefined)
|
||||
const toolId = toolIdFromBody || toolIdFromQuery
|
||||
if (!toolId) {
|
||||
return NextResponse.json({ error: 'toolId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||
method: 'DELETE',
|
||||
headers: copilotHeaders(),
|
||||
body: JSON.stringify({
|
||||
userId,
|
||||
toolId,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
const payload = await res.json().catch(() => ({}))
|
||||
if (!res.ok) {
|
||||
logger.warn('Failed to remove auto-allowed tool via copilot backend', {
|
||||
status: res.status,
|
||||
userId,
|
||||
toolId,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: payload?.error || 'Failed to remove auto-allowed tool',
|
||||
autoAllowedTools: [],
|
||||
},
|
||||
{ status: res.status }
|
||||
)
|
||||
}
|
||||
|
||||
const userId = session.user.id
|
||||
const { searchParams } = new URL(request.url)
|
||||
const toolId = searchParams.get('toolId')
|
||||
|
||||
if (!toolId) {
|
||||
return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
||||
|
||||
if (existing) {
|
||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
||||
const updatedTools = currentTools.filter((t) => t !== toolId)
|
||||
|
||||
await db
|
||||
.update(settings)
|
||||
.set({
|
||||
copilotAutoAllowedTools: updatedTools,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(settings.userId, userId))
|
||||
|
||||
logger.info('Removed tool from auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
||||
}
|
||||
|
||||
return NextResponse.json({ success: true, autoAllowedTools: [] })
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
autoAllowedTools: Array.isArray(payload?.autoAllowedTools) ? payload.autoAllowedTools : [],
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to remove auto-allowed tool', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
logger.error('Error removing auto-allowed tool', {
|
||||
userId,
|
||||
toolId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Failed to remove auto-allowed tool',
|
||||
autoAllowedTools: [],
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -113,6 +113,7 @@ const ChatMessageSchema = z.object({
|
||||
workflowId: z.string().optional(),
|
||||
knowledgeId: z.string().optional(),
|
||||
blockId: z.string().optional(),
|
||||
blockIds: z.array(z.string()).optional(),
|
||||
templateId: z.string().optional(),
|
||||
executionId: z.string().optional(),
|
||||
// For workflow_block, provide both workflowId and blockId
|
||||
@@ -159,6 +160,20 @@ export async function POST(req: NextRequest) {
|
||||
commands,
|
||||
} = ChatMessageSchema.parse(body)
|
||||
|
||||
const normalizedContexts = Array.isArray(contexts)
|
||||
? contexts.map((ctx) => {
|
||||
if (ctx.kind !== 'blocks') return ctx
|
||||
if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
|
||||
if (ctx.blockId) {
|
||||
return {
|
||||
...ctx,
|
||||
blockIds: [ctx.blockId],
|
||||
}
|
||||
}
|
||||
return ctx
|
||||
})
|
||||
: contexts
|
||||
|
||||
// Resolve workflowId - if not provided, use first workflow or find by name
|
||||
const resolved = await resolveWorkflowIdForUser(
|
||||
authenticatedUserId,
|
||||
@@ -176,10 +191,10 @@ export async function POST(req: NextRequest) {
|
||||
const userMessageIdToUse = userMessageId || crypto.randomUUID()
|
||||
try {
|
||||
logger.info(`[${tracker.requestId}] Received chat POST`, {
|
||||
hasContexts: Array.isArray(contexts),
|
||||
contextsCount: Array.isArray(contexts) ? contexts.length : 0,
|
||||
contextsPreview: Array.isArray(contexts)
|
||||
? contexts.map((c: any) => ({
|
||||
hasContexts: Array.isArray(normalizedContexts),
|
||||
contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
|
||||
contextsPreview: Array.isArray(normalizedContexts)
|
||||
? normalizedContexts.map((c: any) => ({
|
||||
kind: c?.kind,
|
||||
chatId: c?.chatId,
|
||||
workflowId: c?.workflowId,
|
||||
@@ -191,17 +206,25 @@ export async function POST(req: NextRequest) {
|
||||
} catch {}
|
||||
// Preprocess contexts server-side
|
||||
let agentContexts: Array<{ type: string; content: string }> = []
|
||||
if (Array.isArray(contexts) && contexts.length > 0) {
|
||||
if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
|
||||
try {
|
||||
const { processContextsServer } = await import('@/lib/copilot/process-contents')
|
||||
const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
|
||||
const processed = await processContextsServer(
|
||||
normalizedContexts as any,
|
||||
authenticatedUserId,
|
||||
message
|
||||
)
|
||||
agentContexts = processed
|
||||
logger.info(`[${tracker.requestId}] Contexts processed for request`, {
|
||||
processedCount: agentContexts.length,
|
||||
kinds: agentContexts.map((c) => c.type),
|
||||
lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
|
||||
})
|
||||
if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
|
||||
if (
|
||||
Array.isArray(normalizedContexts) &&
|
||||
normalizedContexts.length > 0 &&
|
||||
agentContexts.length === 0
|
||||
) {
|
||||
logger.warn(
|
||||
`[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
|
||||
)
|
||||
@@ -246,11 +269,13 @@ export async function POST(req: NextRequest) {
|
||||
mode,
|
||||
model: selectedModel,
|
||||
provider,
|
||||
conversationId: effectiveConversationId,
|
||||
conversationHistory,
|
||||
contexts: agentContexts,
|
||||
fileAttachments,
|
||||
commands,
|
||||
chatId: actualChatId,
|
||||
prefetch,
|
||||
implicitFeedback,
|
||||
},
|
||||
{
|
||||
@@ -432,10 +457,15 @@ export async function POST(req: NextRequest) {
|
||||
content: message,
|
||||
timestamp: new Date().toISOString(),
|
||||
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
|
||||
...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
|
||||
...(Array.isArray(contexts) &&
|
||||
contexts.length > 0 && {
|
||||
contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
|
||||
...(Array.isArray(normalizedContexts) &&
|
||||
normalizedContexts.length > 0 && {
|
||||
contexts: normalizedContexts,
|
||||
}),
|
||||
...(Array.isArray(normalizedContexts) &&
|
||||
normalizedContexts.length > 0 && {
|
||||
contentBlocks: [
|
||||
{ type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
|
||||
],
|
||||
}),
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
|
||||
import {
|
||||
REDIS_TOOL_CALL_PREFIX,
|
||||
REDIS_TOOL_CALL_TTL_SECONDS,
|
||||
SIM_AGENT_API_URL,
|
||||
} from '@/lib/copilot/constants'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
@@ -10,6 +14,7 @@ import {
|
||||
createUnauthorizedResponse,
|
||||
type NotificationStatus,
|
||||
} from '@/lib/copilot/request-helpers'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
|
||||
const logger = createLogger('CopilotConfirmAPI')
|
||||
@@ -21,6 +26,8 @@ const ConfirmationSchema = z.object({
|
||||
errorMap: () => ({ message: 'Invalid notification status' }),
|
||||
}),
|
||||
message: z.string().optional(), // Optional message for background moves or additional context
|
||||
toolName: z.string().optional(),
|
||||
remember: z.boolean().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
@@ -57,6 +64,44 @@ async function updateToolCallStatus(
|
||||
}
|
||||
}
|
||||
|
||||
async function saveAutoAllowedToolPreference(userId: string, toolName: string): Promise<boolean> {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
if (env.COPILOT_API_KEY) {
|
||||
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify({
|
||||
userId,
|
||||
toolId: toolName,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
logger.warn('Failed to persist auto-allowed tool preference', {
|
||||
userId,
|
||||
toolName,
|
||||
status: response.status,
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('Error persisting auto-allowed tool preference', {
|
||||
userId,
|
||||
toolName,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/copilot/confirm
|
||||
* Update tool call status (Accept/Reject)
|
||||
@@ -74,7 +119,7 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { toolCallId, status, message } = ConfirmationSchema.parse(body)
|
||||
const { toolCallId, status, message, toolName, remember } = ConfirmationSchema.parse(body)
|
||||
|
||||
// Update the tool call status in Redis
|
||||
const updated = await updateToolCallStatus(toolCallId, status, message)
|
||||
@@ -90,14 +135,22 @@ export async function POST(req: NextRequest) {
|
||||
return createBadRequestResponse('Failed to update tool call status or tool call not found')
|
||||
}
|
||||
|
||||
const duration = tracker.getDuration()
|
||||
let rememberSaved = false
|
||||
if (status === 'accepted' && remember === true && toolName && authenticatedUserId) {
|
||||
rememberSaved = await saveAutoAllowedToolPreference(authenticatedUserId, toolName)
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
const response: Record<string, unknown> = {
|
||||
success: true,
|
||||
message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
|
||||
toolCallId,
|
||||
status,
|
||||
})
|
||||
}
|
||||
if (remember === true) {
|
||||
response.rememberSaved = rememberSaved
|
||||
}
|
||||
|
||||
return NextResponse.json(response)
|
||||
} catch (error) {
|
||||
const duration = tracker.getDuration()
|
||||
|
||||
|
||||
@@ -1,194 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { credential, credentialMember, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
|
||||
const logger = createLogger('CredentialMembersAPI')
|
||||
|
||||
interface RouteContext {
|
||||
params: Promise<{ id: string }>
|
||||
}
|
||||
|
||||
async function requireAdminMembership(credentialId: string, userId: string) {
|
||||
const [membership] = await db
|
||||
.select({ role: credentialMember.role, status: credentialMember.status })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership || membership.status !== 'active' || membership.role !== 'admin') {
|
||||
return null
|
||||
}
|
||||
return membership
|
||||
}
|
||||
|
||||
export async function GET(_request: NextRequest, context: RouteContext) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: credentialId } = await context.params
|
||||
|
||||
const [cred] = await db
|
||||
.select({ id: credential.id })
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (!cred) {
|
||||
return NextResponse.json({ members: [] }, { status: 200 })
|
||||
}
|
||||
|
||||
const members = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
userId: credentialMember.userId,
|
||||
role: credentialMember.role,
|
||||
status: credentialMember.status,
|
||||
joinedAt: credentialMember.joinedAt,
|
||||
userName: user.name,
|
||||
userEmail: user.email,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.innerJoin(user, eq(credentialMember.userId, user.id))
|
||||
.where(eq(credentialMember.credentialId, credentialId))
|
||||
|
||||
return NextResponse.json({ members })
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch credential members', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
const addMemberSchema = z.object({
|
||||
userId: z.string().min(1),
|
||||
role: z.enum(['admin', 'member']).default('member'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest, context: RouteContext) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: credentialId } = await context.params
|
||||
|
||||
const admin = await requireAdminMembership(credentialId, session.user.id)
|
||||
if (!admin) {
|
||||
return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const parsed = addMemberSchema.safeParse(body)
|
||||
if (!parsed.success) {
|
||||
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
|
||||
}
|
||||
|
||||
const { userId, role } = parsed.data
|
||||
const now = new Date()
|
||||
|
||||
const [existing] = await db
|
||||
.select({ id: credentialMember.id, status: credentialMember.status })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existing) {
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({ role, status: 'active', updatedAt: now })
|
||||
.where(eq(credentialMember.id, existing.id))
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
|
||||
await db.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId,
|
||||
role,
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true }, { status: 201 })
|
||||
} catch (error) {
|
||||
logger.error('Failed to add credential member', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(request: NextRequest, context: RouteContext) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id: credentialId } = await context.params
|
||||
const targetUserId = new URL(request.url).searchParams.get('userId')
|
||||
if (!targetUserId) {
|
||||
return NextResponse.json({ error: 'userId query parameter required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const admin = await requireAdminMembership(credentialId, session.user.id)
|
||||
if (!admin) {
|
||||
return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
|
||||
}
|
||||
|
||||
const [target] = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
role: credentialMember.role,
|
||||
status: credentialMember.status,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
eq(credentialMember.userId, targetUserId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!target) {
|
||||
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (target.role === 'admin') {
|
||||
const activeAdmins = await db
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
eq(credentialMember.role, 'admin'),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
|
||||
if (activeAdmins.length <= 1) {
|
||||
return NextResponse.json({ error: 'Cannot remove the last admin' }, { status: 400 })
|
||||
}
|
||||
}
|
||||
|
||||
await db.delete(credentialMember).where(eq(credentialMember.id, target.id))
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error) {
|
||||
logger.error('Failed to remove credential member', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
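
// A minimal sketch of how a client might call the two handlers above. The mount path is an
// assumption (this diff does not show the route's file path); the request shapes come from
// addMemberSchema and from the DELETE handler's `userId` query parameter.
async function addThenRemoveCredentialMember(credentialId: string, userId: string) {
  await fetch(`/api/credentials/${credentialId}/members`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ userId, role: 'member' }),
  })

  await fetch(`/api/credentials/${credentialId}/members?userId=${encodeURIComponent(userId)}`, {
    method: 'DELETE',
  })
}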
|
||||
@@ -1,234 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { credential, credentialMember, environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getCredentialActorContext } from '@/lib/credentials/access'
|
||||
import {
|
||||
syncPersonalEnvCredentialsForUser,
|
||||
syncWorkspaceEnvCredentials,
|
||||
} from '@/lib/credentials/environment'
|
||||
|
||||
const logger = createLogger('CredentialByIdAPI')
|
||||
|
||||
const updateCredentialSchema = z
|
||||
.object({
|
||||
displayName: z.string().trim().min(1).max(255).optional(),
|
||||
accountId: z.string().trim().min(1).optional(),
|
||||
})
|
||||
.strict()
|
||||
.refine((data) => Boolean(data.displayName || data.accountId), {
|
||||
message: 'At least one field must be provided',
|
||||
path: ['displayName'],
|
||||
})
|
||||
|
||||
async function getCredentialResponse(credentialId: string, userId: string) {
|
||||
const [row] = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
type: credential.type,
|
||||
displayName: credential.displayName,
|
||||
providerId: credential.providerId,
|
||||
accountId: credential.accountId,
|
||||
envKey: credential.envKey,
|
||||
envOwnerUserId: credential.envOwnerUserId,
|
||||
createdBy: credential.createdBy,
|
||||
createdAt: credential.createdAt,
|
||||
updatedAt: credential.updatedAt,
|
||||
role: credentialMember.role,
|
||||
status: credentialMember.status,
|
||||
})
|
||||
.from(credential)
|
||||
.innerJoin(
|
||||
credentialMember,
|
||||
and(eq(credentialMember.credentialId, credential.id), eq(credentialMember.userId, userId))
|
||||
)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const access = await getCredentialActorContext(id, session.user.id)
|
||||
if (!access.credential) {
|
||||
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
|
||||
}
|
||||
if (!access.hasWorkspaceAccess || !access.member) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const row = await getCredentialResponse(id, session.user.id)
|
||||
return NextResponse.json({ credential: row }, { status: 200 })
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch credential', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const parseResult = updateCredentialSchema.safeParse(await request.json())
|
||||
if (!parseResult.success) {
|
||||
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
|
||||
}
|
||||
|
||||
const access = await getCredentialActorContext(id, session.user.id)
|
||||
if (!access.credential) {
|
||||
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
|
||||
}
|
||||
if (!access.hasWorkspaceAccess || !access.isAdmin) {
|
||||
return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
|
||||
}
|
||||
|
||||
if (access.credential.type === 'oauth') {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
'OAuth credential editing is disabled. Connect an account and create or use its linked credential.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
'Environment credentials cannot be updated via this endpoint. Use the environment value editor in credentials settings.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error('Failed to update credential', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const access = await getCredentialActorContext(id, session.user.id)
|
||||
if (!access.credential) {
|
||||
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
|
||||
}
|
||||
if (!access.hasWorkspaceAccess || !access.isAdmin) {
|
||||
return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
|
||||
}
|
||||
|
||||
if (access.credential.type === 'env_personal' && access.credential.envKey) {
|
||||
const ownerUserId = access.credential.envOwnerUserId
|
||||
if (!ownerUserId) {
|
||||
return NextResponse.json({ error: 'Invalid personal secret owner' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [personalRow] = await db
|
||||
.select({ variables: environment.variables })
|
||||
.from(environment)
|
||||
.where(eq(environment.userId, ownerUserId))
|
||||
.limit(1)
|
||||
|
||||
const current = ((personalRow?.variables as Record<string, string> | null) ?? {}) as Record<
|
||||
string,
|
||||
string
|
||||
>
|
||||
if (access.credential.envKey in current) {
|
||||
delete current[access.credential.envKey]
|
||||
}
|
||||
|
||||
await db
|
||||
.insert(environment)
|
||||
.values({
|
||||
id: ownerUserId,
|
||||
userId: ownerUserId,
|
||||
variables: current,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: [environment.userId],
|
||||
set: { variables: current, updatedAt: new Date() },
|
||||
})
|
||||
|
||||
await syncPersonalEnvCredentialsForUser({
|
||||
userId: ownerUserId,
|
||||
envKeys: Object.keys(current),
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true }, { status: 200 })
|
||||
}
|
||||
|
||||
if (access.credential.type === 'env_workspace' && access.credential.envKey) {
|
||||
const [workspaceRow] = await db
|
||||
.select({
|
||||
id: workspaceEnvironment.id,
|
||||
createdAt: workspaceEnvironment.createdAt,
|
||||
variables: workspaceEnvironment.variables,
|
||||
})
|
||||
.from(workspaceEnvironment)
|
||||
.where(eq(workspaceEnvironment.workspaceId, access.credential.workspaceId))
|
||||
.limit(1)
|
||||
|
||||
const current = ((workspaceRow?.variables as Record<string, string> | null) ?? {}) as Record<
|
||||
string,
|
||||
string
|
||||
>
|
||||
if (access.credential.envKey in current) {
|
||||
delete current[access.credential.envKey]
|
||||
}
|
||||
|
||||
await db
|
||||
.insert(workspaceEnvironment)
|
||||
.values({
|
||||
id: workspaceRow?.id || crypto.randomUUID(),
|
||||
workspaceId: access.credential.workspaceId,
|
||||
variables: current,
|
||||
createdAt: workspaceRow?.createdAt || new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: [workspaceEnvironment.workspaceId],
|
||||
set: { variables: current, updatedAt: new Date() },
|
||||
})
|
||||
|
||||
await syncWorkspaceEnvCredentials({
|
||||
workspaceId: access.credential.workspaceId,
|
||||
envKeys: Object.keys(current),
|
||||
actingUserId: session.user.id,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true }, { status: 200 })
|
||||
}
|
||||
|
||||
await db.delete(credential).where(eq(credential.id, id))
|
||||
return NextResponse.json({ success: true }, { status: 200 })
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete credential', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,81 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
syncPersonalEnvCredentialsForUser,
|
||||
syncWorkspaceEnvCredentials,
|
||||
} from '@/lib/credentials/environment'
|
||||
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('CredentialsBootstrapAPI')
|
||||
|
||||
const bootstrapSchema = z.object({
|
||||
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
|
||||
})
|
||||
|
||||
/**
|
||||
* Ensures the current user's connected accounts and env vars are reflected as workspace credentials.
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const parseResult = bootstrapSchema.safeParse(await request.json())
|
||||
if (!parseResult.success) {
|
||||
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
|
||||
}
|
||||
|
||||
const { workspaceId } = parseResult.data
|
||||
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
|
||||
if (!workspaceAccess.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const [personalRow, workspaceRow] = await Promise.all([
|
||||
db
|
||||
.select({ variables: environment.variables })
|
||||
.from(environment)
|
||||
.where(eq(environment.userId, session.user.id))
|
||||
.limit(1),
|
||||
db
|
||||
.select({ variables: workspaceEnvironment.variables })
|
||||
.from(workspaceEnvironment)
|
||||
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
|
||||
.limit(1),
|
||||
])
|
||||
|
||||
const personalKeys = Object.keys((personalRow[0]?.variables as Record<string, string>) || {})
|
||||
const workspaceKeys = Object.keys((workspaceRow[0]?.variables as Record<string, string>) || {})
|
||||
|
||||
const [oauthSyncResult] = await Promise.all([
|
||||
syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id }),
|
||||
syncPersonalEnvCredentialsForUser({ userId: session.user.id, envKeys: personalKeys }),
|
||||
syncWorkspaceEnvCredentials({
|
||||
workspaceId,
|
||||
envKeys: workspaceKeys,
|
||||
actingUserId: session.user.id,
|
||||
}),
|
||||
])
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
synced: {
|
||||
oauthCreated: oauthSyncResult.createdCredentials,
|
||||
oauthMembershipsUpdated: oauthSyncResult.updatedMemberships,
|
||||
personalEnvKeys: personalKeys.length,
|
||||
workspaceEnvKeys: workspaceKeys.length,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to bootstrap workspace credentials', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { pendingCredentialDraft } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, lt } from 'drizzle-orm'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
|
||||
const logger = createLogger('CredentialDraftAPI')
|
||||
|
||||
const DRAFT_TTL_MS = 15 * 60 * 1000
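// i.e. 15 minutes: the POST handler below purges the user's expired drafts before saving a new one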
|
||||
|
||||
const createDraftSchema = z.object({
|
||||
workspaceId: z.string().min(1),
|
||||
providerId: z.string().min(1),
|
||||
displayName: z.string().min(1),
|
||||
})
|
||||
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const parsed = createDraftSchema.safeParse(body)
|
||||
if (!parsed.success) {
|
||||
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
|
||||
}
|
||||
|
||||
const { workspaceId, providerId, displayName } = parsed.data
|
||||
const userId = session.user.id
|
||||
const now = new Date()
|
||||
|
||||
await db
|
||||
.delete(pendingCredentialDraft)
|
||||
.where(
|
||||
and(eq(pendingCredentialDraft.userId, userId), lt(pendingCredentialDraft.expiresAt, now))
|
||||
)
|
||||
|
||||
await db
|
||||
.insert(pendingCredentialDraft)
|
||||
.values({
|
||||
id: crypto.randomUUID(),
|
||||
userId,
|
||||
workspaceId,
|
||||
providerId,
|
||||
displayName,
|
||||
expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
|
||||
createdAt: now,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: [
|
||||
pendingCredentialDraft.userId,
|
||||
pendingCredentialDraft.providerId,
|
||||
pendingCredentialDraft.workspaceId,
|
||||
],
|
||||
set: {
|
||||
displayName,
|
||||
expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
|
||||
createdAt: now,
|
||||
},
|
||||
})
|
||||
|
||||
logger.info('Credential draft saved', { userId, workspaceId, providerId, displayName })
|
||||
|
||||
return NextResponse.json({ success: true }, { status: 200 })
|
||||
} catch (error) {
|
||||
logger.error('Failed to save credential draft', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { credential, credentialMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
|
||||
const logger = createLogger('CredentialMembershipsAPI')
|
||||
|
||||
const leaveCredentialSchema = z.object({
|
||||
credentialId: z.string().min(1),
|
||||
})
|
||||
|
||||
export async function GET() {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const memberships = await db
|
||||
.select({
|
||||
membershipId: credentialMember.id,
|
||||
credentialId: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
type: credential.type,
|
||||
displayName: credential.displayName,
|
||||
providerId: credential.providerId,
|
||||
role: credentialMember.role,
|
||||
status: credentialMember.status,
|
||||
joinedAt: credentialMember.joinedAt,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.innerJoin(credential, eq(credentialMember.credentialId, credential.id))
|
||||
.where(eq(credentialMember.userId, session.user.id))
|
||||
|
||||
return NextResponse.json({ memberships }, { status: 200 })
|
||||
} catch (error) {
|
||||
logger.error('Failed to list credential memberships', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(request: NextRequest) {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const parseResult = leaveCredentialSchema.safeParse({
|
||||
credentialId: new URL(request.url).searchParams.get('credentialId'),
|
||||
})
|
||||
if (!parseResult.success) {
|
||||
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
|
||||
}
|
||||
|
||||
const { credentialId } = parseResult.data
|
||||
const [membership] = await db
|
||||
.select()
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
eq(credentialMember.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership) {
|
||||
return NextResponse.json({ error: 'Membership not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (membership.status !== 'active') {
|
||||
return NextResponse.json({ success: true }, { status: 200 })
|
||||
}
|
||||
|
||||
if (membership.role === 'admin') {
|
||||
const activeAdmins = await db
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
eq(credentialMember.role, 'admin'),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
|
||||
if (activeAdmins.length <= 1) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Cannot leave credential as the last active admin' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({
|
||||
status: 'revoked',
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(credentialMember.id, membership.id))
|
||||
|
||||
return NextResponse.json({ success: true }, { status: 200 })
|
||||
} catch (error) {
|
||||
logger.error('Failed to leave credential', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,468 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getWorkspaceMemberUserIds } from '@/lib/credentials/environment'
|
||||
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
|
||||
import { getServiceConfigByProviderId } from '@/lib/oauth'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
import { isValidEnvVarName } from '@/executor/constants'
|
||||
|
||||
const logger = createLogger('CredentialsAPI')
|
||||
|
||||
const credentialTypeSchema = z.enum(['oauth', 'env_workspace', 'env_personal'])
|
||||
|
||||
function normalizeEnvKeyInput(raw: string): string {
|
||||
const trimmed = raw.trim()
|
||||
const wrappedMatch = /^\{\{\s*([A-Za-z0-9_]+)\s*\}\}$/.exec(trimmed)
|
||||
return wrappedMatch ? wrappedMatch[1] : trimmed
|
||||
}
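
// Illustrative behaviour (the key names are assumed examples, not taken from this diff):
//   normalizeEnvKeyInput('{{ OPENAI_API_KEY }}') -> 'OPENAI_API_KEY'
//   normalizeEnvKeyInput('  MY_SECRET ')         -> 'MY_SECRET'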
|
||||
|
||||
const listCredentialsSchema = z.object({
|
||||
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
|
||||
type: credentialTypeSchema.optional(),
|
||||
providerId: z.string().optional(),
|
||||
})
|
||||
|
||||
const createCredentialSchema = z
|
||||
.object({
|
||||
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
|
||||
type: credentialTypeSchema,
|
||||
displayName: z.string().trim().min(1).max(255).optional(),
|
||||
providerId: z.string().trim().min(1).optional(),
|
||||
accountId: z.string().trim().min(1).optional(),
|
||||
envKey: z.string().trim().min(1).optional(),
|
||||
envOwnerUserId: z.string().trim().min(1).optional(),
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
if (data.type === 'oauth') {
|
||||
if (!data.accountId) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'accountId is required for oauth credentials',
|
||||
path: ['accountId'],
|
||||
})
|
||||
}
|
||||
if (!data.providerId) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'providerId is required for oauth credentials',
|
||||
path: ['providerId'],
|
||||
})
|
||||
}
|
||||
if (!data.displayName) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'displayName is required for oauth credentials',
|
||||
path: ['displayName'],
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const normalizedEnvKey = data.envKey ? normalizeEnvKeyInput(data.envKey) : ''
|
||||
if (!normalizedEnvKey) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'envKey is required for env credentials',
|
||||
path: ['envKey'],
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (!isValidEnvVarName(normalizedEnvKey)) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'envKey must contain only letters, numbers, and underscores',
|
||||
path: ['envKey'],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
interface ExistingCredentialSourceParams {
|
||||
workspaceId: string
|
||||
type: 'oauth' | 'env_workspace' | 'env_personal'
|
||||
accountId?: string | null
|
||||
envKey?: string | null
|
||||
envOwnerUserId?: string | null
|
||||
}
|
||||
|
||||
async function findExistingCredentialBySource(params: ExistingCredentialSourceParams) {
|
||||
const { workspaceId, type, accountId, envKey, envOwnerUserId } = params
|
||||
|
||||
if (type === 'oauth' && accountId) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'oauth'),
|
||||
eq(credential.accountId, accountId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
if (type === 'env_workspace' && envKey) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_workspace'),
|
||||
eq(credential.envKey, envKey)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
if (type === 'env_personal' && envKey && envOwnerUserId) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envKey, envKey),
|
||||
eq(credential.envOwnerUserId, envOwnerUserId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
return null
|
||||
}
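
// For reference, the lookup above implies one uniqueness key per credential type (tuple notation
// is shorthand for this comment only):
//   oauth         -> (workspaceId, accountId)
//   env_workspace -> (workspaceId, envKey)
//   env_personal  -> (workspaceId, envKey, envOwnerUserId)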
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const rawWorkspaceId = searchParams.get('workspaceId')
|
||||
const rawType = searchParams.get('type')
|
||||
const rawProviderId = searchParams.get('providerId')
|
||||
const parseResult = listCredentialsSchema.safeParse({
|
||||
workspaceId: rawWorkspaceId?.trim(),
|
||||
type: rawType?.trim() || undefined,
|
||||
providerId: rawProviderId?.trim() || undefined,
|
||||
})
|
||||
|
||||
if (!parseResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid credential list request`, {
|
||||
workspaceId: rawWorkspaceId,
|
||||
type: rawType,
|
||||
providerId: rawProviderId,
|
||||
errors: parseResult.error.errors,
|
||||
})
|
||||
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
|
||||
}
|
||||
|
||||
const { workspaceId, type, providerId } = parseResult.data
|
||||
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
|
||||
|
||||
if (!workspaceAccess.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
if (!type || type === 'oauth') {
|
||||
await syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id })
|
||||
}
|
||||
|
||||
const whereClauses = [
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credentialMember.userId, session.user.id),
|
||||
eq(credentialMember.status, 'active'),
|
||||
]
|
||||
|
||||
if (type) {
|
||||
whereClauses.push(eq(credential.type, type))
|
||||
}
|
||||
if (providerId) {
|
||||
whereClauses.push(eq(credential.providerId, providerId))
|
||||
}
|
||||
|
||||
const credentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
type: credential.type,
|
||||
displayName: credential.displayName,
|
||||
providerId: credential.providerId,
|
||||
accountId: credential.accountId,
|
||||
envKey: credential.envKey,
|
||||
envOwnerUserId: credential.envOwnerUserId,
|
||||
createdBy: credential.createdBy,
|
||||
createdAt: credential.createdAt,
|
||||
updatedAt: credential.updatedAt,
|
||||
role: credentialMember.role,
|
||||
})
|
||||
.from(credential)
|
||||
.innerJoin(
|
||||
credentialMember,
|
||||
and(
|
||||
eq(credentialMember.credentialId, credential.id),
|
||||
eq(credentialMember.userId, session.user.id),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.where(and(...whereClauses))
|
||||
|
||||
return NextResponse.json({ credentials })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to list credentials`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const parseResult = createCredentialSchema.safeParse(body)
|
||||
|
||||
if (!parseResult.success) {
|
||||
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
|
||||
}
|
||||
|
||||
const { workspaceId, type, displayName, providerId, accountId, envKey, envOwnerUserId } =
|
||||
parseResult.data
|
||||
|
||||
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
|
||||
if (!workspaceAccess.canWrite) {
|
||||
return NextResponse.json({ error: 'Write permission required' }, { status: 403 })
|
||||
}
|
||||
|
||||
let resolvedDisplayName = displayName?.trim() ?? ''
|
||||
let resolvedProviderId: string | null = providerId ?? null
|
||||
let resolvedAccountId: string | null = accountId ?? null
|
||||
const resolvedEnvKey: string | null = envKey ? normalizeEnvKeyInput(envKey) : null
|
||||
let resolvedEnvOwnerUserId: string | null = null
|
||||
|
||||
if (type === 'oauth') {
|
||||
const [accountRow] = await db
|
||||
.select({
|
||||
id: account.id,
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
accountId: account.accountId,
|
||||
})
|
||||
.from(account)
|
||||
.where(eq(account.id, accountId!))
|
||||
.limit(1)
|
||||
|
||||
if (!accountRow) {
|
||||
return NextResponse.json({ error: 'OAuth account not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (accountRow.userId !== session.user.id) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Only account owners can create oauth credentials for an account' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (providerId !== accountRow.providerId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'providerId does not match the selected OAuth account' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
if (!resolvedDisplayName) {
|
||||
resolvedDisplayName =
|
||||
getServiceConfigByProviderId(accountRow.providerId)?.name || accountRow.providerId
|
||||
}
|
||||
} else if (type === 'env_personal') {
|
||||
resolvedEnvOwnerUserId = envOwnerUserId ?? session.user.id
|
||||
if (resolvedEnvOwnerUserId !== session.user.id) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Only the current user can create personal env credentials for themselves' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
resolvedProviderId = null
|
||||
resolvedAccountId = null
|
||||
resolvedDisplayName = resolvedEnvKey || ''
|
||||
} else {
|
||||
resolvedProviderId = null
|
||||
resolvedAccountId = null
|
||||
resolvedEnvOwnerUserId = null
|
||||
resolvedDisplayName = resolvedEnvKey || ''
|
||||
}
|
||||
|
||||
if (!resolvedDisplayName) {
|
||||
return NextResponse.json({ error: 'Display name is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const existingCredential = await findExistingCredentialBySource({
|
||||
workspaceId,
|
||||
type,
|
||||
accountId: resolvedAccountId,
|
||||
envKey: resolvedEnvKey,
|
||||
envOwnerUserId: resolvedEnvOwnerUserId,
|
||||
})
|
||||
|
||||
if (existingCredential) {
|
||||
const [membership] = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
status: credentialMember.status,
|
||||
role: credentialMember.role,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, existingCredential.id),
|
||||
eq(credentialMember.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership || membership.status !== 'active') {
|
||||
return NextResponse.json(
|
||||
{ error: 'A credential with this source already exists in this workspace' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
type === 'oauth' &&
|
||||
membership.role === 'admin' &&
|
||||
resolvedDisplayName &&
|
||||
resolvedDisplayName !== existingCredential.displayName
|
||||
) {
|
||||
await db
|
||||
.update(credential)
|
||||
.set({
|
||||
displayName: resolvedDisplayName,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(credential.id, existingCredential.id))
|
||||
|
||||
const [updatedCredential] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(eq(credential.id, existingCredential.id))
|
||||
.limit(1)
|
||||
|
||||
return NextResponse.json(
|
||||
{ credential: updatedCredential ?? existingCredential },
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json({ credential: existingCredential }, { status: 200 })
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const credentialId = crypto.randomUUID()
|
||||
const [workspaceRow] = await db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
await db.transaction(async (tx) => {
|
||||
await tx.insert(credential).values({
|
||||
id: credentialId,
|
||||
workspaceId,
|
||||
type,
|
||||
displayName: resolvedDisplayName,
|
||||
providerId: resolvedProviderId,
|
||||
accountId: resolvedAccountId,
|
||||
envKey: resolvedEnvKey,
|
||||
envOwnerUserId: resolvedEnvOwnerUserId,
|
||||
createdBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
if (type === 'env_workspace' && workspaceRow?.ownerId) {
|
||||
const workspaceUserIds = await getWorkspaceMemberUserIds(workspaceId)
|
||||
if (workspaceUserIds.length > 0) {
|
||||
for (const memberUserId of workspaceUserIds) {
|
||||
await tx.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: memberUserId,
|
||||
role: memberUserId === workspaceRow.ownerId ? 'admin' : 'member',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
await tx.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: session.user.id,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
})
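
// Membership fan-out, summarizing the branches above: when an env_workspace credential is created
// and the workspace owner is known, every current workspace member gets a membership row (the
// owner as admin, everyone else as member); in all other cases the creating user gets a single
// admin membership.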
|
||||
|
||||
const [created] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
return NextResponse.json({ credential: created }, { status: 201 })
|
||||
} catch (error: any) {
|
||||
if (error?.code === '23505') {
|
||||
return NextResponse.json(
|
||||
{ error: 'A credential with this source already exists' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
if (error?.code === '23503') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid credential reference or membership target' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
if (error?.code === '23514') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Credential source data failed validation checks' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
logger.error(`[${requestId}] Credential create failure details`, {
|
||||
code: error?.code,
|
||||
detail: error?.detail,
|
||||
constraint: error?.constraint,
|
||||
table: error?.table,
|
||||
message: error?.message,
|
||||
})
|
||||
logger.error(`[${requestId}] Failed to create credential`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,6 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { syncPersonalEnvCredentialsForUser } from '@/lib/credentials/environment'
|
||||
import type { EnvironmentVariable } from '@/stores/settings/environment'
|
||||
|
||||
const logger = createLogger('EnvironmentAPI')
|
||||
@@ -54,11 +53,6 @@ export async function POST(req: NextRequest) {
|
||||
},
|
||||
})
|
||||
|
||||
await syncPersonalEnvCredentialsForUser({
|
||||
userId: session.user.id,
|
||||
envKeys: Object.keys(variables),
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (validationError) {
|
||||
if (validationError instanceof z.ZodError) {
|
||||
|
||||
@@ -191,3 +191,84 @@ export async function GET(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Delete a label from a page
|
||||
export async function DELETE(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
cloudId: providedCloudId,
|
||||
pageId,
|
||||
labelName,
|
||||
} = await request.json()
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!pageId) {
|
||||
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!labelName) {
|
||||
return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
||||
if (!pageIdValidation.isValid) {
|
||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const encodedLabel = encodeURIComponent(labelName.trim())
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to delete Confluence label (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
pageId,
|
||||
labelName,
|
||||
deleted: true,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error deleting Confluence label:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
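
// A minimal sketch of a client call to the DELETE handler above. The mount path and the
// `labelName` literal are assumptions for illustration; the body fields mirror what the handler reads.
async function removeConfluenceLabel(domain: string, accessToken: string, pageId: string) {
  const res = await fetch('/api/tools/confluence/label', {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ domain, accessToken, pageId, labelName: 'obsolete' }),
  })
  return res.json() // on success: { pageId, labelName, deleted: true }
}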
|
||||
|
||||
apps/sim/app/api/tools/confluence/pages-by-label/route.ts (new file, 103 lines)
@@ -0,0 +1,103 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

const logger = createLogger('ConfluencePagesByLabelAPI')

export const dynamic = 'force-dynamic'

export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
const labelId = searchParams.get('labelId')
|
||||
const providedCloudId = searchParams.get('cloudId')
|
||||
const limit = searchParams.get('limit') || '50'
|
||||
const cursor = searchParams.get('cursor')
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!labelId) {
|
||||
return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
|
||||
if (!labelIdValidation.isValid) {
|
||||
return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(Number(limit), 250)))
|
||||
if (cursor) {
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const pages = (data.results || []).map((page: any) => ({
|
||||
id: page.id,
|
||||
title: page.title,
|
||||
status: page.status ?? null,
|
||||
spaceId: page.spaceId ?? null,
|
||||
parentId: page.parentId ?? null,
|
||||
authorId: page.authorId ?? null,
|
||||
createdAt: page.createdAt ?? null,
|
||||
version: page.version ?? null,
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
pages,
|
||||
labelId,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error getting pages by label:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
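
// A minimal sketch of calling the new route above. The /api/tools/confluence/pages-by-label path
// follows from the file path shown above under Next.js app-router conventions; the query
// parameters are the ones the handler reads, and the concrete argument values are up to the caller.
async function fetchPagesByLabel(domain: string, accessToken: string, labelId: string) {
  const qs = new URLSearchParams({ domain, accessToken, labelId, limit: '50' })
  const res = await fetch(`/api/tools/confluence/pages-by-label?${qs.toString()}`)
  const data = await res.json()
  return data.pages // [{ id, title, status, spaceId, parentId, authorId, createdAt, version }]
}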
|
||||
apps/sim/app/api/tools/confluence/space-labels/route.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluenceSpaceLabelsAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
const spaceId = searchParams.get('spaceId')
|
||||
const providedCloudId = searchParams.get('cloudId')
|
||||
const limit = searchParams.get('limit') || '25'
|
||||
const cursor = searchParams.get('cursor')
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!spaceId) {
|
||||
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
|
||||
if (!spaceIdValidation.isValid) {
|
||||
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(Number(limit), 250)))
|
||||
if (cursor) {
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const labels = (data.results || []).map((label: any) => ({
|
||||
id: label.id,
|
||||
name: label.name,
|
||||
prefix: label.prefix || 'global',
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
labels,
|
||||
spaceId,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error listing space labels:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,12 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workspaceEnvironment } from '@sim/db/schema'
|
||||
import { environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment'
|
||||
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
|
||||
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('WorkspaceEnvironmentAPI')
|
||||
@@ -46,10 +44,44 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { workspaceDecrypted, personalDecrypted, conflicts } = await getPersonalAndWorkspaceEnv(
|
||||
userId,
|
||||
workspaceId
|
||||
)
|
||||
// Workspace env (encrypted)
|
||||
const wsEnvRow = await db
|
||||
.select()
|
||||
.from(workspaceEnvironment)
|
||||
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
const wsEncrypted: Record<string, string> = (wsEnvRow[0]?.variables as any) || {}
|
||||
|
||||
// Personal env (encrypted)
|
||||
const personalRow = await db
|
||||
.select()
|
||||
.from(environment)
|
||||
.where(eq(environment.userId, userId))
|
||||
.limit(1)
|
||||
|
||||
const personalEncrypted: Record<string, string> = (personalRow[0]?.variables as any) || {}
|
||||
|
||||
// Decrypt both for UI
|
||||
const decryptAll = async (src: Record<string, string>) => {
|
||||
const out: Record<string, string> = {}
|
||||
for (const [k, v] of Object.entries(src)) {
|
||||
try {
|
||||
const { decrypted } = await decryptSecret(v)
|
||||
out[k] = decrypted
|
||||
} catch {
|
||||
out[k] = ''
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
const [workspaceDecrypted, personalDecrypted] = await Promise.all([
|
||||
decryptAll(wsEncrypted),
|
||||
decryptAll(personalEncrypted),
|
||||
])
|
||||
|
||||
const conflicts = Object.keys(personalDecrypted).filter((k) => k in workspaceDecrypted)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
@@ -124,12 +156,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
set: { variables: merged, updatedAt: new Date() },
|
||||
})
|
||||
|
||||
await syncWorkspaceEnvCredentials({
|
||||
workspaceId,
|
||||
envKeys: Object.keys(merged),
|
||||
actingUserId: userId,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Workspace env PUT error`, error)
|
||||
@@ -196,12 +222,6 @@ export async function DELETE(
|
||||
set: { variables: current, updatedAt: new Date() },
|
||||
})
|
||||
|
||||
await syncWorkspaceEnvCredentials({
|
||||
workspaceId,
|
||||
envKeys: Object.keys(current),
|
||||
actingUserId: userId,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Workspace env DELETE error`, error)
|
||||
|
||||
@@ -13,9 +13,6 @@ export type CommandId =
|
||||
| 'goto-logs'
|
||||
| 'open-search'
|
||||
| 'run-workflow'
|
||||
| 'focus-copilot-tab'
|
||||
| 'focus-toolbar-tab'
|
||||
| 'focus-editor-tab'
|
||||
| 'clear-terminal-console'
|
||||
| 'focus-toolbar-search'
|
||||
| 'clear-notifications'
|
||||
@@ -75,21 +72,6 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
|
||||
shortcut: 'Mod+Enter',
|
||||
allowInEditable: false,
|
||||
},
|
||||
'focus-copilot-tab': {
|
||||
id: 'focus-copilot-tab',
|
||||
shortcut: 'C',
|
||||
allowInEditable: false,
|
||||
},
|
||||
'focus-toolbar-tab': {
|
||||
id: 'focus-toolbar-tab',
|
||||
shortcut: 'T',
|
||||
allowInEditable: false,
|
||||
},
|
||||
'focus-editor-tab': {
|
||||
id: 'focus-editor-tab',
|
||||
shortcut: 'E',
|
||||
allowInEditable: false,
|
||||
},
|
||||
'clear-terminal-console': {
|
||||
id: 'clear-terminal-console',
|
||||
shortcut: 'Mod+D',
|
||||
|
||||
@@ -14,6 +14,15 @@ const logger = createLogger('DiffControls')
|
||||
const NOTIFICATION_WIDTH = 240
|
||||
const NOTIFICATION_GAP = 16
|
||||
|
||||
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
||||
if (name === 'edit_workflow') return true
|
||||
if (name !== 'workflow_change') return false
|
||||
|
||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||
if (mode === 'apply') return true
|
||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||
}
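
// Illustrative results (the literals are assumed examples):
//   isWorkflowEditToolCall('edit_workflow')                            -> true
//   isWorkflowEditToolCall('workflow_change', { mode: 'apply' })       -> true
//   isWorkflowEditToolCall('workflow_change', { proposalId: 'p_123' }) -> true
//   isWorkflowEditToolCall('workflow_change', { mode: 'propose' })     -> false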
|
||||
|
||||
export const DiffControls = memo(function DiffControls() {
|
||||
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
|
||||
const isPanelResizing = usePanelStore((state) => state.isResizing)
|
||||
@@ -64,7 +73,7 @@ export const DiffControls = memo(function DiffControls() {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'edit_workflow') {
|
||||
if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
@@ -72,7 +81,9 @@ export const DiffControls = memo(function DiffControls() {
|
||||
}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
||||
const candidates = Object.values(toolCallsById).filter((t) =>
|
||||
isWorkflowEditToolCall(t.name, t.params)
|
||||
)
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('accepted', id)
|
||||
@@ -102,7 +113,7 @@ export const DiffControls = memo(function DiffControls() {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'edit_workflow') {
|
||||
if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
@@ -110,7 +121,9 @@ export const DiffControls = memo(function DiffControls() {
|
||||
}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
||||
const candidates = Object.values(toolCallsById).filter((t) =>
|
||||
isWorkflowEditToolCall(t.name, t.params)
|
||||
)
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('rejected', id)
|
||||
|
||||
@@ -47,6 +47,28 @@ interface ParsedTags {
|
||||
cleanContent: string
|
||||
}
|
||||
|
||||
function getToolCallParams(toolCall?: CopilotToolCall): Record<string, unknown> {
|
||||
const candidate = ((toolCall as any)?.parameters ||
|
||||
(toolCall as any)?.input ||
|
||||
(toolCall as any)?.params ||
|
||||
{}) as Record<string, unknown>
|
||||
return candidate && typeof candidate === 'object' ? candidate : {}
|
||||
}
|
||||
|
||||
function isWorkflowChangeApplyMode(toolCall?: CopilotToolCall): boolean {
|
||||
if (!toolCall || toolCall.name !== 'workflow_change') return false
|
||||
const params = getToolCallParams(toolCall)
|
||||
const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||
if (mode === 'apply') return true
|
||||
return typeof params.proposalId === 'string' && params.proposalId.length > 0
|
||||
}
|
||||
|
||||
function isWorkflowEditSummaryTool(toolCall?: CopilotToolCall): boolean {
|
||||
if (!toolCall) return false
|
||||
if (toolCall.name === 'edit_workflow') return true
|
||||
return isWorkflowChangeApplyMode(toolCall)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts plan steps from plan_respond tool calls in subagent blocks.
|
||||
* @param blocks - The subagent content blocks to search
|
||||
@@ -871,7 +893,10 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
||||
)
|
||||
}
|
||||
if (segment.type === 'tool' && segment.block.toolCall) {
|
||||
if (toolCall.name === 'edit' && segment.block.toolCall.name === 'edit_workflow') {
|
||||
if (
|
||||
(toolCall.name === 'edit' || toolCall.name === 'build') &&
|
||||
isWorkflowEditSummaryTool(segment.block.toolCall)
|
||||
) {
|
||||
return (
|
||||
<div key={`tool-${segment.block.toolCall.id || index}`}>
|
||||
<WorkflowEditSummary toolCall={segment.block.toolCall} />
|
||||
@@ -968,12 +993,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
}
|
||||
}, [blocks])
|
||||
|
||||
if (toolCall.name !== 'edit_workflow') {
|
||||
if (!isWorkflowEditSummaryTool(toolCall)) {
|
||||
return null
|
||||
}
|
||||
|
||||
const params =
|
||||
(toolCall as any).parameters || (toolCall as any).input || (toolCall as any).params || {}
|
||||
const params = getToolCallParams(toolCall)
|
||||
let operations = Array.isArray(params.operations) ? params.operations : []
|
||||
|
||||
if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
|
||||
@@ -1219,11 +1243,6 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
||||
)
|
||||
})
|
||||
|
||||
/** Checks if a tool is server-side executed (not a client tool) */
|
||||
function isIntegrationTool(toolName: string): boolean {
|
||||
return !TOOL_DISPLAY_REGISTRY[toolName]
|
||||
}
|
||||
|
||||
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||
if (!toolCall.name || toolCall.name === 'unknown_tool') {
|
||||
return false
|
||||
@@ -1233,59 +1252,96 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||
return false
|
||||
}
|
||||
|
||||
// Never show buttons for tools the user has marked as always-allowed
|
||||
if (useCopilotStore.getState().isToolAutoAllowed(toolCall.name)) {
|
||||
if (toolCall.ui?.showInterrupt !== true) {
|
||||
return false
|
||||
}
|
||||
|
||||
const hasInterrupt = !!TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt
|
||||
if (hasInterrupt) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Integration tools (user-installed) always require approval
|
||||
if (isIntegrationTool(toolCall.name)) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
return true
|
||||
}
|
||||
|
||||
const toolCallLogger = createLogger('CopilotToolCall')

async function sendToolDecision(
  toolCallId: string,
  status: 'accepted' | 'rejected' | 'background'
  status: 'accepted' | 'rejected' | 'background',
  options?: {
    toolName?: string
    remember?: boolean
  }
) {
  try {
    await fetch('/api/copilot/confirm', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ toolCallId, status }),
      body: JSON.stringify({
        toolCallId,
        status,
        ...(options?.toolName ? { toolName: options.toolName } : {}),
        ...(options?.remember ? { remember: true } : {}),
      }),
    })
  } catch (error) {
    toolCallLogger.warn('Failed to send tool decision', {
      toolCallId,
      status,
      remember: options?.remember === true,
      toolName: options?.toolName,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}
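The new options argument is purely additive: a call without it behaves exactly as before, while "Always Allow"-style actions pass `remember` so the server can persist the preference. A usage sketch, assuming a `toolCall` object of the shape used in this file:

```ts
// Approve this tool call and ask the server to remember the choice.
await sendToolDecision(toolCall.id, 'accepted', {
  toolName: toolCall.name,
  remember: true,
})

// Plain rejection, identical to the pre-change behavior.
await sendToolDecision(toolCall.id, 'rejected')
```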
async function removeAutoAllowedToolPreference(toolName: string): Promise<boolean> {
  try {
    const response = await fetch(`/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolName)}`, {
      method: 'DELETE',
    })
    return response.ok
  } catch (error) {
    toolCallLogger.warn('Failed to remove auto-allowed tool preference', {
      toolName,
      error: error instanceof Error ? error.message : String(error),
    })
    return false
  }
}
type ToolUiAction = NonNullable<NonNullable<CopilotToolCall['ui']>['actions']>[number]

function actionDecision(action: ToolUiAction): 'accepted' | 'rejected' | 'background' {
  const id = action.id.toLowerCase()
  if (id.includes('background')) return 'background'
  if (action.kind === 'reject') return 'rejected'
  return 'accepted'
}

function isClientRunCapability(toolCall: CopilotToolCall): boolean {
  if (toolCall.execution?.target === 'sim_client_capability') {
    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
  }
  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
}
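The id matching is deliberately loose so new server-defined actions keep working. A few illustrative inputs and the decision they resolve to; the first three ids are the defaults introduced later in this diff, the last one is hypothetical:

```ts
actionDecision({ id: 'allow_once', label: 'Allow', kind: 'accept' })                          // 'accepted'
actionDecision({ id: 'allow_always', label: 'Always Allow', kind: 'accept', remember: true }) // 'accepted'
actionDecision({ id: 'reject', label: 'Skip', kind: 'reject' })                               // 'rejected'
actionDecision({ id: 'move_to_background', label: 'Background', kind: 'accept' })             // 'background' (hypothetical id)
```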
async function handleRun(
  toolCall: CopilotToolCall,
  setToolCallState: any,
  onStateChange?: any,
  editedParams?: any
  editedParams?: any,
  options?: {
    remember?: boolean
  }
) {
  setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
  onStateChange?.('executing')
  await sendToolDecision(toolCall.id, 'accepted')
  await sendToolDecision(toolCall.id, 'accepted', {
    toolName: toolCall.name,
    remember: options?.remember === true,
  })

  // Client-executable run tools: execute on the client for real-time feedback
  // (block pulsing, console logs, stop button). The server defers execution
  // for these tools; the client reports back via mark-complete.
  if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)) {
  if (isClientRunCapability(toolCall)) {
    const params = editedParams || toolCall.params || {}
    executeRunToolOnClient(toolCall.id, toolCall.name, params)
  }
@@ -1298,6 +1354,9 @@ async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onSt
}

function getDisplayName(toolCall: CopilotToolCall): string {
  if (toolCall.ui?.phaseLabel) return toolCall.ui.phaseLabel
  if (toolCall.ui?.title) return `${getStateVerb(toolCall.state)} ${toolCall.ui.title}`

  const fromStore = (toolCall as any).display?.text
  if (fromStore) return fromStore
  const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
@@ -1342,53 +1401,37 @@ function RunSkipButtons({
|
||||
toolCall,
|
||||
onStateChange,
|
||||
editedParams,
|
||||
actions,
|
||||
}: {
|
||||
toolCall: CopilotToolCall
|
||||
onStateChange?: (state: any) => void
|
||||
editedParams?: any
|
||||
actions: ToolUiAction[]
|
||||
}) {
|
||||
const [isProcessing, setIsProcessing] = useState(false)
|
||||
const [buttonsHidden, setButtonsHidden] = useState(false)
|
||||
const actionInProgressRef = useRef(false)
|
||||
const { setToolCallState, addAutoAllowedTool } = useCopilotStore()
|
||||
const { setToolCallState } = useCopilotStore()
|
||||
|
||||
const onRun = async () => {
|
||||
const onAction = async (action: ToolUiAction) => {
|
||||
// Prevent race condition - check ref synchronously
|
||||
if (actionInProgressRef.current) return
|
||||
actionInProgressRef.current = true
|
||||
setIsProcessing(true)
|
||||
setButtonsHidden(true)
|
||||
try {
|
||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
||||
} finally {
|
||||
setIsProcessing(false)
|
||||
actionInProgressRef.current = false
|
||||
}
|
||||
}
|
||||
|
||||
const onAlwaysAllow = async () => {
|
||||
// Prevent race condition - check ref synchronously
|
||||
if (actionInProgressRef.current) return
|
||||
actionInProgressRef.current = true
|
||||
setIsProcessing(true)
|
||||
setButtonsHidden(true)
|
||||
try {
|
||||
await addAutoAllowedTool(toolCall.name)
|
||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
||||
} finally {
|
||||
setIsProcessing(false)
|
||||
actionInProgressRef.current = false
|
||||
}
|
||||
}
|
||||
|
||||
const onSkip = async () => {
|
||||
// Prevent race condition - check ref synchronously
|
||||
if (actionInProgressRef.current) return
|
||||
actionInProgressRef.current = true
|
||||
setIsProcessing(true)
|
||||
setButtonsHidden(true)
|
||||
try {
|
||||
await handleSkip(toolCall, setToolCallState, onStateChange)
|
||||
const decision = actionDecision(action)
|
||||
if (decision === 'accepted') {
|
||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams, {
|
||||
remember: action.remember === true,
|
||||
})
|
||||
} else if (decision === 'rejected') {
|
||||
await handleSkip(toolCall, setToolCallState, onStateChange)
|
||||
} else {
|
||||
setToolCallState(toolCall, ClientToolCallState.background)
|
||||
onStateChange?.('background')
|
||||
await sendToolDecision(toolCall.id, 'background')
|
||||
}
|
||||
} finally {
|
||||
setIsProcessing(false)
|
||||
actionInProgressRef.current = false
|
||||
@@ -1397,23 +1440,22 @@ function RunSkipButtons({
|
||||
|
||||
if (buttonsHidden) return null
|
||||
|
||||
// Show "Always Allow" for all tools that require confirmation
|
||||
const showAlwaysAllow = true
|
||||
|
||||
// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
|
||||
return (
|
||||
<div className='mt-[10px] flex gap-[6px]'>
|
||||
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
|
||||
{isProcessing ? 'Allowing...' : 'Allow'}
|
||||
</Button>
|
||||
{showAlwaysAllow && (
|
||||
<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
|
||||
{isProcessing ? 'Allowing...' : 'Always Allow'}
|
||||
</Button>
|
||||
)}
|
||||
<Button onClick={onSkip} disabled={isProcessing} variant='default'>
|
||||
Skip
|
||||
</Button>
|
||||
{actions.map((action, index) => {
|
||||
const variant =
|
||||
action.kind === 'reject' ? 'default' : action.remember ? 'default' : 'tertiary'
|
||||
return (
|
||||
<Button
|
||||
key={action.id}
|
||||
onClick={() => onAction(action)}
|
||||
disabled={isProcessing}
|
||||
variant={variant}
|
||||
>
|
||||
{isProcessing && index === 0 ? 'Working...' : action.label}
|
||||
</Button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1430,10 +1472,16 @@ export function ToolCall({
|
||||
const liveToolCall = useCopilotStore((s) =>
|
||||
effectiveId ? s.toolCallsById[effectiveId] : undefined
|
||||
)
|
||||
const toolCall = liveToolCall || toolCallProp
|
||||
|
||||
// Guard: nothing to render without a toolCall
|
||||
if (!toolCall) return null
|
||||
const rawToolCall = liveToolCall || toolCallProp
|
||||
const hasRealToolCall = !!rawToolCall
|
||||
const toolCall: CopilotToolCall =
|
||||
rawToolCall ||
|
||||
({
|
||||
id: effectiveId || '',
|
||||
name: '',
|
||||
state: ClientToolCallState.generating,
|
||||
params: {},
|
||||
} as CopilotToolCall)
|
||||
|
||||
const isExpandablePending =
|
||||
toolCall?.state === 'pending' &&
|
||||
@@ -1441,17 +1489,15 @@ export function ToolCall({
|
||||
|
||||
const [expanded, setExpanded] = useState(isExpandablePending)
|
||||
const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
|
||||
const [autoAllowRemovedForCall, setAutoAllowRemovedForCall] = useState(false)
|
||||
|
||||
// State for editable parameters
|
||||
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
|
||||
const [editedParams, setEditedParams] = useState(params)
|
||||
const paramsRef = useRef(params)
|
||||
|
||||
// Check if this integration tool is auto-allowed
|
||||
const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
|
||||
const isAutoAllowed = useCopilotStore(
|
||||
(s) => isIntegrationTool(toolCall.name) && s.isToolAutoAllowed(toolCall.name)
|
||||
)
|
||||
const { setToolCallState } = useCopilotStore()
|
||||
const isAutoAllowed = toolCall.ui?.autoAllowed === true && !autoAllowRemovedForCall
|
||||
|
||||
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
||||
useEffect(() => {
|
||||
@@ -1461,6 +1507,14 @@ export function ToolCall({
|
||||
}
|
||||
}, [params])
|
||||
|
||||
useEffect(() => {
|
||||
setAutoAllowRemovedForCall(false)
|
||||
setShowRemoveAutoAllow(false)
|
||||
}, [toolCall.id])
|
||||
|
||||
// Guard: nothing to render without a toolCall
|
||||
if (!hasRealToolCall) return null
|
||||
|
||||
// Skip rendering some internal tools
|
||||
if (
|
||||
toolCall.name === 'checkoff_todo' ||
|
||||
@@ -1472,7 +1526,9 @@ export function ToolCall({
|
||||
return null
|
||||
|
||||
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
||||
const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
||||
const isSubagentTool =
|
||||
toolCall.execution?.target === 'go_subagent' ||
|
||||
TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
||||
|
||||
// For ALL subagent tools, don't show anything until we have blocks with content
|
||||
if (isSubagentTool) {
|
||||
@@ -1499,28 +1555,6 @@ export function ToolCall({
|
||||
)
|
||||
}
|
||||
|
||||
// Get current mode from store to determine if we should render integration tools
|
||||
const mode = useCopilotStore.getState().mode
|
||||
|
||||
// Check if this is a completed/historical tool call (not pending/executing)
|
||||
// Use string comparison to handle both enum values and string values from DB
|
||||
const stateStr = String(toolCall.state)
|
||||
const isCompletedToolCall =
|
||||
stateStr === 'success' ||
|
||||
stateStr === 'error' ||
|
||||
stateStr === 'rejected' ||
|
||||
stateStr === 'aborted'
|
||||
|
||||
// Allow rendering if:
|
||||
// 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
|
||||
// 2. We're in build mode (integration tools are executed server-side), OR
|
||||
// 3. Tool call is already completed (historical - should always render)
|
||||
const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||
const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
|
||||
|
||||
if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
|
||||
return null
|
||||
}
|
||||
const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
|
||||
// Check if tool has params table config (meaning it's expandable)
|
||||
const hasParamsTable = !!toolUIConfig?.paramsTable
|
||||
@@ -1530,6 +1564,14 @@ export function ToolCall({
|
||||
toolCall.name === 'make_api_request' ||
|
||||
toolCall.name === 'set_global_workflow_variables'
|
||||
|
||||
const interruptActions =
|
||||
(toolCall.ui?.actions && toolCall.ui.actions.length > 0
|
||||
? toolCall.ui.actions
|
||||
: [
|
||||
{ id: 'allow_once', label: 'Allow', kind: 'accept' as const },
|
||||
{ id: 'allow_always', label: 'Always Allow', kind: 'accept' as const, remember: true },
|
||||
{ id: 'reject', label: 'Skip', kind: 'reject' as const },
|
||||
]) as ToolUiAction[]
|
||||
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
||||
|
||||
// Check UI config for secondary action - only show for current message tool calls
|
||||
@@ -1987,9 +2029,12 @@ export function ToolCall({
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
await removeAutoAllowedTool(toolCall.name)
|
||||
setShowRemoveAutoAllow(false)
|
||||
forceUpdate({})
|
||||
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
||||
if (removed) {
|
||||
setAutoAllowRemovedForCall(true)
|
||||
setShowRemoveAutoAllow(false)
|
||||
forceUpdate({})
|
||||
}
|
||||
}}
|
||||
variant='default'
|
||||
className='text-xs'
|
||||
@@ -2003,6 +2048,7 @@ export function ToolCall({
|
||||
toolCall={toolCall}
|
||||
onStateChange={handleStateChange}
|
||||
editedParams={editedParams}
|
||||
actions={interruptActions}
|
||||
/>
|
||||
)}
|
||||
{/* Render subagent content as thinking text */}
|
||||
@@ -2048,9 +2094,12 @@ export function ToolCall({
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
await removeAutoAllowedTool(toolCall.name)
|
||||
setShowRemoveAutoAllow(false)
|
||||
forceUpdate({})
|
||||
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
||||
if (removed) {
|
||||
setAutoAllowRemovedForCall(true)
|
||||
setShowRemoveAutoAllow(false)
|
||||
forceUpdate({})
|
||||
}
|
||||
}}
|
||||
variant='default'
|
||||
className='text-xs'
|
||||
@@ -2064,6 +2113,7 @@ export function ToolCall({
|
||||
toolCall={toolCall}
|
||||
onStateChange={handleStateChange}
|
||||
editedParams={editedParams}
|
||||
actions={interruptActions}
|
||||
/>
|
||||
)}
|
||||
{/* Render subagent content as thinking text */}
|
||||
@@ -2087,7 +2137,7 @@ export function ToolCall({
|
||||
}
|
||||
}
|
||||
|
||||
const isEditWorkflow = toolCall.name === 'edit_workflow'
|
||||
const isEditWorkflow = isWorkflowEditSummaryTool(toolCall)
|
||||
const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
|
||||
const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
||||
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
||||
@@ -2109,9 +2159,12 @@ export function ToolCall({
|
||||
<div className='mt-[10px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
await removeAutoAllowedTool(toolCall.name)
|
||||
setShowRemoveAutoAllow(false)
|
||||
forceUpdate({})
|
||||
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
||||
if (removed) {
|
||||
setAutoAllowRemovedForCall(true)
|
||||
setShowRemoveAutoAllow(false)
|
||||
forceUpdate({})
|
||||
}
|
||||
}}
|
||||
variant='default'
|
||||
className='text-xs'
|
||||
@@ -2125,6 +2178,7 @@ export function ToolCall({
|
||||
toolCall={toolCall}
|
||||
onStateChange={handleStateChange}
|
||||
editedParams={editedParams}
|
||||
actions={interruptActions}
|
||||
/>
|
||||
) : showMoveToBackground ? (
|
||||
<div className='mt-[10px]'>
|
||||
@@ -2155,7 +2209,7 @@ export function ToolCall({
|
||||
</Button>
|
||||
</div>
|
||||
) : null}
|
||||
{/* Workflow edit summary - shows block changes after edit_workflow completes */}
|
||||
{/* Workflow edit summary - shows block changes after edit_workflow/workflow_change(apply) */}
|
||||
<WorkflowEditSummary toolCall={toolCall} />
|
||||
|
||||
{/* Render subagent content as thinking text */}
|
||||
|
||||
@@ -113,7 +113,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
clearPlanArtifact,
|
||||
savePlanArtifact,
|
||||
loadAvailableModels,
|
||||
loadAutoAllowedTools,
|
||||
resumeActiveStream,
|
||||
} = useCopilotStore()
|
||||
|
||||
@@ -125,8 +124,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
setCopilotWorkflowId,
|
||||
loadChats,
|
||||
loadAvailableModels,
|
||||
loadAutoAllowedTools,
|
||||
currentChat,
|
||||
isSendingMessage,
|
||||
resumeActiveStream,
|
||||
})
|
||||
|
||||
@@ -12,8 +12,6 @@ interface UseCopilotInitializationProps {
|
||||
setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
|
||||
loadChats: (forceRefresh?: boolean) => Promise<void>
|
||||
loadAvailableModels: () => Promise<void>
|
||||
loadAutoAllowedTools: () => Promise<void>
|
||||
currentChat: any
|
||||
isSendingMessage: boolean
|
||||
resumeActiveStream: () => Promise<boolean>
|
||||
}
|
||||
@@ -32,8 +30,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
setCopilotWorkflowId,
|
||||
loadChats,
|
||||
loadAvailableModels,
|
||||
loadAutoAllowedTools,
|
||||
currentChat,
|
||||
isSendingMessage,
|
||||
resumeActiveStream,
|
||||
} = props
|
||||
@@ -120,17 +116,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
})
|
||||
}, [isSendingMessage, resumeActiveStream])
|
||||
|
||||
/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
|
||||
const hasLoadedAutoAllowedToolsRef = useRef(false)
|
||||
useEffect(() => {
|
||||
if (!hasLoadedAutoAllowedToolsRef.current) {
|
||||
hasLoadedAutoAllowedToolsRef.current = true
|
||||
loadAutoAllowedTools().catch((err) => {
|
||||
logger.warn('[Copilot] Failed to load auto-allowed tools', err)
|
||||
})
|
||||
}
|
||||
}, [loadAutoAllowedTools])
|
||||
|
||||
/** Load available models once on mount */
|
||||
const hasLoadedModelsRef = useRef(false)
|
||||
useEffect(() => {
|
||||
|
||||
@@ -30,7 +30,6 @@ export interface OAuthRequiredModalProps {
|
||||
requiredScopes?: string[]
|
||||
serviceId: string
|
||||
newScopes?: string[]
|
||||
onConnect?: () => Promise<void> | void
|
||||
}
|
||||
|
||||
const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
@@ -315,7 +314,6 @@ export function OAuthRequiredModal({
|
||||
requiredScopes = [],
|
||||
serviceId,
|
||||
newScopes = [],
|
||||
onConnect,
|
||||
}: OAuthRequiredModalProps) {
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const { baseProvider } = parseProvider(provider)
|
||||
@@ -361,12 +359,6 @@ export function OAuthRequiredModal({
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
if (onConnect) {
|
||||
await onConnect()
|
||||
onClose()
|
||||
return
|
||||
}
|
||||
|
||||
const providerId = getProviderIdFromServiceId(serviceId)
|
||||
|
||||
logger.info('Linking OAuth2:', {
|
||||
|
||||
@@ -3,12 +3,10 @@
|
||||
import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ExternalLink, Users } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Button, Combobox } from '@/components/emcn/components'
|
||||
import { getSubscriptionStatus } from '@/lib/billing/client'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { getPollingProviderFromOAuth } from '@/lib/credential-sets/providers'
|
||||
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
|
||||
import {
|
||||
getCanonicalScopesForProvider,
|
||||
getProviderIdFromServiceId,
|
||||
@@ -20,9 +18,9 @@ import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { useCredentialSets } from '@/hooks/queries/credential-sets'
|
||||
import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { useOrganizations } from '@/hooks/queries/organization'
|
||||
import { useSubscriptionData } from '@/hooks/queries/subscription'
|
||||
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
|
||||
@@ -48,8 +46,6 @@ export function CredentialSelector({
|
||||
previewValue,
|
||||
previewContextValues,
|
||||
}: CredentialSelectorProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = (params?.workspaceId as string) || ''
|
||||
const [showOAuthModal, setShowOAuthModal] = useState(false)
|
||||
const [editingValue, setEditingValue] = useState('')
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
@@ -100,32 +96,53 @@ export function CredentialSelector({
|
||||
data: credentials = [],
|
||||
isFetching: credentialsLoading,
|
||||
refetch: refetchCredentials,
|
||||
} = useOAuthCredentials(effectiveProviderId, {
|
||||
enabled: Boolean(effectiveProviderId),
|
||||
workspaceId,
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
} = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
|
||||
|
||||
const selectedCredential = useMemo(
|
||||
() => credentials.find((cred) => cred.id === selectedId),
|
||||
[credentials, selectedId]
|
||||
)
|
||||
|
||||
const shouldFetchForeignMeta =
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
Boolean(activeWorkflowId) &&
|
||||
Boolean(effectiveProviderId)
|
||||
|
||||
const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
|
||||
useOAuthCredentialDetail(
|
||||
shouldFetchForeignMeta ? selectedId : undefined,
|
||||
activeWorkflowId || undefined,
|
||||
shouldFetchForeignMeta
|
||||
)
|
||||
|
||||
const hasForeignMeta = foreignCredentials.length > 0
|
||||
const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
|
||||
|
||||
const selectedCredentialSet = useMemo(
|
||||
() => credentialSets.find((cs) => cs.id === selectedCredentialSetId),
|
||||
[credentialSets, selectedCredentialSetId]
|
||||
)
|
||||
|
||||
const isForeignCredentialSet = Boolean(isCredentialSetSelected && !selectedCredentialSet)
|
||||
|
||||
const resolvedLabel = useMemo(() => {
|
||||
if (selectedCredentialSet) return selectedCredentialSet.name
|
||||
if (isForeignCredentialSet) return CREDENTIAL.FOREIGN_LABEL
|
||||
if (selectedCredential) return selectedCredential.name
|
||||
if (isForeign) return CREDENTIAL.FOREIGN_LABEL
|
||||
return ''
|
||||
}, [selectedCredentialSet, selectedCredential])
|
||||
}, [selectedCredentialSet, isForeignCredentialSet, selectedCredential, isForeign])
|
||||
|
||||
const displayValue = isEditing ? editingValue : resolvedLabel
|
||||
|
||||
const invalidSelection =
|
||||
!isPreview && Boolean(selectedId) && !selectedCredential && !credentialsLoading
|
||||
!isPreview &&
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
!hasForeignMeta &&
|
||||
!credentialsLoading &&
|
||||
!foreignMetaLoading
|
||||
|
||||
useEffect(() => {
|
||||
if (!invalidSelection) return
|
||||
@@ -136,7 +153,7 @@ export function CredentialSelector({
|
||||
setStoreValue('')
|
||||
}, [invalidSelection, selectedId, effectiveProviderId, setStoreValue])
|
||||
|
||||
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
|
||||
useCredentialRefreshTriggers(refetchCredentials)
|
||||
|
||||
const handleOpenChange = useCallback(
|
||||
(isOpen: boolean) => {
|
||||
@@ -178,18 +195,8 @@ export function CredentialSelector({
|
||||
)
|
||||
|
||||
const handleAddCredential = useCallback(() => {
|
||||
writePendingCredentialCreateRequest({
|
||||
workspaceId,
|
||||
type: 'oauth',
|
||||
providerId: effectiveProviderId,
|
||||
displayName: '',
|
||||
serviceId,
|
||||
requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
|
||||
requestedAt: Date.now(),
|
||||
})
|
||||
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
|
||||
}, [workspaceId, effectiveProviderId, serviceId])
|
||||
setShowOAuthModal(true)
|
||||
}, [])
|
||||
|
||||
const getProviderIcon = useCallback((providerName: OAuthProvider) => {
|
||||
const { baseProvider } = parseProvider(providerName)
|
||||
@@ -244,18 +251,23 @@ export function CredentialSelector({
|
||||
label: cred.name,
|
||||
value: cred.id,
|
||||
}))
|
||||
credentialItems.push({
|
||||
label:
|
||||
credentials.length > 0
|
||||
? `Connect another ${getProviderName(provider)} account`
|
||||
: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
|
||||
groups.push({
|
||||
section: 'Personal Credential',
|
||||
items: credentialItems,
|
||||
})
|
||||
if (credentialItems.length > 0) {
|
||||
groups.push({
|
||||
section: 'Personal Credential',
|
||||
items: credentialItems,
|
||||
})
|
||||
} else {
|
||||
groups.push({
|
||||
section: 'Personal Credential',
|
||||
items: [
|
||||
{
|
||||
label: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
},
|
||||
],
|
||||
})
|
||||
}
|
||||
|
||||
return { comboboxOptions: [], comboboxGroups: groups }
|
||||
}
|
||||
@@ -265,13 +277,12 @@ export function CredentialSelector({
|
||||
value: cred.id,
|
||||
}))
|
||||
|
||||
options.push({
|
||||
label:
|
||||
credentials.length > 0
|
||||
? `Connect another ${getProviderName(provider)} account`
|
||||
: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
if (credentials.length === 0) {
|
||||
options.push({
|
||||
label: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
}
|
||||
|
||||
return { comboboxOptions: options, comboboxGroups: undefined }
|
||||
}, [
|
||||
@@ -357,7 +368,7 @@ export function CredentialSelector({
|
||||
}
|
||||
disabled={effectiveDisabled}
|
||||
editable={true}
|
||||
filterOptions={true}
|
||||
filterOptions={!isForeign && !isForeignCredentialSet}
|
||||
isLoading={credentialsLoading}
|
||||
overlayContent={overlayContent}
|
||||
className={selectedId || isCredentialSetSelected ? 'pl-[28px]' : ''}
|
||||
@@ -369,13 +380,15 @@ export function CredentialSelector({
|
||||
<span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
|
||||
Additional permissions required
|
||||
</div>
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
{!isForeign && (
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -394,11 +407,7 @@ export function CredentialSelector({
|
||||
)
|
||||
}
|
||||
|
||||
function useCredentialRefreshTriggers(
|
||||
refetchCredentials: () => Promise<unknown>,
|
||||
providerId: string,
|
||||
workspaceId: string
|
||||
) {
|
||||
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
|
||||
useEffect(() => {
|
||||
const refresh = () => {
|
||||
void refetchCredentials()
|
||||
@@ -416,29 +425,12 @@ function useCredentialRefreshTriggers(
|
||||
}
|
||||
}
|
||||
|
||||
const handleCredentialsUpdated = (
|
||||
event: CustomEvent<{ providerId?: string; workspaceId?: string }>
|
||||
) => {
|
||||
if (event.detail?.providerId && event.detail.providerId !== providerId) {
|
||||
return
|
||||
}
|
||||
if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
|
||||
return
|
||||
}
|
||||
refresh()
|
||||
}
|
||||
|
||||
document.addEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.addEventListener('pageshow', handlePageShow)
|
||||
window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)
|
||||
|
||||
return () => {
|
||||
document.removeEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.removeEventListener('pageshow', handlePageShow)
|
||||
window.removeEventListener(
|
||||
'oauth-credentials-updated',
|
||||
handleCredentialsUpdated as EventListener
|
||||
)
|
||||
}
|
||||
}, [providerId, workspaceId, refetchCredentials])
|
||||
}, [refetchCredentials])
|
||||
}
|
||||
|
||||
@@ -9,7 +9,6 @@ import {
|
||||
PopoverSection,
|
||||
} from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
|
||||
import {
|
||||
usePersonalEnvironment,
|
||||
useWorkspaceEnvironment,
|
||||
@@ -169,15 +168,7 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
|
||||
}, [searchTerm])
|
||||
|
||||
const openEnvironmentSettings = () => {
|
||||
if (workspaceId) {
|
||||
writePendingCredentialCreateRequest({
|
||||
workspaceId,
|
||||
type: 'env_personal',
|
||||
envKey: searchTerm.trim(),
|
||||
requestedAt: Date.now(),
|
||||
})
|
||||
}
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'environment' } }))
|
||||
onClose?.()
|
||||
}
|
||||
|
||||
@@ -311,7 +302,7 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
|
||||
}}
|
||||
>
|
||||
<Plus className='h-3 w-3' />
|
||||
<span>Create Secret</span>
|
||||
<span>Create environment variable</span>
|
||||
</PopoverItem>
|
||||
</PopoverScrollArea>
|
||||
) : (
|
||||
|
||||
@@ -7,6 +7,7 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
@@ -124,6 +125,8 @@ export function FileSelectorInput({
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
|
||||
|
||||
const selectorResolution = useMemo<SelectorResolution | null>(() => {
|
||||
return resolveSelectorForSubBlock(subBlock, {
|
||||
workflowId: workflowIdFromUrl,
|
||||
@@ -165,6 +168,7 @@ export function FileSelectorInput({
|
||||
|
||||
const disabledReason =
|
||||
finalDisabled ||
|
||||
isForeignCredential ||
|
||||
missingCredential ||
|
||||
missingDomain ||
|
||||
missingProject ||
|
||||
|
||||
@@ -4,6 +4,7 @@ import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -46,6 +47,10 @@ export function FolderSelectorInput({
|
||||
subBlock.canonicalParamId === 'copyDestinationId' ||
|
||||
subBlock.id === 'copyDestinationFolder' ||
|
||||
subBlock.id === 'manualCopyDestinationFolder'
|
||||
const { isForeignCredential } = useForeignCredential(
|
||||
effectiveProviderId,
|
||||
(connectedCredential as string) || ''
|
||||
)
|
||||
|
||||
// Central dependsOn gating
|
||||
const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
|
||||
@@ -114,7 +119,9 @@ export function FolderSelectorInput({
|
||||
selectorContext={
|
||||
selectorResolution?.context ?? { credentialId, workflowId: activeWorkflowId || '' }
|
||||
}
|
||||
disabled={finalDisabled || missingCredential || !selectorResolution?.key}
|
||||
disabled={
|
||||
finalDisabled || isForeignCredential || missingCredential || !selectorResolution?.key
|
||||
}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue ?? null}
|
||||
placeholder={subBlock.placeholder || 'Select folder'}
|
||||
|
||||
@@ -7,6 +7,7 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
@@ -72,6 +73,11 @@ export function ProjectSelectorInput({
|
||||
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(
|
||||
effectiveProviderId,
|
||||
(connectedCredential as string) || ''
|
||||
)
|
||||
const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || ''
|
||||
const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
|
||||
disabled,
|
||||
@@ -117,7 +123,7 @@ export function ProjectSelectorInput({
|
||||
subBlock={subBlock}
|
||||
selectorKey={selectorResolution.key}
|
||||
selectorContext={selectorResolution.context}
|
||||
disabled={finalDisabled || missingCredential}
|
||||
disabled={finalDisabled || isForeignCredential || missingCredential}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue ?? null}
|
||||
placeholder={subBlock.placeholder || 'Select project'}
|
||||
|
||||
@@ -7,6 +7,7 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -86,6 +87,8 @@ export function SheetSelectorInput({
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
|
||||
|
||||
const selectorResolution = useMemo<SelectorResolution | null>(() => {
|
||||
return resolveSelectorForSubBlock(subBlock, {
|
||||
workflowId: workflowIdFromUrl,
|
||||
@@ -98,7 +101,11 @@ export function SheetSelectorInput({
|
||||
const missingSpreadsheet = !normalizedSpreadsheetId
|
||||
|
||||
const disabledReason =
|
||||
finalDisabled || missingCredential || missingSpreadsheet || !selectorResolution?.key
|
||||
finalDisabled ||
|
||||
isForeignCredential ||
|
||||
missingCredential ||
|
||||
missingSpreadsheet ||
|
||||
!selectorResolution?.key
|
||||
|
||||
if (!selectorResolution?.key) {
|
||||
return (
|
||||
|
||||
@@ -6,6 +6,7 @@ import { Tooltip } from '@/components/emcn'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -84,6 +85,11 @@ export function SlackSelectorInput({
|
||||
? (effectiveBotToken as string) || ''
|
||||
: (effectiveCredential as string) || ''
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(
|
||||
effectiveProviderId,
|
||||
(effectiveAuthMethod as string) === 'bot_token' ? '' : (effectiveCredential as string) || ''
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
const val = isPreview && previewValue !== undefined ? previewValue : storeValue
|
||||
if (typeof val === 'string') {
|
||||
@@ -93,7 +99,7 @@ export function SlackSelectorInput({
|
||||
|
||||
const requiresCredential = dependsOn.includes('credential')
|
||||
const missingCredential = !credential || credential.trim().length === 0
|
||||
const shouldForceDisable = requiresCredential && missingCredential
|
||||
const shouldForceDisable = requiresCredential && (missingCredential || isForeignCredential)
|
||||
|
||||
const context: SelectorContext = useMemo(
|
||||
() => ({
|
||||
@@ -130,7 +136,7 @@ export function SlackSelectorInput({
|
||||
subBlock={subBlock}
|
||||
selectorKey={config.selectorKey}
|
||||
selectorContext={context}
|
||||
disabled={finalDisabled || shouldForceDisable}
|
||||
disabled={finalDisabled || shouldForceDisable || isForeignCredential}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue ?? null}
|
||||
placeholder={subBlock.placeholder || config.placeholder}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { ExternalLink } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Button, Combobox } from '@/components/emcn/components'
|
||||
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
|
||||
import {
|
||||
getCanonicalScopesForProvider,
|
||||
getProviderIdFromServiceId,
|
||||
@@ -12,7 +10,8 @@ import {
|
||||
parseProvider,
|
||||
} from '@/lib/oauth'
|
||||
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
|
||||
import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { CREDENTIAL } from '@/executor/constants'
|
||||
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
@@ -55,12 +54,10 @@ export function ToolCredentialSelector({
|
||||
onChange,
|
||||
provider,
|
||||
requiredScopes = [],
|
||||
label = 'Select credential',
|
||||
label = 'Select account',
|
||||
serviceId,
|
||||
disabled = false,
|
||||
}: ToolCredentialSelectorProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = (params?.workspaceId as string) || ''
|
||||
const [showOAuthModal, setShowOAuthModal] = useState(false)
|
||||
const [editingInputValue, setEditingInputValue] = useState('')
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
@@ -74,32 +71,50 @@ export function ToolCredentialSelector({
|
||||
data: credentials = [],
|
||||
isFetching: credentialsLoading,
|
||||
refetch: refetchCredentials,
|
||||
} = useOAuthCredentials(effectiveProviderId, {
|
||||
enabled: Boolean(effectiveProviderId),
|
||||
workspaceId,
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
} = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
|
||||
|
||||
const selectedCredential = useMemo(
|
||||
() => credentials.find((cred) => cred.id === selectedId),
|
||||
[credentials, selectedId]
|
||||
)
|
||||
|
||||
const shouldFetchForeignMeta =
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
Boolean(activeWorkflowId) &&
|
||||
Boolean(effectiveProviderId)
|
||||
|
||||
const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
|
||||
useOAuthCredentialDetail(
|
||||
shouldFetchForeignMeta ? selectedId : undefined,
|
||||
activeWorkflowId || undefined,
|
||||
shouldFetchForeignMeta
|
||||
)
|
||||
|
||||
const hasForeignMeta = foreignCredentials.length > 0
|
||||
const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
|
||||
|
||||
const resolvedLabel = useMemo(() => {
|
||||
if (selectedCredential) return selectedCredential.name
|
||||
if (isForeign) return CREDENTIAL.FOREIGN_LABEL
|
||||
return ''
|
||||
}, [selectedCredential])
|
||||
}, [selectedCredential, isForeign])
|
||||
|
||||
const inputValue = isEditing ? editingInputValue : resolvedLabel
|
||||
|
||||
const invalidSelection = Boolean(selectedId) && !selectedCredential && !credentialsLoading
|
||||
const invalidSelection =
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
!hasForeignMeta &&
|
||||
!credentialsLoading &&
|
||||
!foreignMetaLoading
|
||||
|
||||
useEffect(() => {
|
||||
if (!invalidSelection) return
|
||||
onChange('')
|
||||
}, [invalidSelection, onChange])
|
||||
|
||||
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
|
||||
useCredentialRefreshTriggers(refetchCredentials)
|
||||
|
||||
const handleOpenChange = useCallback(
|
||||
(isOpen: boolean) => {
|
||||
@@ -127,18 +142,8 @@ export function ToolCredentialSelector({
|
||||
)
|
||||
|
||||
const handleAddCredential = useCallback(() => {
|
||||
writePendingCredentialCreateRequest({
|
||||
workspaceId,
|
||||
type: 'oauth',
|
||||
providerId: effectiveProviderId,
|
||||
displayName: '',
|
||||
serviceId,
|
||||
requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
|
||||
requestedAt: Date.now(),
|
||||
})
|
||||
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
|
||||
}, [workspaceId, effectiveProviderId, serviceId])
|
||||
setShowOAuthModal(true)
|
||||
}, [])
|
||||
|
||||
const comboboxOptions = useMemo(() => {
|
||||
const options = credentials.map((cred) => ({
|
||||
@@ -146,13 +151,12 @@ export function ToolCredentialSelector({
|
||||
value: cred.id,
|
||||
}))
|
||||
|
||||
options.push({
|
||||
label:
|
||||
credentials.length > 0
|
||||
? `Connect another ${getProviderName(provider)} account`
|
||||
: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
if (credentials.length === 0) {
|
||||
options.push({
|
||||
label: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
}
|
||||
|
||||
return options
|
||||
}, [credentials, provider])
|
||||
@@ -202,7 +206,7 @@ export function ToolCredentialSelector({
|
||||
placeholder={label}
|
||||
disabled={disabled}
|
||||
editable={true}
|
||||
filterOptions={true}
|
||||
filterOptions={!isForeign}
|
||||
isLoading={credentialsLoading}
|
||||
overlayContent={overlayContent}
|
||||
className={selectedId ? 'pl-[28px]' : ''}
|
||||
@@ -214,13 +218,15 @@ export function ToolCredentialSelector({
|
||||
<span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
|
||||
Additional permissions required
|
||||
</div>
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
{!isForeign && (
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -239,11 +245,7 @@ export function ToolCredentialSelector({
|
||||
)
|
||||
}
|
||||
|
||||
function useCredentialRefreshTriggers(
|
||||
refetchCredentials: () => Promise<unknown>,
|
||||
providerId: string,
|
||||
workspaceId: string
|
||||
) {
|
||||
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
|
||||
useEffect(() => {
|
||||
const refresh = () => {
|
||||
void refetchCredentials()
|
||||
@@ -261,29 +263,12 @@ function useCredentialRefreshTriggers(
|
||||
}
|
||||
}
|
||||
|
||||
const handleCredentialsUpdated = (
|
||||
event: CustomEvent<{ providerId?: string; workspaceId?: string }>
|
||||
) => {
|
||||
if (event.detail?.providerId && event.detail.providerId !== providerId) {
|
||||
return
|
||||
}
|
||||
if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
|
||||
return
|
||||
}
|
||||
refresh()
|
||||
}
|
||||
|
||||
document.addEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.addEventListener('pageshow', handlePageShow)
|
||||
window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)
|
||||
|
||||
return () => {
|
||||
document.removeEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.removeEventListener('pageshow', handlePageShow)
|
||||
window.removeEventListener(
|
||||
'oauth-credentials-updated',
|
||||
handleCredentialsUpdated as EventListener
|
||||
)
|
||||
}
|
||||
}, [providerId, workspaceId, refetchCredentials])
|
||||
}, [refetchCredentials])
|
||||
}
|
||||
|
||||
@@ -0,0 +1,50 @@
import { useEffect, useMemo, useState } from 'react'

export function useForeignCredential(
  provider: string | undefined,
  credentialId: string | undefined
) {
  const [isForeign, setIsForeign] = useState<boolean>(false)
  const [loading, setLoading] = useState<boolean>(false)
  const [error, setError] = useState<string | null>(null)

  const normalizedProvider = useMemo(() => (provider || '').toString(), [provider])
  const normalizedCredentialId = useMemo(() => credentialId || '', [credentialId])

  useEffect(() => {
    let cancelled = false
    async function check() {
      setLoading(true)
      setError(null)
      try {
        if (!normalizedProvider || !normalizedCredentialId) {
          if (!cancelled) setIsForeign(false)
          return
        }
        const res = await fetch(
          `/api/auth/oauth/credentials?provider=${encodeURIComponent(normalizedProvider)}`
        )
        if (!res.ok) {
          if (!cancelled) setIsForeign(true)
          return
        }
        const data = await res.json()
        const isOwn = (data.credentials || []).some((c: any) => c.id === normalizedCredentialId)
        if (!cancelled) setIsForeign(!isOwn)
      } catch (e) {
        if (!cancelled) {
          setIsForeign(true)
          setError((e as Error).message)
        }
      } finally {
        if (!cancelled) setLoading(false)
      }
    }
    void check()
    return () => {
      cancelled = true
    }
  }, [normalizedProvider, normalizedCredentialId])

  return { isForeignCredential: isForeign, loading, error }
}
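The selector components touched later in this diff consume the hook the same way; a condensed sketch with prop lists trimmed (names match those components, the JSX is not their literal markup):

```tsx
const { isForeignCredential } = useForeignCredential(effectiveProviderId, credentialId)

return (
  <SelectorCombobox
    /* ...other props unchanged... */
    disabled={finalDisabled || isForeignCredential || missingCredential}
  />
)
```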
@@ -340,13 +340,7 @@ export const Panel = memo(function Panel() {
   * Register global keyboard shortcuts using the central commands registry.
   *
   * - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
   * - C: Focus Copilot tab
   * - T: Focus Toolbar tab
   * - E: Focus Editor tab
   * - Mod+F: Focus Toolbar tab and search input
   *
   * The tab-switching commands are disabled inside editable elements so typing
   * in inputs or textareas is not interrupted.
   */
  useRegisterGlobalCommands(() =>
    createCommands([
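Each entry handed to `createCommands` keeps the same minimal shape as the ones removed below. An illustrative sketch only; the override value is an assumption mirroring the removed entries, and the real handler lives in the surrounding component:

```ts
{
  id: 'focus-toolbar-search',
  handler: () => {
    /* switch to the Toolbar tab and focus its search input */
  },
  overrides: {
    allowInEditable: false, // assumption: same as the removed tab-switching commands
  },
},
```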
@@ -363,33 +357,6 @@ export const Panel = memo(function Panel() {
|
||||
allowInEditable: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'focus-copilot-tab',
|
||||
handler: () => {
|
||||
setActiveTab('copilot')
|
||||
},
|
||||
overrides: {
|
||||
allowInEditable: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'focus-toolbar-tab',
|
||||
handler: () => {
|
||||
setActiveTab('toolbar')
|
||||
},
|
||||
overrides: {
|
||||
allowInEditable: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'focus-editor-tab',
|
||||
handler: () => {
|
||||
setActiveTab('editor')
|
||||
},
|
||||
overrides: {
|
||||
allowInEditable: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'focus-toolbar-search',
|
||||
handler: () => {
|
||||
|
||||
@@ -473,7 +473,7 @@ function ConnectionsSection({
        </div>
      )}

      {/* Secrets */}
      {/* Environment Variables */}
      {envVars.length > 0 && (
        <div className='mb-[2px] last:mb-0'>
          <div
@@ -489,7 +489,7 @@ function ConnectionsSection({
                'text-[var(--text-secondary)] group-hover:text-[var(--text-primary)]'
              )}
            >
              Secrets
              Environment Variables
            </span>
            <ChevronDownIcon
              className={cn(
File diff suppressed because it is too large
@@ -1,17 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { CredentialsManager } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/credentials/credentials-manager'
|
||||
|
||||
interface CredentialsProps {
|
||||
onOpenChange?: (open: boolean) => void
|
||||
registerCloseHandler?: (handler: (open: boolean) => void) => void
|
||||
registerBeforeLeaveHandler?: (handler: (onProceed: () => void) => void) => void
|
||||
}
|
||||
|
||||
export function Credentials(_props: CredentialsProps) {
|
||||
return (
|
||||
<div className='h-full min-h-0'>
|
||||
<CredentialsManager />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -134,7 +134,7 @@ function WorkspaceVariableRow({
|
||||
<Trash />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Delete secret</Tooltip.Content>
|
||||
<Tooltip.Content>Delete environment variable</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
@@ -637,7 +637,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
|
||||
<Trash />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Delete secret</Tooltip.Content>
|
||||
<Tooltip.Content>Delete environment variable</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
@@ -811,7 +811,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
|
||||
filteredWorkspaceEntries.length === 0 &&
|
||||
(envVars.length > 0 || Object.keys(workspaceVars).length > 0) && (
|
||||
<div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
|
||||
No secrets found matching "{searchTerm}"
|
||||
No environment variables found matching "{searchTerm}"
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
|
||||
@@ -2,7 +2,6 @@ export { ApiKeys } from './api-keys/api-keys'
|
||||
export { BYOK } from './byok/byok'
|
||||
export { Copilot } from './copilot/copilot'
|
||||
export { CredentialSets } from './credential-sets/credential-sets'
|
||||
export { Credentials } from './credentials/credentials'
|
||||
export { CustomTools } from './custom-tools/custom-tools'
|
||||
export { Debug } from './debug/debug'
|
||||
export { EnvironmentVariables } from './environment/environment'
|
||||
|
||||
@@ -20,6 +20,7 @@ import {
|
||||
import {
|
||||
Card,
|
||||
Connections,
|
||||
FolderCode,
|
||||
HexSimple,
|
||||
Key,
|
||||
SModal,
|
||||
@@ -44,11 +45,12 @@ import {
|
||||
BYOK,
|
||||
Copilot,
|
||||
CredentialSets,
|
||||
Credentials,
|
||||
CustomTools,
|
||||
Debug,
|
||||
EnvironmentVariables,
|
||||
FileUploads,
|
||||
General,
|
||||
Integrations,
|
||||
MCP,
|
||||
Skills,
|
||||
Subscription,
|
||||
@@ -78,7 +80,6 @@ interface SettingsModalProps {
|
||||
|
||||
type SettingsSection =
|
||||
| 'general'
|
||||
| 'credentials'
|
||||
| 'environment'
|
||||
| 'template-profile'
|
||||
| 'integrations'
|
||||
@@ -155,10 +156,11 @@ const allNavigationItems: NavigationItem[] = [
|
||||
requiresHosted: true,
|
||||
requiresTeam: true,
|
||||
},
|
||||
{ id: 'credentials', label: 'Credentials', icon: Connections, section: 'tools' },
|
||||
{ id: 'integrations', label: 'Integrations', icon: Connections, section: 'tools' },
|
||||
{ id: 'custom-tools', label: 'Custom Tools', icon: Wrench, section: 'tools' },
|
||||
{ id: 'skills', label: 'Skills', icon: AgentSkillsIcon, section: 'tools' },
|
||||
{ id: 'mcp', label: 'MCP Tools', icon: McpIcon, section: 'tools' },
|
||||
{ id: 'environment', label: 'Environment', icon: FolderCode, section: 'system' },
|
||||
{ id: 'apikeys', label: 'API Keys', icon: Key, section: 'system' },
|
||||
{ id: 'workflow-mcp-servers', label: 'MCP Servers', icon: Server, section: 'system' },
|
||||
{
|
||||
@@ -254,6 +256,9 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
if (item.id === 'apikeys' && permissionConfig.hideApiKeysTab) {
|
||||
return false
|
||||
}
|
||||
if (item.id === 'environment' && permissionConfig.hideEnvironmentTab) {
|
||||
return false
|
||||
}
|
||||
if (item.id === 'files' && permissionConfig.hideFilesTab) {
|
||||
return false
|
||||
}
|
||||
@@ -319,9 +324,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
if (!isBillingEnabled && (activeSection === 'subscription' || activeSection === 'team')) {
|
||||
return 'general'
|
||||
}
|
||||
if (activeSection === 'environment' || activeSection === 'integrations') {
|
||||
return 'credentials'
|
||||
}
|
||||
return activeSection
|
||||
}, [activeSection])
|
||||
|
||||
@@ -340,7 +342,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
(sectionId: SettingsSection) => {
|
||||
if (sectionId === effectiveActiveSection) return
|
||||
|
||||
if (effectiveActiveSection === 'credentials' && environmentBeforeLeaveHandler.current) {
|
||||
if (effectiveActiveSection === 'environment' && environmentBeforeLeaveHandler.current) {
|
||||
environmentBeforeLeaveHandler.current(() => setActiveSection(sectionId))
|
||||
return
|
||||
}
|
||||
@@ -368,11 +370,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
|
||||
useEffect(() => {
|
||||
const handleOpenSettings = (event: CustomEvent<{ tab: SettingsSection }>) => {
|
||||
if (event.detail.tab === 'environment' || event.detail.tab === 'integrations') {
|
||||
setActiveSection('credentials')
|
||||
} else {
|
||||
setActiveSection(event.detail.tab)
|
||||
}
|
||||
setActiveSection(event.detail.tab)
|
||||
onOpenChange(true)
|
||||
}
|
||||
|
||||
@@ -481,19 +479,13 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
const handleDialogOpenChange = (newOpen: boolean) => {
|
||||
if (
|
||||
!newOpen &&
|
||||
effectiveActiveSection === 'credentials' &&
|
||||
effectiveActiveSection === 'environment' &&
|
||||
environmentBeforeLeaveHandler.current
|
||||
) {
|
||||
environmentBeforeLeaveHandler.current(() => {
|
||||
if (integrationsCloseHandler.current) {
|
||||
integrationsCloseHandler.current(newOpen)
|
||||
} else {
|
||||
onOpenChange(false)
|
||||
}
|
||||
})
|
||||
environmentBeforeLeaveHandler.current(() => onOpenChange(false))
|
||||
} else if (
|
||||
!newOpen &&
|
||||
effectiveActiveSection === 'credentials' &&
|
||||
effectiveActiveSection === 'integrations' &&
|
||||
integrationsCloseHandler.current
|
||||
) {
|
||||
integrationsCloseHandler.current(newOpen)
|
||||
@@ -510,7 +502,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
</VisuallyHidden.Root>
|
||||
<VisuallyHidden.Root>
|
||||
<DialogPrimitive.Description>
|
||||
Configure your workspace settings, credentials, and preferences
|
||||
Configure your workspace settings, environment variables, integrations, and preferences
|
||||
</DialogPrimitive.Description>
|
||||
</VisuallyHidden.Root>
|
||||
|
||||
@@ -547,14 +539,18 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
</SModalMainHeader>
|
||||
<SModalMainBody>
|
||||
{effectiveActiveSection === 'general' && <General onOpenChange={onOpenChange} />}
|
||||
{effectiveActiveSection === 'credentials' && (
|
||||
<Credentials
|
||||
onOpenChange={onOpenChange}
|
||||
registerCloseHandler={registerIntegrationsCloseHandler}
|
||||
{effectiveActiveSection === 'environment' && (
|
||||
<EnvironmentVariables
|
||||
registerBeforeLeaveHandler={registerEnvironmentBeforeLeaveHandler}
|
||||
/>
|
||||
)}
|
||||
{effectiveActiveSection === 'template-profile' && <TemplateProfile />}
|
||||
{effectiveActiveSection === 'integrations' && (
|
||||
<Integrations
|
||||
onOpenChange={onOpenChange}
|
||||
registerCloseHandler={registerIntegrationsCloseHandler}
|
||||
/>
|
||||
)}
|
||||
{effectiveActiveSection === 'credential-sets' && <CredentialSets />}
|
||||
{effectiveActiveSection === 'access-control' && <AccessControl />}
|
||||
{effectiveActiveSection === 'apikeys' && <ApiKeys onOpenChange={onOpenChange} />}
|
||||
|
||||
@@ -589,6 +589,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {

export const scheduleExecution = task({
id: 'schedule-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

@@ -669,6 +669,7 @@ async function executeWebhookJobInternal(

export const webhookExecution = task({
id: 'webhook-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

@@ -197,5 +197,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {

export const workflowExecutionTask = task({
id: 'workflow-execution',
machine: 'medium-1x',
run: executeWorkflowJob,
})

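These hunks appear to pin the schedule, webhook, and workflow execution tasks to the medium-1x machine preset. For orientation, a minimal sketch of how a task declared this way is typically enqueued (assuming the trigger.dev v3 SDK; the import path and payload literal below are illustrative, not taken from this change):

import { workflowExecutionTask } from './workflow-execution' // hypothetical import path

// Enqueue a run from an async context; the real payload type is WorkflowExecutionPayload.
const handle = await workflowExecutionTask.trigger({ workflowId: 'wf_123' })
// handle.id can be stored to poll or subscribe to the run later.
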
@@ -394,6 +394,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Page Property Operations
{ label: 'List Page Properties', id: 'list_page_properties' },
{ label: 'Create Page Property', id: 'create_page_property' },
{ label: 'Delete Page Property', id: 'delete_page_property' },
// Search Operations
{ label: 'Search Content', id: 'search' },
{ label: 'Search in Space', id: 'search_in_space' },
@@ -414,6 +415,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Operations
{ label: 'List Labels', id: 'list_labels' },
{ label: 'Add Label', id: 'add_label' },
{ label: 'Delete Label', id: 'delete_label' },
{ label: 'Get Pages by Label', id: 'get_pages_by_label' },
{ label: 'List Space Labels', id: 'list_space_labels' },
// Space Operations
{ label: 'Get Space', id: 'get_space' },
{ label: 'List Spaces', id: 'list_spaces' },
@@ -485,6 +489,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'search_in_space',
|
||||
'get_space',
|
||||
'list_spaces',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
not: true,
|
||||
},
|
||||
@@ -500,6 +506,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_labels',
|
||||
'upload_attachment',
|
||||
'add_label',
|
||||
'delete_label',
|
||||
'delete_page_property',
|
||||
'get_page_children',
|
||||
'get_page_ancestors',
|
||||
'list_page_versions',
|
||||
@@ -527,6 +535,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'search_in_space',
|
||||
'get_space',
|
||||
'list_spaces',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
not: true,
|
||||
},
|
||||
@@ -542,6 +552,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_labels',
|
||||
'upload_attachment',
|
||||
'add_label',
|
||||
'delete_label',
|
||||
'delete_page_property',
|
||||
'get_page_children',
|
||||
'get_page_ancestors',
|
||||
'list_page_versions',
|
||||
@@ -566,6 +578,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'search_in_space',
|
||||
'create_blogpost',
|
||||
'list_blogposts_in_space',
|
||||
'list_space_labels',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -601,6 +614,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'create_page_property' },
|
||||
},
|
||||
{
|
||||
id: 'propertyId',
|
||||
title: 'Property ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter property ID to delete',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'delete_page_property' },
|
||||
},
|
||||
{
|
||||
id: 'title',
|
||||
title: 'Title',
|
||||
@@ -694,7 +715,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter label name',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'add_label' },
|
||||
condition: { field: 'operation', value: ['add_label', 'delete_label'] },
|
||||
},
|
||||
{
|
||||
id: 'labelPrefix',
|
||||
@@ -709,6 +730,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
value: () => 'global',
|
||||
condition: { field: 'operation', value: 'add_label' },
|
||||
},
|
||||
{
|
||||
id: 'labelId',
|
||||
title: 'Label ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter label ID',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'get_pages_by_label' },
|
||||
},
|
||||
{
|
||||
id: 'blogPostStatus',
|
||||
title: 'Status',
|
||||
@@ -759,6 +788,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_page_versions',
|
||||
'list_page_properties',
|
||||
'list_labels',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -780,6 +811,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_page_versions',
|
||||
'list_page_properties',
|
||||
'list_labels',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -800,6 +833,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Property Tools
|
||||
'confluence_list_page_properties',
|
||||
'confluence_create_page_property',
|
||||
'confluence_delete_page_property',
|
||||
// Search Tools
|
||||
'confluence_search',
|
||||
'confluence_search_in_space',
|
||||
@@ -820,6 +854,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Label Tools
|
||||
'confluence_list_labels',
|
||||
'confluence_add_label',
|
||||
'confluence_delete_label',
|
||||
'confluence_get_pages_by_label',
|
||||
'confluence_list_space_labels',
|
||||
// Space Tools
|
||||
'confluence_get_space',
|
||||
'confluence_list_spaces',
|
||||
@@ -852,6 +889,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
return 'confluence_list_page_properties'
|
||||
case 'create_page_property':
|
||||
return 'confluence_create_page_property'
|
||||
case 'delete_page_property':
|
||||
return 'confluence_delete_page_property'
|
||||
// Search Operations
|
||||
case 'search':
|
||||
return 'confluence_search'
|
||||
@@ -887,6 +926,12 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
return 'confluence_list_labels'
|
||||
case 'add_label':
|
||||
return 'confluence_add_label'
|
||||
case 'delete_label':
|
||||
return 'confluence_delete_label'
|
||||
case 'get_pages_by_label':
|
||||
return 'confluence_get_pages_by_label'
|
||||
case 'list_space_labels':
|
||||
return 'confluence_list_space_labels'
|
||||
// Space Operations
|
||||
case 'get_space':
|
||||
return 'confluence_get_space'
|
||||
@@ -908,7 +953,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
versionNumber,
|
||||
propertyKey,
|
||||
propertyValue,
|
||||
propertyId,
|
||||
labelPrefix,
|
||||
labelId,
|
||||
blogPostStatus,
|
||||
purge,
|
||||
bodyFormat,
|
||||
@@ -959,7 +1006,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
}
|
||||
}
|
||||
|
||||
// Operations that support cursor pagination
|
||||
// Operations that support generic cursor pagination.
|
||||
// get_pages_by_label and list_space_labels have dedicated handlers
|
||||
// below that pass cursor along with their required params (labelId, spaceId).
|
||||
const supportsCursor = [
|
||||
'list_attachments',
|
||||
'list_spaces',
|
||||
@@ -996,6 +1045,35 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'delete_page_property') {
|
||||
return {
|
||||
credential,
|
||||
pageId: effectivePageId,
|
||||
operation,
|
||||
propertyId,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'get_pages_by_label') {
|
||||
return {
|
||||
credential,
|
||||
operation,
|
||||
labelId,
|
||||
cursor: cursor || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'list_space_labels') {
|
||||
return {
|
||||
credential,
|
||||
operation,
|
||||
cursor: cursor || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'upload_attachment') {
|
||||
const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
|
||||
if (!normalizedFile) {
|
||||
@@ -1044,7 +1122,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
|
||||
attachmentComment: { type: 'string', description: 'Comment for the attachment' },
|
||||
labelName: { type: 'string', description: 'Label name' },
|
||||
labelId: { type: 'string', description: 'Label identifier' },
|
||||
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
|
||||
propertyId: { type: 'string', description: 'Property identifier' },
|
||||
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
|
||||
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
|
||||
bodyFormat: { type: 'string', description: 'Body format for comments' },
|
||||
@@ -1080,6 +1160,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Label Results
|
||||
labels: { type: 'array', description: 'List of labels' },
|
||||
labelName: { type: 'string', description: 'Label name' },
|
||||
labelId: { type: 'string', description: 'Label identifier' },
|
||||
// Space Results
|
||||
spaces: { type: 'array', description: 'List of spaces' },
|
||||
spaceId: { type: 'string', description: 'Space identifier' },
|
||||
|
||||
@@ -205,6 +205,10 @@ export const CREDENTIAL_SET = {
|
||||
PREFIX: 'credentialSet:',
|
||||
} as const
|
||||
|
||||
export const CREDENTIAL = {
|
||||
FOREIGN_LABEL: 'Saved by collaborator',
|
||||
} as const
|
||||
|
||||
export function isCredentialSetValue(value: string | null | undefined): boolean {
|
||||
return typeof value === 'string' && value.startsWith(CREDENTIAL_SET.PREFIX)
|
||||
}
|
||||
|
||||
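Alongside the new CREDENTIAL.FOREIGN_LABEL constant, here is a minimal usage sketch of the prefix helpers above (illustrative only; the selected value is made up):

import { CREDENTIAL_SET, isCredentialSetValue } from '@/executor/constants'

// 'credentialSet:abc123' is a made-up example value.
const selected = 'credentialSet:abc123'
if (isCredentialSetValue(selected)) {
  // Strip the stable 'credentialSet:' prefix to recover the credential set id.
  const credentialSetId = selected.slice(CREDENTIAL_SET.PREFIX.length) // 'abc123'
}
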
@@ -1,268 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { environmentKeys } from '@/hooks/queries/environment'
|
||||
import { fetchJson } from '@/hooks/selectors/helpers'
|
||||
|
||||
export type WorkspaceCredentialType = 'oauth' | 'env_workspace' | 'env_personal'
|
||||
export type WorkspaceCredentialRole = 'admin' | 'member'
|
||||
export type WorkspaceCredentialMemberStatus = 'active' | 'pending' | 'revoked'
|
||||
|
||||
export interface WorkspaceCredential {
|
||||
id: string
|
||||
workspaceId: string
|
||||
type: WorkspaceCredentialType
|
||||
displayName: string
|
||||
providerId: string | null
|
||||
accountId: string | null
|
||||
envKey: string | null
|
||||
envOwnerUserId: string | null
|
||||
createdBy: string
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
role?: WorkspaceCredentialRole
|
||||
status?: WorkspaceCredentialMemberStatus
|
||||
}
|
||||
|
||||
export interface WorkspaceCredentialMember {
|
||||
id: string
|
||||
userId: string
|
||||
role: WorkspaceCredentialRole
|
||||
status: WorkspaceCredentialMemberStatus
|
||||
joinedAt: string | null
|
||||
invitedBy: string | null
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
userName: string | null
|
||||
userEmail: string | null
|
||||
userImage: string | null
|
||||
}
|
||||
|
||||
interface CredentialListResponse {
|
||||
credentials?: WorkspaceCredential[]
|
||||
}
|
||||
|
||||
interface CredentialResponse {
|
||||
credential?: WorkspaceCredential | null
|
||||
}
|
||||
|
||||
interface MembersResponse {
|
||||
members?: WorkspaceCredentialMember[]
|
||||
}
|
||||
|
||||
export const workspaceCredentialKeys = {
|
||||
all: ['workspaceCredentials'] as const,
|
||||
list: (workspaceId?: string, type?: string, providerId?: string) =>
|
||||
['workspaceCredentials', workspaceId ?? 'none', type ?? 'all', providerId ?? 'all'] as const,
|
||||
detail: (credentialId?: string) =>
|
||||
['workspaceCredentials', 'detail', credentialId ?? 'none'] as const,
|
||||
members: (credentialId?: string) =>
|
||||
['workspaceCredentials', 'detail', credentialId ?? 'none', 'members'] as const,
|
||||
}
|
||||
|
||||
export function useWorkspaceCredentials(params: {
|
||||
workspaceId?: string
|
||||
type?: WorkspaceCredentialType
|
||||
providerId?: string
|
||||
enabled?: boolean
|
||||
}) {
|
||||
const { workspaceId, type, providerId, enabled = true } = params
|
||||
|
||||
return useQuery<WorkspaceCredential[]>({
|
||||
queryKey: workspaceCredentialKeys.list(workspaceId, type, providerId),
|
||||
queryFn: async () => {
|
||||
if (!workspaceId) return []
|
||||
const data = await fetchJson<CredentialListResponse>('/api/credentials', {
|
||||
searchParams: {
|
||||
workspaceId,
|
||||
type,
|
||||
providerId,
|
||||
},
|
||||
})
|
||||
return data.credentials ?? []
|
||||
},
|
||||
enabled: Boolean(workspaceId) && enabled,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
export function useWorkspaceCredential(credentialId?: string, enabled = true) {
|
||||
return useQuery<WorkspaceCredential | null>({
|
||||
queryKey: workspaceCredentialKeys.detail(credentialId),
|
||||
queryFn: async () => {
|
||||
if (!credentialId) return null
|
||||
const data = await fetchJson<CredentialResponse>(`/api/credentials/${credentialId}`)
|
||||
return data.credential ?? null
|
||||
},
|
||||
enabled: Boolean(credentialId) && enabled,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
export function useCreateWorkspaceCredential() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: {
|
||||
workspaceId: string
|
||||
type: WorkspaceCredentialType
|
||||
displayName?: string
|
||||
providerId?: string
|
||||
accountId?: string
|
||||
envKey?: string
|
||||
envOwnerUserId?: string
|
||||
}) => {
|
||||
const response = await fetch('/api/credentials', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(payload),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to create credential')
|
||||
}
|
||||
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.list(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.all,
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useUpdateWorkspaceCredential() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: {
|
||||
credentialId: string
|
||||
displayName?: string
|
||||
accountId?: string
|
||||
}) => {
|
||||
const response = await fetch(`/api/credentials/${payload.credentialId}`, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
displayName: payload.displayName,
|
||||
accountId: payload.accountId,
|
||||
}),
|
||||
})
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to update credential')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.all,
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useDeleteWorkspaceCredential() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (credentialId: string) => {
|
||||
const response = await fetch(`/api/credentials/${credentialId}`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to delete credential')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, credentialId) => {
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.detail(credentialId) })
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
|
||||
queryClient.invalidateQueries({ queryKey: environmentKeys.all })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useWorkspaceCredentialMembers(credentialId?: string) {
|
||||
return useQuery<WorkspaceCredentialMember[]>({
|
||||
queryKey: workspaceCredentialKeys.members(credentialId),
|
||||
queryFn: async () => {
|
||||
if (!credentialId) return []
|
||||
const data = await fetchJson<MembersResponse>(`/api/credentials/${credentialId}/members`)
|
||||
return data.members ?? []
|
||||
},
|
||||
enabled: Boolean(credentialId),
|
||||
staleTime: 30 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
export function useUpsertWorkspaceCredentialMember() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: {
|
||||
credentialId: string
|
||||
userId: string
|
||||
role: WorkspaceCredentialRole
|
||||
}) => {
|
||||
const response = await fetch(`/api/credentials/${payload.credentialId}/members`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
userId: payload.userId,
|
||||
role: payload.role,
|
||||
}),
|
||||
})
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to update credential member')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.members(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useRemoveWorkspaceCredentialMember() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: { credentialId: string; userId: string }) => {
|
||||
const response = await fetch(
|
||||
`/api/credentials/${payload.credentialId}/members?userId=${encodeURIComponent(payload.userId)}`,
|
||||
{ method: 'DELETE' }
|
||||
)
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to remove credential member')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.members(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -169,9 +169,9 @@ export function useConnectOAuthService() {
|
||||
|
||||
interface DisconnectServiceParams {
|
||||
provider: string
|
||||
providerId?: string
|
||||
providerId: string
|
||||
serviceId: string
|
||||
accountId?: string
|
||||
accountId: string
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -182,7 +182,7 @@ export function useDisconnectOAuthService() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ provider, providerId, accountId }: DisconnectServiceParams) => {
|
||||
mutationFn: async ({ provider, providerId }: DisconnectServiceParams) => {
|
||||
const response = await fetch('/api/auth/oauth/disconnect', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
@@ -191,7 +191,6 @@ export function useDisconnectOAuthService() {
|
||||
body: JSON.stringify({
|
||||
provider,
|
||||
providerId,
|
||||
accountId,
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -213,8 +212,7 @@ export function useDisconnectOAuthService() {
|
||||
oauthConnectionsKeys.connections(),
|
||||
previousServices.map((svc) => {
|
||||
if (svc.id === serviceId) {
|
||||
const updatedAccounts =
|
||||
accountId && svc.accounts ? svc.accounts.filter((acc) => acc.id !== accountId) : []
|
||||
const updatedAccounts = svc.accounts?.filter((acc) => acc.id !== accountId) || []
|
||||
return {
|
||||
...svc,
|
||||
accounts: updatedAccounts,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import type { Credential } from '@/lib/oauth'
|
||||
import { CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { useCredentialSetDetail } from '@/hooks/queries/credential-sets'
|
||||
import { fetchJson } from '@/hooks/selectors/helpers'
|
||||
|
||||
@@ -13,34 +13,15 @@ interface CredentialDetailResponse {
|
||||
}
|
||||
|
||||
export const oauthCredentialKeys = {
|
||||
list: (providerId?: string, workspaceId?: string, workflowId?: string) =>
|
||||
[
|
||||
'oauthCredentials',
|
||||
providerId ?? 'none',
|
||||
workspaceId ?? 'none',
|
||||
workflowId ?? 'none',
|
||||
] as const,
|
||||
list: (providerId?: string) => ['oauthCredentials', providerId ?? 'none'] as const,
|
||||
detail: (credentialId?: string, workflowId?: string) =>
|
||||
['oauthCredentialDetail', credentialId ?? 'none', workflowId ?? 'none'] as const,
|
||||
}
|
||||
|
||||
interface FetchOAuthCredentialsParams {
|
||||
providerId: string
|
||||
workspaceId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export async function fetchOAuthCredentials(
|
||||
params: FetchOAuthCredentialsParams
|
||||
): Promise<Credential[]> {
|
||||
const { providerId, workspaceId, workflowId } = params
|
||||
export async function fetchOAuthCredentials(providerId: string): Promise<Credential[]> {
|
||||
if (!providerId) return []
|
||||
const data = await fetchJson<CredentialListResponse>('/api/auth/oauth/credentials', {
|
||||
searchParams: {
|
||||
provider: providerId,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
},
|
||||
searchParams: { provider: providerId },
|
||||
})
|
||||
return data.credentials ?? []
|
||||
}
|
||||
@@ -59,44 +40,10 @@ export async function fetchOAuthCredentialDetail(
|
||||
return data.credentials ?? []
|
||||
}
|
||||
|
||||
interface UseOAuthCredentialsOptions {
|
||||
enabled?: boolean
|
||||
workspaceId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
function resolveOptions(
|
||||
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
|
||||
): Required<UseOAuthCredentialsOptions> {
|
||||
if (typeof enabledOrOptions === 'boolean') {
|
||||
return {
|
||||
enabled: enabledOrOptions,
|
||||
workspaceId: '',
|
||||
workflowId: '',
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
enabled: enabledOrOptions?.enabled ?? true,
|
||||
workspaceId: enabledOrOptions?.workspaceId ?? '',
|
||||
workflowId: enabledOrOptions?.workflowId ?? '',
|
||||
}
|
||||
}
|
||||
|
||||
export function useOAuthCredentials(
|
||||
providerId?: string,
|
||||
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
|
||||
) {
|
||||
const { enabled, workspaceId, workflowId } = resolveOptions(enabledOrOptions)
|
||||
|
||||
export function useOAuthCredentials(providerId?: string, enabled = true) {
|
||||
return useQuery<Credential[]>({
|
||||
queryKey: oauthCredentialKeys.list(providerId, workspaceId, workflowId),
|
||||
queryFn: () =>
|
||||
fetchOAuthCredentials({
|
||||
providerId: providerId ?? '',
|
||||
workspaceId: workspaceId || undefined,
|
||||
workflowId: workflowId || undefined,
|
||||
}),
|
||||
queryKey: oauthCredentialKeys.list(providerId),
|
||||
queryFn: () => fetchOAuthCredentials(providerId ?? ''),
|
||||
enabled: Boolean(providerId) && enabled,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
@@ -115,12 +62,7 @@ export function useOAuthCredentialDetail(
|
||||
})
|
||||
}
|
||||
|
||||
export function useCredentialName(
|
||||
credentialId?: string,
|
||||
providerId?: string,
|
||||
workflowId?: string,
|
||||
workspaceId?: string
|
||||
) {
|
||||
export function useCredentialName(credentialId?: string, providerId?: string, workflowId?: string) {
|
||||
// Check if this is a credential set value
|
||||
const isCredentialSet = credentialId?.startsWith(CREDENTIAL_SET.PREFIX) ?? false
|
||||
const credentialSetId = isCredentialSet
|
||||
@@ -135,11 +77,7 @@ export function useCredentialName(
|
||||
|
||||
const { data: credentials = [], isFetching: credentialsLoading } = useOAuthCredentials(
|
||||
providerId,
|
||||
{
|
||||
enabled: Boolean(providerId) && !isCredentialSet,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
}
|
||||
Boolean(providerId) && !isCredentialSet
|
||||
)
|
||||
|
||||
const selectedCredential = credentials.find((cred) => cred.id === credentialId)
|
||||
@@ -154,18 +92,18 @@ export function useCredentialName(
|
||||
shouldFetchDetail
|
||||
)
|
||||
|
||||
const detailCredential = foreignCredentials[0]
|
||||
const hasForeignMeta = foreignCredentials.length > 0
|
||||
const isForeignCredentialSet = isCredentialSet && !credentialSetData && !credentialSetLoading
|
||||
|
||||
const displayName =
|
||||
credentialSetData?.name ?? selectedCredential?.name ?? detailCredential?.name ?? null
|
||||
credentialSetData?.name ??
|
||||
selectedCredential?.name ??
|
||||
(hasForeignMeta ? CREDENTIAL.FOREIGN_LABEL : null) ??
|
||||
(isForeignCredentialSet ? CREDENTIAL.FOREIGN_LABEL : null)
|
||||
|
||||
return {
|
||||
displayName,
|
||||
isLoading:
|
||||
credentialsLoading ||
|
||||
foreignLoading ||
|
||||
(isCredentialSet && credentialSetLoading && !credentialSetData),
|
||||
isLoading: credentialsLoading || foreignLoading || (isCredentialSet && credentialSetLoading),
|
||||
hasForeignMeta,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
oneTimeToken,
|
||||
organization,
|
||||
} from 'better-auth/plugins'
|
||||
import { and, eq, inArray, sql } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { headers } from 'next/headers'
|
||||
import Stripe from 'stripe'
|
||||
import {
|
||||
@@ -150,6 +150,16 @@ export const auth = betterAuth({
|
||||
account: {
|
||||
create: {
|
||||
before: async (account) => {
|
||||
// Only one credential per (userId, providerId) is allowed
|
||||
// If user reconnects (even with a different external account), delete the old one
|
||||
// and let Better Auth create the new one (returning false breaks account linking flow)
|
||||
const existing = await db.query.account.findFirst({
|
||||
where: and(
|
||||
eq(schema.account.userId, account.userId),
|
||||
eq(schema.account.providerId, account.providerId)
|
||||
),
|
||||
})
|
||||
|
||||
const modifiedAccount = { ...account }
|
||||
|
||||
if (account.providerId === 'salesforce' && account.accessToken) {
|
||||
@@ -179,148 +189,32 @@ export const auth = betterAuth({
|
||||
}
|
||||
}
|
||||
|
||||
// Handle Microsoft refresh token expiry
|
||||
if (isMicrosoftProvider(account.providerId)) {
|
||||
modifiedAccount.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
if (existing) {
|
||||
// Delete the existing account so Better Auth can create the new one
|
||||
// This allows account linking/re-authorization to succeed
|
||||
await db.delete(schema.account).where(eq(schema.account.id, existing.id))
|
||||
|
||||
// Preserve the existing account ID so references (like workspace notifications) continue to work
|
||||
modifiedAccount.id = existing.id
|
||||
|
||||
logger.info('[account.create.before] Deleted existing account for re-authorization', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
existingAccountId: existing.id,
|
||||
preservingId: true,
|
||||
})
|
||||
|
||||
// Sync webhooks for credential sets after reconnecting (in after hook)
|
||||
}
|
||||
|
||||
return { data: modifiedAccount }
|
||||
},
|
||||
after: async (account) => {
|
||||
/**
|
||||
* Migrate credentials from stale account rows to the newly created one.
|
||||
*
|
||||
* Each getUserInfo appends a random UUID to the stable external ID so
|
||||
* that Better Auth never blocks cross-user connections. This means
|
||||
* re-connecting the same external identity creates a new row. We detect
|
||||
* the stale siblings here by comparing the stable prefix (everything
|
||||
* before the trailing UUID), migrate any credential FKs to the new row,
|
||||
* then delete the stale rows.
|
||||
*/
|
||||
try {
|
||||
const UUID_SUFFIX_RE = /-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/
|
||||
const stablePrefix = account.accountId.replace(UUID_SUFFIX_RE, '')
|
||||
|
||||
if (stablePrefix && stablePrefix !== account.accountId) {
|
||||
const siblings = await db
|
||||
.select({ id: schema.account.id, accountId: schema.account.accountId })
|
||||
.from(schema.account)
|
||||
.where(
|
||||
and(
|
||||
eq(schema.account.userId, account.userId),
|
||||
eq(schema.account.providerId, account.providerId),
|
||||
sql`${schema.account.id} != ${account.id}`
|
||||
)
|
||||
)
|
||||
|
||||
const staleRows = siblings.filter(
|
||||
(row) => row.accountId.replace(UUID_SUFFIX_RE, '') === stablePrefix
|
||||
)
|
||||
|
||||
if (staleRows.length > 0) {
|
||||
const staleIds = staleRows.map((row) => row.id)
|
||||
|
||||
await db
|
||||
.update(schema.credential)
|
||||
.set({ accountId: account.id })
|
||||
.where(inArray(schema.credential.accountId, staleIds))
|
||||
|
||||
await db.delete(schema.account).where(inArray(schema.account.id, staleIds))
|
||||
|
||||
logger.info('[account.create.after] Migrated credentials from stale accounts', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
newAccountId: account.id,
|
||||
migratedFrom: staleIds,
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[account.create.after] Failed to clean up stale accounts', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* If a pending credential draft exists for this (userId, providerId),
|
||||
* create the credential now with the user's chosen display name.
|
||||
* This is deterministic — the account row is guaranteed to exist.
|
||||
*/
|
||||
try {
|
||||
const [draft] = await db
|
||||
.select()
|
||||
.from(schema.pendingCredentialDraft)
|
||||
.where(
|
||||
and(
|
||||
eq(schema.pendingCredentialDraft.userId, account.userId),
|
||||
eq(schema.pendingCredentialDraft.providerId, account.providerId),
|
||||
sql`${schema.pendingCredentialDraft.expiresAt} > NOW()`
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (draft) {
|
||||
const credentialId = crypto.randomUUID()
|
||||
const now = new Date()
|
||||
|
||||
try {
|
||||
await db.insert(schema.credential).values({
|
||||
id: credentialId,
|
||||
workspaceId: draft.workspaceId,
|
||||
type: 'oauth',
|
||||
displayName: draft.displayName,
|
||||
providerId: account.providerId,
|
||||
accountId: account.id,
|
||||
createdBy: account.userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
await db.insert(schema.credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: account.userId,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: account.userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
logger.info('[account.create.after] Created credential from draft', {
|
||||
credentialId,
|
||||
displayName: draft.displayName,
|
||||
providerId: account.providerId,
|
||||
accountId: account.id,
|
||||
})
|
||||
} catch (insertError: unknown) {
|
||||
const code =
|
||||
insertError && typeof insertError === 'object' && 'code' in insertError
|
||||
? (insertError as { code: string }).code
|
||||
: undefined
|
||||
if (code !== '23505') {
|
||||
throw insertError
|
||||
}
|
||||
logger.info('[account.create.after] Credential already exists, skipping draft', {
|
||||
providerId: account.providerId,
|
||||
accountId: account.id,
|
||||
})
|
||||
}
|
||||
|
||||
await db
|
||||
.delete(schema.pendingCredentialDraft)
|
||||
.where(eq(schema.pendingCredentialDraft.id, draft.id))
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[account.create.after] Failed to create credential from draft', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
const { ensureUserStatsExists } = await import('@/lib/billing/core/usage')
|
||||
await ensureUserStatsExists(account.userId)
|
||||
@@ -1593,7 +1487,7 @@ export const auth = betterAuth({
|
||||
})
|
||||
|
||||
return {
|
||||
id: `${(data.user_id || data.hub_id).toString()}-${crypto.randomUUID()}`,
|
||||
id: `${data.user_id || data.hub_id.toString()}-${crypto.randomUUID()}`,
|
||||
name: data.user || 'HubSpot User',
|
||||
email: data.user || `hubspot-${data.hub_id}@hubspot.com`,
|
||||
emailVerified: true,
|
||||
@@ -1647,7 +1541,7 @@ export const auth = betterAuth({
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
id: `${(data.user_id || data.sub).toString()}-${crypto.randomUUID()}`,
|
||||
id: `${data.user_id || data.sub}-${crypto.randomUUID()}`,
|
||||
name: data.name || 'Salesforce User',
|
||||
email: data.email || `salesforce-${data.user_id}@salesforce.com`,
|
||||
emailVerified: data.email_verified || true,
|
||||
@@ -1706,7 +1600,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.data.id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${profile.data.id}-${crypto.randomUUID()}`,
|
||||
name: profile.data.name || 'X User',
|
||||
email: `${profile.data.username}@x.com`,
|
||||
image: profile.data.profile_image_url,
|
||||
@@ -1786,7 +1680,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${profile.account_id}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.display_name || 'Confluence User',
|
||||
email: profile.email || `${profile.account_id}@atlassian.com`,
|
||||
image: profile.picture || undefined,
|
||||
@@ -1897,7 +1791,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${profile.account_id}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.display_name || 'Jira User',
|
||||
email: profile.email || `${profile.account_id}@atlassian.com`,
|
||||
image: profile.picture || undefined,
|
||||
@@ -1947,7 +1841,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${data.id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${data.id}-${crypto.randomUUID()}`,
|
||||
name: data.email ? data.email.split('@')[0] : 'Airtable User',
|
||||
email: data.email || `${data.id}@airtable.user`,
|
||||
emailVerified: !!data.email,
|
||||
@@ -1996,7 +1890,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${(profile.bot?.owner?.user?.id || profile.id).toString()}-${crypto.randomUUID()}`,
|
||||
id: `${profile.bot?.owner?.user?.id || profile.id}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.bot?.owner?.user?.name || 'Notion User',
|
||||
email: profile.person?.email || `${profile.id}@notion.user`,
|
||||
emailVerified: !!profile.person?.email,
|
||||
@@ -2063,7 +1957,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${data.id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${data.id}-${crypto.randomUUID()}`,
|
||||
name: data.name || 'Reddit User',
|
||||
email: `${data.name}@reddit.user`,
|
||||
image: data.icon_img || undefined,
|
||||
@@ -2135,7 +2029,7 @@ export const auth = betterAuth({
|
||||
const viewer = data.viewer
|
||||
|
||||
return {
|
||||
id: `${viewer.id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${viewer.id}-${crypto.randomUUID()}`,
|
||||
email: viewer.email,
|
||||
name: viewer.name,
|
||||
emailVerified: true,
|
||||
@@ -2198,7 +2092,7 @@ export const auth = betterAuth({
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
id: `${data.account_id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${data.account_id}-${crypto.randomUUID()}`,
|
||||
email: data.email,
|
||||
name: data.name?.display_name || data.email,
|
||||
emailVerified: data.email_verified || false,
|
||||
@@ -2249,7 +2143,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.gid.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${profile.gid}-${crypto.randomUUID()}`,
|
||||
name: profile.name || 'Asana User',
|
||||
email: profile.email || `${profile.gid}@asana.user`,
|
||||
image: profile.photo?.image_128x128 || undefined,
|
||||
@@ -2484,7 +2378,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${profile.id}-${crypto.randomUUID()}`,
|
||||
name:
|
||||
`${profile.first_name || ''} ${profile.last_name || ''}`.trim() || 'Zoom User',
|
||||
email: profile.email || `${profile.id}@zoom.user`,
|
||||
@@ -2551,7 +2445,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${profile.id}-${crypto.randomUUID()}`,
|
||||
name: profile.display_name || 'Spotify User',
|
||||
email: profile.email || `${profile.id}@spotify.user`,
|
||||
emailVerified: true,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember, workflow as workflowTable } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { account, workflow as workflowTable } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
@@ -12,14 +12,17 @@ export interface CredentialAccessResult {
|
||||
requesterUserId?: string
|
||||
credentialOwnerUserId?: string
|
||||
workspaceId?: string
|
||||
resolvedCredentialId?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Centralizes auth + credential membership checks for OAuth usage.
|
||||
* - Workspace-scoped credential IDs enforce active credential_member access.
|
||||
* - Legacy account IDs are resolved to workspace-scoped credentials when workflowId is provided.
|
||||
* - Direct legacy account-ID access without workflowId is restricted to account owners only.
|
||||
* Centralizes auth + collaboration rules for credential use.
|
||||
* - Uses checkSessionOrInternalAuth to authenticate the caller
|
||||
* - Fetches credential owner
|
||||
* - Authorization rules:
|
||||
* - session: allow if requester owns the credential; otherwise require workflowId and
|
||||
* verify BOTH requester and owner have access to the workflow's workspace
|
||||
* - internal_jwt: require workflowId (by default) and verify credential owner has access to the
|
||||
* workflow's workspace (requester identity is the system/workflow)
|
||||
*/
|
||||
export async function authorizeCredentialUse(
|
||||
request: NextRequest,
|
||||
@@ -34,173 +37,71 @@ export async function authorizeCredentialUse(
|
||||
return { ok: false, error: auth.error || 'Authentication required' }
|
||||
}
|
||||
|
||||
const [workflowContext] = workflowId
|
||||
? await db
|
||||
.select({ workspaceId: workflowTable.workspaceId })
|
||||
.from(workflowTable)
|
||||
.where(eq(workflowTable.id, workflowId))
|
||||
.limit(1)
|
||||
: [null]
|
||||
|
||||
if (workflowId && (!workflowContext || !workflowContext.workspaceId)) {
|
||||
return { ok: false, error: 'Workflow not found' }
|
||||
}
|
||||
|
||||
const [platformCredential] = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
type: credential.type,
|
||||
accountId: credential.accountId,
|
||||
})
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (platformCredential) {
|
||||
if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
|
||||
return { ok: false, error: 'Unsupported credential type for OAuth access' }
|
||||
}
|
||||
|
||||
if (workflowContext && workflowContext.workspaceId !== platformCredential.workspaceId) {
|
||||
return { ok: false, error: 'Credential is not accessible from this workflow workspace' }
|
||||
}
|
||||
|
||||
const [accountRow] = await db
|
||||
.select({ userId: account.userId })
|
||||
.from(account)
|
||||
.where(eq(account.id, platformCredential.accountId))
|
||||
.limit(1)
|
||||
|
||||
if (!accountRow) {
|
||||
return { ok: false, error: 'Credential account not found' }
|
||||
}
|
||||
|
||||
const requesterPerm =
|
||||
auth.authType === 'internal_jwt'
|
||||
? null
|
||||
: await getUserEntityPermissions(auth.userId, 'workspace', platformCredential.workspaceId)
|
||||
|
||||
if (auth.authType !== 'internal_jwt') {
|
||||
const [membership] = await db
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, platformCredential.id),
|
||||
eq(credentialMember.userId, auth.userId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership || requesterPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
}
|
||||
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
accountRow.userId,
|
||||
'workspace',
|
||||
platformCredential.workspaceId
|
||||
)
|
||||
if (ownerPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId: accountRow.userId,
|
||||
workspaceId: platformCredential.workspaceId,
|
||||
resolvedCredentialId: platformCredential.accountId,
|
||||
}
|
||||
}
|
||||
|
||||
if (workflowContext?.workspaceId) {
|
||||
const [workspaceCredential] = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
accountId: credential.accountId,
|
||||
})
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.type, 'oauth'),
|
||||
eq(credential.workspaceId, workflowContext.workspaceId),
|
||||
eq(credential.accountId, credentialId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!workspaceCredential?.accountId) {
|
||||
return { ok: false, error: 'Credential not found' }
|
||||
}
|
||||
|
||||
const [accountRow] = await db
|
||||
.select({ userId: account.userId })
|
||||
.from(account)
|
||||
.where(eq(account.id, workspaceCredential.accountId))
|
||||
.limit(1)
|
||||
|
||||
if (!accountRow) {
|
||||
return { ok: false, error: 'Credential account not found' }
|
||||
}
|
||||
|
||||
if (auth.authType !== 'internal_jwt') {
|
||||
const [membership] = await db
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, workspaceCredential.id),
|
||||
eq(credentialMember.userId, auth.userId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
}
|
||||
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
accountRow.userId,
|
||||
'workspace',
|
||||
workflowContext.workspaceId
|
||||
)
|
||||
if (ownerPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId: accountRow.userId,
|
||||
workspaceId: workflowContext.workspaceId,
|
||||
resolvedCredentialId: workspaceCredential.accountId,
|
||||
}
|
||||
}
|
||||
|
||||
const [legacyAccount] = await db
|
||||
// Lookup credential owner
|
||||
const [credRow] = await db
|
||||
.select({ userId: account.userId })
|
||||
.from(account)
|
||||
.where(eq(account.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (!legacyAccount) {
|
||||
if (!credRow) {
|
||||
return { ok: false, error: 'Credential not found' }
|
||||
}
|
||||
|
||||
if (auth.authType === 'internal_jwt') {
|
||||
const credentialOwnerUserId = credRow.userId
|
||||
|
||||
// If requester owns the credential, allow immediately
|
||||
if (auth.authType !== 'internal_jwt' && auth.userId === credentialOwnerUserId) {
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId,
|
||||
}
|
||||
}
|
||||
|
||||
// For collaboration paths, workflowId is required to scope to a workspace
|
||||
if (!workflowId) {
|
||||
return { ok: false, error: 'workflowId is required' }
|
||||
}
|
||||
|
||||
if (auth.userId !== legacyAccount.userId) {
|
||||
const [wf] = await db
|
||||
.select({ workspaceId: workflowTable.workspaceId })
|
||||
.from(workflowTable)
|
||||
.where(eq(workflowTable.id, workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!wf || !wf.workspaceId) {
|
||||
return { ok: false, error: 'Workflow not found' }
|
||||
}
|
||||
|
||||
if (auth.authType === 'internal_jwt') {
|
||||
// Internal calls: verify credential owner belongs to the workflow's workspace
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
credentialOwnerUserId,
|
||||
'workspace',
|
||||
wf.workspaceId
|
||||
)
|
||||
if (ownerPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId,
|
||||
workspaceId: wf.workspaceId,
|
||||
}
|
||||
}
|
||||
|
||||
// Session: verify BOTH requester and owner belong to the workflow's workspace
|
||||
const requesterPerm = await getUserEntityPermissions(auth.userId, 'workspace', wf.workspaceId)
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
credentialOwnerUserId,
|
||||
'workspace',
|
||||
wf.workspaceId
|
||||
)
|
||||
if (requesterPerm === null || ownerPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
|
||||
@@ -208,7 +109,7 @@ export async function authorizeCredentialUse(
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId: legacyAccount.userId,
|
||||
resolvedCredentialId: credentialId,
|
||||
credentialOwnerUserId,
|
||||
workspaceId: wf.workspaceId,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,8 @@ export interface BuildPayloadParams {
|
||||
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
|
||||
commands?: string[]
|
||||
chatId?: string
|
||||
conversationId?: string
|
||||
prefetch?: boolean
|
||||
implicitFeedback?: string
|
||||
}
|
||||
|
||||
@@ -64,6 +66,10 @@ export async function buildCopilotRequestPayload(
|
||||
fileAttachments,
|
||||
commands,
|
||||
chatId,
|
||||
conversationId,
|
||||
prefetch,
|
||||
conversationHistory,
|
||||
implicitFeedback,
|
||||
} = params
|
||||
|
||||
const selectedModel = options.selectedModel
|
||||
@@ -154,6 +160,12 @@ export async function buildCopilotRequestPayload(
|
||||
version: SIM_AGENT_VERSION,
|
||||
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
|
||||
...(chatId ? { chatId } : {}),
|
||||
...(conversationId ? { conversationId } : {}),
|
||||
...(Array.isArray(conversationHistory) && conversationHistory.length > 0
|
||||
? { conversationHistory }
|
||||
: {}),
|
||||
...(typeof prefetch === 'boolean' ? { prefetch } : {}),
|
||||
...(implicitFeedback ? { implicitFeedback } : {}),
|
||||
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
|
||||
...(integrationTools.length > 0 ? { integrationTools } : {}),
|
||||
...(credentials ? { credentials } : {}),
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { COPILOT_CONFIRM_API_PATH, STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
||||
import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||
import {
|
||||
@@ -26,21 +26,119 @@ const MAX_BATCH_INTERVAL = 50
|
||||
const MIN_BATCH_INTERVAL = 16
|
||||
const MAX_QUEUE_SIZE = 5
|
||||
|
||||
/**
|
||||
* Send an auto-accept confirmation to the server for auto-allowed tools.
|
||||
* The server-side orchestrator polls Redis for this decision.
|
||||
*/
|
||||
export function sendAutoAcceptConfirmation(toolCallId: string): void {
|
||||
fetch(COPILOT_CONFIRM_API_PATH, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolCallId, status: 'accepted' }),
|
||||
}).catch((error) => {
|
||||
logger.warn('Failed to send auto-accept confirmation', {
|
||||
toolCallId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
})
|
||||
function isWorkflowEditToolCall(toolName?: string, params?: Record<string, unknown>): boolean {
|
||||
if (toolName === 'edit_workflow') return true
|
||||
if (toolName !== 'workflow_change') return false
|
||||
|
||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||
if (mode === 'apply') return true
|
||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||
}
|
||||
|
||||
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
|
||||
if (toolCall.execution?.target === 'sim_client_capability') {
|
||||
return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
|
||||
}
|
||||
return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
|
||||
}
|
||||
|
||||
function mapServerStateToClientState(state: unknown): ClientToolCallState {
|
||||
switch (String(state || '')) {
|
||||
case 'generating':
|
||||
return ClientToolCallState.generating
|
||||
case 'pending':
|
||||
case 'awaiting_approval':
|
||||
return ClientToolCallState.pending
|
||||
case 'executing':
|
||||
return ClientToolCallState.executing
|
||||
case 'success':
|
||||
return ClientToolCallState.success
|
||||
case 'rejected':
|
||||
case 'skipped':
|
||||
return ClientToolCallState.rejected
|
||||
case 'aborted':
|
||||
return ClientToolCallState.aborted
|
||||
case 'error':
|
||||
case 'failed':
|
||||
return ClientToolCallState.error
|
||||
default:
|
||||
return ClientToolCallState.pending
|
||||
}
|
||||
}
|
||||
|
||||
function extractToolUiMetadata(data: Record<string, unknown>): CopilotToolCall['ui'] | undefined {
|
||||
const ui = asRecord(data.ui)
|
||||
if (!ui || Object.keys(ui).length === 0) return undefined
|
||||
const autoAllowedFromUi = ui.autoAllowed === true
|
||||
const autoAllowedFromData = data.autoAllowed === true
|
||||
return {
|
||||
title: typeof ui.title === 'string' ? ui.title : undefined,
|
||||
phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
|
||||
icon: typeof ui.icon === 'string' ? ui.icon : undefined,
|
||||
showInterrupt: ui.showInterrupt === true,
|
||||
showRemember: ui.showRemember === true,
|
||||
autoAllowed: autoAllowedFromUi || autoAllowedFromData,
|
||||
actions: Array.isArray(ui.actions)
|
||||
? ui.actions
|
||||
.map((action) => {
|
||||
const a = asRecord(action)
|
||||
const id = typeof a.id === 'string' ? a.id : undefined
|
||||
const label = typeof a.label === 'string' ? a.label : undefined
|
||||
const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
|
||||
if (!id || !label) return null
|
||||
return {
|
||||
id,
|
||||
label,
|
||||
kind,
|
||||
remember: a.remember === true,
|
||||
}
|
||||
})
|
||||
.filter((a): a is NonNullable<typeof a> => !!a)
|
||||
: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
function extractToolExecutionMetadata(
|
||||
data: Record<string, unknown>
|
||||
): CopilotToolCall['execution'] | undefined {
|
||||
const execution = asRecord(data.execution)
|
||||
if (!execution || Object.keys(execution).length === 0) return undefined
|
||||
return {
|
||||
target: typeof execution.target === 'string' ? execution.target : undefined,
|
||||
capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
|
||||
}
|
||||
}
|
||||
|
||||
function isWorkflowChangeApplyCall(toolName?: string, params?: Record<string, unknown>): boolean {
|
||||
if (toolName !== 'workflow_change') return false
|
||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||
if (mode === 'apply') return true
|
||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||
}
|
||||
|
||||
function extractWorkflowStateFromResultPayload(
|
||||
resultPayload: Record<string, unknown>
|
||||
): WorkflowState | null {
|
||||
const directState = asRecord(resultPayload.workflowState)
|
||||
if (directState) return directState as unknown as WorkflowState
|
||||
|
||||
const editResult = asRecord(resultPayload.editResult)
|
||||
const nestedState = asRecord(editResult?.workflowState)
|
||||
if (nestedState) return nestedState as unknown as WorkflowState
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
function extractOperationListFromResultPayload(
|
||||
resultPayload: Record<string, unknown>
|
||||
): Array<Record<string, unknown>> | undefined {
|
||||
const operations = resultPayload.operations
|
||||
if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>
|
||||
|
||||
const compiled = resultPayload.compiledOperations
|
||||
if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
|
||||
@@ -244,14 +342,28 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
try {
|
||||
const eventData = asRecord(data?.data)
|
||||
const toolCallId: string | undefined =
|
||||
data?.toolCallId || (eventData.id as string | undefined)
|
||||
data?.toolCallId ||
|
||||
(eventData.id as string | undefined) ||
|
||||
(eventData.callId as string | undefined)
|
||||
const success: boolean | undefined = data?.success
|
||||
const failedDependency: boolean = data?.failedDependency === true
|
||||
const resultObj = asRecord(data?.result)
|
||||
const skipped: boolean = resultObj.skipped === true
|
||||
if (!toolCallId) return
|
||||
const uiMetadata = extractToolUiMetadata(eventData)
|
||||
const executionMetadata = extractToolExecutionMetadata(eventData)
|
||||
const serverState = (eventData.state as string | undefined) || undefined
|
||||
const targetState = serverState
|
||||
? mapServerStateToClientState(serverState)
|
||||
: success
|
||||
? ClientToolCallState.success
|
||||
: failedDependency || skipped
|
||||
? ClientToolCallState.rejected
|
||||
: ClientToolCallState.error
|
||||
const resultPayload = asRecord(data?.result || eventData.result || eventData.data || data?.data)
|
||||
const { toolCallsById } = get()
|
||||
const current = toolCallsById[toolCallId]
|
||||
let paramsForCurrentToolCall: Record<string, unknown> | undefined = current?.params
|
||||
if (current) {
|
||||
if (
|
||||
isRejectedState(current.state) ||
|
||||
@@ -260,16 +372,32 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
) {
|
||||
return
|
||||
}
|
||||
const targetState = success
|
||||
? ClientToolCallState.success
|
||||
: failedDependency || skipped
|
||||
? ClientToolCallState.rejected
|
||||
: ClientToolCallState.error
|
||||
if (
|
||||
targetState === ClientToolCallState.success &&
|
||||
isWorkflowChangeApplyCall(current.name, paramsForCurrentToolCall)
|
||||
) {
|
||||
const operations = extractOperationListFromResultPayload(resultPayload || {})
|
||||
if (operations && operations.length > 0) {
|
||||
paramsForCurrentToolCall = {
|
||||
...(current.params || {}),
|
||||
operations,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const updatedMap = { ...toolCallsById }
|
||||
updatedMap[toolCallId] = {
|
||||
...current,
|
||||
ui: uiMetadata || current.ui,
|
||||
execution: executionMetadata || current.execution,
|
||||
params: paramsForCurrentToolCall,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
||||
display: resolveToolDisplay(
|
||||
current.name,
|
||||
targetState,
|
||||
current.id,
|
||||
paramsForCurrentToolCall
|
||||
),
|
||||
}
|
||||
set({ toolCallsById: updatedMap })
|
||||
|
||||
@@ -312,31 +440,39 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
}
|
||||
}
|
||||
|
||||
if (current.name === 'edit_workflow') {
|
||||
if (
|
||||
targetState === ClientToolCallState.success &&
|
||||
isWorkflowEditToolCall(current.name, paramsForCurrentToolCall)
|
||||
) {
|
||||
try {
|
||||
const resultPayload = asRecord(
|
||||
data?.result || eventData.result || eventData.data || data?.data
|
||||
)
|
||||
const workflowState = asRecord(resultPayload?.workflowState)
|
||||
const hasWorkflowState = !!resultPayload?.workflowState
|
||||
logger.info('[SSE] edit_workflow result received', {
|
||||
const workflowState = resultPayload
|
||||
? extractWorkflowStateFromResultPayload(resultPayload)
|
||||
: null
|
||||
const hasWorkflowState = !!workflowState
|
||||
logger.info('[SSE] workflow edit result received', {
|
||||
toolName: current.name,
|
||||
hasWorkflowState,
|
||||
blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
|
||||
edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
|
||||
blockCount: hasWorkflowState
|
||||
? Object.keys((workflowState as any).blocks ?? {}).length
|
||||
: 0,
|
||||
edgeCount:
|
||||
hasWorkflowState && Array.isArray((workflowState as any).edges)
|
||||
? (workflowState as any).edges.length
|
||||
: 0,
|
||||
})
|
||||
if (hasWorkflowState) {
|
||||
if (workflowState) {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
diffStore
|
||||
.setProposedChanges(resultPayload.workflowState as WorkflowState)
|
||||
.catch((err) => {
|
||||
logger.error('[SSE] Failed to apply edit_workflow diff', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
diffStore.setProposedChanges(workflowState).catch((err) => {
|
||||
logger.error('[SSE] Failed to apply workflow edit diff', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
toolName: current.name,
|
||||
})
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('[SSE] edit_workflow result handling failed', {
|
||||
logger.error('[SSE] workflow edit result handling failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
toolName: current.name,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -460,16 +596,23 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
: failedDependency || skipped
|
||||
? ClientToolCallState.rejected
|
||||
: ClientToolCallState.error
|
||||
const paramsForBlock =
|
||||
b.toolCall?.id === toolCallId
|
||||
? paramsForCurrentToolCall || b.toolCall?.params
|
||||
: b.toolCall?.params
|
||||
context.contentBlocks[i] = {
|
||||
...b,
|
||||
toolCall: {
|
||||
...b.toolCall,
|
||||
params: paramsForBlock,
|
||||
ui: uiMetadata || b.toolCall?.ui,
|
||||
execution: executionMetadata || b.toolCall?.execution,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(
|
||||
b.toolCall?.name,
|
||||
targetState,
|
||||
toolCallId,
|
||||
b.toolCall?.params
|
||||
paramsForBlock
|
||||
),
|
||||
},
|
||||
}
|
||||
@@ -487,7 +630,9 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
try {
|
||||
const errorData = asRecord(data?.data)
|
||||
const toolCallId: string | undefined =
|
||||
data?.toolCallId || (errorData.id as string | undefined)
|
||||
data?.toolCallId ||
|
||||
(errorData.id as string | undefined) ||
|
||||
(errorData.callId as string | undefined)
|
||||
const failedDependency: boolean = data?.failedDependency === true
|
||||
if (!toolCallId) return
|
||||
const { toolCallsById } = get()
|
||||
@@ -500,12 +645,18 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
) {
|
||||
return
|
||||
}
|
||||
const targetState = failedDependency
|
||||
? ClientToolCallState.rejected
|
||||
: ClientToolCallState.error
|
||||
const targetState = errorData.state
|
||||
? mapServerStateToClientState(errorData.state)
|
||||
: failedDependency
|
||||
? ClientToolCallState.rejected
|
||||
: ClientToolCallState.error
|
||||
const uiMetadata = extractToolUiMetadata(errorData)
|
||||
const executionMetadata = extractToolExecutionMetadata(errorData)
|
||||
const updatedMap = { ...toolCallsById }
|
||||
updatedMap[toolCallId] = {
|
||||
...current,
|
||||
ui: uiMetadata || current.ui,
|
||||
execution: executionMetadata || current.execution,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
||||
}
|
||||
@@ -520,13 +671,19 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
isBackgroundState(b.toolCall?.state)
|
||||
)
|
||||
break
|
||||
const targetState = failedDependency
|
||||
? ClientToolCallState.rejected
|
||||
: ClientToolCallState.error
|
||||
const targetState = errorData.state
|
||||
? mapServerStateToClientState(errorData.state)
|
||||
: failedDependency
|
||||
? ClientToolCallState.rejected
|
||||
: ClientToolCallState.error
|
||||
const uiMetadata = extractToolUiMetadata(errorData)
|
||||
const executionMetadata = extractToolExecutionMetadata(errorData)
|
||||
context.contentBlocks[i] = {
|
||||
...b,
|
||||
toolCall: {
|
||||
...b.toolCall,
|
||||
ui: uiMetadata || b.toolCall?.ui,
|
||||
execution: executionMetadata || b.toolCall?.execution,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(
|
||||
b.toolCall?.name,
|
||||
@@ -547,19 +704,26 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
}
|
||||
},
|
||||
tool_generating: (data, context, get, set) => {
|
||||
const { toolCallId, toolName } = data
|
||||
const eventData = asRecord(data?.data)
|
||||
const toolCallId =
|
||||
data?.toolCallId ||
|
||||
(eventData.id as string | undefined) ||
|
||||
(eventData.callId as string | undefined)
|
||||
const toolName =
|
||||
data?.toolName ||
|
||||
(eventData.name as string | undefined) ||
|
||||
(eventData.toolName as string | undefined)
|
||||
if (!toolCallId || !toolName) return
|
||||
const { toolCallsById } = get()
|
||||
|
||||
if (!toolCallsById[toolCallId]) {
|
||||
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
||||
const initialState = isAutoAllowed
|
||||
? ClientToolCallState.executing
|
||||
: ClientToolCallState.pending
|
||||
const initialState = ClientToolCallState.generating
|
||||
const tc: CopilotToolCall = {
|
||||
id: toolCallId,
|
||||
name: toolName,
|
||||
state: initialState,
|
||||
ui: extractToolUiMetadata(eventData),
|
||||
execution: extractToolExecutionMetadata(eventData),
|
||||
display: resolveToolDisplay(toolName, initialState, toolCallId),
|
||||
}
|
||||
const updated = { ...toolCallsById, [toolCallId]: tc }
|
||||
@@ -572,17 +736,27 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
},
|
||||
tool_call: (data, context, get, set) => {
|
||||
const toolData = asRecord(data?.data)
|
||||
const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
|
||||
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||
const id: string | undefined =
|
||||
(toolData.id as string | undefined) ||
|
||||
(toolData.callId as string | undefined) ||
|
||||
data?.toolCallId
|
||||
const name: string | undefined =
|
||||
(toolData.name as string | undefined) ||
|
||||
(toolData.toolName as string | undefined) ||
|
||||
data?.toolName
|
||||
if (!id) return
|
||||
const args = toolData.arguments as Record<string, unknown> | undefined
|
||||
const isPartial = toolData.partial === true
|
||||
const uiMetadata = extractToolUiMetadata(toolData)
|
||||
const executionMetadata = extractToolExecutionMetadata(toolData)
|
||||
const serverState = toolData.state
|
||||
const { toolCallsById } = get()
|
||||
|
||||
const existing = toolCallsById[id]
|
||||
const toolName = name || existing?.name || 'unknown_tool'
|
||||
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
||||
let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
|
||||
let initialState = serverState
|
||||
? mapServerStateToClientState(serverState)
|
||||
: ClientToolCallState.pending
|
||||
|
||||
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
||||
if (
|
||||
@@ -597,6 +771,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
...existing,
|
||||
name: toolName,
|
||||
state: initialState,
|
||||
ui: uiMetadata || existing.ui,
|
||||
execution: executionMetadata || existing.execution,
|
||||
...(args ? { params: args } : {}),
|
||||
display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
|
||||
}
|
||||
@@ -604,6 +780,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
id,
|
||||
name: toolName,
|
||||
state: initialState,
|
||||
ui: uiMetadata,
|
||||
execution: executionMetadata,
|
||||
...(args ? { params: args } : {}),
|
||||
display: resolveToolDisplay(toolName, initialState, id, args),
|
||||
}
|
||||
@@ -618,20 +796,12 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
return
|
||||
}
|
||||
|
||||
// Auto-allowed tools: send confirmation to the server so it can proceed
|
||||
// without waiting for the user to click "Allow".
|
||||
if (isAutoAllowed) {
|
||||
sendAutoAcceptConfirmation(id)
|
||||
}
|
||||
const shouldInterrupt = next.ui?.showInterrupt === true
|
||||
|
||||
// Client-executable run tools: execute on the client for real-time feedback
|
||||
// (block pulsing, console logs, stop button). The server defers execution
|
||||
// for these tools in interactive mode; the client reports back via mark-complete.
|
||||
if (
|
||||
CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName) &&
|
||||
initialState === ClientToolCallState.executing
|
||||
) {
|
||||
executeRunToolOnClient(id, toolName, args || existing?.params || {})
|
||||
// Client-run capability: execution is delegated to the browser.
|
||||
// We run immediately only when no interrupt is required.
|
||||
if (isClientRunCapability(next) && !shouldInterrupt) {
|
||||
executeRunToolOnClient(id, toolName, args || next.params || {})
|
||||
}
|
||||
|
||||
// OAuth: dispatch event to open the OAuth connect modal
|
||||
|
||||
@@ -9,9 +9,10 @@ import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
import { resolveToolDisplay } from '@/lib/copilot/store-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import {
  type SSEHandler,
  sendAutoAcceptConfirmation,
  sseHandlers,
  updateStreamingMessage,
} from './handlers'

@@ -24,6 +25,113 @@ type StoreSet = (
  partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
) => void

function mapServerStateToClientState(state: unknown): ClientToolCallState {
  switch (String(state || '')) {
    case 'generating':
      return ClientToolCallState.generating
    case 'pending':
    case 'awaiting_approval':
      return ClientToolCallState.pending
    case 'executing':
      return ClientToolCallState.executing
    case 'success':
      return ClientToolCallState.success
    case 'rejected':
    case 'skipped':
      return ClientToolCallState.rejected
    case 'aborted':
      return ClientToolCallState.aborted
    case 'error':
    case 'failed':
      return ClientToolCallState.error
    default:
      return ClientToolCallState.pending
  }
}
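As a rough sketch (inputs assumed, not from the diff): server-sent state strings normalize to client states, and anything unrecognized falls back to pending.

// Illustrative only.
const awaiting = mapServerStateToClientState('awaiting_approval') // ClientToolCallState.pending
const missing = mapServerStateToClientState(undefined) // ClientToolCallState.pending (empty string hits the default case)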
function extractToolUiMetadata(data: Record<string, unknown>): CopilotToolCall['ui'] | undefined {
|
||||
const ui = asRecord(data.ui)
|
||||
if (!ui || Object.keys(ui).length === 0) return undefined
|
||||
const autoAllowedFromUi = ui.autoAllowed === true
|
||||
const autoAllowedFromData = data.autoAllowed === true
|
||||
return {
|
||||
title: typeof ui.title === 'string' ? ui.title : undefined,
|
||||
phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
|
||||
icon: typeof ui.icon === 'string' ? ui.icon : undefined,
|
||||
showInterrupt: ui.showInterrupt === true,
|
||||
showRemember: ui.showRemember === true,
|
||||
autoAllowed: autoAllowedFromUi || autoAllowedFromData,
|
||||
actions: Array.isArray(ui.actions)
|
||||
? ui.actions
|
||||
.map((action) => {
|
||||
const a = asRecord(action)
|
||||
const id = typeof a.id === 'string' ? a.id : undefined
|
||||
const label = typeof a.label === 'string' ? a.label : undefined
|
||||
const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
|
||||
if (!id || !label) return null
|
||||
return {
|
||||
id,
|
||||
label,
|
||||
kind,
|
||||
remember: a.remember === true,
|
||||
}
|
||||
})
|
||||
.filter((a): a is NonNullable<typeof a> => !!a)
|
||||
: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
function extractToolExecutionMetadata(
|
||||
data: Record<string, unknown>
|
||||
): CopilotToolCall['execution'] | undefined {
|
||||
const execution = asRecord(data.execution)
|
||||
if (!execution || Object.keys(execution).length === 0) return undefined
|
||||
return {
|
||||
target: typeof execution.target === 'string' ? execution.target : undefined,
|
||||
capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
|
||||
}
|
||||
}
|
||||
|
||||
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
  if (toolCall.execution?.target === 'sim_client_capability') {
    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
  }
  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
}

function isWorkflowChangeApplyCall(toolCall: CopilotToolCall): boolean {
  if (toolCall.name !== 'workflow_change') return false
  const params = (toolCall.params || {}) as Record<string, unknown>
  const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
  if (mode === 'apply') return true
  return typeof params.proposalId === 'string' && params.proposalId.length > 0
}
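A minimal sketch of how these two guards might be exercised; the tool-call literal below is hypothetical and cast for brevity.

// Hypothetical tool call, used only to illustrate the guards above.
const call = {
  id: 'tc_example',
  name: 'workflow_change',
  state: ClientToolCallState.pending,
  params: { proposalId: 'proposal_123' },
  execution: { target: 'sim_client_capability', capabilityId: 'workflow.run' },
} as CopilotToolCall
const runsOnClient = isClientRunCapability(call) // true — execution metadata names a client capability
const isApply = isWorkflowChangeApplyCall(call) // true — a non-empty proposalId implies apply mode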
function extractWorkflowStateFromResultPayload(
|
||||
resultPayload: Record<string, unknown>
|
||||
): WorkflowState | null {
|
||||
const directState = asRecord(resultPayload.workflowState)
|
||||
if (directState) return directState as unknown as WorkflowState
|
||||
|
||||
const editResult = asRecord(resultPayload.editResult)
|
||||
const nestedState = asRecord(editResult?.workflowState)
|
||||
if (nestedState) return nestedState as unknown as WorkflowState
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
function extractOperationListFromResultPayload(
|
||||
resultPayload: Record<string, unknown>
|
||||
): Array<Record<string, unknown>> | undefined {
|
||||
const operations = resultPayload.operations
|
||||
if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>
|
||||
|
||||
const compiled = resultPayload.compiledOperations
|
||||
if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
export function appendSubAgentContent(
|
||||
context: ClientStreamingContext,
|
||||
parentToolCallId: string,
|
||||
@@ -164,6 +272,8 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||
if (!id || !name) return
|
||||
const isPartial = toolData.partial === true
|
||||
const uiMetadata = extractToolUiMetadata(toolData)
|
||||
const executionMetadata = extractToolExecutionMetadata(toolData)
|
||||
|
||||
let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
|
||||
| Record<string, unknown>
|
||||
@@ -199,9 +309,10 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
const existingToolCall =
|
||||
existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined
|
||||
|
||||
// Auto-allowed tools skip pending state to avoid flashing interrupt buttons
|
||||
const isAutoAllowed = get().isToolAutoAllowed(name)
|
||||
let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
|
||||
const serverState = toolData.state
|
||||
let initialState = serverState
|
||||
? mapServerStateToClientState(serverState)
|
||||
: ClientToolCallState.pending
|
||||
|
||||
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
||||
if (
|
||||
@@ -215,6 +326,8 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
id,
|
||||
name,
|
||||
state: initialState,
|
||||
ui: uiMetadata,
|
||||
execution: executionMetadata,
|
||||
...(args ? { params: args } : {}),
|
||||
display: resolveToolDisplay(name, initialState, id, args),
|
||||
}
|
||||
@@ -241,16 +354,11 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
return
|
||||
}
|
||||
|
||||
// Auto-allowed tools: send confirmation to the server so it can proceed
|
||||
// without waiting for the user to click "Allow".
|
||||
if (isAutoAllowed) {
|
||||
sendAutoAcceptConfirmation(id)
|
||||
}
|
||||
const shouldInterrupt = subAgentToolCall.ui?.showInterrupt === true
|
||||
|
||||
// Client-executable run tools: if auto-allowed, execute immediately for
|
||||
// real-time feedback. For non-auto-allowed, the user must click "Allow"
|
||||
// first — handleRun in tool-call.tsx triggers executeRunToolOnClient.
|
||||
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(name) && isAutoAllowed) {
|
||||
// Client-run capability: execution is delegated to the browser.
|
||||
// Execute immediately only for non-interrupting calls.
|
||||
if (isClientRunCapability(subAgentToolCall) && !shouldInterrupt) {
|
||||
executeRunToolOnClient(id, name, args || {})
|
||||
}
|
||||
},
|
||||
@@ -275,17 +383,45 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
if (!context.subAgentToolCalls[parentToolCallId]) return
|
||||
if (!context.subAgentBlocks[parentToolCallId]) return
|
||||
|
||||
const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
|
||||
const serverState = resultData.state
|
||||
const targetState = serverState
|
||||
? mapServerStateToClientState(serverState)
|
||||
: success
|
||||
? ClientToolCallState.success
|
||||
: ClientToolCallState.error
|
||||
const uiMetadata = extractToolUiMetadata(resultData)
|
||||
const executionMetadata = extractToolExecutionMetadata(resultData)
|
||||
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||
(tc: CopilotToolCall) => tc.id === toolCallId
|
||||
)
|
||||
|
||||
if (existingIndex >= 0) {
|
||||
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
||||
let nextParams = existing.params
|
||||
const resultPayload = asRecord(
|
||||
data?.result || resultData.result || resultData.data || data?.data
|
||||
)
|
||||
if (
|
||||
targetState === ClientToolCallState.success &&
|
||||
isWorkflowChangeApplyCall(existing) &&
|
||||
resultPayload
|
||||
) {
|
||||
const operations = extractOperationListFromResultPayload(resultPayload)
|
||||
if (operations && operations.length > 0) {
|
||||
nextParams = {
|
||||
...(existing.params || {}),
|
||||
operations,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const updatedSubAgentToolCall = {
|
||||
...existing,
|
||||
params: nextParams,
|
||||
ui: uiMetadata || existing.ui,
|
||||
execution: executionMetadata || existing.execution,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
|
||||
display: resolveToolDisplay(existing.name, targetState, toolCallId, nextParams),
|
||||
}
|
||||
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
||||
|
||||
@@ -309,6 +445,23 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
state: targetState,
|
||||
})
|
||||
}
|
||||
|
||||
if (
|
||||
targetState === ClientToolCallState.success &&
|
||||
resultPayload &&
|
||||
isWorkflowChangeApplyCall(updatedSubAgentToolCall)
|
||||
) {
|
||||
const workflowState = extractWorkflowStateFromResultPayload(resultPayload)
|
||||
if (workflowState) {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
diffStore.setProposedChanges(workflowState).catch((error) => {
|
||||
logger.error('[SubAgent] Failed to apply workflow_change diff', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
toolCallId,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||
|
||||
@@ -101,9 +101,6 @@ export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'
|
||||
/** POST — revert to a checkpoint. */
|
||||
export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'
|
||||
|
||||
/** GET/POST/DELETE — manage auto-allowed tools. */
|
||||
export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
|
||||
|
||||
/** GET — fetch dynamically available copilot models. */
|
||||
export const COPILOT_MODELS_API_PATH = '/api/copilot/models'
|
||||
|
||||
|
||||
@@ -1,67 +0,0 @@
export const INTERRUPT_TOOL_NAMES = [
  'set_global_workflow_variables',
  'run_workflow',
  'run_workflow_until_block',
  'run_from_block',
  'run_block',
  'manage_mcp_tool',
  'manage_custom_tool',
  'deploy_mcp',
  'deploy_chat',
  'deploy_api',
  'create_workspace_mcp_server',
  'set_environment_variables',
  'make_api_request',
  'oauth_request_access',
  'navigate_ui',
  'knowledge_base',
  'generate_api_key',
] as const

export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)

export const SUBAGENT_TOOL_NAMES = [
  'debug',
  'edit',
  'build',
  'plan',
  'test',
  'deploy',
  'auth',
  'research',
  'knowledge',
  'custom_tool',
  'tour',
  'info',
  'workflow',
  'evaluate',
  'superagent',
  'discovery',
] as const

export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)

/**
 * Respond tools are internal to the copilot's subagent system.
 * They're used by subagents to signal completion and should NOT be executed by the sim side.
 * The copilot backend handles these internally.
 */
export const RESPOND_TOOL_NAMES = [
  'plan_respond',
  'edit_respond',
  'build_respond',
  'debug_respond',
  'info_respond',
  'research_respond',
  'deploy_respond',
  'superagent_respond',
  'discovery_respond',
  'tour_respond',
  'auth_respond',
  'workflow_respond',
  'knowledge_respond',
  'custom_tool_respond',
  'test_respond',
] as const

export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)

@@ -1,17 +1,12 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
|
||||
import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
|
||||
import {
|
||||
asRecord,
|
||||
getEventData,
|
||||
markToolResultSeen,
|
||||
wasToolResultSeen,
|
||||
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||
import {
|
||||
isIntegrationTool,
|
||||
isToolAvailableOnSimSide,
|
||||
markToolComplete,
|
||||
} from '@/lib/copilot/orchestrator/tool-executor'
|
||||
import { markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
|
||||
import type {
|
||||
ContentBlock,
|
||||
ExecutionContext,
|
||||
@@ -22,7 +17,6 @@ import type {
|
||||
} from '@/lib/copilot/orchestrator/types'
|
||||
import {
|
||||
executeToolAndReport,
|
||||
isInterruptToolName,
|
||||
waitForToolCompletion,
|
||||
waitForToolDecision,
|
||||
} from './tool-execution'
|
||||
@@ -41,6 +35,113 @@ const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
|
||||
'run_block',
|
||||
])
|
||||
|
||||
function mapServerStateToToolStatus(state: unknown): ToolCallState['status'] {
  switch (String(state || '')) {
    case 'generating':
    case 'pending':
    case 'awaiting_approval':
      return 'pending'
    case 'executing':
      return 'executing'
    case 'success':
      return 'success'
    case 'rejected':
    case 'skipped':
      return 'rejected'
    case 'aborted':
      return 'skipped'
    case 'error':
    case 'failed':
      return 'error'
    default:
      return 'pending'
  }
}

function getExecutionTarget(
  toolData: Record<string, unknown>,
  toolName: string
): { target: string; capabilityId?: string } {
  const execution = asRecord(toolData.execution)
  if (typeof execution.target === 'string' && execution.target.length > 0) {
    return {
      target: execution.target,
      capabilityId:
        typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
    }
  }

  // Fallback only when metadata is missing.
  if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
    return { target: 'sim_client_capability', capabilityId: 'workflow.run' }
  }
  return { target: 'sim_server' }
}

function needsApproval(toolData: Record<string, unknown>): boolean {
  const ui = asRecord(toolData.ui)
  return ui.showInterrupt === true
}
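A sketch, under the assumption that tool metadata arrives as plain records, of how the execution target and approval flag combine into a routing decision:

// Hypothetical toolData record mirroring the ui/execution metadata read above.
const toolData: Record<string, unknown> = {
  execution: { target: 'sim_client_capability', capabilityId: 'workflow.run' },
  ui: { showInterrupt: true },
}
const execution = getExecutionTarget(toolData, 'run_workflow') // { target: 'sim_client_capability', capabilityId: 'workflow.run' }
const isInteractive = true // assumed interactive session
const requiresApproval = isInteractive && needsApproval(toolData) // true — wait for the user's decision before executing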
async function waitForClientCapabilityAndReport(
|
||||
toolCall: ToolCallState,
|
||||
options: OrchestratorOptions,
|
||||
logScope: string
|
||||
): Promise<void> {
|
||||
toolCall.status = 'executing'
|
||||
const completion = await waitForToolCompletion(
|
||||
toolCall.id,
|
||||
options.timeout || STREAM_TIMEOUT_MS,
|
||||
options.abortSignal
|
||||
)
|
||||
|
||||
if (completion?.status === 'background') {
|
||||
toolCall.status = 'skipped'
|
||||
toolCall.endTime = Date.now()
|
||||
markToolComplete(
|
||||
toolCall.id,
|
||||
toolCall.name,
|
||||
202,
|
||||
completion.message || 'Tool execution moved to background',
|
||||
{ background: true }
|
||||
).catch((err) => {
|
||||
logger.error(`markToolComplete fire-and-forget failed (${logScope} background)`, {
|
||||
toolCallId: toolCall.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCall.id)
|
||||
return
|
||||
}
|
||||
|
||||
if (completion?.status === 'rejected') {
|
||||
toolCall.status = 'rejected'
|
||||
toolCall.endTime = Date.now()
|
||||
markToolComplete(toolCall.id, toolCall.name, 400, completion.message || 'Tool execution rejected')
|
||||
.catch((err) => {
|
||||
logger.error(`markToolComplete fire-and-forget failed (${logScope} rejected)`, {
|
||||
toolCallId: toolCall.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCall.id)
|
||||
return
|
||||
}
|
||||
|
||||
const success = completion?.status === 'success'
|
||||
toolCall.status = success ? 'success' : 'error'
|
||||
toolCall.endTime = Date.now()
|
||||
const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
|
||||
markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
|
||||
logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
|
||||
toolCallId: toolCall.id,
|
||||
toolName: toolCall.name,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCall.id)
|
||||
}
|
||||
|
||||
// Normalization + dedupe helpers live in sse-utils to keep server/client in sync.
|
||||
|
||||
function inferToolSuccess(data: Record<string, unknown> | undefined): {
|
||||
@@ -85,7 +186,11 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
|
||||
const { success, hasResultData, hasError } = inferToolSuccess(data)
|
||||
|
||||
current.status = success ? 'success' : 'error'
|
||||
current.status = data?.state
|
||||
? mapServerStateToToolStatus(data.state)
|
||||
: success
|
||||
? 'success'
|
||||
: 'error'
|
||||
current.endTime = Date.now()
|
||||
if (hasResultData) {
|
||||
current.result = {
|
||||
@@ -104,7 +209,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
if (!toolCallId) return
|
||||
const current = context.toolCalls.get(toolCallId)
|
||||
if (!current) return
|
||||
current.status = 'error'
|
||||
current.status = data?.state ? mapServerStateToToolStatus(data.state) : 'error'
|
||||
current.error = (data?.error as string | undefined) || 'Tool execution failed'
|
||||
current.endTime = Date.now()
|
||||
},
|
||||
@@ -121,7 +226,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
context.toolCalls.set(toolCallId, {
|
||||
id: toolCallId,
|
||||
name: toolName,
|
||||
status: 'pending',
|
||||
status: data?.state ? mapServerStateToToolStatus(data.state) : 'pending',
|
||||
startTime: Date.now(),
|
||||
})
|
||||
}
|
||||
@@ -156,7 +261,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
context.toolCalls.set(toolCallId, {
|
||||
id: toolCallId,
|
||||
name: toolName,
|
||||
status: 'pending',
|
||||
status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
|
||||
params: args,
|
||||
startTime: Date.now(),
|
||||
})
|
||||
@@ -170,83 +275,29 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
const toolCall = context.toolCalls.get(toolCallId)
|
||||
if (!toolCall) return
|
||||
|
||||
// Subagent tools are executed by the copilot backend, not sim side.
|
||||
if (SUBAGENT_TOOL_SET.has(toolName)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Respond tools are internal to copilot's subagent system - skip execution.
|
||||
// The copilot backend handles these internally to signal subagent completion.
|
||||
if (RESPOND_TOOL_SET.has(toolName)) {
|
||||
toolCall.status = 'success'
|
||||
toolCall.endTime = Date.now()
|
||||
toolCall.result = {
|
||||
success: true,
|
||||
output: 'Internal respond tool - handled by copilot backend',
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const isInterruptTool = isInterruptToolName(toolName)
|
||||
const execution = getExecutionTarget(toolData, toolName)
|
||||
const isInteractive = options.interactive === true
|
||||
// Integration tools (user-installed) also require approval in interactive mode
|
||||
const needsApproval = isInterruptTool || isIntegrationTool(toolName)
|
||||
const requiresApproval = isInteractive && needsApproval(toolData)
|
||||
if (toolData.state) {
|
||||
toolCall.status = mapServerStateToToolStatus(toolData.state)
|
||||
}
|
||||
|
||||
if (needsApproval && isInteractive) {
|
||||
if (requiresApproval) {
|
||||
const decision = await waitForToolDecision(
|
||||
toolCallId,
|
||||
options.timeout || STREAM_TIMEOUT_MS,
|
||||
options.abortSignal
|
||||
)
|
||||
if (decision?.status === 'accepted' || decision?.status === 'success') {
|
||||
// Client-executable run tools: defer execution to the browser client.
|
||||
// The client calls executeWorkflowWithFullLogging for real-time feedback
|
||||
// (block pulsing, logs, stop button) and reports completion via
|
||||
// /api/copilot/confirm with status success/error. We poll Redis for
|
||||
// that completion signal, then fire-and-forget markToolComplete to Go.
|
||||
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
|
||||
toolCall.status = 'executing'
|
||||
const completion = await waitForToolCompletion(
|
||||
toolCallId,
|
||||
options.timeout || STREAM_TIMEOUT_MS,
|
||||
options.abortSignal
|
||||
)
|
||||
if (completion?.status === 'background') {
|
||||
toolCall.status = 'skipped'
|
||||
toolCall.endTime = Date.now()
|
||||
markToolComplete(
|
||||
toolCall.id,
|
||||
toolCall.name,
|
||||
202,
|
||||
completion.message || 'Tool execution moved to background',
|
||||
{ background: true }
|
||||
).catch((err) => {
|
||||
logger.error('markToolComplete fire-and-forget failed (run tool background)', {
|
||||
toolCallId: toolCall.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCallId)
|
||||
return
|
||||
}
|
||||
const success = completion?.status === 'success'
|
||||
toolCall.status = success ? 'success' : 'error'
|
||||
toolCall.endTime = Date.now()
|
||||
const msg =
|
||||
completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
|
||||
// Fire-and-forget: tell Go backend the tool is done
|
||||
// (must NOT await — see deadlock note in executeToolAndReport)
|
||||
markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
|
||||
logger.error('markToolComplete fire-and-forget failed (run tool)', {
|
||||
toolCallId: toolCall.id,
|
||||
toolName: toolCall.name,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCallId)
|
||||
if (execution.target === 'sim_client_capability' && isInteractive) {
|
||||
await waitForClientCapabilityAndReport(toolCall, options, 'run tool')
|
||||
return
|
||||
}
|
||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
||||
if (execution.target === 'sim_server' || execution.target === 'sim_client_capability') {
|
||||
if (options.autoExecuteTools !== false) {
|
||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -308,7 +359,15 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
return
|
||||
}
|
||||
|
||||
if (options.autoExecuteTools !== false) {
|
||||
if (execution.target === 'sim_client_capability' && isInteractive) {
|
||||
await waitForClientCapabilityAndReport(toolCall, options, 'run tool')
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
(execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
|
||||
options.autoExecuteTools !== false
|
||||
) {
|
||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
||||
}
|
||||
},
|
||||
@@ -410,7 +469,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
||||
const toolCall: ToolCallState = {
|
||||
id: toolCallId,
|
||||
name: toolName,
|
||||
status: 'pending',
|
||||
status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
|
||||
params: args,
|
||||
startTime: Date.now(),
|
||||
}
|
||||
@@ -428,37 +487,26 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
||||
|
||||
if (isPartial) return
|
||||
|
||||
// Respond tools are internal to copilot's subagent system - skip execution.
|
||||
if (RESPOND_TOOL_SET.has(toolName)) {
|
||||
toolCall.status = 'success'
|
||||
toolCall.endTime = Date.now()
|
||||
toolCall.result = {
|
||||
success: true,
|
||||
output: 'Internal respond tool - handled by copilot backend',
|
||||
}
|
||||
return
|
||||
}
|
||||
const execution = getExecutionTarget(toolData, toolName)
|
||||
const isInteractive = options.interactive === true
|
||||
const requiresApproval = isInteractive && needsApproval(toolData)
|
||||
|
||||
// Tools that only exist on the Go backend (e.g. search_patterns,
|
||||
// search_errors, remember_debug) should NOT be re-executed on the Sim side.
|
||||
// The Go backend already executed them and will send its own tool_result
|
||||
// SSE event with the real outcome. Trying to execute them here would fail
|
||||
// with "Tool not found" and incorrectly mark the tool as failed.
|
||||
if (!isToolAvailableOnSimSide(toolName)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Interrupt tools and integration tools (user-installed) require approval
|
||||
// in interactive mode, same as top-level handler.
|
||||
const needsSubagentApproval = isInterruptToolName(toolName) || isIntegrationTool(toolName)
|
||||
if (options.interactive === true && needsSubagentApproval) {
|
||||
if (requiresApproval) {
|
||||
const decision = await waitForToolDecision(
|
||||
toolCallId,
|
||||
options.timeout || STREAM_TIMEOUT_MS,
|
||||
options.abortSignal
|
||||
)
|
||||
if (decision?.status === 'accepted' || decision?.status === 'success') {
|
||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
||||
if (execution.target === 'sim_client_capability' && isInteractive) {
|
||||
await waitForClientCapabilityAndReport(toolCall, options, 'subagent run tool')
|
||||
return
|
||||
}
|
||||
if (execution.target === 'sim_server' || execution.target === 'sim_client_capability') {
|
||||
if (options.autoExecuteTools !== false) {
|
||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
if (decision?.status === 'rejected' || decision?.status === 'error') {
|
||||
@@ -517,66 +565,15 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
||||
return
|
||||
}
|
||||
|
||||
// Client-executable run tools in interactive mode: defer to client.
|
||||
// Same pattern as main handler: wait for client completion, then tell Go.
|
||||
if (options.interactive === true && CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
|
||||
toolCall.status = 'executing'
|
||||
const completion = await waitForToolCompletion(
|
||||
toolCallId,
|
||||
options.timeout || STREAM_TIMEOUT_MS,
|
||||
options.abortSignal
|
||||
)
|
||||
if (completion?.status === 'rejected') {
|
||||
toolCall.status = 'rejected'
|
||||
toolCall.endTime = Date.now()
|
||||
markToolComplete(
|
||||
toolCall.id,
|
||||
toolCall.name,
|
||||
400,
|
||||
completion.message || 'Tool execution rejected'
|
||||
).catch((err) => {
|
||||
logger.error('markToolComplete fire-and-forget failed (subagent run tool rejected)', {
|
||||
toolCallId: toolCall.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCallId)
|
||||
return
|
||||
}
|
||||
if (completion?.status === 'background') {
|
||||
toolCall.status = 'skipped'
|
||||
toolCall.endTime = Date.now()
|
||||
markToolComplete(
|
||||
toolCall.id,
|
||||
toolCall.name,
|
||||
202,
|
||||
completion.message || 'Tool execution moved to background',
|
||||
{ background: true }
|
||||
).catch((err) => {
|
||||
logger.error('markToolComplete fire-and-forget failed (subagent run tool background)', {
|
||||
toolCallId: toolCall.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCallId)
|
||||
return
|
||||
}
|
||||
const success = completion?.status === 'success'
|
||||
toolCall.status = success ? 'success' : 'error'
|
||||
toolCall.endTime = Date.now()
|
||||
const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
|
||||
markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
|
||||
logger.error('markToolComplete fire-and-forget failed (subagent run tool)', {
|
||||
toolCallId: toolCall.id,
|
||||
toolName: toolCall.name,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
})
|
||||
markToolResultSeen(toolCallId)
|
||||
if (execution.target === 'sim_client_capability' && isInteractive) {
|
||||
await waitForClientCapabilityAndReport(toolCall, options, 'subagent run tool')
|
||||
return
|
||||
}
|
||||
|
||||
if (options.autoExecuteTools !== false) {
|
||||
if (
|
||||
(execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
|
||||
options.autoExecuteTools !== false
|
||||
) {
|
||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
||||
}
|
||||
},
|
||||
@@ -596,7 +593,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
||||
|
||||
const { success, hasResultData, hasError } = inferToolSuccess(data)
|
||||
|
||||
const status = success ? 'success' : 'error'
|
||||
const status = data?.state ? mapServerStateToToolStatus(data.state) : success ? 'success' : 'error'
|
||||
const endTime = Date.now()
|
||||
const result = hasResultData ? { success, output: data?.result || data?.data } : undefined
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import {
|
||||
TOOL_DECISION_MAX_POLL_MS,
|
||||
TOOL_DECISION_POLL_BACKOFF,
|
||||
} from '@/lib/copilot/constants'
|
||||
import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
|
||||
import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
|
||||
import {
|
||||
asRecord,
|
||||
@@ -21,10 +20,6 @@ import type {
|
||||
|
||||
const logger = createLogger('CopilotSseToolExecution')
|
||||
|
||||
export function isInterruptToolName(toolName: string): boolean {
|
||||
return INTERRUPT_TOOL_SET.has(toolName)
|
||||
}
|
||||
|
||||
export async function executeToolAndReport(
|
||||
toolCallId: string,
|
||||
context: StreamingContext,
|
||||
@@ -34,9 +29,11 @@ export async function executeToolAndReport(
|
||||
const toolCall = context.toolCalls.get(toolCallId)
|
||||
if (!toolCall) return
|
||||
|
||||
if (toolCall.status === 'executing') return
|
||||
const lockable = toolCall as typeof toolCall & { __simExecuting?: boolean }
|
||||
if (lockable.__simExecuting) return
|
||||
if (wasToolResultSeen(toolCall.id)) return
|
||||
|
||||
lockable.__simExecuting = true
|
||||
toolCall.status = 'executing'
|
||||
try {
|
||||
const result = await executeToolServerSide(toolCall, execContext)
|
||||
@@ -122,6 +119,8 @@ export async function executeToolAndReport(
|
||||
},
|
||||
}
|
||||
await options?.onEvent?.(errorEvent)
|
||||
} finally {
|
||||
delete lockable.__simExecuting
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow } from '@sim/db/schema'
|
||||
import { customTools, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { and, desc, eq, isNull, or } from 'drizzle-orm'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import type {
|
||||
ExecutionContext,
|
||||
@@ -12,6 +12,7 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
|
||||
import { getTool, resolveToolId } from '@/tools/utils'
|
||||
import {
|
||||
executeCheckDeploymentStatus,
|
||||
@@ -76,6 +77,247 @@ import {
|
||||
|
||||
const logger = createLogger('CopilotToolExecutor')
|
||||
|
||||
type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'
|
||||
|
||||
interface ManageCustomToolSchema {
|
||||
type: 'function'
|
||||
function: {
|
||||
name: string
|
||||
description?: string
|
||||
parameters: Record<string, unknown>
|
||||
}
|
||||
}
|
||||
|
||||
interface ManageCustomToolParams {
|
||||
operation?: string
|
||||
toolId?: string
|
||||
schema?: ManageCustomToolSchema
|
||||
code?: string
|
||||
title?: string
|
||||
workspaceId?: string
|
||||
}
|
||||
|
||||
async function executeManageCustomTool(
|
||||
rawParams: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const params = rawParams as ManageCustomToolParams
|
||||
const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
|
||||
const workspaceId = params.workspaceId || context.workspaceId
|
||||
|
||||
if (!operation) {
|
||||
return { success: false, error: "Missing required 'operation' argument" }
|
||||
}
|
||||
|
||||
try {
|
||||
if (operation === 'list') {
|
||||
const toolsForUser = workspaceId
|
||||
? await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(
|
||||
or(
|
||||
eq(customTools.workspaceId, workspaceId),
|
||||
and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
|
||||
)
|
||||
)
|
||||
.orderBy(desc(customTools.createdAt))
|
||||
: await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
|
||||
.orderBy(desc(customTools.createdAt))
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
tools: toolsForUser,
|
||||
count: toolsForUser.length,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'add') {
|
||||
if (!workspaceId) {
|
||||
return {
|
||||
success: false,
|
||||
error: "workspaceId is required for operation 'add'",
|
||||
}
|
||||
}
|
||||
if (!params.schema || !params.code) {
|
||||
return {
|
||||
success: false,
|
||||
error: "Both 'schema' and 'code' are required for operation 'add'",
|
||||
}
|
||||
}
|
||||
|
||||
const title = params.title || params.schema.function?.name
|
||||
if (!title) {
|
||||
return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
|
||||
}
|
||||
|
||||
const resultTools = await upsertCustomTools({
|
||||
tools: [
|
||||
{
|
||||
title,
|
||||
schema: params.schema,
|
||||
code: params.code,
|
||||
},
|
||||
],
|
||||
workspaceId,
|
||||
userId: context.userId,
|
||||
})
|
||||
const created = resultTools.find((tool) => tool.title === title)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
toolId: created?.id,
|
||||
title,
|
||||
message: `Created custom tool "${title}"`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'edit') {
|
||||
if (!workspaceId) {
|
||||
return {
|
||||
success: false,
|
||||
error: "workspaceId is required for operation 'edit'",
|
||||
}
|
||||
}
|
||||
if (!params.toolId) {
|
||||
return { success: false, error: "'toolId' is required for operation 'edit'" }
|
||||
}
|
||||
if (!params.schema && !params.code) {
|
||||
return {
|
||||
success: false,
|
||||
error: "At least one of 'schema' or 'code' is required for operation 'edit'",
|
||||
}
|
||||
}
|
||||
|
||||
const workspaceTool = await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
|
||||
.limit(1)
|
||||
|
||||
const legacyTool =
|
||||
workspaceTool.length === 0
|
||||
? await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(
|
||||
and(
|
||||
eq(customTools.id, params.toolId),
|
||||
isNull(customTools.workspaceId),
|
||||
eq(customTools.userId, context.userId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
: []
|
||||
|
||||
const existing = workspaceTool[0] || legacyTool[0]
|
||||
if (!existing) {
|
||||
return { success: false, error: `Custom tool not found: ${params.toolId}` }
|
||||
}
|
||||
|
||||
const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
|
||||
const mergedCode = params.code || existing.code
|
||||
const title = params.title || mergedSchema.function?.name || existing.title
|
||||
|
||||
await upsertCustomTools({
|
||||
tools: [
|
||||
{
|
||||
id: params.toolId,
|
||||
title,
|
||||
schema: mergedSchema,
|
||||
code: mergedCode,
|
||||
},
|
||||
],
|
||||
workspaceId,
|
||||
userId: context.userId,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
toolId: params.toolId,
|
||||
title,
|
||||
message: `Updated custom tool "${title}"`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'delete') {
|
||||
if (!params.toolId) {
|
||||
return { success: false, error: "'toolId' is required for operation 'delete'" }
|
||||
}
|
||||
|
||||
const workspaceDelete =
|
||||
workspaceId != null
|
||||
? await db
|
||||
.delete(customTools)
|
||||
.where(
|
||||
and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
|
||||
)
|
||||
.returning({ id: customTools.id })
|
||||
: []
|
||||
|
||||
const legacyDelete =
|
||||
workspaceDelete.length === 0
|
||||
? await db
|
||||
.delete(customTools)
|
||||
.where(
|
||||
and(
|
||||
eq(customTools.id, params.toolId),
|
||||
isNull(customTools.workspaceId),
|
||||
eq(customTools.userId, context.userId)
|
||||
)
|
||||
)
|
||||
.returning({ id: customTools.id })
|
||||
: []
|
||||
|
||||
const deleted = workspaceDelete[0] || legacyDelete[0]
|
||||
if (!deleted) {
|
||||
return { success: false, error: `Custom tool not found: ${params.toolId}` }
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
toolId: params.toolId,
|
||||
message: 'Deleted custom tool',
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: `Unsupported operation for manage_custom_tool: ${operation}`,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('manage_custom_tool execution failed', {
|
||||
operation,
|
||||
workspaceId,
|
||||
userId: context.userId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to manage custom tool',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const SERVER_TOOLS = new Set<string>([
|
||||
'get_blocks_and_tools',
|
||||
'get_blocks_metadata',
|
||||
@@ -83,6 +325,10 @@ const SERVER_TOOLS = new Set<string>([
|
||||
'get_block_config',
|
||||
'get_trigger_blocks',
|
||||
'edit_workflow',
|
||||
'workflow_context_get',
|
||||
'workflow_context_expand',
|
||||
'workflow_change',
|
||||
'workflow_verify',
|
||||
'get_workflow_console',
|
||||
'search_documentation',
|
||||
'search_online',
|
||||
@@ -161,6 +407,19 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
||||
}
|
||||
}
|
||||
},
|
||||
oauth_request_access: async (p, _c) => {
|
||||
const providerName = (p.providerName || p.provider_name || 'the provider') as string
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
status: 'requested',
|
||||
providerName,
|
||||
message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
|
||||
},
|
||||
}
|
||||
},
|
||||
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -609,6 +609,83 @@ const META_edit_workflow: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_workflow_change: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Planning workflow changes', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Applying workflow changes', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Updated your workflow', icon: Grid2x2Check },
|
||||
[ClientToolCallState.error]: { text: 'Failed to update your workflow', icon: XCircle },
|
||||
[ClientToolCallState.review]: { text: 'Review your workflow changes', icon: Grid2x2 },
|
||||
[ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted workflow changes', icon: MinusCircle },
|
||||
[ClientToolCallState.pending]: { text: 'Planning workflow changes', icon: Loader2 },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||
if (mode === 'dry_run') {
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return 'Planned workflow changes'
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return 'Planning workflow changes'
|
||||
}
|
||||
}
|
||||
if (mode === 'apply' || typeof params?.proposalId === 'string') {
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return 'Applied workflow changes'
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return 'Applying workflow changes'
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
uiConfig: {
|
||||
isSpecial: true,
|
||||
customRenderer: 'edit_summary',
|
||||
},
|
||||
}
|
||||
|
||||
const META_workflow_context_get: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Gathering workflow context', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Gathering workflow context', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Gathering workflow context', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Gathered workflow context', icon: FileText },
|
||||
[ClientToolCallState.error]: { text: 'Failed to gather workflow context', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped workflow context', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted workflow context', icon: MinusCircle },
|
||||
},
|
||||
}
|
||||
|
||||
const META_workflow_context_expand: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Expanding workflow schemas', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Expanding workflow schemas', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Expanding workflow schemas', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Expanded workflow schemas', icon: FileText },
|
||||
[ClientToolCallState.error]: { text: 'Failed to expand workflow schemas', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped schema expansion', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted schema expansion', icon: MinusCircle },
|
||||
},
|
||||
}
|
||||
|
||||
const META_workflow_verify: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Verifying workflow', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Verifying workflow', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Verifying workflow', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Verified workflow', icon: CheckCircle2 },
|
||||
[ClientToolCallState.error]: { text: 'Workflow verification failed', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped workflow verification', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted workflow verification', icon: MinusCircle },
|
||||
},
|
||||
}
|
||||
|
||||
const META_evaluate: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
|
||||
@@ -2542,6 +2619,10 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
||||
deploy_mcp: META_deploy_mcp,
|
||||
edit: META_edit,
|
||||
edit_workflow: META_edit_workflow,
|
||||
workflow_context_get: META_workflow_context_get,
|
||||
workflow_context_expand: META_workflow_context_expand,
|
||||
workflow_change: META_workflow_change,
|
||||
workflow_verify: META_workflow_verify,
|
||||
evaluate: META_evaluate,
|
||||
get_block_config: META_get_block_config,
|
||||
get_block_options: META_get_block_options,
|
||||
|
||||
@@ -13,6 +13,12 @@ import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-cr
|
||||
import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
|
||||
import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
|
||||
import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
|
||||
import { workflowChangeServerTool } from '@/lib/copilot/tools/server/workflow/workflow-change'
|
||||
import {
|
||||
workflowContextExpandServerTool,
|
||||
workflowContextGetServerTool,
|
||||
} from '@/lib/copilot/tools/server/workflow/workflow-context'
|
||||
import { workflowVerifyServerTool } from '@/lib/copilot/tools/server/workflow/workflow-verify'
|
||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||
|
||||
export { ExecuteResponseSuccessSchema }
|
||||
@@ -35,6 +41,10 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
|
||||
[getCredentialsServerTool.name]: getCredentialsServerTool,
|
||||
[makeApiRequestServerTool.name]: makeApiRequestServerTool,
|
||||
[knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
|
||||
[workflowContextGetServerTool.name]: workflowContextGetServerTool,
|
||||
[workflowContextExpandServerTool.name]: workflowContextExpandServerTool,
|
||||
[workflowChangeServerTool.name]: workflowChangeServerTool,
|
||||
[workflowVerifyServerTool.name]: workflowVerifyServerTool,
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
93
apps/sim/lib/copilot/tools/server/workflow/change-store.ts
Normal file
93
apps/sim/lib/copilot/tools/server/workflow/change-store.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
import crypto from 'crypto'
|
||||
|
||||
type StoreEntry<T> = {
|
||||
value: T
|
||||
expiresAt: number
|
||||
}
|
||||
|
||||
const DEFAULT_TTL_MS = 30 * 60 * 1000
|
||||
const MAX_ENTRIES = 500
|
||||
|
||||
class TTLStore<T> {
|
||||
private readonly data = new Map<string, StoreEntry<T>>()
|
||||
|
||||
constructor(private readonly ttlMs = DEFAULT_TTL_MS) {}
|
||||
|
||||
set(value: T): string {
|
||||
this.gc()
|
||||
if (this.data.size >= MAX_ENTRIES) {
|
||||
const firstKey = this.data.keys().next().value as string | undefined
|
||||
if (firstKey) {
|
||||
this.data.delete(firstKey)
|
||||
}
|
||||
}
|
||||
const id = crypto.randomUUID()
|
||||
this.data.set(id, {
|
||||
value,
|
||||
expiresAt: Date.now() + this.ttlMs,
|
||||
})
|
||||
return id
|
||||
}
|
||||
|
||||
get(id: string): T | null {
|
||||
const entry = this.data.get(id)
|
||||
if (!entry) return null
|
||||
if (entry.expiresAt <= Date.now()) {
|
||||
this.data.delete(id)
|
||||
return null
|
||||
}
|
||||
return entry.value
|
||||
}
|
||||
|
||||
private gc(): void {
|
||||
const now = Date.now()
|
||||
for (const [key, entry] of this.data.entries()) {
|
||||
if (entry.expiresAt <= now) {
|
||||
this.data.delete(key)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type WorkflowContextPack = {
|
||||
workflowId: string
|
||||
snapshotHash: string
|
||||
workflowState: {
|
||||
blocks: Record<string, any>
|
||||
edges: Array<Record<string, any>>
|
||||
loops: Record<string, any>
|
||||
parallels: Record<string, any>
|
||||
}
|
||||
schemasByType: Record<string, any>
|
||||
schemaRefsByType: Record<string, string>
|
||||
summary: Record<string, any>
|
||||
}
|
||||
|
||||
export type WorkflowChangeProposal = {
|
||||
workflowId: string
|
||||
baseSnapshotHash: string
|
||||
compiledOperations: Array<Record<string, any>>
|
||||
diffSummary: Record<string, any>
|
||||
warnings: string[]
|
||||
diagnostics: string[]
|
||||
touchedBlocks: string[]
|
||||
}
|
||||
|
||||
const contextPackStore = new TTLStore<WorkflowContextPack>()
|
||||
const proposalStore = new TTLStore<WorkflowChangeProposal>()
|
||||
|
||||
export function saveContextPack(pack: WorkflowContextPack): string {
|
||||
return contextPackStore.set(pack)
|
||||
}
|
||||
|
||||
export function getContextPack(id: string): WorkflowContextPack | null {
|
||||
return contextPackStore.get(id)
|
||||
}
|
||||
|
||||
export function saveProposal(proposal: WorkflowChangeProposal): string {
|
||||
return proposalStore.set(proposal)
|
||||
}
|
||||
|
||||
export function getProposal(id: string): WorkflowChangeProposal | null {
|
||||
return proposalStore.get(id)
|
||||
}
|
||||
987
apps/sim/lib/copilot/tools/server/workflow/workflow-change.ts
Normal file
987
apps/sim/lib/copilot/tools/server/workflow/workflow-change.ts
Normal file
@@ -0,0 +1,987 @@
|
||||
import crypto from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { z } from 'zod'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
import {
|
||||
getContextPack,
|
||||
getProposal,
|
||||
saveProposal,
|
||||
type WorkflowChangeProposal,
|
||||
} from './change-store'
|
||||
import { editWorkflowServerTool } from './edit-workflow'
|
||||
import { applyOperationsToWorkflowState } from './edit-workflow/engine'
|
||||
import { preValidateCredentialInputs } from './edit-workflow/validation'
|
||||
import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'
|
||||
|
||||
const logger = createLogger('WorkflowChangeServerTool')
|
||||
|
||||
const TargetSchema = z
|
||||
.object({
|
||||
blockId: z.string().optional(),
|
||||
alias: z.string().optional(),
|
||||
match: z
|
||||
.object({
|
||||
type: z.string().optional(),
|
||||
name: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
const CredentialSelectionSchema = z
|
||||
.object({
|
||||
strategy: z.enum(['first_connected', 'by_id', 'by_name']).optional(),
|
||||
id: z.string().optional(),
|
||||
name: z.string().optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
const ChangeOperationSchema = z
|
||||
.object({
|
||||
op: z.enum(['set', 'unset', 'merge', 'append', 'remove', 'attach_credential']),
|
||||
path: z.string().optional(),
|
||||
value: z.any().optional(),
|
||||
provider: z.string().optional(),
|
||||
selection: CredentialSelectionSchema.optional(),
|
||||
required: z.boolean().optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
const MutationSchema = z
|
||||
.object({
|
||||
action: z.enum([
|
||||
'ensure_block',
|
||||
'patch_block',
|
||||
'remove_block',
|
||||
'connect',
|
||||
'disconnect',
|
||||
'ensure_variable',
|
||||
'set_variable',
|
||||
]),
|
||||
target: TargetSchema.optional(),
|
||||
type: z.string().optional(),
|
||||
name: z.string().optional(),
|
||||
inputs: z.record(z.any()).optional(),
|
||||
triggerMode: z.boolean().optional(),
|
||||
advancedMode: z.boolean().optional(),
|
||||
enabled: z.boolean().optional(),
|
||||
changes: z.array(ChangeOperationSchema).optional(),
|
||||
from: TargetSchema.optional(),
|
||||
to: TargetSchema.optional(),
|
||||
handle: z.string().optional(),
|
||||
toHandle: z.string().optional(),
|
||||
mode: z.enum(['set', 'append', 'remove']).optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
const LinkEndpointSchema = z
|
||||
.object({
|
||||
blockId: z.string().optional(),
|
||||
alias: z.string().optional(),
|
||||
match: z
|
||||
.object({
|
||||
type: z.string().optional(),
|
||||
name: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
handle: z.string().optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
const LinkSchema = z
|
||||
.object({
|
||||
from: LinkEndpointSchema,
|
||||
to: LinkEndpointSchema,
|
||||
mode: z.enum(['set', 'append', 'remove']).optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
const ChangeSpecSchema = z
|
||||
.object({
|
||||
objective: z.string().optional(),
|
||||
constraints: z.record(z.any()).optional(),
|
||||
resources: z.record(z.any()).optional(),
|
||||
mutations: z.array(MutationSchema).optional(),
|
||||
links: z.array(LinkSchema).optional(),
|
||||
acceptance: z.array(z.any()).optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
const WorkflowChangeInputSchema = z
|
||||
.object({
|
||||
mode: z.enum(['dry_run', 'apply']),
|
||||
workflowId: z.string().optional(),
|
||||
contextPackId: z.string().optional(),
|
||||
proposalId: z.string().optional(),
|
||||
baseSnapshotHash: z.string().optional(),
|
||||
expectedSnapshotHash: z.string().optional(),
|
||||
changeSpec: ChangeSpecSchema.optional(),
|
||||
})
|
||||
.strict()
|
||||
|
||||
type WorkflowChangeParams = z.input<typeof WorkflowChangeInputSchema>
|
||||
type ChangeSpec = z.input<typeof ChangeSpecSchema>
|
||||
type TargetRef = z.input<typeof TargetSchema>
|
||||
type ChangeOperation = z.input<typeof ChangeOperationSchema>
|
||||
|
||||
type CredentialRecord = {
|
||||
id: string
|
||||
name: string
|
||||
provider: string
|
||||
isDefault?: boolean
|
||||
}
|
||||
|
||||
type ConnectionTarget = {
|
||||
block: string
|
||||
handle?: string
|
||||
}
|
||||
|
||||
type ConnectionState = Map<string, Map<string, ConnectionTarget[]>>
|
||||
|
||||
function createDraftBlockId(seed?: string): string {
|
||||
const suffix = crypto.randomUUID().slice(0, 8)
|
||||
const base = seed ? seed.replace(/[^a-zA-Z0-9]/g, '').slice(0, 24) : 'draft'
|
||||
return `${base || 'draft'}_${suffix}`
|
||||
}
|
||||
|
||||
function normalizeHandle(handle?: string): string {
|
||||
if (!handle) return 'source'
|
||||
if (handle === 'success') return 'source'
|
||||
return handle
|
||||
}
|
||||
|
||||
function deepClone<T>(value: T): T {
|
||||
return JSON.parse(JSON.stringify(value))
|
||||
}
|
||||
|
||||
function stableUnique(values: string[]): string[] {
|
||||
return [...new Set(values.filter(Boolean))]
|
||||
}
|
||||
|
||||
function buildConnectionState(workflowState: {
|
||||
edges: Array<Record<string, any>>
|
||||
}): ConnectionState {
|
||||
const state: ConnectionState = new Map()
|
||||
for (const edge of workflowState.edges || []) {
|
||||
const source = String(edge.source || '')
|
||||
const target = String(edge.target || '')
|
||||
if (!source || !target) continue
|
||||
const sourceHandle = normalizeHandle(String(edge.sourceHandle || 'source'))
|
||||
const targetHandle = edge.targetHandle ? String(edge.targetHandle) : undefined
|
||||
|
||||
let handleMap = state.get(source)
|
||||
if (!handleMap) {
|
||||
handleMap = new Map()
|
||||
state.set(source, handleMap)
|
||||
}
|
||||
const existing = handleMap.get(sourceHandle) || []
|
||||
existing.push({ block: target, handle: targetHandle })
|
||||
handleMap.set(sourceHandle, existing)
|
||||
}
|
||||
return state
|
||||
}
|
||||
|
||||
function connectionStateToPayload(state: Map<string, ConnectionTarget[]>): Record<string, any> {
|
||||
const payload: Record<string, any> = {}
|
||||
for (const [handle, targets] of state.entries()) {
|
||||
if (!targets || targets.length === 0) continue
|
||||
const normalizedTargets = targets.map((target) => {
|
||||
if (!target.handle || target.handle === 'target') {
|
||||
return target.block
|
||||
}
|
||||
return { block: target.block, handle: target.handle }
|
||||
})
|
||||
payload[handle] = normalizedTargets.length === 1 ? normalizedTargets[0] : normalizedTargets
|
||||
}
|
||||
return payload
|
||||
}
|
||||
|
||||
function findMatchingBlockId(
|
||||
workflowState: { blocks: Record<string, any> },
|
||||
target: TargetRef
|
||||
): string | null {
|
||||
if (target.blockId && workflowState.blocks[target.blockId]) {
|
||||
return target.blockId
|
||||
}
|
||||
|
||||
if (target.match) {
|
||||
const type = target.match.type
|
||||
const name = target.match.name?.toLowerCase()
|
||||
const matches = Object.entries(workflowState.blocks || {}).filter(([_, block]) => {
|
||||
const blockType = String((block as Record<string, unknown>).type || '')
|
||||
const blockName = String((block as Record<string, unknown>).name || '').toLowerCase()
|
||||
const typeOk = type ? blockType === type : true
|
||||
const nameOk = name ? blockName === name : true
|
||||
return typeOk && nameOk
|
||||
})
|
||||
if (matches.length === 1) {
|
||||
return matches[0][0]
|
||||
}
|
||||
if (matches.length > 1) {
|
||||
throw new Error(
|
||||
`ambiguous_target: target match resolved to ${matches.length} blocks (${matches.map(([id]) => id).join(', ')})`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
function getNestedValue(value: any, path: string[]): any {
|
||||
let cursor = value
|
||||
for (const segment of path) {
|
||||
if (cursor == null || typeof cursor !== 'object') return undefined
|
||||
cursor = cursor[segment]
|
||||
}
|
||||
return cursor
|
||||
}
|
||||
|
||||
function setNestedValue(base: any, path: string[], nextValue: any): any {
|
||||
if (path.length === 0) return nextValue
|
||||
const out = Array.isArray(base) ? [...base] : { ...(base || {}) }
|
||||
let cursor: any = out
|
||||
for (let i = 0; i < path.length - 1; i++) {
|
||||
const key = path[i]
|
||||
const current = cursor[key]
|
||||
cursor[key] =
|
||||
current && typeof current === 'object'
|
||||
? Array.isArray(current)
|
||||
? [...current]
|
||||
: { ...current }
|
||||
: {}
|
||||
cursor = cursor[key]
|
||||
}
|
||||
cursor[path[path.length - 1]] = nextValue
|
||||
return out
|
||||
}
|
||||
|
||||
function removeArrayItem(arr: unknown[], value: unknown): unknown[] {
|
||||
return arr.filter((item) => JSON.stringify(item) !== JSON.stringify(value))
|
||||
}
|
||||
|
||||
function selectCredentialId(
|
||||
availableCredentials: CredentialRecord[],
|
||||
provider: string,
|
||||
selection: z.infer<typeof CredentialSelectionSchema> | undefined
|
||||
): string | null {
|
||||
const providerLower = provider.toLowerCase()
|
||||
const providerMatches = availableCredentials.filter((credential) => {
|
||||
const credentialProvider = credential.provider.toLowerCase()
|
||||
return (
|
||||
credentialProvider === providerLower || credentialProvider.startsWith(`${providerLower}-`)
|
||||
)
|
||||
})
|
||||
|
||||
const pool = providerMatches.length > 0 ? providerMatches : availableCredentials
|
||||
const strategy = selection?.strategy || 'first_connected'
|
||||
|
||||
if (strategy === 'by_id') {
|
||||
const id = selection?.id
|
||||
if (!id) return null
|
||||
return pool.find((credential) => credential.id === id)?.id || null
|
||||
}
|
||||
|
||||
if (strategy === 'by_name') {
|
||||
const name = selection?.name?.toLowerCase()
|
||||
if (!name) return null
|
||||
const exact = pool.find((credential) => credential.name.toLowerCase() === name)
|
||||
if (exact) return exact.id
|
||||
const partial = pool.find((credential) => credential.name.toLowerCase().includes(name))
|
||||
return partial?.id || null
|
||||
}
|
||||
|
||||
const defaultCredential = pool.find((credential) => credential.isDefault)
|
||||
if (defaultCredential) return defaultCredential.id
|
||||
return pool[0]?.id || null
|
||||
}
|
||||
|
||||
function selectCredentialFieldId(blockType: string, provider: string): string | null {
|
||||
const blockConfig = getBlock(blockType)
|
||||
if (!blockConfig) return null
|
||||
|
||||
const oauthFields = (blockConfig.subBlocks || []).filter(
|
||||
(subBlock) => subBlock.type === 'oauth-input'
|
||||
)
|
||||
if (oauthFields.length === 0) return null
|
||||
|
||||
const providerKey = provider.replace(/[^a-zA-Z0-9]/g, '').toLowerCase()
|
||||
const fieldMatch = oauthFields.find((subBlock) =>
|
||||
subBlock.id
|
||||
.replace(/[^a-zA-Z0-9]/g, '')
|
||||
.toLowerCase()
|
||||
.includes(providerKey)
|
||||
)
|
||||
if (fieldMatch) return fieldMatch.id
|
||||
return oauthFields[0].id
|
||||
}
|
||||
|
||||
function ensureConnectionTarget(
|
||||
existing: ConnectionTarget[],
|
||||
target: ConnectionTarget,
|
||||
mode: 'set' | 'append' | 'remove'
|
||||
): ConnectionTarget[] {
|
||||
if (mode === 'set') {
|
||||
return [target]
|
||||
}
|
||||
|
||||
if (mode === 'remove') {
|
||||
return existing.filter(
|
||||
(item) =>
|
||||
!(item.block === target.block && (item.handle || 'target') === (target.handle || 'target'))
|
||||
)
|
||||
}
|
||||
|
||||
const duplicate = existing.some(
|
||||
(item) =>
|
||||
item.block === target.block && (item.handle || 'target') === (target.handle || 'target')
|
||||
)
|
||||
if (duplicate) return existing
|
||||
return [...existing, target]
|
||||
}
|
||||
|
||||
async function compileChangeSpec(params: {
|
||||
changeSpec: ChangeSpec
|
||||
workflowState: {
|
||||
blocks: Record<string, any>
|
||||
edges: Array<Record<string, any>>
|
||||
loops: Record<string, any>
|
||||
parallels: Record<string, any>
|
||||
}
|
||||
userId: string
|
||||
workflowId: string
|
||||
}): Promise<{
|
||||
operations: Array<Record<string, any>>
|
||||
warnings: string[]
|
||||
diagnostics: string[]
|
||||
touchedBlocks: string[]
|
||||
}> {
|
||||
const { changeSpec, workflowState, userId, workflowId } = params
|
||||
const operations: Array<Record<string, any>> = []
|
||||
const diagnostics: string[] = []
|
||||
const warnings: string[] = []
|
||||
const touchedBlocks = new Set<string>()
|
||||
|
||||
const aliasMap = new Map<string, string>()
|
||||
const workingState = deepClone(workflowState)
|
||||
const connectionState = buildConnectionState(workingState)
|
||||
const connectionTouchedSources = new Set<string>()
|
||||
const plannedBlockTypes = new Map<string, string>()
|
||||
|
||||
// Seed aliases from existing block names.
|
||||
for (const [blockId, block] of Object.entries(workingState.blocks || {})) {
|
||||
const blockName = String((block as Record<string, unknown>).name || '')
|
||||
if (!blockName) continue
|
||||
const normalizedAlias = blockName.replace(/[^a-zA-Z0-9]/g, '')
|
||||
if (normalizedAlias && !aliasMap.has(normalizedAlias)) {
|
||||
aliasMap.set(normalizedAlias, blockId)
|
||||
}
|
||||
}
|
||||
|
||||
const credentialsResponse = await getCredentialsServerTool.execute({ workflowId }, { userId })
|
||||
const availableCredentials: CredentialRecord[] =
|
||||
credentialsResponse?.oauth?.connected?.credentials?.map((credential: any) => ({
|
||||
id: String(credential.id || ''),
|
||||
name: String(credential.name || ''),
|
||||
provider: String(credential.provider || ''),
|
||||
isDefault: Boolean(credential.isDefault),
|
||||
})) || []
|
||||
|
||||
const resolveTarget = (
|
||||
target: TargetRef | undefined,
|
||||
allowCreateAlias = false
|
||||
): string | null => {
|
||||
if (!target) return null
|
||||
if (target.blockId) {
|
||||
if (workingState.blocks[target.blockId] || plannedBlockTypes.has(target.blockId)) {
|
||||
return target.blockId
|
||||
}
|
||||
return allowCreateAlias ? target.blockId : null
|
||||
}
|
||||
|
||||
if (target.alias) {
|
||||
if (aliasMap.has(target.alias)) return aliasMap.get(target.alias) || null
|
||||
const byMatch = findMatchingBlockId(workingState, { alias: target.alias })
|
||||
if (byMatch) {
|
||||
aliasMap.set(target.alias, byMatch)
|
||||
return byMatch
|
||||
}
|
||||
return allowCreateAlias ? target.alias : null
|
||||
}
|
||||
|
||||
const matched = findMatchingBlockId(workingState, target)
|
||||
if (matched) return matched
|
||||
return null
|
||||
}
|
||||
|
||||
const applyPatchChange = (
|
||||
targetId: string,
|
||||
blockType: string | null,
|
||||
change: ChangeOperation,
|
||||
paramsOut: Record<string, any>
|
||||
): void => {
|
||||
if (change.op === 'attach_credential') {
|
||||
const provider = change.provider
|
||||
if (!provider) {
|
||||
diagnostics.push(`attach_credential on ${targetId} is missing provider`)
|
||||
return
|
||||
}
|
||||
if (!blockType) {
|
||||
diagnostics.push(`attach_credential on ${targetId} failed: unknown block type`)
|
||||
return
|
||||
}
|
||||
const credentialFieldId = selectCredentialFieldId(blockType, provider)
|
||||
if (!credentialFieldId) {
|
||||
const msg = `No oauth input field found for block type "${blockType}" on ${targetId}`
|
||||
if (change.required) diagnostics.push(msg)
|
||||
else warnings.push(msg)
|
||||
return
|
||||
}
|
||||
|
||||
const credentialId = selectCredentialId(availableCredentials, provider, change.selection)
|
||||
if (!credentialId) {
|
||||
const msg = `No credential found for provider "${provider}" on ${targetId}`
|
||||
if (change.required) diagnostics.push(msg)
|
||||
else warnings.push(msg)
|
||||
return
|
||||
}
|
||||
|
||||
paramsOut.inputs = paramsOut.inputs || {}
|
||||
paramsOut.inputs[credentialFieldId] = credentialId
|
||||
return
|
||||
}
|
||||
|
||||
if (!change.path) {
|
||||
diagnostics.push(`${change.op} on ${targetId} requires a path`)
|
||||
return
|
||||
}
|
||||
|
||||
const pathSegments = change.path.split('.').filter(Boolean)
|
||||
if (pathSegments.length === 0) {
|
||||
diagnostics.push(`${change.op} on ${targetId} has an invalid path "${change.path}"`)
|
||||
return
|
||||
}
|
||||
|
||||
if (pathSegments[0] === 'inputs') {
|
||||
const inputKey = pathSegments[1]
|
||||
if (!inputKey) {
|
||||
diagnostics.push(`${change.op} on ${targetId} has invalid input path "${change.path}"`)
|
||||
return
|
||||
}
|
||||
|
||||
const currentInputValue =
|
||||
paramsOut.inputs?.[inputKey] ??
|
||||
workingState.blocks[targetId]?.subBlocks?.[inputKey]?.value ??
|
||||
null
|
||||
|
||||
let nextInputValue = currentInputValue
|
||||
const nestedPath = pathSegments.slice(2)
|
||||
|
||||
if (change.op === 'set') {
|
||||
nextInputValue =
|
||||
nestedPath.length > 0
|
||||
? setNestedValue(currentInputValue ?? {}, nestedPath, change.value)
|
||||
: change.value
|
||||
} else if (change.op === 'unset') {
|
||||
nextInputValue =
|
||||
nestedPath.length > 0 ? setNestedValue(currentInputValue ?? {}, nestedPath, null) : null
|
||||
} else if (change.op === 'merge') {
|
||||
if (nestedPath.length > 0) {
|
||||
const baseObject = getNestedValue(currentInputValue ?? {}, nestedPath) || {}
|
||||
if (
|
||||
baseObject &&
|
||||
typeof baseObject === 'object' &&
|
||||
change.value &&
|
||||
typeof change.value === 'object'
|
||||
) {
|
||||
nextInputValue = setNestedValue(currentInputValue ?? {}, nestedPath, {
|
||||
...baseObject,
|
||||
...(change.value as Record<string, unknown>),
|
||||
})
|
||||
} else {
|
||||
diagnostics.push(`merge on ${targetId} at "${change.path}" requires object values`)
|
||||
return
|
||||
}
|
||||
} else if (
|
||||
currentInputValue &&
|
||||
typeof currentInputValue === 'object' &&
|
||||
!Array.isArray(currentInputValue) &&
|
||||
change.value &&
|
||||
typeof change.value === 'object' &&
|
||||
!Array.isArray(change.value)
|
||||
) {
|
||||
nextInputValue = { ...currentInputValue, ...(change.value as Record<string, unknown>) }
|
||||
} else if (currentInputValue == null && change.value && typeof change.value === 'object') {
|
||||
nextInputValue = change.value
|
||||
} else {
|
||||
diagnostics.push(`merge on ${targetId} at "${change.path}" requires object values`)
|
||||
return
|
||||
}
|
||||
} else if (change.op === 'append') {
|
||||
const arr = Array.isArray(currentInputValue) ? [...currentInputValue] : []
|
||||
arr.push(change.value)
|
||||
nextInputValue = arr
|
||||
} else if (change.op === 'remove') {
|
||||
if (!Array.isArray(currentInputValue)) {
|
||||
diagnostics.push(`remove on ${targetId} at "${change.path}" requires an array value`)
|
||||
return
|
||||
}
|
||||
nextInputValue = removeArrayItem(currentInputValue, change.value)
|
||||
}
|
||||
|
||||
paramsOut.inputs = paramsOut.inputs || {}
|
||||
paramsOut.inputs[inputKey] = nextInputValue
|
||||
return
|
||||
}
|
||||
|
||||
if (pathSegments.length !== 1) {
|
||||
diagnostics.push(
|
||||
`Unsupported path "${change.path}" on ${targetId}. Use inputs.* or top-level field names.`
|
||||
)
|
||||
return
|
||||
}
|
||||
const topLevelField = pathSegments[0]
|
||||
if (!['name', 'type', 'triggerMode', 'advancedMode', 'enabled'].includes(topLevelField)) {
|
||||
diagnostics.push(`Unsupported top-level path "${change.path}" on ${targetId}`)
|
||||
return
|
||||
}
|
||||
paramsOut[topLevelField] = change.op === 'unset' ? null : change.value
|
||||
}
|
||||
|
||||
for (const mutation of changeSpec.mutations || []) {
|
||||
if (mutation.action === 'ensure_block') {
|
||||
const targetId = resolveTarget(mutation.target, true)
|
||||
if (!targetId) {
|
||||
diagnostics.push('ensure_block is missing a resolvable target')
|
||||
continue
|
||||
}
|
||||
|
||||
const existingBlock = workingState.blocks[targetId]
|
||||
if (existingBlock) {
|
||||
const editParams: Record<string, any> = {}
|
||||
if (mutation.name) editParams.name = mutation.name
|
||||
if (mutation.type) editParams.type = mutation.type
|
||||
if (mutation.inputs) editParams.inputs = mutation.inputs
|
||||
if (mutation.triggerMode !== undefined) editParams.triggerMode = mutation.triggerMode
|
||||
if (mutation.advancedMode !== undefined) editParams.advancedMode = mutation.advancedMode
|
||||
if (mutation.enabled !== undefined) editParams.enabled = mutation.enabled
|
||||
operations.push({
|
||||
operation_type: 'edit',
|
||||
block_id: targetId,
|
||||
params: editParams,
|
||||
})
|
||||
touchedBlocks.add(targetId)
|
||||
} else {
|
||||
if (!mutation.type || !mutation.name) {
|
||||
diagnostics.push(`ensure_block for "${targetId}" requires type and name when creating`)
|
||||
continue
|
||||
}
|
||||
const blockId =
|
||||
mutation.target?.blockId || mutation.target?.alias || createDraftBlockId(mutation.name)
|
||||
const addParams: Record<string, any> = {
|
||||
type: mutation.type,
|
||||
name: mutation.name,
|
||||
}
|
||||
if (mutation.inputs) addParams.inputs = mutation.inputs
|
||||
if (mutation.triggerMode !== undefined) addParams.triggerMode = mutation.triggerMode
|
||||
if (mutation.advancedMode !== undefined) addParams.advancedMode = mutation.advancedMode
|
||||
if (mutation.enabled !== undefined) addParams.enabled = mutation.enabled
|
||||
operations.push({
|
||||
operation_type: 'add',
|
||||
block_id: blockId,
|
||||
params: addParams,
|
||||
})
|
||||
workingState.blocks[blockId] = {
|
||||
id: blockId,
|
||||
type: mutation.type,
|
||||
name: mutation.name,
|
||||
subBlocks: Object.fromEntries(
|
||||
Object.entries(mutation.inputs || {}).map(([key, value]) => [
|
||||
key,
|
||||
{ id: key, value, type: 'short-input' },
|
||||
])
|
||||
),
|
||||
triggerMode: mutation.triggerMode || false,
|
||||
advancedMode: mutation.advancedMode || false,
|
||||
enabled: mutation.enabled !== undefined ? mutation.enabled : true,
|
||||
}
|
||||
plannedBlockTypes.set(blockId, mutation.type)
|
||||
touchedBlocks.add(blockId)
|
||||
if (mutation.target?.alias) aliasMap.set(mutation.target.alias, blockId)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (mutation.action === 'patch_block') {
|
||||
const targetId = resolveTarget(mutation.target)
|
||||
if (!targetId) {
|
||||
diagnostics.push('patch_block target could not be resolved')
|
||||
continue
|
||||
}
|
||||
const blockType =
|
||||
String(workingState.blocks[targetId]?.type || '') || plannedBlockTypes.get(targetId) || null
|
||||
|
||||
const editParams: Record<string, any> = {}
|
||||
for (const change of mutation.changes || []) {
|
||||
applyPatchChange(targetId, blockType, change, editParams)
|
||||
}
|
||||
if (Object.keys(editParams).length === 0) {
|
||||
warnings.push(`patch_block for ${targetId} had no effective changes`)
|
||||
continue
|
||||
}
|
||||
operations.push({
|
||||
operation_type: 'edit',
|
||||
block_id: targetId,
|
||||
params: editParams,
|
||||
})
|
||||
touchedBlocks.add(targetId)
|
||||
continue
|
||||
}
|
||||
|
||||
if (mutation.action === 'remove_block') {
|
||||
const targetId = resolveTarget(mutation.target)
|
||||
if (!targetId) {
|
||||
diagnostics.push('remove_block target could not be resolved')
|
||||
continue
|
||||
}
|
||||
operations.push({
|
||||
operation_type: 'delete',
|
||||
block_id: targetId,
|
||||
params: {},
|
||||
})
|
||||
touchedBlocks.add(targetId)
|
||||
connectionState.delete(targetId)
|
||||
for (const [source, handles] of connectionState.entries()) {
|
||||
for (const [handle, targets] of handles.entries()) {
|
||||
const nextTargets = targets.filter((target) => target.block !== targetId)
|
||||
handles.set(handle, nextTargets)
|
||||
}
|
||||
connectionTouchedSources.add(source)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (mutation.action === 'connect' || mutation.action === 'disconnect') {
|
||||
const from = resolveTarget(mutation.from)
|
||||
const to = resolveTarget(mutation.to)
|
||||
if (!from || !to) {
|
||||
diagnostics.push(`${mutation.action} requires resolvable from/to targets`)
|
||||
continue
|
||||
}
|
||||
const sourceHandle = normalizeHandle(mutation.handle)
|
||||
const targetHandle = mutation.toHandle || 'target'
|
||||
let sourceMap = connectionState.get(from)
|
||||
if (!sourceMap) {
|
||||
sourceMap = new Map()
|
||||
connectionState.set(from, sourceMap)
|
||||
}
|
||||
const existingTargets = sourceMap.get(sourceHandle) || []
|
||||
const mode = mutation.action === 'disconnect' ? 'remove' : mutation.mode || 'set'
|
||||
const nextTargets = ensureConnectionTarget(
|
||||
existingTargets,
|
||||
{ block: to, handle: targetHandle },
|
||||
mode
|
||||
)
|
||||
sourceMap.set(sourceHandle, nextTargets)
|
||||
connectionTouchedSources.add(from)
|
||||
touchedBlocks.add(from)
|
||||
}
|
||||
}
|
||||
|
||||
for (const link of changeSpec.links || []) {
|
||||
const from = resolveTarget(
|
||||
{
|
||||
blockId: link.from.blockId,
|
||||
alias: link.from.alias,
|
||||
match: link.from.match,
|
||||
},
|
||||
true
|
||||
)
|
||||
const to = resolveTarget(
|
||||
{
|
||||
blockId: link.to.blockId,
|
||||
alias: link.to.alias,
|
||||
match: link.to.match,
|
||||
},
|
||||
true
|
||||
)
|
||||
if (!from || !to) {
|
||||
diagnostics.push('link contains unresolved from/to target')
|
||||
continue
|
||||
}
|
||||
|
||||
const sourceHandle = normalizeHandle(link.from.handle)
|
||||
const targetHandle = link.to.handle || 'target'
|
||||
let sourceMap = connectionState.get(from)
|
||||
if (!sourceMap) {
|
||||
sourceMap = new Map()
|
||||
connectionState.set(from, sourceMap)
|
||||
}
|
||||
const existingTargets = sourceMap.get(sourceHandle) || []
|
||||
const nextTargets = ensureConnectionTarget(
|
||||
existingTargets,
|
||||
{ block: to, handle: targetHandle },
|
||||
link.mode || 'set'
|
||||
)
|
||||
sourceMap.set(sourceHandle, nextTargets)
|
||||
connectionTouchedSources.add(from)
|
||||
touchedBlocks.add(from)
|
||||
}
|
||||
|
||||
for (const sourceBlockId of stableUnique([...connectionTouchedSources])) {
|
||||
if (!connectionState.has(sourceBlockId)) continue
|
||||
const sourceConnections = connectionState.get(sourceBlockId)!
|
||||
operations.push({
|
||||
operation_type: 'edit',
|
||||
block_id: sourceBlockId,
|
||||
params: {
|
||||
connections: connectionStateToPayload(sourceConnections),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
operations,
|
||||
warnings,
|
||||
diagnostics,
|
||||
touchedBlocks: [...touchedBlocks],
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeDiff(
|
||||
beforeState: { blocks: Record<string, any>; edges: Array<Record<string, any>> },
|
||||
afterState: { blocks: Record<string, any>; edges: Array<Record<string, any>> },
|
||||
operations: Array<Record<string, any>>
|
||||
): Record<string, any> {
|
||||
const beforeBlocks = Object.keys(beforeState.blocks || {}).length
|
||||
const afterBlocks = Object.keys(afterState.blocks || {}).length
|
||||
const beforeEdges = (beforeState.edges || []).length
|
||||
const afterEdges = (afterState.edges || []).length
|
||||
|
||||
const counts = operations.reduce<Record<string, number>>((acc, operation) => {
|
||||
const opType = String(operation.operation_type || 'unknown')
|
||||
acc[opType] = (acc[opType] || 0) + 1
|
||||
return acc
|
||||
}, {})
|
||||
|
||||
return {
|
||||
operationCounts: counts,
|
||||
blocks: {
|
||||
before: beforeBlocks,
|
||||
after: afterBlocks,
|
||||
delta: afterBlocks - beforeBlocks,
|
||||
},
|
||||
edges: {
|
||||
before: beforeEdges,
|
||||
after: afterEdges,
|
||||
delta: afterEdges - beforeEdges,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
async function validateAndSimulateOperations(params: {
|
||||
workflowState: {
|
||||
blocks: Record<string, any>
|
||||
edges: Array<Record<string, any>>
|
||||
loops: Record<string, any>
|
||||
parallels: Record<string, any>
|
||||
}
|
||||
operations: Array<Record<string, any>>
|
||||
userId: string
|
||||
}): Promise<{
|
||||
operationsForApply: Array<Record<string, any>>
|
||||
simulatedState: {
|
||||
blocks: Record<string, any>
|
||||
edges: Array<Record<string, any>>
|
||||
loops: Record<string, any>
|
||||
parallels: Record<string, any>
|
||||
}
|
||||
warnings: string[]
|
||||
diagnostics: string[]
|
||||
}> {
|
||||
const diagnostics: string[] = []
|
||||
const warnings: string[] = []
|
||||
|
||||
const permissionConfig = await getUserPermissionConfig(params.userId)
|
||||
const { filteredOperations, errors: preValidationErrors } = await preValidateCredentialInputs(
|
||||
params.operations as any,
|
||||
{ userId: params.userId },
|
||||
params.workflowState
|
||||
)
|
||||
for (const error of preValidationErrors) {
|
||||
warnings.push(error.error)
|
||||
}
|
||||
|
||||
const { state, validationErrors, skippedItems } = applyOperationsToWorkflowState(
|
||||
params.workflowState,
|
||||
filteredOperations as any,
|
||||
permissionConfig
|
||||
)
|
||||
|
||||
for (const validationError of validationErrors) {
|
||||
warnings.push(validationError.error)
|
||||
}
|
||||
for (const skippedItem of skippedItems) {
|
||||
warnings.push(skippedItem.reason)
|
||||
}
|
||||
|
||||
if (Object.keys(state.blocks || {}).length === 0) {
|
||||
diagnostics.push('Simulation produced an empty workflow state')
|
||||
}
|
||||
|
||||
return {
|
||||
operationsForApply: filteredOperations as Array<Record<string, any>>,
|
||||
simulatedState: state,
|
||||
warnings,
|
||||
diagnostics,
|
||||
}
|
||||
}
|
||||
|
||||
export const workflowChangeServerTool: BaseServerTool<WorkflowChangeParams, any> = {
|
||||
name: 'workflow_change',
|
||||
inputSchema: WorkflowChangeInputSchema,
|
||||
async execute(params: WorkflowChangeParams, context?: { userId: string }): Promise<any> {
|
||||
if (!context?.userId) {
|
||||
throw new Error('Unauthorized workflow access')
|
||||
}
|
||||
|
||||
if (params.mode === 'dry_run') {
|
||||
const workflowId = params.workflowId || getContextPack(params.contextPackId || '')?.workflowId
|
||||
if (!workflowId) {
|
||||
throw new Error('workflowId is required for dry_run')
|
||||
}
|
||||
if (!params.changeSpec) {
|
||||
throw new Error('changeSpec is required for dry_run')
|
||||
}
|
||||
|
||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId,
|
||||
userId: context.userId,
|
||||
action: 'write',
|
||||
})
|
||||
if (!authorization.allowed) {
|
||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
||||
}
|
||||
|
||||
const { workflowState } = await loadWorkflowStateFromDb(workflowId)
|
||||
const currentHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
|
||||
const requestedHash = params.baseSnapshotHash
|
||||
if (requestedHash && requestedHash !== currentHash) {
|
||||
throw new Error(
|
||||
`snapshot_mismatch: expected ${requestedHash} but current state is ${currentHash}`
|
||||
)
|
||||
}
|
||||
|
||||
const compileResult = await compileChangeSpec({
|
||||
changeSpec: params.changeSpec,
|
||||
workflowState,
|
||||
userId: context.userId,
|
||||
workflowId,
|
||||
})
|
||||
|
||||
const simulation = await validateAndSimulateOperations({
|
||||
workflowState,
|
||||
operations: compileResult.operations,
|
||||
userId: context.userId,
|
||||
})
|
||||
|
||||
const diffSummary = summarizeDiff(
|
||||
workflowState,
|
||||
simulation.simulatedState,
|
||||
simulation.operationsForApply
|
||||
)
|
||||
const diagnostics = [...compileResult.diagnostics, ...simulation.diagnostics]
|
||||
const warnings = [...compileResult.warnings, ...simulation.warnings]
|
||||
|
||||
const proposal: WorkflowChangeProposal = {
|
||||
workflowId,
|
||||
baseSnapshotHash: currentHash,
|
||||
compiledOperations: simulation.operationsForApply,
|
||||
diffSummary,
|
||||
warnings,
|
||||
diagnostics,
|
||||
touchedBlocks: compileResult.touchedBlocks,
|
||||
}
|
||||
const proposalId = saveProposal(proposal)
|
||||
|
||||
logger.info('Compiled workflow_change dry run', {
|
||||
workflowId,
|
||||
proposalId,
|
||||
operationCount: proposal.compiledOperations.length,
|
||||
warningCount: warnings.length,
|
||||
diagnosticsCount: diagnostics.length,
|
||||
})
|
||||
|
||||
return {
|
||||
success: diagnostics.length === 0,
|
||||
mode: 'dry_run',
|
||||
workflowId,
|
||||
proposalId,
|
||||
baseSnapshotHash: currentHash,
|
||||
compiledOperations: proposal.compiledOperations,
|
||||
diffSummary,
|
||||
warnings,
|
||||
diagnostics,
|
||||
touchedBlocks: proposal.touchedBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
// apply mode
|
||||
const proposalId = params.proposalId
|
||||
if (!proposalId) {
|
||||
throw new Error('proposalId is required for apply')
|
||||
}
|
||||
|
||||
const proposal = getProposal(proposalId)
|
||||
if (!proposal) {
|
||||
throw new Error(`Proposal not found or expired: ${proposalId}`)
|
||||
}
|
||||
|
||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId: proposal.workflowId,
|
||||
userId: context.userId,
|
||||
action: 'write',
|
||||
})
|
||||
if (!authorization.allowed) {
|
||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
||||
}
|
||||
|
||||
const { workflowState } = await loadWorkflowStateFromDb(proposal.workflowId)
|
||||
const currentHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
|
||||
const expectedHash = params.expectedSnapshotHash || proposal.baseSnapshotHash
|
||||
if (expectedHash && expectedHash !== currentHash) {
|
||||
throw new Error(`snapshot_mismatch: expected ${expectedHash} but current is ${currentHash}`)
|
||||
}
|
||||
|
||||
const applyResult = await editWorkflowServerTool.execute(
|
||||
{
|
||||
workflowId: proposal.workflowId,
|
||||
operations: proposal.compiledOperations as any,
|
||||
},
|
||||
{ userId: context.userId }
|
||||
)
|
||||
|
||||
const appliedWorkflowState = (applyResult as any)?.workflowState
|
||||
const newSnapshotHash = appliedWorkflowState
|
||||
? hashWorkflowState(appliedWorkflowState as Record<string, unknown>)
|
||||
: null
|
||||
|
||||
return {
|
||||
success: true,
|
||||
mode: 'apply',
|
||||
workflowId: proposal.workflowId,
|
||||
proposalId,
|
||||
baseSnapshotHash: proposal.baseSnapshotHash,
|
||||
newSnapshotHash,
|
||||
operations: proposal.compiledOperations,
|
||||
workflowState: appliedWorkflowState || null,
|
||||
appliedDiff: proposal.diffSummary,
|
||||
warnings: proposal.warnings,
|
||||
diagnostics: proposal.diagnostics,
|
||||
editResult: applyResult,
|
||||
}
|
||||
},
|
||||
}
|
||||
158
apps/sim/lib/copilot/tools/server/workflow/workflow-context.ts
Normal file
158
apps/sim/lib/copilot/tools/server/workflow/workflow-context.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { z } from 'zod'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { getContextPack, saveContextPack } from './change-store'
|
||||
import {
|
||||
buildSchemasByType,
|
||||
getAllKnownBlockTypes,
|
||||
hashWorkflowState,
|
||||
loadWorkflowStateFromDb,
|
||||
summarizeWorkflowState,
|
||||
} from './workflow-state'
|
||||
|
||||
const logger = createLogger('WorkflowContextServerTool')
|
||||
|
||||
const WorkflowContextGetInputSchema = z.object({
|
||||
workflowId: z.string(),
|
||||
objective: z.string().optional(),
|
||||
includeBlockTypes: z.array(z.string()).optional(),
|
||||
includeAllSchemas: z.boolean().optional(),
|
||||
})
|
||||
|
||||
type WorkflowContextGetParams = z.infer<typeof WorkflowContextGetInputSchema>
|
||||
|
||||
const WorkflowContextExpandInputSchema = z.object({
|
||||
contextPackId: z.string(),
|
||||
blockTypes: z.array(z.string()).optional(),
|
||||
schemaRefs: z.array(z.string()).optional(),
|
||||
})
|
||||
|
||||
type WorkflowContextExpandParams = z.infer<typeof WorkflowContextExpandInputSchema>
|
||||
|
||||
function parseSchemaRefToBlockType(schemaRef: string): string | null {
|
||||
if (!schemaRef) return null
|
||||
const [blockType] = schemaRef.split('@')
|
||||
return blockType || null
|
||||
}
|
||||
|
||||
function buildAvailableBlockCatalog(
|
||||
schemaRefsByType: Record<string, string>
|
||||
): Array<Record<string, any>> {
|
||||
return Object.entries(schemaRefsByType)
|
||||
.sort((a, b) => a[0].localeCompare(b[0]))
|
||||
.map(([blockType, schemaRef]) => ({
|
||||
blockType,
|
||||
schemaRef,
|
||||
}))
|
||||
}
|
||||
|
||||
export const workflowContextGetServerTool: BaseServerTool<WorkflowContextGetParams, any> = {
|
||||
name: 'workflow_context_get',
|
||||
inputSchema: WorkflowContextGetInputSchema,
|
||||
async execute(params: WorkflowContextGetParams, context?: { userId: string }): Promise<any> {
|
||||
if (!context?.userId) {
|
||||
throw new Error('Unauthorized workflow access')
|
||||
}
|
||||
|
||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId: params.workflowId,
|
||||
userId: context.userId,
|
||||
action: 'read',
|
||||
})
|
||||
if (!authorization.allowed) {
|
||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
||||
}
|
||||
|
||||
const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
|
||||
const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
|
||||
|
||||
const blockTypesInWorkflow = Object.values(workflowState.blocks || {}).map((block: any) =>
|
||||
String(block?.type || '')
|
||||
)
|
||||
const requestedTypes = params.includeBlockTypes || []
|
||||
const includeAllSchemas = params.includeAllSchemas === true
|
||||
const candidateTypes = includeAllSchemas
|
||||
? getAllKnownBlockTypes()
|
||||
: [...blockTypesInWorkflow, ...requestedTypes]
|
||||
const { schemasByType, schemaRefsByType } = buildSchemasByType(candidateTypes)
|
||||
|
||||
const summary = summarizeWorkflowState(workflowState)
|
||||
const packId = saveContextPack({
|
||||
workflowId: params.workflowId,
|
||||
snapshotHash,
|
||||
workflowState,
|
||||
schemasByType,
|
||||
schemaRefsByType,
|
||||
summary: {
|
||||
...summary,
|
||||
objective: params.objective || null,
|
||||
},
|
||||
})
|
||||
|
||||
logger.info('Generated workflow context pack', {
|
||||
workflowId: params.workflowId,
|
||||
contextPackId: packId,
|
||||
schemaCount: Object.keys(schemaRefsByType).length,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
contextPackId: packId,
|
||||
workflowId: params.workflowId,
|
||||
snapshotHash,
|
||||
summary: {
|
||||
...summary,
|
||||
objective: params.objective || null,
|
||||
},
|
||||
schemaRefsByType,
|
||||
availableBlockCatalog: buildAvailableBlockCatalog(schemaRefsByType),
|
||||
inScopeSchemas: schemasByType,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
export const workflowContextExpandServerTool: BaseServerTool<WorkflowContextExpandParams, any> = {
|
||||
name: 'workflow_context_expand',
|
||||
inputSchema: WorkflowContextExpandInputSchema,
|
||||
async execute(params: WorkflowContextExpandParams, context?: { userId: string }): Promise<any> {
|
||||
if (!context?.userId) {
|
||||
throw new Error('Unauthorized workflow access')
|
||||
}
|
||||
|
||||
const contextPack = getContextPack(params.contextPackId)
|
||||
if (!contextPack) {
|
||||
throw new Error(`Context pack not found or expired: ${params.contextPackId}`)
|
||||
}
|
||||
|
||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId: contextPack.workflowId,
|
||||
userId: context.userId,
|
||||
action: 'read',
|
||||
})
|
||||
if (!authorization.allowed) {
|
||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
||||
}
|
||||
|
||||
const requestedBlockTypes = new Set<string>()
|
||||
for (const blockType of params.blockTypes || []) {
|
||||
if (blockType) requestedBlockTypes.add(blockType)
|
||||
}
|
||||
for (const schemaRef of params.schemaRefs || []) {
|
||||
const blockType = parseSchemaRefToBlockType(schemaRef)
|
||||
if (blockType) requestedBlockTypes.add(blockType)
|
||||
}
|
||||
|
||||
const typesToExpand = [...requestedBlockTypes]
|
||||
const { schemasByType, schemaRefsByType } = buildSchemasByType(typesToExpand)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
contextPackId: params.contextPackId,
|
||||
workflowId: contextPack.workflowId,
|
||||
snapshotHash: contextPack.snapshotHash,
|
||||
schemasByType,
|
||||
schemaRefsByType,
|
||||
}
|
||||
},
|
||||
}
|
||||
226
apps/sim/lib/copilot/tools/server/workflow/workflow-state.ts
Normal file
226
apps/sim/lib/copilot/tools/server/workflow/workflow-state.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
import crypto from 'crypto'
|
||||
import { db } from '@sim/db'
|
||||
import { workflow as workflowTable } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { getAllBlockTypes, getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
|
||||
const logger = createLogger('WorkflowContextState')
|
||||
|
||||
function stableSortValue(value: any): any {
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(stableSortValue)
|
||||
}
|
||||
if (value && typeof value === 'object') {
|
||||
const sorted: Record<string, any> = {}
|
||||
for (const key of Object.keys(value).sort()) {
|
||||
sorted[key] = stableSortValue(value[key])
|
||||
}
|
||||
return sorted
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
export function hashWorkflowState(state: Record<string, unknown>): string {
|
||||
const stable = stableSortValue(state)
|
||||
const payload = JSON.stringify(stable)
|
||||
return `sha256:${crypto.createHash('sha256').update(payload).digest('hex')}`
|
||||
}
|
||||
|
||||
function normalizeOptions(options: unknown): string[] | null {
|
||||
if (!Array.isArray(options)) return null
|
||||
const normalized = options
|
||||
.map((option) => {
|
||||
if (option == null) return null
|
||||
if (typeof option === 'object') {
|
||||
const optionRecord = option as Record<string, unknown>
|
||||
const id = optionRecord.id
|
||||
if (typeof id === 'string') return id
|
||||
const label = optionRecord.label
|
||||
if (typeof label === 'string') return label
|
||||
return null
|
||||
}
|
||||
return String(option)
|
||||
})
|
||||
.filter((value): value is string => Boolean(value))
|
||||
return normalized.length > 0 ? normalized : null
|
||||
}
|
||||
|
||||
function serializeRequired(required: SubBlockConfig['required']): boolean | Record<string, any> {
|
||||
if (typeof required === 'boolean') return required
|
||||
if (!required) return false
|
||||
if (typeof required === 'object') {
|
||||
const out: Record<string, any> = {}
|
||||
const record = required as Record<string, unknown>
|
||||
for (const key of ['field', 'operator', 'value']) {
|
||||
if (record[key] !== undefined) {
|
||||
out[key] = record[key]
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
function serializeSubBlock(subBlock: SubBlockConfig): Record<string, unknown> {
|
||||
const staticOptions =
|
||||
typeof subBlock.options === 'function' ? null : normalizeOptions(subBlock.options)
|
||||
return {
|
||||
id: subBlock.id,
|
||||
type: subBlock.type,
|
||||
title: subBlock.title,
|
||||
description: subBlock.description || null,
|
||||
mode: subBlock.mode || null,
|
||||
placeholder: subBlock.placeholder || null,
|
||||
hidden: Boolean(subBlock.hidden),
|
||||
multiSelect: Boolean(subBlock.multiSelect),
|
||||
required: serializeRequired(subBlock.required),
|
||||
hasDynamicOptions: typeof subBlock.options === 'function',
|
||||
options: staticOptions,
|
||||
defaultValue: subBlock.defaultValue ?? null,
|
||||
min: subBlock.min ?? null,
|
||||
max: subBlock.max ?? null,
|
||||
}
|
||||
}
|
||||
|
||||
function serializeBlockSchema(blockType: string): Record<string, unknown> | null {
|
||||
const blockConfig = getBlock(blockType)
|
||||
if (!blockConfig) return null
|
||||
|
||||
const subBlocks = Array.isArray(blockConfig.subBlocks)
|
||||
? blockConfig.subBlocks.map(serializeSubBlock)
|
||||
: []
|
||||
const outputs = blockConfig.outputs || {}
|
||||
const outputKeys = Object.keys(outputs)
|
||||
|
||||
return {
|
||||
blockType,
|
||||
blockName: blockConfig.name || blockType,
|
||||
category: blockConfig.category,
|
||||
triggerAllowed: Boolean(blockConfig.triggerAllowed || blockConfig.triggers?.enabled),
|
||||
hasTriggersConfig: Boolean(blockConfig.triggers?.enabled),
|
||||
subBlocks,
|
||||
outputKeys,
|
||||
longDescription: blockConfig.longDescription || null,
|
||||
}
|
||||
}
|
||||
|
||||
export function buildSchemasByType(blockTypes: string[]): {
|
||||
schemasByType: Record<string, any>
|
||||
schemaRefsByType: Record<string, string>
|
||||
} {
|
||||
const schemasByType: Record<string, any> = {}
|
||||
const schemaRefsByType: Record<string, string> = {}
|
||||
|
||||
const uniqueTypes = [...new Set(blockTypes.filter(Boolean))]
|
||||
for (const blockType of uniqueTypes) {
|
||||
const schema = serializeBlockSchema(blockType)
|
||||
if (!schema) continue
|
||||
const stableSchema = stableSortValue(schema)
|
||||
const schemaHash = crypto
|
||||
.createHash('sha256')
|
||||
.update(JSON.stringify(stableSchema))
|
||||
.digest('hex')
|
||||
schemasByType[blockType] = stableSchema
|
||||
schemaRefsByType[blockType] = `${blockType}@sha256:${schemaHash}`
|
||||
}
|
||||
|
||||
return { schemasByType, schemaRefsByType }
|
||||
}
|
||||
|
||||
export async function loadWorkflowStateFromDb(workflowId: string): Promise<{
|
||||
workflowState: {
|
||||
blocks: Record<string, any>
|
||||
edges: Array<Record<string, any>>
|
||||
loops: Record<string, any>
|
||||
parallels: Record<string, any>
|
||||
}
|
||||
workspaceId?: string
|
||||
}> {
|
||||
const [workflowRecord] = await db
|
||||
.select({ workspaceId: workflowTable.workspaceId })
|
||||
.from(workflowTable)
|
||||
.where(eq(workflowTable.id, workflowId))
|
||||
.limit(1)
|
||||
if (!workflowRecord) {
|
||||
throw new Error(`Workflow ${workflowId} not found`)
|
||||
}
|
||||
|
||||
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
if (!normalized) {
|
||||
throw new Error(`Workflow ${workflowId} has no normalized data`)
|
||||
}
|
||||
|
||||
const blocks = { ...normalized.blocks }
|
||||
const invalidBlockIds: string[] = []
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
if (!(block as { type?: unknown })?.type) {
|
||||
invalidBlockIds.push(blockId)
|
||||
}
|
||||
}
|
||||
|
||||
for (const blockId of invalidBlockIds) {
|
||||
delete blocks[blockId]
|
||||
}
|
||||
|
||||
const invalidSet = new Set(invalidBlockIds)
|
||||
const edges = (normalized.edges || []).filter(
|
||||
(edge: any) => !invalidSet.has(edge.source) && !invalidSet.has(edge.target)
|
||||
)
|
||||
|
||||
if (invalidBlockIds.length > 0) {
|
||||
logger.warn('Dropped blocks without type while loading workflow state', {
|
||||
workflowId,
|
||||
dropped: invalidBlockIds,
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
workflowState: {
|
||||
blocks,
|
||||
edges,
|
||||
loops: normalized.loops || {},
|
||||
parallels: normalized.parallels || {},
|
||||
},
|
||||
workspaceId: workflowRecord.workspaceId || undefined,
|
||||
}
|
||||
}
|
||||
|
||||
export function summarizeWorkflowState(workflowState: {
|
||||
blocks: Record<string, any>
|
||||
edges: Array<Record<string, any>>
|
||||
loops: Record<string, any>
|
||||
parallels: Record<string, any>
|
||||
}): Record<string, unknown> {
|
||||
const blocks = workflowState.blocks || {}
|
||||
const edges = workflowState.edges || []
|
||||
const blockTypes: Record<string, number> = {}
|
||||
const triggerBlocks: Array<{ id: string; name: string; type: string }> = []
|
||||
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
const blockType = String((block as Record<string, unknown>).type || 'unknown')
|
||||
blockTypes[blockType] = (blockTypes[blockType] || 0) + 1
|
||||
if ((block as Record<string, unknown>).triggerMode === true) {
|
||||
triggerBlocks.push({
|
||||
id: blockId,
|
||||
name: String((block as Record<string, unknown>).name || blockType),
|
||||
type: blockType,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
blockCount: Object.keys(blocks).length,
|
||||
edgeCount: edges.length,
|
||||
loopCount: Object.keys(workflowState.loops || {}).length,
|
||||
parallelCount: Object.keys(workflowState.parallels || {}).length,
|
||||
blockTypes,
|
||||
triggerBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
export function getAllKnownBlockTypes(): string[] {
|
||||
return getAllBlockTypes()
|
||||
}
|
||||
194
apps/sim/lib/copilot/tools/server/workflow/workflow-verify.ts
Normal file
194
apps/sim/lib/copilot/tools/server/workflow/workflow-verify.ts
Normal file
@@ -0,0 +1,194 @@
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'

const logger = createLogger('WorkflowVerifyServerTool')

const AcceptanceItemSchema = z.union([
  z.string(),
  z.object({
    kind: z.string().optional(),
    assert: z.string(),
  }),
])

const WorkflowVerifyInputSchema = z
  .object({
    workflowId: z.string(),
    acceptance: z.array(AcceptanceItemSchema).optional(),
    baseSnapshotHash: z.string().optional(),
  })
  .strict()

type WorkflowVerifyParams = z.infer<typeof WorkflowVerifyInputSchema>

function normalizeName(value: string): string {
  return value.trim().toLowerCase()
}

function resolveBlockToken(
  workflowState: { blocks: Record<string, any> },
  token: string
): string | null {
  if (!token) return null
  if (workflowState.blocks[token]) return token
  const normalized = normalizeName(token)
  for (const [blockId, block] of Object.entries(workflowState.blocks || {})) {
    const blockName = normalizeName(String((block as Record<string, unknown>).name || ''))
    if (blockName === normalized) return blockId
  }
  return null
}

function hasPath(
  workflowState: { edges: Array<Record<string, any>> },
  blockPath: string[]
): boolean {
  if (blockPath.length < 2) return true
  const adjacency = new Map<string, string[]>()
  for (const edge of workflowState.edges || []) {
    const source = String(edge.source || '')
    const target = String(edge.target || '')
    if (!source || !target) continue
    const existing = adjacency.get(source) || []
    existing.push(target)
    adjacency.set(source, existing)
  }

  for (let i = 0; i < blockPath.length - 1; i++) {
    const from = blockPath[i]
    const to = blockPath[i + 1]
    const next = adjacency.get(from) || []
    if (!next.includes(to)) return false
  }
  return true
}

function evaluateAssertions(params: {
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
  }
  assertions: string[]
}): { failures: string[]; checks: Array<Record<string, any>> } {
  const failures: string[] = []
  const checks: Array<Record<string, any>> = []

  for (const assertion of params.assertions) {
    if (assertion.startsWith('block_exists:')) {
      const token = assertion.slice('block_exists:'.length).trim()
      const blockId = resolveBlockToken(params.workflowState, token)
      const passed = Boolean(blockId)
      checks.push({ assert: assertion, passed, resolvedBlockId: blockId || null })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    if (assertion.startsWith('trigger_exists:')) {
      const triggerType = normalizeName(assertion.slice('trigger_exists:'.length))
      const triggerBlock = Object.values(params.workflowState.blocks || {}).find((block: any) => {
        if (block?.triggerMode !== true) return false
        return normalizeName(String(block?.type || '')) === triggerType
      })
      const passed = Boolean(triggerBlock)
      checks.push({ assert: assertion, passed })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    if (assertion.startsWith('path_exists:')) {
      const rawPath = assertion.slice('path_exists:'.length).trim()
      const tokens = rawPath
        .split('->')
        .map((token) => token.trim())
        .filter(Boolean)
      const resolvedPath = tokens
        .map((token) => resolveBlockToken(params.workflowState, token))
        .filter((value): value is string => Boolean(value))

      const resolvedAll = resolvedPath.length === tokens.length
      const passed = resolvedAll && hasPath(params.workflowState, resolvedPath)
      checks.push({
        assert: assertion,
        passed,
        resolvedPath,
      })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    // Unknown assertion format - mark as warning failure for explicit visibility.
    checks.push({ assert: assertion, passed: false, reason: 'unknown_assertion_type' })
    failures.push(`Unknown assertion format: ${assertion}`)
  }

  return { failures, checks }
}

export const workflowVerifyServerTool: BaseServerTool<WorkflowVerifyParams, any> = {
  name: 'workflow_verify',
  inputSchema: WorkflowVerifyInputSchema,
  async execute(params: WorkflowVerifyParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: params.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
    const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
    if (params.baseSnapshotHash && params.baseSnapshotHash !== snapshotHash) {
      return {
        success: false,
        verified: false,
        reason: 'snapshot_mismatch',
        expected: params.baseSnapshotHash,
        current: snapshotHash,
      }
    }

    const validation = validateWorkflowState(workflowState as any, { sanitize: false })

    const assertions = (params.acceptance || []).map((item) =>
      typeof item === 'string' ? item : item.assert
    )
    const assertionResults = evaluateAssertions({
      workflowState,
      assertions,
    })

    const verified =
      validation.valid && assertionResults.failures.length === 0 && validation.errors.length === 0

    logger.info('Workflow verification complete', {
      workflowId: params.workflowId,
      verified,
      errorCount: validation.errors.length,
      warningCount: validation.warnings.length,
      assertionFailures: assertionResults.failures.length,
    })

    return {
      success: true,
      verified,
      snapshotHash,
      validation: {
        valid: validation.valid,
        errors: validation.errors,
        warnings: validation.warnings,
      },
      assertions: assertionResults.checks,
      failures: assertionResults.failures,
    }
  },
}
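A minimal sketch of how this tool might be invoked, assuming a caller that already holds a user context; the workflow ID, block names, and assertion strings below are illustrative, not taken from the repository:

```ts
// Hypothetical invocation of the workflow_verify server tool.
// The assertion prefixes (block_exists:, trigger_exists:, path_exists:) and the
// '->' path separator follow evaluateAssertions above.
const result = await workflowVerifyServerTool.execute(
  {
    workflowId: 'wf_example',
    acceptance: [
      'trigger_exists: webhook',
      'block_exists: Send Email',
      { kind: 'path', assert: 'path_exists: Webhook -> Agent -> Send Email' },
    ],
  },
  { userId: 'user_example' }
)
// result.verified is true only when schema validation passes and every assertion resolves;
// a stale baseSnapshotHash instead yields { success: false, reason: 'snapshot_mismatch' }.
```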
@@ -1,62 +0,0 @@
import { db } from '@sim/db'
import { credential, credentialMember } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

type ActiveCredentialMember = typeof credentialMember.$inferSelect
type CredentialRecord = typeof credential.$inferSelect

export interface CredentialActorContext {
  credential: CredentialRecord | null
  member: ActiveCredentialMember | null
  hasWorkspaceAccess: boolean
  canWriteWorkspace: boolean
  isAdmin: boolean
}

/**
 * Resolves user access context for a credential.
 */
export async function getCredentialActorContext(
  credentialId: string,
  userId: string
): Promise<CredentialActorContext> {
  const [credentialRow] = await db
    .select()
    .from(credential)
    .where(eq(credential.id, credentialId))
    .limit(1)

  if (!credentialRow) {
    return {
      credential: null,
      member: null,
      hasWorkspaceAccess: false,
      canWriteWorkspace: false,
      isAdmin: false,
    }
  }

  const workspaceAccess = await checkWorkspaceAccess(credentialRow.workspaceId, userId)
  const [memberRow] = await db
    .select()
    .from(credentialMember)
    .where(
      and(
        eq(credentialMember.credentialId, credentialId),
        eq(credentialMember.userId, userId),
        eq(credentialMember.status, 'active')
      )
    )
    .limit(1)

  const isAdmin = memberRow?.role === 'admin'

  return {
    credential: credentialRow,
    member: memberRow ?? null,
    hasWorkspaceAccess: workspaceAccess.hasAccess,
    canWriteWorkspace: workspaceAccess.canWrite,
    isAdmin,
  }
}
@@ -1,77 +0,0 @@
'use client'

export const PENDING_OAUTH_CREDENTIAL_DRAFT_KEY = 'sim.pending-oauth-credential-draft'
export const PENDING_CREDENTIAL_CREATE_REQUEST_KEY = 'sim.pending-credential-create-request'

export interface PendingOAuthCredentialDraft {
  workspaceId: string
  providerId: string
  displayName: string
  existingCredentialIds: string[]
  existingAccountIds: string[]
  requestedAt: number
}

interface PendingOAuthCredentialCreateRequest {
  workspaceId: string
  type: 'oauth'
  providerId: string
  displayName: string
  serviceId: string
  requiredScopes: string[]
  requestedAt: number
}

interface PendingSecretCredentialCreateRequest {
  workspaceId: string
  type: 'env_personal' | 'env_workspace'
  envKey?: string
  requestedAt: number
}

export type PendingCredentialCreateRequest =
  | PendingOAuthCredentialCreateRequest
  | PendingSecretCredentialCreateRequest

function parseJson<T>(raw: string | null): T | null {
  if (!raw) return null
  try {
    return JSON.parse(raw) as T
  } catch {
    return null
  }
}

export function readPendingOAuthCredentialDraft(): PendingOAuthCredentialDraft | null {
  if (typeof window === 'undefined') return null
  return parseJson<PendingOAuthCredentialDraft>(
    window.sessionStorage.getItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
  )
}

export function writePendingOAuthCredentialDraft(payload: PendingOAuthCredentialDraft) {
  if (typeof window === 'undefined') return
  window.sessionStorage.setItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY, JSON.stringify(payload))
}

export function clearPendingOAuthCredentialDraft() {
  if (typeof window === 'undefined') return
  window.sessionStorage.removeItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
}

export function readPendingCredentialCreateRequest(): PendingCredentialCreateRequest | null {
  if (typeof window === 'undefined') return null
  return parseJson<PendingCredentialCreateRequest>(
    window.sessionStorage.getItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
  )
}

export function writePendingCredentialCreateRequest(payload: PendingCredentialCreateRequest) {
  if (typeof window === 'undefined') return
  window.sessionStorage.setItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY, JSON.stringify(payload))
}

export function clearPendingCredentialCreateRequest() {
  if (typeof window === 'undefined') return
  window.sessionStorage.removeItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
}
@@ -1,356 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { credential, credentialMember, permissions, workspace } from '@sim/db/schema'
|
||||
import { and, eq, inArray, notInArray } from 'drizzle-orm'
|
||||
|
||||
interface AccessibleEnvCredential {
|
||||
type: 'env_workspace' | 'env_personal'
|
||||
envKey: string
|
||||
envOwnerUserId: string | null
|
||||
updatedAt: Date
|
||||
}
|
||||
|
||||
function getPostgresErrorCode(error: unknown): string | undefined {
|
||||
if (!error || typeof error !== 'object') return undefined
|
||||
const err = error as { code?: string; cause?: { code?: string } }
|
||||
return err.code || err.cause?.code
|
||||
}
|
||||
|
||||
export async function getWorkspaceMemberUserIds(workspaceId: string): Promise<string[]> {
|
||||
const [workspaceRows, permissionRows] = await Promise.all([
|
||||
db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1),
|
||||
db
|
||||
.select({ userId: permissions.userId })
|
||||
.from(permissions)
|
||||
.where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))),
|
||||
])
|
||||
const workspaceRow = workspaceRows[0]
|
||||
|
||||
const memberIds = new Set<string>(permissionRows.map((row) => row.userId))
|
||||
if (workspaceRow?.ownerId) {
|
||||
memberIds.add(workspaceRow.ownerId)
|
||||
}
|
||||
return Array.from(memberIds)
|
||||
}
|
||||
|
||||
export async function getUserWorkspaceIds(userId: string): Promise<string[]> {
|
||||
const [permissionRows, ownedWorkspaceRows] = await Promise.all([
|
||||
db
|
||||
.select({ workspaceId: workspace.id })
|
||||
.from(permissions)
|
||||
.innerJoin(
|
||||
workspace,
|
||||
and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspace.id))
|
||||
)
|
||||
.where(eq(permissions.userId, userId)),
|
||||
db.select({ workspaceId: workspace.id }).from(workspace).where(eq(workspace.ownerId, userId)),
|
||||
])
|
||||
|
||||
const workspaceIds = new Set<string>(permissionRows.map((row) => row.workspaceId))
|
||||
for (const row of ownedWorkspaceRows) {
|
||||
workspaceIds.add(row.workspaceId)
|
||||
}
|
||||
|
||||
return Array.from(workspaceIds)
|
||||
}
|
||||
|
||||
async function upsertCredentialAdminMember(credentialId: string, adminUserId: string) {
|
||||
const now = new Date()
|
||||
const [existingMembership] = await db
|
||||
.select({ id: credentialMember.id, joinedAt: credentialMember.joinedAt })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, adminUserId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingMembership) {
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: existingMembership.joinedAt ?? now,
|
||||
invitedBy: adminUserId,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credentialMember.id, existingMembership.id))
|
||||
return
|
||||
}
|
||||
|
||||
await db.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: adminUserId,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: adminUserId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
|
||||
async function ensureWorkspaceCredentialMemberships(
|
||||
credentialId: string,
|
||||
workspaceId: string,
|
||||
ownerUserId: string
|
||||
) {
|
||||
const workspaceMemberUserIds = await getWorkspaceMemberUserIds(workspaceId)
|
||||
if (!workspaceMemberUserIds.length) return
|
||||
|
||||
const existingMemberships = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
userId: credentialMember.userId,
|
||||
joinedAt: credentialMember.joinedAt,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
inArray(credentialMember.userId, workspaceMemberUserIds)
|
||||
)
|
||||
)
|
||||
|
||||
const byUserId = new Map(existingMemberships.map((row) => [row.userId, row]))
|
||||
const now = new Date()
|
||||
|
||||
for (const memberUserId of workspaceMemberUserIds) {
|
||||
const targetRole = memberUserId === ownerUserId ? 'admin' : 'member'
|
||||
const existing = byUserId.get(memberUserId)
|
||||
if (existing) {
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({
|
||||
role: targetRole,
|
||||
status: 'active',
|
||||
joinedAt: existing.joinedAt ?? now,
|
||||
invitedBy: ownerUserId,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credentialMember.id, existing.id))
|
||||
continue
|
||||
}
|
||||
|
||||
await db.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: memberUserId,
|
||||
role: targetRole,
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: ownerUserId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function syncWorkspaceEnvCredentials(params: {
|
||||
workspaceId: string
|
||||
envKeys: string[]
|
||||
actingUserId: string
|
||||
}) {
|
||||
const { workspaceId, envKeys, actingUserId } = params
|
||||
const [workspaceRow] = await db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
if (!workspaceRow) return
|
||||
|
||||
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
|
||||
const existingCredentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
envKey: credential.envKey,
|
||||
})
|
||||
.from(credential)
|
||||
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
|
||||
|
||||
const existingByKey = new Map(
|
||||
existingCredentials
|
||||
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
|
||||
.map((row) => [row.envKey, row.id])
|
||||
)
|
||||
|
||||
const credentialIdsToEnsureMembership = new Set<string>()
|
||||
const now = new Date()
|
||||
|
||||
for (const envKey of normalizedKeys) {
|
||||
const existingId = existingByKey.get(envKey)
|
||||
if (existingId) {
|
||||
credentialIdsToEnsureMembership.add(existingId)
|
||||
continue
|
||||
}
|
||||
|
||||
const createdId = crypto.randomUUID()
|
||||
try {
|
||||
await db.insert(credential).values({
|
||||
id: createdId,
|
||||
workspaceId,
|
||||
type: 'env_workspace',
|
||||
displayName: envKey,
|
||||
envKey,
|
||||
createdBy: actingUserId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
credentialIdsToEnsureMembership.add(createdId)
|
||||
} catch (error: unknown) {
|
||||
const code = getPostgresErrorCode(error)
|
||||
if (code !== '23505') throw error
|
||||
}
|
||||
}
|
||||
|
||||
for (const credentialId of credentialIdsToEnsureMembership) {
|
||||
await ensureWorkspaceCredentialMemberships(credentialId, workspaceId, workspaceRow.ownerId)
|
||||
}
|
||||
|
||||
if (normalizedKeys.length > 0) {
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_workspace'),
|
||||
notInArray(credential.envKey, normalizedKeys)
|
||||
)
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
|
||||
}
|
||||
|
||||
export async function syncPersonalEnvCredentialsForUser(params: {
|
||||
userId: string
|
||||
envKeys: string[]
|
||||
}) {
|
||||
const { userId, envKeys } = params
|
||||
const workspaceIds = await getUserWorkspaceIds(userId)
|
||||
if (!workspaceIds.length) return
|
||||
|
||||
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
|
||||
const now = new Date()
|
||||
|
||||
for (const workspaceId of workspaceIds) {
|
||||
const existingCredentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
envKey: credential.envKey,
|
||||
})
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envOwnerUserId, userId)
|
||||
)
|
||||
)
|
||||
|
||||
const existingByKey = new Map(
|
||||
existingCredentials
|
||||
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
|
||||
.map((row) => [row.envKey, row.id])
|
||||
)
|
||||
|
||||
for (const envKey of normalizedKeys) {
|
||||
const existingId = existingByKey.get(envKey)
|
||||
if (existingId) {
|
||||
await upsertCredentialAdminMember(existingId, userId)
|
||||
continue
|
||||
}
|
||||
|
||||
const createdId = crypto.randomUUID()
|
||||
try {
|
||||
await db.insert(credential).values({
|
||||
id: createdId,
|
||||
workspaceId,
|
||||
type: 'env_personal',
|
||||
displayName: envKey,
|
||||
envKey,
|
||||
envOwnerUserId: userId,
|
||||
createdBy: userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
await upsertCredentialAdminMember(createdId, userId)
|
||||
} catch (error: unknown) {
|
||||
const code = getPostgresErrorCode(error)
|
||||
if (code !== '23505') throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (normalizedKeys.length > 0) {
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envOwnerUserId, userId),
|
||||
notInArray(credential.envKey, normalizedKeys)
|
||||
)
|
||||
)
|
||||
continue
|
||||
}
|
||||
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envOwnerUserId, userId)
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function getAccessibleEnvCredentials(
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
): Promise<AccessibleEnvCredential[]> {
|
||||
const rows = await db
|
||||
.select({
|
||||
type: credential.type,
|
||||
envKey: credential.envKey,
|
||||
envOwnerUserId: credential.envOwnerUserId,
|
||||
updatedAt: credential.updatedAt,
|
||||
})
|
||||
.from(credential)
|
||||
.innerJoin(
|
||||
credentialMember,
|
||||
and(
|
||||
eq(credentialMember.credentialId, credential.id),
|
||||
eq(credentialMember.userId, userId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
inArray(credential.type, ['env_workspace', 'env_personal'])
|
||||
)
|
||||
)
|
||||
|
||||
return rows
|
||||
.filter(
|
||||
(row): row is AccessibleEnvCredential =>
|
||||
(row.type === 'env_workspace' || row.type === 'env_personal') && Boolean(row.envKey)
|
||||
)
|
||||
.map((row) => ({
|
||||
type: row.type,
|
||||
envKey: row.envKey!,
|
||||
envOwnerUserId: row.envOwnerUserId,
|
||||
updatedAt: row.updatedAt,
|
||||
}))
|
||||
}
|
||||
@@ -1,195 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember } from '@sim/db/schema'
|
||||
import { and, eq, inArray } from 'drizzle-orm'
|
||||
import { getServiceConfigByProviderId } from '@/lib/oauth'
|
||||
|
||||
interface SyncWorkspaceOAuthCredentialsForUserParams {
|
||||
workspaceId: string
|
||||
userId: string
|
||||
}
|
||||
|
||||
interface SyncWorkspaceOAuthCredentialsForUserResult {
|
||||
createdCredentials: number
|
||||
updatedMemberships: number
|
||||
}
|
||||
|
||||
function getPostgresErrorCode(error: unknown): string | undefined {
|
||||
if (!error || typeof error !== 'object') return undefined
|
||||
const err = error as { code?: string; cause?: { code?: string } }
|
||||
return err.code || err.cause?.code
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures connected OAuth accounts for a user exist as workspace-scoped credentials.
|
||||
*/
|
||||
export async function syncWorkspaceOAuthCredentialsForUser(
|
||||
params: SyncWorkspaceOAuthCredentialsForUserParams
|
||||
): Promise<SyncWorkspaceOAuthCredentialsForUserResult> {
|
||||
const { workspaceId, userId } = params
|
||||
|
||||
const userAccounts = await db
|
||||
.select({
|
||||
id: account.id,
|
||||
providerId: account.providerId,
|
||||
accountId: account.accountId,
|
||||
})
|
||||
.from(account)
|
||||
.where(eq(account.userId, userId))
|
||||
|
||||
if (userAccounts.length === 0) {
|
||||
return { createdCredentials: 0, updatedMemberships: 0 }
|
||||
}
|
||||
|
||||
const accountIds = userAccounts.map((row) => row.id)
|
||||
const existingCredentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
displayName: credential.displayName,
|
||||
providerId: credential.providerId,
|
||||
accountId: credential.accountId,
|
||||
})
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'oauth'),
|
||||
inArray(credential.accountId, accountIds)
|
||||
)
|
||||
)
|
||||
|
||||
const now = new Date()
|
||||
const userAccountById = new Map(userAccounts.map((row) => [row.id, row]))
|
||||
for (const existingCredential of existingCredentials) {
|
||||
if (!existingCredential.accountId) continue
|
||||
const linkedAccount = userAccountById.get(existingCredential.accountId)
|
||||
if (!linkedAccount) continue
|
||||
|
||||
const normalizedLabel =
|
||||
getServiceConfigByProviderId(linkedAccount.providerId)?.name || linkedAccount.providerId
|
||||
const shouldNormalizeDisplayName =
|
||||
existingCredential.displayName === linkedAccount.accountId ||
|
||||
existingCredential.displayName === linkedAccount.providerId
|
||||
|
||||
if (!shouldNormalizeDisplayName || existingCredential.displayName === normalizedLabel) {
|
||||
continue
|
||||
}
|
||||
|
||||
await db
|
||||
.update(credential)
|
||||
.set({
|
||||
displayName: normalizedLabel,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credential.id, existingCredential.id))
|
||||
}
|
||||
|
||||
const existingByAccountId = new Map(
|
||||
existingCredentials
|
||||
.filter((row) => Boolean(row.accountId))
|
||||
.map((row) => [row.accountId!, row.id])
|
||||
)
|
||||
|
||||
let createdCredentials = 0
|
||||
|
||||
for (const acc of userAccounts) {
|
||||
if (existingByAccountId.has(acc.id)) {
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
await db.insert(credential).values({
|
||||
id: crypto.randomUUID(),
|
||||
workspaceId,
|
||||
type: 'oauth',
|
||||
displayName: getServiceConfigByProviderId(acc.providerId)?.name || acc.providerId,
|
||||
providerId: acc.providerId,
|
||||
accountId: acc.id,
|
||||
createdBy: userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
createdCredentials += 1
|
||||
} catch (error) {
|
||||
if (getPostgresErrorCode(error) !== '23505') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const credentialRows = await db
|
||||
.select({ id: credential.id, accountId: credential.accountId })
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'oauth'),
|
||||
inArray(credential.accountId, accountIds)
|
||||
)
|
||||
)
|
||||
|
||||
const credentialIdByAccountId = new Map(
|
||||
credentialRows.filter((row) => Boolean(row.accountId)).map((row) => [row.accountId!, row.id])
|
||||
)
|
||||
const allCredentialIds = Array.from(credentialIdByAccountId.values())
|
||||
if (allCredentialIds.length === 0) {
|
||||
return { createdCredentials, updatedMemberships: 0 }
|
||||
}
|
||||
|
||||
const existingMemberships = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
credentialId: credentialMember.credentialId,
|
||||
joinedAt: credentialMember.joinedAt,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
inArray(credentialMember.credentialId, allCredentialIds),
|
||||
eq(credentialMember.userId, userId)
|
||||
)
|
||||
)
|
||||
|
||||
const membershipByCredentialId = new Map(
|
||||
existingMemberships.map((row) => [row.credentialId, row])
|
||||
)
|
||||
let updatedMemberships = 0
|
||||
|
||||
for (const credentialId of allCredentialIds) {
|
||||
const existingMembership = membershipByCredentialId.get(credentialId)
|
||||
if (existingMembership) {
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: existingMembership.joinedAt ?? now,
|
||||
invitedBy: userId,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credentialMember.id, existingMembership.id))
|
||||
updatedMemberships += 1
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
await db.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
updatedMemberships += 1
|
||||
} catch (error) {
|
||||
if (getPostgresErrorCode(error) !== '23505') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { createdCredentials, updatedMemberships }
|
||||
}
|
||||
@@ -1,9 +1,8 @@
|
||||
import { db } from '@sim/db'
|
||||
import { environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq, inArray } from 'drizzle-orm'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getAccessibleEnvCredentials } from '@/lib/credentials/environment'
|
||||
|
||||
const logger = createLogger('EnvironmentUtils')
|
||||
|
||||
@@ -54,7 +53,7 @@ export async function getPersonalAndWorkspaceEnv(
|
||||
conflicts: string[]
|
||||
decryptionFailures: string[]
|
||||
}> {
|
||||
const [personalRows, workspaceRows, accessibleEnvCredentials] = await Promise.all([
|
||||
const [personalRows, workspaceRows] = await Promise.all([
|
||||
db.select().from(environment).where(eq(environment.userId, userId)).limit(1),
|
||||
workspaceId
|
||||
? db
|
||||
@@ -63,69 +62,10 @@ export async function getPersonalAndWorkspaceEnv(
|
||||
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
|
||||
.limit(1)
|
||||
: Promise.resolve([] as any[]),
|
||||
workspaceId ? getAccessibleEnvCredentials(workspaceId, userId) : Promise.resolve([]),
|
||||
])
|
||||
|
||||
const ownPersonalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
|
||||
const allWorkspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
|
||||
|
||||
const hasCredentialFiltering = Boolean(workspaceId) && accessibleEnvCredentials.length > 0
|
||||
const workspaceCredentialKeys = new Set(
|
||||
accessibleEnvCredentials.filter((row) => row.type === 'env_workspace').map((row) => row.envKey)
|
||||
)
|
||||
|
||||
const personalCredentialRows = accessibleEnvCredentials
|
||||
.filter((row) => row.type === 'env_personal' && row.envOwnerUserId)
|
||||
.sort((a, b) => {
|
||||
const aIsRequester = a.envOwnerUserId === userId
|
||||
const bIsRequester = b.envOwnerUserId === userId
|
||||
if (aIsRequester && !bIsRequester) return -1
|
||||
if (!aIsRequester && bIsRequester) return 1
|
||||
return b.updatedAt.getTime() - a.updatedAt.getTime()
|
||||
})
|
||||
|
||||
const selectedPersonalOwners = new Map<string, string>()
|
||||
for (const row of personalCredentialRows) {
|
||||
if (!selectedPersonalOwners.has(row.envKey) && row.envOwnerUserId) {
|
||||
selectedPersonalOwners.set(row.envKey, row.envOwnerUserId)
|
||||
}
|
||||
}
|
||||
|
||||
const ownerUserIds = Array.from(new Set(selectedPersonalOwners.values()))
|
||||
const ownerEnvironmentRows =
|
||||
ownerUserIds.length > 0
|
||||
? await db
|
||||
.select({
|
||||
userId: environment.userId,
|
||||
variables: environment.variables,
|
||||
})
|
||||
.from(environment)
|
||||
.where(inArray(environment.userId, ownerUserIds))
|
||||
: []
|
||||
|
||||
const ownerVariablesByUserId = new Map<string, Record<string, string>>(
|
||||
ownerEnvironmentRows.map((row) => [row.userId, (row.variables as Record<string, string>) || {}])
|
||||
)
|
||||
|
||||
let personalEncrypted: Record<string, string> = ownPersonalEncrypted
|
||||
let workspaceEncrypted: Record<string, string> = allWorkspaceEncrypted
|
||||
|
||||
if (hasCredentialFiltering) {
|
||||
personalEncrypted = {}
|
||||
for (const [envKey, ownerUserId] of selectedPersonalOwners.entries()) {
|
||||
const ownerVariables = ownerVariablesByUserId.get(ownerUserId)
|
||||
const encryptedValue = ownerVariables?.[envKey]
|
||||
if (encryptedValue) {
|
||||
personalEncrypted[envKey] = encryptedValue
|
||||
}
|
||||
}
|
||||
|
||||
workspaceEncrypted = Object.fromEntries(
|
||||
Object.entries(allWorkspaceEncrypted).filter(([envKey]) =>
|
||||
workspaceCredentialKeys.has(envKey)
|
||||
)
|
||||
)
|
||||
}
|
||||
const personalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
|
||||
const workspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
|
||||
|
||||
const decryptionFailures: string[] = []
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
export type SettingsSection =
  | 'general'
  | 'credentials'
  | 'environment'
  | 'template-profile'
  | 'integrations'

@@ -18,7 +18,6 @@ import {
|
||||
import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers'
|
||||
import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types'
|
||||
import {
|
||||
COPILOT_AUTO_ALLOWED_TOOLS_API_PATH,
|
||||
COPILOT_CHAT_API_PATH,
|
||||
COPILOT_CHAT_STREAM_API_PATH,
|
||||
COPILOT_CHECKPOINTS_API_PATH,
|
||||
@@ -84,6 +83,15 @@ function isPageUnloading(): boolean {
|
||||
return _isPageUnloading
|
||||
}
|
||||
|
||||
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
||||
if (name === 'edit_workflow') return true
|
||||
if (name !== 'workflow_change') return false
|
||||
|
||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||
if (mode === 'apply') return true
|
||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||
}
|
||||
|
||||
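As a quick sanity check of the predicate above, these are the outcomes it produces for a few hypothetical tool calls (parameter values are illustrative, not from the repository):

```ts
// Illustrative only: how isWorkflowEditToolCall classifies tool calls.
isWorkflowEditToolCall('edit_workflow')                               // true
isWorkflowEditToolCall('workflow_change', { mode: 'apply' })          // true
isWorkflowEditToolCall('workflow_change', { proposalId: 'prop_123' }) // true
isWorkflowEditToolCall('workflow_change', { mode: 'propose' })        // false
isWorkflowEditToolCall('get_workflow')                                // false
```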
function readActiveStreamFromStorage(): CopilotStreamInfo | null {
|
||||
if (typeof window === 'undefined') return null
|
||||
try {
|
||||
@@ -140,41 +148,6 @@ function updateActiveStreamEventId(
|
||||
writeActiveStreamToStorage(next)
|
||||
}
|
||||
|
||||
const AUTO_ALLOWED_TOOLS_STORAGE_KEY = 'copilot_auto_allowed_tools'
|
||||
|
||||
function readAutoAllowedToolsFromStorage(): string[] | null {
|
||||
if (typeof window === 'undefined') return null
|
||||
try {
|
||||
const raw = window.localStorage.getItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY)
|
||||
if (!raw) return null
|
||||
const parsed = JSON.parse(raw)
|
||||
if (!Array.isArray(parsed)) return null
|
||||
return parsed.filter((item): item is string => typeof item === 'string')
|
||||
} catch (error) {
|
||||
logger.warn('[AutoAllowedTools] Failed to read local cache', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
function writeAutoAllowedToolsToStorage(tools: string[]): void {
|
||||
if (typeof window === 'undefined') return
|
||||
try {
|
||||
window.localStorage.setItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY, JSON.stringify(tools))
|
||||
} catch (error) {
|
||||
logger.warn('[AutoAllowedTools] Failed to write local cache', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function isToolAutoAllowedByList(toolId: string, autoAllowedTools: string[]): boolean {
|
||||
if (!toolId) return false
|
||||
const normalizedTarget = toolId.trim()
|
||||
return autoAllowedTools.some((allowed) => allowed?.trim() === normalizedTarget)
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear any lingering diff preview from a previous session.
|
||||
* Called lazily when the store is first activated (setWorkflowId).
|
||||
@@ -310,6 +283,50 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
|
||||
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
|
||||
*
|
||||
* Examples:
|
||||
* - claude-4.5-opus -> claude-opus-4-5
|
||||
* - claude-opus-4.6 -> claude-opus-4-6
|
||||
* - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
|
||||
*/
|
||||
function canonicalizeModelMatchKey(modelId: string): string {
|
||||
if (!modelId) return modelId
|
||||
const normalized = modelId.trim().toLowerCase()
|
||||
|
||||
const toCanonicalClaude = (tier: string, version: string): string => {
|
||||
const normalizedVersion = version.replace(/\./g, '-')
|
||||
return `claude-${tier}-${normalizedVersion}`
|
||||
}
|
||||
|
||||
const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
|
||||
if (tierFirstExact) {
|
||||
const [, tier, version] = tierFirstExact
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
|
||||
if (versionFirstExact) {
|
||||
const [, version, tier] = versionFirstExact
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
|
||||
if (tierFirstEmbedded) {
|
||||
const [, tier, version] = tierFirstEmbedded
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
|
||||
if (versionFirstEmbedded) {
|
||||
const [, version, tier] = versionFirstEmbedded
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
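The docstring's examples can be checked directly against the matcher; the inputs below are illustrative and the last one is hypothetical:

```ts
// Illustrative only: canonical match keys produced by canonicalizeModelMatchKey above.
canonicalizeModelMatchKey('claude-4.5-opus')                         // 'claude-opus-4-5'
canonicalizeModelMatchKey('claude-opus-4.6')                         // 'claude-opus-4-6'
canonicalizeModelMatchKey('anthropic.claude-opus-4-5-20251101-v1:0') // 'claude-opus-4-5'
canonicalizeModelMatchKey('gpt-4o')                                  // 'gpt-4o' (no Claude pattern, returned normalized)
```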
const MODEL_PROVIDER_PRIORITY = [
|
||||
'anthropic',
|
||||
'bedrock',
|
||||
@@ -350,12 +367,23 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
|
||||
|
||||
const { provider, modelId } = parseModelKey(selectedModel)
|
||||
const targetModelId = modelId || selectedModel
|
||||
const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
|
||||
|
||||
const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
|
||||
const matches = models.filter((m) => {
|
||||
const candidateModelId = parseModelKey(m.id).modelId || m.id
|
||||
const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
|
||||
return (
|
||||
candidateModelId === targetModelId ||
|
||||
m.id.endsWith(`/${targetModelId}`) ||
|
||||
candidateMatchKey === targetMatchKey
|
||||
)
|
||||
})
|
||||
if (matches.length === 0) return selectedModel
|
||||
|
||||
if (provider) {
|
||||
const sameProvider = matches.find((m) => m.provider === provider)
|
||||
const sameProvider = matches.find(
|
||||
(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
|
||||
)
|
||||
if (sameProvider) return sameProvider.id
|
||||
}
|
||||
|
||||
@@ -425,11 +453,6 @@ function prepareSendContext(
|
||||
.catch((err) => {
|
||||
logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
|
||||
})
|
||||
get()
|
||||
.loadAutoAllowedTools()
|
||||
.catch((err) => {
|
||||
logger.warn('[Copilot] Failed to load auto-allowed tools', err)
|
||||
})
|
||||
|
||||
let newMessages: CopilotMessage[]
|
||||
if (revertState) {
|
||||
@@ -982,8 +1005,6 @@ async function resumeFromLiveStream(
|
||||
return false
|
||||
}
|
||||
|
||||
const cachedAutoAllowedTools = readAutoAllowedToolsFromStorage()
|
||||
|
||||
// Initial state (subset required for UI/streaming)
|
||||
const initialState = {
|
||||
mode: 'build' as const,
|
||||
@@ -1018,8 +1039,6 @@ const initialState = {
|
||||
streamingPlanContent: '',
|
||||
toolCallsById: {} as Record<string, CopilotToolCall>,
|
||||
suppressAutoSelect: false,
|
||||
autoAllowedTools: cachedAutoAllowedTools ?? ([] as string[]),
|
||||
autoAllowedToolsLoaded: cachedAutoAllowedTools !== null,
|
||||
activeStream: null as CopilotStreamInfo | null,
|
||||
messageQueue: [] as import('./types').QueuedMessage[],
|
||||
suppressAbortContinueOption: false,
|
||||
@@ -1058,8 +1077,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
agentPrefetch: get().agentPrefetch,
|
||||
availableModels: get().availableModels,
|
||||
isLoadingModels: get().isLoadingModels,
|
||||
autoAllowedTools: get().autoAllowedTools,
|
||||
autoAllowedToolsLoaded: get().autoAllowedToolsLoaded,
|
||||
})
|
||||
},
|
||||
|
||||
@@ -1093,11 +1110,12 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
const chatConfig = chat.config ?? {}
|
||||
const chatMode = chatConfig.mode || get().mode
|
||||
const chatModel = chatConfig.model || get().selectedModel
|
||||
const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)
|
||||
|
||||
logger.debug('[Chat] Restoring chat config', {
|
||||
chatId: chat.id,
|
||||
mode: chatMode,
|
||||
model: chatModel,
|
||||
model: normalizedChatModel,
|
||||
hasPlanArtifact: !!planArtifact,
|
||||
})
|
||||
|
||||
@@ -1119,7 +1137,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
showPlanTodos: false,
|
||||
streamingPlanContent: planArtifact,
|
||||
mode: chatMode,
|
||||
selectedModel: chatModel as CopilotStore['selectedModel'],
|
||||
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
|
||||
suppressAutoSelect: false,
|
||||
})
|
||||
|
||||
@@ -1292,6 +1310,10 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
const refreshedConfig = updatedCurrentChat.config ?? {}
|
||||
const refreshedMode = refreshedConfig.mode || get().mode
|
||||
const refreshedModel = refreshedConfig.model || get().selectedModel
|
||||
const normalizedRefreshedModel = normalizeSelectedModelKey(
|
||||
refreshedModel,
|
||||
get().availableModels
|
||||
)
|
||||
const toolCallsById = buildToolCallsById(normalizedMessages)
|
||||
|
||||
set({
|
||||
@@ -1300,7 +1322,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
toolCallsById,
|
||||
streamingPlanContent: refreshedPlanArtifact,
|
||||
mode: refreshedMode,
|
||||
selectedModel: refreshedModel as CopilotStore['selectedModel'],
|
||||
selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
|
||||
})
|
||||
}
|
||||
try {
|
||||
@@ -1320,11 +1342,15 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
const chatConfig = mostRecentChat.config ?? {}
|
||||
const chatMode = chatConfig.mode || get().mode
|
||||
const chatModel = chatConfig.model || get().selectedModel
|
||||
const normalizedChatModel = normalizeSelectedModelKey(
|
||||
chatModel,
|
||||
get().availableModels
|
||||
)
|
||||
|
||||
logger.info('[Chat] Auto-selecting most recent chat with config', {
|
||||
chatId: mostRecentChat.id,
|
||||
mode: chatMode,
|
||||
model: chatModel,
|
||||
model: normalizedChatModel,
|
||||
hasPlanArtifact: !!planArtifact,
|
||||
})
|
||||
|
||||
@@ -1336,7 +1362,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
toolCallsById,
|
||||
streamingPlanContent: planArtifact,
|
||||
mode: chatMode,
|
||||
selectedModel: chatModel as CopilotStore['selectedModel'],
|
||||
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
|
||||
})
|
||||
try {
|
||||
await get().loadMessageCheckpoints(mostRecentChat.id)
|
||||
@@ -1365,16 +1391,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
|
||||
// Send a message (streaming only)
|
||||
sendMessage: async (message: string, options = {}) => {
|
||||
if (!get().autoAllowedToolsLoaded) {
|
||||
try {
|
||||
await get().loadAutoAllowedTools()
|
||||
} catch (error) {
|
||||
logger.warn('[Copilot] Failed to preload auto-allowed tools before send', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput)
|
||||
if (!prepared) return
|
||||
|
||||
@@ -1641,7 +1657,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'edit_workflow') {
|
||||
if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
@@ -1650,7 +1666,9 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
}
|
||||
// Fallback to map if not found in messages
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
||||
const candidates = Object.values(toolCallsById).filter((t) =>
|
||||
isWorkflowEditToolCall(t.name, t.params)
|
||||
)
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
}
|
||||
@@ -2268,7 +2286,8 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
},
|
||||
|
||||
setSelectedModel: async (model) => {
|
||||
set({ selectedModel: model })
|
||||
const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
|
||||
set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
|
||||
},
|
||||
setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
|
||||
loadAvailableModels: async () => {
|
||||
@@ -2342,74 +2361,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
}
|
||||
},
|
||||
|
||||
loadAutoAllowedTools: async () => {
|
||||
try {
|
||||
logger.debug('[AutoAllowedTools] Loading from API...')
|
||||
const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH)
|
||||
logger.debug('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
const tools = data.autoAllowedTools ?? []
|
||||
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
||||
writeAutoAllowedToolsToStorage(tools)
|
||||
logger.debug('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
|
||||
} else {
|
||||
set({ autoAllowedToolsLoaded: true })
|
||||
logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status })
|
||||
}
|
||||
} catch (err) {
|
||||
set({ autoAllowedToolsLoaded: true })
|
||||
logger.error('[AutoAllowedTools] Failed to load', { error: err })
|
||||
}
|
||||
},
|
||||
|
||||
addAutoAllowedTool: async (toolId: string) => {
|
||||
try {
|
||||
logger.debug('[AutoAllowedTools] Adding tool...', { toolId })
|
||||
const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ toolId }),
|
||||
})
|
||||
logger.debug('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
logger.debug('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
|
||||
const tools = data.autoAllowedTools ?? []
|
||||
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
||||
writeAutoAllowedToolsToStorage(tools)
|
||||
logger.debug('[AutoAllowedTools] Added tool to store', { toolId })
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err })
|
||||
}
|
||||
},
|
||||
|
||||
removeAutoAllowedTool: async (toolId: string) => {
|
||||
try {
|
||||
const res = await fetch(
|
||||
`${COPILOT_AUTO_ALLOWED_TOOLS_API_PATH}?toolId=${encodeURIComponent(toolId)}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
}
|
||||
)
|
||||
if (res.ok) {
|
||||
const data = await res.json()
|
||||
const tools = data.autoAllowedTools ?? []
|
||||
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
||||
writeAutoAllowedToolsToStorage(tools)
|
||||
logger.debug('[AutoAllowedTools] Removed tool', { toolId })
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('[AutoAllowedTools] Failed to remove tool', { toolId, error: err })
|
||||
}
|
||||
},
|
||||
|
||||
isToolAutoAllowed: (toolId: string) => {
|
||||
const { autoAllowedTools } = get()
|
||||
return isToolAutoAllowedByList(toolId, autoAllowedTools)
|
||||
},
|
||||
|
||||
// Credential masking
|
||||
loadSensitiveCredentialIds: async () => {
|
||||
try {
|
||||
|
||||
@@ -26,6 +26,26 @@ export interface CopilotToolCall {
|
||||
params?: Record<string, unknown>
|
||||
input?: Record<string, unknown>
|
||||
display?: ClientToolDisplay
|
||||
/** Server-provided UI contract for this tool call phase */
|
||||
ui?: {
|
||||
title?: string
|
||||
phaseLabel?: string
|
||||
icon?: string
|
||||
showInterrupt?: boolean
|
||||
showRemember?: boolean
|
||||
autoAllowed?: boolean
|
||||
actions?: Array<{
|
||||
id: string
|
||||
label: string
|
||||
kind: 'accept' | 'reject'
|
||||
remember?: boolean
|
||||
}>
|
||||
}
|
||||
/** Server-provided execution routing contract */
|
||||
execution?: {
|
||||
target?: 'go' | 'go_subagent' | 'sim_server' | 'sim_client_capability' | string
|
||||
capabilityId?: string
|
||||
}
|
||||
/** Content streamed from a subagent (e.g., debug agent) */
|
||||
subAgentContent?: string
|
||||
/** Tool calls made by the subagent */
|
||||
@@ -167,10 +187,6 @@ export interface CopilotState {
|
||||
|
||||
// Per-message metadata captured at send-time for reliable stats
|
||||
|
||||
// Auto-allowed integration tools (tools that can run without confirmation)
|
||||
autoAllowedTools: string[]
|
||||
autoAllowedToolsLoaded: boolean
|
||||
|
||||
// Active stream metadata for reconnect/replay
|
||||
activeStream: CopilotStreamInfo | null
|
||||
|
||||
@@ -247,11 +263,6 @@ export interface CopilotActions {
|
||||
abortSignal?: AbortSignal
|
||||
) => Promise<void>
|
||||
handleNewChatCreation: (newChatId: string) => Promise<void>
|
||||
loadAutoAllowedTools: () => Promise<void>
|
||||
addAutoAllowedTool: (toolId: string) => Promise<void>
|
||||
removeAutoAllowedTool: (toolId: string) => Promise<void>
|
||||
isToolAutoAllowed: (toolId: string) => boolean
|
||||
|
||||
// Credential masking
|
||||
loadSensitiveCredentialIds: () => Promise<void>
|
||||
maskCredentialValue: (value: string) => string
|
||||
|
||||
@@ -15,7 +15,7 @@ import {
|
||||
captureBaselineSnapshot,
|
||||
cloneWorkflowState,
|
||||
createBatchedUpdater,
|
||||
findLatestEditWorkflowToolCallId,
|
||||
findLatestWorkflowEditToolCallId,
|
||||
getLatestUserMessageId,
|
||||
persistWorkflowStateToServer,
|
||||
} from './utils'
|
||||
@@ -334,7 +334,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
||||
})
|
||||
}
|
||||
|
||||
findLatestEditWorkflowToolCallId().then((toolCallId) => {
|
||||
findLatestWorkflowEditToolCallId().then((toolCallId) => {
|
||||
if (toolCallId) {
|
||||
import('@/stores/panel/copilot/store')
|
||||
.then(({ useCopilotStore }) => {
|
||||
@@ -439,7 +439,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
||||
})
|
||||
}
|
||||
|
||||
findLatestEditWorkflowToolCallId().then((toolCallId) => {
|
||||
findLatestWorkflowEditToolCallId().then((toolCallId) => {
|
||||
if (toolCallId) {
|
||||
import('@/stores/panel/copilot/store')
|
||||
.then(({ useCopilotStore }) => {
|
||||
|
||||
@@ -126,6 +126,21 @@ export async function getLatestUserMessageId(): Promise<string | null> {
|
||||
}
|
||||
|
||||
export async function findLatestEditWorkflowToolCallId(): Promise<string | undefined> {
|
||||
return findLatestWorkflowEditToolCallId()
|
||||
}
|
||||
|
||||
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
||||
if (name === 'edit_workflow') return true
|
||||
if (name !== 'workflow_change') return false
|
||||
|
||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||
if (mode === 'apply') return true
|
||||
|
||||
// Be permissive for legacy/incomplete events: apply calls always include proposalId.
|
||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||
}
|
||||
|
||||
export async function findLatestWorkflowEditToolCallId(): Promise<string | undefined> {
|
||||
try {
|
||||
const { useCopilotStore } = await import('@/stores/panel/copilot/store')
|
||||
const { messages, toolCallsById } = useCopilotStore.getState()
|
||||
@@ -134,17 +149,22 @@ export async function findLatestEditWorkflowToolCallId(): Promise<string | undef
|
||||
const message = messages[mi]
|
||||
if (message.role !== 'assistant' || !message.contentBlocks) continue
|
||||
for (const block of message.contentBlocks) {
|
||||
if (block?.type === 'tool_call' && block.toolCall?.name === 'edit_workflow') {
|
||||
if (
|
||||
block?.type === 'tool_call' &&
|
||||
isWorkflowEditToolCall(block.toolCall?.name, block.toolCall?.params)
|
||||
) {
|
||||
return block.toolCall?.id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const fallback = Object.values(toolCallsById).filter((call) => call.name === 'edit_workflow')
|
||||
const fallback = Object.values(toolCallsById).filter((call) =>
|
||||
isWorkflowEditToolCall(call.name, call.params)
|
||||
)
|
||||
|
||||
return fallback.length ? fallback[fallback.length - 1].id : undefined
|
||||
} catch (error) {
|
||||
logger.warn('Failed to resolve edit_workflow tool call id', { error })
|
||||
logger.warn('Failed to resolve workflow edit tool call id', { error })
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
114
apps/sim/tools/confluence/delete_label.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export interface ConfluenceDeleteLabelParams {
|
||||
accessToken: string
|
||||
domain: string
|
||||
pageId: string
|
||||
labelName: string
|
||||
cloudId?: string
|
||||
}
|
||||
|
||||
export interface ConfluenceDeleteLabelResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
ts: string
|
||||
pageId: string
|
||||
labelName: string
|
||||
deleted: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export const confluenceDeleteLabelTool: ToolConfig<
|
||||
ConfluenceDeleteLabelParams,
|
||||
ConfluenceDeleteLabelResponse
|
||||
> = {
|
||||
id: 'confluence_delete_label',
|
||||
name: 'Confluence Delete Label',
|
||||
description: 'Remove a label from a Confluence page.',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'confluence',
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token for Confluence',
|
||||
},
|
||||
domain: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
||||
},
|
||||
pageId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Confluence page ID to remove the label from',
|
||||
},
|
||||
labelName: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Name of the label to remove',
|
||||
},
|
||||
cloudId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description:
|
||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: () => '/api/tools/confluence/labels',
|
||||
method: 'DELETE',
|
||||
headers: (params: ConfluenceDeleteLabelParams) => ({
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
}),
|
||||
body: (params: ConfluenceDeleteLabelParams) => ({
|
||||
domain: params.domain,
|
||||
accessToken: params.accessToken,
|
||||
pageId: params.pageId?.trim(),
|
||||
labelName: params.labelName?.trim(),
|
||||
cloudId: params.cloudId,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
pageId: data.pageId ?? '',
|
||||
labelName: data.labelName ?? '',
|
||||
deleted: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
ts: TIMESTAMP_OUTPUT,
|
||||
pageId: {
|
||||
type: 'string',
|
||||
description: 'Page ID the label was removed from',
|
||||
},
|
||||
labelName: {
|
||||
type: 'string',
|
||||
description: 'Name of the removed label',
|
||||
},
|
||||
deleted: {
|
||||
type: 'boolean',
|
||||
description: 'Deletion status',
|
||||
},
|
||||
},
|
||||
}
|
||||
105
apps/sim/tools/confluence/delete_page_property.ts
Normal file
@@ -0,0 +1,105 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceDeletePagePropertyParams {
  accessToken: string
  domain: string
  pageId: string
  propertyId: string
  cloudId?: string
}

export interface ConfluenceDeletePagePropertyResponse {
  success: boolean
  output: {
    ts: string
    pageId: string
    propertyId: string
    deleted: boolean
  }
}

export const confluenceDeletePagePropertyTool: ToolConfig<
  ConfluenceDeletePagePropertyParams,
  ConfluenceDeletePagePropertyResponse
> = {
  id: 'confluence_delete_page_property',
  name: 'Confluence Delete Page Property',
  description: 'Delete a content property from a Confluence page by its property ID.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    pageId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the page containing the property',
    },
    propertyId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the property to delete',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: () => '/api/tools/confluence/page-properties',
    method: 'DELETE',
    headers: (params: ConfluenceDeletePagePropertyParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
    body: (params: ConfluenceDeletePagePropertyParams) => ({
      domain: params.domain,
      accessToken: params.accessToken,
      pageId: params.pageId?.trim(),
      propertyId: params.propertyId?.trim(),
      cloudId: params.cloudId,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        pageId: data.pageId ?? '',
        propertyId: data.propertyId ?? '',
        deleted: true,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    pageId: { type: 'string', description: 'ID of the page' },
    propertyId: { type: 'string', description: 'ID of the deleted property' },
    deleted: { type: 'boolean', description: 'Deletion status' },
  },
}
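A quick way to sanity-check a config like the one above is to run its `transformResponse` handler against a mocked `Response`. This is a minimal sketch and not part of the diff; the `@/` import alias assumes the repo's path mapping, and the demo function is hypothetical.

```ts
import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'

// Hypothetical check: push a fake JSON payload through transformResponse
// and confirm the normalized output shape the tool promises.
async function demoDeletePageProperty() {
  const fake = new Response(JSON.stringify({ pageId: '12345', propertyId: 'prop-1' }), {
    headers: { 'Content-Type': 'application/json' },
  })
  const result = await confluenceDeletePagePropertyTool.transformResponse?.(fake)
  // Expected: { ts: <ISO timestamp>, pageId: '12345', propertyId: 'prop-1', deleted: true }
  console.log(result?.output)
}

demoDeletePageProperty()
```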
143 apps/sim/tools/confluence/get_pages_by_label.ts Normal file
@@ -0,0 +1,143 @@
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceGetPagesByLabelParams {
  accessToken: string
  domain: string
  labelId: string
  limit?: number
  cursor?: string
  cloudId?: string
}

export interface ConfluenceGetPagesByLabelResponse {
  success: boolean
  output: {
    ts: string
    labelId: string
    pages: Array<{
      id: string
      title: string
      status: string | null
      spaceId: string | null
      parentId: string | null
      authorId: string | null
      createdAt: string | null
      version: {
        number: number
        message?: string
        createdAt?: string
      } | null
    }>
    nextCursor: string | null
  }
}

export const confluenceGetPagesByLabelTool: ToolConfig<
  ConfluenceGetPagesByLabelParams,
  ConfluenceGetPagesByLabelResponse
> = {
  id: 'confluence_get_pages_by_label',
  name: 'Confluence Get Pages by Label',
  description: 'Retrieve all pages that have a specific label applied.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    labelId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the label to get pages for',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of pages to return (default: 50, max: 250)',
    },
    cursor: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Pagination cursor from previous response',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: (params: ConfluenceGetPagesByLabelParams) => {
      const query = new URLSearchParams({
        domain: params.domain,
        accessToken: params.accessToken,
        labelId: params.labelId,
        limit: String(params.limit || 50),
      })
      if (params.cursor) {
        query.set('cursor', params.cursor)
      }
      if (params.cloudId) {
        query.set('cloudId', params.cloudId)
      }
      return `/api/tools/confluence/pages-by-label?${query.toString()}`
    },
    method: 'GET',
    headers: (params: ConfluenceGetPagesByLabelParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        labelId: data.labelId ?? '',
        pages: data.pages ?? [],
        nextCursor: data.nextCursor ?? null,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    labelId: { type: 'string', description: 'ID of the label' },
    pages: {
      type: 'array',
      description: 'Array of pages with this label',
      items: {
        type: 'object',
        properties: PAGE_ITEM_PROPERTIES,
      },
    },
    nextCursor: {
      type: 'string',
      description: 'Cursor for fetching the next page of results',
      optional: true,
    },
  },
}
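The cursor-based tools above (`get_pages_by_label`, and `list_space_labels` further down) hand back a `nextCursor` that callers are expected to feed into the next request. A self-contained sketch of that loop; the `runTool` callback and its result types are assumptions for illustration only:

```ts
interface PageItem {
  id: string
  title: string
}

interface PagesByLabelOutput {
  pages: PageItem[]
  nextCursor: string | null
}

// Drain every page for a label by following nextCursor until it is null.
async function collectPagesByLabel(
  runTool: (params: { labelId: string; cursor?: string }) => Promise<PagesByLabelOutput>,
  labelId: string
): Promise<PageItem[]> {
  const all: PageItem[] = []
  let cursor: string | undefined
  do {
    const out = await runTool({ labelId, cursor })
    all.push(...out.pages)
    cursor = out.nextCursor ?? undefined
  } while (cursor)
  return all
}
```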
@@ -5,11 +5,14 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'
import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost'
import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors'
import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children'
import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version'
import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
import { confluenceGetSpaceTool } from '@/tools/confluence/get_space'
import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments'
import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts'
@@ -19,6 +22,7 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels'
import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties'
import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions'
import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space'
import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'
import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces'
import { confluenceRetrieveTool } from '@/tools/confluence/retrieve'
import { confluenceSearchTool } from '@/tools/confluence/search'
@@ -78,6 +82,7 @@ export {
  // Page Properties Tools
  confluenceListPagePropertiesTool,
  confluenceCreatePagePropertyTool,
  confluenceDeletePagePropertyTool,
  // Blog Post Tools
  confluenceListBlogPostsTool,
  confluenceGetBlogPostTool,
@@ -98,6 +103,9 @@ export {
  // Label Tools
  confluenceListLabelsTool,
  confluenceAddLabelTool,
  confluenceDeleteLabelTool,
  confluenceGetPagesByLabelTool,
  confluenceListSpaceLabelsTool,
  // Space Tools
  confluenceGetSpaceTool,
  confluenceListSpacesTool,
134 apps/sim/tools/confluence/list_space_labels.ts Normal file
@@ -0,0 +1,134 @@
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceListSpaceLabelsParams {
  accessToken: string
  domain: string
  spaceId: string
  limit?: number
  cursor?: string
  cloudId?: string
}

export interface ConfluenceListSpaceLabelsResponse {
  success: boolean
  output: {
    ts: string
    spaceId: string
    labels: Array<{
      id: string
      name: string
      prefix: string
    }>
    nextCursor: string | null
  }
}

export const confluenceListSpaceLabelsTool: ToolConfig<
  ConfluenceListSpaceLabelsParams,
  ConfluenceListSpaceLabelsResponse
> = {
  id: 'confluence_list_space_labels',
  name: 'Confluence List Space Labels',
  description: 'List all labels associated with a Confluence space.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    spaceId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the Confluence space to list labels from',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of labels to return (default: 25, max: 250)',
    },
    cursor: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Pagination cursor from previous response',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: (params: ConfluenceListSpaceLabelsParams) => {
      const query = new URLSearchParams({
        domain: params.domain,
        accessToken: params.accessToken,
        spaceId: params.spaceId,
        limit: String(params.limit || 25),
      })
      if (params.cursor) {
        query.set('cursor', params.cursor)
      }
      if (params.cloudId) {
        query.set('cloudId', params.cloudId)
      }
      return `/api/tools/confluence/space-labels?${query.toString()}`
    },
    method: 'GET',
    headers: (params: ConfluenceListSpaceLabelsParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        spaceId: data.spaceId ?? '',
        labels: data.labels ?? [],
        nextCursor: data.nextCursor ?? null,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    spaceId: { type: 'string', description: 'ID of the space' },
    labels: {
      type: 'array',
      description: 'Array of labels on the space',
      items: {
        type: 'object',
        properties: LABEL_ITEM_PROPERTIES,
      },
    },
    nextCursor: {
      type: 'string',
      description: 'Cursor for fetching the next page of results',
      optional: true,
    },
  },
}
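Because the URL builder above is a pure function of its params, its output can be inspected directly. A hedged sketch; the `@/` alias assumes the repo's path mapping, and the `typeof` guard accounts for `request.url` possibly being declared as either a string or a function in `ToolConfig`:

```ts
import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'

const params = {
  accessToken: 'token',
  domain: 'yourcompany.atlassian.net',
  spaceId: '98765',
  limit: 100,
  cursor: 'abc',
}

const { url } = confluenceListSpaceLabelsTool.request
const resolved = typeof url === 'function' ? url(params) : url
// e.g. /api/tools/confluence/space-labels?domain=...&accessToken=...&spaceId=98765&limit=100&cursor=abc
console.log(resolved)
```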
@@ -118,10 +118,13 @@ import {
  confluenceCreatePageTool,
  confluenceDeleteAttachmentTool,
  confluenceDeleteCommentTool,
  confluenceDeleteLabelTool,
  confluenceDeletePagePropertyTool,
  confluenceDeletePageTool,
  confluenceGetBlogPostTool,
  confluenceGetPageAncestorsTool,
  confluenceGetPageChildrenTool,
  confluenceGetPagesByLabelTool,
  confluenceGetPageVersionTool,
  confluenceGetSpaceTool,
  confluenceListAttachmentsTool,
@@ -132,6 +135,7 @@ import {
  confluenceListPagePropertiesTool,
  confluenceListPagesInSpaceTool,
  confluenceListPageVersionsTool,
  confluenceListSpaceLabelsTool,
  confluenceListSpacesTool,
  confluenceRetrieveTool,
  confluenceSearchInSpaceTool,
@@ -2667,6 +2671,10 @@ export const tools: Record<string, ToolConfig> = {
  confluence_delete_attachment: confluenceDeleteAttachmentTool,
  confluence_list_labels: confluenceListLabelsTool,
  confluence_add_label: confluenceAddLabelTool,
  confluence_get_pages_by_label: confluenceGetPagesByLabelTool,
  confluence_list_space_labels: confluenceListSpaceLabelsTool,
  confluence_delete_label: confluenceDeleteLabelTool,
  confluence_delete_page_property: confluenceDeletePagePropertyTool,
  confluence_get_space: confluenceGetSpaceTool,
  confluence_list_spaces: confluenceListSpacesTool,
  cursor_list_agents: cursorListAgentsTool,
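With the registrations above in place, a tool is presumably resolved from the `tools` record by its string id at execution time. A hedged sketch; the registry's import path is a guess, since the diff does not show the file name:

```ts
import { tools } from '@/tools/registry' // path is an assumption; only the `tools` record itself appears in the diff

const tool = tools['confluence_delete_page_property']
if (tool) {
  // The key must match the ToolConfig.id declared in delete_page_property.ts above.
  console.log(tool.name, tool.version)
}
```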
@@ -1,274 +0,0 @@
CREATE TYPE "public"."credential_member_role" AS ENUM('admin', 'member');--> statement-breakpoint
CREATE TYPE "public"."credential_member_status" AS ENUM('active', 'pending', 'revoked');--> statement-breakpoint
CREATE TYPE "public"."credential_type" AS ENUM('oauth', 'env_workspace', 'env_personal');--> statement-breakpoint
CREATE TABLE "credential" (
  "id" text PRIMARY KEY NOT NULL,
  "workspace_id" text NOT NULL,
  "type" "credential_type" NOT NULL,
  "display_name" text NOT NULL,
  "provider_id" text,
  "account_id" text,
  "env_key" text,
  "env_owner_user_id" text,
  "created_by" text NOT NULL,
  "created_at" timestamp DEFAULT now() NOT NULL,
  "updated_at" timestamp DEFAULT now() NOT NULL,
  CONSTRAINT "credential_oauth_source_check" CHECK ((type <> 'oauth') OR (account_id IS NOT NULL AND provider_id IS NOT NULL)),
  CONSTRAINT "credential_workspace_env_source_check" CHECK ((type <> 'env_workspace') OR (env_key IS NOT NULL AND env_owner_user_id IS NULL)),
  CONSTRAINT "credential_personal_env_source_check" CHECK ((type <> 'env_personal') OR (env_key IS NOT NULL AND env_owner_user_id IS NOT NULL))
);
--> statement-breakpoint
CREATE TABLE "credential_member" (
  "id" text PRIMARY KEY NOT NULL,
  "credential_id" text NOT NULL,
  "user_id" text NOT NULL,
  "role" "credential_member_role" DEFAULT 'member' NOT NULL,
  "status" "credential_member_status" DEFAULT 'active' NOT NULL,
  "joined_at" timestamp,
  "invited_by" text,
  "created_at" timestamp DEFAULT now() NOT NULL,
  "updated_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
ALTER TABLE "credential" ADD CONSTRAINT "credential_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "credential" ADD CONSTRAINT "credential_account_id_account_id_fk" FOREIGN KEY ("account_id") REFERENCES "public"."account"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "credential" ADD CONSTRAINT "credential_env_owner_user_id_user_id_fk" FOREIGN KEY ("env_owner_user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "credential" ADD CONSTRAINT "credential_created_by_user_id_fk" FOREIGN KEY ("created_by") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "credential_member" ADD CONSTRAINT "credential_member_credential_id_credential_id_fk" FOREIGN KEY ("credential_id") REFERENCES "public"."credential"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "credential_member" ADD CONSTRAINT "credential_member_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "credential_member" ADD CONSTRAINT "credential_member_invited_by_user_id_fk" FOREIGN KEY ("invited_by") REFERENCES "public"."user"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "credential_workspace_id_idx" ON "credential" USING btree ("workspace_id");--> statement-breakpoint
CREATE INDEX "credential_type_idx" ON "credential" USING btree ("type");--> statement-breakpoint
CREATE INDEX "credential_provider_id_idx" ON "credential" USING btree ("provider_id");--> statement-breakpoint
CREATE INDEX "credential_account_id_idx" ON "credential" USING btree ("account_id");--> statement-breakpoint
CREATE INDEX "credential_env_owner_user_id_idx" ON "credential" USING btree ("env_owner_user_id");--> statement-breakpoint
CREATE UNIQUE INDEX "credential_workspace_account_unique" ON "credential" USING btree ("workspace_id","account_id") WHERE account_id IS NOT NULL;--> statement-breakpoint
CREATE UNIQUE INDEX "credential_workspace_env_unique" ON "credential" USING btree ("workspace_id","type","env_key") WHERE type = 'env_workspace';--> statement-breakpoint
CREATE UNIQUE INDEX "credential_workspace_personal_env_unique" ON "credential" USING btree ("workspace_id","type","env_key","env_owner_user_id") WHERE type = 'env_personal';--> statement-breakpoint
CREATE INDEX "credential_member_credential_id_idx" ON "credential_member" USING btree ("credential_id");--> statement-breakpoint
CREATE INDEX "credential_member_user_id_idx" ON "credential_member" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX "credential_member_role_idx" ON "credential_member" USING btree ("role");--> statement-breakpoint
CREATE INDEX "credential_member_status_idx" ON "credential_member" USING btree ("status");--> statement-breakpoint
CREATE UNIQUE INDEX "credential_member_unique" ON "credential_member" USING btree ("credential_id","user_id");
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "pending_credential_draft" (
  "id" text PRIMARY KEY NOT NULL,
  "user_id" text NOT NULL,
  "workspace_id" text NOT NULL,
  "provider_id" text NOT NULL,
  "display_name" text NOT NULL,
  "expires_at" timestamp NOT NULL,
  "created_at" timestamp DEFAULT now() NOT NULL
);
--> statement-breakpoint
ALTER TABLE "pending_credential_draft" ADD CONSTRAINT "pending_credential_draft_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "pending_credential_draft" ADD CONSTRAINT "pending_credential_draft_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE UNIQUE INDEX "pending_draft_user_provider_ws" ON "pending_credential_draft" USING btree ("user_id","provider_id","workspace_id");
--> statement-breakpoint
DROP INDEX IF EXISTS "account_user_provider_unique";
--> statement-breakpoint
WITH workspace_user_access AS (
  SELECT DISTINCT w.id AS workspace_id, p.user_id
  FROM "permissions" p
  INNER JOIN "workspace" w
    ON w.id = p.entity_id
  WHERE p.entity_type = 'workspace'
  UNION
  SELECT w.id AS workspace_id, w.owner_id AS user_id
  FROM "workspace" w
  UNION
  SELECT DISTINCT wf.workspace_id AS workspace_id, wf.user_id
  FROM "workflow" wf
  INNER JOIN "workspace" w
    ON w.id = wf.workspace_id
  WHERE wf.workspace_id IS NOT NULL
)
INSERT INTO "credential" (
  "id",
  "workspace_id",
  "type",
  "display_name",
  "provider_id",
  "account_id",
  "created_by",
  "created_at",
  "updated_at"
)
SELECT
  'cred_' || md5('oauth:' || wua.workspace_id || ':' || a.id) AS id,
  wua.workspace_id,
  'oauth'::"credential_type",
  COALESCE(NULLIF(a.account_id, ''), a.provider_id) AS display_name,
  a.provider_id,
  a.id,
  a.user_id,
  now(),
  now()
FROM "account" a
INNER JOIN workspace_user_access wua
  ON wua.user_id = a.user_id
ON CONFLICT DO NOTHING;
--> statement-breakpoint
INSERT INTO "credential" (
  "id",
  "workspace_id",
  "type",
  "display_name",
  "env_key",
  "created_by",
  "created_at",
  "updated_at"
)
SELECT
  'cred_' || md5('env_workspace:' || we.workspace_id || ':' || env.key) AS id,
  we.workspace_id,
  'env_workspace'::"credential_type",
  env.key AS display_name,
  env.key,
  COALESCE(wf_owner.user_id, w.owner_id),
  now(),
  now()
FROM "workspace_environment" we
INNER JOIN "workspace" w
  ON w.id = we.workspace_id
LEFT JOIN LATERAL (
  SELECT wf.user_id
  FROM "workflow" wf
  WHERE wf.workspace_id = we.workspace_id
  ORDER BY wf.created_at ASC
  LIMIT 1
) wf_owner
  ON TRUE
CROSS JOIN LATERAL jsonb_each_text(COALESCE(we.variables::jsonb, '{}'::jsonb)) AS env(key, value)
ON CONFLICT DO NOTHING;
--> statement-breakpoint
WITH workflow_workspace_owners AS (
  SELECT DISTINCT wf.workspace_id, wf.user_id
  FROM "workflow" wf
  INNER JOIN "workspace" w
    ON w.id = wf.workspace_id
  WHERE wf.workspace_id IS NOT NULL
)
INSERT INTO "credential" (
  "id",
  "workspace_id",
  "type",
  "display_name",
  "env_key",
  "env_owner_user_id",
  "created_by",
  "created_at",
  "updated_at"
)
SELECT
  'cred_' || md5('env_personal:' || wwo.workspace_id || ':' || e.user_id || ':' || env.key) AS id,
  wwo.workspace_id,
  'env_personal'::"credential_type",
  env.key AS display_name,
  env.key,
  e.user_id,
  e.user_id,
  now(),
  now()
FROM "environment" e
INNER JOIN workflow_workspace_owners wwo
  ON wwo.user_id = e.user_id
CROSS JOIN LATERAL jsonb_each_text(COALESCE(e.variables::jsonb, '{}'::jsonb)) AS env(key, value)
ON CONFLICT DO NOTHING;
--> statement-breakpoint
WITH workspace_user_access AS (
  SELECT DISTINCT w.id AS workspace_id, p.user_id
  FROM "permissions" p
  INNER JOIN "workspace" w
    ON w.id = p.entity_id
  WHERE p.entity_type = 'workspace'
  UNION
  SELECT w.id AS workspace_id, w.owner_id AS user_id
  FROM "workspace" w
  UNION
  SELECT DISTINCT wf.workspace_id AS workspace_id, wf.user_id
  FROM "workflow" wf
  INNER JOIN "workspace" w
    ON w.id = wf.workspace_id
  WHERE wf.workspace_id IS NOT NULL
),
workflow_workspace_owners AS (
  SELECT DISTINCT wf.workspace_id, wf.user_id
  FROM "workflow" wf
  INNER JOIN "workspace" w
    ON w.id = wf.workspace_id
  WHERE wf.workspace_id IS NOT NULL
)
INSERT INTO "credential_member" (
  "id",
  "credential_id",
  "user_id",
  "role",
  "status",
  "joined_at",
  "invited_by",
  "created_at",
  "updated_at"
)
SELECT
  'credm_' || md5(c.id || ':' || wua.user_id) AS id,
  c.id,
  wua.user_id,
  CASE
    WHEN c.type = 'oauth'::"credential_type" AND c.created_by = wua.user_id THEN 'admin'::"credential_member_role"
    WHEN c.type = 'env_workspace'::"credential_type" AND (
      EXISTS (
        SELECT 1
        FROM workflow_workspace_owners wwo
        WHERE wwo.workspace_id = c.workspace_id
          AND wwo.user_id = wua.user_id
      )
      OR (
        NOT EXISTS (
          SELECT 1
          FROM workflow_workspace_owners wwo
          WHERE wwo.workspace_id = c.workspace_id
        )
        AND w.owner_id = wua.user_id
      )
    ) THEN 'admin'::"credential_member_role"
    ELSE 'member'::"credential_member_role"
  END AS role,
  'active'::"credential_member_status",
  now(),
  c.created_by,
  now(),
  now()
FROM "credential" c
INNER JOIN "workspace" w
  ON w.id = c.workspace_id
INNER JOIN workspace_user_access wua
  ON wua.workspace_id = c.workspace_id
WHERE c.type IN ('oauth'::"credential_type", 'env_workspace'::"credential_type")
ON CONFLICT DO NOTHING;
--> statement-breakpoint
INSERT INTO "credential_member" (
  "id",
  "credential_id",
  "user_id",
  "role",
  "status",
  "joined_at",
  "invited_by",
  "created_at",
  "updated_at"
)
SELECT
  'credm_' || md5(c.id || ':' || c.env_owner_user_id) AS id,
  c.id,
  c.env_owner_user_id,
  'admin'::"credential_member_role",
  'active'::"credential_member_status",
  now(),
  c.created_by,
  now(),
  now()
FROM "credential" c
WHERE c.type = 'env_personal'::"credential_type"
  AND c.env_owner_user_id IS NOT NULL
ON CONFLICT DO NOTHING;
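For reference, the removed backfill above derives deterministic primary keys from an md5 of each credential's natural key, and pairs that with ON CONFLICT DO NOTHING, so re-running it should not create duplicates. A hedged TypeScript sketch of the same convention; the helper name is invented for illustration, and only the `'cred_' || md5(...)` scheme comes from the SQL:

```ts
import { createHash } from 'node:crypto'

// Mirrors 'cred_' || md5('oauth:' || workspace_id || ':' || account_id) from the migration.
function credentialId(kind: 'oauth' | 'env_workspace' | 'env_personal', parts: string[]): string {
  const digest = createHash('md5').update(`${kind}:${parts.join(':')}`).digest('hex')
  return `cred_${digest}`
}

console.log(credentialId('oauth', ['ws_1', 'acct_1'])) // stable id for the same workspace/account pair
```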
File diff suppressed because it is too large
@@ -1072,13 +1072,6 @@
      "when": 1770410282842,
      "tag": "0153_complete_arclight",
      "breakpoints": true
    },
    {
      "idx": 154,
      "version": "7",
      "when": 1770840006821,
      "tag": "0154_luxuriant_maria_hill",
      "breakpoints": true
    }
  ]
}
@@ -89,6 +89,10 @@ export const account = pgTable(
      table.accountId,
      table.providerId
    ),
    uniqueUserProvider: uniqueIndex('account_user_provider_unique').on(
      table.userId,
      table.providerId
    ),
  })
)
@@ -2007,118 +2011,6 @@ export const usageLog = pgTable(
  })
)

export const credentialTypeEnum = pgEnum('credential_type', [
  'oauth',
  'env_workspace',
  'env_personal',
])

export const credential = pgTable(
  'credential',
  {
    id: text('id').primaryKey(),
    workspaceId: text('workspace_id')
      .notNull()
      .references(() => workspace.id, { onDelete: 'cascade' }),
    type: credentialTypeEnum('type').notNull(),
    displayName: text('display_name').notNull(),
    providerId: text('provider_id'),
    accountId: text('account_id').references(() => account.id, { onDelete: 'cascade' }),
    envKey: text('env_key'),
    envOwnerUserId: text('env_owner_user_id').references(() => user.id, { onDelete: 'cascade' }),
    createdBy: text('created_by')
      .notNull()
      .references(() => user.id, { onDelete: 'cascade' }),
    createdAt: timestamp('created_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
  },
  (table) => ({
    workspaceIdIdx: index('credential_workspace_id_idx').on(table.workspaceId),
    typeIdx: index('credential_type_idx').on(table.type),
    providerIdIdx: index('credential_provider_id_idx').on(table.providerId),
    accountIdIdx: index('credential_account_id_idx').on(table.accountId),
    envOwnerUserIdIdx: index('credential_env_owner_user_id_idx').on(table.envOwnerUserId),
    workspaceAccountUnique: uniqueIndex('credential_workspace_account_unique')
      .on(table.workspaceId, table.accountId)
      .where(sql`account_id IS NOT NULL`),
    workspaceEnvUnique: uniqueIndex('credential_workspace_env_unique')
      .on(table.workspaceId, table.type, table.envKey)
      .where(sql`type = 'env_workspace'`),
    workspacePersonalEnvUnique: uniqueIndex('credential_workspace_personal_env_unique')
      .on(table.workspaceId, table.type, table.envKey, table.envOwnerUserId)
      .where(sql`type = 'env_personal'`),
    oauthSourceConstraint: check(
      'credential_oauth_source_check',
      sql`(type <> 'oauth') OR (account_id IS NOT NULL AND provider_id IS NOT NULL)`
    ),
    workspaceEnvSourceConstraint: check(
      'credential_workspace_env_source_check',
      sql`(type <> 'env_workspace') OR (env_key IS NOT NULL AND env_owner_user_id IS NULL)`
    ),
    personalEnvSourceConstraint: check(
      'credential_personal_env_source_check',
      sql`(type <> 'env_personal') OR (env_key IS NOT NULL AND env_owner_user_id IS NOT NULL)`
    ),
  })
)

export const credentialMemberRoleEnum = pgEnum('credential_member_role', ['admin', 'member'])
export const credentialMemberStatusEnum = pgEnum('credential_member_status', [
  'active',
  'pending',
  'revoked',
])

export const credentialMember = pgTable(
  'credential_member',
  {
    id: text('id').primaryKey(),
    credentialId: text('credential_id')
      .notNull()
      .references(() => credential.id, { onDelete: 'cascade' }),
    userId: text('user_id')
      .notNull()
      .references(() => user.id, { onDelete: 'cascade' }),
    role: credentialMemberRoleEnum('role').notNull().default('member'),
    status: credentialMemberStatusEnum('status').notNull().default('active'),
    joinedAt: timestamp('joined_at'),
    invitedBy: text('invited_by').references(() => user.id, { onDelete: 'set null' }),
    createdAt: timestamp('created_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
  },
  (table) => ({
    credentialIdIdx: index('credential_member_credential_id_idx').on(table.credentialId),
    userIdIdx: index('credential_member_user_id_idx').on(table.userId),
    roleIdx: index('credential_member_role_idx').on(table.role),
    statusIdx: index('credential_member_status_idx').on(table.status),
    uniqueMembership: uniqueIndex('credential_member_unique').on(table.credentialId, table.userId),
  })
)

export const pendingCredentialDraft = pgTable(
  'pending_credential_draft',
  {
    id: text('id').primaryKey(),
    userId: text('user_id')
      .notNull()
      .references(() => user.id, { onDelete: 'cascade' }),
    workspaceId: text('workspace_id')
      .notNull()
      .references(() => workspace.id, { onDelete: 'cascade' }),
    providerId: text('provider_id').notNull(),
    displayName: text('display_name').notNull(),
    expiresAt: timestamp('expires_at').notNull(),
    createdAt: timestamp('created_at').notNull().defaultNow(),
  },
  (table) => ({
    uniqueDraft: uniqueIndex('pending_draft_user_provider_ws').on(
      table.userId,
      table.providerId,
      table.workspaceId
    ),
  })
)

export const credentialSet = pgTable(
  'credential_set',
  {