Compare commits

..

3 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Vikhyath Mondreti | 508772cf58 | make it autoselect personal secret when create secret is clicked | 2026-02-11 20:06:27 -08:00 |
| Vikhyath Mondreti | 7314675f50 | checkpoint | 2026-02-11 19:58:24 -08:00 |
| Vikhyath Mondreti | 253161afba | feat(mult-credentials): progress | 2026-02-11 15:18:31 -08:00 |
108 changed files with 17517 additions and 4536 deletions

View File

@@ -41,6 +41,9 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite
 | Tastenkombination | Aktion |
 |----------|--------|
+| `C` | Copilot-Tab fokussieren |
+| `T` | Toolbar-Tab fokussieren |
+| `E` | Editor-Tab fokussieren |
 | `Mod` + `F` | Toolbar-Suche fokussieren |
 ## Globale Navigation

View File

@@ -43,6 +43,9 @@ These shortcuts switch between panel tabs on the right side of the canvas.
 | Shortcut | Action |
 |----------|--------|
+| `C` | Focus Copilot tab |
+| `T` | Focus Toolbar tab |
+| `E` | Focus Editor tab |
 | `Mod` + `F` | Focus Toolbar search |
 ## Global Navigation

View File

@@ -399,28 +399,6 @@ Create a new custom property (metadata) on a Confluence page.
 | ↳ `authorId` | string | Account ID of the version author |
 | ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
### `confluence_delete_page_property`
Delete a content property from a Confluence page by its property ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | The ID of the page containing the property |
| `propertyId` | string | Yes | The ID of the property to delete |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | ID of the page |
| `propertyId` | string | ID of the deleted property |
| `deleted` | boolean | Deletion status |
 ### `confluence_search`
 Search for content across Confluence pages, blog posts, and other content.
@@ -894,90 +872,6 @@ Add a label to a Confluence page for organization and categorization.
 | `labelName` | string | Name of the added label |
 | `labelId` | string | ID of the added label |
### `confluence_delete_label`
Remove a label from a Confluence page.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | Confluence page ID to remove the label from |
| `labelName` | string | Yes | Name of the label to remove |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | Page ID the label was removed from |
| `labelName` | string | Name of the removed label |
| `deleted` | boolean | Deletion status |
### `confluence_get_pages_by_label`
Retrieve all pages that have a specific label applied.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `labelId` | string | Yes | The ID of the label to get pages for |
| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `labelId` | string | ID of the label |
| `pages` | array | Array of pages with this label |
| ↳ `id` | string | Unique page identifier |
| ↳ `title` | string | Page title |
| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) |
| ↳ `spaceId` | string | ID of the space containing the page |
| ↳ `parentId` | string | ID of the parent page \(null if top-level\) |
| ↳ `authorId` | string | Account ID of the page author |
| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
| ↳ `version` | object | Page version information |
| ↳ `number` | number | Version number |
| ↳ `message` | string | Version message |
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
| `nextCursor` | string | Cursor for fetching the next page of results |
### `confluence_list_space_labels`
List all labels associated with a Confluence space.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `spaceId` | string | ID of the space |
| `labels` | array | Array of labels on the space |
| ↳ `id` | string | Unique label identifier |
| ↳ `name` | string | Label name |
| ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) |
| `nextCursor` | string | Cursor for fetching the next page of results |
 ### `confluence_get_space`
 Get details about a specific Confluence space.

View File

@@ -42,6 +42,9 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo
 | Atajo | Acción |
 |----------|--------|
+| `C` | Enfocar pestaña Copilot |
+| `T` | Enfocar pestaña Barra de herramientas |
+| `E` | Enfocar pestaña Editor |
 | `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |
 ## Navegación global

View File

@@ -42,6 +42,9 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté
 | Raccourci | Action |
 |----------|--------|
+| `C` | Activer l'onglet Copilot |
+| `T` | Activer l'onglet Barre d'outils |
+| `E` | Activer l'onglet Éditeur |
 | `Mod` + `F` | Activer la recherche dans la barre d'outils |
 ## Navigation globale

View File

@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'
 | ショートカット | 操作 |
 |----------|--------|
+| `C` | Copilotタブにフォーカス |
+| `T` | Toolbarタブにフォーカス |
+| `E` | Editorタブにフォーカス |
 | `Mod` + `F` | Toolbar検索にフォーカス |
 ## グローバルナビゲーション

View File

@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'
 | 快捷键 | 操作 |
 |----------|--------|
+| `C` | 聚焦 Copilot 标签页 |
+| `T` | 聚焦 Toolbar 标签页 |
+| `E` | 聚焦 Editor 标签页 |
 | `Mod` + `F` | 聚焦 Toolbar 搜索 |
 ## 全局导航

View File

@@ -1,7 +1,7 @@
 import { db } from '@sim/db'
 import { account } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
-import { and, eq } from 'drizzle-orm'
+import { and, desc, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
@@ -31,15 +31,13 @@ export async function GET(request: NextRequest) {
       })
       .from(account)
       .where(and(...whereConditions))
+      .orderBy(desc(account.updatedAt))
-    // Use the user's email as the display name (consistent with credential selector)
-    const userEmail = session.user.email
     const accountsWithDisplayName = accounts.map((acc) => ({
       id: acc.id,
       accountId: acc.accountId,
       providerId: acc.providerId,
-      displayName: userEmail || acc.providerId,
+      displayName: acc.accountId || acc.providerId,
     }))
     return NextResponse.json({ accounts: accountsWithDisplayName })

View File

@@ -1,5 +1,5 @@
 import { db } from '@sim/db'
-import { account, user } from '@sim/db/schema'
+import { account, credential, credentialMember, user } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { and, eq } from 'drizzle-orm'
 import { jwtDecode } from 'jwt-decode'
@@ -7,8 +7,10 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
 import { evaluateScopeCoverage, type OAuthProvider, parseProvider } from '@/lib/oauth'
 import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
+import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
 export const dynamic = 'force-dynamic'
@@ -18,6 +20,7 @@ const credentialsQuerySchema = z
   .object({
     provider: z.string().nullish(),
     workflowId: z.string().uuid('Workflow ID must be a valid UUID').nullish(),
+    workspaceId: z.string().uuid('Workspace ID must be a valid UUID').nullish(),
     credentialId: z
       .string()
       .min(1, 'Credential ID must not be empty')
@@ -35,6 +38,79 @@ interface GoogleIdToken {
   name?: string
 }
function toCredentialResponse(
id: string,
displayName: string,
providerId: string,
updatedAt: Date,
scope: string | null
) {
const storedScope = scope?.trim()
const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
const scopeEvaluation = evaluateScopeCoverage(providerId, grantedScopes)
const [_, featureType = 'default'] = providerId.split('-')
return {
id,
name: displayName,
provider: providerId,
lastUsed: updatedAt.toISOString(),
isDefault: featureType === 'default',
scopes: scopeEvaluation.grantedScopes,
canonicalScopes: scopeEvaluation.canonicalScopes,
missingScopes: scopeEvaluation.missingScopes,
extraScopes: scopeEvaluation.extraScopes,
requiresReauthorization: scopeEvaluation.requiresReauthorization,
}
}
async function getFallbackDisplayName(
requestId: string,
providerParam: string | null | undefined,
accountRow: {
idToken: string | null
accountId: string
userId: string
}
) {
const providerForParse = (providerParam || 'google') as OAuthProvider
const { baseProvider } = parseProvider(providerForParse)
if (accountRow.idToken) {
try {
const decoded = jwtDecode<GoogleIdToken>(accountRow.idToken)
if (decoded.email) return decoded.email
if (decoded.name) return decoded.name
} catch (_error) {
logger.warn(`[${requestId}] Error decoding ID token`, {
accountId: accountRow.accountId,
})
}
}
if (baseProvider === 'github') {
return `${accountRow.accountId} (GitHub)`
}
try {
const userRecord = await db
.select({ email: user.email })
.from(user)
.where(eq(user.id, accountRow.userId))
.limit(1)
if (userRecord.length > 0) {
return userRecord[0].email
}
} catch (_error) {
logger.warn(`[${requestId}] Error fetching user email`, {
userId: accountRow.userId,
})
}
return `${accountRow.accountId} (${baseProvider})`
}
 /**
  * Get credentials for a specific provider
  */
@@ -46,6 +122,7 @@ export async function GET(request: NextRequest) {
     const rawQuery = {
       provider: searchParams.get('provider'),
       workflowId: searchParams.get('workflowId'),
+      workspaceId: searchParams.get('workspaceId'),
       credentialId: searchParams.get('credentialId'),
     }
@@ -78,7 +155,7 @@ export async function GET(request: NextRequest) {
       )
     }
-    const { provider: providerParam, workflowId, credentialId } = parseResult.data
+    const { provider: providerParam, workflowId, workspaceId, credentialId } = parseResult.data
     // Authenticate requester (supports session and internal JWT)
     const authResult = await checkSessionOrInternalAuth(request)
@@ -88,7 +165,7 @@ export async function GET(request: NextRequest) {
     }
     const requesterUserId = authResult.userId
-    const effectiveUserId = requesterUserId
+    let effectiveWorkspaceId = workspaceId ?? undefined
     if (workflowId) {
       const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
         workflowId,
@@ -106,101 +183,145 @@ export async function GET(request: NextRequest) {
           { status: workflowAuthorization.status }
         )
       }
+      effectiveWorkspaceId = workflowAuthorization.workflow?.workspaceId || undefined
     }
-    // Parse the provider to get base provider and feature type (if provider is present)
-    const { baseProvider } = parseProvider((providerParam || 'google') as OAuthProvider)
+    if (effectiveWorkspaceId) {
+      const workspaceAccess = await checkWorkspaceAccess(effectiveWorkspaceId, requesterUserId)
+      if (!workspaceAccess.hasAccess) {
+        return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
+      }
+    }
     let accountsData
if (credentialId) {
const [platformCredential] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
accountProviderId: account.providerId,
accountScope: account.scope,
accountUpdatedAt: account.updatedAt,
})
.from(credential)
.leftJoin(account, eq(credential.accountId, account.id))
.where(eq(credential.id, credentialId))
.limit(1)
if (platformCredential) {
if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
return NextResponse.json({ credentials: [] }, { status: 200 })
}
if (workflowId) {
if (!effectiveWorkspaceId || platformCredential.workspaceId !== effectiveWorkspaceId) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
} else {
const [membership] = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, platformCredential.id),
eq(credentialMember.userId, requesterUserId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
if (!membership) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
}
if (!platformCredential.accountProviderId || !platformCredential.accountUpdatedAt) {
return NextResponse.json({ credentials: [] }, { status: 200 })
}
return NextResponse.json(
{
credentials: [
toCredentialResponse(
platformCredential.id,
platformCredential.displayName,
platformCredential.accountProviderId,
platformCredential.accountUpdatedAt,
platformCredential.accountScope
),
],
},
{ status: 200 }
)
}
}
if (effectiveWorkspaceId && providerParam) {
await syncWorkspaceOAuthCredentialsForUser({
workspaceId: effectiveWorkspaceId,
userId: requesterUserId,
})
const credentialsData = await db
.select({
id: credential.id,
displayName: credential.displayName,
providerId: account.providerId,
scope: account.scope,
updatedAt: account.updatedAt,
})
.from(credential)
.innerJoin(account, eq(credential.accountId, account.id))
.innerJoin(
credentialMember,
and(
eq(credentialMember.credentialId, credential.id),
eq(credentialMember.userId, requesterUserId),
eq(credentialMember.status, 'active')
)
)
.where(
and(
eq(credential.workspaceId, effectiveWorkspaceId),
eq(credential.type, 'oauth'),
eq(account.providerId, providerParam)
)
)
return NextResponse.json(
{
credentials: credentialsData.map((row) =>
toCredentialResponse(row.id, row.displayName, row.providerId, row.updatedAt, row.scope)
),
},
{ status: 200 }
)
}
     if (credentialId && workflowId) {
// When both workflowId and credentialId are provided, fetch by ID only.
// Workspace authorization above already proves access; the credential
// may belong to another workspace member (e.g. for display name resolution).
       accountsData = await db.select().from(account).where(eq(account.id, credentialId))
     } else if (credentialId) {
       accountsData = await db
         .select()
         .from(account)
-        .where(and(eq(account.userId, effectiveUserId), eq(account.id, credentialId)))
+        .where(and(eq(account.userId, requesterUserId), eq(account.id, credentialId)))
     } else {
-      // Fetch all credentials for provider and effective user
       accountsData = await db
         .select()
         .from(account)
-        .where(and(eq(account.userId, effectiveUserId), eq(account.providerId, providerParam!)))
+        .where(and(eq(account.userId, requesterUserId), eq(account.providerId, providerParam!)))
     }
     // Transform accounts into credentials
     const credentials = await Promise.all(
       accountsData.map(async (acc) => {
-        // Extract the feature type from providerId (e.g., 'google-default' -> 'default')
-        const [_, featureType = 'default'] = acc.providerId.split('-')
+        const displayName = await getFallbackDisplayName(requestId, providerParam, acc)
+        return toCredentialResponse(acc.id, displayName, acc.providerId, acc.updatedAt, acc.scope)
// Try multiple methods to get a user-friendly display name
let displayName = ''
// Method 1: Try to extract email from ID token (works for Google, etc.)
if (acc.idToken) {
try {
const decoded = jwtDecode<GoogleIdToken>(acc.idToken)
if (decoded.email) {
displayName = decoded.email
} else if (decoded.name) {
displayName = decoded.name
}
} catch (_error) {
logger.warn(`[${requestId}] Error decoding ID token`, {
accountId: acc.id,
})
}
}
// Method 2: For GitHub, the accountId might be the username
if (!displayName && baseProvider === 'github') {
displayName = `${acc.accountId} (GitHub)`
}
// Method 3: Try to get the user's email from our database
if (!displayName) {
try {
const userRecord = await db
.select({ email: user.email })
.from(user)
.where(eq(user.id, acc.userId))
.limit(1)
if (userRecord.length > 0) {
displayName = userRecord[0].email
}
} catch (_error) {
logger.warn(`[${requestId}] Error fetching user email`, {
userId: acc.userId,
})
}
}
// Fallback: Use accountId with provider type as context
if (!displayName) {
displayName = `${acc.accountId} (${baseProvider})`
}
const storedScope = acc.scope?.trim()
const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
const scopeEvaluation = evaluateScopeCoverage(acc.providerId, grantedScopes)
return {
id: acc.id,
name: displayName,
provider: acc.providerId,
lastUsed: acc.updatedAt.toISOString(),
isDefault: featureType === 'default',
scopes: scopeEvaluation.grantedScopes,
canonicalScopes: scopeEvaluation.canonicalScopes,
missingScopes: scopeEvaluation.missingScopes,
extraScopes: scopeEvaluation.extraScopes,
requiresReauthorization: scopeEvaluation.requiresReauthorization,
}
       })
     )
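
The rewritten listing logic above serves three cases: a specific platform credential by ID, all workspace credentials for a provider (after syncing the requester's memberships), and the legacy per-user account lookup. A minimal client-side sketch of the workspace-scoped case, assuming a mount path of `/api/auth/oauth/credentials` (the file path is not shown in this diff) and the response shape produced by `toCredentialResponse`:

```ts
// Hypothetical caller; the endpoint path is an assumption, the fields mirror toCredentialResponse.
interface CredentialSummary {
  id: string
  name: string
  provider: string
  lastUsed: string
  isDefault: boolean
  scopes: string[]
  missingScopes: string[]
  requiresReauthorization: boolean
}

async function listWorkspaceCredentials(provider: string, workspaceId: string) {
  const params = new URLSearchParams({ provider, workspaceId })
  const res = await fetch(`/api/auth/oauth/credentials?${params}`)
  if (!res.ok) throw new Error(`Credential listing failed: ${res.status}`)
  const { credentials } = (await res.json()) as { credentials: CredentialSummary[] }
  return credentials
}
```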

View File

@@ -15,6 +15,7 @@ const logger = createLogger('OAuthDisconnectAPI')
 const disconnectSchema = z.object({
   provider: z.string({ required_error: 'Provider is required' }).min(1, 'Provider is required'),
   providerId: z.string().optional(),
+  accountId: z.string().optional(),
 })
 /**
@@ -50,15 +51,20 @@ export async function POST(request: NextRequest) {
       )
     }
-    const { provider, providerId } = parseResult.data
+    const { provider, providerId, accountId } = parseResult.data
     logger.info(`[${requestId}] Processing OAuth disconnect request`, {
       provider,
       hasProviderId: !!providerId,
     })
-    // If a specific providerId is provided, delete only that account
-    if (providerId) {
+    // If a specific account row ID is provided, delete that exact account
+    if (accountId) {
+      await db
+        .delete(account)
+        .where(and(eq(account.userId, session.user.id), eq(account.id, accountId)))
+    } else if (providerId) {
+      // If a specific providerId is provided, delete accounts for that provider ID
       await db
         .delete(account)
         .where(and(eq(account.userId, session.user.id), eq(account.providerId, providerId)))
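
With the new optional `accountId`, the handler deletes one exact account row before falling back to the provider-wide behavior. A hedged sketch of the two request shapes (the route path is an assumption; the body follows `disconnectSchema`):

```ts
// Hypothetical caller for the disconnect endpoint sketched above.
async function disconnectOAuth(
  provider: string,
  opts: { accountId?: string; providerId?: string } = {}
) {
  const res = await fetch('/api/auth/oauth/disconnect', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // accountId removes a single account row; providerId removes every row for that provider.
    body: JSON.stringify({ provider, ...opts }),
  })
  if (!res.ok) throw new Error(`Disconnect failed: ${res.status}`)
}
```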

View File

@@ -38,13 +38,18 @@ export async function GET(request: NextRequest) {
       return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
     }
-    const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
+    const resolvedCredentialId = authz.resolvedCredentialId || credentialId
+    const credential = await getCredential(
+      requestId,
+      resolvedCredentialId,
+      authz.credentialOwnerUserId
+    )
     if (!credential) {
       return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
     }
     const accessToken = await refreshAccessTokenIfNeeded(
-      credentialId,
+      resolvedCredentialId,
       authz.credentialOwnerUserId,
       requestId
     )

View File

@@ -37,14 +37,19 @@ export async function GET(request: NextRequest) {
       return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
     }
-    const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
+    const resolvedCredentialId = authz.resolvedCredentialId || credentialId
+    const credential = await getCredential(
+      requestId,
+      resolvedCredentialId,
+      authz.credentialOwnerUserId
+    )
     if (!credential) {
       return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
     }
     // Refresh access token if needed using the utility function
     const accessToken = await refreshAccessTokenIfNeeded(
-      credentialId,
+      resolvedCredentialId,
       authz.credentialOwnerUserId,
       requestId
     )

View File

@@ -119,14 +119,23 @@ export async function POST(request: NextRequest) {
       return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
     }
-    const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
+    const resolvedCredentialId = authz.resolvedCredentialId || credentialId
+    const credential = await getCredential(
+      requestId,
+      resolvedCredentialId,
+      authz.credentialOwnerUserId
+    )
     if (!credential) {
       return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
     }
     try {
-      const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
+      const { accessToken } = await refreshTokenIfNeeded(
+        requestId,
+        credential,
+        resolvedCredentialId
+      )
       let instanceUrl: string | undefined
       if (credential.providerId === 'salesforce' && credential.scope) {
@@ -186,13 +195,20 @@ export async function GET(request: NextRequest) {
     const { credentialId } = parseResult.data
-    // For GET requests, we only support session-based authentication
-    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
-    if (!auth.success || auth.authType !== 'session' || !auth.userId) {
-      return NextResponse.json({ error: 'User not authenticated' }, { status: 401 })
+    const authz = await authorizeCredentialUse(request, {
+      credentialId,
+      requireWorkflowIdForInternal: false,
+    })
+    if (!authz.ok || authz.authType !== 'session' || !authz.credentialOwnerUserId) {
+      return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
     }
-    const credential = await getCredential(requestId, credentialId, auth.userId)
+    const resolvedCredentialId = authz.resolvedCredentialId || credentialId
+    const credential = await getCredential(
+      requestId,
+      resolvedCredentialId,
+      authz.credentialOwnerUserId
+    )
     if (!credential) {
       return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
@@ -204,7 +220,11 @@ export async function GET(request: NextRequest) {
     }
     try {
-      const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
+      const { accessToken } = await refreshTokenIfNeeded(
+        requestId,
+        credential,
+        resolvedCredentialId
+      )
       // For Salesforce, extract instanceUrl from the scope field
       let instanceUrl: string | undefined

View File

@@ -50,7 +50,7 @@ describe('OAuth Utils', () => {
   describe('getCredential', () => {
     it('should return credential when found', async () => {
       const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
       const credential = await getCredential('request-id', 'credential-id', 'test-user-id')
@@ -59,7 +59,8 @@ describe('OAuth Utils', () => {
       expect(mockDbTyped.where).toHaveBeenCalled()
       expect(mockDbTyped.limit).toHaveBeenCalledWith(1)
-      expect(credential).toEqual(mockCredential)
+      expect(credential).toMatchObject(mockCredential)
+      expect(credential).toMatchObject({ resolvedCredentialId: 'credential-id' })
     })
     it('should return undefined when credential is not found', async () => {
@@ -152,7 +153,7 @@ describe('OAuth Utils', () => {
         providerId: 'google',
         userId: 'test-user-id',
       }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
       const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
@@ -169,7 +170,7 @@ describe('OAuth Utils', () => {
         providerId: 'google',
         userId: 'test-user-id',
       }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
       mockRefreshOAuthToken.mockResolvedValueOnce({
         accessToken: 'new-token',
@@ -202,7 +203,7 @@ describe('OAuth Utils', () => {
         providerId: 'google',
         userId: 'test-user-id',
       }
-      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
+      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
       mockRefreshOAuthToken.mockResolvedValueOnce(null)

View File

@@ -1,5 +1,5 @@
 import { db } from '@sim/db'
-import { account, credentialSetMember } from '@sim/db/schema'
+import { account, credential, credentialSetMember } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { and, desc, eq, inArray } from 'drizzle-orm'
 import { refreshOAuthToken } from '@/lib/oauth'
@@ -25,6 +25,28 @@ interface AccountInsertData {
   accessTokenExpiresAt?: Date
 }
async function resolveOAuthAccountId(
credentialId: string
): Promise<{ accountId: string; usedCredentialTable: boolean } | null> {
const [credentialRow] = await db
.select({
type: credential.type,
accountId: credential.accountId,
})
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (credentialRow) {
if (credentialRow.type !== 'oauth' || !credentialRow.accountId) {
return null
}
return { accountId: credentialRow.accountId, usedCredentialTable: true }
}
return { accountId: credentialId, usedCredentialTable: false }
}
 /**
  * Safely inserts an account record, handling duplicate constraint violations gracefully.
  * If a duplicate is detected (unique constraint violation), logs a warning and returns success.
@@ -52,10 +74,16 @@ export async function safeAccountInsert(
  * Get a credential by ID and verify it belongs to the user
  */
 export async function getCredential(requestId: string, credentialId: string, userId: string) {
+  const resolved = await resolveOAuthAccountId(credentialId)
+  if (!resolved) {
+    logger.warn(`[${requestId}] Credential is not an OAuth credential`)
+    return undefined
+  }
   const credentials = await db
     .select()
     .from(account)
-    .where(and(eq(account.id, credentialId), eq(account.userId, userId)))
+    .where(and(eq(account.id, resolved.accountId), eq(account.userId, userId)))
     .limit(1)
   if (!credentials.length) {
@@ -63,7 +91,10 @@ export async function getCredential(requestId: string, credentialId: string, use
     return undefined
   }
-  return credentials[0]
+  return {
+    ...credentials[0],
+    resolvedCredentialId: resolved.accountId,
+  }
 }
 export async function getOAuthToken(userId: string, providerId: string): Promise<string | null> {
@@ -238,7 +269,9 @@ export async function refreshAccessTokenIfNeeded(
     }
     // Update the token in the database
-    await db.update(account).set(updateData).where(eq(account.id, credentialId))
+    const resolvedCredentialId =
+      (credential as { resolvedCredentialId?: string }).resolvedCredentialId ?? credentialId
+    await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))
     logger.info(`[${requestId}] Successfully refreshed access token for credential`)
     return refreshedToken.accessToken
@@ -274,6 +307,8 @@ export async function refreshTokenIfNeeded(
   credential: any,
   credentialId: string
 ): Promise<{ accessToken: string; refreshed: boolean }> {
+  const resolvedCredentialId = credential.resolvedCredentialId ?? credentialId
   // Decide if we should refresh: token missing OR expired
   const accessTokenExpiresAt = credential.accessTokenExpiresAt
   const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
@@ -334,7 +369,7 @@ export async function refreshTokenIfNeeded(
       updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
     }
-    await db.update(account).set(updateData).where(eq(account.id, credentialId))
+    await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))
     logger.info(`[${requestId}] Successfully refreshed access token`)
     return { accessToken: refreshedToken, refreshed: true }
@@ -343,7 +378,7 @@ export async function refreshTokenIfNeeded(
       `[${requestId}] Refresh attempt failed, checking if another concurrent request succeeded`
     )
-    const freshCredential = await getCredential(requestId, credentialId, credential.userId)
+    const freshCredential = await getCredential(requestId, resolvedCredentialId, credential.userId)
     if (freshCredential?.accessToken) {
       const freshExpiresAt = freshCredential.accessTokenExpiresAt
       const stillValid = !freshExpiresAt || freshExpiresAt > new Date()
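
`getCredential` now accepts either a platform `credential.id` (resolved through the new `credential` table to its linked `account` row) or a legacy `account.id`, and tags the result with `resolvedCredentialId` so the refresh helpers update the correct account row. A standalone restatement of that resolution rule, with a simplified stand-in type for the real schema:

```ts
// Sketch only: `CredentialRow` stands in for the drizzle row selected in resolveOAuthAccountId.
interface CredentialRow {
  type: string
  accountId: string | null
}

function resolveAccountId(
  credentialId: string,
  credentialRow: CredentialRow | undefined
): { accountId: string; usedCredentialTable: boolean } | null {
  if (credentialRow) {
    // Platform credentials must be OAuth-typed and linked to an account row.
    if (credentialRow.type !== 'oauth' || !credentialRow.accountId) return null
    return { accountId: credentialRow.accountId, usedCredentialTable: true }
  }
  // No platform credential row: treat the incoming ID as a legacy account.id.
  return { accountId: credentialId, usedCredentialTable: false }
}
```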

View File

@@ -48,16 +48,21 @@ export async function GET(request: NextRequest) {
     const shopData = await shopResponse.json()
     const shopInfo = shopData.shop
+    const stableAccountId = shopInfo.id?.toString() || shopDomain
     const existing = await db.query.account.findFirst({
-      where: and(eq(account.userId, session.user.id), eq(account.providerId, 'shopify')),
+      where: and(
+        eq(account.userId, session.user.id),
+        eq(account.providerId, 'shopify'),
+        eq(account.accountId, stableAccountId)
+      ),
     })
     const now = new Date()
     const accountData = {
       accessToken: accessToken,
-      accountId: shopInfo.id?.toString() || shopDomain,
+      accountId: stableAccountId,
       scope: scope || '',
       updatedAt: now,
       idToken: shopDomain,

View File

@@ -52,7 +52,11 @@ export async function POST(request: NextRequest) {
     const trelloUser = await userResponse.json()
     const existing = await db.query.account.findFirst({
-      where: and(eq(account.userId, session.user.id), eq(account.providerId, 'trello')),
+      where: and(
+        eq(account.userId, session.user.id),
+        eq(account.providerId, 'trello'),
+        eq(account.accountId, trelloUser.id)
+      ),
     })
     const now = new Date()

View File

@@ -113,7 +113,6 @@ const ChatMessageSchema = z.object({
     workflowId: z.string().optional(),
     knowledgeId: z.string().optional(),
     blockId: z.string().optional(),
-    blockIds: z.array(z.string()).optional(),
     templateId: z.string().optional(),
     executionId: z.string().optional(),
     // For workflow_block, provide both workflowId and blockId
@@ -160,20 +159,6 @@ export async function POST(req: NextRequest) {
       commands,
     } = ChatMessageSchema.parse(body)
-    const normalizedContexts = Array.isArray(contexts)
-      ? contexts.map((ctx) => {
-          if (ctx.kind !== 'blocks') return ctx
-          if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
-          if (ctx.blockId) {
-            return {
-              ...ctx,
-              blockIds: [ctx.blockId],
-            }
-          }
-          return ctx
-        })
-      : contexts
     // Resolve workflowId - if not provided, use first workflow or find by name
     const resolved = await resolveWorkflowIdForUser(
       authenticatedUserId,
@@ -191,10 +176,10 @@ export async function POST(req: NextRequest) {
     const userMessageIdToUse = userMessageId || crypto.randomUUID()
     try {
       logger.info(`[${tracker.requestId}] Received chat POST`, {
-        hasContexts: Array.isArray(normalizedContexts),
-        contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
-        contextsPreview: Array.isArray(normalizedContexts)
-          ? normalizedContexts.map((c: any) => ({
+        hasContexts: Array.isArray(contexts),
+        contextsCount: Array.isArray(contexts) ? contexts.length : 0,
+        contextsPreview: Array.isArray(contexts)
+          ? contexts.map((c: any) => ({
               kind: c?.kind,
               chatId: c?.chatId,
               workflowId: c?.workflowId,
@@ -206,25 +191,17 @@ export async function POST(req: NextRequest) {
     } catch {}
     // Preprocess contexts server-side
     let agentContexts: Array<{ type: string; content: string }> = []
-    if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
+    if (Array.isArray(contexts) && contexts.length > 0) {
       try {
         const { processContextsServer } = await import('@/lib/copilot/process-contents')
-        const processed = await processContextsServer(
-          normalizedContexts as any,
-          authenticatedUserId,
-          message
-        )
+        const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
         agentContexts = processed
         logger.info(`[${tracker.requestId}] Contexts processed for request`, {
           processedCount: agentContexts.length,
           kinds: agentContexts.map((c) => c.type),
           lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
         })
-        if (
-          Array.isArray(normalizedContexts) &&
-          normalizedContexts.length > 0 &&
-          agentContexts.length === 0
-        ) {
+        if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
           logger.warn(
             `[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
           )
@@ -269,13 +246,11 @@ export async function POST(req: NextRequest) {
         mode,
         model: selectedModel,
         provider,
-        conversationId: effectiveConversationId,
         conversationHistory,
         contexts: agentContexts,
         fileAttachments,
         commands,
         chatId: actualChatId,
-        prefetch,
         implicitFeedback,
       },
       {
@@ -457,15 +432,10 @@ export async function POST(req: NextRequest) {
       content: message,
       timestamp: new Date().toISOString(),
       ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
-      ...(Array.isArray(normalizedContexts) &&
-        normalizedContexts.length > 0 && {
-          contexts: normalizedContexts,
-        }),
-      ...(Array.isArray(normalizedContexts) &&
-        normalizedContexts.length > 0 && {
-          contentBlocks: [
-            { type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
-          ],
+      ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
+      ...(Array.isArray(contexts) &&
+        contexts.length > 0 && {
+          contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
         }),
     }

View File

@@ -0,0 +1,194 @@
import { db } from '@sim/db'
import { credential, credentialMember, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
const logger = createLogger('CredentialMembersAPI')
interface RouteContext {
params: Promise<{ id: string }>
}
async function requireAdminMembership(credentialId: string, userId: string) {
const [membership] = await db
.select({ role: credentialMember.role, status: credentialMember.status })
.from(credentialMember)
.where(
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
)
.limit(1)
if (!membership || membership.status !== 'active' || membership.role !== 'admin') {
return null
}
return membership
}
export async function GET(_request: NextRequest, context: RouteContext) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: credentialId } = await context.params
const [cred] = await db
.select({ id: credential.id })
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (!cred) {
return NextResponse.json({ members: [] }, { status: 200 })
}
const members = await db
.select({
id: credentialMember.id,
userId: credentialMember.userId,
role: credentialMember.role,
status: credentialMember.status,
joinedAt: credentialMember.joinedAt,
userName: user.name,
userEmail: user.email,
})
.from(credentialMember)
.innerJoin(user, eq(credentialMember.userId, user.id))
.where(eq(credentialMember.credentialId, credentialId))
return NextResponse.json({ members })
} catch (error) {
logger.error('Failed to fetch credential members', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
const addMemberSchema = z.object({
userId: z.string().min(1),
role: z.enum(['admin', 'member']).default('member'),
})
export async function POST(request: NextRequest, context: RouteContext) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: credentialId } = await context.params
const admin = await requireAdminMembership(credentialId, session.user.id)
if (!admin) {
return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
}
const body = await request.json()
const parsed = addMemberSchema.safeParse(body)
if (!parsed.success) {
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
}
const { userId, role } = parsed.data
const now = new Date()
const [existing] = await db
.select({ id: credentialMember.id, status: credentialMember.status })
.from(credentialMember)
.where(
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
)
.limit(1)
if (existing) {
await db
.update(credentialMember)
.set({ role, status: 'active', updatedAt: now })
.where(eq(credentialMember.id, existing.id))
return NextResponse.json({ success: true })
}
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId,
role,
status: 'active',
joinedAt: now,
invitedBy: session.user.id,
createdAt: now,
updatedAt: now,
})
return NextResponse.json({ success: true }, { status: 201 })
} catch (error) {
logger.error('Failed to add credential member', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function DELETE(request: NextRequest, context: RouteContext) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: credentialId } = await context.params
const targetUserId = new URL(request.url).searchParams.get('userId')
if (!targetUserId) {
return NextResponse.json({ error: 'userId query parameter required' }, { status: 400 })
}
const admin = await requireAdminMembership(credentialId, session.user.id)
if (!admin) {
return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
}
const [target] = await db
.select({
id: credentialMember.id,
role: credentialMember.role,
status: credentialMember.status,
})
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.userId, targetUserId)
)
)
.limit(1)
if (!target) {
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
}
if (target.role === 'admin') {
const activeAdmins = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.role, 'admin'),
eq(credentialMember.status, 'active')
)
)
if (activeAdmins.length <= 1) {
return NextResponse.json({ error: 'Cannot remove the last admin' }, { status: 400 })
}
}
await db.delete(credentialMember).where(eq(credentialMember.id, target.id))
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Failed to remove credential member', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
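
A short sketch of how a client might call the member management handlers above. The `/api/credentials/[id]/members` base path is inferred from the route context params and is an assumption; the payloads follow `addMemberSchema` and the DELETE handler's `userId` query parameter:

```ts
// Hypothetical client helpers for the credential members route sketched above.
async function addCredentialMember(
  credentialId: string,
  userId: string,
  role: 'admin' | 'member' = 'member'
) {
  await fetch(`/api/credentials/${credentialId}/members`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ userId, role }),
  })
}

async function removeCredentialMember(credentialId: string, userId: string) {
  // The handler returns 400 when the target is the last active admin.
  await fetch(`/api/credentials/${credentialId}/members?userId=${encodeURIComponent(userId)}`, {
    method: 'DELETE',
  })
}
```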

View File

@@ -0,0 +1,234 @@
import { db } from '@sim/db'
import { credential, credentialMember, environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getCredentialActorContext } from '@/lib/credentials/access'
import {
syncPersonalEnvCredentialsForUser,
syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'
const logger = createLogger('CredentialByIdAPI')
const updateCredentialSchema = z
.object({
displayName: z.string().trim().min(1).max(255).optional(),
accountId: z.string().trim().min(1).optional(),
})
.strict()
.refine((data) => Boolean(data.displayName || data.accountId), {
message: 'At least one field must be provided',
path: ['displayName'],
})
async function getCredentialResponse(credentialId: string, userId: string) {
const [row] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
envKey: credential.envKey,
envOwnerUserId: credential.envOwnerUserId,
createdBy: credential.createdBy,
createdAt: credential.createdAt,
updatedAt: credential.updatedAt,
role: credentialMember.role,
status: credentialMember.status,
})
.from(credential)
.innerJoin(
credentialMember,
and(eq(credentialMember.credentialId, credential.id), eq(credentialMember.userId, userId))
)
.where(eq(credential.id, credentialId))
.limit(1)
return row ?? null
}
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const access = await getCredentialActorContext(id, session.user.id)
if (!access.credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
if (!access.hasWorkspaceAccess || !access.member) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
const row = await getCredentialResponse(id, session.user.id)
return NextResponse.json({ credential: row }, { status: 200 })
} catch (error) {
logger.error('Failed to fetch credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const parseResult = updateCredentialSchema.safeParse(await request.json())
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const access = await getCredentialActorContext(id, session.user.id)
if (!access.credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
if (!access.hasWorkspaceAccess || !access.isAdmin) {
return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
}
if (access.credential.type === 'oauth') {
return NextResponse.json(
{
error:
'OAuth credential editing is disabled. Connect an account and create or use its linked credential.',
},
{ status: 400 }
)
}
return NextResponse.json(
{
error:
'Environment credentials cannot be updated via this endpoint. Use the environment value editor in credentials settings.',
},
{ status: 400 }
)
} catch (error) {
logger.error('Failed to update credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const access = await getCredentialActorContext(id, session.user.id)
if (!access.credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
if (!access.hasWorkspaceAccess || !access.isAdmin) {
return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
}
if (access.credential.type === 'env_personal' && access.credential.envKey) {
const ownerUserId = access.credential.envOwnerUserId
if (!ownerUserId) {
return NextResponse.json({ error: 'Invalid personal secret owner' }, { status: 400 })
}
const [personalRow] = await db
.select({ variables: environment.variables })
.from(environment)
.where(eq(environment.userId, ownerUserId))
.limit(1)
const current = ((personalRow?.variables as Record<string, string> | null) ?? {}) as Record<
string,
string
>
if (access.credential.envKey in current) {
delete current[access.credential.envKey]
}
await db
.insert(environment)
.values({
id: ownerUserId,
userId: ownerUserId,
variables: current,
updatedAt: new Date(),
})
.onConflictDoUpdate({
target: [environment.userId],
set: { variables: current, updatedAt: new Date() },
})
await syncPersonalEnvCredentialsForUser({
userId: ownerUserId,
envKeys: Object.keys(current),
})
return NextResponse.json({ success: true }, { status: 200 })
}
if (access.credential.type === 'env_workspace' && access.credential.envKey) {
const [workspaceRow] = await db
.select({
id: workspaceEnvironment.id,
createdAt: workspaceEnvironment.createdAt,
variables: workspaceEnvironment.variables,
})
.from(workspaceEnvironment)
.where(eq(workspaceEnvironment.workspaceId, access.credential.workspaceId))
.limit(1)
const current = ((workspaceRow?.variables as Record<string, string> | null) ?? {}) as Record<
string,
string
>
if (access.credential.envKey in current) {
delete current[access.credential.envKey]
}
await db
.insert(workspaceEnvironment)
.values({
id: workspaceRow?.id || crypto.randomUUID(),
workspaceId: access.credential.workspaceId,
variables: current,
createdAt: workspaceRow?.createdAt || new Date(),
updatedAt: new Date(),
})
.onConflictDoUpdate({
target: [workspaceEnvironment.workspaceId],
set: { variables: current, updatedAt: new Date() },
})
await syncWorkspaceEnvCredentials({
workspaceId: access.credential.workspaceId,
envKeys: Object.keys(current),
actingUserId: session.user.id,
})
return NextResponse.json({ success: true }, { status: 200 })
}
await db.delete(credential).where(eq(credential.id, id))
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to delete credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -0,0 +1,81 @@
import { db } from '@sim/db'
import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
syncPersonalEnvCredentialsForUser,
syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('CredentialsBootstrapAPI')
const bootstrapSchema = z.object({
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
})
/**
* Ensures the current user's connected accounts and env vars are reflected as workspace credentials.
*/
export async function POST(request: NextRequest) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const parseResult = bootstrapSchema.safeParse(await request.json())
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { workspaceId } = parseResult.data
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
if (!workspaceAccess.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
const [personalRow, workspaceRow] = await Promise.all([
db
.select({ variables: environment.variables })
.from(environment)
.where(eq(environment.userId, session.user.id))
.limit(1),
db
.select({ variables: workspaceEnvironment.variables })
.from(workspaceEnvironment)
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
.limit(1),
])
const personalKeys = Object.keys((personalRow[0]?.variables as Record<string, string>) || {})
const workspaceKeys = Object.keys((workspaceRow[0]?.variables as Record<string, string>) || {})
const [oauthSyncResult] = await Promise.all([
syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id }),
syncPersonalEnvCredentialsForUser({ userId: session.user.id, envKeys: personalKeys }),
syncWorkspaceEnvCredentials({
workspaceId,
envKeys: workspaceKeys,
actingUserId: session.user.id,
}),
])
return NextResponse.json({
success: true,
synced: {
oauthCreated: oauthSyncResult.createdCredentials,
oauthMembershipsUpdated: oauthSyncResult.updatedMemberships,
personalEnvKeys: personalKeys.length,
workspaceEnvKeys: workspaceKeys.length,
},
})
} catch (error) {
logger.error('Failed to bootstrap workspace credentials', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
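
The bootstrap handler syncs a user's connected OAuth accounts plus personal and workspace env keys into workspace credentials in a single call. A hedged example of invoking it (the mount path is an assumption; the response shape mirrors the handler above):

```ts
// Hypothetical invocation of the bootstrap endpoint.
async function bootstrapWorkspaceCredentials(workspaceId: string) {
  const res = await fetch('/api/credentials/bootstrap', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId }),
  })
  // On success: { success: true, synced: { oauthCreated, oauthMembershipsUpdated,
  //               personalEnvKeys, workspaceEnvKeys } }
  return res.json()
}
```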

View File

@@ -0,0 +1,73 @@
import { db } from '@sim/db'
import { pendingCredentialDraft } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, lt } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
const logger = createLogger('CredentialDraftAPI')
const DRAFT_TTL_MS = 15 * 60 * 1000
const createDraftSchema = z.object({
workspaceId: z.string().min(1),
providerId: z.string().min(1),
displayName: z.string().min(1),
})
export async function POST(request: Request) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const parsed = createDraftSchema.safeParse(body)
if (!parsed.success) {
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
}
const { workspaceId, providerId, displayName } = parsed.data
const userId = session.user.id
const now = new Date()
await db
.delete(pendingCredentialDraft)
.where(
and(eq(pendingCredentialDraft.userId, userId), lt(pendingCredentialDraft.expiresAt, now))
)
await db
.insert(pendingCredentialDraft)
.values({
id: crypto.randomUUID(),
userId,
workspaceId,
providerId,
displayName,
expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
createdAt: now,
})
.onConflictDoUpdate({
target: [
pendingCredentialDraft.userId,
pendingCredentialDraft.providerId,
pendingCredentialDraft.workspaceId,
],
set: {
displayName,
expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
createdAt: now,
},
})
logger.info('Credential draft saved', { userId, workspaceId, providerId, displayName })
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to save credential draft', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
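
As a sketch of the upsert semantics above: calling the endpoint twice for the same user, workspace, and provider within the 15-minute TTL should update `displayName` and extend `expiresAt` rather than create a second draft row. The `/api/credentials/draft` path and the example provider ID are assumptions.

```ts
// Hypothetical usage; the route path is an assumption based on the handler above.
async function savePendingCredentialDraft(
  workspaceId: string,
  providerId: string,
  displayName: string
) {
  const res = await fetch('/api/credentials/draft', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId, providerId, displayName }),
  })
  return res.ok
}

// The second call hits the (userId, providerId, workspaceId) conflict target and
// only refreshes displayName/expiresAt instead of inserting a new row.
await savePendingCredentialDraft('ws-1', 'google-drive', 'Work account')
await savePendingCredentialDraft('ws-1', 'google-drive', 'Work account (renamed)')
```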

View File

@@ -0,0 +1,112 @@
import { db } from '@sim/db'
import { credential, credentialMember } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
const logger = createLogger('CredentialMembershipsAPI')
const leaveCredentialSchema = z.object({
credentialId: z.string().min(1),
})
export async function GET() {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const memberships = await db
.select({
membershipId: credentialMember.id,
credentialId: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
role: credentialMember.role,
status: credentialMember.status,
joinedAt: credentialMember.joinedAt,
})
.from(credentialMember)
.innerJoin(credential, eq(credentialMember.credentialId, credential.id))
.where(eq(credentialMember.userId, session.user.id))
return NextResponse.json({ memberships }, { status: 200 })
} catch (error) {
logger.error('Failed to list credential memberships', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function DELETE(request: NextRequest) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const parseResult = leaveCredentialSchema.safeParse({
credentialId: new URL(request.url).searchParams.get('credentialId'),
})
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { credentialId } = parseResult.data
const [membership] = await db
.select()
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.userId, session.user.id)
)
)
.limit(1)
if (!membership) {
return NextResponse.json({ error: 'Membership not found' }, { status: 404 })
}
if (membership.status !== 'active') {
return NextResponse.json({ success: true }, { status: 200 })
}
if (membership.role === 'admin') {
const activeAdmins = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.role, 'admin'),
eq(credentialMember.status, 'active')
)
)
if (activeAdmins.length <= 1) {
return NextResponse.json(
{ error: 'Cannot leave credential as the last active admin' },
{ status: 400 }
)
}
}
await db
.update(credentialMember)
.set({
status: 'revoked',
updatedAt: new Date(),
})
.where(eq(credentialMember.id, membership.id))
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to leave credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
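
A short sketch of leaving a shared credential through this DELETE handler; note that the last-active-admin guard surfaces as a 400. The `/api/credentials/memberships` path is an assumption.

```ts
// Hypothetical helper; the route path is an assumption based on the handler above.
async function leaveCredential(credentialId: string) {
  const res = await fetch(
    `/api/credentials/memberships?credentialId=${encodeURIComponent(credentialId)}`,
    { method: 'DELETE' }
  )
  if (res.status === 400) {
    // The handler refuses to revoke the last active admin's membership.
    throw new Error('Cannot leave credential as the last active admin')
  }
  if (!res.ok) throw new Error(`Leave failed with status ${res.status}`)
  return true
}
```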

View File

@@ -0,0 +1,468 @@
import { db } from '@sim/db'
import { account, credential, credentialMember, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getWorkspaceMemberUserIds } from '@/lib/credentials/environment'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { getServiceConfigByProviderId } from '@/lib/oauth'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
import { isValidEnvVarName } from '@/executor/constants'
const logger = createLogger('CredentialsAPI')
const credentialTypeSchema = z.enum(['oauth', 'env_workspace', 'env_personal'])
function normalizeEnvKeyInput(raw: string): string {
const trimmed = raw.trim()
const wrappedMatch = /^\{\{\s*([A-Za-z0-9_]+)\s*\}\}$/.exec(trimmed)
return wrappedMatch ? wrappedMatch[1] : trimmed
}
const listCredentialsSchema = z.object({
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
type: credentialTypeSchema.optional(),
providerId: z.string().optional(),
})
const createCredentialSchema = z
.object({
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
type: credentialTypeSchema,
displayName: z.string().trim().min(1).max(255).optional(),
providerId: z.string().trim().min(1).optional(),
accountId: z.string().trim().min(1).optional(),
envKey: z.string().trim().min(1).optional(),
envOwnerUserId: z.string().trim().min(1).optional(),
})
.superRefine((data, ctx) => {
if (data.type === 'oauth') {
if (!data.accountId) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'accountId is required for oauth credentials',
path: ['accountId'],
})
}
if (!data.providerId) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'providerId is required for oauth credentials',
path: ['providerId'],
})
}
if (!data.displayName) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'displayName is required for oauth credentials',
path: ['displayName'],
})
}
return
}
const normalizedEnvKey = data.envKey ? normalizeEnvKeyInput(data.envKey) : ''
if (!normalizedEnvKey) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'envKey is required for env credentials',
path: ['envKey'],
})
return
}
if (!isValidEnvVarName(normalizedEnvKey)) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'envKey must contain only letters, numbers, and underscores',
path: ['envKey'],
})
}
})
interface ExistingCredentialSourceParams {
workspaceId: string
type: 'oauth' | 'env_workspace' | 'env_personal'
accountId?: string | null
envKey?: string | null
envOwnerUserId?: string | null
}
async function findExistingCredentialBySource(params: ExistingCredentialSourceParams) {
const { workspaceId, type, accountId, envKey, envOwnerUserId } = params
if (type === 'oauth' && accountId) {
const [row] = await db
.select()
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'oauth'),
eq(credential.accountId, accountId)
)
)
.limit(1)
return row ?? null
}
if (type === 'env_workspace' && envKey) {
const [row] = await db
.select()
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_workspace'),
eq(credential.envKey, envKey)
)
)
.limit(1)
return row ?? null
}
if (type === 'env_personal' && envKey && envOwnerUserId) {
const [row] = await db
.select()
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envKey, envKey),
eq(credential.envOwnerUserId, envOwnerUserId)
)
)
.limit(1)
return row ?? null
}
return null
}
export async function GET(request: NextRequest) {
const requestId = generateRequestId()
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const { searchParams } = new URL(request.url)
const rawWorkspaceId = searchParams.get('workspaceId')
const rawType = searchParams.get('type')
const rawProviderId = searchParams.get('providerId')
const parseResult = listCredentialsSchema.safeParse({
workspaceId: rawWorkspaceId?.trim(),
type: rawType?.trim() || undefined,
providerId: rawProviderId?.trim() || undefined,
})
if (!parseResult.success) {
logger.warn(`[${requestId}] Invalid credential list request`, {
workspaceId: rawWorkspaceId,
type: rawType,
providerId: rawProviderId,
errors: parseResult.error.errors,
})
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { workspaceId, type, providerId } = parseResult.data
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
if (!workspaceAccess.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
if (!type || type === 'oauth') {
await syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id })
}
const whereClauses = [
eq(credential.workspaceId, workspaceId),
eq(credentialMember.userId, session.user.id),
eq(credentialMember.status, 'active'),
]
if (type) {
whereClauses.push(eq(credential.type, type))
}
if (providerId) {
whereClauses.push(eq(credential.providerId, providerId))
}
const credentials = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
envKey: credential.envKey,
envOwnerUserId: credential.envOwnerUserId,
createdBy: credential.createdBy,
createdAt: credential.createdAt,
updatedAt: credential.updatedAt,
role: credentialMember.role,
})
.from(credential)
.innerJoin(
credentialMember,
and(
eq(credentialMember.credentialId, credential.id),
eq(credentialMember.userId, session.user.id),
eq(credentialMember.status, 'active')
)
)
.where(and(...whereClauses))
return NextResponse.json({ credentials })
} catch (error) {
logger.error(`[${requestId}] Failed to list credentials`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const body = await request.json()
const parseResult = createCredentialSchema.safeParse(body)
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { workspaceId, type, displayName, providerId, accountId, envKey, envOwnerUserId } =
parseResult.data
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
if (!workspaceAccess.canWrite) {
return NextResponse.json({ error: 'Write permission required' }, { status: 403 })
}
let resolvedDisplayName = displayName?.trim() ?? ''
let resolvedProviderId: string | null = providerId ?? null
let resolvedAccountId: string | null = accountId ?? null
const resolvedEnvKey: string | null = envKey ? normalizeEnvKeyInput(envKey) : null
let resolvedEnvOwnerUserId: string | null = null
if (type === 'oauth') {
const [accountRow] = await db
.select({
id: account.id,
userId: account.userId,
providerId: account.providerId,
accountId: account.accountId,
})
.from(account)
.where(eq(account.id, accountId!))
.limit(1)
if (!accountRow) {
return NextResponse.json({ error: 'OAuth account not found' }, { status: 404 })
}
if (accountRow.userId !== session.user.id) {
return NextResponse.json(
{ error: 'Only account owners can create oauth credentials for an account' },
{ status: 403 }
)
}
if (providerId !== accountRow.providerId) {
return NextResponse.json(
{ error: 'providerId does not match the selected OAuth account' },
{ status: 400 }
)
}
if (!resolvedDisplayName) {
resolvedDisplayName =
getServiceConfigByProviderId(accountRow.providerId)?.name || accountRow.providerId
}
} else if (type === 'env_personal') {
resolvedEnvOwnerUserId = envOwnerUserId ?? session.user.id
if (resolvedEnvOwnerUserId !== session.user.id) {
return NextResponse.json(
{ error: 'Only the current user can create personal env credentials for themselves' },
{ status: 403 }
)
}
resolvedProviderId = null
resolvedAccountId = null
resolvedDisplayName = resolvedEnvKey || ''
} else {
resolvedProviderId = null
resolvedAccountId = null
resolvedEnvOwnerUserId = null
resolvedDisplayName = resolvedEnvKey || ''
}
if (!resolvedDisplayName) {
return NextResponse.json({ error: 'Display name is required' }, { status: 400 })
}
const existingCredential = await findExistingCredentialBySource({
workspaceId,
type,
accountId: resolvedAccountId,
envKey: resolvedEnvKey,
envOwnerUserId: resolvedEnvOwnerUserId,
})
if (existingCredential) {
const [membership] = await db
.select({
id: credentialMember.id,
status: credentialMember.status,
role: credentialMember.role,
})
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, existingCredential.id),
eq(credentialMember.userId, session.user.id)
)
)
.limit(1)
if (!membership || membership.status !== 'active') {
return NextResponse.json(
{ error: 'A credential with this source already exists in this workspace' },
{ status: 409 }
)
}
if (
type === 'oauth' &&
membership.role === 'admin' &&
resolvedDisplayName &&
resolvedDisplayName !== existingCredential.displayName
) {
await db
.update(credential)
.set({
displayName: resolvedDisplayName,
updatedAt: new Date(),
})
.where(eq(credential.id, existingCredential.id))
const [updatedCredential] = await db
.select()
.from(credential)
.where(eq(credential.id, existingCredential.id))
.limit(1)
return NextResponse.json(
{ credential: updatedCredential ?? existingCredential },
{ status: 200 }
)
}
return NextResponse.json({ credential: existingCredential }, { status: 200 })
}
const now = new Date()
const credentialId = crypto.randomUUID()
const [workspaceRow] = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
await db.transaction(async (tx) => {
await tx.insert(credential).values({
id: credentialId,
workspaceId,
type,
displayName: resolvedDisplayName,
providerId: resolvedProviderId,
accountId: resolvedAccountId,
envKey: resolvedEnvKey,
envOwnerUserId: resolvedEnvOwnerUserId,
createdBy: session.user.id,
createdAt: now,
updatedAt: now,
})
if (type === 'env_workspace' && workspaceRow?.ownerId) {
const workspaceUserIds = await getWorkspaceMemberUserIds(workspaceId)
if (workspaceUserIds.length > 0) {
for (const memberUserId of workspaceUserIds) {
await tx.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: memberUserId,
role: memberUserId === workspaceRow.ownerId ? 'admin' : 'member',
status: 'active',
joinedAt: now,
invitedBy: session.user.id,
createdAt: now,
updatedAt: now,
})
}
}
} else {
await tx.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: session.user.id,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: session.user.id,
createdAt: now,
updatedAt: now,
})
}
})
const [created] = await db
.select()
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
return NextResponse.json({ credential: created }, { status: 201 })
} catch (error: any) {
if (error?.code === '23505') {
return NextResponse.json(
{ error: 'A credential with this source already exists' },
{ status: 409 }
)
}
if (error?.code === '23503') {
return NextResponse.json(
{ error: 'Invalid credential reference or membership target' },
{ status: 400 }
)
}
if (error?.code === '23514') {
return NextResponse.json(
{ error: 'Credential source data failed validation checks' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Credential create failure details`, {
code: error?.code,
detail: error?.detail,
constraint: error?.constraint,
table: error?.table,
message: error?.message,
})
logger.error(`[${requestId}] Failed to create credential`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
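
A minimal sketch of creating a workspace env credential against this POST handler. It assumes the collection route is `/api/credentials`; note that `envKey` may be submitted in wrapped form (`{{ MY_KEY }}`) because `normalizeEnvKeyInput` strips the braces before validation and source deduplication.

```ts
// Hypothetical usage; the /api/credentials path is an assumption.
async function createWorkspaceEnvCredential(workspaceId: string, envKey: string) {
  const res = await fetch('/api/credentials', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // '{{ API_KEY }}' and 'API_KEY' resolve to the same credential source
    body: JSON.stringify({ workspaceId, type: 'env_workspace', envKey }),
  })
  if (res.status === 409) {
    throw new Error('A credential with this source already exists in this workspace')
  }
  if (!res.ok) throw new Error(`Create failed with status ${res.status}`)
  const { credential } = await res.json()
  return credential
}
```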

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
+import { syncPersonalEnvCredentialsForUser } from '@/lib/credentials/environment'
import type { EnvironmentVariable } from '@/stores/settings/environment'

const logger = createLogger('EnvironmentAPI')

@@ -53,6 +54,11 @@ export async function POST(req: NextRequest) {
      },
    })

+    await syncPersonalEnvCredentialsForUser({
+      userId: session.user.id,
+      envKeys: Object.keys(variables),
+    })
+
    return NextResponse.json({ success: true })
  } catch (validationError) {
    if (validationError instanceof z.ZodError) {

View File

@@ -191,84 +191,3 @@ export async function GET(request: NextRequest) {
    )
  }
}
// Delete a label from a page
export async function DELETE(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const {
domain,
accessToken,
cloudId: providedCloudId,
pageId,
labelName,
} = await request.json()
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!pageId) {
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
}
if (!labelName) {
return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
}
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
if (!pageIdValidation.isValid) {
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const encodedLabel = encodeURIComponent(labelName.trim())
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`
const response = await fetch(url, {
method: 'DELETE',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage =
errorData?.message || `Failed to delete Confluence label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
return NextResponse.json({
pageId,
labelName,
deleted: true,
})
} catch (error) {
logger.error('Error deleting Confluence label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -1,103 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluencePagesByLabelAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const labelId = searchParams.get('labelId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '50'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!labelId) {
return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
}
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const pages = (data.results || []).map((page: any) => ({
id: page.id,
title: page.title,
status: page.status ?? null,
spaceId: page.spaceId ?? null,
parentId: page.parentId ?? null,
authorId: page.authorId ?? null,
createdAt: page.createdAt ?? null,
version: page.version ?? null,
}))
return NextResponse.json({
pages,
labelId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error getting pages by label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -1,98 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluenceSpaceLabelsAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const spaceId = searchParams.get('spaceId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '25'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!spaceId) {
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
}
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
if (!spaceIdValidation.isValid) {
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const labels = (data.results || []).map((label: any) => ({
id: label.id,
name: label.name,
prefix: label.prefix || 'global',
}))
return NextResponse.json({
labels,
spaceId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error listing space labels:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -29,7 +29,7 @@ const patchBodySchema = z
    description: z
      .string()
      .trim()
-      .max(2000, 'Description must be 2000 characters or less')
+      .max(500, 'Description must be 500 characters or less')
      .nullable()
      .optional(),
    isActive: z.literal(true).optional(), // Set to true to activate this version

View File

@@ -12,7 +12,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
-import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
+import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'

@@ -700,29 +700,17 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
  let isStreamClosed = false

-  const eventWriter = createExecutionEventWriter(executionId)
-  setExecutionMeta(executionId, {
-    status: 'active',
-    userId: actorUserId,
-    workflowId,
-  }).catch(() => {})
-
  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
-      let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null
      const sendEvent = (event: ExecutionEvent) => {
-        if (!isStreamClosed) {
-          try {
-            controller.enqueue(encodeSSEEvent(event))
-          } catch {
-            isStreamClosed = true
-          }
-        }
-        if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
-          eventWriter.write(event).catch(() => {})
-        }
+        if (isStreamClosed) return
+        try {
+          controller.enqueue(encodeSSEEvent(event))
+        } catch {
+          isStreamClosed = true
+        }
      }

      try {
        const startTime = new Date()

@@ -841,12 +829,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
        const reader = streamingExec.stream.getReader()
        const decoder = new TextDecoder()
+        let chunkCount = 0

        try {
          while (true) {
            const { done, value } = await reader.read()
            if (done) break
+            chunkCount++
            const chunk = decoder.decode(value, { stream: true })
            sendEvent({
              type: 'stream:chunk',

@@ -961,7 +951,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
              duration: result.metadata?.duration || 0,
            },
          })
-          finalMetaStatus = 'error'
        } else {
          logger.info(`[${requestId}] Workflow execution was cancelled`)

@@ -974,7 +963,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
              duration: result.metadata?.duration || 0,
            },
          })
-          finalMetaStatus = 'cancelled'
        }
        return
      }

@@ -998,7 +986,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            endTime: result.metadata?.endTime || new Date().toISOString(),
          },
        })
-        finalMetaStatus = 'complete'
      } catch (error: unknown) {
        const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
        const errorMessage = isTimeout

@@ -1030,18 +1017,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            duration: executionResult?.metadata?.duration || 0,
          },
        })
-        finalMetaStatus = 'error'
      } finally {
-        try {
-          await eventWriter.close()
-        } catch (closeError) {
-          logger.warn(`[${requestId}] Failed to close event writer`, {
-            error: closeError instanceof Error ? closeError.message : String(closeError),
-          })
-        }
-        if (finalMetaStatus) {
-          setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
-        }
        timeoutController.cleanup()
        if (executionId) {
          await cleanupExecutionBase64Cache(executionId)

@@ -1056,7 +1032,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    },
    cancel() {
      isStreamClosed = true
-      logger.info(`[${requestId}] Client disconnected from SSE stream`)
+      timeoutController.cleanup()
+      logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
+      timeoutController.abort()
+      markExecutionCancelled(executionId).catch(() => {})
    },
  })

View File

@@ -1,170 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
type ExecutionStreamStatus,
getExecutionMeta,
readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
const logger = createLogger('ExecutionStreamReconnectAPI')
const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes
function isTerminalStatus(status: ExecutionStreamStatus): boolean {
return status === 'complete' || status === 'error' || status === 'cancelled'
}
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
export async function GET(
req: NextRequest,
{ params }: { params: Promise<{ id: string; executionId: string }> }
) {
const { id: workflowId, executionId } = await params
try {
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
workflowId,
userId: auth.userId,
action: 'read',
})
if (!workflowAuthorization.allowed) {
return NextResponse.json(
{ error: workflowAuthorization.message || 'Access denied' },
{ status: workflowAuthorization.status }
)
}
const meta = await getExecutionMeta(executionId)
if (!meta) {
return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
}
if (meta.workflowId && meta.workflowId !== workflowId) {
return NextResponse.json(
{ error: 'Execution does not belong to this workflow' },
{ status: 403 }
)
}
const fromParam = req.nextUrl.searchParams.get('from')
const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0
logger.info('Reconnection stream requested', {
workflowId,
executionId,
fromEventId,
metaStatus: meta.status,
})
const encoder = new TextEncoder()
let closed = false
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
let lastEventId = fromEventId
const pollDeadline = Date.now() + MAX_POLL_DURATION_MS
const enqueue = (text: string) => {
if (closed) return
try {
controller.enqueue(encoder.encode(text))
} catch {
closed = true
}
}
try {
const events = await readExecutionEvents(executionId, lastEventId)
for (const entry of events) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const currentMeta = await getExecutionMeta(executionId)
if (!currentMeta || isTerminalStatus(currentMeta.status)) {
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
while (!closed && Date.now() < pollDeadline) {
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
if (closed) return
const newEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of newEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const polledMeta = await getExecutionMeta(executionId)
if (!polledMeta || isTerminalStatus(polledMeta.status)) {
const finalEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of finalEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
}
if (!closed) {
logger.warn('Reconnection stream poll deadline reached', { executionId })
enqueue('data: [DONE]\n\n')
controller.close()
}
} catch (error) {
logger.error('Error in reconnection stream', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
if (!closed) {
try {
controller.close()
} catch {}
}
}
},
cancel() {
closed = true
logger.info('Client disconnected from reconnection stream', { executionId })
},
})
return new NextResponse(stream, {
headers: {
...SSE_HEADERS,
'X-Execution-Id': executionId,
},
})
} catch (error: any) {
logger.error('Failed to start reconnection stream', {
workflowId,
executionId,
error: error.message,
})
return NextResponse.json(
{ error: error.message || 'Failed to start reconnection stream' },
{ status: 500 }
)
}
}

View File

@@ -1,12 +1,14 @@
import { db } from '@sim/db'
-import { environment, workspaceEnvironment } from '@sim/db/schema'
+import { workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
-import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
+import { encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
+import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment'
+import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('WorkspaceEnvironmentAPI')

@@ -44,44 +46,10 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

-    // Workspace env (encrypted)
-    const wsEnvRow = await db
-      .select()
-      .from(workspaceEnvironment)
-      .where(eq(workspaceEnvironment.workspaceId, workspaceId))
-      .limit(1)
-    const wsEncrypted: Record<string, string> = (wsEnvRow[0]?.variables as any) || {}
-
-    // Personal env (encrypted)
-    const personalRow = await db
-      .select()
-      .from(environment)
-      .where(eq(environment.userId, userId))
-      .limit(1)
-    const personalEncrypted: Record<string, string> = (personalRow[0]?.variables as any) || {}
-
-    // Decrypt both for UI
-    const decryptAll = async (src: Record<string, string>) => {
-      const out: Record<string, string> = {}
-      for (const [k, v] of Object.entries(src)) {
-        try {
-          const { decrypted } = await decryptSecret(v)
-          out[k] = decrypted
-        } catch {
-          out[k] = ''
-        }
-      }
-      return out
-    }
-
-    const [workspaceDecrypted, personalDecrypted] = await Promise.all([
-      decryptAll(wsEncrypted),
-      decryptAll(personalEncrypted),
-    ])
-
-    const conflicts = Object.keys(personalDecrypted).filter((k) => k in workspaceDecrypted)
+    const { workspaceDecrypted, personalDecrypted, conflicts } = await getPersonalAndWorkspaceEnv(
+      userId,
+      workspaceId
+    )

    return NextResponse.json(
      {

@@ -156,6 +124,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
      set: { variables: merged, updatedAt: new Date() },
    })

+    await syncWorkspaceEnvCredentials({
+      workspaceId,
+      envKeys: Object.keys(merged),
+      actingUserId: userId,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error: any) {
    logger.error(`[${requestId}] Workspace env PUT error`, error)

@@ -222,6 +196,12 @@ export async function DELETE(
      set: { variables: current, updatedAt: new Date() },
    })

+    await syncWorkspaceEnvCredentials({
+      workspaceId,
+      envKeys: Object.keys(current),
+      actingUserId: userId,
+    })
+
    return NextResponse.json({ success: true })
  } catch (error: any) {
    logger.error(`[${requestId}] Workspace env DELETE error`, error)

View File

@@ -13,6 +13,9 @@ export type CommandId =
  | 'goto-logs'
  | 'open-search'
  | 'run-workflow'
+  | 'focus-copilot-tab'
+  | 'focus-toolbar-tab'
+  | 'focus-editor-tab'
  | 'clear-terminal-console'
  | 'focus-toolbar-search'
  | 'clear-notifications'

@@ -72,6 +75,21 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
    shortcut: 'Mod+Enter',
    allowInEditable: false,
  },
+  'focus-copilot-tab': {
+    id: 'focus-copilot-tab',
+    shortcut: 'C',
+    allowInEditable: false,
+  },
+  'focus-toolbar-tab': {
+    id: 'focus-toolbar-tab',
+    shortcut: 'T',
+    allowInEditable: false,
+  },
+  'focus-editor-tab': {
+    id: 'focus-editor-tab',
+    shortcut: 'E',
+    allowInEditable: false,
+  },
  'clear-terminal-console': {
    id: 'clear-terminal-console',
    shortcut: 'Mod+D',

View File

@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
          className='min-h-[120px] resize-none'
          value={description}
          onChange={(e) => setDescription(e.target.value)}
-          maxLength={2000}
+          maxLength={500}
          disabled={isGenerating}
        />
        <div className='flex items-center justify-between'>

@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
            </p>
          )}
          {!updateMutation.error && !generateMutation.error && <div />}
-          <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
+          <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
        </div>
      </ModalBody>
      <ModalFooter>

View File

@@ -57,21 +57,6 @@ export function useChangeDetection({
        }
      }

-      if (block.triggerMode) {
-        const triggerConfigValue = blockSubValues?.triggerConfig
-        if (
-          triggerConfigValue &&
-          typeof triggerConfigValue === 'object' &&
-          !subBlocks.triggerConfig
-        ) {
-          subBlocks.triggerConfig = {
-            id: 'triggerConfig',
-            type: 'short-input',
-            value: triggerConfigValue,
-          }
-        }
-      }
-
      blocksWithSubBlocks[blockId] = {
        ...block,
        subBlocks,

View File

@@ -1,10 +1,7 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
-import { runPreDeployChecks } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-predeploy-checks'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-import { mergeSubblockState } from '@/stores/workflows/utils'
-import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('useDeployment')

@@ -38,24 +35,6 @@ export function useDeployment({
      return { success: true, shouldOpenModal: true }
    }

-    const { blocks, edges, loops, parallels } = useWorkflowStore.getState()
-    const liveBlocks = mergeSubblockState(blocks, workflowId)
-    const checkResult = runPreDeployChecks({
-      blocks: liveBlocks,
-      edges,
-      loops,
-      parallels,
-      workflowId,
-    })
-    if (!checkResult.passed) {
-      addNotification({
-        level: 'error',
-        message: checkResult.error || 'Pre-deploy validation failed',
-        workflowId,
-      })
-      return { success: false, shouldOpenModal: false }
-    }
-
    setIsDeploying(true)
    try {
      const response = await fetch(`/api/workflows/${workflowId}/deploy`, {

View File

@@ -30,6 +30,7 @@ export interface OAuthRequiredModalProps {
  requiredScopes?: string[]
  serviceId: string
  newScopes?: string[]
+  onConnect?: () => Promise<void> | void
}

const SCOPE_DESCRIPTIONS: Record<string, string> = {

@@ -314,6 +315,7 @@ export function OAuthRequiredModal({
  requiredScopes = [],
  serviceId,
  newScopes = [],
+  onConnect,
}: OAuthRequiredModalProps) {
  const [error, setError] = useState<string | null>(null)
  const { baseProvider } = parseProvider(provider)

@@ -359,6 +361,12 @@ export function OAuthRequiredModal({
    setError(null)

    try {
+      if (onConnect) {
+        await onConnect()
+        onClose()
+        return
+      }
+
      const providerId = getProviderIdFromServiceId(serviceId)
      logger.info('Linking OAuth2:', {

View File

@@ -3,10 +3,12 @@
import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import { ExternalLink, Users } from 'lucide-react'
+import { useParams } from 'next/navigation'
import { Button, Combobox } from '@/components/emcn/components'
import { getSubscriptionStatus } from '@/lib/billing/client'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { getPollingProviderFromOAuth } from '@/lib/credential-sets/providers'
+import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
import {
  getCanonicalScopesForProvider,
  getProviderIdFromServiceId,

@@ -18,9 +20,9 @@ import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
-import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
+import { CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSets } from '@/hooks/queries/credential-sets'
-import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
+import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import { useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'

@@ -46,6 +48,8 @@ export function CredentialSelector({
  previewValue,
  previewContextValues,
}: CredentialSelectorProps) {
+  const params = useParams()
+  const workspaceId = (params?.workspaceId as string) || ''
  const [showOAuthModal, setShowOAuthModal] = useState(false)
  const [editingValue, setEditingValue] = useState('')
  const [isEditing, setIsEditing] = useState(false)

@@ -96,53 +100,32 @@ export function CredentialSelector({
    data: credentials = [],
    isFetching: credentialsLoading,
    refetch: refetchCredentials,
-  } = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
+  } = useOAuthCredentials(effectiveProviderId, {
+    enabled: Boolean(effectiveProviderId),
+    workspaceId,
+    workflowId: activeWorkflowId || undefined,
+  })

  const selectedCredential = useMemo(
    () => credentials.find((cred) => cred.id === selectedId),
    [credentials, selectedId]
  )

-  const shouldFetchForeignMeta =
-    Boolean(selectedId) &&
-    !selectedCredential &&
-    Boolean(activeWorkflowId) &&
-    Boolean(effectiveProviderId)
-  const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
-    useOAuthCredentialDetail(
-      shouldFetchForeignMeta ? selectedId : undefined,
-      activeWorkflowId || undefined,
-      shouldFetchForeignMeta
-    )
-  const hasForeignMeta = foreignCredentials.length > 0
-  const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
-
  const selectedCredentialSet = useMemo(
    () => credentialSets.find((cs) => cs.id === selectedCredentialSetId),
    [credentialSets, selectedCredentialSetId]
  )
-  const isForeignCredentialSet = Boolean(isCredentialSetSelected && !selectedCredentialSet)

  const resolvedLabel = useMemo(() => {
    if (selectedCredentialSet) return selectedCredentialSet.name
-    if (isForeignCredentialSet) return CREDENTIAL.FOREIGN_LABEL
    if (selectedCredential) return selectedCredential.name
-    if (isForeign) return CREDENTIAL.FOREIGN_LABEL
    return ''
-  }, [selectedCredentialSet, isForeignCredentialSet, selectedCredential, isForeign])
+  }, [selectedCredentialSet, selectedCredential])

  const displayValue = isEditing ? editingValue : resolvedLabel

  const invalidSelection =
-    !isPreview &&
-    Boolean(selectedId) &&
-    !selectedCredential &&
-    !hasForeignMeta &&
-    !credentialsLoading &&
-    !foreignMetaLoading
+    !isPreview && Boolean(selectedId) && !selectedCredential && !credentialsLoading

  useEffect(() => {
    if (!invalidSelection) return

@@ -153,7 +136,7 @@ export function CredentialSelector({
    setStoreValue('')
  }, [invalidSelection, selectedId, effectiveProviderId, setStoreValue])

-  useCredentialRefreshTriggers(refetchCredentials)
+  useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)

  const handleOpenChange = useCallback(
    (isOpen: boolean) => {

@@ -195,8 +178,18 @@ export function CredentialSelector({
  )

  const handleAddCredential = useCallback(() => {
-    setShowOAuthModal(true)
-  }, [])
+    writePendingCredentialCreateRequest({
+      workspaceId,
+      type: 'oauth',
+      providerId: effectiveProviderId,
+      displayName: '',
+      serviceId,
+      requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
+      requestedAt: Date.now(),
+    })
+    window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
+  }, [workspaceId, effectiveProviderId, serviceId])

  const getProviderIcon = useCallback((providerName: OAuthProvider) => {
    const { baseProvider } = parseProvider(providerName)

@@ -251,23 +244,18 @@ export function CredentialSelector({
        label: cred.name,
        value: cred.id,
      }))
+      credentialItems.push({
+        label:
+          credentials.length > 0
+            ? `Connect another ${getProviderName(provider)} account`
+            : `Connect ${getProviderName(provider)} account`,
+        value: '__connect_account__',
+      })

-      if (credentialItems.length > 0) {
      groups.push({
        section: 'Personal Credential',
        items: credentialItems,
      })
-      } else {
-        groups.push({
-          section: 'Personal Credential',
-          items: [
-            {
-              label: `Connect ${getProviderName(provider)} account`,
-              value: '__connect_account__',
-            },
-          ],
-        })
-      }

      return { comboboxOptions: [], comboboxGroups: groups }
    }

@@ -277,12 +265,13 @@ export function CredentialSelector({
      value: cred.id,
    }))

-    if (credentials.length === 0) {
    options.push({
-      label: `Connect ${getProviderName(provider)} account`,
+      label:
+        credentials.length > 0
+          ? `Connect another ${getProviderName(provider)} account`
+          : `Connect ${getProviderName(provider)} account`,
      value: '__connect_account__',
    })
-    }

    return { comboboxOptions: options, comboboxGroups: undefined }
  }, [

@@ -368,7 +357,7 @@ export function CredentialSelector({
            }
            disabled={effectiveDisabled}
            editable={true}
-            filterOptions={!isForeign && !isForeignCredentialSet}
+            filterOptions={true}
            isLoading={credentialsLoading}
            overlayContent={overlayContent}
            className={selectedId || isCredentialSetSelected ? 'pl-[28px]' : ''}

@@ -380,7 +369,6 @@ export function CredentialSelector({
              <span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
              Additional permissions required
            </div>
-            {!isForeign && (
            <Button
              variant='active'
              onClick={() => setShowOAuthModal(true)}

@@ -388,7 +376,6 @@ export function CredentialSelector({
            >
              Update access
            </Button>
-            )}
          </div>
        )}

@@ -407,7 +394,11 @@ export function CredentialSelector({
  )
}

-function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
+function useCredentialRefreshTriggers(
+  refetchCredentials: () => Promise<unknown>,
+  providerId: string,
+  workspaceId: string
+) {
  useEffect(() => {
    const refresh = () => {
      void refetchCredentials()

@@ -425,12 +416,29 @@ function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>
      }
    }

+    const handleCredentialsUpdated = (
+      event: CustomEvent<{ providerId?: string; workspaceId?: string }>
+    ) => {
+      if (event.detail?.providerId && event.detail.providerId !== providerId) {
+        return
+      }
+      if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
+        return
+      }
+      refresh()
+    }
+
    document.addEventListener('visibilitychange', handleVisibilityChange)
    window.addEventListener('pageshow', handlePageShow)
+    window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)

    return () => {
      document.removeEventListener('visibilitychange', handleVisibilityChange)
      window.removeEventListener('pageshow', handlePageShow)
+      window.removeEventListener(
+        'oauth-credentials-updated',
+        handleCredentialsUpdated as EventListener
+      )
    }
-  }, [refetchCredentials])
+  }, [providerId, workspaceId, refetchCredentials])
}
} }

View File

@@ -9,6 +9,7 @@ import {
 PopoverSection,
 } from '@/components/emcn'
 import { cn } from '@/lib/core/utils/cn'
+import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
 import {
 usePersonalEnvironment,
 useWorkspaceEnvironment,
@@ -168,7 +169,15 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
 }, [searchTerm])
 const openEnvironmentSettings = () => {
-window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'environment' } }))
+if (workspaceId) {
+writePendingCredentialCreateRequest({
+workspaceId,
+type: 'env_personal',
+envKey: searchTerm.trim(),
+requestedAt: Date.now(),
+})
+}
+window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
 onClose?.()
 }
@@ -302,7 +311,7 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
 }}
 >
 <Plus className='h-3 w-3' />
-<span>Create environment variable</span>
+<span>Create Secret</span>
 </PopoverItem>
 </PopoverScrollArea>
 ) : (
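`writePendingCredentialCreateRequest` is called here with an `env_personal` payload and, in the tool credential selector later in this diff, with an `oauth` payload. As a reading aid, the sketch below shows a request shape inferred purely from those two call sites; the actual type exported by `@/lib/credentials/client-state` is not included in this diff and may differ.

```ts
// Inferred from the two writePendingCredentialCreateRequest call sites in this diff;
// not the actual definition in '@/lib/credentials/client-state'.
type PendingCredentialCreateRequest =
  | {
      workspaceId: string
      type: 'env_personal'
      envKey: string
      requestedAt: number
    }
  | {
      workspaceId: string
      type: 'oauth'
      providerId: string
      displayName: string
      serviceId?: string
      requiredScopes: string[]
      requestedAt: number
    }
```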

View File

@@ -7,7 +7,6 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility' import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox' import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate' import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils' import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import { getBlock } from '@/blocks/registry' import { getBlock } from '@/blocks/registry'
@@ -125,8 +124,6 @@ export function FileSelectorInput({
const serviceId = subBlock.serviceId || '' const serviceId = subBlock.serviceId || ''
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId]) const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
const selectorResolution = useMemo<SelectorResolution | null>(() => { const selectorResolution = useMemo<SelectorResolution | null>(() => {
return resolveSelectorForSubBlock(subBlock, { return resolveSelectorForSubBlock(subBlock, {
workflowId: workflowIdFromUrl, workflowId: workflowIdFromUrl,
@@ -168,7 +165,6 @@ export function FileSelectorInput({
const disabledReason = const disabledReason =
finalDisabled || finalDisabled ||
isForeignCredential ||
missingCredential || missingCredential ||
missingDomain || missingDomain ||
missingProject || missingProject ||

View File

@@ -4,7 +4,6 @@ import { useCallback, useEffect, useMemo, useState } from 'react'
import { getProviderIdFromServiceId } from '@/lib/oauth' import { getProviderIdFromServiceId } from '@/lib/oauth'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox' import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate' import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils' import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import type { SubBlockConfig } from '@/blocks/types' import type { SubBlockConfig } from '@/blocks/types'
@@ -47,10 +46,6 @@ export function FolderSelectorInput({
subBlock.canonicalParamId === 'copyDestinationId' || subBlock.canonicalParamId === 'copyDestinationId' ||
subBlock.id === 'copyDestinationFolder' || subBlock.id === 'copyDestinationFolder' ||
subBlock.id === 'manualCopyDestinationFolder' subBlock.id === 'manualCopyDestinationFolder'
const { isForeignCredential } = useForeignCredential(
effectiveProviderId,
(connectedCredential as string) || ''
)
// Central dependsOn gating // Central dependsOn gating
const { finalDisabled } = useDependsOnGate(blockId, subBlock, { const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
@@ -119,9 +114,7 @@ export function FolderSelectorInput({
selectorContext={ selectorContext={
selectorResolution?.context ?? { credentialId, workflowId: activeWorkflowId || '' } selectorResolution?.context ?? { credentialId, workflowId: activeWorkflowId || '' }
} }
disabled={ disabled={finalDisabled || missingCredential || !selectorResolution?.key}
finalDisabled || isForeignCredential || missingCredential || !selectorResolution?.key
}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue ?? null} previewValue={previewValue ?? null}
placeholder={subBlock.placeholder || 'Select folder'} placeholder={subBlock.placeholder || 'Select folder'}

View File

@@ -7,7 +7,6 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility' import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox' import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate' import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils' import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import { getBlock } from '@/blocks/registry' import { getBlock } from '@/blocks/registry'
@@ -73,11 +72,6 @@ export function ProjectSelectorInput({
const serviceId = subBlock.serviceId || '' const serviceId = subBlock.serviceId || ''
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId]) const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
const { isForeignCredential } = useForeignCredential(
effectiveProviderId,
(connectedCredential as string) || ''
)
const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || '' const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || ''
const { finalDisabled } = useDependsOnGate(blockId, subBlock, { const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
disabled, disabled,
@@ -123,7 +117,7 @@ export function ProjectSelectorInput({
subBlock={subBlock} subBlock={subBlock}
selectorKey={selectorResolution.key} selectorKey={selectorResolution.key}
selectorContext={selectorResolution.context} selectorContext={selectorResolution.context}
disabled={finalDisabled || isForeignCredential || missingCredential} disabled={finalDisabled || missingCredential}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue ?? null} previewValue={previewValue ?? null}
placeholder={subBlock.placeholder || 'Select project'} placeholder={subBlock.placeholder || 'Select project'}

View File

@@ -7,7 +7,6 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility' import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox' import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate' import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils' import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import { getBlock } from '@/blocks/registry' import { getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types' import type { SubBlockConfig } from '@/blocks/types'
@@ -87,8 +86,6 @@ export function SheetSelectorInput({
const serviceId = subBlock.serviceId || '' const serviceId = subBlock.serviceId || ''
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId]) const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
const selectorResolution = useMemo<SelectorResolution | null>(() => { const selectorResolution = useMemo<SelectorResolution | null>(() => {
return resolveSelectorForSubBlock(subBlock, { return resolveSelectorForSubBlock(subBlock, {
workflowId: workflowIdFromUrl, workflowId: workflowIdFromUrl,
@@ -101,11 +98,7 @@ export function SheetSelectorInput({
const missingSpreadsheet = !normalizedSpreadsheetId const missingSpreadsheet = !normalizedSpreadsheetId
const disabledReason = const disabledReason =
finalDisabled || finalDisabled || missingCredential || missingSpreadsheet || !selectorResolution?.key
isForeignCredential ||
missingCredential ||
missingSpreadsheet ||
!selectorResolution?.key
if (!selectorResolution?.key) { if (!selectorResolution?.key) {
return ( return (

View File

@@ -6,7 +6,6 @@ import { Tooltip } from '@/components/emcn'
import { getProviderIdFromServiceId } from '@/lib/oauth' import { getProviderIdFromServiceId } from '@/lib/oauth'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox' import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate' import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value' import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils' import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import type { SubBlockConfig } from '@/blocks/types' import type { SubBlockConfig } from '@/blocks/types'
@@ -85,11 +84,6 @@ export function SlackSelectorInput({
? (effectiveBotToken as string) || '' ? (effectiveBotToken as string) || ''
: (effectiveCredential as string) || '' : (effectiveCredential as string) || ''
const { isForeignCredential } = useForeignCredential(
effectiveProviderId,
(effectiveAuthMethod as string) === 'bot_token' ? '' : (effectiveCredential as string) || ''
)
useEffect(() => { useEffect(() => {
const val = isPreview && previewValue !== undefined ? previewValue : storeValue const val = isPreview && previewValue !== undefined ? previewValue : storeValue
if (typeof val === 'string') { if (typeof val === 'string') {
@@ -99,7 +93,7 @@ export function SlackSelectorInput({
const requiresCredential = dependsOn.includes('credential') const requiresCredential = dependsOn.includes('credential')
const missingCredential = !credential || credential.trim().length === 0 const missingCredential = !credential || credential.trim().length === 0
const shouldForceDisable = requiresCredential && (missingCredential || isForeignCredential) const shouldForceDisable = requiresCredential && missingCredential
const context: SelectorContext = useMemo( const context: SelectorContext = useMemo(
() => ({ () => ({
@@ -136,7 +130,7 @@ export function SlackSelectorInput({
subBlock={subBlock} subBlock={subBlock}
selectorKey={config.selectorKey} selectorKey={config.selectorKey}
selectorContext={context} selectorContext={context}
disabled={finalDisabled || shouldForceDisable || isForeignCredential} disabled={finalDisabled || shouldForceDisable}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue ?? null} previewValue={previewValue ?? null}
placeholder={subBlock.placeholder || config.placeholder} placeholder={subBlock.placeholder || config.placeholder}

View File

@@ -1,6 +1,8 @@
 import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
 import { ExternalLink } from 'lucide-react'
+import { useParams } from 'next/navigation'
 import { Button, Combobox } from '@/components/emcn/components'
+import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
 import {
 getCanonicalScopesForProvider,
 getProviderIdFromServiceId,
@@ -10,8 +12,7 @@ import {
 parseProvider,
 } from '@/lib/oauth'
 import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
-import { CREDENTIAL } from '@/executor/constants'
-import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
+import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
 import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -54,10 +55,12 @@ export function ToolCredentialSelector({
 onChange,
 provider,
 requiredScopes = [],
-label = 'Select account',
+label = 'Select credential',
 serviceId,
 disabled = false,
 }: ToolCredentialSelectorProps) {
+const params = useParams()
+const workspaceId = (params?.workspaceId as string) || ''
 const [showOAuthModal, setShowOAuthModal] = useState(false)
 const [editingInputValue, setEditingInputValue] = useState('')
 const [isEditing, setIsEditing] = useState(false)
@@ -71,50 +74,32 @@ export function ToolCredentialSelector({
 data: credentials = [],
 isFetching: credentialsLoading,
 refetch: refetchCredentials,
-} = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
+} = useOAuthCredentials(effectiveProviderId, {
+enabled: Boolean(effectiveProviderId),
+workspaceId,
+workflowId: activeWorkflowId || undefined,
+})
 const selectedCredential = useMemo(
 () => credentials.find((cred) => cred.id === selectedId),
 [credentials, selectedId]
 )
-const shouldFetchForeignMeta =
-Boolean(selectedId) &&
-!selectedCredential &&
-Boolean(activeWorkflowId) &&
-Boolean(effectiveProviderId)
-const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
-useOAuthCredentialDetail(
-shouldFetchForeignMeta ? selectedId : undefined,
-activeWorkflowId || undefined,
-shouldFetchForeignMeta
-)
-const hasForeignMeta = foreignCredentials.length > 0
-const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
 const resolvedLabel = useMemo(() => {
 if (selectedCredential) return selectedCredential.name
-if (isForeign) return CREDENTIAL.FOREIGN_LABEL
 return ''
-}, [selectedCredential, isForeign])
+}, [selectedCredential])
 const inputValue = isEditing ? editingInputValue : resolvedLabel
-const invalidSelection =
-Boolean(selectedId) &&
-!selectedCredential &&
-!hasForeignMeta &&
-!credentialsLoading &&
-!foreignMetaLoading
+const invalidSelection = Boolean(selectedId) && !selectedCredential && !credentialsLoading
 useEffect(() => {
 if (!invalidSelection) return
 onChange('')
 }, [invalidSelection, onChange])
-useCredentialRefreshTriggers(refetchCredentials)
+useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
 const handleOpenChange = useCallback(
 (isOpen: boolean) => {
@@ -142,8 +127,18 @@
 )
 const handleAddCredential = useCallback(() => {
-setShowOAuthModal(true)
-}, [])
+writePendingCredentialCreateRequest({
+workspaceId,
+type: 'oauth',
+providerId: effectiveProviderId,
+displayName: '',
+serviceId,
+requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
+requestedAt: Date.now(),
+})
+window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
+}, [workspaceId, effectiveProviderId, serviceId])
 const comboboxOptions = useMemo(() => {
 const options = credentials.map((cred) => ({
@@ -151,12 +146,13 @@
 value: cred.id,
 }))
-if (credentials.length === 0) {
 options.push({
-label: `Connect ${getProviderName(provider)} account`,
+label:
+credentials.length > 0
+? `Connect another ${getProviderName(provider)} account`
+: `Connect ${getProviderName(provider)} account`,
 value: '__connect_account__',
 })
-}
 return options
 }, [credentials, provider])
@@ -206,7 +202,7 @@
 placeholder={label}
 disabled={disabled}
 editable={true}
-filterOptions={!isForeign}
+filterOptions={true}
 isLoading={credentialsLoading}
 overlayContent={overlayContent}
 className={selectedId ? 'pl-[28px]' : ''}
@@ -218,7 +214,6 @@
 <span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
 Additional permissions required
 </div>
-{!isForeign && (
 <Button
 variant='active'
 onClick={() => setShowOAuthModal(true)}
@@ -226,7 +221,6 @@
 >
 Update access
 </Button>
-)}
 </div>
 )}
@@ -245,7 +239,11 @@
 )
 }
-function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
+function useCredentialRefreshTriggers(
+refetchCredentials: () => Promise<unknown>,
+providerId: string,
+workspaceId: string
+) {
 useEffect(() => {
 const refresh = () => {
 void refetchCredentials()
@@ -263,12 +261,29 @@ function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>
 }
 }
+const handleCredentialsUpdated = (
+event: CustomEvent<{ providerId?: string; workspaceId?: string }>
+) => {
+if (event.detail?.providerId && event.detail.providerId !== providerId) {
+return
+}
+if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
+return
+}
+refresh()
+}
 document.addEventListener('visibilitychange', handleVisibilityChange)
 window.addEventListener('pageshow', handlePageShow)
+window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)
 return () => {
 document.removeEventListener('visibilitychange', handleVisibilityChange)
 window.removeEventListener('pageshow', handlePageShow)
+window.removeEventListener(
+'oauth-credentials-updated',
+handleCredentialsUpdated as EventListener
+)
 }
-}, [refetchCredentials])
+}, [providerId, workspaceId, refetchCredentials])
 }

View File

@@ -1,186 +0,0 @@
'use client'
import type React from 'react'
import { useRef, useState } from 'react'
import { ArrowLeftRight, ArrowUp } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { WandControlHandlers } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
/**
* Props for a generic parameter with label component
*/
export interface ParameterWithLabelProps {
paramId: string
title: string
isRequired: boolean
visibility: string
wandConfig?: {
enabled: boolean
prompt?: string
placeholder?: string
}
canonicalToggle?: {
mode: 'basic' | 'advanced'
disabled?: boolean
onToggle?: () => void
}
disabled: boolean
isPreview: boolean
children: (wandControlRef: React.MutableRefObject<WandControlHandlers | null>) => React.ReactNode
}
/**
* Generic wrapper component for parameters that manages wand state and renders label + input
*/
export function ParameterWithLabel({
paramId,
title,
isRequired,
visibility,
wandConfig,
canonicalToggle,
disabled,
isPreview,
children,
}: ParameterWithLabelProps) {
const [isSearchActive, setIsSearchActive] = useState(false)
const [searchQuery, setSearchQuery] = useState('')
const searchInputRef = useRef<HTMLInputElement>(null)
const wandControlRef = useRef<WandControlHandlers | null>(null)
const isWandEnabled = wandConfig?.enabled ?? false
const showWand = isWandEnabled && !isPreview && !disabled
const handleSearchClick = (): void => {
setIsSearchActive(true)
setTimeout(() => {
searchInputRef.current?.focus()
}, 0)
}
const handleSearchBlur = (): void => {
if (!searchQuery.trim() && !wandControlRef.current?.isWandStreaming) {
setIsSearchActive(false)
}
}
const handleSearchChange = (value: string): void => {
setSearchQuery(value)
}
const handleSearchSubmit = (): void => {
if (searchQuery.trim() && wandControlRef.current) {
wandControlRef.current.onWandTrigger(searchQuery)
setSearchQuery('')
setIsSearchActive(false)
}
}
const handleSearchCancel = (): void => {
setSearchQuery('')
setIsSearchActive(false)
}
const isStreaming = wandControlRef.current?.isWandStreaming ?? false
return (
<div key={paramId} className='relative min-w-0 space-y-[6px]'>
<div className='flex items-center justify-between gap-[6px] pl-[2px]'>
<Label className='flex items-baseline gap-[6px] whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
{title}
{isRequired && visibility === 'user-only' && <span className='ml-0.5'>*</span>}
</Label>
<div className='flex min-w-0 flex-1 items-center justify-end gap-[6px]'>
{showWand &&
(!isSearchActive ? (
<Button
variant='active'
className='-my-1 h-5 px-2 py-0 text-[11px]'
onClick={handleSearchClick}
>
Generate
</Button>
) : (
<div className='-my-1 flex min-w-[120px] max-w-[280px] flex-1 items-center gap-[4px]'>
<Input
ref={searchInputRef}
value={isStreaming ? 'Generating...' : searchQuery}
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
handleSearchChange(e.target.value)
}
onBlur={(e: React.FocusEvent<HTMLInputElement>) => {
const relatedTarget = e.relatedTarget as HTMLElement | null
if (relatedTarget?.closest('button')) return
handleSearchBlur()
}}
onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => {
if (e.key === 'Enter' && searchQuery.trim() && !isStreaming) {
handleSearchSubmit()
} else if (e.key === 'Escape') {
handleSearchCancel()
}
}}
disabled={isStreaming}
className={cn(
'h-5 min-w-[80px] flex-1 text-[11px]',
isStreaming && 'text-muted-foreground'
)}
placeholder='Generate with AI...'
/>
<Button
variant='tertiary'
disabled={!searchQuery.trim() || isStreaming}
onMouseDown={(e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
}}
onClick={(e: React.MouseEvent) => {
e.stopPropagation()
handleSearchSubmit()
}}
className='h-[20px] w-[20px] flex-shrink-0 p-0'
>
<ArrowUp className='h-[12px] w-[12px]' />
</Button>
</div>
))}
{canonicalToggle && !isPreview && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
onClick={canonicalToggle.onToggle}
disabled={canonicalToggle.disabled || disabled}
aria-label={
canonicalToggle.mode === 'advanced'
? 'Switch to selector'
: 'Switch to manual ID'
}
>
<ArrowLeftRight
className={cn(
'!h-[12px] !w-[12px]',
canonicalToggle.mode === 'advanced'
? 'text-[var(--text-primary)]'
: 'text-[var(--text-secondary)]'
)}
/>
</button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>
{canonicalToggle.mode === 'advanced'
? 'Switch to selector'
: 'Switch to manual ID'}
</p>
</Tooltip.Content>
</Tooltip.Root>
)}
</div>
</div>
<div className='relative w-full min-w-0'>{children(wandControlRef)}</div>
</div>
)
}

View File

@@ -1,100 +0,0 @@
'use client'
import { useEffect, useRef } from 'react'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
interface ToolSubBlockRendererProps {
blockId: string
subBlockId: string
toolIndex: number
subBlock: BlockSubBlockConfig
effectiveParamId: string
toolParams: Record<string, string> | undefined
onParamChange: (toolIndex: number, paramId: string, value: string) => void
disabled: boolean
canonicalToggle?: {
mode: 'basic' | 'advanced'
disabled?: boolean
onToggle?: () => void
}
}
/**
* Bridges the subblock store with StoredTool.params via a synthetic store key,
* then delegates all rendering to SubBlock for full parity.
*
* Two effects handle bidirectional sync:
* - tool.params → store (external changes)
* - store → tool.params (user interaction)
*/
export function ToolSubBlockRenderer({
blockId,
subBlockId,
toolIndex,
subBlock,
effectiveParamId,
toolParams,
onParamChange,
disabled,
canonicalToggle,
}: ToolSubBlockRendererProps) {
const syntheticId = `${subBlockId}-tool-${toolIndex}-${effectiveParamId}`
const [storeValue, setStoreValue] = useSubBlockValue(blockId, syntheticId)
const toolParamValue = toolParams?.[effectiveParamId] ?? ''
/** Tracks the last value we pushed to the store from tool.params to avoid echo loops */
const lastPushedToStoreRef = useRef<string | null>(null)
/** Tracks the last value we synced back to tool.params from the store */
const lastPushedToParamsRef = useRef<string | null>(null)
// Sync tool.params → store: push when the prop value changes (including first mount)
useEffect(() => {
if (!toolParamValue && lastPushedToStoreRef.current === null) {
// Skip initializing the store with an empty value on first mount —
// let the SubBlock component use its own default.
lastPushedToStoreRef.current = toolParamValue
lastPushedToParamsRef.current = toolParamValue
return
}
if (toolParamValue !== lastPushedToStoreRef.current) {
lastPushedToStoreRef.current = toolParamValue
lastPushedToParamsRef.current = toolParamValue
setStoreValue(toolParamValue)
}
}, [toolParamValue, setStoreValue])
// Sync store → tool.params: push when the user changes the value via SubBlock
useEffect(() => {
if (storeValue == null) return
const stringValue = typeof storeValue === 'string' ? storeValue : JSON.stringify(storeValue)
if (stringValue !== lastPushedToParamsRef.current) {
lastPushedToParamsRef.current = stringValue
lastPushedToStoreRef.current = stringValue
onParamChange(toolIndex, effectiveParamId, stringValue)
}
}, [storeValue, toolIndex, effectiveParamId, onParamChange])
// Suppress SubBlock's "*" required indicator when the LLM can fill the param
const visibility = subBlock.paramVisibility ?? 'user-or-llm'
const isOptionalForUser = visibility !== 'user-only'
const config = {
...subBlock,
id: syntheticId,
...(isOptionalForUser && { required: false }),
}
return (
<SubBlock
blockId={blockId}
config={config}
isPreview={false}
disabled={disabled}
canonicalToggle={canonicalToggle}
dependencyContext={toolParams}
/>
)
}

View File

@@ -2,12 +2,37 @@
 * @vitest-environment node
 */
 import { describe, expect, it } from 'vitest'
-import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
-import {
-isCustomToolAlreadySelected,
-isMcpToolAlreadySelected,
-isWorkflowAlreadySelected,
-} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/utils'
+interface StoredTool {
+type: string
+title?: string
+toolId?: string
+params?: Record<string, string>
+customToolId?: string
+schema?: any
+code?: string
+operation?: string
+usageControl?: 'auto' | 'force' | 'none'
+}
+const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
+return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
+}
+const isCustomToolAlreadySelected = (
+selectedTools: StoredTool[],
+customToolId: string
+): boolean => {
+return selectedTools.some(
+(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
+)
+}
+const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
+return selectedTools.some(
+(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
+)
+}
 describe('isMcpToolAlreadySelected', () => {
 describe('basic functionality', () => {

View File

@@ -1,31 +0,0 @@
/**
* Represents a tool selected and configured in the workflow
*
* @remarks
* For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded.
* Everything else (title, schema, code) is loaded dynamically from the database.
* Legacy custom tools with inline schema/code are still supported for backwards compatibility.
*/
export interface StoredTool {
/** Block type identifier */
type: string
/** Display title for the tool (optional for new custom tool format) */
title?: string
/** Direct tool ID for execution (optional for new custom tool format) */
toolId?: string
/** Parameter values configured by the user (optional for new custom tool format) */
params?: Record<string, string>
/** Whether the tool details are expanded in UI */
isExpanded?: boolean
/** Database ID for custom tools (new format - reference only) */
customToolId?: string
/** Tool schema for custom tools (legacy format - inline JSON schema) */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
schema?: Record<string, any>
/** Implementation code for custom tools (legacy format - inline) */
code?: string
/** Selected operation for multi-operation tools */
operation?: string
/** Tool usage control mode for LLM */
usageControl?: 'auto' | 'force' | 'none'
}

View File

@@ -1,32 +0,0 @@
import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
/**
* Checks if an MCP tool is already selected.
*/
export function isMcpToolAlreadySelected(selectedTools: StoredTool[], mcpToolId: string): boolean {
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}
/**
* Checks if a custom tool is already selected.
*/
export function isCustomToolAlreadySelected(
selectedTools: StoredTool[],
customToolId: string
): boolean {
return selectedTools.some(
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
)
}
/**
* Checks if a workflow is already selected.
*/
export function isWorkflowAlreadySelected(
selectedTools: StoredTool[],
workflowId: string
): boolean {
return selectedTools.some(
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
)
}

View File

@@ -1,50 +0,0 @@
import { useEffect, useMemo, useState } from 'react'
export function useForeignCredential(
provider: string | undefined,
credentialId: string | undefined
) {
const [isForeign, setIsForeign] = useState<boolean>(false)
const [loading, setLoading] = useState<boolean>(false)
const [error, setError] = useState<string | null>(null)
const normalizedProvider = useMemo(() => (provider || '').toString(), [provider])
const normalizedCredentialId = useMemo(() => credentialId || '', [credentialId])
useEffect(() => {
let cancelled = false
async function check() {
setLoading(true)
setError(null)
try {
if (!normalizedProvider || !normalizedCredentialId) {
if (!cancelled) setIsForeign(false)
return
}
const res = await fetch(
`/api/auth/oauth/credentials?provider=${encodeURIComponent(normalizedProvider)}`
)
if (!res.ok) {
if (!cancelled) setIsForeign(true)
return
}
const data = await res.json()
const isOwn = (data.credentials || []).some((c: any) => c.id === normalizedCredentialId)
if (!cancelled) setIsForeign(!isOwn)
} catch (e) {
if (!cancelled) {
setIsForeign(true)
setError((e as Error).message)
}
} finally {
if (!cancelled) setLoading(false)
}
}
void check()
return () => {
cancelled = true
}
}, [normalizedProvider, normalizedCredentialId])
return { isForeignCredential: isForeign, loading, error }
}

View File

@@ -3,6 +3,7 @@ import { isEqual } from 'lodash'
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react' import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn/components' import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
import { import {
CheckboxList, CheckboxList,
Code, Code,
@@ -68,15 +69,13 @@ interface SubBlockProps {
isPreview?: boolean isPreview?: boolean
subBlockValues?: Record<string, any> subBlockValues?: Record<string, any>
disabled?: boolean disabled?: boolean
fieldDiffStatus?: FieldDiffStatus
allowExpandInPreview?: boolean allowExpandInPreview?: boolean
canonicalToggle?: { canonicalToggle?: {
mode: 'basic' | 'advanced' mode: 'basic' | 'advanced'
disabled?: boolean disabled?: boolean
onToggle?: () => void onToggle?: () => void
} }
labelSuffix?: React.ReactNode
/** Provides sibling values for dependency resolution in non-preview contexts (e.g. tool-input) */
dependencyContext?: Record<string, unknown>
} }
/** /**
@@ -163,14 +162,16 @@ const getPreviewValue = (
/** /**
* Renders the label with optional validation and description tooltips. * Renders the label with optional validation and description tooltips.
* *
* @remarks
* Handles JSON validation indicators for code blocks and required field markers.
* Includes inline AI generate button when wand is enabled.
*
* @param config - The sub-block configuration defining the label content * @param config - The sub-block configuration defining the label content
* @param isValidJson - Whether the JSON content is valid (for code blocks) * @param isValidJson - Whether the JSON content is valid (for code blocks)
* @param subBlockValues - Current values of all subblocks for evaluating conditional requirements * @param subBlockValues - Current values of all subblocks for evaluating conditional requirements
* @param wandState - State and handlers for the inline AI generate feature * @param wandState - Optional state and handlers for the AI wand feature
* @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle * @param canonicalToggle - Optional canonical toggle metadata and handlers
* @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled (includes dependsOn gating) * @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled
* @param copyState - State and handler for the copy-to-clipboard button
* @param labelSuffix - Additional content rendered after the label text
* @returns The label JSX element, or `null` for switch types or when no title is defined * @returns The label JSX element, or `null` for switch types or when no title is defined
*/ */
const renderLabel = ( const renderLabel = (
@@ -201,8 +202,7 @@ const renderLabel = (
showCopyButton: boolean showCopyButton: boolean
copied: boolean copied: boolean
onCopy: () => void onCopy: () => void
}, }
labelSuffix?: React.ReactNode
): JSX.Element | null => { ): JSX.Element | null => {
if (config.type === 'switch') return null if (config.type === 'switch') return null
if (!config.title) return null if (!config.title) return null
@@ -215,10 +215,9 @@ const renderLabel = (
return ( return (
<div className='flex items-center justify-between gap-[6px] pl-[2px]'> <div className='flex items-center justify-between gap-[6px] pl-[2px]'>
<Label className='flex items-baseline gap-[6px] whitespace-nowrap'> <Label className='flex items-center gap-[6px] whitespace-nowrap'>
{config.title} {config.title}
{required && <span className='ml-0.5'>*</span>} {required && <span className='ml-0.5'>*</span>}
{labelSuffix}
{config.type === 'code' && {config.type === 'code' &&
config.language === 'json' && config.language === 'json' &&
!isValidJson && !isValidJson &&
@@ -384,25 +383,28 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
prevProps.isPreview === nextProps.isPreview && prevProps.isPreview === nextProps.isPreview &&
valueEqual && valueEqual &&
prevProps.disabled === nextProps.disabled && prevProps.disabled === nextProps.disabled &&
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview && prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
canonicalToggleEqual && canonicalToggleEqual
prevProps.labelSuffix === nextProps.labelSuffix &&
prevProps.dependencyContext === nextProps.dependencyContext
) )
} }
/** /**
* Renders a single workflow sub-block input based on config.type. * Renders a single workflow sub-block input based on config.type.
* *
* @remarks
* Supports multiple input types including short-input, long-input, dropdown,
* combobox, slider, table, code, switch, tool-input, and many more.
* Handles preview mode, disabled states, and AI wand generation.
*
* @param blockId - The parent block identifier * @param blockId - The parent block identifier
* @param config - Configuration defining the input type and properties * @param config - Configuration defining the input type and properties
* @param isPreview - Whether to render in preview mode * @param isPreview - Whether to render in preview mode
* @param subBlockValues - Current values of all subblocks * @param subBlockValues - Current values of all subblocks
* @param disabled - Whether the input is disabled * @param disabled - Whether the input is disabled
* @param fieldDiffStatus - Optional diff status for visual indicators
* @param allowExpandInPreview - Whether to allow expanding in preview mode * @param allowExpandInPreview - Whether to allow expanding in preview mode
* @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle * @returns The rendered sub-block input component
* @param labelSuffix - Additional content rendered after the label text
* @param dependencyContext - Sibling values for dependency resolution in non-preview contexts (e.g. tool-input)
*/ */
function SubBlockComponent({ function SubBlockComponent({
blockId, blockId,
@@ -410,10 +412,9 @@ function SubBlockComponent({
isPreview = false, isPreview = false,
subBlockValues, subBlockValues,
disabled = false, disabled = false,
fieldDiffStatus,
allowExpandInPreview, allowExpandInPreview,
canonicalToggle, canonicalToggle,
labelSuffix,
dependencyContext,
}: SubBlockProps): JSX.Element { }: SubBlockProps): JSX.Element {
const [isValidJson, setIsValidJson] = useState(true) const [isValidJson, setIsValidJson] = useState(true)
const [isSearchActive, setIsSearchActive] = useState(false) const [isSearchActive, setIsSearchActive] = useState(false)
@@ -422,6 +423,7 @@ function SubBlockComponent({
const searchInputRef = useRef<HTMLInputElement>(null) const searchInputRef = useRef<HTMLInputElement>(null)
const wandControlRef = useRef<WandControlHandlers | null>(null) const wandControlRef = useRef<WandControlHandlers | null>(null)
// Use webhook management hook when config has useWebhookUrl enabled
const webhookManagement = useWebhookManagement({ const webhookManagement = useWebhookManagement({
blockId, blockId,
triggerId: undefined, triggerId: undefined,
@@ -508,12 +510,10 @@ function SubBlockComponent({
| null | null
| undefined | undefined
const contextValues = dependencyContext ?? (isPreview ? subBlockValues : undefined)
const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, { const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, {
disabled, disabled,
isPreview, isPreview,
previewContextValues: contextValues, previewContextValues: isPreview ? subBlockValues : undefined,
}) })
const isDisabled = gatedDisabled const isDisabled = gatedDisabled
@@ -797,7 +797,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue} previewValue={previewValue}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -809,7 +809,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue} previewValue={previewValue}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -821,7 +821,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue} previewValue={previewValue}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -833,7 +833,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue} previewValue={previewValue}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -845,7 +845,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue} previewValue={previewValue}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -868,7 +868,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue as any} previewValue={previewValue as any}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -880,7 +880,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue as any} previewValue={previewValue as any}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -892,7 +892,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue as any} previewValue={previewValue as any}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -917,7 +917,7 @@ function SubBlockComponent({
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue as any} previewValue={previewValue as any}
disabled={isDisabled} disabled={isDisabled}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -953,7 +953,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue} previewValue={previewValue}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -987,7 +987,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue as any} previewValue={previewValue as any}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -999,7 +999,7 @@ function SubBlockComponent({
disabled={isDisabled} disabled={isDisabled}
isPreview={isPreview} isPreview={isPreview}
previewValue={previewValue} previewValue={previewValue}
previewContextValues={contextValues} previewContextValues={isPreview ? subBlockValues : undefined}
/> />
) )
@@ -1059,8 +1059,7 @@ function SubBlockComponent({
showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl), showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
copied, copied,
onCopy: handleCopy, onCopy: handleCopy,
}, }
labelSuffix
)} )}
{renderInput()} {renderInput()}
</div> </div>

View File

@@ -571,6 +571,7 @@ export function Editor() {
isPreview={false} isPreview={false}
subBlockValues={subBlockState} subBlockValues={subBlockState}
disabled={!canEditBlock} disabled={!canEditBlock}
fieldDiffStatus={undefined}
allowExpandInPreview={false} allowExpandInPreview={false}
canonicalToggle={ canonicalToggle={
isCanonicalSwap && canonicalMode && canonicalId isCanonicalSwap && canonicalMode && canonicalId
@@ -634,6 +635,7 @@ export function Editor() {
isPreview={false} isPreview={false}
subBlockValues={subBlockState} subBlockValues={subBlockState}
disabled={!canEditBlock} disabled={!canEditBlock}
fieldDiffStatus={undefined}
allowExpandInPreview={false} allowExpandInPreview={false}
/> />
{index < advancedOnlySubBlocks.length - 1 && ( {index < advancedOnlySubBlocks.length - 1 && (

View File

@@ -340,7 +340,13 @@ export const Panel = memo(function Panel() {
* Register global keyboard shortcuts using the central commands registry. * Register global keyboard shortcuts using the central commands registry.
* *
* - Mod+Enter: Run / cancel workflow (matches the Run button behavior) * - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
* - C: Focus Copilot tab
* - T: Focus Toolbar tab
* - E: Focus Editor tab
* - Mod+F: Focus Toolbar tab and search input * - Mod+F: Focus Toolbar tab and search input
*
* The tab-switching commands are disabled inside editable elements so typing
* in inputs or textareas is not interrupted.
*/ */
useRegisterGlobalCommands(() => useRegisterGlobalCommands(() =>
createCommands([ createCommands([
@@ -357,6 +363,33 @@ export const Panel = memo(function Panel() {
allowInEditable: false, allowInEditable: false,
}, },
}, },
{
id: 'focus-copilot-tab',
handler: () => {
setActiveTab('copilot')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-toolbar-tab',
handler: () => {
setActiveTab('toolbar')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-editor-tab',
handler: () => {
setActiveTab('editor')
},
overrides: {
allowInEditable: false,
},
},
{ {
id: 'focus-toolbar-search', id: 'focus-toolbar-search',
handler: () => { handler: () => {
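The new commands only register handlers by id; the keys themselves (`C`, `T`, `E` per the doc comment above) are resolved by the central commands registry, which is not part of this diff. A purely illustrative mapping, with names assumed rather than taken from the repository:

```ts
// Illustrative only: how the command ids registered above might map to single-key
// shortcuts in a central registry. The registry's real shape is not shown in this diff.
const panelTabShortcuts = [
  { id: 'focus-copilot-tab', key: 'c' },
  { id: 'focus-toolbar-tab', key: 't' },
  { id: 'focus-editor-tab', key: 'e' },
] as const
```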

View File

@@ -1,4 +1,4 @@
-import { useCallback, useEffect, useRef, useState } from 'react'
+import { useCallback, useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
 import { useQueryClient } from '@tanstack/react-query'
 import { v4 as uuidv4 } from 'uuid'
@@ -46,13 +46,7 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
 const logger = createLogger('useWorkflowExecution')
-/**
-* Module-level Set tracking which workflows have an active reconnection effect.
-* Prevents multiple hook instances (from different components) from starting
-* concurrent reconnection streams for the same workflow during the same mount cycle.
-*/
-const activeReconnections = new Set<string>()
+// Debug state validation result
 interface DebugValidationResult {
 isValid: boolean
 error?: string
@@ -60,7 +54,7 @@ interface DebugValidationResult {
 interface BlockEventHandlerConfig {
 workflowId?: string
-executionIdRef: { current: string }
+executionId?: string
 workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
 activeBlocksSet: Set<string>
 accumulatedBlockLogs: BlockLog[]
@@ -114,15 +108,12 @@ export function useWorkflowExecution() {
 const queryClient = useQueryClient()
 const currentWorkflow = useCurrentWorkflow()
 const { activeWorkflowId, workflows } = useWorkflowRegistry()
-const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
+const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
 useTerminalConsoleStore()
-const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
 const { getAllVariables } = useEnvironmentStore()
 const { getVariablesByWorkflowId, variables } = useVariablesStore()
 const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
 useCurrentWorkflowExecution()
-const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
-const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
 const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
 const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
 const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -306,7 +297,7 @@ export function useWorkflowExecution() {
 (config: BlockEventHandlerConfig) => {
 const {
 workflowId,
-executionIdRef,
+executionId,
 workflowEdges,
 activeBlocksSet,
 accumulatedBlockLogs,
@@ -317,14 +308,6 @@
 onBlockCompleteCallback,
 } = config
-/** Returns true if this execution was cancelled or superseded by another run. */
-const isStaleExecution = () =>
-!!(
-workflowId &&
-executionIdRef.current &&
-useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
-)
 const updateActiveBlocks = (blockId: string, isActive: boolean) => {
 if (!workflowId) return
 if (isActive) {
@@ -377,7 +360,7 @@
 endedAt: data.endedAt,
 workflowId,
 blockId: data.blockId,
-executionId: executionIdRef.current,
+executionId,
 blockName: data.blockName || 'Unknown Block',
 blockType: data.blockType || 'unknown',
 iterationCurrent: data.iterationCurrent,
@@ -400,7 +383,7 @@
 endedAt: data.endedAt,
 workflowId,
 blockId: data.blockId,
-executionId: executionIdRef.current,
+executionId,
 blockName: data.blockName || 'Unknown Block',
 blockType: data.blockType || 'unknown',
 iterationCurrent: data.iterationCurrent,
@@ -427,7 +410,7 @@
 iterationType: data.iterationType,
 iterationContainerId: data.iterationContainerId,
 },
-executionIdRef.current
+executionId
 )
 }
@@ -449,12 +432,11 @@
 iterationType: data.iterationType,
 iterationContainerId: data.iterationContainerId,
 },
-executionIdRef.current
+executionId
 )
 }
 const onBlockStarted = (data: BlockStartedData) => {
-if (isStaleExecution()) return
 updateActiveBlocks(data.blockId, true)
 markIncomingEdges(data.blockId)
@@ -471,7 +453,7 @@
 endedAt: undefined,
 workflowId,
 blockId: data.blockId,
-executionId: executionIdRef.current,
+executionId,
 blockName: data.blockName || 'Unknown Block',
 blockType: data.blockType || 'unknown',
 isRunning: true,
@@ -483,7 +465,6 @@
 }
 const onBlockCompleted = (data: BlockCompletedData) => {
-if (isStaleExecution()) return
 updateActiveBlocks(data.blockId, false)
 if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
@@ -514,7 +495,6 @@
 }
 const onBlockError = (data: BlockErrorData) => {
-if (isStaleExecution()) return
 updateActiveBlocks(data.blockId, false)
 if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
@@ -922,6 +902,10 @@
 // Update block logs with actual stream completion times
 if (result.logs && streamCompletionTimes.size > 0) {
+const streamCompletionEndTime = new Date(
+Math.max(...Array.from(streamCompletionTimes.values()))
+).toISOString()
 result.logs.forEach((log: BlockLog) => {
 if (streamCompletionTimes.has(log.blockId)) {
const completionTime = streamCompletionTimes.get(log.blockId)! const completionTime = streamCompletionTimes.get(log.blockId)!
@@ -1003,6 +987,7 @@ export function useWorkflowExecution() {
return { success: true, stream } return { success: true, stream }
} }
// For manual (non-chat) execution
const manualExecutionId = uuidv4() const manualExecutionId = uuidv4()
try { try {
const result = await executeWorkflow( const result = await executeWorkflow(
@@ -1017,10 +1002,29 @@ export function useWorkflowExecution() {
if (result.metadata.pendingBlocks) { if (result.metadata.pendingBlocks) {
setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks) setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
} }
} else if (result && 'success' in result) {
setExecutionResult(result)
// Reset execution state after successful non-debug execution
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
if (isChatExecution) {
if (!result.metadata) {
result.metadata = { duration: 0, startTime: new Date().toISOString() }
}
;(result.metadata as any).source = 'chat'
}
// Invalidate subscription queries to update usage
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
}, 1000)
} }
return result return result
} catch (error: any) { } catch (error: any) {
const errorResult = handleExecutionError(error, { executionId: manualExecutionId }) const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
// Note: Error logs are already persisted server-side via execution-core.ts
return errorResult return errorResult
} }
}, },
@@ -1271,7 +1275,7 @@ export function useWorkflowExecution() {
if (activeWorkflowId) { if (activeWorkflowId) {
logger.info('Using server-side executor') logger.info('Using server-side executor')
const executionIdRef = { current: '' } const executionId = uuidv4()
let executionResult: ExecutionResult = { let executionResult: ExecutionResult = {
success: false, success: false,
@@ -1289,7 +1293,7 @@ export function useWorkflowExecution() {
try { try {
const blockHandlers = buildBlockEventHandlers({ const blockHandlers = buildBlockEventHandlers({
workflowId: activeWorkflowId, workflowId: activeWorkflowId,
executionIdRef, executionId,
workflowEdges, workflowEdges,
activeBlocksSet, activeBlocksSet,
accumulatedBlockLogs, accumulatedBlockLogs,
@@ -1322,10 +1326,6 @@ export function useWorkflowExecution() {
loops: clientWorkflowState.loops, loops: clientWorkflowState.loops,
parallels: clientWorkflowState.parallels, parallels: clientWorkflowState.parallels,
}, },
onExecutionId: (id) => {
executionIdRef.current = id
setCurrentExecutionId(activeWorkflowId, id)
},
callbacks: { callbacks: {
onExecutionStarted: (data) => { onExecutionStarted: (data) => {
logger.info('Server execution started:', data) logger.info('Server execution started:', data)
@@ -1368,18 +1368,6 @@ export function useWorkflowExecution() {
}, },
onExecutionCompleted: (data) => { onExecutionCompleted: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
executionResult = { executionResult = {
success: data.success, success: data.success,
output: data.output, output: data.output,
@@ -1437,33 +1425,9 @@ export function useWorkflowExecution() {
}) })
} }
} }
const workflowExecState = activeWorkflowId
? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
: null
if (activeWorkflowId && !workflowExecState?.isDebugging) {
setExecutionResult(executionResult)
setIsExecuting(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
}, 1000)
}
}, },
onExecutionError: (data) => { onExecutionError: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
executionResult = { executionResult = {
success: false, success: false,
output: {}, output: {},
@@ -1477,53 +1441,43 @@ export function useWorkflowExecution() {
const isPreExecutionError = accumulatedBlockLogs.length === 0 const isPreExecutionError = accumulatedBlockLogs.length === 0
handleExecutionErrorConsole({ handleExecutionErrorConsole({
workflowId: activeWorkflowId, workflowId: activeWorkflowId,
executionId: executionIdRef.current, executionId,
error: data.error, error: data.error,
durationMs: data.duration, durationMs: data.duration,
blockLogs: accumulatedBlockLogs, blockLogs: accumulatedBlockLogs,
isPreExecutionError, isPreExecutionError,
}) })
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
}, },
onExecutionCancelled: (data) => { onExecutionCancelled: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
handleExecutionCancelledConsole({ handleExecutionCancelledConsole({
workflowId: activeWorkflowId, workflowId: activeWorkflowId,
executionId: executionIdRef.current, executionId,
durationMs: data?.duration, durationMs: data?.duration,
}) })
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
}, },
}, },
}) })
return executionResult return executionResult
} catch (error: any) { } catch (error: any) {
// Don't log abort errors - they're intentional user actions
if (error.name === 'AbortError' || error.message?.includes('aborted')) { if (error.name === 'AbortError' || error.message?.includes('aborted')) {
logger.info('Execution aborted by user') logger.info('Execution aborted by user')
return executionResult
// Reset execution state
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
// Return gracefully without error
return {
success: false,
output: {},
metadata: { duration: 0 },
logs: [],
}
} }
logger.error('Server-side execution failed:', error) logger.error('Server-side execution failed:', error)
@@ -1531,6 +1485,7 @@ export function useWorkflowExecution() {
} }
} }
// Fallback: should never reach here
throw new Error('Server-side execution is required') throw new Error('Server-side execution is required')
} }
@@ -1762,28 +1717,25 @@ export function useWorkflowExecution() {
* Handles cancelling the current workflow execution * Handles cancelling the current workflow execution
*/ */
const handleCancelExecution = useCallback(() => { const handleCancelExecution = useCallback(() => {
if (!activeWorkflowId) return
logger.info('Workflow execution cancellation requested') logger.info('Workflow execution cancellation requested')
const storedExecutionId = getCurrentExecutionId(activeWorkflowId) // Cancel the execution stream for this workflow (server-side)
executionStream.cancel(activeWorkflowId ?? undefined)
if (storedExecutionId) { // Mark current chat execution as superseded so its cleanup won't affect new executions
setCurrentExecutionId(activeWorkflowId, null)
fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
method: 'POST',
}).catch(() => {})
handleExecutionCancelledConsole({
workflowId: activeWorkflowId,
executionId: storedExecutionId,
})
}
executionStream.cancel(activeWorkflowId)
currentChatExecutionIdRef.current = null currentChatExecutionIdRef.current = null
// Mark all running entries as canceled in the terminal
if (activeWorkflowId) {
cancelRunningEntries(activeWorkflowId)
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
setIsExecuting(activeWorkflowId, false) setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false) setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set()) setActiveBlocks(activeWorkflowId, new Set())
}
// If in debug mode, also reset debug state
if (isDebugging) { if (isDebugging) {
resetDebugState() resetDebugState()
} }
@@ -1795,9 +1747,7 @@ export function useWorkflowExecution() {
setIsDebugging, setIsDebugging,
setActiveBlocks, setActiveBlocks,
activeWorkflowId, activeWorkflowId,
getCurrentExecutionId, cancelRunningEntries,
setCurrentExecutionId,
handleExecutionCancelledConsole,
]) ])
/** /**
@@ -1897,7 +1847,7 @@ export function useWorkflowExecution() {
} }
setIsExecuting(workflowId, true) setIsExecuting(workflowId, true)
const executionIdRef = { current: '' } const executionId = uuidv4()
const accumulatedBlockLogs: BlockLog[] = [] const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>() const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>() const executedBlockIds = new Set<string>()
@@ -1906,7 +1856,7 @@ export function useWorkflowExecution() {
try { try {
const blockHandlers = buildBlockEventHandlers({ const blockHandlers = buildBlockEventHandlers({
workflowId, workflowId,
executionIdRef, executionId,
workflowEdges, workflowEdges,
activeBlocksSet, activeBlocksSet,
accumulatedBlockLogs, accumulatedBlockLogs,
@@ -1921,10 +1871,6 @@ export function useWorkflowExecution() {
startBlockId: blockId, startBlockId: blockId,
sourceSnapshot: effectiveSnapshot, sourceSnapshot: effectiveSnapshot,
input: workflowInput, input: workflowInput,
onExecutionId: (id) => {
executionIdRef.current = id
setCurrentExecutionId(workflowId, id)
},
callbacks: { callbacks: {
onBlockStarted: blockHandlers.onBlockStarted, onBlockStarted: blockHandlers.onBlockStarted,
onBlockCompleted: blockHandlers.onBlockCompleted, onBlockCompleted: blockHandlers.onBlockCompleted,
@@ -1932,6 +1878,7 @@ export function useWorkflowExecution() {
onExecutionCompleted: (data) => { onExecutionCompleted: (data) => {
if (data.success) { if (data.success) {
// Add the start block (trigger) to executed blocks
executedBlockIds.add(blockId) executedBlockIds.add(blockId)
const mergedBlockStates: Record<string, BlockState> = { const mergedBlockStates: Record<string, BlockState> = {
@@ -1955,10 +1902,6 @@ export function useWorkflowExecution() {
} }
setLastExecutionSnapshot(workflowId, updatedSnapshot) setLastExecutionSnapshot(workflowId, updatedSnapshot)
} }
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
}, },
onExecutionError: (data) => { onExecutionError: (data) => {
@@ -1978,27 +1921,19 @@ export function useWorkflowExecution() {
handleExecutionErrorConsole({ handleExecutionErrorConsole({
workflowId, workflowId,
executionId: executionIdRef.current, executionId,
error: data.error, error: data.error,
durationMs: data.duration, durationMs: data.duration,
blockLogs: accumulatedBlockLogs, blockLogs: accumulatedBlockLogs,
}) })
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
}, },
onExecutionCancelled: (data) => { onExecutionCancelled: (data) => {
handleExecutionCancelledConsole({ handleExecutionCancelledConsole({
workflowId, workflowId,
executionId: executionIdRef.current, executionId,
durationMs: data?.duration, durationMs: data?.duration,
}) })
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
}, },
}, },
}) })
@@ -2007,20 +1942,14 @@ export function useWorkflowExecution() {
logger.error('Run-from-block failed:', error) logger.error('Run-from-block failed:', error)
} }
} finally { } finally {
const currentId = getCurrentExecutionId(workflowId)
if (currentId === null || currentId === executionIdRef.current) {
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false) setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set()) setActiveBlocks(workflowId, new Set())
} }
}
}, },
[ [
getLastExecutionSnapshot, getLastExecutionSnapshot,
setLastExecutionSnapshot, setLastExecutionSnapshot,
clearLastExecutionSnapshot, clearLastExecutionSnapshot,
getCurrentExecutionId,
setCurrentExecutionId,
setIsExecuting, setIsExecuting,
setActiveBlocks, setActiveBlocks,
setBlockRunStatus, setBlockRunStatus,
@@ -2050,213 +1979,29 @@ export function useWorkflowExecution() {
const executionId = uuidv4() const executionId = uuidv4()
try { try {
await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId) const result = await executeWorkflow(
undefined,
undefined,
executionId,
undefined,
'manual',
blockId
)
if (result && 'success' in result) {
setExecutionResult(result)
}
} catch (error) { } catch (error) {
const errorResult = handleExecutionError(error, { executionId }) const errorResult = handleExecutionError(error, { executionId })
return errorResult return errorResult
} finally { } finally {
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false) setIsExecuting(workflowId, false)
setIsDebugging(workflowId, false) setIsDebugging(workflowId, false)
setActiveBlocks(workflowId, new Set()) setActiveBlocks(workflowId, new Set())
} }
}, },
[ [activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
activeWorkflowId,
setCurrentExecutionId,
setExecutionResult,
setIsExecuting,
setIsDebugging,
setActiveBlocks,
]
) )
useEffect(() => {
if (!activeWorkflowId || !hasHydrated) return
const entries = useTerminalConsoleStore.getState().entries
const runningEntries = entries.filter(
(e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
)
if (runningEntries.length === 0) return
if (activeReconnections.has(activeWorkflowId)) return
activeReconnections.add(activeWorkflowId)
executionStream.cancel(activeWorkflowId)
const sorted = [...runningEntries].sort((a, b) => {
const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
return bTime - aTime
})
const executionId = sorted[0].executionId!
const otherExecutionIds = new Set(
sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
)
if (otherExecutionIds.size > 0) {
cancelRunningEntries(activeWorkflowId)
}
setCurrentExecutionId(activeWorkflowId, executionId)
setIsExecuting(activeWorkflowId, true)
const workflowEdges = useWorkflowStore.getState().edges
const activeBlocksSet = new Set<string>()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
const executionIdRef = { current: executionId }
const handlers = buildBlockEventHandlers({
workflowId: activeWorkflowId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
consoleMode: 'update',
includeStartConsoleEntry: true,
})
const originalEntries = entries
.filter((e) => e.executionId === executionId)
.map((e) => ({ ...e }))
let cleared = false
let reconnectionComplete = false
let cleanupRan = false
const clearOnce = () => {
if (!cleared) {
cleared = true
clearExecutionEntries(executionId)
}
}
const reconnectWorkflowId = activeWorkflowId
executionStream
.reconnect({
workflowId: reconnectWorkflowId,
executionId,
callbacks: {
onBlockStarted: (data) => {
clearOnce()
handlers.onBlockStarted(data)
},
onBlockCompleted: (data) => {
clearOnce()
handlers.onBlockCompleted(data)
},
onBlockError: (data) => {
clearOnce()
handlers.onBlockError(data)
},
onExecutionCompleted: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
},
onExecutionError: (data) => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionErrorConsole({
workflowId: reconnectWorkflowId,
executionId,
error: data.error,
blockLogs: accumulatedBlockLogs,
})
},
onExecutionCancelled: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionCancelledConsole({
workflowId: reconnectWorkflowId,
executionId,
})
},
},
})
.catch((error) => {
logger.warn('Execution reconnection failed', { executionId, error })
})
.finally(() => {
if (reconnectionComplete || cleanupRan) return
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) return
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole({
workflowId: entry.workflowId,
blockId: entry.blockId,
blockName: entry.blockName,
blockType: entry.blockType,
executionId: entry.executionId,
executionOrder: entry.executionOrder,
isRunning: false,
warning: 'Execution result unavailable — check the logs page',
})
}
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
})
return () => {
cleanupRan = true
executionStream.cancel(reconnectWorkflowId)
activeReconnections.delete(reconnectWorkflowId)
if (cleared && !reconnectionComplete) {
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole(entry)
}
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [activeWorkflowId, hasHydrated])
return { return {
isExecuting, isExecuting,
isDebugging, isDebugging,

View File

@@ -473,7 +473,7 @@ function ConnectionsSection({
</div> </div>
)} )}
{/* Environment Variables */} {/* Secrets */}
{envVars.length > 0 && ( {envVars.length > 0 && (
<div className='mb-[2px] last:mb-0'> <div className='mb-[2px] last:mb-0'>
<div <div
@@ -489,7 +489,7 @@ function ConnectionsSection({
'text-[var(--text-secondary)] group-hover:text-[var(--text-primary)]' 'text-[var(--text-secondary)] group-hover:text-[var(--text-primary)]'
)} )}
> >
Environment Variables Secrets
</span> </span>
<ChevronDownIcon <ChevronDownIcon
className={cn( className={cn(

View File

@@ -0,0 +1,17 @@
'use client'
import { CredentialsManager } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/credentials/credentials-manager'
interface CredentialsProps {
onOpenChange?: (open: boolean) => void
registerCloseHandler?: (handler: (open: boolean) => void) => void
registerBeforeLeaveHandler?: (handler: (onProceed: () => void) => void) => void
}
export function Credentials(_props: CredentialsProps) {
return (
<div className='h-full min-h-0'>
<CredentialsManager />
</div>
)
}

View File

@@ -134,7 +134,7 @@ function WorkspaceVariableRow({
<Trash /> <Trash />
</Button> </Button>
</Tooltip.Trigger> </Tooltip.Trigger>
<Tooltip.Content>Delete environment variable</Tooltip.Content> <Tooltip.Content>Delete secret</Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
</div> </div>
</div> </div>
@@ -637,7 +637,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
<Trash /> <Trash />
</Button> </Button>
</Tooltip.Trigger> </Tooltip.Trigger>
<Tooltip.Content>Delete environment variable</Tooltip.Content> <Tooltip.Content>Delete secret</Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
</div> </div>
</div> </div>
@@ -811,7 +811,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
filteredWorkspaceEntries.length === 0 && filteredWorkspaceEntries.length === 0 &&
(envVars.length > 0 || Object.keys(workspaceVars).length > 0) && ( (envVars.length > 0 || Object.keys(workspaceVars).length > 0) && (
<div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'> <div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
No environment variables found matching "{searchTerm}" No secrets found matching "{searchTerm}"
</div> </div>
)} )}
</> </>

View File

@@ -2,6 +2,7 @@ export { ApiKeys } from './api-keys/api-keys'
export { BYOK } from './byok/byok' export { BYOK } from './byok/byok'
export { Copilot } from './copilot/copilot' export { Copilot } from './copilot/copilot'
export { CredentialSets } from './credential-sets/credential-sets' export { CredentialSets } from './credential-sets/credential-sets'
export { Credentials } from './credentials/credentials'
export { CustomTools } from './custom-tools/custom-tools' export { CustomTools } from './custom-tools/custom-tools'
export { Debug } from './debug/debug' export { Debug } from './debug/debug'
export { EnvironmentVariables } from './environment/environment' export { EnvironmentVariables } from './environment/environment'

View File

@@ -20,7 +20,6 @@ import {
import { import {
Card, Card,
Connections, Connections,
FolderCode,
HexSimple, HexSimple,
Key, Key,
SModal, SModal,
@@ -45,12 +44,11 @@ import {
BYOK, BYOK,
Copilot, Copilot,
CredentialSets, CredentialSets,
Credentials,
CustomTools, CustomTools,
Debug, Debug,
EnvironmentVariables,
FileUploads, FileUploads,
General, General,
Integrations,
MCP, MCP,
Skills, Skills,
Subscription, Subscription,
@@ -80,6 +78,7 @@ interface SettingsModalProps {
type SettingsSection = type SettingsSection =
| 'general' | 'general'
| 'credentials'
| 'environment' | 'environment'
| 'template-profile' | 'template-profile'
| 'integrations' | 'integrations'
@@ -156,11 +155,10 @@ const allNavigationItems: NavigationItem[] = [
requiresHosted: true, requiresHosted: true,
requiresTeam: true, requiresTeam: true,
}, },
{ id: 'integrations', label: 'Integrations', icon: Connections, section: 'tools' }, { id: 'credentials', label: 'Credentials', icon: Connections, section: 'tools' },
{ id: 'custom-tools', label: 'Custom Tools', icon: Wrench, section: 'tools' }, { id: 'custom-tools', label: 'Custom Tools', icon: Wrench, section: 'tools' },
{ id: 'skills', label: 'Skills', icon: AgentSkillsIcon, section: 'tools' }, { id: 'skills', label: 'Skills', icon: AgentSkillsIcon, section: 'tools' },
{ id: 'mcp', label: 'MCP Tools', icon: McpIcon, section: 'tools' }, { id: 'mcp', label: 'MCP Tools', icon: McpIcon, section: 'tools' },
{ id: 'environment', label: 'Environment', icon: FolderCode, section: 'system' },
{ id: 'apikeys', label: 'API Keys', icon: Key, section: 'system' }, { id: 'apikeys', label: 'API Keys', icon: Key, section: 'system' },
{ id: 'workflow-mcp-servers', label: 'MCP Servers', icon: Server, section: 'system' }, { id: 'workflow-mcp-servers', label: 'MCP Servers', icon: Server, section: 'system' },
{ {
@@ -256,9 +254,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
if (item.id === 'apikeys' && permissionConfig.hideApiKeysTab) { if (item.id === 'apikeys' && permissionConfig.hideApiKeysTab) {
return false return false
} }
if (item.id === 'environment' && permissionConfig.hideEnvironmentTab) {
return false
}
if (item.id === 'files' && permissionConfig.hideFilesTab) { if (item.id === 'files' && permissionConfig.hideFilesTab) {
return false return false
} }
@@ -324,6 +319,9 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
if (!isBillingEnabled && (activeSection === 'subscription' || activeSection === 'team')) { if (!isBillingEnabled && (activeSection === 'subscription' || activeSection === 'team')) {
return 'general' return 'general'
} }
if (activeSection === 'environment' || activeSection === 'integrations') {
return 'credentials'
}
return activeSection return activeSection
}, [activeSection]) }, [activeSection])
@@ -342,7 +340,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
(sectionId: SettingsSection) => { (sectionId: SettingsSection) => {
if (sectionId === effectiveActiveSection) return if (sectionId === effectiveActiveSection) return
if (effectiveActiveSection === 'environment' && environmentBeforeLeaveHandler.current) { if (effectiveActiveSection === 'credentials' && environmentBeforeLeaveHandler.current) {
environmentBeforeLeaveHandler.current(() => setActiveSection(sectionId)) environmentBeforeLeaveHandler.current(() => setActiveSection(sectionId))
return return
} }
@@ -370,7 +368,11 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
useEffect(() => { useEffect(() => {
const handleOpenSettings = (event: CustomEvent<{ tab: SettingsSection }>) => { const handleOpenSettings = (event: CustomEvent<{ tab: SettingsSection }>) => {
if (event.detail.tab === 'environment' || event.detail.tab === 'integrations') {
setActiveSection('credentials')
} else {
setActiveSection(event.detail.tab) setActiveSection(event.detail.tab)
}
onOpenChange(true) onOpenChange(true)
} }
@@ -479,13 +481,19 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
const handleDialogOpenChange = (newOpen: boolean) => { const handleDialogOpenChange = (newOpen: boolean) => {
if ( if (
!newOpen && !newOpen &&
effectiveActiveSection === 'environment' && effectiveActiveSection === 'credentials' &&
environmentBeforeLeaveHandler.current environmentBeforeLeaveHandler.current
) { ) {
environmentBeforeLeaveHandler.current(() => onOpenChange(false)) environmentBeforeLeaveHandler.current(() => {
if (integrationsCloseHandler.current) {
integrationsCloseHandler.current(newOpen)
} else {
onOpenChange(false)
}
})
} else if ( } else if (
!newOpen && !newOpen &&
effectiveActiveSection === 'integrations' && effectiveActiveSection === 'credentials' &&
integrationsCloseHandler.current integrationsCloseHandler.current
) { ) {
integrationsCloseHandler.current(newOpen) integrationsCloseHandler.current(newOpen)
@@ -502,7 +510,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
</VisuallyHidden.Root> </VisuallyHidden.Root>
<VisuallyHidden.Root> <VisuallyHidden.Root>
<DialogPrimitive.Description> <DialogPrimitive.Description>
Configure your workspace settings, environment variables, integrations, and preferences Configure your workspace settings, credentials, and preferences
</DialogPrimitive.Description> </DialogPrimitive.Description>
</VisuallyHidden.Root> </VisuallyHidden.Root>
@@ -539,18 +547,14 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
</SModalMainHeader> </SModalMainHeader>
<SModalMainBody> <SModalMainBody>
{effectiveActiveSection === 'general' && <General onOpenChange={onOpenChange} />} {effectiveActiveSection === 'general' && <General onOpenChange={onOpenChange} />}
{effectiveActiveSection === 'environment' && ( {effectiveActiveSection === 'credentials' && (
<EnvironmentVariables <Credentials
onOpenChange={onOpenChange}
registerCloseHandler={registerIntegrationsCloseHandler}
registerBeforeLeaveHandler={registerEnvironmentBeforeLeaveHandler} registerBeforeLeaveHandler={registerEnvironmentBeforeLeaveHandler}
/> />
)} )}
{effectiveActiveSection === 'template-profile' && <TemplateProfile />} {effectiveActiveSection === 'template-profile' && <TemplateProfile />}
{effectiveActiveSection === 'integrations' && (
<Integrations
onOpenChange={onOpenChange}
registerCloseHandler={registerIntegrationsCloseHandler}
/>
)}
{effectiveActiveSection === 'credential-sets' && <CredentialSets />} {effectiveActiveSection === 'credential-sets' && <CredentialSets />}
{effectiveActiveSection === 'access-control' && <AccessControl />} {effectiveActiveSection === 'access-control' && <AccessControl />}
{effectiveActiveSection === 'apikeys' && <ApiKeys onOpenChange={onOpenChange} />} {effectiveActiveSection === 'apikeys' && <ApiKeys onOpenChange={onOpenChange} />}

View File

@@ -589,7 +589,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
export const scheduleExecution = task({ export const scheduleExecution = task({
id: 'schedule-execution', id: 'schedule-execution',
machine: 'medium-1x',
retry: { retry: {
maxAttempts: 1, maxAttempts: 1,
}, },

View File

@@ -669,7 +669,6 @@ async function executeWebhookJobInternal(
export const webhookExecution = task({ export const webhookExecution = task({
id: 'webhook-execution', id: 'webhook-execution',
machine: 'medium-1x',
retry: { retry: {
maxAttempts: 1, maxAttempts: 1,
}, },

View File

@@ -197,6 +197,5 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
export const workflowExecutionTask = task({ export const workflowExecutionTask = task({
id: 'workflow-execution', id: 'workflow-execution',
machine: 'medium-1x',
run: executeWorkflowJob, run: executeWorkflowJob,
}) })

View File

@@ -394,7 +394,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Page Property Operations // Page Property Operations
{ label: 'List Page Properties', id: 'list_page_properties' }, { label: 'List Page Properties', id: 'list_page_properties' },
{ label: 'Create Page Property', id: 'create_page_property' }, { label: 'Create Page Property', id: 'create_page_property' },
{ label: 'Delete Page Property', id: 'delete_page_property' },
// Search Operations // Search Operations
{ label: 'Search Content', id: 'search' }, { label: 'Search Content', id: 'search' },
{ label: 'Search in Space', id: 'search_in_space' }, { label: 'Search in Space', id: 'search_in_space' },
@@ -415,9 +414,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Operations // Label Operations
{ label: 'List Labels', id: 'list_labels' }, { label: 'List Labels', id: 'list_labels' },
{ label: 'Add Label', id: 'add_label' }, { label: 'Add Label', id: 'add_label' },
{ label: 'Delete Label', id: 'delete_label' },
{ label: 'Get Pages by Label', id: 'get_pages_by_label' },
{ label: 'List Space Labels', id: 'list_space_labels' },
// Space Operations // Space Operations
{ label: 'Get Space', id: 'get_space' }, { label: 'Get Space', id: 'get_space' },
{ label: 'List Spaces', id: 'list_spaces' }, { label: 'List Spaces', id: 'list_spaces' },
@@ -489,8 +485,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space', 'search_in_space',
'get_space', 'get_space',
'list_spaces', 'list_spaces',
'get_pages_by_label',
'list_space_labels',
], ],
not: true, not: true,
}, },
@@ -506,8 +500,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels', 'list_labels',
'upload_attachment', 'upload_attachment',
'add_label', 'add_label',
'delete_label',
'delete_page_property',
'get_page_children', 'get_page_children',
'get_page_ancestors', 'get_page_ancestors',
'list_page_versions', 'list_page_versions',
@@ -535,8 +527,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space', 'search_in_space',
'get_space', 'get_space',
'list_spaces', 'list_spaces',
'get_pages_by_label',
'list_space_labels',
], ],
not: true, not: true,
}, },
@@ -552,8 +542,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels', 'list_labels',
'upload_attachment', 'upload_attachment',
'add_label', 'add_label',
'delete_label',
'delete_page_property',
'get_page_children', 'get_page_children',
'get_page_ancestors', 'get_page_ancestors',
'list_page_versions', 'list_page_versions',
@@ -578,7 +566,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space', 'search_in_space',
'create_blogpost', 'create_blogpost',
'list_blogposts_in_space', 'list_blogposts_in_space',
'list_space_labels',
], ],
}, },
}, },
@@ -614,14 +601,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
required: true, required: true,
condition: { field: 'operation', value: 'create_page_property' }, condition: { field: 'operation', value: 'create_page_property' },
}, },
{
id: 'propertyId',
title: 'Property ID',
type: 'short-input',
placeholder: 'Enter property ID to delete',
required: true,
condition: { field: 'operation', value: 'delete_page_property' },
},
{ {
id: 'title', id: 'title',
title: 'Title', title: 'Title',
@@ -715,7 +694,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
type: 'short-input', type: 'short-input',
placeholder: 'Enter label name', placeholder: 'Enter label name',
required: true, required: true,
condition: { field: 'operation', value: ['add_label', 'delete_label'] }, condition: { field: 'operation', value: 'add_label' },
}, },
{ {
id: 'labelPrefix', id: 'labelPrefix',
@@ -730,14 +709,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
value: () => 'global', value: () => 'global',
condition: { field: 'operation', value: 'add_label' }, condition: { field: 'operation', value: 'add_label' },
}, },
{
id: 'labelId',
title: 'Label ID',
type: 'short-input',
placeholder: 'Enter label ID',
required: true,
condition: { field: 'operation', value: 'get_pages_by_label' },
},
{ {
id: 'blogPostStatus', id: 'blogPostStatus',
title: 'Status', title: 'Status',
@@ -788,8 +759,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions', 'list_page_versions',
'list_page_properties', 'list_page_properties',
'list_labels', 'list_labels',
'get_pages_by_label',
'list_space_labels',
], ],
}, },
}, },
@@ -811,8 +780,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions', 'list_page_versions',
'list_page_properties', 'list_page_properties',
'list_labels', 'list_labels',
'get_pages_by_label',
'list_space_labels',
], ],
}, },
}, },
@@ -833,7 +800,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Property Tools // Property Tools
'confluence_list_page_properties', 'confluence_list_page_properties',
'confluence_create_page_property', 'confluence_create_page_property',
'confluence_delete_page_property',
// Search Tools // Search Tools
'confluence_search', 'confluence_search',
'confluence_search_in_space', 'confluence_search_in_space',
@@ -854,9 +820,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Tools // Label Tools
'confluence_list_labels', 'confluence_list_labels',
'confluence_add_label', 'confluence_add_label',
'confluence_delete_label',
'confluence_get_pages_by_label',
'confluence_list_space_labels',
// Space Tools // Space Tools
'confluence_get_space', 'confluence_get_space',
'confluence_list_spaces', 'confluence_list_spaces',
@@ -889,8 +852,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_page_properties' return 'confluence_list_page_properties'
case 'create_page_property': case 'create_page_property':
return 'confluence_create_page_property' return 'confluence_create_page_property'
case 'delete_page_property':
return 'confluence_delete_page_property'
// Search Operations // Search Operations
case 'search': case 'search':
return 'confluence_search' return 'confluence_search'
@@ -926,12 +887,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_labels' return 'confluence_list_labels'
case 'add_label': case 'add_label':
return 'confluence_add_label' return 'confluence_add_label'
case 'delete_label':
return 'confluence_delete_label'
case 'get_pages_by_label':
return 'confluence_get_pages_by_label'
case 'list_space_labels':
return 'confluence_list_space_labels'
// Space Operations // Space Operations
case 'get_space': case 'get_space':
return 'confluence_get_space' return 'confluence_get_space'
@@ -953,9 +908,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
versionNumber, versionNumber,
propertyKey, propertyKey,
propertyValue, propertyValue,
propertyId,
labelPrefix, labelPrefix,
labelId,
blogPostStatus, blogPostStatus,
purge, purge,
bodyFormat, bodyFormat,
@@ -1006,9 +959,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
} }
} }
// Operations that support generic cursor pagination. // Operations that support cursor pagination
// get_pages_by_label and list_space_labels have dedicated handlers
// below that pass cursor along with their required params (labelId, spaceId).
const supportsCursor = [ const supportsCursor = [
'list_attachments', 'list_attachments',
'list_spaces', 'list_spaces',
@@ -1045,35 +996,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
} }
} }
if (operation === 'delete_page_property') {
return {
credential,
pageId: effectivePageId,
operation,
propertyId,
...rest,
}
}
if (operation === 'get_pages_by_label') {
return {
credential,
operation,
labelId,
cursor: cursor || undefined,
...rest,
}
}
if (operation === 'list_space_labels') {
return {
credential,
operation,
cursor: cursor || undefined,
...rest,
}
}
if (operation === 'upload_attachment') { if (operation === 'upload_attachment') {
const normalizedFile = normalizeFileInput(attachmentFile, { single: true }) const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
if (!normalizedFile) { if (!normalizedFile) {
@@ -1122,9 +1044,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' }, attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
attachmentComment: { type: 'string', description: 'Comment for the attachment' }, attachmentComment: { type: 'string', description: 'Comment for the attachment' },
labelName: { type: 'string', description: 'Label name' }, labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' }, labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
propertyId: { type: 'string', description: 'Property identifier' },
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' }, blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' }, purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
bodyFormat: { type: 'string', description: 'Body format for comments' }, bodyFormat: { type: 'string', description: 'Body format for comments' },
@@ -1160,7 +1080,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Results // Label Results
labels: { type: 'array', description: 'List of labels' }, labels: { type: 'array', description: 'List of labels' },
labelName: { type: 'string', description: 'Label name' }, labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
// Space Results // Space Results
spaces: { type: 'array', description: 'List of spaces' }, spaces: { type: 'array', description: 'List of spaces' },
spaceId: { type: 'string', description: 'Space identifier' }, spaceId: { type: 'string', description: 'Space identifier' },

View File

@@ -196,8 +196,6 @@ export interface SubBlockConfig {
type: SubBlockType type: SubBlockType
mode?: 'basic' | 'advanced' | 'both' | 'trigger' // Default is 'both' if not specified. 'trigger' means only shown in trigger mode mode?: 'basic' | 'advanced' | 'both' | 'trigger' // Default is 'both' if not specified. 'trigger' means only shown in trigger mode
canonicalParamId?: string canonicalParamId?: string
/** Controls parameter visibility in agent/tool-input context */
paramVisibility?: 'user-or-llm' | 'user-only' | 'llm-only' | 'hidden'
required?: required?:
| boolean | boolean
| { | {

View File

@@ -205,10 +205,6 @@ export const CREDENTIAL_SET = {
PREFIX: 'credentialSet:', PREFIX: 'credentialSet:',
} as const } as const
export const CREDENTIAL = {
FOREIGN_LABEL: 'Saved by collaborator',
} as const
export function isCredentialSetValue(value: string | null | undefined): boolean { export function isCredentialSetValue(value: string | null | undefined): boolean {
return typeof value === 'string' && value.startsWith(CREDENTIAL_SET.PREFIX) return typeof value === 'string' && value.startsWith(CREDENTIAL_SET.PREFIX)
} }

View File

@@ -0,0 +1,268 @@
'use client'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { environmentKeys } from '@/hooks/queries/environment'
import { fetchJson } from '@/hooks/selectors/helpers'
export type WorkspaceCredentialType = 'oauth' | 'env_workspace' | 'env_personal'
export type WorkspaceCredentialRole = 'admin' | 'member'
export type WorkspaceCredentialMemberStatus = 'active' | 'pending' | 'revoked'
export interface WorkspaceCredential {
id: string
workspaceId: string
type: WorkspaceCredentialType
displayName: string
providerId: string | null
accountId: string | null
envKey: string | null
envOwnerUserId: string | null
createdBy: string
createdAt: string
updatedAt: string
role?: WorkspaceCredentialRole
status?: WorkspaceCredentialMemberStatus
}
export interface WorkspaceCredentialMember {
id: string
userId: string
role: WorkspaceCredentialRole
status: WorkspaceCredentialMemberStatus
joinedAt: string | null
invitedBy: string | null
createdAt: string
updatedAt: string
userName: string | null
userEmail: string | null
userImage: string | null
}
interface CredentialListResponse {
credentials?: WorkspaceCredential[]
}
interface CredentialResponse {
credential?: WorkspaceCredential | null
}
interface MembersResponse {
members?: WorkspaceCredentialMember[]
}
export const workspaceCredentialKeys = {
all: ['workspaceCredentials'] as const,
list: (workspaceId?: string, type?: string, providerId?: string) =>
['workspaceCredentials', workspaceId ?? 'none', type ?? 'all', providerId ?? 'all'] as const,
detail: (credentialId?: string) =>
['workspaceCredentials', 'detail', credentialId ?? 'none'] as const,
members: (credentialId?: string) =>
['workspaceCredentials', 'detail', credentialId ?? 'none', 'members'] as const,
}
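Every key produced by this factory starts with the 'workspaceCredentials' prefix, so React Query's prefix matching makes invalidation flexible: invalidating the root key refreshes all list, detail, and member queries, while a longer prefix narrows the blast radius. A small sketch (the QueryClient instance is assumed to come from useQueryClient()):

import type { QueryClient } from '@tanstack/react-query'

// Broad: refetch everything credential-related (lists, details, members).
function invalidateAllCredentials(queryClient: QueryClient) {
  return queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
}

// Narrow: only the list queries for one workspace; prefix matching still covers
// every type/provider filter variant under that workspace.
function invalidateWorkspaceLists(queryClient: QueryClient, workspaceId: string) {
  return queryClient.invalidateQueries({ queryKey: ['workspaceCredentials', workspaceId] })
}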
export function useWorkspaceCredentials(params: {
workspaceId?: string
type?: WorkspaceCredentialType
providerId?: string
enabled?: boolean
}) {
const { workspaceId, type, providerId, enabled = true } = params
return useQuery<WorkspaceCredential[]>({
queryKey: workspaceCredentialKeys.list(workspaceId, type, providerId),
queryFn: async () => {
if (!workspaceId) return []
const data = await fetchJson<CredentialListResponse>('/api/credentials', {
searchParams: {
workspaceId,
type,
providerId,
},
})
return data.credentials ?? []
},
enabled: Boolean(workspaceId) && enabled,
staleTime: 60 * 1000,
})
}
export function useWorkspaceCredential(credentialId?: string, enabled = true) {
return useQuery<WorkspaceCredential | null>({
queryKey: workspaceCredentialKeys.detail(credentialId),
queryFn: async () => {
if (!credentialId) return null
const data = await fetchJson<CredentialResponse>(`/api/credentials/${credentialId}`)
return data.credential ?? null
},
enabled: Boolean(credentialId) && enabled,
staleTime: 60 * 1000,
})
}
export function useCreateWorkspaceCredential() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: {
workspaceId: string
type: WorkspaceCredentialType
displayName?: string
providerId?: string
accountId?: string
envKey?: string
envOwnerUserId?: string
}) => {
const response = await fetch('/api/credentials', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload),
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to create credential')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.list(variables.workspaceId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.all,
})
},
})
}
export function useUpdateWorkspaceCredential() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: {
credentialId: string
displayName?: string
accountId?: string
}) => {
const response = await fetch(`/api/credentials/${payload.credentialId}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
displayName: payload.displayName,
accountId: payload.accountId,
}),
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to update credential')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.all,
})
},
})
}
export function useDeleteWorkspaceCredential() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (credentialId: string) => {
const response = await fetch(`/api/credentials/${credentialId}`, {
method: 'DELETE',
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to delete credential')
}
return response.json()
},
onSuccess: (_data, credentialId) => {
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.detail(credentialId) })
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
queryClient.invalidateQueries({ queryKey: environmentKeys.all })
},
})
}
export function useWorkspaceCredentialMembers(credentialId?: string) {
return useQuery<WorkspaceCredentialMember[]>({
queryKey: workspaceCredentialKeys.members(credentialId),
queryFn: async () => {
if (!credentialId) return []
const data = await fetchJson<MembersResponse>(`/api/credentials/${credentialId}/members`)
return data.members ?? []
},
enabled: Boolean(credentialId),
staleTime: 30 * 1000,
})
}
export function useUpsertWorkspaceCredentialMember() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: {
credentialId: string
userId: string
role: WorkspaceCredentialRole
}) => {
const response = await fetch(`/api/credentials/${payload.credentialId}/members`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
userId: payload.userId,
role: payload.role,
}),
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to update credential member')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.members(variables.credentialId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
})
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
},
})
}
export function useRemoveWorkspaceCredentialMember() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: { credentialId: string; userId: string }) => {
const response = await fetch(
`/api/credentials/${payload.credentialId}/members?userId=${encodeURIComponent(payload.userId)}`,
{ method: 'DELETE' }
)
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to remove credential member')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.members(variables.credentialId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
})
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
},
})
}
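Taken together, these hooks are enough for a component to list and create credentials in a workspace. A hedged usage sketch (the component, its props, the import path, and the default display name are illustrative assumptions, not part of this changeset):

import { useCreateWorkspaceCredential, useWorkspaceCredentials } from '@/hooks/queries/credentials' // path assumed

function WorkspaceCredentialList({ workspaceId }: { workspaceId: string }) {
  // Fetch only personal env-var credentials for this workspace.
  const { data: credentials = [], isLoading } = useWorkspaceCredentials({
    workspaceId,
    type: 'env_personal',
  })
  const createCredential = useCreateWorkspaceCredential()

  if (isLoading) return <p>Loading credentials...</p>

  return (
    <div>
      <ul>
        {credentials.map((cred) => (
          <li key={cred.id}>{cred.displayName}</li>
        ))}
      </ul>
      {/* onSuccess in the mutation invalidates the list keys, so the new entry shows up without manual refetching. */}
      <button
        onClick={() =>
          createCredential.mutate({ workspaceId, type: 'env_personal', displayName: 'My secret' })
        }
      >
        Add credential
      </button>
    </div>
  )
}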

View File

@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {
const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform. const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.
Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions. Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.
Guidelines: Guidelines:
- Use the specific values provided (credential names, channel names, model names) - Use the specific values provided (credential names, channel names, model names)

View File

@@ -169,9 +169,9 @@ export function useConnectOAuthService() {
interface DisconnectServiceParams { interface DisconnectServiceParams {
provider: string provider: string
providerId: string providerId?: string
serviceId: string serviceId: string
accountId: string accountId?: string
} }
/** /**
@@ -182,7 +182,7 @@ export function useDisconnectOAuthService() {
const queryClient = useQueryClient() const queryClient = useQueryClient()
return useMutation({ return useMutation({
mutationFn: async ({ provider, providerId }: DisconnectServiceParams) => { mutationFn: async ({ provider, providerId, accountId }: DisconnectServiceParams) => {
const response = await fetch('/api/auth/oauth/disconnect', { const response = await fetch('/api/auth/oauth/disconnect', {
method: 'POST', method: 'POST',
headers: { headers: {
@@ -191,6 +191,7 @@ export function useDisconnectOAuthService() {
body: JSON.stringify({ body: JSON.stringify({
provider, provider,
providerId, providerId,
accountId,
}), }),
}) })
@@ -212,7 +213,8 @@ export function useDisconnectOAuthService() {
oauthConnectionsKeys.connections(), oauthConnectionsKeys.connections(),
previousServices.map((svc) => { previousServices.map((svc) => {
if (svc.id === serviceId) { if (svc.id === serviceId) {
const updatedAccounts = svc.accounts?.filter((acc) => acc.id !== accountId) || [] const updatedAccounts =
accountId && svc.accounts ? svc.accounts.filter((acc) => acc.id !== accountId) : []
return { return {
...svc, ...svc,
accounts: updatedAccounts, accounts: updatedAccounts,
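With providerId and accountId now optional, a caller can disconnect either one account or a whole provider, and the optimistic update mirrors that: a supplied accountId is filtered out of the cached service, while omitting it clears the cached accounts outright (presumably matching a server-side disconnect of every account for that provider). An illustrative sketch only; the provider, service, and account ids are placeholders, and the hook import is assumed to come from the module above:

function DisconnectButtons() {
  const disconnect = useDisconnectOAuthService()
  return (
    <>
      {/* Remove one connected account; the cached service keeps its other accounts. */}
      <button
        onClick={() =>
          disconnect.mutate({ provider: 'google-email', serviceId: 'gmail', accountId: 'acc_123' })
        }
      >
        Disconnect account
      </button>
      {/* No accountId: the cached accounts list for the service is cleared. */}
      <button onClick={() => disconnect.mutate({ provider: 'google-email', serviceId: 'gmail' })}>
        Disconnect provider
      </button>
    </>
  )
}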

View File

@@ -1,6 +1,6 @@
import { useQuery } from '@tanstack/react-query' import { useQuery } from '@tanstack/react-query'
import type { Credential } from '@/lib/oauth' import type { Credential } from '@/lib/oauth'
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants' import { CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSetDetail } from '@/hooks/queries/credential-sets' import { useCredentialSetDetail } from '@/hooks/queries/credential-sets'
import { fetchJson } from '@/hooks/selectors/helpers' import { fetchJson } from '@/hooks/selectors/helpers'
@@ -13,15 +13,34 @@ interface CredentialDetailResponse {
} }
export const oauthCredentialKeys = { export const oauthCredentialKeys = {
list: (providerId?: string) => ['oauthCredentials', providerId ?? 'none'] as const, list: (providerId?: string, workspaceId?: string, workflowId?: string) =>
[
'oauthCredentials',
providerId ?? 'none',
workspaceId ?? 'none',
workflowId ?? 'none',
] as const,
detail: (credentialId?: string, workflowId?: string) => detail: (credentialId?: string, workflowId?: string) =>
['oauthCredentialDetail', credentialId ?? 'none', workflowId ?? 'none'] as const, ['oauthCredentialDetail', credentialId ?? 'none', workflowId ?? 'none'] as const,
} }
export async function fetchOAuthCredentials(providerId: string): Promise<Credential[]> { interface FetchOAuthCredentialsParams {
providerId: string
workspaceId?: string
workflowId?: string
}
export async function fetchOAuthCredentials(
params: FetchOAuthCredentialsParams
): Promise<Credential[]> {
const { providerId, workspaceId, workflowId } = params
if (!providerId) return [] if (!providerId) return []
const data = await fetchJson<CredentialListResponse>('/api/auth/oauth/credentials', { const data = await fetchJson<CredentialListResponse>('/api/auth/oauth/credentials', {
searchParams: { provider: providerId }, searchParams: {
provider: providerId,
workspaceId,
workflowId,
},
}) })
return data.credentials ?? [] return data.credentials ?? []
} }
@@ -40,10 +59,44 @@ export async function fetchOAuthCredentialDetail(
return data.credentials ?? [] return data.credentials ?? []
} }
export function useOAuthCredentials(providerId?: string, enabled = true) { interface UseOAuthCredentialsOptions {
enabled?: boolean
workspaceId?: string
workflowId?: string
}
function resolveOptions(
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
): Required<UseOAuthCredentialsOptions> {
if (typeof enabledOrOptions === 'boolean') {
return {
enabled: enabledOrOptions,
workspaceId: '',
workflowId: '',
}
}
return {
enabled: enabledOrOptions?.enabled ?? true,
workspaceId: enabledOrOptions?.workspaceId ?? '',
workflowId: enabledOrOptions?.workflowId ?? '',
}
}
export function useOAuthCredentials(
providerId?: string,
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
) {
const { enabled, workspaceId, workflowId } = resolveOptions(enabledOrOptions)
return useQuery<Credential[]>({ return useQuery<Credential[]>({
queryKey: oauthCredentialKeys.list(providerId), queryKey: oauthCredentialKeys.list(providerId, workspaceId, workflowId),
queryFn: () => fetchOAuthCredentials(providerId ?? ''), queryFn: () =>
fetchOAuthCredentials({
providerId: providerId ?? '',
workspaceId: workspaceId || undefined,
workflowId: workflowId || undefined,
}),
enabled: Boolean(providerId) && enabled, enabled: Boolean(providerId) && enabled,
staleTime: 60 * 1000, staleTime: 60 * 1000,
}) })
@@ -62,7 +115,12 @@ export function useOAuthCredentialDetail(
}) })
} }
export function useCredentialName(credentialId?: string, providerId?: string, workflowId?: string) { export function useCredentialName(
credentialId?: string,
providerId?: string,
workflowId?: string,
workspaceId?: string
) {
// Check if this is a credential set value // Check if this is a credential set value
const isCredentialSet = credentialId?.startsWith(CREDENTIAL_SET.PREFIX) ?? false const isCredentialSet = credentialId?.startsWith(CREDENTIAL_SET.PREFIX) ?? false
const credentialSetId = isCredentialSet const credentialSetId = isCredentialSet
@@ -77,7 +135,11 @@ export function useCredentialName(credentialId?: string, providerId?: string, wo
const { data: credentials = [], isFetching: credentialsLoading } = useOAuthCredentials( const { data: credentials = [], isFetching: credentialsLoading } = useOAuthCredentials(
providerId, providerId,
Boolean(providerId) && !isCredentialSet {
enabled: Boolean(providerId) && !isCredentialSet,
workspaceId,
workflowId,
}
) )
const selectedCredential = credentials.find((cred) => cred.id === credentialId) const selectedCredential = credentials.find((cred) => cred.id === credentialId)
@@ -92,18 +154,18 @@ export function useCredentialName(credentialId?: string, providerId?: string, wo
shouldFetchDetail shouldFetchDetail
) )
const detailCredential = foreignCredentials[0]
const hasForeignMeta = foreignCredentials.length > 0 const hasForeignMeta = foreignCredentials.length > 0
const isForeignCredentialSet = isCredentialSet && !credentialSetData && !credentialSetLoading
const displayName = const displayName =
credentialSetData?.name ?? credentialSetData?.name ?? selectedCredential?.name ?? detailCredential?.name ?? null
selectedCredential?.name ??
(hasForeignMeta ? CREDENTIAL.FOREIGN_LABEL : null) ??
(isForeignCredentialSet ? CREDENTIAL.FOREIGN_LABEL : null)
return { return {
displayName, displayName,
isLoading: credentialsLoading || foreignLoading || (isCredentialSet && credentialSetLoading), isLoading:
credentialsLoading ||
foreignLoading ||
(isCredentialSet && credentialSetLoading && !credentialSetData),
hasForeignMeta, hasForeignMeta,
} }
} }
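A minimal usage sketch for the reworked hooks above; the import path and the surrounding hook name are assumptions, only the `useOAuthCredentials` and `useCredentialName` signatures come from this change. The legacy boolean `enabled` argument still works via `resolveOptions`, while the options object scopes the query cache per workspace and workflow.

```ts
// Hypothetical caller; the import path and hook name below are assumptions.
import { useCredentialName, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'

export function useCredentialPickerExample(params: {
  providerId: string
  workspaceId: string
  workflowId: string
  selectedCredentialId?: string
}) {
  // Options-object form: the query key now includes workspaceId and workflowId, so
  // switching workspaces does not reuse another workspace's cached credential list.
  const { data: credentials = [], isFetching } = useOAuthCredentials(params.providerId, {
    enabled: true,
    workspaceId: params.workspaceId,
    workflowId: params.workflowId,
  })

  // Display-name resolution threads the same workspace scope through.
  const { displayName, isLoading } = useCredentialName(
    params.selectedCredentialId,
    params.providerId,
    params.workflowId,
    params.workspaceId
  )

  return { credentials, isFetching, displayName, isLoading }
}
```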

View File

@@ -642,10 +642,6 @@ export function useDeployChildWorkflow() {
queryClient.invalidateQueries({ queryClient.invalidateQueries({
queryKey: workflowKeys.deploymentStatus(variables.workflowId), queryKey: workflowKeys.deploymentStatus(variables.workflowId),
}) })
// Invalidate workflow state so tool input mappings refresh
queryClient.invalidateQueries({
queryKey: workflowKeys.state(variables.workflowId),
})
// Also invalidate deployment queries // Also invalidate deployment queries
queryClient.invalidateQueries({ queryClient.invalidateQueries({
queryKey: deploymentKeys.info(variables.workflowId), queryKey: deploymentKeys.info(variables.workflowId),

View File

@@ -1,4 +1,4 @@
import { useCallback } from 'react' import { useCallback, useRef } from 'react'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import type { import type {
BlockCompletedData, BlockCompletedData,
@@ -16,18 +16,6 @@ import type { SerializableExecutionState } from '@/executor/execution/types'
const logger = createLogger('useExecutionStream') const logger = createLogger('useExecutionStream')
/**
* Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
* These should be treated as clean disconnects, not execution errors.
*/
function isClientDisconnectError(error: any): boolean {
if (error.name === 'AbortError') return true
const msg = (error.message ?? '').toLowerCase()
return (
msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
)
}
/** /**
* Processes SSE events from a response body and invokes appropriate callbacks. * Processes SSE events from a response body and invokes appropriate callbacks.
*/ */
@@ -133,7 +121,6 @@ export interface ExecuteStreamOptions {
parallels?: Record<string, any> parallels?: Record<string, any>
} }
stopAfterBlockId?: string stopAfterBlockId?: string
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks callbacks?: ExecutionStreamCallbacks
} }
@@ -142,40 +129,30 @@ export interface ExecuteFromBlockOptions {
startBlockId: string startBlockId: string
sourceSnapshot: SerializableExecutionState sourceSnapshot: SerializableExecutionState
input?: any input?: any
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks callbacks?: ExecutionStreamCallbacks
} }
export interface ReconnectStreamOptions {
workflowId: string
executionId: string
fromEventId?: number
callbacks?: ExecutionStreamCallbacks
}
/**
* Module-level map shared across all hook instances.
* Ensures ANY instance can cancel streams started by ANY other instance,
* which is critical for SPA navigation where the original hook instance unmounts
* but the SSE stream must be cancellable from the new instance.
*/
const sharedAbortControllers = new Map<string, AbortController>()
/** /**
* Hook for executing workflows via server-side SSE streaming. * Hook for executing workflows via server-side SSE streaming.
* Supports concurrent executions via per-workflow AbortController maps. * Supports concurrent executions via per-workflow AbortController maps.
*/ */
export function useExecutionStream() { export function useExecutionStream() {
const execute = useCallback(async (options: ExecuteStreamOptions) => { const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
const { workflowId, callbacks = {}, onExecutionId, ...payload } = options const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
new Map()
)
const existing = sharedAbortControllers.get(workflowId) const execute = useCallback(async (options: ExecuteStreamOptions) => {
const { workflowId, callbacks = {}, ...payload } = options
const existing = abortControllersRef.current.get(workflowId)
if (existing) { if (existing) {
existing.abort() existing.abort()
} }
const abortController = new AbortController() const abortController = new AbortController()
sharedAbortControllers.set(workflowId, abortController) abortControllersRef.current.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)
try { try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, { const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -200,48 +177,42 @@ export function useExecutionStream() {
throw new Error('No response body') throw new Error('No response body')
} }
const serverExecutionId = response.headers.get('X-Execution-Id') const executionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) { if (executionId) {
onExecutionId?.(serverExecutionId) currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
} }
const reader = response.body.getReader() const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Execution') await processSSEStream(reader, callbacks, 'Execution')
} catch (error: any) { } catch (error: any) {
if (isClientDisconnectError(error)) { if (error.name === 'AbortError') {
logger.info('Execution stream disconnected (page unload or abort)') logger.info('Execution stream cancelled')
return callbacks.onExecutionCancelled?.({ duration: 0 })
} } else {
logger.error('Execution stream error:', error) logger.error('Execution stream error:', error)
callbacks.onExecutionError?.({ callbacks.onExecutionError?.({
error: error.message || 'Unknown error', error: error.message || 'Unknown error',
duration: 0, duration: 0,
}) })
}
throw error throw error
} finally { } finally {
if (sharedAbortControllers.get(workflowId) === abortController) { abortControllersRef.current.delete(workflowId)
sharedAbortControllers.delete(workflowId) currentExecutionsRef.current.delete(workflowId)
}
} }
}, []) }, [])
const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => { const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
const { const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
workflowId,
startBlockId,
sourceSnapshot,
input,
onExecutionId,
callbacks = {},
} = options
const existing = sharedAbortControllers.get(workflowId) const existing = abortControllersRef.current.get(workflowId)
if (existing) { if (existing) {
existing.abort() existing.abort()
} }
const abortController = new AbortController() const abortController = new AbortController()
sharedAbortControllers.set(workflowId, abortController) abortControllersRef.current.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)
try { try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, { const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -275,80 +246,64 @@ export function useExecutionStream() {
throw new Error('No response body') throw new Error('No response body')
} }
const serverExecutionId = response.headers.get('X-Execution-Id') const executionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) { if (executionId) {
onExecutionId?.(serverExecutionId) currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
} }
const reader = response.body.getReader() const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Run-from-block') await processSSEStream(reader, callbacks, 'Run-from-block')
} catch (error: any) { } catch (error: any) {
if (isClientDisconnectError(error)) { if (error.name === 'AbortError') {
logger.info('Run-from-block stream disconnected (page unload or abort)') logger.info('Run-from-block execution cancelled')
return callbacks.onExecutionCancelled?.({ duration: 0 })
} } else {
logger.error('Run-from-block execution error:', error) logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({ callbacks.onExecutionError?.({
error: error.message || 'Unknown error', error: error.message || 'Unknown error',
duration: 0, duration: 0,
}) })
}
throw error throw error
} finally { } finally {
if (sharedAbortControllers.get(workflowId) === abortController) { abortControllersRef.current.delete(workflowId)
sharedAbortControllers.delete(workflowId) currentExecutionsRef.current.delete(workflowId)
}
}
}, [])
const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
{ signal: abortController.signal }
)
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
if (!response.body) throw new Error('No response body')
await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
} catch (error: any) {
if (isClientDisconnectError(error)) return
logger.error('Reconnection stream error:', error)
throw error
} finally {
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
} }
}, []) }, [])
const cancel = useCallback((workflowId?: string) => { const cancel = useCallback((workflowId?: string) => {
if (workflowId) { if (workflowId) {
const controller = sharedAbortControllers.get(workflowId) const execution = currentExecutionsRef.current.get(workflowId)
if (execution) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}
const controller = abortControllersRef.current.get(workflowId)
if (controller) { if (controller) {
controller.abort() controller.abort()
sharedAbortControllers.delete(workflowId) abortControllersRef.current.delete(workflowId)
} }
currentExecutionsRef.current.delete(workflowId)
} else { } else {
for (const [, controller] of sharedAbortControllers) { for (const [, execution] of currentExecutionsRef.current) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}
for (const [, controller] of abortControllersRef.current) {
controller.abort() controller.abort()
} }
sharedAbortControllers.clear() abortControllersRef.current.clear()
currentExecutionsRef.current.clear()
} }
}, []) }, [])
return { return {
execute, execute,
executeFromBlock, executeFromBlock,
reconnect,
cancel, cancel,
} }
} }
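A hedged sketch of driving the simplified hook above. The import path is an assumption, any required `ExecuteStreamOptions` fields not visible in this diff are omitted, and the callback names match the error paths shown here.

```ts
// Illustrative only: import path and the wrapper hook are assumptions.
import { useExecutionStream } from '@/hooks/use-execution-stream'

export function useRunWorkflowExample(workflowId: string) {
  const { execute, cancel } = useExecutionStream()

  const run = async () => {
    try {
      await execute({
        workflowId,
        callbacks: {
          onExecutionError: ({ error }) => console.error('execution failed:', error),
          onExecutionCancelled: () => console.info('execution cancelled'),
        },
      })
    } catch {
      // execute() rethrows after invoking the callbacks; swallow so the caller can recover.
    }
  }

  // cancel() aborts the local SSE stream and also POSTs the server-side cancel endpoint
  // for the execution id captured from the X-Execution-Id response header.
  const stop = () => cancel(workflowId)

  return { run, stop }
}
```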

View File

@@ -14,7 +14,7 @@ import {
oneTimeToken, oneTimeToken,
organization, organization,
} from 'better-auth/plugins' } from 'better-auth/plugins'
import { and, eq } from 'drizzle-orm' import { and, eq, inArray, sql } from 'drizzle-orm'
import { headers } from 'next/headers' import { headers } from 'next/headers'
import Stripe from 'stripe' import Stripe from 'stripe'
import { import {
@@ -150,16 +150,6 @@ export const auth = betterAuth({
account: { account: {
create: { create: {
before: async (account) => { before: async (account) => {
// Only one credential per (userId, providerId) is allowed
// If user reconnects (even with a different external account), delete the old one
// and let Better Auth create the new one (returning false breaks account linking flow)
const existing = await db.query.account.findFirst({
where: and(
eq(schema.account.userId, account.userId),
eq(schema.account.providerId, account.providerId)
),
})
const modifiedAccount = { ...account } const modifiedAccount = { ...account }
if (account.providerId === 'salesforce' && account.accessToken) { if (account.providerId === 'salesforce' && account.accessToken) {
@@ -189,32 +179,148 @@ export const auth = betterAuth({
} }
} }
// Handle Microsoft refresh token expiry
if (isMicrosoftProvider(account.providerId)) { if (isMicrosoftProvider(account.providerId)) {
modifiedAccount.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry() modifiedAccount.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
} }
if (existing) {
// Delete the existing account so Better Auth can create the new one
// This allows account linking/re-authorization to succeed
await db.delete(schema.account).where(eq(schema.account.id, existing.id))
// Preserve the existing account ID so references (like workspace notifications) continue to work
modifiedAccount.id = existing.id
logger.info('[account.create.before] Deleted existing account for re-authorization', {
userId: account.userId,
providerId: account.providerId,
existingAccountId: existing.id,
preservingId: true,
})
// Sync webhooks for credential sets after reconnecting (in after hook)
}
return { data: modifiedAccount } return { data: modifiedAccount }
}, },
after: async (account) => { after: async (account) => {
/**
* Migrate credentials from stale account rows to the newly created one.
*
* Each getUserInfo appends a random UUID to the stable external ID so
* that Better Auth never blocks cross-user connections. This means
* re-connecting the same external identity creates a new row. We detect
* the stale siblings here by comparing the stable prefix (everything
* before the trailing UUID), migrate any credential FKs to the new row,
* then delete the stale rows.
*/
try {
const UUID_SUFFIX_RE = /-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/
const stablePrefix = account.accountId.replace(UUID_SUFFIX_RE, '')
if (stablePrefix && stablePrefix !== account.accountId) {
const siblings = await db
.select({ id: schema.account.id, accountId: schema.account.accountId })
.from(schema.account)
.where(
and(
eq(schema.account.userId, account.userId),
eq(schema.account.providerId, account.providerId),
sql`${schema.account.id} != ${account.id}`
)
)
const staleRows = siblings.filter(
(row) => row.accountId.replace(UUID_SUFFIX_RE, '') === stablePrefix
)
if (staleRows.length > 0) {
const staleIds = staleRows.map((row) => row.id)
await db
.update(schema.credential)
.set({ accountId: account.id })
.where(inArray(schema.credential.accountId, staleIds))
await db.delete(schema.account).where(inArray(schema.account.id, staleIds))
logger.info('[account.create.after] Migrated credentials from stale accounts', {
userId: account.userId,
providerId: account.providerId,
newAccountId: account.id,
migratedFrom: staleIds,
})
}
}
} catch (error) {
logger.error('[account.create.after] Failed to clean up stale accounts', {
userId: account.userId,
providerId: account.providerId,
error,
})
}
/**
* If a pending credential draft exists for this (userId, providerId),
* create the credential now with the user's chosen display name.
* This is deterministic — the account row is guaranteed to exist.
*/
try {
const [draft] = await db
.select()
.from(schema.pendingCredentialDraft)
.where(
and(
eq(schema.pendingCredentialDraft.userId, account.userId),
eq(schema.pendingCredentialDraft.providerId, account.providerId),
sql`${schema.pendingCredentialDraft.expiresAt} > NOW()`
)
)
.limit(1)
if (draft) {
const credentialId = crypto.randomUUID()
const now = new Date()
try {
await db.insert(schema.credential).values({
id: credentialId,
workspaceId: draft.workspaceId,
type: 'oauth',
displayName: draft.displayName,
providerId: account.providerId,
accountId: account.id,
createdBy: account.userId,
createdAt: now,
updatedAt: now,
})
await db.insert(schema.credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: account.userId,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: account.userId,
createdAt: now,
updatedAt: now,
})
logger.info('[account.create.after] Created credential from draft', {
credentialId,
displayName: draft.displayName,
providerId: account.providerId,
accountId: account.id,
})
} catch (insertError: unknown) {
const code =
insertError && typeof insertError === 'object' && 'code' in insertError
? (insertError as { code: string }).code
: undefined
if (code !== '23505') {
throw insertError
}
logger.info('[account.create.after] Credential already exists, skipping draft', {
providerId: account.providerId,
accountId: account.id,
})
}
await db
.delete(schema.pendingCredentialDraft)
.where(eq(schema.pendingCredentialDraft.id, draft.id))
}
} catch (error) {
logger.error('[account.create.after] Failed to create credential from draft', {
userId: account.userId,
providerId: account.providerId,
error,
})
}
try { try {
const { ensureUserStatsExists } = await import('@/lib/billing/core/usage') const { ensureUserStatsExists } = await import('@/lib/billing/core/usage')
await ensureUserStatsExists(account.userId) await ensureUserStatsExists(account.userId)
@@ -1487,7 +1593,7 @@ export const auth = betterAuth({
}) })
return { return {
id: `${data.user_id || data.hub_id.toString()}-${crypto.randomUUID()}`, id: `${(data.user_id || data.hub_id).toString()}-${crypto.randomUUID()}`,
name: data.user || 'HubSpot User', name: data.user || 'HubSpot User',
email: data.user || `hubspot-${data.hub_id}@hubspot.com`, email: data.user || `hubspot-${data.hub_id}@hubspot.com`,
emailVerified: true, emailVerified: true,
@@ -1541,7 +1647,7 @@ export const auth = betterAuth({
const data = await response.json() const data = await response.json()
return { return {
id: `${data.user_id || data.sub}-${crypto.randomUUID()}`, id: `${(data.user_id || data.sub).toString()}-${crypto.randomUUID()}`,
name: data.name || 'Salesforce User', name: data.name || 'Salesforce User',
email: data.email || `salesforce-${data.user_id}@salesforce.com`, email: data.email || `salesforce-${data.user_id}@salesforce.com`,
emailVerified: data.email_verified || true, emailVerified: data.email_verified || true,
@@ -1600,7 +1706,7 @@ export const auth = betterAuth({
const now = new Date() const now = new Date()
return { return {
id: `${profile.data.id}-${crypto.randomUUID()}`, id: `${profile.data.id.toString()}-${crypto.randomUUID()}`,
name: profile.data.name || 'X User', name: profile.data.name || 'X User',
email: `${profile.data.username}@x.com`, email: `${profile.data.username}@x.com`,
image: profile.data.profile_image_url, image: profile.data.profile_image_url,
@@ -1680,7 +1786,7 @@ export const auth = betterAuth({
const now = new Date() const now = new Date()
return { return {
id: `${profile.account_id}-${crypto.randomUUID()}`, id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
name: profile.name || profile.display_name || 'Confluence User', name: profile.name || profile.display_name || 'Confluence User',
email: profile.email || `${profile.account_id}@atlassian.com`, email: profile.email || `${profile.account_id}@atlassian.com`,
image: profile.picture || undefined, image: profile.picture || undefined,
@@ -1791,7 +1897,7 @@ export const auth = betterAuth({
const now = new Date() const now = new Date()
return { return {
id: `${profile.account_id}-${crypto.randomUUID()}`, id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
name: profile.name || profile.display_name || 'Jira User', name: profile.name || profile.display_name || 'Jira User',
email: profile.email || `${profile.account_id}@atlassian.com`, email: profile.email || `${profile.account_id}@atlassian.com`,
image: profile.picture || undefined, image: profile.picture || undefined,
@@ -1841,7 +1947,7 @@ export const auth = betterAuth({
const now = new Date() const now = new Date()
return { return {
id: `${data.id}-${crypto.randomUUID()}`, id: `${data.id.toString()}-${crypto.randomUUID()}`,
name: data.email ? data.email.split('@')[0] : 'Airtable User', name: data.email ? data.email.split('@')[0] : 'Airtable User',
email: data.email || `${data.id}@airtable.user`, email: data.email || `${data.id}@airtable.user`,
emailVerified: !!data.email, emailVerified: !!data.email,
@@ -1890,7 +1996,7 @@ export const auth = betterAuth({
const now = new Date() const now = new Date()
return { return {
id: `${profile.bot?.owner?.user?.id || profile.id}-${crypto.randomUUID()}`, id: `${(profile.bot?.owner?.user?.id || profile.id).toString()}-${crypto.randomUUID()}`,
name: profile.name || profile.bot?.owner?.user?.name || 'Notion User', name: profile.name || profile.bot?.owner?.user?.name || 'Notion User',
email: profile.person?.email || `${profile.id}@notion.user`, email: profile.person?.email || `${profile.id}@notion.user`,
emailVerified: !!profile.person?.email, emailVerified: !!profile.person?.email,
@@ -1957,7 +2063,7 @@ export const auth = betterAuth({
const now = new Date() const now = new Date()
return { return {
id: `${data.id}-${crypto.randomUUID()}`, id: `${data.id.toString()}-${crypto.randomUUID()}`,
name: data.name || 'Reddit User', name: data.name || 'Reddit User',
email: `${data.name}@reddit.user`, email: `${data.name}@reddit.user`,
image: data.icon_img || undefined, image: data.icon_img || undefined,
@@ -2029,7 +2135,7 @@ export const auth = betterAuth({
const viewer = data.viewer const viewer = data.viewer
return { return {
id: `${viewer.id}-${crypto.randomUUID()}`, id: `${viewer.id.toString()}-${crypto.randomUUID()}`,
email: viewer.email, email: viewer.email,
name: viewer.name, name: viewer.name,
emailVerified: true, emailVerified: true,
@@ -2092,7 +2198,7 @@ export const auth = betterAuth({
const data = await response.json() const data = await response.json()
return { return {
id: `${data.account_id}-${crypto.randomUUID()}`, id: `${data.account_id.toString()}-${crypto.randomUUID()}`,
email: data.email, email: data.email,
name: data.name?.display_name || data.email, name: data.name?.display_name || data.email,
emailVerified: data.email_verified || false, emailVerified: data.email_verified || false,
@@ -2143,7 +2249,7 @@ export const auth = betterAuth({
const now = new Date() const now = new Date()
return { return {
id: `${profile.gid}-${crypto.randomUUID()}`, id: `${profile.gid.toString()}-${crypto.randomUUID()}`,
name: profile.name || 'Asana User', name: profile.name || 'Asana User',
email: profile.email || `${profile.gid}@asana.user`, email: profile.email || `${profile.gid}@asana.user`,
image: profile.photo?.image_128x128 || undefined, image: profile.photo?.image_128x128 || undefined,
@@ -2378,7 +2484,7 @@ export const auth = betterAuth({
const profile = await response.json() const profile = await response.json()
return { return {
id: `${profile.id}-${crypto.randomUUID()}`, id: `${profile.id.toString()}-${crypto.randomUUID()}`,
name: name:
`${profile.first_name || ''} ${profile.last_name || ''}`.trim() || 'Zoom User', `${profile.first_name || ''} ${profile.last_name || ''}`.trim() || 'Zoom User',
email: profile.email || `${profile.id}@zoom.user`, email: profile.email || `${profile.id}@zoom.user`,
@@ -2445,7 +2551,7 @@ export const auth = betterAuth({
const profile = await response.json() const profile = await response.json()
return { return {
id: `${profile.id}-${crypto.randomUUID()}`, id: `${profile.id.toString()}-${crypto.randomUUID()}`,
name: profile.display_name || 'Spotify User', name: profile.display_name || 'Spotify User',
email: profile.email || `${profile.id}@spotify.user`, email: profile.email || `${profile.id}@spotify.user`,
emailVerified: true, emailVerified: true,
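The stale-account cleanup in the `after` hook above keys off a stable prefix of the external account id. A small worked example of that prefix logic (the account ids below are made up):

```ts
// Same regex as the after hook; the ids are illustrative.
const UUID_SUFFIX_RE = /-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/

const stablePrefixOf = (accountId: string) => accountId.replace(UUID_SUFFIX_RE, '')

// Reconnecting the same HubSpot identity produces a new row with a fresh random suffix...
const firstConnection = 'hub-12345-3f2c9a1e-4b7d-4c2a-9e1f-0a1b2c3d4e5f'
const reconnection = 'hub-12345-7d8e9f0a-1b2c-4d3e-8f9a-0b1c2d3e4f5a'

// ...but both strip to the same stable prefix, so the older row is detected as a stale
// sibling, its credential foreign keys are migrated to the new row, and it is deleted.
console.log(stablePrefixOf(firstConnection) === stablePrefixOf(reconnection)) // true ("hub-12345")
```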

View File

@@ -1,6 +1,6 @@
import { db } from '@sim/db' import { db } from '@sim/db'
import { account, workflow as workflowTable } from '@sim/db/schema' import { account, credential, credentialMember, workflow as workflowTable } from '@sim/db/schema'
import { eq } from 'drizzle-orm' import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server' import type { NextRequest } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -12,17 +12,14 @@ export interface CredentialAccessResult {
requesterUserId?: string requesterUserId?: string
credentialOwnerUserId?: string credentialOwnerUserId?: string
workspaceId?: string workspaceId?: string
resolvedCredentialId?: string
} }
/** /**
* Centralizes auth + collaboration rules for credential use. * Centralizes auth + credential membership checks for OAuth usage.
* - Uses checkSessionOrInternalAuth to authenticate the caller * - Workspace-scoped credential IDs enforce active credential_member access.
* - Fetches credential owner * - Legacy account IDs are resolved to workspace-scoped credentials when workflowId is provided.
* - Authorization rules: * - Direct legacy account-ID access without workflowId is restricted to account owners only.
* - session: allow if requester owns the credential; otherwise require workflowId and
* verify BOTH requester and owner have access to the workflow's workspace
* - internal_jwt: require workflowId (by default) and verify credential owner has access to the
* workflow's workspace (requester identity is the system/workflow)
*/ */
export async function authorizeCredentialUse( export async function authorizeCredentialUse(
request: NextRequest, request: NextRequest,
@@ -37,71 +34,173 @@ export async function authorizeCredentialUse(
return { ok: false, error: auth.error || 'Authentication required' } return { ok: false, error: auth.error || 'Authentication required' }
} }
// Lookup credential owner const [workflowContext] = workflowId
const [credRow] = await db ? await db
.select({ workspaceId: workflowTable.workspaceId })
.from(workflowTable)
.where(eq(workflowTable.id, workflowId))
.limit(1)
: [null]
if (workflowId && (!workflowContext || !workflowContext.workspaceId)) {
return { ok: false, error: 'Workflow not found' }
}
const [platformCredential] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
accountId: credential.accountId,
})
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (platformCredential) {
if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
return { ok: false, error: 'Unsupported credential type for OAuth access' }
}
if (workflowContext && workflowContext.workspaceId !== platformCredential.workspaceId) {
return { ok: false, error: 'Credential is not accessible from this workflow workspace' }
}
const [accountRow] = await db
.select({ userId: account.userId })
.from(account)
.where(eq(account.id, platformCredential.accountId))
.limit(1)
if (!accountRow) {
return { ok: false, error: 'Credential account not found' }
}
const requesterPerm =
auth.authType === 'internal_jwt'
? null
: await getUserEntityPermissions(auth.userId, 'workspace', platformCredential.workspaceId)
if (auth.authType !== 'internal_jwt') {
const [membership] = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, platformCredential.id),
eq(credentialMember.userId, auth.userId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
if (!membership || requesterPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
}
const ownerPerm = await getUserEntityPermissions(
accountRow.userId,
'workspace',
platformCredential.workspaceId
)
if (ownerPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId: accountRow.userId,
workspaceId: platformCredential.workspaceId,
resolvedCredentialId: platformCredential.accountId,
}
}
if (workflowContext?.workspaceId) {
const [workspaceCredential] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
accountId: credential.accountId,
})
.from(credential)
.where(
and(
eq(credential.type, 'oauth'),
eq(credential.workspaceId, workflowContext.workspaceId),
eq(credential.accountId, credentialId)
)
)
.limit(1)
if (!workspaceCredential?.accountId) {
return { ok: false, error: 'Credential not found' }
}
const [accountRow] = await db
.select({ userId: account.userId })
.from(account)
.where(eq(account.id, workspaceCredential.accountId))
.limit(1)
if (!accountRow) {
return { ok: false, error: 'Credential account not found' }
}
if (auth.authType !== 'internal_jwt') {
const [membership] = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, workspaceCredential.id),
eq(credentialMember.userId, auth.userId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
if (!membership) {
return { ok: false, error: 'Unauthorized' }
}
}
const ownerPerm = await getUserEntityPermissions(
accountRow.userId,
'workspace',
workflowContext.workspaceId
)
if (ownerPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId: accountRow.userId,
workspaceId: workflowContext.workspaceId,
resolvedCredentialId: workspaceCredential.accountId,
}
}
const [legacyAccount] = await db
.select({ userId: account.userId }) .select({ userId: account.userId })
.from(account) .from(account)
.where(eq(account.id, credentialId)) .where(eq(account.id, credentialId))
.limit(1) .limit(1)
if (!credRow) { if (!legacyAccount) {
return { ok: false, error: 'Credential not found' } return { ok: false, error: 'Credential not found' }
} }
const credentialOwnerUserId = credRow.userId if (auth.authType === 'internal_jwt') {
// If requester owns the credential, allow immediately
if (auth.authType !== 'internal_jwt' && auth.userId === credentialOwnerUserId) {
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId,
}
}
// For collaboration paths, workflowId is required to scope to a workspace
if (!workflowId) {
return { ok: false, error: 'workflowId is required' } return { ok: false, error: 'workflowId is required' }
} }
const [wf] = await db if (auth.userId !== legacyAccount.userId) {
.select({ workspaceId: workflowTable.workspaceId })
.from(workflowTable)
.where(eq(workflowTable.id, workflowId))
.limit(1)
if (!wf || !wf.workspaceId) {
return { ok: false, error: 'Workflow not found' }
}
if (auth.authType === 'internal_jwt') {
// Internal calls: verify credential owner belongs to the workflow's workspace
const ownerPerm = await getUserEntityPermissions(
credentialOwnerUserId,
'workspace',
wf.workspaceId
)
if (ownerPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId,
workspaceId: wf.workspaceId,
}
}
// Session: verify BOTH requester and owner belong to the workflow's workspace
const requesterPerm = await getUserEntityPermissions(auth.userId, 'workspace', wf.workspaceId)
const ownerPerm = await getUserEntityPermissions(
credentialOwnerUserId,
'workspace',
wf.workspaceId
)
if (requesterPerm === null || ownerPerm === null) {
return { ok: false, error: 'Unauthorized' } return { ok: false, error: 'Unauthorized' }
} }
@@ -109,7 +208,7 @@ export async function authorizeCredentialUse(
ok: true, ok: true,
authType: auth.authType as CredentialAccessResult['authType'], authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId, requesterUserId: auth.userId,
credentialOwnerUserId, credentialOwnerUserId: legacyAccount.userId,
workspaceId: wf.workspaceId, resolvedCredentialId: credentialId,
} }
} }
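A hedged sketch of a route handler consuming the result above. The exact type of the second parameter is not shown in this diff, so the `{ credentialId, workflowId }` shape and the import path are assumptions; the result fields match the returns in this change.

```ts
import type { NextRequest } from 'next/server'
// Path is an assumption; only the function and its result fields come from the diff above.
import { authorizeCredentialUse } from '@/lib/credentials/authorization'

export async function GET(request: NextRequest) {
  const { searchParams } = new URL(request.url)
  const credentialId = searchParams.get('credentialId') ?? ''
  const workflowId = searchParams.get('workflowId') ?? undefined

  const access = await authorizeCredentialUse(request, { credentialId, workflowId })
  if (!access.ok) {
    return Response.json({ error: access.error ?? 'Unauthorized' }, { status: 403 })
  }

  // resolvedCredentialId is always the underlying account row id, whether the caller
  // passed a workspace-scoped credential id or a legacy account id.
  return Response.json({
    accountId: access.resolvedCredentialId,
    ownerUserId: access.credentialOwnerUserId,
    workspaceId: access.workspaceId,
  })
}
```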

View File

@@ -20,8 +20,6 @@ export interface BuildPayloadParams {
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }> fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
commands?: string[] commands?: string[]
chatId?: string chatId?: string
conversationId?: string
prefetch?: boolean
implicitFeedback?: string implicitFeedback?: string
} }
@@ -66,10 +64,6 @@ export async function buildCopilotRequestPayload(
fileAttachments, fileAttachments,
commands, commands,
chatId, chatId,
conversationId,
prefetch,
conversationHistory,
implicitFeedback,
} = params } = params
const selectedModel = options.selectedModel const selectedModel = options.selectedModel
@@ -160,12 +154,6 @@ export async function buildCopilotRequestPayload(
version: SIM_AGENT_VERSION, version: SIM_AGENT_VERSION,
...(contexts && contexts.length > 0 ? { context: contexts } : {}), ...(contexts && contexts.length > 0 ? { context: contexts } : {}),
...(chatId ? { chatId } : {}), ...(chatId ? { chatId } : {}),
...(conversationId ? { conversationId } : {}),
...(Array.isArray(conversationHistory) && conversationHistory.length > 0
? { conversationHistory }
: {}),
...(typeof prefetch === 'boolean' ? { prefetch } : {}),
...(implicitFeedback ? { implicitFeedback } : {}),
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}), ...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
...(integrationTools.length > 0 ? { integrationTools } : {}), ...(integrationTools.length > 0 ? { integrationTools } : {}),
...(credentials ? { credentials } : {}), ...(credentials ? { credentials } : {}),

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db' import { db } from '@sim/db'
import { customTools, workflow } from '@sim/db/schema' import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull, or } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants' import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import type { import type {
ExecutionContext, ExecutionContext,
@@ -12,7 +12,6 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
import { env } from '@/lib/core/config/env' import { env } from '@/lib/core/config/env'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils' import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
import { getTool, resolveToolId } from '@/tools/utils' import { getTool, resolveToolId } from '@/tools/utils'
import { import {
executeCheckDeploymentStatus, executeCheckDeploymentStatus,
@@ -77,247 +76,6 @@ import {
const logger = createLogger('CopilotToolExecutor') const logger = createLogger('CopilotToolExecutor')
type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'
interface ManageCustomToolSchema {
type: 'function'
function: {
name: string
description?: string
parameters: Record<string, unknown>
}
}
interface ManageCustomToolParams {
operation?: string
toolId?: string
schema?: ManageCustomToolSchema
code?: string
title?: string
workspaceId?: string
}
async function executeManageCustomTool(
rawParams: Record<string, unknown>,
context: ExecutionContext
): Promise<ToolCallResult> {
const params = rawParams as ManageCustomToolParams
const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
const workspaceId = params.workspaceId || context.workspaceId
if (!operation) {
return { success: false, error: "Missing required 'operation' argument" }
}
try {
if (operation === 'list') {
const toolsForUser = workspaceId
? await db
.select()
.from(customTools)
.where(
or(
eq(customTools.workspaceId, workspaceId),
and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
)
)
.orderBy(desc(customTools.createdAt))
: await db
.select()
.from(customTools)
.where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
.orderBy(desc(customTools.createdAt))
return {
success: true,
output: {
success: true,
operation,
tools: toolsForUser,
count: toolsForUser.length,
},
}
}
if (operation === 'add') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'add'",
}
}
if (!params.schema || !params.code) {
return {
success: false,
error: "Both 'schema' and 'code' are required for operation 'add'",
}
}
const title = params.title || params.schema.function?.name
if (!title) {
return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
}
const resultTools = await upsertCustomTools({
tools: [
{
title,
schema: params.schema,
code: params.code,
},
],
workspaceId,
userId: context.userId,
})
const created = resultTools.find((tool) => tool.title === title)
return {
success: true,
output: {
success: true,
operation,
toolId: created?.id,
title,
message: `Created custom tool "${title}"`,
},
}
}
if (operation === 'edit') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'edit'",
}
}
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'edit'" }
}
if (!params.schema && !params.code) {
return {
success: false,
error: "At least one of 'schema' or 'code' is required for operation 'edit'",
}
}
const workspaceTool = await db
.select()
.from(customTools)
.where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
.limit(1)
const legacyTool =
workspaceTool.length === 0
? await db
.select()
.from(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.limit(1)
: []
const existing = workspaceTool[0] || legacyTool[0]
if (!existing) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
const mergedCode = params.code || existing.code
const title = params.title || mergedSchema.function?.name || existing.title
await upsertCustomTools({
tools: [
{
id: params.toolId,
title,
schema: mergedSchema,
code: mergedCode,
},
],
workspaceId,
userId: context.userId,
})
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
title,
message: `Updated custom tool "${title}"`,
},
}
}
if (operation === 'delete') {
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'delete'" }
}
const workspaceDelete =
workspaceId != null
? await db
.delete(customTools)
.where(
and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
)
.returning({ id: customTools.id })
: []
const legacyDelete =
workspaceDelete.length === 0
? await db
.delete(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.returning({ id: customTools.id })
: []
const deleted = workspaceDelete[0] || legacyDelete[0]
if (!deleted) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
message: 'Deleted custom tool',
},
}
}
return {
success: false,
error: `Unsupported operation for manage_custom_tool: ${operation}`,
}
} catch (error) {
logger.error('manage_custom_tool execution failed', {
operation,
workspaceId,
userId: context.userId,
error: error instanceof Error ? error.message : String(error),
})
return {
success: false,
error: error instanceof Error ? error.message : 'Failed to manage custom tool',
}
}
}
const SERVER_TOOLS = new Set<string>([ const SERVER_TOOLS = new Set<string>([
'get_blocks_and_tools', 'get_blocks_and_tools',
'get_blocks_metadata', 'get_blocks_metadata',
@@ -403,19 +161,6 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
} }
} }
}, },
oauth_request_access: async (p, _c) => {
const providerName = (p.providerName || p.provider_name || 'the provider') as string
return {
success: true,
output: {
success: true,
status: 'requested',
providerName,
message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
},
}
},
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
} }
/** /**

View File

@@ -0,0 +1,62 @@
import { db } from '@sim/db'
import { credential, credentialMember } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
type ActiveCredentialMember = typeof credentialMember.$inferSelect
type CredentialRecord = typeof credential.$inferSelect
export interface CredentialActorContext {
credential: CredentialRecord | null
member: ActiveCredentialMember | null
hasWorkspaceAccess: boolean
canWriteWorkspace: boolean
isAdmin: boolean
}
/**
* Resolves user access context for a credential.
*/
export async function getCredentialActorContext(
credentialId: string,
userId: string
): Promise<CredentialActorContext> {
const [credentialRow] = await db
.select()
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (!credentialRow) {
return {
credential: null,
member: null,
hasWorkspaceAccess: false,
canWriteWorkspace: false,
isAdmin: false,
}
}
const workspaceAccess = await checkWorkspaceAccess(credentialRow.workspaceId, userId)
const [memberRow] = await db
.select()
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.userId, userId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
const isAdmin = memberRow?.role === 'admin'
return {
credential: credentialRow,
member: memberRow ?? null,
hasWorkspaceAccess: workspaceAccess.hasAccess,
canWriteWorkspace: workspaceAccess.canWrite,
isAdmin,
}
}
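One way a caller might fold the context above into an edit guard; the guard name, import path, and policy are illustrative, not part of this change.

```ts
// getCredentialActorContext is the helper defined above; the path is an assumption.
import { getCredentialActorContext } from '@/lib/credentials/actor-context'

export async function canEditCredential(credentialId: string, userId: string): Promise<boolean> {
  const ctx = await getCredentialActorContext(credentialId, userId)
  if (!ctx.credential) return false
  // Admin members may always edit; others need write access to the owning workspace.
  return ctx.isAdmin || (ctx.hasWorkspaceAccess && ctx.canWriteWorkspace)
}
```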

View File

@@ -0,0 +1,77 @@
'use client'
export const PENDING_OAUTH_CREDENTIAL_DRAFT_KEY = 'sim.pending-oauth-credential-draft'
export const PENDING_CREDENTIAL_CREATE_REQUEST_KEY = 'sim.pending-credential-create-request'
export interface PendingOAuthCredentialDraft {
workspaceId: string
providerId: string
displayName: string
existingCredentialIds: string[]
existingAccountIds: string[]
requestedAt: number
}
interface PendingOAuthCredentialCreateRequest {
workspaceId: string
type: 'oauth'
providerId: string
displayName: string
serviceId: string
requiredScopes: string[]
requestedAt: number
}
interface PendingSecretCredentialCreateRequest {
workspaceId: string
type: 'env_personal' | 'env_workspace'
envKey?: string
requestedAt: number
}
export type PendingCredentialCreateRequest =
| PendingOAuthCredentialCreateRequest
| PendingSecretCredentialCreateRequest
function parseJson<T>(raw: string | null): T | null {
if (!raw) return null
try {
return JSON.parse(raw) as T
} catch {
return null
}
}
export function readPendingOAuthCredentialDraft(): PendingOAuthCredentialDraft | null {
if (typeof window === 'undefined') return null
return parseJson<PendingOAuthCredentialDraft>(
window.sessionStorage.getItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
)
}
export function writePendingOAuthCredentialDraft(payload: PendingOAuthCredentialDraft) {
if (typeof window === 'undefined') return
window.sessionStorage.setItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY, JSON.stringify(payload))
}
export function clearPendingOAuthCredentialDraft() {
if (typeof window === 'undefined') return
window.sessionStorage.removeItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
}
export function readPendingCredentialCreateRequest(): PendingCredentialCreateRequest | null {
if (typeof window === 'undefined') return null
return parseJson<PendingCredentialCreateRequest>(
window.sessionStorage.getItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
)
}
export function writePendingCredentialCreateRequest(payload: PendingCredentialCreateRequest) {
if (typeof window === 'undefined') return
window.sessionStorage.setItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY, JSON.stringify(payload))
}
export function clearPendingCredentialCreateRequest() {
if (typeof window === 'undefined') return
window.sessionStorage.removeItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
}
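The intended round-trip for these sessionStorage helpers, as a client-side sketch; the workspace and provider values and the ten-minute freshness check are placeholders, not part of the change.

```ts
import {
  clearPendingOAuthCredentialDraft,
  readPendingOAuthCredentialDraft,
  writePendingOAuthCredentialDraft,
} from '@/lib/credentials/pending' // path is an assumption

// Before kicking off the OAuth redirect: remember what the user asked to create.
writePendingOAuthCredentialDraft({
  workspaceId: 'ws_123',
  providerId: 'google-email',
  displayName: 'Marketing Gmail',
  existingCredentialIds: [],
  existingAccountIds: [],
  requestedAt: Date.now(),
})

// After the provider redirects back: pick the draft up, act on it, then clear it.
const draft = readPendingOAuthCredentialDraft()
if (draft && Date.now() - draft.requestedAt < 10 * 60 * 1000) {
  // e.g. select the credential whose displayName matches draft.displayName
}
clearPendingOAuthCredentialDraft()
```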

View File

@@ -0,0 +1,356 @@
import { db } from '@sim/db'
import { credential, credentialMember, permissions, workspace } from '@sim/db/schema'
import { and, eq, inArray, notInArray } from 'drizzle-orm'
interface AccessibleEnvCredential {
type: 'env_workspace' | 'env_personal'
envKey: string
envOwnerUserId: string | null
updatedAt: Date
}
function getPostgresErrorCode(error: unknown): string | undefined {
if (!error || typeof error !== 'object') return undefined
const err = error as { code?: string; cause?: { code?: string } }
return err.code || err.cause?.code
}
export async function getWorkspaceMemberUserIds(workspaceId: string): Promise<string[]> {
const [workspaceRows, permissionRows] = await Promise.all([
db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1),
db
.select({ userId: permissions.userId })
.from(permissions)
.where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))),
])
const workspaceRow = workspaceRows[0]
const memberIds = new Set<string>(permissionRows.map((row) => row.userId))
if (workspaceRow?.ownerId) {
memberIds.add(workspaceRow.ownerId)
}
return Array.from(memberIds)
}
export async function getUserWorkspaceIds(userId: string): Promise<string[]> {
const [permissionRows, ownedWorkspaceRows] = await Promise.all([
db
.select({ workspaceId: workspace.id })
.from(permissions)
.innerJoin(
workspace,
and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspace.id))
)
.where(eq(permissions.userId, userId)),
db.select({ workspaceId: workspace.id }).from(workspace).where(eq(workspace.ownerId, userId)),
])
const workspaceIds = new Set<string>(permissionRows.map((row) => row.workspaceId))
for (const row of ownedWorkspaceRows) {
workspaceIds.add(row.workspaceId)
}
return Array.from(workspaceIds)
}
async function upsertCredentialAdminMember(credentialId: string, adminUserId: string) {
const now = new Date()
const [existingMembership] = await db
.select({ id: credentialMember.id, joinedAt: credentialMember.joinedAt })
.from(credentialMember)
.where(
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, adminUserId))
)
.limit(1)
if (existingMembership) {
await db
.update(credentialMember)
.set({
role: 'admin',
status: 'active',
joinedAt: existingMembership.joinedAt ?? now,
invitedBy: adminUserId,
updatedAt: now,
})
.where(eq(credentialMember.id, existingMembership.id))
return
}
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: adminUserId,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: adminUserId,
createdAt: now,
updatedAt: now,
})
}
async function ensureWorkspaceCredentialMemberships(
credentialId: string,
workspaceId: string,
ownerUserId: string
) {
const workspaceMemberUserIds = await getWorkspaceMemberUserIds(workspaceId)
if (!workspaceMemberUserIds.length) return
const existingMemberships = await db
.select({
id: credentialMember.id,
userId: credentialMember.userId,
joinedAt: credentialMember.joinedAt,
})
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
inArray(credentialMember.userId, workspaceMemberUserIds)
)
)
const byUserId = new Map(existingMemberships.map((row) => [row.userId, row]))
const now = new Date()
for (const memberUserId of workspaceMemberUserIds) {
const targetRole = memberUserId === ownerUserId ? 'admin' : 'member'
const existing = byUserId.get(memberUserId)
if (existing) {
await db
.update(credentialMember)
.set({
role: targetRole,
status: 'active',
joinedAt: existing.joinedAt ?? now,
invitedBy: ownerUserId,
updatedAt: now,
})
.where(eq(credentialMember.id, existing.id))
continue
}
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: memberUserId,
role: targetRole,
status: 'active',
joinedAt: now,
invitedBy: ownerUserId,
createdAt: now,
updatedAt: now,
})
}
}
export async function syncWorkspaceEnvCredentials(params: {
workspaceId: string
envKeys: string[]
actingUserId: string
}) {
const { workspaceId, envKeys, actingUserId } = params
const [workspaceRow] = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
if (!workspaceRow) return
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
const existingCredentials = await db
.select({
id: credential.id,
envKey: credential.envKey,
})
.from(credential)
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
const existingByKey = new Map(
existingCredentials
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
.map((row) => [row.envKey, row.id])
)
const credentialIdsToEnsureMembership = new Set<string>()
const now = new Date()
for (const envKey of normalizedKeys) {
const existingId = existingByKey.get(envKey)
if (existingId) {
credentialIdsToEnsureMembership.add(existingId)
continue
}
const createdId = crypto.randomUUID()
try {
await db.insert(credential).values({
id: createdId,
workspaceId,
type: 'env_workspace',
displayName: envKey,
envKey,
createdBy: actingUserId,
createdAt: now,
updatedAt: now,
})
credentialIdsToEnsureMembership.add(createdId)
} catch (error: unknown) {
const code = getPostgresErrorCode(error)
if (code !== '23505') throw error
}
}
for (const credentialId of credentialIdsToEnsureMembership) {
await ensureWorkspaceCredentialMemberships(credentialId, workspaceId, workspaceRow.ownerId)
}
if (normalizedKeys.length > 0) {
await db
.delete(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_workspace'),
notInArray(credential.envKey, normalizedKeys)
)
)
return
}
await db
.delete(credential)
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
}
export async function syncPersonalEnvCredentialsForUser(params: {
userId: string
envKeys: string[]
}) {
const { userId, envKeys } = params
const workspaceIds = await getUserWorkspaceIds(userId)
if (!workspaceIds.length) return
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
const now = new Date()
for (const workspaceId of workspaceIds) {
const existingCredentials = await db
.select({
id: credential.id,
envKey: credential.envKey,
})
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envOwnerUserId, userId)
)
)
const existingByKey = new Map(
existingCredentials
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
.map((row) => [row.envKey, row.id])
)
for (const envKey of normalizedKeys) {
const existingId = existingByKey.get(envKey)
if (existingId) {
await upsertCredentialAdminMember(existingId, userId)
continue
}
const createdId = crypto.randomUUID()
try {
await db.insert(credential).values({
id: createdId,
workspaceId,
type: 'env_personal',
displayName: envKey,
envKey,
envOwnerUserId: userId,
createdBy: userId,
createdAt: now,
updatedAt: now,
})
await upsertCredentialAdminMember(createdId, userId)
} catch (error: unknown) {
const code = getPostgresErrorCode(error)
if (code !== '23505') throw error
}
}
if (normalizedKeys.length > 0) {
await db
.delete(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envOwnerUserId, userId),
notInArray(credential.envKey, normalizedKeys)
)
)
continue
}
await db
.delete(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envOwnerUserId, userId)
)
)
}
}
export async function getAccessibleEnvCredentials(
workspaceId: string,
userId: string
): Promise<AccessibleEnvCredential[]> {
const rows = await db
.select({
type: credential.type,
envKey: credential.envKey,
envOwnerUserId: credential.envOwnerUserId,
updatedAt: credential.updatedAt,
})
.from(credential)
.innerJoin(
credentialMember,
and(
eq(credentialMember.credentialId, credential.id),
eq(credentialMember.userId, userId),
eq(credentialMember.status, 'active')
)
)
.where(
and(
eq(credential.workspaceId, workspaceId),
inArray(credential.type, ['env_workspace', 'env_personal'])
)
)
return rows
.filter(
(row): row is AccessibleEnvCredential =>
(row.type === 'env_workspace' || row.type === 'env_personal') && Boolean(row.envKey)
)
.map((row) => ({
type: row.type,
envKey: row.envKey!,
envOwnerUserId: row.envOwnerUserId,
updatedAt: row.updatedAt,
}))
}
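A minimal sketch of wiring these helpers into an env-variable save path; the wrapper function and its parameters are assumptions, only the two helper signatures come from the file above.

```ts
import {
  getAccessibleEnvCredentials,
  syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment' // path is an assumption

export async function afterWorkspaceEnvSave(params: {
  workspaceId: string
  userId: string
  savedVariables: Record<string, string>
}) {
  // Mirror every saved key as an env_workspace credential; credentials for removed keys are dropped.
  await syncWorkspaceEnvCredentials({
    workspaceId: params.workspaceId,
    envKeys: Object.keys(params.savedVariables),
    actingUserId: params.userId,
  })

  // Resolve only the env keys this user can actually use in the workspace.
  const accessible = await getAccessibleEnvCredentials(params.workspaceId, params.userId)
  return new Set(accessible.map((row) => row.envKey))
}
```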

View File

@@ -0,0 +1,195 @@
import { db } from '@sim/db'
import { account, credential, credentialMember } from '@sim/db/schema'
import { and, eq, inArray } from 'drizzle-orm'
import { getServiceConfigByProviderId } from '@/lib/oauth'
interface SyncWorkspaceOAuthCredentialsForUserParams {
workspaceId: string
userId: string
}
interface SyncWorkspaceOAuthCredentialsForUserResult {
createdCredentials: number
updatedMemberships: number
}
function getPostgresErrorCode(error: unknown): string | undefined {
if (!error || typeof error !== 'object') return undefined
const err = error as { code?: string; cause?: { code?: string } }
return err.code || err.cause?.code
}
/**
* Ensures connected OAuth accounts for a user exist as workspace-scoped credentials.
*/
export async function syncWorkspaceOAuthCredentialsForUser(
params: SyncWorkspaceOAuthCredentialsForUserParams
): Promise<SyncWorkspaceOAuthCredentialsForUserResult> {
const { workspaceId, userId } = params
const userAccounts = await db
.select({
id: account.id,
providerId: account.providerId,
accountId: account.accountId,
})
.from(account)
.where(eq(account.userId, userId))
if (userAccounts.length === 0) {
return { createdCredentials: 0, updatedMemberships: 0 }
}
const accountIds = userAccounts.map((row) => row.id)
const existingCredentials = await db
.select({
id: credential.id,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
})
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'oauth'),
inArray(credential.accountId, accountIds)
)
)
const now = new Date()
const userAccountById = new Map(userAccounts.map((row) => [row.id, row]))
for (const existingCredential of existingCredentials) {
if (!existingCredential.accountId) continue
const linkedAccount = userAccountById.get(existingCredential.accountId)
if (!linkedAccount) continue
const normalizedLabel =
getServiceConfigByProviderId(linkedAccount.providerId)?.name || linkedAccount.providerId
const shouldNormalizeDisplayName =
existingCredential.displayName === linkedAccount.accountId ||
existingCredential.displayName === linkedAccount.providerId
if (!shouldNormalizeDisplayName || existingCredential.displayName === normalizedLabel) {
continue
}
await db
.update(credential)
.set({
displayName: normalizedLabel,
updatedAt: now,
})
.where(eq(credential.id, existingCredential.id))
}
const existingByAccountId = new Map(
existingCredentials
.filter((row) => Boolean(row.accountId))
.map((row) => [row.accountId!, row.id])
)
let createdCredentials = 0
for (const acc of userAccounts) {
if (existingByAccountId.has(acc.id)) {
continue
}
try {
await db.insert(credential).values({
id: crypto.randomUUID(),
workspaceId,
type: 'oauth',
displayName: getServiceConfigByProviderId(acc.providerId)?.name || acc.providerId,
providerId: acc.providerId,
accountId: acc.id,
createdBy: userId,
createdAt: now,
updatedAt: now,
})
createdCredentials += 1
} catch (error) {
if (getPostgresErrorCode(error) !== '23505') {
throw error
}
}
}
const credentialRows = await db
.select({ id: credential.id, accountId: credential.accountId })
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'oauth'),
inArray(credential.accountId, accountIds)
)
)
const credentialIdByAccountId = new Map(
credentialRows.filter((row) => Boolean(row.accountId)).map((row) => [row.accountId!, row.id])
)
const allCredentialIds = Array.from(credentialIdByAccountId.values())
if (allCredentialIds.length === 0) {
return { createdCredentials, updatedMemberships: 0 }
}
const existingMemberships = await db
.select({
id: credentialMember.id,
credentialId: credentialMember.credentialId,
joinedAt: credentialMember.joinedAt,
})
.from(credentialMember)
.where(
and(
inArray(credentialMember.credentialId, allCredentialIds),
eq(credentialMember.userId, userId)
)
)
const membershipByCredentialId = new Map(
existingMemberships.map((row) => [row.credentialId, row])
)
let updatedMemberships = 0
for (const credentialId of allCredentialIds) {
const existingMembership = membershipByCredentialId.get(credentialId)
if (existingMembership) {
await db
.update(credentialMember)
.set({
role: 'admin',
status: 'active',
joinedAt: existingMembership.joinedAt ?? now,
invitedBy: userId,
updatedAt: now,
})
.where(eq(credentialMember.id, existingMembership.id))
updatedMemberships += 1
continue
}
try {
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: userId,
createdAt: now,
updatedAt: now,
})
updatedMemberships += 1
} catch (error) {
if (getPostgresErrorCode(error) !== '23505') {
throw error
}
}
}
return { createdCredentials, updatedMemberships }
}
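A hedged usage sketch for the new sync helper: it would plausibly run when a user joins a workspace or connects an OAuth account, mirroring their accounts into workspace-scoped credentials with admin memberships. The module path and call site below are assumptions for illustration; only the exported function and its parameter/result shape come from the file above.

// Illustrative call site; the module path '@/lib/credentials/oauth' is an assumption.
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'

async function onWorkspaceMemberJoined(workspaceId: string, userId: string) {
  const { createdCredentials, updatedMemberships } = await syncWorkspaceOAuthCredentialsForUser({
    workspaceId,
    userId,
  })
  // Log how many credentials were created and how many memberships were ensured.
  console.log(`oauth credential sync: ${createdCredentials} created, ${updatedMemberships} memberships ensured`)
}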

View File

@@ -1,8 +1,9 @@
import { db } from '@sim/db'
import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
-import { eq } from 'drizzle-orm'
+import { eq, inArray } from 'drizzle-orm'
import { decryptSecret } from '@/lib/core/security/encryption'
+import { getAccessibleEnvCredentials } from '@/lib/credentials/environment'
const logger = createLogger('EnvironmentUtils')
@@ -53,7 +54,7 @@ export async function getPersonalAndWorkspaceEnv(
conflicts: string[]
decryptionFailures: string[]
}> {
-const [personalRows, workspaceRows] = await Promise.all([
+const [personalRows, workspaceRows, accessibleEnvCredentials] = await Promise.all([
db.select().from(environment).where(eq(environment.userId, userId)).limit(1),
workspaceId
? db
@@ -62,10 +63,69 @@ export async function getPersonalAndWorkspaceEnv(
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
.limit(1)
: Promise.resolve([] as any[]),
+workspaceId ? getAccessibleEnvCredentials(workspaceId, userId) : Promise.resolve([]),
])
-const personalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
+const ownPersonalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
-const workspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
+const allWorkspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
const hasCredentialFiltering = Boolean(workspaceId) && accessibleEnvCredentials.length > 0
const workspaceCredentialKeys = new Set(
accessibleEnvCredentials.filter((row) => row.type === 'env_workspace').map((row) => row.envKey)
)
const personalCredentialRows = accessibleEnvCredentials
.filter((row) => row.type === 'env_personal' && row.envOwnerUserId)
.sort((a, b) => {
const aIsRequester = a.envOwnerUserId === userId
const bIsRequester = b.envOwnerUserId === userId
if (aIsRequester && !bIsRequester) return -1
if (!aIsRequester && bIsRequester) return 1
return b.updatedAt.getTime() - a.updatedAt.getTime()
})
const selectedPersonalOwners = new Map<string, string>()
for (const row of personalCredentialRows) {
if (!selectedPersonalOwners.has(row.envKey) && row.envOwnerUserId) {
selectedPersonalOwners.set(row.envKey, row.envOwnerUserId)
}
}
const ownerUserIds = Array.from(new Set(selectedPersonalOwners.values()))
const ownerEnvironmentRows =
ownerUserIds.length > 0
? await db
.select({
userId: environment.userId,
variables: environment.variables,
})
.from(environment)
.where(inArray(environment.userId, ownerUserIds))
: []
const ownerVariablesByUserId = new Map<string, Record<string, string>>(
ownerEnvironmentRows.map((row) => [row.userId, (row.variables as Record<string, string>) || {}])
)
let personalEncrypted: Record<string, string> = ownPersonalEncrypted
let workspaceEncrypted: Record<string, string> = allWorkspaceEncrypted
if (hasCredentialFiltering) {
personalEncrypted = {}
for (const [envKey, ownerUserId] of selectedPersonalOwners.entries()) {
const ownerVariables = ownerVariablesByUserId.get(ownerUserId)
const encryptedValue = ownerVariables?.[envKey]
if (encryptedValue) {
personalEncrypted[envKey] = encryptedValue
}
}
workspaceEncrypted = Object.fromEntries(
Object.entries(allWorkspaceEncrypted).filter(([envKey]) =>
workspaceCredentialKeys.has(envKey)
)
)
}
const decryptionFailures: string[] = []
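For readers of the hunk above, the personal-credential selection boils down to: for each env key, prefer the requesting user's own personal value, otherwise take the most recently updated share. A minimal standalone sketch of that rule follows; the type and function name are illustrative, not from the diff.

interface PersonalEnvRow {
  envKey: string
  envOwnerUserId: string
  updatedAt: Date
}

// Returns envKey -> owner userId, mirroring the sort-then-first-wins logic above.
function pickPersonalOwners(rows: PersonalEnvRow[], requesterId: string): Map<string, string> {
  const sorted = [...rows].sort((a, b) => {
    const aMine = a.envOwnerUserId === requesterId
    const bMine = b.envOwnerUserId === requesterId
    if (aMine !== bMine) return aMine ? -1 : 1
    return b.updatedAt.getTime() - a.updatedAt.getTime()
  })
  const owners = new Map<string, string>()
  for (const row of sorted) {
    if (!owners.has(row.envKey)) owners.set(row.envKey, row.envOwnerUserId)
  }
  return owners
}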

View File

@@ -1,246 +0,0 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
const logger = createLogger('ExecutionEventBuffer')
const REDIS_PREFIX = 'execution:stream:'
const TTL_SECONDS = 60 * 60 // 1 hour
const EVENT_LIMIT = 1000
const RESERVE_BATCH = 100
const FLUSH_INTERVAL_MS = 15
const FLUSH_MAX_BATCH = 200
function getEventsKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:events`
}
function getSeqKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:seq`
}
function getMetaKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:meta`
}
export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'
export interface ExecutionStreamMeta {
status: ExecutionStreamStatus
userId?: string
workflowId?: string
updatedAt?: string
}
export interface ExecutionEventEntry {
eventId: number
executionId: string
event: ExecutionEvent
}
export interface ExecutionEventWriter {
write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
flush: () => Promise<void>
close: () => Promise<void>
}
export async function setExecutionMeta(
executionId: string,
meta: Partial<ExecutionStreamMeta>
): Promise<void> {
const redis = getRedisClient()
if (!redis) {
logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
return
}
try {
const key = getMetaKey(executionId)
const payload: Record<string, string> = {
updatedAt: new Date().toISOString(),
}
if (meta.status) payload.status = meta.status
if (meta.userId) payload.userId = meta.userId
if (meta.workflowId) payload.workflowId = meta.workflowId
await redis.hset(key, payload)
await redis.expire(key, TTL_SECONDS)
} catch (error) {
logger.warn('Failed to update execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
}
}
export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
const redis = getRedisClient()
if (!redis) {
logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
return null
}
try {
const key = getMetaKey(executionId)
const meta = await redis.hgetall(key)
if (!meta || Object.keys(meta).length === 0) return null
return meta as unknown as ExecutionStreamMeta
} catch (error) {
logger.warn('Failed to read execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return null
}
}
export async function readExecutionEvents(
executionId: string,
afterEventId: number
): Promise<ExecutionEventEntry[]> {
const redis = getRedisClient()
if (!redis) return []
try {
const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
return raw
.map((entry) => {
try {
return JSON.parse(entry) as ExecutionEventEntry
} catch {
return null
}
})
.filter((entry): entry is ExecutionEventEntry => Boolean(entry))
} catch (error) {
logger.warn('Failed to read execution events', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return []
}
}
export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
const redis = getRedisClient()
if (!redis) {
logger.warn(
'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
{
executionId,
}
)
return {
write: async (event) => ({ eventId: 0, executionId, event }),
flush: async () => {},
close: async () => {},
}
}
let pending: ExecutionEventEntry[] = []
let nextEventId = 0
let maxReservedId = 0
let flushTimer: ReturnType<typeof setTimeout> | null = null
const scheduleFlush = () => {
if (flushTimer) return
flushTimer = setTimeout(() => {
flushTimer = null
void flush()
}, FLUSH_INTERVAL_MS)
}
const reserveIds = async (minCount: number) => {
const reserveCount = Math.max(RESERVE_BATCH, minCount)
const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
const startId = newMax - reserveCount + 1
if (nextEventId === 0 || nextEventId > maxReservedId) {
nextEventId = startId
maxReservedId = newMax
}
}
let flushPromise: Promise<void> | null = null
let closed = false
const inflightWrites = new Set<Promise<ExecutionEventEntry>>()
const doFlush = async () => {
if (pending.length === 0) return
const batch = pending
pending = []
try {
const key = getEventsKey(executionId)
const zaddArgs: (string | number)[] = []
for (const entry of batch) {
zaddArgs.push(entry.eventId, JSON.stringify(entry))
}
const pipeline = redis.pipeline()
pipeline.zadd(key, ...zaddArgs)
pipeline.expire(key, TTL_SECONDS)
pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
await pipeline.exec()
} catch (error) {
logger.warn('Failed to flush execution events', {
executionId,
batchSize: batch.length,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})
pending = batch.concat(pending)
}
}
const flush = async () => {
if (flushPromise) {
await flushPromise
return
}
flushPromise = doFlush()
try {
await flushPromise
} finally {
flushPromise = null
if (pending.length > 0) scheduleFlush()
}
}
const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
if (closed) return { eventId: 0, executionId, event }
if (nextEventId === 0 || nextEventId > maxReservedId) {
await reserveIds(1)
}
const eventId = nextEventId++
const entry: ExecutionEventEntry = { eventId, executionId, event }
pending.push(entry)
if (pending.length >= FLUSH_MAX_BATCH) {
await flush()
} else {
scheduleFlush()
}
return entry
}
const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
const p = writeCore(event)
inflightWrites.add(p)
const remove = () => inflightWrites.delete(p)
p.then(remove, remove)
return p
}
const close = async () => {
closed = true
if (flushTimer) {
clearTimeout(flushTimer)
flushTimer = null
}
if (inflightWrites.size > 0) {
await Promise.allSettled(inflightWrites)
}
if (flushPromise) {
await flushPromise
}
if (pending.length > 0) {
await doFlush()
}
}
return { write, flush, close }
}

View File

@@ -2364,261 +2364,6 @@ describe('hasWorkflowChanged', () => {
})
})
describe('Trigger Config Normalization (False Positive Prevention)', () => {
it.concurrent(
'should not detect change when deployed has null fields but current has values from triggerConfig',
() => {
// Core scenario: deployed state has null individual fields, current state has
// values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should detect change when user edits a trigger field to a different value',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
it.concurrent('should not detect change when both sides have no triggerConfig', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change when deployed has empty fields and triggerConfig populates them',
() => {
// Empty string is also treated as "empty" by normalizeTriggerConfigValues
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent('should not detect change when triggerId differs', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: 'slack_webhook' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'old payload' },
triggerInstructions_slack_webhook: { value: 'old instructions' },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'new payload' },
triggerInstructions_slack_webhook: { value: 'new instructions' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should handle mixed scenario: some fields from triggerConfig, some user-edited',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
includeFiles: { id: 'includeFiles', type: 'switch', value: false },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
includeFiles: { id: 'includeFiles', type: 'switch', value: true },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
// includeFiles changed from false to true — this IS a real change
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
})
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
it.concurrent('should not detect change when webhookId differs', () => {
const deployedState = createWorkflowState({

View File

@@ -9,7 +9,6 @@ import {
normalizeLoop,
normalizeParallel,
normalizeSubBlockValue,
-normalizeTriggerConfigValues,
normalizeValue,
normalizeVariables,
sanitizeVariable,
@@ -173,18 +172,14 @@ export function generateWorkflowDiffSummary(
}
}
-// Normalize trigger config values for both states before comparison
-const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
-const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
// Compare subBlocks using shared helper for filtering (single source of truth)
const allSubBlockIds = filterSubBlockIds([
-...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
+...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
])
for (const subId of allSubBlockIds) {
-const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
+const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
-const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined
+const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
if (!currentSub || !previousSub) {
changes.push({

View File

@@ -4,12 +4,10 @@
import { describe, expect, it } from 'vitest'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
-filterSubBlockIds,
normalizedStringify,
normalizeEdge,
normalizeLoop,
normalizeParallel,
-normalizeTriggerConfigValues,
normalizeValue,
sanitizeInputFormat,
sanitizeTools,
@@ -586,226 +584,4 @@ describe('Workflow Normalization Utilities', () => {
expect(result2).toBe(result3)
})
})
describe('filterSubBlockIds', () => {
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
const ids = [
'signingSecret',
'samplePayload_slack_webhook',
'triggerInstructions_slack_webhook',
'webhookUrlDisplay_slack_webhook',
'botToken',
]
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
const ids = ['mySamplePayload', 'notSamplePayload']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
})
it.concurrent('should return sorted results', () => {
const ids = ['zebra', 'alpha', 'middle']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['alpha', 'middle', 'zebra'])
})
it.concurrent('should handle empty array', () => {
expect(filterSubBlockIds([])).toEqual([])
})
it.concurrent('should handle all IDs being excluded', () => {
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
const result = filterSubBlockIds(ids)
expect(result).toEqual([])
})
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['realField'])
})
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
it.concurrent('should exclude synthetic tool-input subBlock IDs', () => {
const ids = [
'toolConfig',
'toolConfig-tool-0-query',
'toolConfig-tool-0-url',
'toolConfig-tool-1-status',
'systemPrompt',
]
const result = filterSubBlockIds(ids)
expect(result).toEqual(['systemPrompt', 'toolConfig'])
})
})
describe('normalizeTriggerConfigValues', () => {
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
const subBlocks = {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent(
'should return subBlocks unchanged when triggerConfig value is not an object',
() => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
}
)
it.concurrent('should populate null individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
})
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
})
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: null, botToken: undefined },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
expect((result.botToken as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { nonExistentField: 'value123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result.nonExistentField).toBeUndefined()
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should not mutate the original subBlocks object', () => {
const original = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
normalizeTriggerConfigValues(original)
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should preserve other subBlock properties when populating value', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: {
id: 'signingSecret',
type: 'short-input',
value: null,
placeholder: 'Enter signing secret',
},
}
const result = normalizeTriggerConfigValues(subBlocks)
const normalized = result.signingSecret as Record<string, unknown>
expect(normalized.value).toBe('secret123')
expect(normalized.id).toBe('signingSecret')
expect(normalized.type).toBe('short-input')
expect(normalized.placeholder).toBe('Enter signing secret')
})
})
})

View File

@@ -411,63 +411,17 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
}
/**
- * Pattern matching synthetic subBlock IDs created by ToolSubBlockRenderer.
- * These IDs follow the format `{subBlockId}-tool-{index}-{paramId}` and are
- * mirrors of values already stored in toolConfig.value.tools[N].params.
- */
-const SYNTHETIC_TOOL_SUBBLOCK_RE = /-tool-\d+-/
-/**
- * Filters subBlock IDs to exclude system, trigger runtime, and synthetic tool subBlocks.
+ * Filters subBlock IDs to exclude system and trigger runtime subBlocks.
 *
 * @param subBlockIds - Array of subBlock IDs to filter
 * @returns Filtered and sorted array of subBlock IDs
 */
export function filterSubBlockIds(subBlockIds: string[]): string[] {
return subBlockIds
-.filter((id) => {
-if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
-if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
-return false
-if (SYNTHETIC_TOOL_SUBBLOCK_RE.test(id)) return false
-return true
-})
+.filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
.sort()
}
/**
* Normalizes trigger block subBlocks by populating null/empty individual fields
* from the triggerConfig aggregate subBlock. This compensates for the runtime
* population done by populateTriggerFieldsFromConfig, ensuring consistent
* comparison between client state (with populated values) and deployed state
* (with null values from DB).
*/
export function normalizeTriggerConfigValues(
subBlocks: Record<string, unknown>
): Record<string, unknown> {
const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
const triggerConfigValue = triggerConfigSub?.value
if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
return subBlocks
}
const result = { ...subBlocks }
for (const [fieldId, configValue] of Object.entries(
triggerConfigValue as Record<string, unknown>
)) {
if (configValue === null || configValue === undefined) continue
const existingSub = result[fieldId] as Record<string, unknown> | undefined
if (
existingSub &&
(existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
) {
result[fieldId] = { ...existingSub, value: configValue }
}
}
return result
}
/**
 * Normalizes a subBlock value with sanitization for specific subBlock types.
 * Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)

View File

@@ -129,18 +129,6 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
})
},
setCurrentExecutionId: (workflowId, executionId) => {
set({
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
currentExecutionId: executionId,
}),
})
},
getCurrentExecutionId: (workflowId) => {
return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
},
clearRunPath: (workflowId) => {
set({
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {

View File

@@ -35,8 +35,6 @@ export interface WorkflowExecutionState {
lastRunPath: Map<string, BlockRunStatus>
/** Maps edge IDs to their run result from the last execution */
lastRunEdges: Map<string, EdgeRunStatus>
-/** The execution ID of the currently running execution */
-currentExecutionId: string | null
}
/**
@@ -56,7 +54,6 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
debugContext: null,
lastRunPath: new Map(),
lastRunEdges: new Map(),
-currentExecutionId: null,
}
/**
@@ -99,10 +96,6 @@ export interface ExecutionActions {
setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
/** Clears the run path and run edges for a workflow */
clearRunPath: (workflowId: string) => void
-/** Stores the current execution ID for a workflow */
-setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
-/** Returns the current execution ID for a workflow */
-getCurrentExecutionId: (workflowId: string) => string | null
/** Resets the entire store to its initial empty state */
reset: () => void
/** Stores a serializable execution snapshot for a workflow */

View File

@@ -1,5 +1,6 @@
export type SettingsSection =
| 'general'
+| 'credentials'
| 'environment'
| 'template-profile'
| 'integrations'

View File

@@ -310,50 +310,6 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
}
/**
* Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
*
* Examples:
* - claude-4.5-opus -> claude-opus-4-5
* - claude-opus-4.6 -> claude-opus-4-6
* - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
*/
function canonicalizeModelMatchKey(modelId: string): string {
if (!modelId) return modelId
const normalized = modelId.trim().toLowerCase()
const toCanonicalClaude = (tier: string, version: string): string => {
const normalizedVersion = version.replace(/\./g, '-')
return `claude-${tier}-${normalizedVersion}`
}
const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
if (tierFirstExact) {
const [, tier, version] = tierFirstExact
return toCanonicalClaude(tier, version)
}
const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
if (versionFirstExact) {
const [, version, tier] = versionFirstExact
return toCanonicalClaude(tier, version)
}
const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
if (tierFirstEmbedded) {
const [, tier, version] = tierFirstEmbedded
return toCanonicalClaude(tier, version)
}
const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
if (versionFirstEmbedded) {
const [, version, tier] = versionFirstEmbedded
return toCanonicalClaude(tier, version)
}
return normalized
}
const MODEL_PROVIDER_PRIORITY = [
'anthropic',
'bedrock',
@@ -394,23 +350,12 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
const { provider, modelId } = parseModelKey(selectedModel)
const targetModelId = modelId || selectedModel
-const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
-const matches = models.filter((m) => {
-const candidateModelId = parseModelKey(m.id).modelId || m.id
-const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
-return (
-candidateModelId === targetModelId ||
-m.id.endsWith(`/${targetModelId}`) ||
-candidateMatchKey === targetMatchKey
-)
-})
+const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
if (matches.length === 0) return selectedModel
if (provider) {
-const sameProvider = matches.find(
-(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
-)
+const sameProvider = matches.find((m) => m.provider === provider)
if (sameProvider) return sameProvider.id
}
@@ -1148,12 +1093,11 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = chat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
-const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)
logger.debug('[Chat] Restoring chat config', {
chatId: chat.id,
mode: chatMode,
-model: normalizedChatModel,
+model: chatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1175,7 +1119,7 @@ export const useCopilotStore = create<CopilotStore>()(
showPlanTodos: false,
streamingPlanContent: planArtifact,
mode: chatMode,
-selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
+selectedModel: chatModel as CopilotStore['selectedModel'],
suppressAutoSelect: false,
})
@@ -1348,10 +1292,6 @@ export const useCopilotStore = create<CopilotStore>()(
const refreshedConfig = updatedCurrentChat.config ?? {}
const refreshedMode = refreshedConfig.mode || get().mode
const refreshedModel = refreshedConfig.model || get().selectedModel
-const normalizedRefreshedModel = normalizeSelectedModelKey(
-refreshedModel,
-get().availableModels
-)
const toolCallsById = buildToolCallsById(normalizedMessages)
set({
@@ -1360,7 +1300,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: refreshedPlanArtifact,
mode: refreshedMode,
-selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
+selectedModel: refreshedModel as CopilotStore['selectedModel'],
})
}
try {
@@ -1380,15 +1320,11 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = mostRecentChat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
-const normalizedChatModel = normalizeSelectedModelKey(
-chatModel,
-get().availableModels
-)
logger.info('[Chat] Auto-selecting most recent chat with config', {
chatId: mostRecentChat.id,
mode: chatMode,
-model: normalizedChatModel,
+model: chatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1400,7 +1336,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: planArtifact,
mode: chatMode,
-selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
+selectedModel: chatModel as CopilotStore['selectedModel'],
})
try {
await get().loadMessageCheckpoints(mostRecentChat.id)
@@ -2332,8 +2268,7 @@ export const useCopilotStore = create<CopilotStore>()(
},
setSelectedModel: async (model) => {
-const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
-set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
+set({ selectedModel: model })
},
setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
loadAvailableModels: async () => {

View File

@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
const newEntry = get().entries[0]
-if (newEntry?.error && newEntry.blockType !== 'cancelled') {
+if (newEntry?.error) {
notifyBlockError({
error: newEntry.error,
blockName: newEntry.blockName || 'Unknown Block',
@@ -243,11 +243,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
useExecutionStore.getState().clearRunPath(workflowId)
},
-clearExecutionEntries: (executionId: string) =>
-set((state) => ({
-entries: state.entries.filter((e) => e.executionId !== executionId),
-})),
exportConsoleCSV: (workflowId: string) => {
const entries = get().entries.filter((entry) => entry.workflowId === workflowId)
@@ -475,24 +470,12 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
},
merge: (persistedState, currentState) => {
const persisted = persistedState as Partial<ConsoleStore> | undefined
-const rawEntries = persisted?.entries ?? currentState.entries
+const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
-const oneHourAgo = Date.now() - 60 * 60 * 1000
-const entries = rawEntries.map((entry, index) => {
-let updated = entry
if (entry.executionOrder === undefined) {
-updated = { ...updated, executionOrder: index + 1 }
+return { ...entry, executionOrder: index + 1 }
}
-if (
+return entry
-entry.isRunning &&
-entry.startedAt &&
-new Date(entry.startedAt).getTime() < oneHourAgo
-) {
-updated = { ...updated, isRunning: false }
-}
-return updated
})
return {
...currentState,
entries,

View File

@@ -51,7 +51,6 @@ export interface ConsoleStore {
isOpen: boolean
addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
clearWorkflowConsole: (workflowId: string) => void
-clearExecutionEntries: (executionId: string) => void
exportConsoleCSV: (workflowId: string) => void
getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
toggleConsole: () => void

View File

@@ -1,114 +0,0 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeleteLabelParams {
accessToken: string
domain: string
pageId: string
labelName: string
cloudId?: string
}
export interface ConfluenceDeleteLabelResponse {
success: boolean
output: {
ts: string
pageId: string
labelName: string
deleted: boolean
}
}
export const confluenceDeleteLabelTool: ToolConfig<
ConfluenceDeleteLabelParams,
ConfluenceDeleteLabelResponse
> = {
id: 'confluence_delete_label',
name: 'Confluence Delete Label',
description: 'Remove a label from a Confluence page.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Confluence page ID to remove the label from',
},
labelName: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Name of the label to remove',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/labels',
method: 'DELETE',
headers: (params: ConfluenceDeleteLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeleteLabelParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
labelName: params.labelName?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
labelName: data.labelName ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: {
type: 'string',
description: 'Page ID the label was removed from',
},
labelName: {
type: 'string',
description: 'Name of the removed label',
},
deleted: {
type: 'boolean',
description: 'Deletion status',
},
},
}

View File

@@ -1,105 +0,0 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeletePagePropertyParams {
accessToken: string
domain: string
pageId: string
propertyId: string
cloudId?: string
}
export interface ConfluenceDeletePagePropertyResponse {
success: boolean
output: {
ts: string
pageId: string
propertyId: string
deleted: boolean
}
}
export const confluenceDeletePagePropertyTool: ToolConfig<
ConfluenceDeletePagePropertyParams,
ConfluenceDeletePagePropertyResponse
> = {
id: 'confluence_delete_page_property',
name: 'Confluence Delete Page Property',
description: 'Delete a content property from a Confluence page by its property ID.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the page containing the property',
},
propertyId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the property to delete',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/page-properties',
method: 'DELETE',
headers: (params: ConfluenceDeletePagePropertyParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeletePagePropertyParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
propertyId: params.propertyId?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
propertyId: data.propertyId ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: { type: 'string', description: 'ID of the page' },
propertyId: { type: 'string', description: 'ID of the deleted property' },
deleted: { type: 'boolean', description: 'Deletion status' },
},
}

View File

@@ -1,143 +0,0 @@
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceGetPagesByLabelParams {
accessToken: string
domain: string
labelId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceGetPagesByLabelResponse {
success: boolean
output: {
ts: string
labelId: string
pages: Array<{
id: string
title: string
status: string | null
spaceId: string | null
parentId: string | null
authorId: string | null
createdAt: string | null
version: {
number: number
message?: string
createdAt?: string
} | null
}>
nextCursor: string | null
}
}
export const confluenceGetPagesByLabelTool: ToolConfig<
ConfluenceGetPagesByLabelParams,
ConfluenceGetPagesByLabelResponse
> = {
id: 'confluence_get_pages_by_label',
name: 'Confluence Get Pages by Label',
description: 'Retrieve all pages that have a specific label applied.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
labelId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the label to get pages for',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of pages to return (default: 50, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceGetPagesByLabelParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
labelId: params.labelId,
limit: String(params.limit || 50),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/pages-by-label?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceGetPagesByLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
labelId: data.labelId ?? '',
pages: data.pages ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
labelId: { type: 'string', description: 'ID of the label' },
pages: {
type: 'array',
description: 'Array of pages with this label',
items: {
type: 'object',
properties: PAGE_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}

View File

@@ -5,14 +5,11 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
-import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
-import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'
import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost'
import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors'
import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children'
import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version'
-import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
import { confluenceGetSpaceTool } from '@/tools/confluence/get_space'
import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments'
import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts'
@@ -22,7 +19,6 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels'
import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties'
import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions'
import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space'
-import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'
import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces'
import { confluenceRetrieveTool } from '@/tools/confluence/retrieve'
import { confluenceSearchTool } from '@/tools/confluence/search'
@@ -82,7 +78,6 @@ export {
// Page Properties Tools
confluenceListPagePropertiesTool,
confluenceCreatePagePropertyTool,
-confluenceDeletePagePropertyTool,
// Blog Post Tools
confluenceListBlogPostsTool,
confluenceGetBlogPostTool,
@@ -103,9 +98,6 @@ export {
// Label Tools
confluenceListLabelsTool,
confluenceAddLabelTool,
-confluenceDeleteLabelTool,
-confluenceGetPagesByLabelTool,
-confluenceListSpaceLabelsTool,
// Space Tools
confluenceGetSpaceTool,
confluenceListSpacesTool,

View File

@@ -1,134 +0,0 @@
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceListSpaceLabelsParams {
accessToken: string
domain: string
spaceId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceListSpaceLabelsResponse {
success: boolean
output: {
ts: string
spaceId: string
labels: Array<{
id: string
name: string
prefix: string
}>
nextCursor: string | null
}
}
export const confluenceListSpaceLabelsTool: ToolConfig<
ConfluenceListSpaceLabelsParams,
ConfluenceListSpaceLabelsResponse
> = {
id: 'confluence_list_space_labels',
name: 'Confluence List Space Labels',
description: 'List all labels associated with a Confluence space.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
spaceId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the Confluence space to list labels from',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of labels to return (default: 25, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceListSpaceLabelsParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
spaceId: params.spaceId,
limit: String(params.limit || 25),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/space-labels?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceListSpaceLabelsParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
spaceId: data.spaceId ?? '',
labels: data.labels ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
spaceId: { type: 'string', description: 'ID of the space' },
labels: {
type: 'array',
description: 'Array of labels on the space',
items: {
type: 'object',
properties: LABEL_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}

View File

@@ -1,7 +1,6 @@
import {
buildCanonicalIndex,
type CanonicalIndex,
-type CanonicalModeOverrides,
evaluateSubBlockCondition,
getCanonicalValues,
isCanonicalPair,
@@ -13,10 +12,7 @@ import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
export {
buildCanonicalIndex,
type CanonicalIndex,
-type CanonicalModeOverrides,
evaluateSubBlockCondition,
-isCanonicalPair,
-resolveCanonicalMode,
type SubBlockCondition,
}

Some files were not shown because too many files have changed in this diff.