Compare commits

..

22 Commits

Author SHA1 Message Date
waleed
6ee73fa22f update change detection to account for synthetic tool ids 2026-02-12 12:28:47 -08:00
waleed
b1cde0265c update styling + move predeploy checks earlier for first time deploys 2026-02-12 12:24:51 -08:00
waleed
54ed579dbd fix spacing and optional tag 2026-02-12 12:05:49 -08:00
waleed
837a13ec4f fix(tool-input): increase gap between SubBlock params for visual clarity
SubBlock's internal gap (10px between label and input) matched the
between-parameter gap (10px), making them indistinguishable. Increase
the between-parameter gap to 14px so consecutive parameters are
visually distinct, matching the separation seen in ParameterWithLabel.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 11:54:45 -08:00
waleed
f707636162 fix(tool-input): apply extra top padding only to SubBlock-first path
Revert container padding to py-[8px] (MCP tools were correct).
Wrap SubBlock-first output in a div with pt-[4px] so only registry
tools get extra breathing room from the container top.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 11:52:24 -08:00
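A minimal sketch of the conditional wrapper f707636162 describes, assuming a React + Tailwind setup consistent with the rest of this diff; the component name, prop, and JSX structure are illustrative assumptions, not the merged code.

```tsx
import type { ReactNode } from 'react'

// Hypothetical expanded-tool body: the container keeps py-[8px] (already correct for
// MCP tools), and only the SubBlock-first path is wrapped with pt-[4px] so registry
// tools get extra breathing room from the container top.
function ToolBody({ usesSubBlocks, children }: { usesSubBlocks: boolean; children: ReactNode }) {
  return (
    <div className='py-[8px]'>
      {usesSubBlocks ? <div className='pt-[4px]'>{children}</div> : children}
    </div>
  )
}
```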
waleed
b65768bc41 fix(tool-input): increase top padding of expanded tool body
Bump the expanded tool body container's top padding from 8px to 12px
for more breathing room between the header bar and the first parameter.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 11:51:07 -08:00
waleed
3e17627d78 fix(tool-input): align (optional) text to baseline instead of center
Use items-baseline instead of items-center on Label flex containers
so the smaller (optional) text aligns with the label text baseline
rather than sitting slightly below it.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 11:43:15 -08:00
waleed
a25b26e1e9 fix(tool-input): correct workflow selector visibility and tighten (optional) spacing
- Set workflowId param to user-only in workflow_executor tool config
  so "Select Workflow" no longer shows "(optional)" indicator
- Tighten (optional) label spacing with -ml-[3px] to counteract
  parent Label's gap-[6px], making it feel inline with the label text

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 11:42:45 -08:00
waleed
41ed85905d fix(tool-input): auto-refresh workflow inputs after redeploy
After redeploying a child workflow via the stale badge, the workflow
state cache was not invalidated, so WorkflowInputMapperInput kept
showing stale input fields until page refresh. Now invalidates
workflowKeys.state on deploy success.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 11:27:50 -08:00
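A minimal sketch of the invalidation 41ed85905d describes, assuming a TanStack Query client; apart from the workflowKeys.state name taken from the commit message, the key factory, deploy endpoint, and hook shape are illustrative assumptions rather than the repository's actual code.

```ts
import { useQueryClient } from '@tanstack/react-query'

// Hypothetical key factory mirroring the workflowKeys.state key named in the commit.
const workflowKeys = {
  state: (workflowId: string) => ['workflow', workflowId, 'state'] as const,
}

// Hook-shaped sketch: after the child workflow redeploys successfully, drop its cached
// state so components like WorkflowInputMapperInput refetch the new input fields
// instead of showing stale ones until a page refresh.
function useRedeployChildWorkflow() {
  const queryClient = useQueryClient()

  return async (childWorkflowId: string): Promise<void> => {
    const res = await fetch(`/api/workflows/${childWorkflowId}/deploy`, { method: 'POST' })
    if (!res.ok) throw new Error(`Deploy failed with status ${res.status}`)
    await queryClient.invalidateQueries({ queryKey: workflowKeys.state(childWorkflowId) })
  }
}
```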
waleed
c43f502ffb fix(tool-input): render uncovered tool params alongside subblocks
The SubBlock-first rendering path was hard-returning after rendering
subblocks, so tool params without matching subblocks (like inputMapping
for workflow tools) were never rendered. Now renders subblocks first,
then any remaining displayParams not covered by subblocks via the legacy
ParameterWithLabel fallback.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 11:23:14 -08:00
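A rough sketch of the rendering fix in c43f502ffb, not the actual component: subblocks render first, then any display params they do not cover fall back to the legacy ParameterWithLabel path. The types and parameter names below are assumptions for illustration.

```ts
interface DisplayParam {
  id: string
}

interface ToolSubBlock {
  id: string
  canonicalParamId?: string
}

function renderToolParams<Node>(
  subBlocks: ToolSubBlock[],
  displayParams: DisplayParam[],
  renderSubBlock: (subBlock: ToolSubBlock) => Node,
  renderParamWithLabel: (param: DisplayParam) => Node
): Node[] {
  // Params already handled by a subblock, either directly or via a canonical param id.
  const covered = new Set(subBlocks.map((sb) => sb.canonicalParamId ?? sb.id))
  const nodes = subBlocks.map(renderSubBlock)
  // The old path hard-returned after the subblocks, so uncovered params such as
  // inputMapping never rendered; now the remainder goes through the legacy fallback.
  const remaining = displayParams.filter((param) => !covered.has(param.id))
  return [...nodes, ...remaining.map(renderParamWithLabel)]
}
```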
waleed
a29afd2757 cleanup 2026-02-12 10:41:07 -08:00
waleed
8af561782d add sibling values to subblock context since the subblock store isn't relevant in tool input, and remove unused param 2026-02-12 10:34:25 -08:00
waleed
a0ebe0842b fix(tool-input): restore optional indicator, fix folder selector and canonical toggle, extract components
- Attach resolved paramVisibility to subblocks from getSubBlocksForToolInput
- Add labelSuffix prop to SubBlock for "(optional)" badge on user-or-llm params
- Fix folder selector missing for tools with canonicalParamId (e.g. Google Drive)
- Fix canonical toggle not clickable by letting SubBlock handle dependsOn internally
- Extract ParameterWithLabel, ToolSubBlockRenderer, ToolCredentialSelector to components/tools/
- Extract StoredTool interface to types.ts, selection helpers to utils.ts
- Remove dead code (mcpError, refreshTools, oldParamIds, initialParams)
- Strengthen typing: replace any with proper types on icon components and evaluateParameterCondition
2026-02-12 00:39:22 -08:00
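An illustrative helper, not the repository's code, for the "(optional)" badge logic described in a0ebe0842b and d236cc8ad0: params that are not user-only may be left blank because the LLM can inject them at runtime. The visibility union below is an assumption based on the values these commit messages mention.

```ts
type ParamVisibility = 'user-only' | 'user-or-llm'

// Returns the suffix a SubBlock would receive via its labelSuffix prop.
function labelSuffixFor(visibility: ParamVisibility | undefined): string | undefined {
  return visibility && visibility !== 'user-only' ? '(optional)' : undefined
}
```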
waleed
d236cc8ad0 refactor(tool-input): eliminate SyncWrappers, add canonical toggle and dependsOn gating
Replace 17+ individual SyncWrapper components with a single centralized
ToolSubBlockRenderer that bridges the subblock store with StoredTool.params
via synthetic store keys. This reduces ~1000 lines of duplicated wrapper
code and ensures tool-input renders subblock components identically to
the standalone SubBlock path.

- Add ToolSubBlockRenderer with bidirectional store sync
- Add basic/advanced mode toggle (ArrowLeftRight) using collaborative functions
- Add dependsOn gating via useDependsOnGate (fields disable instead of hiding)
- Add paramVisibility field to SubBlockConfig for tool-input visibility control
- Pass canonicalModeOverrides through getSubBlocksForToolInput
- Show (optional) label for non-user-only fields (LLM can inject at runtime)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-11 22:46:44 -08:00
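An illustrative sketch of the synthetic-store-key idea in d236cc8ad0, not the actual ToolSubBlockRenderer: one centralized bridge gives each tool param a key shaped like a normal subblock store key, so the generic subblock components can read and write StoredTool.params unchanged. The key format and helper functions are assumptions.

```ts
interface StoredTool {
  toolId: string
  params: Record<string, unknown>
}

// A key unique per block / tool instance / param, so a tool param looks like an
// ordinary subblock value to the shared subblock store.
function syntheticStoreKey(blockId: string, toolIndex: number, paramId: string): string {
  return `${blockId}__tool_${toolIndex}__${paramId}`
}

// Bidirectional sync, sketched as pure functions: existing tool params seed the store
// when the tool input renders, and edits made in the subblock UI flow back into
// tool.params.
function seedStore(
  store: Map<string, unknown>,
  blockId: string,
  toolIndex: number,
  tool: StoredTool
): void {
  for (const [paramId, value] of Object.entries(tool.params)) {
    store.set(syntheticStoreKey(blockId, toolIndex, paramId), value)
  }
}

function writeBack(tool: StoredTool, paramId: string, value: unknown): StoredTool {
  return { ...tool, params: { ...tool.params, [paramId]: value } }
}
```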
Waleed
81dfeb0bb0 fix(terminal): reconnect to running executions after page refresh (#3200)
* fix(terminal): reconnect to running executions after page refresh

* fix(terminal): use ExecutionEvent type instead of any in reconnection stream

* fix(execution): type event buffer with ExecutionEvent instead of Record<string, unknown>

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(execution): validate fromEventId query param in reconnection endpoint

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* Fix some bugs

* fix(variables): fix tag dropdown and cursor alignment in variables block (#3199)

* feat(confluence): added list space labels, delete label, delete page prop (#3201)

* updated route

* ack comments

* fix(execution): reset execution state in reconnection cleanup to unblock re-entry

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(execution): restore running entries when reconnection is interrupted by navigation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* done

* remove cast in ioredis types

* ack PR comments

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Siddharth Ganesan <siddharthganesan@gmail.com>
2026-02-11 19:31:29 -08:00
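A hedged sketch of the reconnection endpoint shape from #3200, assuming a Next.js route handler with zod validation consistent with the rest of this diff; the query schema, event shape, and buffer lookup are assumptions, not the merged implementation.

```ts
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'

// Assumed event shape; the PR types the real buffer with its own ExecutionEvent type.
interface ExecutionEvent {
  id: number
  type: string
  data: Record<string, unknown>
}

const reconnectQuerySchema = z.object({
  // Validate fromEventId so a malformed cursor is rejected instead of silently
  // replaying the whole buffer.
  fromEventId: z.coerce.number().int().nonnegative().optional(),
})

export async function GET(request: NextRequest) {
  const { searchParams } = new URL(request.url)
  const parsed = reconnectQuerySchema.safeParse({
    fromEventId: searchParams.get('fromEventId') ?? undefined,
  })
  if (!parsed.success) {
    return NextResponse.json({ error: 'Invalid fromEventId' }, { status: 400 })
  }

  // Placeholder for wherever the buffered execution events actually live (e.g. Redis);
  // replay only events newer than the client's last-seen cursor.
  const buffered: ExecutionEvent[] = await loadBufferedEvents()
  const fromEventId = parsed.data.fromEventId ?? 0
  return NextResponse.json({ events: buffered.filter((event) => event.id > fromEventId) })
}

async function loadBufferedEvents(): Promise<ExecutionEvent[]> {
  return []
}
```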
Waleed
01577a18b4 fix(change-detection): resolve false positive trigger block change detection (#3204) 2026-02-11 17:24:17 -08:00
Vikhyath Mondreti
52aff4d60b fix build 2026-02-11 15:33:22 -08:00
Waleed
3a3bddd6f8 fix(confl): use recommended query param pattern for confluence route (#3202)
* fix(confl): use recommended query param pattern for confluence route

* use unused var
2026-02-11 14:59:26 -08:00
Waleed
639d50d6b9 feat(confluence): added list space labels, delete label, delete page prop (#3201) 2026-02-11 14:40:31 -08:00
Waleed
cec74e09c2 fix(variables): fix tag dropdown and cursor alignment in variables block (#3199) 2026-02-11 14:40:31 -08:00
Waleed
d5a756c9f2 fix(hotkeys): remove C, T, E tab-switching hotkeys (#3197) 2026-02-11 13:24:00 -08:00
Waleed
f3e994baf0 improvement(oom): increase trigger machine size (#3196) 2026-02-11 13:11:28 -08:00
108 changed files with 4535 additions and 17516 deletions

View File

@@ -41,9 +41,6 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite
| Tastenkombination | Aktion |
|----------|--------|
| `C` | Copilot-Tab fokussieren |
| `T` | Toolbar-Tab fokussieren |
| `E` | Editor-Tab fokussieren |
| `Mod` + `F` | Toolbar-Suche fokussieren |
## Globale Navigation

View File

@@ -43,9 +43,6 @@ These shortcuts switch between panel tabs on the right side of the canvas.
| Shortcut | Action |
|----------|--------|
| `C` | Focus Copilot tab |
| `T` | Focus Toolbar tab |
| `E` | Focus Editor tab |
| `Mod` + `F` | Focus Toolbar search |
## Global Navigation

View File

@@ -399,6 +399,28 @@ Create a new custom property (metadata) on a Confluence page.
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
### `confluence_delete_page_property`
Delete a content property from a Confluence page by its property ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | The ID of the page containing the property |
| `propertyId` | string | Yes | The ID of the property to delete |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | ID of the page |
| `propertyId` | string | ID of the deleted property |
| `deleted` | boolean | Deletion status |
### `confluence_search`
Search for content across Confluence pages, blog posts, and other content.
@@ -872,6 +894,90 @@ Add a label to a Confluence page for organization and categorization.
| `labelName` | string | Name of the added label |
| `labelId` | string | ID of the added label |
### `confluence_delete_label`
Remove a label from a Confluence page.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | Confluence page ID to remove the label from |
| `labelName` | string | Yes | Name of the label to remove |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | Page ID the label was removed from |
| `labelName` | string | Name of the removed label |
| `deleted` | boolean | Deletion status |
### `confluence_get_pages_by_label`
Retrieve all pages that have a specific label applied.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `labelId` | string | Yes | The ID of the label to get pages for |
| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `labelId` | string | ID of the label |
| `pages` | array | Array of pages with this label |
| ↳ `id` | string | Unique page identifier |
| ↳ `title` | string | Page title |
| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) |
| ↳ `spaceId` | string | ID of the space containing the page |
| ↳ `parentId` | string | ID of the parent page \(null if top-level\) |
| ↳ `authorId` | string | Account ID of the page author |
| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
| ↳ `version` | object | Page version information |
| ↳ `number` | number | Version number |
| ↳ `message` | string | Version message |
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
| `nextCursor` | string | Cursor for fetching the next page of results |
### `confluence_list_space_labels`
List all labels associated with a Confluence space.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `spaceId` | string | ID of the space |
| `labels` | array | Array of labels on the space |
| ↳ `id` | string | Unique label identifier |
| ↳ `name` | string | Label name |
| ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) |
| `nextCursor` | string | Cursor for fetching the next page of results |
### `confluence_get_space`
Get details about a specific Confluence space.

View File

@@ -42,9 +42,6 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo
| Atajo | Acción |
|----------|--------|
| `C` | Enfocar pestaña Copilot |
| `T` | Enfocar pestaña Barra de herramientas |
| `E` | Enfocar pestaña Editor |
| `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |
## Navegación global

View File

@@ -42,9 +42,6 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté
| Raccourci | Action |
|----------|--------|
| `C` | Activer l'onglet Copilot |
| `T` | Activer l'onglet Barre d'outils |
| `E` | Activer l'onglet Éditeur |
| `Mod` + `F` | Activer la recherche dans la barre d'outils |
## Navigation globale

View File

@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'
| ショートカット | 操作 |
|----------|--------|
| `C` | Copilotタブにフォーカス |
| `T` | Toolbarタブにフォーカス |
| `E` | Editorタブにフォーカス |
| `Mod` + `F` | Toolbar検索にフォーカス |
## グローバルナビゲーション

View File

@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'
| 快捷键 | 操作 |
|----------|--------|
| `C` | 聚焦 Copilot 标签页 |
| `T` | 聚焦 Toolbar 标签页 |
| `E` | 聚焦 Editor 标签页 |
| `Mod` + `F` | 聚焦 Toolbar 搜索 |
## 全局导航

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { account } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
@@ -31,13 +31,15 @@ export async function GET(request: NextRequest) {
})
.from(account)
.where(and(...whereConditions))
.orderBy(desc(account.updatedAt))
// Use the user's email as the display name (consistent with credential selector)
const userEmail = session.user.email
const accountsWithDisplayName = accounts.map((acc) => ({
id: acc.id,
accountId: acc.accountId,
providerId: acc.providerId,
displayName: acc.accountId || acc.providerId,
displayName: userEmail || acc.providerId,
}))
return NextResponse.json({ accounts: accountsWithDisplayName })

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { account, credential, credentialMember, user } from '@sim/db/schema'
import { account, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { jwtDecode } from 'jwt-decode'
@@ -7,10 +7,8 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { evaluateScopeCoverage, type OAuthProvider, parseProvider } from '@/lib/oauth'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
export const dynamic = 'force-dynamic'
@@ -20,7 +18,6 @@ const credentialsQuerySchema = z
.object({
provider: z.string().nullish(),
workflowId: z.string().uuid('Workflow ID must be a valid UUID').nullish(),
workspaceId: z.string().uuid('Workspace ID must be a valid UUID').nullish(),
credentialId: z
.string()
.min(1, 'Credential ID must not be empty')
@@ -38,79 +35,6 @@ interface GoogleIdToken {
name?: string
}
function toCredentialResponse(
id: string,
displayName: string,
providerId: string,
updatedAt: Date,
scope: string | null
) {
const storedScope = scope?.trim()
const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
const scopeEvaluation = evaluateScopeCoverage(providerId, grantedScopes)
const [_, featureType = 'default'] = providerId.split('-')
return {
id,
name: displayName,
provider: providerId,
lastUsed: updatedAt.toISOString(),
isDefault: featureType === 'default',
scopes: scopeEvaluation.grantedScopes,
canonicalScopes: scopeEvaluation.canonicalScopes,
missingScopes: scopeEvaluation.missingScopes,
extraScopes: scopeEvaluation.extraScopes,
requiresReauthorization: scopeEvaluation.requiresReauthorization,
}
}
async function getFallbackDisplayName(
requestId: string,
providerParam: string | null | undefined,
accountRow: {
idToken: string | null
accountId: string
userId: string
}
) {
const providerForParse = (providerParam || 'google') as OAuthProvider
const { baseProvider } = parseProvider(providerForParse)
if (accountRow.idToken) {
try {
const decoded = jwtDecode<GoogleIdToken>(accountRow.idToken)
if (decoded.email) return decoded.email
if (decoded.name) return decoded.name
} catch (_error) {
logger.warn(`[${requestId}] Error decoding ID token`, {
accountId: accountRow.accountId,
})
}
}
if (baseProvider === 'github') {
return `${accountRow.accountId} (GitHub)`
}
try {
const userRecord = await db
.select({ email: user.email })
.from(user)
.where(eq(user.id, accountRow.userId))
.limit(1)
if (userRecord.length > 0) {
return userRecord[0].email
}
} catch (_error) {
logger.warn(`[${requestId}] Error fetching user email`, {
userId: accountRow.userId,
})
}
return `${accountRow.accountId} (${baseProvider})`
}
/**
* Get credentials for a specific provider
*/
@@ -122,7 +46,6 @@ export async function GET(request: NextRequest) {
const rawQuery = {
provider: searchParams.get('provider'),
workflowId: searchParams.get('workflowId'),
workspaceId: searchParams.get('workspaceId'),
credentialId: searchParams.get('credentialId'),
}
@@ -155,7 +78,7 @@ export async function GET(request: NextRequest) {
)
}
const { provider: providerParam, workflowId, workspaceId, credentialId } = parseResult.data
const { provider: providerParam, workflowId, credentialId } = parseResult.data
// Authenticate requester (supports session and internal JWT)
const authResult = await checkSessionOrInternalAuth(request)
@@ -165,7 +88,7 @@ export async function GET(request: NextRequest) {
}
const requesterUserId = authResult.userId
let effectiveWorkspaceId = workspaceId ?? undefined
const effectiveUserId = requesterUserId
if (workflowId) {
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
workflowId,
@@ -183,145 +106,101 @@ export async function GET(request: NextRequest) {
{ status: workflowAuthorization.status }
)
}
effectiveWorkspaceId = workflowAuthorization.workflow?.workspaceId || undefined
}
if (effectiveWorkspaceId) {
const workspaceAccess = await checkWorkspaceAccess(effectiveWorkspaceId, requesterUserId)
if (!workspaceAccess.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
}
// Parse the provider to get base provider and feature type (if provider is present)
const { baseProvider } = parseProvider((providerParam || 'google') as OAuthProvider)
let accountsData
if (credentialId) {
const [platformCredential] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
accountProviderId: account.providerId,
accountScope: account.scope,
accountUpdatedAt: account.updatedAt,
})
.from(credential)
.leftJoin(account, eq(credential.accountId, account.id))
.where(eq(credential.id, credentialId))
.limit(1)
if (platformCredential) {
if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
return NextResponse.json({ credentials: [] }, { status: 200 })
}
if (workflowId) {
if (!effectiveWorkspaceId || platformCredential.workspaceId !== effectiveWorkspaceId) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
} else {
const [membership] = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, platformCredential.id),
eq(credentialMember.userId, requesterUserId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
if (!membership) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
}
if (!platformCredential.accountProviderId || !platformCredential.accountUpdatedAt) {
return NextResponse.json({ credentials: [] }, { status: 200 })
}
return NextResponse.json(
{
credentials: [
toCredentialResponse(
platformCredential.id,
platformCredential.displayName,
platformCredential.accountProviderId,
platformCredential.accountUpdatedAt,
platformCredential.accountScope
),
],
},
{ status: 200 }
)
}
}
if (effectiveWorkspaceId && providerParam) {
await syncWorkspaceOAuthCredentialsForUser({
workspaceId: effectiveWorkspaceId,
userId: requesterUserId,
})
const credentialsData = await db
.select({
id: credential.id,
displayName: credential.displayName,
providerId: account.providerId,
scope: account.scope,
updatedAt: account.updatedAt,
})
.from(credential)
.innerJoin(account, eq(credential.accountId, account.id))
.innerJoin(
credentialMember,
and(
eq(credentialMember.credentialId, credential.id),
eq(credentialMember.userId, requesterUserId),
eq(credentialMember.status, 'active')
)
)
.where(
and(
eq(credential.workspaceId, effectiveWorkspaceId),
eq(credential.type, 'oauth'),
eq(account.providerId, providerParam)
)
)
return NextResponse.json(
{
credentials: credentialsData.map((row) =>
toCredentialResponse(row.id, row.displayName, row.providerId, row.updatedAt, row.scope)
),
},
{ status: 200 }
)
}
if (credentialId && workflowId) {
// When both workflowId and credentialId are provided, fetch by ID only.
// Workspace authorization above already proves access; the credential
// may belong to another workspace member (e.g. for display name resolution).
accountsData = await db.select().from(account).where(eq(account.id, credentialId))
} else if (credentialId) {
accountsData = await db
.select()
.from(account)
.where(and(eq(account.userId, requesterUserId), eq(account.id, credentialId)))
.where(and(eq(account.userId, effectiveUserId), eq(account.id, credentialId)))
} else {
// Fetch all credentials for provider and effective user
accountsData = await db
.select()
.from(account)
.where(and(eq(account.userId, requesterUserId), eq(account.providerId, providerParam!)))
.where(and(eq(account.userId, effectiveUserId), eq(account.providerId, providerParam!)))
}
// Transform accounts into credentials
const credentials = await Promise.all(
accountsData.map(async (acc) => {
const displayName = await getFallbackDisplayName(requestId, providerParam, acc)
return toCredentialResponse(acc.id, displayName, acc.providerId, acc.updatedAt, acc.scope)
// Extract the feature type from providerId (e.g., 'google-default' -> 'default')
const [_, featureType = 'default'] = acc.providerId.split('-')
// Try multiple methods to get a user-friendly display name
let displayName = ''
// Method 1: Try to extract email from ID token (works for Google, etc.)
if (acc.idToken) {
try {
const decoded = jwtDecode<GoogleIdToken>(acc.idToken)
if (decoded.email) {
displayName = decoded.email
} else if (decoded.name) {
displayName = decoded.name
}
} catch (_error) {
logger.warn(`[${requestId}] Error decoding ID token`, {
accountId: acc.id,
})
}
}
// Method 2: For GitHub, the accountId might be the username
if (!displayName && baseProvider === 'github') {
displayName = `${acc.accountId} (GitHub)`
}
// Method 3: Try to get the user's email from our database
if (!displayName) {
try {
const userRecord = await db
.select({ email: user.email })
.from(user)
.where(eq(user.id, acc.userId))
.limit(1)
if (userRecord.length > 0) {
displayName = userRecord[0].email
}
} catch (_error) {
logger.warn(`[${requestId}] Error fetching user email`, {
userId: acc.userId,
})
}
}
// Fallback: Use accountId with provider type as context
if (!displayName) {
displayName = `${acc.accountId} (${baseProvider})`
}
const storedScope = acc.scope?.trim()
const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
const scopeEvaluation = evaluateScopeCoverage(acc.providerId, grantedScopes)
return {
id: acc.id,
name: displayName,
provider: acc.providerId,
lastUsed: acc.updatedAt.toISOString(),
isDefault: featureType === 'default',
scopes: scopeEvaluation.grantedScopes,
canonicalScopes: scopeEvaluation.canonicalScopes,
missingScopes: scopeEvaluation.missingScopes,
extraScopes: scopeEvaluation.extraScopes,
requiresReauthorization: scopeEvaluation.requiresReauthorization,
}
})
)

View File

@@ -15,7 +15,6 @@ const logger = createLogger('OAuthDisconnectAPI')
const disconnectSchema = z.object({
provider: z.string({ required_error: 'Provider is required' }).min(1, 'Provider is required'),
providerId: z.string().optional(),
accountId: z.string().optional(),
})
/**
@@ -51,20 +50,15 @@ export async function POST(request: NextRequest) {
)
}
const { provider, providerId, accountId } = parseResult.data
const { provider, providerId } = parseResult.data
logger.info(`[${requestId}] Processing OAuth disconnect request`, {
provider,
hasProviderId: !!providerId,
})
// If a specific account row ID is provided, delete that exact account
if (accountId) {
await db
.delete(account)
.where(and(eq(account.userId, session.user.id), eq(account.id, accountId)))
} else if (providerId) {
// If a specific providerId is provided, delete accounts for that provider ID
// If a specific providerId is provided, delete only that account
if (providerId) {
await db
.delete(account)
.where(and(eq(account.userId, session.user.id), eq(account.providerId, providerId)))

View File

@@ -38,18 +38,13 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
}
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
const credential = await getCredential(
requestId,
resolvedCredentialId,
authz.credentialOwnerUserId
)
const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
if (!credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
const accessToken = await refreshAccessTokenIfNeeded(
resolvedCredentialId,
credentialId,
authz.credentialOwnerUserId,
requestId
)

View File

@@ -37,19 +37,14 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
}
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
const credential = await getCredential(
requestId,
resolvedCredentialId,
authz.credentialOwnerUserId
)
const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
if (!credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
// Refresh access token if needed using the utility function
const accessToken = await refreshAccessTokenIfNeeded(
resolvedCredentialId,
credentialId,
authz.credentialOwnerUserId,
requestId
)

View File

@@ -119,23 +119,14 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
}
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
const credential = await getCredential(
requestId,
resolvedCredentialId,
authz.credentialOwnerUserId
)
const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
if (!credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
try {
const { accessToken } = await refreshTokenIfNeeded(
requestId,
credential,
resolvedCredentialId
)
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
let instanceUrl: string | undefined
if (credential.providerId === 'salesforce' && credential.scope) {
@@ -195,20 +186,13 @@ export async function GET(request: NextRequest) {
const { credentialId } = parseResult.data
const authz = await authorizeCredentialUse(request, {
credentialId,
requireWorkflowIdForInternal: false,
})
if (!authz.ok || authz.authType !== 'session' || !authz.credentialOwnerUserId) {
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
// For GET requests, we only support session-based authentication
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
if (!auth.success || auth.authType !== 'session' || !auth.userId) {
return NextResponse.json({ error: 'User not authenticated' }, { status: 401 })
}
const resolvedCredentialId = authz.resolvedCredentialId || credentialId
const credential = await getCredential(
requestId,
resolvedCredentialId,
authz.credentialOwnerUserId
)
const credential = await getCredential(requestId, credentialId, auth.userId)
if (!credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
@@ -220,11 +204,7 @@ export async function GET(request: NextRequest) {
}
try {
const { accessToken } = await refreshTokenIfNeeded(
requestId,
credential,
resolvedCredentialId
)
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
// For Salesforce, extract instanceUrl from the scope field
let instanceUrl: string | undefined

View File

@@ -50,7 +50,7 @@ describe('OAuth Utils', () => {
describe('getCredential', () => {
it('should return credential when found', async () => {
const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
const credential = await getCredential('request-id', 'credential-id', 'test-user-id')
@@ -59,8 +59,7 @@ describe('OAuth Utils', () => {
expect(mockDbTyped.where).toHaveBeenCalled()
expect(mockDbTyped.limit).toHaveBeenCalledWith(1)
expect(credential).toMatchObject(mockCredential)
expect(credential).toMatchObject({ resolvedCredentialId: 'credential-id' })
expect(credential).toEqual(mockCredential)
})
it('should return undefined when credential is not found', async () => {
@@ -153,7 +152,7 @@ describe('OAuth Utils', () => {
providerId: 'google',
userId: 'test-user-id',
}
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
@@ -170,7 +169,7 @@ describe('OAuth Utils', () => {
providerId: 'google',
userId: 'test-user-id',
}
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
mockRefreshOAuthToken.mockResolvedValueOnce({
accessToken: 'new-token',
@@ -203,7 +202,7 @@ describe('OAuth Utils', () => {
providerId: 'google',
userId: 'test-user-id',
}
mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])
mockDbTyped.limit.mockReturnValueOnce([mockCredential])
mockRefreshOAuthToken.mockResolvedValueOnce(null)

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { account, credential, credentialSetMember } from '@sim/db/schema'
import { account, credentialSetMember } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, inArray } from 'drizzle-orm'
import { refreshOAuthToken } from '@/lib/oauth'
@@ -25,28 +25,6 @@ interface AccountInsertData {
accessTokenExpiresAt?: Date
}
async function resolveOAuthAccountId(
credentialId: string
): Promise<{ accountId: string; usedCredentialTable: boolean } | null> {
const [credentialRow] = await db
.select({
type: credential.type,
accountId: credential.accountId,
})
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (credentialRow) {
if (credentialRow.type !== 'oauth' || !credentialRow.accountId) {
return null
}
return { accountId: credentialRow.accountId, usedCredentialTable: true }
}
return { accountId: credentialId, usedCredentialTable: false }
}
/**
* Safely inserts an account record, handling duplicate constraint violations gracefully.
* If a duplicate is detected (unique constraint violation), logs a warning and returns success.
@@ -74,16 +52,10 @@ export async function safeAccountInsert(
* Get a credential by ID and verify it belongs to the user
*/
export async function getCredential(requestId: string, credentialId: string, userId: string) {
const resolved = await resolveOAuthAccountId(credentialId)
if (!resolved) {
logger.warn(`[${requestId}] Credential is not an OAuth credential`)
return undefined
}
const credentials = await db
.select()
.from(account)
.where(and(eq(account.id, resolved.accountId), eq(account.userId, userId)))
.where(and(eq(account.id, credentialId), eq(account.userId, userId)))
.limit(1)
if (!credentials.length) {
@@ -91,10 +63,7 @@ export async function getCredential(requestId: string, credentialId: string, use
return undefined
}
return {
...credentials[0],
resolvedCredentialId: resolved.accountId,
}
return credentials[0]
}
export async function getOAuthToken(userId: string, providerId: string): Promise<string | null> {
@@ -269,9 +238,7 @@ export async function refreshAccessTokenIfNeeded(
}
// Update the token in the database
const resolvedCredentialId =
(credential as { resolvedCredentialId?: string }).resolvedCredentialId ?? credentialId
await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))
await db.update(account).set(updateData).where(eq(account.id, credentialId))
logger.info(`[${requestId}] Successfully refreshed access token for credential`)
return refreshedToken.accessToken
@@ -307,8 +274,6 @@ export async function refreshTokenIfNeeded(
credential: any,
credentialId: string
): Promise<{ accessToken: string; refreshed: boolean }> {
const resolvedCredentialId = credential.resolvedCredentialId ?? credentialId
// Decide if we should refresh: token missing OR expired
const accessTokenExpiresAt = credential.accessTokenExpiresAt
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
@@ -369,7 +334,7 @@ export async function refreshTokenIfNeeded(
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
}
await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))
await db.update(account).set(updateData).where(eq(account.id, credentialId))
logger.info(`[${requestId}] Successfully refreshed access token`)
return { accessToken: refreshedToken, refreshed: true }
@@ -378,7 +343,7 @@ export async function refreshTokenIfNeeded(
`[${requestId}] Refresh attempt failed, checking if another concurrent request succeeded`
)
const freshCredential = await getCredential(requestId, resolvedCredentialId, credential.userId)
const freshCredential = await getCredential(requestId, credentialId, credential.userId)
if (freshCredential?.accessToken) {
const freshExpiresAt = freshCredential.accessTokenExpiresAt
const stillValid = !freshExpiresAt || freshExpiresAt > new Date()

View File

@@ -48,21 +48,16 @@ export async function GET(request: NextRequest) {
const shopData = await shopResponse.json()
const shopInfo = shopData.shop
const stableAccountId = shopInfo.id?.toString() || shopDomain
const existing = await db.query.account.findFirst({
where: and(
eq(account.userId, session.user.id),
eq(account.providerId, 'shopify'),
eq(account.accountId, stableAccountId)
),
where: and(eq(account.userId, session.user.id), eq(account.providerId, 'shopify')),
})
const now = new Date()
const accountData = {
accessToken: accessToken,
accountId: stableAccountId,
accountId: shopInfo.id?.toString() || shopDomain,
scope: scope || '',
updatedAt: now,
idToken: shopDomain,

View File

@@ -52,11 +52,7 @@ export async function POST(request: NextRequest) {
const trelloUser = await userResponse.json()
const existing = await db.query.account.findFirst({
where: and(
eq(account.userId, session.user.id),
eq(account.providerId, 'trello'),
eq(account.accountId, trelloUser.id)
),
where: and(eq(account.userId, session.user.id), eq(account.providerId, 'trello')),
})
const now = new Date()

View File

@@ -113,6 +113,7 @@ const ChatMessageSchema = z.object({
workflowId: z.string().optional(),
knowledgeId: z.string().optional(),
blockId: z.string().optional(),
blockIds: z.array(z.string()).optional(),
templateId: z.string().optional(),
executionId: z.string().optional(),
// For workflow_block, provide both workflowId and blockId
@@ -159,6 +160,20 @@ export async function POST(req: NextRequest) {
commands,
} = ChatMessageSchema.parse(body)
const normalizedContexts = Array.isArray(contexts)
? contexts.map((ctx) => {
if (ctx.kind !== 'blocks') return ctx
if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
if (ctx.blockId) {
return {
...ctx,
blockIds: [ctx.blockId],
}
}
return ctx
})
: contexts
// Resolve workflowId - if not provided, use first workflow or find by name
const resolved = await resolveWorkflowIdForUser(
authenticatedUserId,
@@ -176,10 +191,10 @@ export async function POST(req: NextRequest) {
const userMessageIdToUse = userMessageId || crypto.randomUUID()
try {
logger.info(`[${tracker.requestId}] Received chat POST`, {
hasContexts: Array.isArray(contexts),
contextsCount: Array.isArray(contexts) ? contexts.length : 0,
contextsPreview: Array.isArray(contexts)
? contexts.map((c: any) => ({
hasContexts: Array.isArray(normalizedContexts),
contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
contextsPreview: Array.isArray(normalizedContexts)
? normalizedContexts.map((c: any) => ({
kind: c?.kind,
chatId: c?.chatId,
workflowId: c?.workflowId,
@@ -191,17 +206,25 @@ export async function POST(req: NextRequest) {
} catch {}
// Preprocess contexts server-side
let agentContexts: Array<{ type: string; content: string }> = []
if (Array.isArray(contexts) && contexts.length > 0) {
if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
try {
const { processContextsServer } = await import('@/lib/copilot/process-contents')
const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
const processed = await processContextsServer(
normalizedContexts as any,
authenticatedUserId,
message
)
agentContexts = processed
logger.info(`[${tracker.requestId}] Contexts processed for request`, {
processedCount: agentContexts.length,
kinds: agentContexts.map((c) => c.type),
lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
})
if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
if (
Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 &&
agentContexts.length === 0
) {
logger.warn(
`[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
)
@@ -246,11 +269,13 @@ export async function POST(req: NextRequest) {
mode,
model: selectedModel,
provider,
conversationId: effectiveConversationId,
conversationHistory,
contexts: agentContexts,
fileAttachments,
commands,
chatId: actualChatId,
prefetch,
implicitFeedback,
},
{
@@ -432,10 +457,15 @@ export async function POST(req: NextRequest) {
content: message,
timestamp: new Date().toISOString(),
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
...(Array.isArray(contexts) &&
contexts.length > 0 && {
contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
...(Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 && {
contexts: normalizedContexts,
}),
...(Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 && {
contentBlocks: [
{ type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
],
}),
}

View File

@@ -1,194 +0,0 @@
import { db } from '@sim/db'
import { credential, credentialMember, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
const logger = createLogger('CredentialMembersAPI')
interface RouteContext {
params: Promise<{ id: string }>
}
async function requireAdminMembership(credentialId: string, userId: string) {
const [membership] = await db
.select({ role: credentialMember.role, status: credentialMember.status })
.from(credentialMember)
.where(
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
)
.limit(1)
if (!membership || membership.status !== 'active' || membership.role !== 'admin') {
return null
}
return membership
}
export async function GET(_request: NextRequest, context: RouteContext) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: credentialId } = await context.params
const [cred] = await db
.select({ id: credential.id })
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (!cred) {
return NextResponse.json({ members: [] }, { status: 200 })
}
const members = await db
.select({
id: credentialMember.id,
userId: credentialMember.userId,
role: credentialMember.role,
status: credentialMember.status,
joinedAt: credentialMember.joinedAt,
userName: user.name,
userEmail: user.email,
})
.from(credentialMember)
.innerJoin(user, eq(credentialMember.userId, user.id))
.where(eq(credentialMember.credentialId, credentialId))
return NextResponse.json({ members })
} catch (error) {
logger.error('Failed to fetch credential members', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
const addMemberSchema = z.object({
userId: z.string().min(1),
role: z.enum(['admin', 'member']).default('member'),
})
export async function POST(request: NextRequest, context: RouteContext) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: credentialId } = await context.params
const admin = await requireAdminMembership(credentialId, session.user.id)
if (!admin) {
return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
}
const body = await request.json()
const parsed = addMemberSchema.safeParse(body)
if (!parsed.success) {
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
}
const { userId, role } = parsed.data
const now = new Date()
const [existing] = await db
.select({ id: credentialMember.id, status: credentialMember.status })
.from(credentialMember)
.where(
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
)
.limit(1)
if (existing) {
await db
.update(credentialMember)
.set({ role, status: 'active', updatedAt: now })
.where(eq(credentialMember.id, existing.id))
return NextResponse.json({ success: true })
}
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId,
role,
status: 'active',
joinedAt: now,
invitedBy: session.user.id,
createdAt: now,
updatedAt: now,
})
return NextResponse.json({ success: true }, { status: 201 })
} catch (error) {
logger.error('Failed to add credential member', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function DELETE(request: NextRequest, context: RouteContext) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: credentialId } = await context.params
const targetUserId = new URL(request.url).searchParams.get('userId')
if (!targetUserId) {
return NextResponse.json({ error: 'userId query parameter required' }, { status: 400 })
}
const admin = await requireAdminMembership(credentialId, session.user.id)
if (!admin) {
return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
}
const [target] = await db
.select({
id: credentialMember.id,
role: credentialMember.role,
status: credentialMember.status,
})
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.userId, targetUserId)
)
)
.limit(1)
if (!target) {
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
}
if (target.role === 'admin') {
const activeAdmins = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.role, 'admin'),
eq(credentialMember.status, 'active')
)
)
if (activeAdmins.length <= 1) {
return NextResponse.json({ error: 'Cannot remove the last admin' }, { status: 400 })
}
}
await db.delete(credentialMember).where(eq(credentialMember.id, target.id))
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Failed to remove credential member', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -1,234 +0,0 @@
import { db } from '@sim/db'
import { credential, credentialMember, environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getCredentialActorContext } from '@/lib/credentials/access'
import {
syncPersonalEnvCredentialsForUser,
syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'
const logger = createLogger('CredentialByIdAPI')
const updateCredentialSchema = z
.object({
displayName: z.string().trim().min(1).max(255).optional(),
accountId: z.string().trim().min(1).optional(),
})
.strict()
.refine((data) => Boolean(data.displayName || data.accountId), {
message: 'At least one field must be provided',
path: ['displayName'],
})
async function getCredentialResponse(credentialId: string, userId: string) {
const [row] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
envKey: credential.envKey,
envOwnerUserId: credential.envOwnerUserId,
createdBy: credential.createdBy,
createdAt: credential.createdAt,
updatedAt: credential.updatedAt,
role: credentialMember.role,
status: credentialMember.status,
})
.from(credential)
.innerJoin(
credentialMember,
and(eq(credentialMember.credentialId, credential.id), eq(credentialMember.userId, userId))
)
.where(eq(credential.id, credentialId))
.limit(1)
return row ?? null
}
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const access = await getCredentialActorContext(id, session.user.id)
if (!access.credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
if (!access.hasWorkspaceAccess || !access.member) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
const row = await getCredentialResponse(id, session.user.id)
return NextResponse.json({ credential: row }, { status: 200 })
} catch (error) {
logger.error('Failed to fetch credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const parseResult = updateCredentialSchema.safeParse(await request.json())
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const access = await getCredentialActorContext(id, session.user.id)
if (!access.credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
if (!access.hasWorkspaceAccess || !access.isAdmin) {
return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
}
if (access.credential.type === 'oauth') {
return NextResponse.json(
{
error:
'OAuth credential editing is disabled. Connect an account and create or use its linked credential.',
},
{ status: 400 }
)
}
return NextResponse.json(
{
error:
'Environment credentials cannot be updated via this endpoint. Use the environment value editor in credentials settings.',
},
{ status: 400 }
)
} catch (error) {
logger.error('Failed to update credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id } = await params
try {
const access = await getCredentialActorContext(id, session.user.id)
if (!access.credential) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
if (!access.hasWorkspaceAccess || !access.isAdmin) {
return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
}
if (access.credential.type === 'env_personal' && access.credential.envKey) {
const ownerUserId = access.credential.envOwnerUserId
if (!ownerUserId) {
return NextResponse.json({ error: 'Invalid personal secret owner' }, { status: 400 })
}
const [personalRow] = await db
.select({ variables: environment.variables })
.from(environment)
.where(eq(environment.userId, ownerUserId))
.limit(1)
const current = ((personalRow?.variables as Record<string, string> | null) ?? {}) as Record<
string,
string
>
if (access.credential.envKey in current) {
delete current[access.credential.envKey]
}
await db
.insert(environment)
.values({
id: ownerUserId,
userId: ownerUserId,
variables: current,
updatedAt: new Date(),
})
.onConflictDoUpdate({
target: [environment.userId],
set: { variables: current, updatedAt: new Date() },
})
await syncPersonalEnvCredentialsForUser({
userId: ownerUserId,
envKeys: Object.keys(current),
})
return NextResponse.json({ success: true }, { status: 200 })
}
if (access.credential.type === 'env_workspace' && access.credential.envKey) {
const [workspaceRow] = await db
.select({
id: workspaceEnvironment.id,
createdAt: workspaceEnvironment.createdAt,
variables: workspaceEnvironment.variables,
})
.from(workspaceEnvironment)
.where(eq(workspaceEnvironment.workspaceId, access.credential.workspaceId))
.limit(1)
const current = ((workspaceRow?.variables as Record<string, string> | null) ?? {}) as Record<
string,
string
>
if (access.credential.envKey in current) {
delete current[access.credential.envKey]
}
await db
.insert(workspaceEnvironment)
.values({
id: workspaceRow?.id || crypto.randomUUID(),
workspaceId: access.credential.workspaceId,
variables: current,
createdAt: workspaceRow?.createdAt || new Date(),
updatedAt: new Date(),
})
.onConflictDoUpdate({
target: [workspaceEnvironment.workspaceId],
set: { variables: current, updatedAt: new Date() },
})
await syncWorkspaceEnvCredentials({
workspaceId: access.credential.workspaceId,
envKeys: Object.keys(current),
actingUserId: session.user.id,
})
return NextResponse.json({ success: true }, { status: 200 })
}
await db.delete(credential).where(eq(credential.id, id))
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to delete credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -1,81 +0,0 @@
import { db } from '@sim/db'
import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
syncPersonalEnvCredentialsForUser,
syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('CredentialsBootstrapAPI')
const bootstrapSchema = z.object({
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
})
/**
* Ensures the current user's connected accounts and env vars are reflected as workspace credentials.
*/
export async function POST(request: NextRequest) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const parseResult = bootstrapSchema.safeParse(await request.json())
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { workspaceId } = parseResult.data
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
if (!workspaceAccess.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
const [personalRow, workspaceRow] = await Promise.all([
db
.select({ variables: environment.variables })
.from(environment)
.where(eq(environment.userId, session.user.id))
.limit(1),
db
.select({ variables: workspaceEnvironment.variables })
.from(workspaceEnvironment)
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
.limit(1),
])
const personalKeys = Object.keys((personalRow[0]?.variables as Record<string, string>) || {})
const workspaceKeys = Object.keys((workspaceRow[0]?.variables as Record<string, string>) || {})
const [oauthSyncResult] = await Promise.all([
syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id }),
syncPersonalEnvCredentialsForUser({ userId: session.user.id, envKeys: personalKeys }),
syncWorkspaceEnvCredentials({
workspaceId,
envKeys: workspaceKeys,
actingUserId: session.user.id,
}),
])
return NextResponse.json({
success: true,
synced: {
oauthCreated: oauthSyncResult.createdCredentials,
oauthMembershipsUpdated: oauthSyncResult.updatedMemberships,
personalEnvKeys: personalKeys.length,
workspaceEnvKeys: workspaceKeys.length,
},
})
} catch (error) {
logger.error('Failed to bootstrap workspace credentials', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -1,73 +0,0 @@
import { db } from '@sim/db'
import { pendingCredentialDraft } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, lt } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
const logger = createLogger('CredentialDraftAPI')
const DRAFT_TTL_MS = 15 * 60 * 1000
const createDraftSchema = z.object({
workspaceId: z.string().min(1),
providerId: z.string().min(1),
displayName: z.string().min(1),
})
export async function POST(request: Request) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const parsed = createDraftSchema.safeParse(body)
if (!parsed.success) {
return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
}
const { workspaceId, providerId, displayName } = parsed.data
const userId = session.user.id
const now = new Date()
await db
.delete(pendingCredentialDraft)
.where(
and(eq(pendingCredentialDraft.userId, userId), lt(pendingCredentialDraft.expiresAt, now))
)
await db
.insert(pendingCredentialDraft)
.values({
id: crypto.randomUUID(),
userId,
workspaceId,
providerId,
displayName,
expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
createdAt: now,
})
.onConflictDoUpdate({
target: [
pendingCredentialDraft.userId,
pendingCredentialDraft.providerId,
pendingCredentialDraft.workspaceId,
],
set: {
displayName,
expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
createdAt: now,
},
})
logger.info('Credential draft saved', { userId, workspaceId, providerId, displayName })
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to save credential draft', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -1,112 +0,0 @@
import { db } from '@sim/db'
import { credential, credentialMember } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
const logger = createLogger('CredentialMembershipsAPI')
const leaveCredentialSchema = z.object({
credentialId: z.string().min(1),
})
export async function GET() {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const memberships = await db
.select({
membershipId: credentialMember.id,
credentialId: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
role: credentialMember.role,
status: credentialMember.status,
joinedAt: credentialMember.joinedAt,
})
.from(credentialMember)
.innerJoin(credential, eq(credentialMember.credentialId, credential.id))
.where(eq(credentialMember.userId, session.user.id))
return NextResponse.json({ memberships }, { status: 200 })
} catch (error) {
logger.error('Failed to list credential memberships', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function DELETE(request: NextRequest) {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const parseResult = leaveCredentialSchema.safeParse({
credentialId: new URL(request.url).searchParams.get('credentialId'),
})
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { credentialId } = parseResult.data
const [membership] = await db
.select()
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.userId, session.user.id)
)
)
.limit(1)
if (!membership) {
return NextResponse.json({ error: 'Membership not found' }, { status: 404 })
}
if (membership.status !== 'active') {
return NextResponse.json({ success: true }, { status: 200 })
}
if (membership.role === 'admin') {
const activeAdmins = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.role, 'admin'),
eq(credentialMember.status, 'active')
)
)
if (activeAdmins.length <= 1) {
return NextResponse.json(
{ error: 'Cannot leave credential as the last active admin' },
{ status: 400 }
)
}
}
await db
.update(credentialMember)
.set({
status: 'revoked',
updatedAt: new Date(),
})
.where(eq(credentialMember.id, membership.id))
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to leave credential', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -1,468 +0,0 @@
import { db } from '@sim/db'
import { account, credential, credentialMember, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getWorkspaceMemberUserIds } from '@/lib/credentials/environment'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { getServiceConfigByProviderId } from '@/lib/oauth'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
import { isValidEnvVarName } from '@/executor/constants'
const logger = createLogger('CredentialsAPI')
const credentialTypeSchema = z.enum(['oauth', 'env_workspace', 'env_personal'])
function normalizeEnvKeyInput(raw: string): string {
const trimmed = raw.trim()
const wrappedMatch = /^\{\{\s*([A-Za-z0-9_]+)\s*\}\}$/.exec(trimmed)
return wrappedMatch ? wrappedMatch[1] : trimmed
}
const listCredentialsSchema = z.object({
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
type: credentialTypeSchema.optional(),
providerId: z.string().optional(),
})
const createCredentialSchema = z
.object({
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
type: credentialTypeSchema,
displayName: z.string().trim().min(1).max(255).optional(),
providerId: z.string().trim().min(1).optional(),
accountId: z.string().trim().min(1).optional(),
envKey: z.string().trim().min(1).optional(),
envOwnerUserId: z.string().trim().min(1).optional(),
})
.superRefine((data, ctx) => {
if (data.type === 'oauth') {
if (!data.accountId) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'accountId is required for oauth credentials',
path: ['accountId'],
})
}
if (!data.providerId) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'providerId is required for oauth credentials',
path: ['providerId'],
})
}
if (!data.displayName) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'displayName is required for oauth credentials',
path: ['displayName'],
})
}
return
}
const normalizedEnvKey = data.envKey ? normalizeEnvKeyInput(data.envKey) : ''
if (!normalizedEnvKey) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'envKey is required for env credentials',
path: ['envKey'],
})
return
}
if (!isValidEnvVarName(normalizedEnvKey)) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: 'envKey must contain only letters, numbers, and underscores',
path: ['envKey'],
})
}
})
interface ExistingCredentialSourceParams {
workspaceId: string
type: 'oauth' | 'env_workspace' | 'env_personal'
accountId?: string | null
envKey?: string | null
envOwnerUserId?: string | null
}
async function findExistingCredentialBySource(params: ExistingCredentialSourceParams) {
const { workspaceId, type, accountId, envKey, envOwnerUserId } = params
if (type === 'oauth' && accountId) {
const [row] = await db
.select()
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'oauth'),
eq(credential.accountId, accountId)
)
)
.limit(1)
return row ?? null
}
if (type === 'env_workspace' && envKey) {
const [row] = await db
.select()
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_workspace'),
eq(credential.envKey, envKey)
)
)
.limit(1)
return row ?? null
}
if (type === 'env_personal' && envKey && envOwnerUserId) {
const [row] = await db
.select()
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envKey, envKey),
eq(credential.envOwnerUserId, envOwnerUserId)
)
)
.limit(1)
return row ?? null
}
return null
}
export async function GET(request: NextRequest) {
const requestId = generateRequestId()
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const { searchParams } = new URL(request.url)
const rawWorkspaceId = searchParams.get('workspaceId')
const rawType = searchParams.get('type')
const rawProviderId = searchParams.get('providerId')
const parseResult = listCredentialsSchema.safeParse({
workspaceId: rawWorkspaceId?.trim(),
type: rawType?.trim() || undefined,
providerId: rawProviderId?.trim() || undefined,
})
if (!parseResult.success) {
logger.warn(`[${requestId}] Invalid credential list request`, {
workspaceId: rawWorkspaceId,
type: rawType,
providerId: rawProviderId,
errors: parseResult.error.errors,
})
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { workspaceId, type, providerId } = parseResult.data
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
if (!workspaceAccess.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
if (!type || type === 'oauth') {
await syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id })
}
const whereClauses = [
eq(credential.workspaceId, workspaceId),
eq(credentialMember.userId, session.user.id),
eq(credentialMember.status, 'active'),
]
if (type) {
whereClauses.push(eq(credential.type, type))
}
if (providerId) {
whereClauses.push(eq(credential.providerId, providerId))
}
const credentials = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
envKey: credential.envKey,
envOwnerUserId: credential.envOwnerUserId,
createdBy: credential.createdBy,
createdAt: credential.createdAt,
updatedAt: credential.updatedAt,
role: credentialMember.role,
})
.from(credential)
.innerJoin(
credentialMember,
and(
eq(credentialMember.credentialId, credential.id),
eq(credentialMember.userId, session.user.id),
eq(credentialMember.status, 'active')
)
)
.where(and(...whereClauses))
return NextResponse.json({ credentials })
} catch (error) {
logger.error(`[${requestId}] Failed to list credentials`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const body = await request.json()
const parseResult = createCredentialSchema.safeParse(body)
if (!parseResult.success) {
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
}
const { workspaceId, type, displayName, providerId, accountId, envKey, envOwnerUserId } =
parseResult.data
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
if (!workspaceAccess.canWrite) {
return NextResponse.json({ error: 'Write permission required' }, { status: 403 })
}
let resolvedDisplayName = displayName?.trim() ?? ''
let resolvedProviderId: string | null = providerId ?? null
let resolvedAccountId: string | null = accountId ?? null
const resolvedEnvKey: string | null = envKey ? normalizeEnvKeyInput(envKey) : null
let resolvedEnvOwnerUserId: string | null = null
if (type === 'oauth') {
const [accountRow] = await db
.select({
id: account.id,
userId: account.userId,
providerId: account.providerId,
accountId: account.accountId,
})
.from(account)
.where(eq(account.id, accountId!))
.limit(1)
if (!accountRow) {
return NextResponse.json({ error: 'OAuth account not found' }, { status: 404 })
}
if (accountRow.userId !== session.user.id) {
return NextResponse.json(
{ error: 'Only account owners can create oauth credentials for an account' },
{ status: 403 }
)
}
if (providerId !== accountRow.providerId) {
return NextResponse.json(
{ error: 'providerId does not match the selected OAuth account' },
{ status: 400 }
)
}
if (!resolvedDisplayName) {
resolvedDisplayName =
getServiceConfigByProviderId(accountRow.providerId)?.name || accountRow.providerId
}
} else if (type === 'env_personal') {
resolvedEnvOwnerUserId = envOwnerUserId ?? session.user.id
if (resolvedEnvOwnerUserId !== session.user.id) {
return NextResponse.json(
{ error: 'Only the current user can create personal env credentials for themselves' },
{ status: 403 }
)
}
resolvedProviderId = null
resolvedAccountId = null
resolvedDisplayName = resolvedEnvKey || ''
} else {
resolvedProviderId = null
resolvedAccountId = null
resolvedEnvOwnerUserId = null
resolvedDisplayName = resolvedEnvKey || ''
}
if (!resolvedDisplayName) {
return NextResponse.json({ error: 'Display name is required' }, { status: 400 })
}
const existingCredential = await findExistingCredentialBySource({
workspaceId,
type,
accountId: resolvedAccountId,
envKey: resolvedEnvKey,
envOwnerUserId: resolvedEnvOwnerUserId,
})
if (existingCredential) {
const [membership] = await db
.select({
id: credentialMember.id,
status: credentialMember.status,
role: credentialMember.role,
})
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, existingCredential.id),
eq(credentialMember.userId, session.user.id)
)
)
.limit(1)
if (!membership || membership.status !== 'active') {
return NextResponse.json(
{ error: 'A credential with this source already exists in this workspace' },
{ status: 409 }
)
}
if (
type === 'oauth' &&
membership.role === 'admin' &&
resolvedDisplayName &&
resolvedDisplayName !== existingCredential.displayName
) {
await db
.update(credential)
.set({
displayName: resolvedDisplayName,
updatedAt: new Date(),
})
.where(eq(credential.id, existingCredential.id))
const [updatedCredential] = await db
.select()
.from(credential)
.where(eq(credential.id, existingCredential.id))
.limit(1)
return NextResponse.json(
{ credential: updatedCredential ?? existingCredential },
{ status: 200 }
)
}
return NextResponse.json({ credential: existingCredential }, { status: 200 })
}
const now = new Date()
const credentialId = crypto.randomUUID()
const [workspaceRow] = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
await db.transaction(async (tx) => {
await tx.insert(credential).values({
id: credentialId,
workspaceId,
type,
displayName: resolvedDisplayName,
providerId: resolvedProviderId,
accountId: resolvedAccountId,
envKey: resolvedEnvKey,
envOwnerUserId: resolvedEnvOwnerUserId,
createdBy: session.user.id,
createdAt: now,
updatedAt: now,
})
if (type === 'env_workspace' && workspaceRow?.ownerId) {
const workspaceUserIds = await getWorkspaceMemberUserIds(workspaceId)
if (workspaceUserIds.length > 0) {
for (const memberUserId of workspaceUserIds) {
await tx.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: memberUserId,
role: memberUserId === workspaceRow.ownerId ? 'admin' : 'member',
status: 'active',
joinedAt: now,
invitedBy: session.user.id,
createdAt: now,
updatedAt: now,
})
}
}
} else {
await tx.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: session.user.id,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: session.user.id,
createdAt: now,
updatedAt: now,
})
}
})
const [created] = await db
.select()
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
return NextResponse.json({ credential: created }, { status: 201 })
} catch (error: any) {
if (error?.code === '23505') {
return NextResponse.json(
{ error: 'A credential with this source already exists' },
{ status: 409 }
)
}
if (error?.code === '23503') {
return NextResponse.json(
{ error: 'Invalid credential reference or membership target' },
{ status: 400 }
)
}
if (error?.code === '23514') {
return NextResponse.json(
{ error: 'Credential source data failed validation checks' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Credential create failure details`, {
code: error?.code,
detail: error?.detail,
constraint: error?.constraint,
table: error?.table,
message: error?.message,
})
logger.error(`[${requestId}] Failed to create credential`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -7,7 +7,6 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncPersonalEnvCredentialsForUser } from '@/lib/credentials/environment'
import type { EnvironmentVariable } from '@/stores/settings/environment'
const logger = createLogger('EnvironmentAPI')
@@ -54,11 +53,6 @@ export async function POST(req: NextRequest) {
},
})
await syncPersonalEnvCredentialsForUser({
userId: session.user.id,
envKeys: Object.keys(variables),
})
return NextResponse.json({ success: true })
} catch (validationError) {
if (validationError instanceof z.ZodError) {

View File

@@ -191,3 +191,84 @@ export async function GET(request: NextRequest) {
)
}
}
// Delete a label from a page
export async function DELETE(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const {
domain,
accessToken,
cloudId: providedCloudId,
pageId,
labelName,
} = await request.json()
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!pageId) {
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
}
if (!labelName) {
return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
}
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
if (!pageIdValidation.isValid) {
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const encodedLabel = encodeURIComponent(labelName.trim())
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`
const response = await fetch(url, {
method: 'DELETE',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage =
errorData?.message || `Failed to delete Confluence label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
return NextResponse.json({
pageId,
labelName,
deleted: true,
})
} catch (error) {
logger.error('Error deleting Confluence label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}
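
For reference, a hedged client-side sketch of calling the new DELETE handler above. The route's path is not visible in this diff, so it is passed in as a parameter; the body fields mirror the handler's destructuring, and cloudId may be omitted since the handler resolves it from the domain.

// Hypothetical caller for the label-deletion handler above; the endpoint argument is a
// placeholder because the actual route path is not shown in this diff.
async function deleteConfluenceLabel(
  endpoint: string,
  args: {
    domain: string
    accessToken: string
    pageId: string
    labelName: string
    cloudId?: string
  }
): Promise<{ pageId: string; labelName: string; deleted: boolean }> {
  const res = await fetch(endpoint, {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(args),
  })
  const body = await res.json().catch(() => null)
  if (!res.ok) {
    throw new Error(body?.error || `Failed to delete Confluence label (${res.status})`)
  }
  return body
}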

View File

@@ -0,0 +1,103 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluencePagesByLabelAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const labelId = searchParams.get('labelId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '50'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!labelId) {
return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
}
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const pages = (data.results || []).map((page: any) => ({
id: page.id,
title: page.title,
status: page.status ?? null,
spaceId: page.spaceId ?? null,
parentId: page.parentId ?? null,
authorId: page.authorId ?? null,
createdAt: page.createdAt ?? null,
version: page.version ?? null,
}))
return NextResponse.json({
pages,
labelId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error getting pages by label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}
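
A hedged pagination sketch against the handler above, showing how nextCursor is meant to be fed back as the cursor query parameter. The route path is not shown in this diff and is passed in; the query parameter names and response shape follow the handler.

// Hypothetical paginated caller for the pages-by-label handler above; the endpoint
// argument is a placeholder for the real route path.
async function fetchAllPagesForLabel(
  endpoint: string,
  args: { domain: string; accessToken: string; labelId: string; cloudId?: string }
): Promise<Array<{ id: string; title: string }>> {
  const pages: Array<{ id: string; title: string }> = []
  let cursor: string | null = null
  do {
    const params = new URLSearchParams({
      domain: args.domain,
      accessToken: args.accessToken,
      labelId: args.labelId,
      limit: '50',
    })
    if (args.cloudId) params.set('cloudId', args.cloudId)
    if (cursor) params.set('cursor', cursor)
    const res = await fetch(`${endpoint}?${params.toString()}`)
    const body = await res.json().catch(() => null)
    if (!res.ok) {
      throw new Error(body?.error || `Failed to get pages by label (${res.status})`)
    }
    pages.push(...body.pages)
    cursor = body.nextCursor // null once Confluence stops returning a next link
  } while (cursor)
  return pages
}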

View File

@@ -0,0 +1,98 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluenceSpaceLabelsAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const spaceId = searchParams.get('spaceId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '25'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!spaceId) {
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
}
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
if (!spaceIdValidation.isValid) {
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const labels = (data.results || []).map((label: any) => ({
id: label.id,
name: label.name,
prefix: label.prefix || 'global',
}))
return NextResponse.json({
labels,
spaceId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error listing space labels:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}
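
The same calling convention applies to the space-labels handler above; a single-page sketch, with the route path again passed in and parameter names matching the handler.

// Hypothetical single-page caller for the space-labels handler above.
async function listSpaceLabels(
  endpoint: string, // placeholder: the route that exposes the GET handler above
  args: { domain: string; accessToken: string; spaceId: string; cloudId?: string; cursor?: string }
): Promise<{
  labels: Array<{ id: string; name: string; prefix: string }>
  spaceId: string
  nextCursor: string | null
}> {
  const params = new URLSearchParams({
    domain: args.domain,
    accessToken: args.accessToken,
    spaceId: args.spaceId,
    limit: '25',
  })
  if (args.cloudId) params.set('cloudId', args.cloudId)
  if (args.cursor) params.set('cursor', args.cursor)
  const res = await fetch(`${endpoint}?${params.toString()}`)
  const body = await res.json().catch(() => null)
  if (!res.ok) {
    throw new Error(body?.error || `Failed to list space labels (${res.status})`)
  }
  return body
}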

View File

@@ -29,7 +29,7 @@ const patchBodySchema = z
description: z
.string()
.trim()
.max(500, 'Description must be 500 characters or less')
.max(2000, 'Description must be 2000 characters or less')
.nullable()
.optional(),
isActive: z.literal(true).optional(), // Set to true to activate this version

View File

@@ -12,7 +12,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,15 +700,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
let isStreamClosed = false
const eventWriter = createExecutionEventWriter(executionId)
setExecutionMeta(executionId, {
status: 'active',
userId: actorUserId,
workflowId,
}).catch(() => {})
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
const sendEvent = (event: ExecutionEvent) => {
  if (isStreamClosed) return
  try {
    controller.enqueue(encodeSSEEvent(event))
  } catch {
    isStreamClosed = true
  }
}
let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null
const sendEvent = (event: ExecutionEvent) => {
  if (!isStreamClosed) {
    try {
      controller.enqueue(encodeSSEEvent(event))
    } catch {
      isStreamClosed = true
    }
  }
  if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
    eventWriter.write(event).catch(() => {})
  }
}
@@ -829,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const reader = streamingExec.stream.getReader()
const decoder = new TextDecoder()
let chunkCount = 0
try {
while (true) {
const { done, value } = await reader.read()
if (done) break
chunkCount++
const chunk = decoder.decode(value, { stream: true })
sendEvent({
type: 'stream:chunk',
@@ -951,6 +961,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
duration: result.metadata?.duration || 0,
},
})
finalMetaStatus = 'error'
} else {
logger.info(`[${requestId}] Workflow execution was cancelled`)
@@ -963,6 +974,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
duration: result.metadata?.duration || 0,
},
})
finalMetaStatus = 'cancelled'
}
return
}
@@ -986,6 +998,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
endTime: result.metadata?.endTime || new Date().toISOString(),
},
})
finalMetaStatus = 'complete'
} catch (error: unknown) {
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
const errorMessage = isTimeout
@@ -1017,7 +1030,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
duration: executionResult?.metadata?.duration || 0,
},
})
finalMetaStatus = 'error'
} finally {
try {
await eventWriter.close()
} catch (closeError) {
logger.warn(`[${requestId}] Failed to close event writer`, {
error: closeError instanceof Error ? closeError.message : String(closeError),
})
}
if (finalMetaStatus) {
setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
}
timeoutController.cleanup()
if (executionId) {
await cleanupExecutionBase64Cache(executionId)
@@ -1032,10 +1056,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
},
cancel() {
isStreamClosed = true
timeoutController.cleanup()
logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
timeoutController.abort()
markExecutionCancelled(executionId).catch(() => {})
logger.info(`[${requestId}] Client disconnected from SSE stream`)
},
})
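
The buffer calls added above pair with the reconnection route later in this diff. A condensed sketch of that producer/replay lifecycle follows; signatures are inferred only from the call sites visible here, and the event type is derived from the writer rather than imported from an assumed location.

import {
  createExecutionEventWriter,
  getExecutionMeta,
  readExecutionEvents,
  setExecutionMeta,
} from '@/lib/execution/event-buffer'

// Derive the buffered event type from the writer itself to avoid assuming where
// ExecutionEvent is exported from.
type BufferedEvent = Parameters<ReturnType<typeof createExecutionEventWriter>['write']>[0]

async function produceExecutionEvents(executionId: string, events: BufferedEvent[]) {
  // Producer side (live SSE route): mark the run active, mirror events into the
  // buffer, then close the writer and record the terminal status.
  await setExecutionMeta(executionId, { status: 'active' })
  const writer = createExecutionEventWriter(executionId)
  let finalStatus: 'complete' | 'error' = 'complete'
  try {
    for (const event of events) {
      await writer.write(event)
    }
  } catch {
    finalStatus = 'error'
  } finally {
    await writer.close()
    await setExecutionMeta(executionId, { status: finalStatus })
  }
}

async function replayExecutionEvents(executionId: string, fromEventId: number) {
  // Consumer side (reconnection route): check the stored metadata, then replay
  // everything buffered after the client's last seen event id.
  const meta = await getExecutionMeta(executionId)
  if (!meta) throw new Error('Execution buffer not found or expired')
  const entries = await readExecutionEvents(executionId, fromEventId)
  return { status: meta.status, entries }
}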

View File

@@ -0,0 +1,170 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
type ExecutionStreamStatus,
getExecutionMeta,
readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
const logger = createLogger('ExecutionStreamReconnectAPI')
const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes
function isTerminalStatus(status: ExecutionStreamStatus): boolean {
return status === 'complete' || status === 'error' || status === 'cancelled'
}
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
export async function GET(
req: NextRequest,
{ params }: { params: Promise<{ id: string; executionId: string }> }
) {
const { id: workflowId, executionId } = await params
try {
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
workflowId,
userId: auth.userId,
action: 'read',
})
if (!workflowAuthorization.allowed) {
return NextResponse.json(
{ error: workflowAuthorization.message || 'Access denied' },
{ status: workflowAuthorization.status }
)
}
const meta = await getExecutionMeta(executionId)
if (!meta) {
return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
}
if (meta.workflowId && meta.workflowId !== workflowId) {
return NextResponse.json(
{ error: 'Execution does not belong to this workflow' },
{ status: 403 }
)
}
const fromParam = req.nextUrl.searchParams.get('from')
const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0
logger.info('Reconnection stream requested', {
workflowId,
executionId,
fromEventId,
metaStatus: meta.status,
})
const encoder = new TextEncoder()
let closed = false
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
let lastEventId = fromEventId
const pollDeadline = Date.now() + MAX_POLL_DURATION_MS
const enqueue = (text: string) => {
if (closed) return
try {
controller.enqueue(encoder.encode(text))
} catch {
closed = true
}
}
try {
const events = await readExecutionEvents(executionId, lastEventId)
for (const entry of events) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const currentMeta = await getExecutionMeta(executionId)
if (!currentMeta || isTerminalStatus(currentMeta.status)) {
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
while (!closed && Date.now() < pollDeadline) {
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
if (closed) return
const newEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of newEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const polledMeta = await getExecutionMeta(executionId)
if (!polledMeta || isTerminalStatus(polledMeta.status)) {
const finalEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of finalEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
}
if (!closed) {
logger.warn('Reconnection stream poll deadline reached', { executionId })
enqueue('data: [DONE]\n\n')
controller.close()
}
} catch (error) {
logger.error('Error in reconnection stream', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
if (!closed) {
try {
controller.close()
} catch {}
}
}
},
cancel() {
closed = true
logger.info('Client disconnected from reconnection stream', { executionId })
},
})
return new NextResponse(stream, {
headers: {
...SSE_HEADERS,
'X-Execution-Id': executionId,
},
})
} catch (error: any) {
logger.error('Failed to start reconnection stream', {
workflowId,
executionId,
error: error.message,
})
return NextResponse.json(
{ error: error.message || 'Failed to start reconnection stream' },
{ status: 500 }
)
}
}
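
A hedged browser-side sketch of resuming a run through the handler above. The route path is not shown in this diff and is passed in; from is the last event id the client has already applied, and the stream is assumed to carry single-line data frames ending with the data: [DONE] sentinel, matching the enqueue calls in the handler.

// Hypothetical client for the reconnection stream; the endpoint argument is a
// placeholder for the real route path.
async function resumeExecutionStream(
  endpoint: string,
  fromEventId: number,
  onEvent: (payload: string) => void
): Promise<void> {
  const res = await fetch(`${endpoint}?from=${fromEventId}`)
  if (!res.ok || !res.body) {
    throw new Error(`Failed to resume execution stream (${res.status})`)
  }
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    const frames = buffer.split('\n\n')
    buffer = frames.pop() ?? '' // keep any partial frame for the next chunk
    for (const frame of frames) {
      const payload = frame.replace(/^data: /, '')
      if (payload === '[DONE]') return // terminal sentinel emitted by the handler
      onEvent(payload)
    }
  }
}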

View File

@@ -1,14 +1,12 @@
import { db } from '@sim/db'
import { workspaceEnvironment } from '@sim/db/schema'
import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { encryptSecret } from '@/lib/core/security/encryption'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment'
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WorkspaceEnvironmentAPI')
@@ -46,10 +44,44 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { workspaceDecrypted, personalDecrypted, conflicts } = await getPersonalAndWorkspaceEnv(
userId,
workspaceId
)
// Workspace env (encrypted)
const wsEnvRow = await db
.select()
.from(workspaceEnvironment)
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
.limit(1)
const wsEncrypted: Record<string, string> = (wsEnvRow[0]?.variables as any) || {}
// Personal env (encrypted)
const personalRow = await db
.select()
.from(environment)
.where(eq(environment.userId, userId))
.limit(1)
const personalEncrypted: Record<string, string> = (personalRow[0]?.variables as any) || {}
// Decrypt both for UI
const decryptAll = async (src: Record<string, string>) => {
const out: Record<string, string> = {}
for (const [k, v] of Object.entries(src)) {
try {
const { decrypted } = await decryptSecret(v)
out[k] = decrypted
} catch {
out[k] = ''
}
}
return out
}
const [workspaceDecrypted, personalDecrypted] = await Promise.all([
decryptAll(wsEncrypted),
decryptAll(personalEncrypted),
])
const conflicts = Object.keys(personalDecrypted).filter((k) => k in workspaceDecrypted)
return NextResponse.json(
{
@@ -124,12 +156,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
set: { variables: merged, updatedAt: new Date() },
})
await syncWorkspaceEnvCredentials({
workspaceId,
envKeys: Object.keys(merged),
actingUserId: userId,
})
return NextResponse.json({ success: true })
} catch (error: any) {
logger.error(`[${requestId}] Workspace env PUT error`, error)
@@ -196,12 +222,6 @@ export async function DELETE(
set: { variables: current, updatedAt: new Date() },
})
await syncWorkspaceEnvCredentials({
workspaceId,
envKeys: Object.keys(current),
actingUserId: userId,
})
return NextResponse.json({ success: true })
} catch (error: any) {
logger.error(`[${requestId}] Workspace env DELETE error`, error)

View File

@@ -13,9 +13,6 @@ export type CommandId =
| 'goto-logs'
| 'open-search'
| 'run-workflow'
| 'focus-copilot-tab'
| 'focus-toolbar-tab'
| 'focus-editor-tab'
| 'clear-terminal-console'
| 'focus-toolbar-search'
| 'clear-notifications'
@@ -75,21 +72,6 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
shortcut: 'Mod+Enter',
allowInEditable: false,
},
'focus-copilot-tab': {
id: 'focus-copilot-tab',
shortcut: 'C',
allowInEditable: false,
},
'focus-toolbar-tab': {
id: 'focus-toolbar-tab',
shortcut: 'T',
allowInEditable: false,
},
'focus-editor-tab': {
id: 'focus-editor-tab',
shortcut: 'E',
allowInEditable: false,
},
'clear-terminal-console': {
id: 'clear-terminal-console',
shortcut: 'Mod+D',

View File

@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
className='min-h-[120px] resize-none'
value={description}
onChange={(e) => setDescription(e.target.value)}
maxLength={500}
maxLength={2000}
disabled={isGenerating}
/>
<div className='flex items-center justify-between'>
@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
</p>
)}
{!updateMutation.error && !generateMutation.error && <div />}
<p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
<p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
</div>
</ModalBody>
<ModalFooter>

View File

@@ -57,6 +57,21 @@ export function useChangeDetection({
}
}
if (block.triggerMode) {
const triggerConfigValue = blockSubValues?.triggerConfig
if (
triggerConfigValue &&
typeof triggerConfigValue === 'object' &&
!subBlocks.triggerConfig
) {
subBlocks.triggerConfig = {
id: 'triggerConfig',
type: 'short-input',
value: triggerConfigValue,
}
}
}
blocksWithSubBlocks[blockId] = {
...block,
subBlocks,

View File

@@ -1,7 +1,10 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { runPreDeployChecks } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-predeploy-checks'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('useDeployment')
@@ -35,6 +38,24 @@ export function useDeployment({
return { success: true, shouldOpenModal: true }
}
const { blocks, edges, loops, parallels } = useWorkflowStore.getState()
const liveBlocks = mergeSubblockState(blocks, workflowId)
const checkResult = runPreDeployChecks({
blocks: liveBlocks,
edges,
loops,
parallels,
workflowId,
})
if (!checkResult.passed) {
addNotification({
level: 'error',
message: checkResult.error || 'Pre-deploy validation failed',
workflowId,
})
return { success: false, shouldOpenModal: false }
}
setIsDeploying(true)
try {
const response = await fetch(`/api/workflows/${workflowId}/deploy`, {

View File

@@ -30,7 +30,6 @@ export interface OAuthRequiredModalProps {
requiredScopes?: string[]
serviceId: string
newScopes?: string[]
onConnect?: () => Promise<void> | void
}
const SCOPE_DESCRIPTIONS: Record<string, string> = {
@@ -315,7 +314,6 @@ export function OAuthRequiredModal({
requiredScopes = [],
serviceId,
newScopes = [],
onConnect,
}: OAuthRequiredModalProps) {
const [error, setError] = useState<string | null>(null)
const { baseProvider } = parseProvider(provider)
@@ -361,12 +359,6 @@ export function OAuthRequiredModal({
setError(null)
try {
if (onConnect) {
await onConnect()
onClose()
return
}
const providerId = getProviderIdFromServiceId(serviceId)
logger.info('Linking OAuth2:', {

View File

@@ -3,12 +3,10 @@
import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import { ExternalLink, Users } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button, Combobox } from '@/components/emcn/components'
import { getSubscriptionStatus } from '@/lib/billing/client'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { getPollingProviderFromOAuth } from '@/lib/credential-sets/providers'
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
import {
getCanonicalScopesForProvider,
getProviderIdFromServiceId,
@@ -20,9 +18,9 @@ import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { CREDENTIAL_SET } from '@/executor/constants'
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSets } from '@/hooks/queries/credential-sets'
import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import { useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
@@ -48,8 +46,6 @@ export function CredentialSelector({
previewValue,
previewContextValues,
}: CredentialSelectorProps) {
const params = useParams()
const workspaceId = (params?.workspaceId as string) || ''
const [showOAuthModal, setShowOAuthModal] = useState(false)
const [editingValue, setEditingValue] = useState('')
const [isEditing, setIsEditing] = useState(false)
@@ -100,32 +96,53 @@ export function CredentialSelector({
data: credentials = [],
isFetching: credentialsLoading,
refetch: refetchCredentials,
} = useOAuthCredentials(effectiveProviderId, {
enabled: Boolean(effectiveProviderId),
workspaceId,
workflowId: activeWorkflowId || undefined,
})
} = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
const selectedCredential = useMemo(
() => credentials.find((cred) => cred.id === selectedId),
[credentials, selectedId]
)
const shouldFetchForeignMeta =
Boolean(selectedId) &&
!selectedCredential &&
Boolean(activeWorkflowId) &&
Boolean(effectiveProviderId)
const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
useOAuthCredentialDetail(
shouldFetchForeignMeta ? selectedId : undefined,
activeWorkflowId || undefined,
shouldFetchForeignMeta
)
const hasForeignMeta = foreignCredentials.length > 0
const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
const selectedCredentialSet = useMemo(
() => credentialSets.find((cs) => cs.id === selectedCredentialSetId),
[credentialSets, selectedCredentialSetId]
)
const isForeignCredentialSet = Boolean(isCredentialSetSelected && !selectedCredentialSet)
const resolvedLabel = useMemo(() => {
if (selectedCredentialSet) return selectedCredentialSet.name
if (isForeignCredentialSet) return CREDENTIAL.FOREIGN_LABEL
if (selectedCredential) return selectedCredential.name
if (isForeign) return CREDENTIAL.FOREIGN_LABEL
return ''
}, [selectedCredentialSet, selectedCredential])
}, [selectedCredentialSet, isForeignCredentialSet, selectedCredential, isForeign])
const displayValue = isEditing ? editingValue : resolvedLabel
const invalidSelection =
!isPreview && Boolean(selectedId) && !selectedCredential && !credentialsLoading
!isPreview &&
Boolean(selectedId) &&
!selectedCredential &&
!hasForeignMeta &&
!credentialsLoading &&
!foreignMetaLoading
useEffect(() => {
if (!invalidSelection) return
@@ -136,7 +153,7 @@ export function CredentialSelector({
setStoreValue('')
}, [invalidSelection, selectedId, effectiveProviderId, setStoreValue])
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
useCredentialRefreshTriggers(refetchCredentials)
const handleOpenChange = useCallback(
(isOpen: boolean) => {
@@ -178,18 +195,8 @@ export function CredentialSelector({
)
const handleAddCredential = useCallback(() => {
writePendingCredentialCreateRequest({
workspaceId,
type: 'oauth',
providerId: effectiveProviderId,
displayName: '',
serviceId,
requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
requestedAt: Date.now(),
})
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
}, [workspaceId, effectiveProviderId, serviceId])
setShowOAuthModal(true)
}, [])
const getProviderIcon = useCallback((providerName: OAuthProvider) => {
const { baseProvider } = parseProvider(providerName)
@@ -244,18 +251,23 @@ export function CredentialSelector({
label: cred.name,
value: cred.id,
}))
credentialItems.push({
label:
credentials.length > 0
? `Connect another ${getProviderName(provider)} account`
: `Connect ${getProviderName(provider)} account`,
value: '__connect_account__',
})
groups.push({
section: 'Personal Credential',
items: credentialItems,
})
if (credentialItems.length > 0) {
groups.push({
section: 'Personal Credential',
items: credentialItems,
})
} else {
groups.push({
section: 'Personal Credential',
items: [
{
label: `Connect ${getProviderName(provider)} account`,
value: '__connect_account__',
},
],
})
}
return { comboboxOptions: [], comboboxGroups: groups }
}
@@ -265,13 +277,12 @@ export function CredentialSelector({
value: cred.id,
}))
options.push({
label:
credentials.length > 0
? `Connect another ${getProviderName(provider)} account`
: `Connect ${getProviderName(provider)} account`,
value: '__connect_account__',
})
if (credentials.length === 0) {
options.push({
label: `Connect ${getProviderName(provider)} account`,
value: '__connect_account__',
})
}
return { comboboxOptions: options, comboboxGroups: undefined }
}, [
@@ -357,7 +368,7 @@ export function CredentialSelector({
}
disabled={effectiveDisabled}
editable={true}
filterOptions={true}
filterOptions={!isForeign && !isForeignCredentialSet}
isLoading={credentialsLoading}
overlayContent={overlayContent}
className={selectedId || isCredentialSetSelected ? 'pl-[28px]' : ''}
@@ -369,13 +380,15 @@ export function CredentialSelector({
<span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
Additional permissions required
</div>
<Button
variant='active'
onClick={() => setShowOAuthModal(true)}
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
>
Update access
</Button>
{!isForeign && (
<Button
variant='active'
onClick={() => setShowOAuthModal(true)}
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
>
Update access
</Button>
)}
</div>
)}
@@ -394,11 +407,7 @@ export function CredentialSelector({
)
}
function useCredentialRefreshTriggers(
refetchCredentials: () => Promise<unknown>,
providerId: string,
workspaceId: string
) {
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
useEffect(() => {
const refresh = () => {
void refetchCredentials()
@@ -416,29 +425,12 @@ function useCredentialRefreshTriggers(
}
}
const handleCredentialsUpdated = (
event: CustomEvent<{ providerId?: string; workspaceId?: string }>
) => {
if (event.detail?.providerId && event.detail.providerId !== providerId) {
return
}
if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
return
}
refresh()
}
document.addEventListener('visibilitychange', handleVisibilityChange)
window.addEventListener('pageshow', handlePageShow)
window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)
return () => {
document.removeEventListener('visibilitychange', handleVisibilityChange)
window.removeEventListener('pageshow', handlePageShow)
window.removeEventListener(
'oauth-credentials-updated',
handleCredentialsUpdated as EventListener
)
}
}, [providerId, workspaceId, refetchCredentials])
}, [refetchCredentials])
}

View File

@@ -9,7 +9,6 @@ import {
PopoverSection,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
import {
usePersonalEnvironment,
useWorkspaceEnvironment,
@@ -169,15 +168,7 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
}, [searchTerm])
const openEnvironmentSettings = () => {
if (workspaceId) {
writePendingCredentialCreateRequest({
workspaceId,
type: 'env_personal',
envKey: searchTerm.trim(),
requestedAt: Date.now(),
})
}
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'environment' } }))
onClose?.()
}
@@ -311,7 +302,7 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
}}
>
<Plus className='h-3 w-3' />
<span>Create Secret</span>
<span>Create environment variable</span>
</PopoverItem>
</PopoverScrollArea>
) : (

View File

@@ -7,6 +7,7 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import { getBlock } from '@/blocks/registry'
@@ -124,6 +125,8 @@ export function FileSelectorInput({
const serviceId = subBlock.serviceId || ''
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
const selectorResolution = useMemo<SelectorResolution | null>(() => {
return resolveSelectorForSubBlock(subBlock, {
workflowId: workflowIdFromUrl,
@@ -165,6 +168,7 @@ export function FileSelectorInput({
const disabledReason =
finalDisabled ||
isForeignCredential ||
missingCredential ||
missingDomain ||
missingProject ||

View File

@@ -4,6 +4,7 @@ import { useCallback, useEffect, useMemo, useState } from 'react'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import type { SubBlockConfig } from '@/blocks/types'
@@ -46,6 +47,10 @@ export function FolderSelectorInput({
subBlock.canonicalParamId === 'copyDestinationId' ||
subBlock.id === 'copyDestinationFolder' ||
subBlock.id === 'manualCopyDestinationFolder'
const { isForeignCredential } = useForeignCredential(
effectiveProviderId,
(connectedCredential as string) || ''
)
// Central dependsOn gating
const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
@@ -114,7 +119,9 @@ export function FolderSelectorInput({
selectorContext={
selectorResolution?.context ?? { credentialId, workflowId: activeWorkflowId || '' }
}
disabled={finalDisabled || missingCredential || !selectorResolution?.key}
disabled={
finalDisabled || isForeignCredential || missingCredential || !selectorResolution?.key
}
isPreview={isPreview}
previewValue={previewValue ?? null}
placeholder={subBlock.placeholder || 'Select folder'}

View File

@@ -7,6 +7,7 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import { getBlock } from '@/blocks/registry'
@@ -72,6 +73,11 @@ export function ProjectSelectorInput({
const serviceId = subBlock.serviceId || ''
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
const { isForeignCredential } = useForeignCredential(
effectiveProviderId,
(connectedCredential as string) || ''
)
const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || ''
const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
disabled,
@@ -117,7 +123,7 @@ export function ProjectSelectorInput({
subBlock={subBlock}
selectorKey={selectorResolution.key}
selectorContext={selectorResolution.context}
disabled={finalDisabled || missingCredential}
disabled={finalDisabled || isForeignCredential || missingCredential}
isPreview={isPreview}
previewValue={previewValue ?? null}
placeholder={subBlock.placeholder || 'Select project'}

View File

@@ -7,6 +7,7 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import { getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
@@ -86,6 +87,8 @@ export function SheetSelectorInput({
const serviceId = subBlock.serviceId || ''
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
const selectorResolution = useMemo<SelectorResolution | null>(() => {
return resolveSelectorForSubBlock(subBlock, {
workflowId: workflowIdFromUrl,
@@ -98,7 +101,11 @@ export function SheetSelectorInput({
const missingSpreadsheet = !normalizedSpreadsheetId
const disabledReason =
finalDisabled || missingCredential || missingSpreadsheet || !selectorResolution?.key
finalDisabled ||
isForeignCredential ||
missingCredential ||
missingSpreadsheet ||
!selectorResolution?.key
if (!selectorResolution?.key) {
return (

View File

@@ -6,6 +6,7 @@ import { Tooltip } from '@/components/emcn'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
import type { SubBlockConfig } from '@/blocks/types'
@@ -84,6 +85,11 @@ export function SlackSelectorInput({
? (effectiveBotToken as string) || ''
: (effectiveCredential as string) || ''
const { isForeignCredential } = useForeignCredential(
effectiveProviderId,
(effectiveAuthMethod as string) === 'bot_token' ? '' : (effectiveCredential as string) || ''
)
useEffect(() => {
const val = isPreview && previewValue !== undefined ? previewValue : storeValue
if (typeof val === 'string') {
@@ -93,7 +99,7 @@ export function SlackSelectorInput({
const requiresCredential = dependsOn.includes('credential')
const missingCredential = !credential || credential.trim().length === 0
const shouldForceDisable = requiresCredential && missingCredential
const shouldForceDisable = requiresCredential && (missingCredential || isForeignCredential)
const context: SelectorContext = useMemo(
() => ({
@@ -130,7 +136,7 @@ export function SlackSelectorInput({
subBlock={subBlock}
selectorKey={config.selectorKey}
selectorContext={context}
disabled={finalDisabled || shouldForceDisable}
disabled={finalDisabled || shouldForceDisable || isForeignCredential}
isPreview={isPreview}
previewValue={previewValue ?? null}
placeholder={subBlock.placeholder || config.placeholder}

View File

@@ -1,8 +1,6 @@
import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
import { ExternalLink } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button, Combobox } from '@/components/emcn/components'
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
import {
getCanonicalScopesForProvider,
getProviderIdFromServiceId,
@@ -12,7 +10,8 @@ import {
parseProvider,
} from '@/lib/oauth'
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import { CREDENTIAL } from '@/executor/constants'
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -55,12 +54,10 @@ export function ToolCredentialSelector({
onChange,
provider,
requiredScopes = [],
label = 'Select credential',
label = 'Select account',
serviceId,
disabled = false,
}: ToolCredentialSelectorProps) {
const params = useParams()
const workspaceId = (params?.workspaceId as string) || ''
const [showOAuthModal, setShowOAuthModal] = useState(false)
const [editingInputValue, setEditingInputValue] = useState('')
const [isEditing, setIsEditing] = useState(false)
@@ -74,32 +71,50 @@ export function ToolCredentialSelector({
data: credentials = [],
isFetching: credentialsLoading,
refetch: refetchCredentials,
} = useOAuthCredentials(effectiveProviderId, {
enabled: Boolean(effectiveProviderId),
workspaceId,
workflowId: activeWorkflowId || undefined,
})
} = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
const selectedCredential = useMemo(
() => credentials.find((cred) => cred.id === selectedId),
[credentials, selectedId]
)
const shouldFetchForeignMeta =
Boolean(selectedId) &&
!selectedCredential &&
Boolean(activeWorkflowId) &&
Boolean(effectiveProviderId)
const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
useOAuthCredentialDetail(
shouldFetchForeignMeta ? selectedId : undefined,
activeWorkflowId || undefined,
shouldFetchForeignMeta
)
const hasForeignMeta = foreignCredentials.length > 0
const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
const resolvedLabel = useMemo(() => {
if (selectedCredential) return selectedCredential.name
if (isForeign) return CREDENTIAL.FOREIGN_LABEL
return ''
}, [selectedCredential])
}, [selectedCredential, isForeign])
const inputValue = isEditing ? editingInputValue : resolvedLabel
const invalidSelection = Boolean(selectedId) && !selectedCredential && !credentialsLoading
const invalidSelection =
Boolean(selectedId) &&
!selectedCredential &&
!hasForeignMeta &&
!credentialsLoading &&
!foreignMetaLoading
useEffect(() => {
if (!invalidSelection) return
onChange('')
}, [invalidSelection, onChange])
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
useCredentialRefreshTriggers(refetchCredentials)
const handleOpenChange = useCallback(
(isOpen: boolean) => {
@@ -127,18 +142,8 @@ export function ToolCredentialSelector({
)
const handleAddCredential = useCallback(() => {
writePendingCredentialCreateRequest({
workspaceId,
type: 'oauth',
providerId: effectiveProviderId,
displayName: '',
serviceId,
requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
requestedAt: Date.now(),
})
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
}, [workspaceId, effectiveProviderId, serviceId])
setShowOAuthModal(true)
}, [])
const comboboxOptions = useMemo(() => {
const options = credentials.map((cred) => ({
@@ -146,13 +151,12 @@ export function ToolCredentialSelector({
value: cred.id,
}))
options.push({
label:
credentials.length > 0
? `Connect another ${getProviderName(provider)} account`
: `Connect ${getProviderName(provider)} account`,
value: '__connect_account__',
})
if (credentials.length === 0) {
options.push({
label: `Connect ${getProviderName(provider)} account`,
value: '__connect_account__',
})
}
return options
}, [credentials, provider])
@@ -202,7 +206,7 @@ export function ToolCredentialSelector({
placeholder={label}
disabled={disabled}
editable={true}
filterOptions={true}
filterOptions={!isForeign}
isLoading={credentialsLoading}
overlayContent={overlayContent}
className={selectedId ? 'pl-[28px]' : ''}
@@ -214,13 +218,15 @@ export function ToolCredentialSelector({
<span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
Additional permissions required
</div>
<Button
variant='active'
onClick={() => setShowOAuthModal(true)}
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
>
Update access
</Button>
{!isForeign && (
<Button
variant='active'
onClick={() => setShowOAuthModal(true)}
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
>
Update access
</Button>
)}
</div>
)}
@@ -239,11 +245,7 @@ export function ToolCredentialSelector({
)
}
function useCredentialRefreshTriggers(
refetchCredentials: () => Promise<unknown>,
providerId: string,
workspaceId: string
) {
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
useEffect(() => {
const refresh = () => {
void refetchCredentials()
@@ -261,29 +263,12 @@ function useCredentialRefreshTriggers(
}
}
const handleCredentialsUpdated = (
event: CustomEvent<{ providerId?: string; workspaceId?: string }>
) => {
if (event.detail?.providerId && event.detail.providerId !== providerId) {
return
}
if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
return
}
refresh()
}
document.addEventListener('visibilitychange', handleVisibilityChange)
window.addEventListener('pageshow', handlePageShow)
window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)
return () => {
document.removeEventListener('visibilitychange', handleVisibilityChange)
window.removeEventListener('pageshow', handlePageShow)
window.removeEventListener(
'oauth-credentials-updated',
handleCredentialsUpdated as EventListener
)
}
}, [providerId, workspaceId, refetchCredentials])
}, [refetchCredentials])
}

View File

@@ -0,0 +1,186 @@
'use client'
import type React from 'react'
import { useRef, useState } from 'react'
import { ArrowLeftRight, ArrowUp } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { WandControlHandlers } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
/**
* Props for the generic ParameterWithLabel wrapper component
*/
export interface ParameterWithLabelProps {
paramId: string
title: string
isRequired: boolean
visibility: string
wandConfig?: {
enabled: boolean
prompt?: string
placeholder?: string
}
canonicalToggle?: {
mode: 'basic' | 'advanced'
disabled?: boolean
onToggle?: () => void
}
disabled: boolean
isPreview: boolean
children: (wandControlRef: React.MutableRefObject<WandControlHandlers | null>) => React.ReactNode
}
/**
* Generic wrapper for a single parameter; manages wand state and renders the label + input
*/
export function ParameterWithLabel({
paramId,
title,
isRequired,
visibility,
wandConfig,
canonicalToggle,
disabled,
isPreview,
children,
}: ParameterWithLabelProps) {
const [isSearchActive, setIsSearchActive] = useState(false)
const [searchQuery, setSearchQuery] = useState('')
const searchInputRef = useRef<HTMLInputElement>(null)
const wandControlRef = useRef<WandControlHandlers | null>(null)
const isWandEnabled = wandConfig?.enabled ?? false
const showWand = isWandEnabled && !isPreview && !disabled
const handleSearchClick = (): void => {
setIsSearchActive(true)
setTimeout(() => {
searchInputRef.current?.focus()
}, 0)
}
const handleSearchBlur = (): void => {
if (!searchQuery.trim() && !wandControlRef.current?.isWandStreaming) {
setIsSearchActive(false)
}
}
const handleSearchChange = (value: string): void => {
setSearchQuery(value)
}
const handleSearchSubmit = (): void => {
if (searchQuery.trim() && wandControlRef.current) {
wandControlRef.current.onWandTrigger(searchQuery)
setSearchQuery('')
setIsSearchActive(false)
}
}
const handleSearchCancel = (): void => {
setSearchQuery('')
setIsSearchActive(false)
}
const isStreaming = wandControlRef.current?.isWandStreaming ?? false
return (
<div key={paramId} className='relative min-w-0 space-y-[6px]'>
<div className='flex items-center justify-between gap-[6px] pl-[2px]'>
<Label className='flex items-baseline gap-[6px] whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
{title}
{isRequired && visibility === 'user-only' && <span className='ml-0.5'>*</span>}
</Label>
<div className='flex min-w-0 flex-1 items-center justify-end gap-[6px]'>
{showWand &&
(!isSearchActive ? (
<Button
variant='active'
className='-my-1 h-5 px-2 py-0 text-[11px]'
onClick={handleSearchClick}
>
Generate
</Button>
) : (
<div className='-my-1 flex min-w-[120px] max-w-[280px] flex-1 items-center gap-[4px]'>
<Input
ref={searchInputRef}
value={isStreaming ? 'Generating...' : searchQuery}
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
handleSearchChange(e.target.value)
}
onBlur={(e: React.FocusEvent<HTMLInputElement>) => {
const relatedTarget = e.relatedTarget as HTMLElement | null
if (relatedTarget?.closest('button')) return
handleSearchBlur()
}}
onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => {
if (e.key === 'Enter' && searchQuery.trim() && !isStreaming) {
handleSearchSubmit()
} else if (e.key === 'Escape') {
handleSearchCancel()
}
}}
disabled={isStreaming}
className={cn(
'h-5 min-w-[80px] flex-1 text-[11px]',
isStreaming && 'text-muted-foreground'
)}
placeholder='Generate with AI...'
/>
<Button
variant='tertiary'
disabled={!searchQuery.trim() || isStreaming}
onMouseDown={(e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
}}
onClick={(e: React.MouseEvent) => {
e.stopPropagation()
handleSearchSubmit()
}}
className='h-[20px] w-[20px] flex-shrink-0 p-0'
>
<ArrowUp className='h-[12px] w-[12px]' />
</Button>
</div>
))}
{canonicalToggle && !isPreview && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
onClick={canonicalToggle.onToggle}
disabled={canonicalToggle.disabled || disabled}
aria-label={
canonicalToggle.mode === 'advanced'
? 'Switch to selector'
: 'Switch to manual ID'
}
>
<ArrowLeftRight
className={cn(
'!h-[12px] !w-[12px]',
canonicalToggle.mode === 'advanced'
? 'text-[var(--text-primary)]'
: 'text-[var(--text-secondary)]'
)}
/>
</button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>
{canonicalToggle.mode === 'advanced'
? 'Switch to selector'
: 'Switch to manual ID'}
</p>
</Tooltip.Content>
</Tooltip.Root>
)}
</div>
</div>
<div className='relative w-full min-w-0'>{children(wandControlRef)}</div>
</div>
)
}
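
A minimal usage sketch of the new ParameterWithLabel wrapper (not part of this compare): the param id, title, and the plain input child are illustrative, and the import from the new file above is omitted because its path is not shown in this view.

function ExampleQueryParameter({ disabled }: { disabled: boolean }) {
  return (
    <ParameterWithLabel
      paramId='query'
      title='Query'
      isRequired
      visibility='user-only'
      disabled={disabled}
      isPreview={false}
    >
      {(wandControlRef) => (
        // The render prop receives the wand control ref so a richer child could
        // register streaming handlers; a plain input simply ignores it here.
        <input className='w-full' placeholder='Enter a query' disabled={disabled} />
      )}
    </ParameterWithLabel>
  )
}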

View File

@@ -0,0 +1,100 @@
'use client'
import { useEffect, useRef } from 'react'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
interface ToolSubBlockRendererProps {
blockId: string
subBlockId: string
toolIndex: number
subBlock: BlockSubBlockConfig
effectiveParamId: string
toolParams: Record<string, string> | undefined
onParamChange: (toolIndex: number, paramId: string, value: string) => void
disabled: boolean
canonicalToggle?: {
mode: 'basic' | 'advanced'
disabled?: boolean
onToggle?: () => void
}
}
/**
* Bridges the subblock store with StoredTool.params via a synthetic store key,
* then delegates all rendering to SubBlock for full parity.
*
* Two effects handle bidirectional sync:
* - tool.params → store (external changes)
* - store → tool.params (user interaction)
*/
export function ToolSubBlockRenderer({
blockId,
subBlockId,
toolIndex,
subBlock,
effectiveParamId,
toolParams,
onParamChange,
disabled,
canonicalToggle,
}: ToolSubBlockRendererProps) {
const syntheticId = `${subBlockId}-tool-${toolIndex}-${effectiveParamId}`
const [storeValue, setStoreValue] = useSubBlockValue(blockId, syntheticId)
const toolParamValue = toolParams?.[effectiveParamId] ?? ''
/** Tracks the last value we pushed to the store from tool.params to avoid echo loops */
const lastPushedToStoreRef = useRef<string | null>(null)
/** Tracks the last value we synced back to tool.params from the store */
const lastPushedToParamsRef = useRef<string | null>(null)
// Sync tool.params → store: push when the prop value changes (including first mount)
useEffect(() => {
if (!toolParamValue && lastPushedToStoreRef.current === null) {
// Skip initializing the store with an empty value on first mount —
// let the SubBlock component use its own default.
lastPushedToStoreRef.current = toolParamValue
lastPushedToParamsRef.current = toolParamValue
return
}
if (toolParamValue !== lastPushedToStoreRef.current) {
lastPushedToStoreRef.current = toolParamValue
lastPushedToParamsRef.current = toolParamValue
setStoreValue(toolParamValue)
}
}, [toolParamValue, setStoreValue])
// Sync store → tool.params: push when the user changes the value via SubBlock
useEffect(() => {
if (storeValue == null) return
const stringValue = typeof storeValue === 'string' ? storeValue : JSON.stringify(storeValue)
if (stringValue !== lastPushedToParamsRef.current) {
lastPushedToParamsRef.current = stringValue
lastPushedToStoreRef.current = stringValue
onParamChange(toolIndex, effectiveParamId, stringValue)
}
}, [storeValue, toolIndex, effectiveParamId, onParamChange])
// Suppress SubBlock's "*" required indicator when the LLM can fill the param
const visibility = subBlock.paramVisibility ?? 'user-or-llm'
const isOptionalForUser = visibility !== 'user-only'
const config = {
...subBlock,
id: syntheticId,
...(isOptionalForUser && { required: false }),
}
return (
<SubBlock
blockId={blockId}
config={config}
isPreview={false}
disabled={disabled}
canonicalToggle={canonicalToggle}
dependencyContext={toolParams}
/>
)
}
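
A hedged sketch of how one registry-tool parameter might flow through this bridge; the surrounding props (the tool object, handleParamChange, subBlockConfig) are assumptions about the calling tool-input component, not code from this compare.

import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'

function ExampleToolParam({
  blockId,
  tool,
  subBlockConfig,
  handleParamChange,
}: {
  blockId: string
  tool: { params?: Record<string, string> }
  subBlockConfig: BlockSubBlockConfig
  handleParamChange: (toolIndex: number, paramId: string, value: string) => void
}) {
  // With these props the synthetic store key becomes `tools-tool-0-spreadsheetId`,
  // so this instance's value never collides with other tools on the same block.
  return (
    <ToolSubBlockRenderer
      blockId={blockId}
      subBlockId='tools'
      toolIndex={0}
      subBlock={subBlockConfig}
      effectiveParamId='spreadsheetId'
      toolParams={tool.params}
      onParamChange={handleParamChange}
      disabled={false}
    />
  )
}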

View File

@@ -2,37 +2,12 @@
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
interface StoredTool {
type: string
title?: string
toolId?: string
params?: Record<string, string>
customToolId?: string
schema?: any
code?: string
operation?: string
usageControl?: 'auto' | 'force' | 'none'
}
const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}
const isCustomToolAlreadySelected = (
selectedTools: StoredTool[],
customToolId: string
): boolean => {
return selectedTools.some(
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
)
}
const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
return selectedTools.some(
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
)
}
import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
import {
isCustomToolAlreadySelected,
isMcpToolAlreadySelected,
isWorkflowAlreadySelected,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/utils'
describe('isMcpToolAlreadySelected', () => {
describe('basic functionality', () => {

View File

@@ -0,0 +1,31 @@
/**
* Represents a tool selected and configured in the workflow
*
* @remarks
* For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded.
* Everything else (title, schema, code) is loaded dynamically from the database.
* Legacy custom tools with inline schema/code are still supported for backwards compatibility.
*/
export interface StoredTool {
/** Block type identifier */
type: string
/** Display title for the tool (optional for new custom tool format) */
title?: string
/** Direct tool ID for execution (optional for new custom tool format) */
toolId?: string
/** Parameter values configured by the user (optional for new custom tool format) */
params?: Record<string, string>
/** Whether the tool details are expanded in UI */
isExpanded?: boolean
/** Database ID for custom tools (new format - reference only) */
customToolId?: string
/** Tool schema for custom tools (legacy format - inline JSON schema) */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
schema?: Record<string, any>
/** Implementation code for custom tools (legacy format - inline) */
code?: string
/** Selected operation for multi-operation tools */
operation?: string
/** Tool usage control mode for LLM */
usageControl?: 'auto' | 'force' | 'none'
}
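
Hedged example values (not from the diff) contrasting the two custom-tool shapes described in the remarks above; the IDs, schema, and code are illustrative only.

const newFormatCustomTool: StoredTool = {
  type: 'custom-tool',
  customToolId: 'ct_123', // reference only; title/schema/code load from the database
  usageControl: 'auto',
  isExpanded: false,
}

const legacyCustomTool: StoredTool = {
  type: 'custom-tool',
  title: 'Summarize text',
  schema: { type: 'object', properties: { text: { type: 'string' } } },
  code: 'return { summary: text.slice(0, 100) }',
  usageControl: 'force',
}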

View File

@@ -0,0 +1,32 @@
import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
/**
* Checks if an MCP tool is already selected.
*/
export function isMcpToolAlreadySelected(selectedTools: StoredTool[], mcpToolId: string): boolean {
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}
/**
* Checks if a custom tool is already selected.
*/
export function isCustomToolAlreadySelected(
selectedTools: StoredTool[],
customToolId: string
): boolean {
return selectedTools.some(
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
)
}
/**
* Checks if a workflow is already selected.
*/
export function isWorkflowAlreadySelected(
selectedTools: StoredTool[],
workflowId: string
): boolean {
return selectedTools.some(
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
)
}
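
A quick usage sketch of the extracted guards above; the tool list and IDs are illustrative.

const selectedTools: StoredTool[] = [
  { type: 'mcp', toolId: 'mcp-search' },
  { type: 'workflow_input', params: { workflowId: 'wf_child' } },
]

console.log(isMcpToolAlreadySelected(selectedTools, 'mcp-search')) // true
console.log(isCustomToolAlreadySelected(selectedTools, 'ct_123')) // false
console.log(isWorkflowAlreadySelected(selectedTools, 'wf_child')) // true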

View File

@@ -0,0 +1,50 @@
import { useEffect, useMemo, useState } from 'react'
export function useForeignCredential(
provider: string | undefined,
credentialId: string | undefined
) {
const [isForeign, setIsForeign] = useState<boolean>(false)
const [loading, setLoading] = useState<boolean>(false)
const [error, setError] = useState<string | null>(null)
const normalizedProvider = useMemo(() => (provider || '').toString(), [provider])
const normalizedCredentialId = useMemo(() => credentialId || '', [credentialId])
useEffect(() => {
let cancelled = false
async function check() {
setLoading(true)
setError(null)
try {
if (!normalizedProvider || !normalizedCredentialId) {
if (!cancelled) setIsForeign(false)
return
}
const res = await fetch(
`/api/auth/oauth/credentials?provider=${encodeURIComponent(normalizedProvider)}`
)
if (!res.ok) {
if (!cancelled) setIsForeign(true)
return
}
const data = await res.json()
const isOwn = (data.credentials || []).some((c: any) => c.id === normalizedCredentialId)
if (!cancelled) setIsForeign(!isOwn)
} catch (e) {
if (!cancelled) {
setIsForeign(true)
setError((e as Error).message)
}
} finally {
if (!cancelled) setLoading(false)
}
}
void check()
return () => {
cancelled = true
}
}, [normalizedProvider, normalizedCredentialId])
return { isForeignCredential: isForeign, loading, error }
}
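
A hedged consumer sketch (hypothetical component, not from this compare) showing how the selector changes earlier in this diff use the hook: a credential id that does not belong to the current user disables the control just like a missing credential.

import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'

interface ExampleSelectorProps {
  providerId: string
  credentialId: string
  disabled: boolean
}

export function ExampleSelector({ providerId, credentialId, disabled }: ExampleSelectorProps) {
  const { isForeignCredential, loading } = useForeignCredential(providerId, credentialId)
  // Treat a foreign credential like a missing one: the field stays visible,
  // but the user cannot pick resources with someone else's account.
  const finalDisabled = disabled || loading || isForeignCredential
  return <button disabled={finalDisabled}>Select resource</button>
}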

View File

@@ -3,7 +3,6 @@ import { isEqual } from 'lodash'
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'
import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
import {
CheckboxList,
Code,
@@ -69,13 +68,15 @@ interface SubBlockProps {
isPreview?: boolean
subBlockValues?: Record<string, any>
disabled?: boolean
fieldDiffStatus?: FieldDiffStatus
allowExpandInPreview?: boolean
canonicalToggle?: {
mode: 'basic' | 'advanced'
disabled?: boolean
onToggle?: () => void
}
labelSuffix?: React.ReactNode
/** Provides sibling values for dependency resolution in non-preview contexts (e.g. tool-input) */
dependencyContext?: Record<string, unknown>
}
/**
@@ -162,16 +163,14 @@ const getPreviewValue = (
/**
* Renders the label with optional validation and description tooltips.
*
* @remarks
* Handles JSON validation indicators for code blocks and required field markers.
* Includes inline AI generate button when wand is enabled.
*
* @param config - The sub-block configuration defining the label content
* @param isValidJson - Whether the JSON content is valid (for code blocks)
* @param subBlockValues - Current values of all subblocks for evaluating conditional requirements
* @param wandState - Optional state and handlers for the AI wand feature
* @param canonicalToggle - Optional canonical toggle metadata and handlers
* @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled
* @param wandState - State and handlers for the inline AI generate feature
* @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
* @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled (includes dependsOn gating)
* @param copyState - State and handler for the copy-to-clipboard button
* @param labelSuffix - Additional content rendered after the label text
* @returns The label JSX element, or `null` for switch types or when no title is defined
*/
const renderLabel = (
@@ -202,7 +201,8 @@ const renderLabel = (
showCopyButton: boolean
copied: boolean
onCopy: () => void
}
},
labelSuffix?: React.ReactNode
): JSX.Element | null => {
if (config.type === 'switch') return null
if (!config.title) return null
@@ -215,9 +215,10 @@ const renderLabel = (
return (
<div className='flex items-center justify-between gap-[6px] pl-[2px]'>
<Label className='flex items-center gap-[6px] whitespace-nowrap'>
<Label className='flex items-baseline gap-[6px] whitespace-nowrap'>
{config.title}
{required && <span className='ml-0.5'>*</span>}
{labelSuffix}
{config.type === 'code' &&
config.language === 'json' &&
!isValidJson &&
@@ -383,28 +384,25 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
prevProps.isPreview === nextProps.isPreview &&
valueEqual &&
prevProps.disabled === nextProps.disabled &&
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
canonicalToggleEqual
canonicalToggleEqual &&
prevProps.labelSuffix === nextProps.labelSuffix &&
prevProps.dependencyContext === nextProps.dependencyContext
)
}
/**
* Renders a single workflow sub-block input based on config.type.
*
* @remarks
* Supports multiple input types including short-input, long-input, dropdown,
* combobox, slider, table, code, switch, tool-input, and many more.
* Handles preview mode, disabled states, and AI wand generation.
*
* @param blockId - The parent block identifier
* @param config - Configuration defining the input type and properties
* @param isPreview - Whether to render in preview mode
* @param subBlockValues - Current values of all subblocks
* @param disabled - Whether the input is disabled
* @param fieldDiffStatus - Optional diff status for visual indicators
* @param allowExpandInPreview - Whether to allow expanding in preview mode
* @returns The rendered sub-block input component
* @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
* @param labelSuffix - Additional content rendered after the label text
* @param dependencyContext - Sibling values for dependency resolution in non-preview contexts (e.g. tool-input)
*/
function SubBlockComponent({
blockId,
@@ -412,9 +410,10 @@ function SubBlockComponent({
isPreview = false,
subBlockValues,
disabled = false,
fieldDiffStatus,
allowExpandInPreview,
canonicalToggle,
labelSuffix,
dependencyContext,
}: SubBlockProps): JSX.Element {
const [isValidJson, setIsValidJson] = useState(true)
const [isSearchActive, setIsSearchActive] = useState(false)
@@ -423,7 +422,6 @@ function SubBlockComponent({
const searchInputRef = useRef<HTMLInputElement>(null)
const wandControlRef = useRef<WandControlHandlers | null>(null)
// Use webhook management hook when config has useWebhookUrl enabled
const webhookManagement = useWebhookManagement({
blockId,
triggerId: undefined,
@@ -510,10 +508,12 @@ function SubBlockComponent({
| null
| undefined
const contextValues = dependencyContext ?? (isPreview ? subBlockValues : undefined)
const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, {
disabled,
isPreview,
previewContextValues: isPreview ? subBlockValues : undefined,
previewContextValues: contextValues,
})
const isDisabled = gatedDisabled
@@ -797,7 +797,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -809,7 +809,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -821,7 +821,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -833,7 +833,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -845,7 +845,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -868,7 +868,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue as any}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -880,7 +880,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue as any}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -892,7 +892,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue as any}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -917,7 +917,7 @@ function SubBlockComponent({
isPreview={isPreview}
previewValue={previewValue as any}
disabled={isDisabled}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -953,7 +953,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -987,7 +987,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue as any}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -999,7 +999,7 @@ function SubBlockComponent({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={isPreview ? subBlockValues : undefined}
previewContextValues={contextValues}
/>
)
@@ -1059,7 +1059,8 @@ function SubBlockComponent({
showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
copied,
onCopy: handleCopy,
}
},
labelSuffix
)}
{renderInput()}
</div>

View File

@@ -571,7 +571,6 @@ export function Editor() {
isPreview={false}
subBlockValues={subBlockState}
disabled={!canEditBlock}
fieldDiffStatus={undefined}
allowExpandInPreview={false}
canonicalToggle={
isCanonicalSwap && canonicalMode && canonicalId
@@ -635,7 +634,6 @@ export function Editor() {
isPreview={false}
subBlockValues={subBlockState}
disabled={!canEditBlock}
fieldDiffStatus={undefined}
allowExpandInPreview={false}
/>
{index < advancedOnlySubBlocks.length - 1 && (

View File

@@ -340,13 +340,7 @@ export const Panel = memo(function Panel() {
* Register global keyboard shortcuts using the central commands registry.
*
* - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
* - C: Focus Copilot tab
* - T: Focus Toolbar tab
* - E: Focus Editor tab
* - Mod+F: Focus Toolbar tab and search input
*
* The tab-switching commands are disabled inside editable elements so typing
* in inputs or textareas is not interrupted.
*/
useRegisterGlobalCommands(() =>
createCommands([
@@ -363,33 +357,6 @@ export const Panel = memo(function Panel() {
allowInEditable: false,
},
},
{
id: 'focus-copilot-tab',
handler: () => {
setActiveTab('copilot')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-toolbar-tab',
handler: () => {
setActiveTab('toolbar')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-editor-tab',
handler: () => {
setActiveTab('editor')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-toolbar-search',
handler: () => {

View File

@@ -1,4 +1,4 @@
import { useCallback, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
@@ -46,7 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('useWorkflowExecution')
// Debug state validation result
/**
* Module-level Set tracking which workflows have an active reconnection effect.
* Prevents multiple hook instances (from different components) from starting
* concurrent reconnection streams for the same workflow during the same mount cycle.
*/
const activeReconnections = new Set<string>()
interface DebugValidationResult {
isValid: boolean
error?: string
@@ -54,7 +60,7 @@ interface DebugValidationResult {
interface BlockEventHandlerConfig {
workflowId?: string
executionId?: string
executionIdRef: { current: string }
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
activeBlocksSet: Set<string>
accumulatedBlockLogs: BlockLog[]
@@ -108,12 +114,15 @@ export function useWorkflowExecution() {
const queryClient = useQueryClient()
const currentWorkflow = useCurrentWorkflow()
const { activeWorkflowId, workflows } = useWorkflowRegistry()
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
useTerminalConsoleStore()
const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
const { getAllVariables } = useEnvironmentStore()
const { getVariablesByWorkflowId, variables } = useVariablesStore()
const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
useCurrentWorkflowExecution()
const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -297,7 +306,7 @@ export function useWorkflowExecution() {
(config: BlockEventHandlerConfig) => {
const {
workflowId,
executionId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
@@ -308,6 +317,14 @@ export function useWorkflowExecution() {
onBlockCompleteCallback,
} = config
/** Returns true if this execution was cancelled or superseded by another run. */
const isStaleExecution = () =>
!!(
workflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
)
const updateActiveBlocks = (blockId: string, isActive: boolean) => {
if (!workflowId) return
if (isActive) {
@@ -360,7 +377,7 @@ export function useWorkflowExecution() {
endedAt: data.endedAt,
workflowId,
blockId: data.blockId,
executionId,
executionId: executionIdRef.current,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
@@ -383,7 +400,7 @@ export function useWorkflowExecution() {
endedAt: data.endedAt,
workflowId,
blockId: data.blockId,
executionId,
executionId: executionIdRef.current,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
@@ -410,7 +427,7 @@ export function useWorkflowExecution() {
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
},
executionId
executionIdRef.current
)
}
@@ -432,11 +449,12 @@ export function useWorkflowExecution() {
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
},
executionId
executionIdRef.current
)
}
const onBlockStarted = (data: BlockStartedData) => {
if (isStaleExecution()) return
updateActiveBlocks(data.blockId, true)
markIncomingEdges(data.blockId)
@@ -453,7 +471,7 @@ export function useWorkflowExecution() {
endedAt: undefined,
workflowId,
blockId: data.blockId,
executionId,
executionId: executionIdRef.current,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
isRunning: true,
@@ -465,6 +483,7 @@ export function useWorkflowExecution() {
}
const onBlockCompleted = (data: BlockCompletedData) => {
if (isStaleExecution()) return
updateActiveBlocks(data.blockId, false)
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
@@ -495,6 +514,7 @@ export function useWorkflowExecution() {
}
const onBlockError = (data: BlockErrorData) => {
if (isStaleExecution()) return
updateActiveBlocks(data.blockId, false)
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
@@ -902,10 +922,6 @@ export function useWorkflowExecution() {
// Update block logs with actual stream completion times
if (result.logs && streamCompletionTimes.size > 0) {
const streamCompletionEndTime = new Date(
Math.max(...Array.from(streamCompletionTimes.values()))
).toISOString()
result.logs.forEach((log: BlockLog) => {
if (streamCompletionTimes.has(log.blockId)) {
const completionTime = streamCompletionTimes.get(log.blockId)!
@@ -987,7 +1003,6 @@ export function useWorkflowExecution() {
return { success: true, stream }
}
// For manual (non-chat) execution
const manualExecutionId = uuidv4()
try {
const result = await executeWorkflow(
@@ -1002,29 +1017,10 @@ export function useWorkflowExecution() {
if (result.metadata.pendingBlocks) {
setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
}
} else if (result && 'success' in result) {
setExecutionResult(result)
// Reset execution state after successful non-debug execution
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
if (isChatExecution) {
if (!result.metadata) {
result.metadata = { duration: 0, startTime: new Date().toISOString() }
}
;(result.metadata as any).source = 'chat'
}
// Invalidate subscription queries to update usage
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
}, 1000)
}
return result
} catch (error: any) {
const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
// Note: Error logs are already persisted server-side via execution-core.ts
return errorResult
}
},
@@ -1275,7 +1271,7 @@ export function useWorkflowExecution() {
if (activeWorkflowId) {
logger.info('Using server-side executor')
const executionId = uuidv4()
const executionIdRef = { current: '' }
let executionResult: ExecutionResult = {
success: false,
@@ -1293,7 +1289,7 @@ export function useWorkflowExecution() {
try {
const blockHandlers = buildBlockEventHandlers({
workflowId: activeWorkflowId,
executionId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
@@ -1326,6 +1322,10 @@ export function useWorkflowExecution() {
loops: clientWorkflowState.loops,
parallels: clientWorkflowState.parallels,
},
onExecutionId: (id) => {
executionIdRef.current = id
setCurrentExecutionId(activeWorkflowId, id)
},
callbacks: {
onExecutionStarted: (data) => {
logger.info('Server execution started:', data)
@@ -1368,6 +1368,18 @@ export function useWorkflowExecution() {
},
onExecutionCompleted: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
executionResult = {
success: data.success,
output: data.output,
@@ -1425,9 +1437,33 @@ export function useWorkflowExecution() {
})
}
}
const workflowExecState = activeWorkflowId
? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
: null
if (activeWorkflowId && !workflowExecState?.isDebugging) {
setExecutionResult(executionResult)
setIsExecuting(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
}, 1000)
}
},
onExecutionError: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
executionResult = {
success: false,
output: {},
@@ -1441,43 +1477,53 @@ export function useWorkflowExecution() {
const isPreExecutionError = accumulatedBlockLogs.length === 0
handleExecutionErrorConsole({
workflowId: activeWorkflowId,
executionId,
executionId: executionIdRef.current,
error: data.error,
durationMs: data.duration,
blockLogs: accumulatedBlockLogs,
isPreExecutionError,
})
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
},
onExecutionCancelled: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
handleExecutionCancelledConsole({
workflowId: activeWorkflowId,
executionId,
executionId: executionIdRef.current,
durationMs: data?.duration,
})
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
},
},
})
return executionResult
} catch (error: any) {
// Don't log abort errors - they're intentional user actions
if (error.name === 'AbortError' || error.message?.includes('aborted')) {
logger.info('Execution aborted by user')
// Reset execution state
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
// Return gracefully without error
return {
success: false,
output: {},
metadata: { duration: 0 },
logs: [],
}
return executionResult
}
logger.error('Server-side execution failed:', error)
@@ -1485,7 +1531,6 @@ export function useWorkflowExecution() {
}
}
// Fallback: should never reach here
throw new Error('Server-side execution is required')
}
@@ -1717,25 +1762,28 @@ export function useWorkflowExecution() {
* Handles cancelling the current workflow execution
*/
const handleCancelExecution = useCallback(() => {
if (!activeWorkflowId) return
logger.info('Workflow execution cancellation requested')
// Cancel the execution stream for this workflow (server-side)
executionStream.cancel(activeWorkflowId ?? undefined)
const storedExecutionId = getCurrentExecutionId(activeWorkflowId)
// Mark current chat execution as superseded so its cleanup won't affect new executions
currentChatExecutionIdRef.current = null
// Mark all running entries as canceled in the terminal
if (activeWorkflowId) {
cancelRunningEntries(activeWorkflowId)
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
if (storedExecutionId) {
setCurrentExecutionId(activeWorkflowId, null)
fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
method: 'POST',
}).catch(() => {})
handleExecutionCancelledConsole({
workflowId: activeWorkflowId,
executionId: storedExecutionId,
})
}
// If in debug mode, also reset debug state
executionStream.cancel(activeWorkflowId)
currentChatExecutionIdRef.current = null
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
if (isDebugging) {
resetDebugState()
}
@@ -1747,7 +1795,9 @@ export function useWorkflowExecution() {
setIsDebugging,
setActiveBlocks,
activeWorkflowId,
cancelRunningEntries,
getCurrentExecutionId,
setCurrentExecutionId,
handleExecutionCancelledConsole,
])
/**
@@ -1847,7 +1897,7 @@ export function useWorkflowExecution() {
}
setIsExecuting(workflowId, true)
const executionId = uuidv4()
const executionIdRef = { current: '' }
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
@@ -1856,7 +1906,7 @@ export function useWorkflowExecution() {
try {
const blockHandlers = buildBlockEventHandlers({
workflowId,
executionId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
@@ -1871,6 +1921,10 @@ export function useWorkflowExecution() {
startBlockId: blockId,
sourceSnapshot: effectiveSnapshot,
input: workflowInput,
onExecutionId: (id) => {
executionIdRef.current = id
setCurrentExecutionId(workflowId, id)
},
callbacks: {
onBlockStarted: blockHandlers.onBlockStarted,
onBlockCompleted: blockHandlers.onBlockCompleted,
@@ -1878,7 +1932,6 @@ export function useWorkflowExecution() {
onExecutionCompleted: (data) => {
if (data.success) {
// Add the start block (trigger) to executed blocks
executedBlockIds.add(blockId)
const mergedBlockStates: Record<string, BlockState> = {
@@ -1902,6 +1955,10 @@ export function useWorkflowExecution() {
}
setLastExecutionSnapshot(workflowId, updatedSnapshot)
}
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
},
onExecutionError: (data) => {
@@ -1921,19 +1978,27 @@ export function useWorkflowExecution() {
handleExecutionErrorConsole({
workflowId,
executionId,
executionId: executionIdRef.current,
error: data.error,
durationMs: data.duration,
blockLogs: accumulatedBlockLogs,
})
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
},
onExecutionCancelled: (data) => {
handleExecutionCancelledConsole({
workflowId,
executionId,
executionId: executionIdRef.current,
durationMs: data?.duration,
})
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
},
},
})
@@ -1942,14 +2007,20 @@ export function useWorkflowExecution() {
logger.error('Run-from-block failed:', error)
}
} finally {
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
const currentId = getCurrentExecutionId(workflowId)
if (currentId === null || currentId === executionIdRef.current) {
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
}
}
},
[
getLastExecutionSnapshot,
setLastExecutionSnapshot,
clearLastExecutionSnapshot,
getCurrentExecutionId,
setCurrentExecutionId,
setIsExecuting,
setActiveBlocks,
setBlockRunStatus,
@@ -1979,29 +2050,213 @@ export function useWorkflowExecution() {
const executionId = uuidv4()
try {
const result = await executeWorkflow(
undefined,
undefined,
executionId,
undefined,
'manual',
blockId
)
if (result && 'success' in result) {
setExecutionResult(result)
}
await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
} catch (error) {
const errorResult = handleExecutionError(error, { executionId })
return errorResult
} finally {
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setIsDebugging(workflowId, false)
setActiveBlocks(workflowId, new Set())
}
},
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
[
activeWorkflowId,
setCurrentExecutionId,
setExecutionResult,
setIsExecuting,
setIsDebugging,
setActiveBlocks,
]
)
useEffect(() => {
if (!activeWorkflowId || !hasHydrated) return
const entries = useTerminalConsoleStore.getState().entries
const runningEntries = entries.filter(
(e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
)
if (runningEntries.length === 0) return
if (activeReconnections.has(activeWorkflowId)) return
activeReconnections.add(activeWorkflowId)
executionStream.cancel(activeWorkflowId)
const sorted = [...runningEntries].sort((a, b) => {
const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
return bTime - aTime
})
const executionId = sorted[0].executionId!
const otherExecutionIds = new Set(
sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
)
if (otherExecutionIds.size > 0) {
cancelRunningEntries(activeWorkflowId)
}
setCurrentExecutionId(activeWorkflowId, executionId)
setIsExecuting(activeWorkflowId, true)
const workflowEdges = useWorkflowStore.getState().edges
const activeBlocksSet = new Set<string>()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
const executionIdRef = { current: executionId }
const handlers = buildBlockEventHandlers({
workflowId: activeWorkflowId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
consoleMode: 'update',
includeStartConsoleEntry: true,
})
const originalEntries = entries
.filter((e) => e.executionId === executionId)
.map((e) => ({ ...e }))
let cleared = false
let reconnectionComplete = false
let cleanupRan = false
const clearOnce = () => {
if (!cleared) {
cleared = true
clearExecutionEntries(executionId)
}
}
const reconnectWorkflowId = activeWorkflowId
executionStream
.reconnect({
workflowId: reconnectWorkflowId,
executionId,
callbacks: {
onBlockStarted: (data) => {
clearOnce()
handlers.onBlockStarted(data)
},
onBlockCompleted: (data) => {
clearOnce()
handlers.onBlockCompleted(data)
},
onBlockError: (data) => {
clearOnce()
handlers.onBlockError(data)
},
onExecutionCompleted: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
},
onExecutionError: (data) => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionErrorConsole({
workflowId: reconnectWorkflowId,
executionId,
error: data.error,
blockLogs: accumulatedBlockLogs,
})
},
onExecutionCancelled: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionCancelledConsole({
workflowId: reconnectWorkflowId,
executionId,
})
},
},
})
.catch((error) => {
logger.warn('Execution reconnection failed', { executionId, error })
})
.finally(() => {
if (reconnectionComplete || cleanupRan) return
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) return
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole({
workflowId: entry.workflowId,
blockId: entry.blockId,
blockName: entry.blockName,
blockType: entry.blockType,
executionId: entry.executionId,
executionOrder: entry.executionOrder,
isRunning: false,
warning: 'Execution result unavailable — check the logs page',
})
}
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
})
return () => {
cleanupRan = true
executionStream.cancel(reconnectWorkflowId)
activeReconnections.delete(reconnectWorkflowId)
if (cleared && !reconnectionComplete) {
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole(entry)
}
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [activeWorkflowId, hasHydrated])
return {
isExecuting,
isDebugging,

View File
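
The hook above threads a mutable executionIdRef through every event handler and compares it to the id stored per workflow. A simplified, standalone sketch of that stale-execution guard (a plain Map stands in for useExecutionStore; the handler names are illustrative):

const currentExecutionIds = new Map<string, string | null>()

function makeHandlers(workflowId: string) {
  const executionIdRef = { current: '' }

  // True when this closure's run has been cancelled or superseded by a newer one.
  const isStaleExecution = () =>
    Boolean(
      executionIdRef.current &&
        currentExecutionIds.get(workflowId) !== executionIdRef.current
    )

  return {
    // Called once the server assigns an execution id (mirrors onExecutionId).
    onExecutionId(id: string) {
      executionIdRef.current = id
      currentExecutionIds.set(workflowId, id)
    },
    onBlockStarted(blockId: string) {
      if (isStaleExecution()) return // events from an old run become no-ops
      console.log(`block ${blockId} started for ${executionIdRef.current}`)
    },
  }
}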

@@ -473,7 +473,7 @@ function ConnectionsSection({
</div>
)}
{/* Secrets */}
{/* Environment Variables */}
{envVars.length > 0 && (
<div className='mb-[2px] last:mb-0'>
<div
@@ -489,7 +489,7 @@ function ConnectionsSection({
'text-[var(--text-secondary)] group-hover:text-[var(--text-primary)]'
)}
>
Secrets
Environment Variables
</span>
<ChevronDownIcon
className={cn(

View File

@@ -1,17 +0,0 @@
'use client'
import { CredentialsManager } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/credentials/credentials-manager'
interface CredentialsProps {
onOpenChange?: (open: boolean) => void
registerCloseHandler?: (handler: (open: boolean) => void) => void
registerBeforeLeaveHandler?: (handler: (onProceed: () => void) => void) => void
}
export function Credentials(_props: CredentialsProps) {
return (
<div className='h-full min-h-0'>
<CredentialsManager />
</div>
)
}

View File

@@ -134,7 +134,7 @@ function WorkspaceVariableRow({
<Trash />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>Delete secret</Tooltip.Content>
<Tooltip.Content>Delete environment variable</Tooltip.Content>
</Tooltip.Root>
</div>
</div>
@@ -637,7 +637,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
<Trash />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>Delete secret</Tooltip.Content>
<Tooltip.Content>Delete environment variable</Tooltip.Content>
</Tooltip.Root>
</div>
</div>
@@ -811,7 +811,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
filteredWorkspaceEntries.length === 0 &&
(envVars.length > 0 || Object.keys(workspaceVars).length > 0) && (
<div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
No secrets found matching "{searchTerm}"
No environment variables found matching "{searchTerm}"
</div>
)}
</>

View File

@@ -2,7 +2,6 @@ export { ApiKeys } from './api-keys/api-keys'
export { BYOK } from './byok/byok'
export { Copilot } from './copilot/copilot'
export { CredentialSets } from './credential-sets/credential-sets'
export { Credentials } from './credentials/credentials'
export { CustomTools } from './custom-tools/custom-tools'
export { Debug } from './debug/debug'
export { EnvironmentVariables } from './environment/environment'

View File

@@ -20,6 +20,7 @@ import {
import {
Card,
Connections,
FolderCode,
HexSimple,
Key,
SModal,
@@ -44,11 +45,12 @@ import {
BYOK,
Copilot,
CredentialSets,
Credentials,
CustomTools,
Debug,
EnvironmentVariables,
FileUploads,
General,
Integrations,
MCP,
Skills,
Subscription,
@@ -78,7 +80,6 @@ interface SettingsModalProps {
type SettingsSection =
| 'general'
| 'credentials'
| 'environment'
| 'template-profile'
| 'integrations'
@@ -155,10 +156,11 @@ const allNavigationItems: NavigationItem[] = [
requiresHosted: true,
requiresTeam: true,
},
{ id: 'credentials', label: 'Credentials', icon: Connections, section: 'tools' },
{ id: 'integrations', label: 'Integrations', icon: Connections, section: 'tools' },
{ id: 'custom-tools', label: 'Custom Tools', icon: Wrench, section: 'tools' },
{ id: 'skills', label: 'Skills', icon: AgentSkillsIcon, section: 'tools' },
{ id: 'mcp', label: 'MCP Tools', icon: McpIcon, section: 'tools' },
{ id: 'environment', label: 'Environment', icon: FolderCode, section: 'system' },
{ id: 'apikeys', label: 'API Keys', icon: Key, section: 'system' },
{ id: 'workflow-mcp-servers', label: 'MCP Servers', icon: Server, section: 'system' },
{
@@ -254,6 +256,9 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
if (item.id === 'apikeys' && permissionConfig.hideApiKeysTab) {
return false
}
if (item.id === 'environment' && permissionConfig.hideEnvironmentTab) {
return false
}
if (item.id === 'files' && permissionConfig.hideFilesTab) {
return false
}
@@ -319,9 +324,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
if (!isBillingEnabled && (activeSection === 'subscription' || activeSection === 'team')) {
return 'general'
}
if (activeSection === 'environment' || activeSection === 'integrations') {
return 'credentials'
}
return activeSection
}, [activeSection])
@@ -340,7 +342,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
(sectionId: SettingsSection) => {
if (sectionId === effectiveActiveSection) return
if (effectiveActiveSection === 'credentials' && environmentBeforeLeaveHandler.current) {
if (effectiveActiveSection === 'environment' && environmentBeforeLeaveHandler.current) {
environmentBeforeLeaveHandler.current(() => setActiveSection(sectionId))
return
}
@@ -368,11 +370,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
useEffect(() => {
const handleOpenSettings = (event: CustomEvent<{ tab: SettingsSection }>) => {
if (event.detail.tab === 'environment' || event.detail.tab === 'integrations') {
setActiveSection('credentials')
} else {
setActiveSection(event.detail.tab)
}
setActiveSection(event.detail.tab)
onOpenChange(true)
}
@@ -481,19 +479,13 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
const handleDialogOpenChange = (newOpen: boolean) => {
if (
!newOpen &&
effectiveActiveSection === 'credentials' &&
effectiveActiveSection === 'environment' &&
environmentBeforeLeaveHandler.current
) {
environmentBeforeLeaveHandler.current(() => {
if (integrationsCloseHandler.current) {
integrationsCloseHandler.current(newOpen)
} else {
onOpenChange(false)
}
})
environmentBeforeLeaveHandler.current(() => onOpenChange(false))
} else if (
!newOpen &&
effectiveActiveSection === 'credentials' &&
effectiveActiveSection === 'integrations' &&
integrationsCloseHandler.current
) {
integrationsCloseHandler.current(newOpen)
@@ -510,7 +502,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
</VisuallyHidden.Root>
<VisuallyHidden.Root>
<DialogPrimitive.Description>
Configure your workspace settings, credentials, and preferences
Configure your workspace settings, environment variables, integrations, and preferences
</DialogPrimitive.Description>
</VisuallyHidden.Root>
@@ -547,14 +539,18 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
</SModalMainHeader>
<SModalMainBody>
{effectiveActiveSection === 'general' && <General onOpenChange={onOpenChange} />}
{effectiveActiveSection === 'credentials' && (
<Credentials
onOpenChange={onOpenChange}
registerCloseHandler={registerIntegrationsCloseHandler}
{effectiveActiveSection === 'environment' && (
<EnvironmentVariables
registerBeforeLeaveHandler={registerEnvironmentBeforeLeaveHandler}
/>
)}
{effectiveActiveSection === 'template-profile' && <TemplateProfile />}
{effectiveActiveSection === 'integrations' && (
<Integrations
onOpenChange={onOpenChange}
registerCloseHandler={registerIntegrationsCloseHandler}
/>
)}
{effectiveActiveSection === 'credential-sets' && <CredentialSets />}
{effectiveActiveSection === 'access-control' && <AccessControl />}
{effectiveActiveSection === 'apikeys' && <ApiKeys onOpenChange={onOpenChange} />}

View File

@@ -589,6 +589,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
export const scheduleExecution = task({
id: 'schedule-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

View File

@@ -669,6 +669,7 @@ async function executeWebhookJobInternal(
export const webhookExecution = task({
id: 'webhook-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

View File

@@ -197,5 +197,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
export const workflowExecutionTask = task({
id: 'workflow-execution',
machine: 'medium-1x',
run: executeWorkflowJob,
})

View File

@@ -394,6 +394,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Page Property Operations
{ label: 'List Page Properties', id: 'list_page_properties' },
{ label: 'Create Page Property', id: 'create_page_property' },
{ label: 'Delete Page Property', id: 'delete_page_property' },
// Search Operations
{ label: 'Search Content', id: 'search' },
{ label: 'Search in Space', id: 'search_in_space' },
@@ -414,6 +415,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Operations
{ label: 'List Labels', id: 'list_labels' },
{ label: 'Add Label', id: 'add_label' },
{ label: 'Delete Label', id: 'delete_label' },
{ label: 'Get Pages by Label', id: 'get_pages_by_label' },
{ label: 'List Space Labels', id: 'list_space_labels' },
// Space Operations
{ label: 'Get Space', id: 'get_space' },
{ label: 'List Spaces', id: 'list_spaces' },
@@ -485,6 +489,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space',
'get_space',
'list_spaces',
'get_pages_by_label',
'list_space_labels',
],
not: true,
},
@@ -500,6 +506,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels',
'upload_attachment',
'add_label',
'delete_label',
'delete_page_property',
'get_page_children',
'get_page_ancestors',
'list_page_versions',
@@ -527,6 +535,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space',
'get_space',
'list_spaces',
'get_pages_by_label',
'list_space_labels',
],
not: true,
},
@@ -542,6 +552,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels',
'upload_attachment',
'add_label',
'delete_label',
'delete_page_property',
'get_page_children',
'get_page_ancestors',
'list_page_versions',
@@ -566,6 +578,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space',
'create_blogpost',
'list_blogposts_in_space',
'list_space_labels',
],
},
},
@@ -601,6 +614,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
required: true,
condition: { field: 'operation', value: 'create_page_property' },
},
{
id: 'propertyId',
title: 'Property ID',
type: 'short-input',
placeholder: 'Enter property ID to delete',
required: true,
condition: { field: 'operation', value: 'delete_page_property' },
},
{
id: 'title',
title: 'Title',
@@ -694,7 +715,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
type: 'short-input',
placeholder: 'Enter label name',
required: true,
condition: { field: 'operation', value: 'add_label' },
condition: { field: 'operation', value: ['add_label', 'delete_label'] },
},
{
id: 'labelPrefix',
@@ -709,6 +730,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
value: () => 'global',
condition: { field: 'operation', value: 'add_label' },
},
{
id: 'labelId',
title: 'Label ID',
type: 'short-input',
placeholder: 'Enter label ID',
required: true,
condition: { field: 'operation', value: 'get_pages_by_label' },
},
{
id: 'blogPostStatus',
title: 'Status',
@@ -759,6 +788,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions',
'list_page_properties',
'list_labels',
'get_pages_by_label',
'list_space_labels',
],
},
},
@@ -780,6 +811,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions',
'list_page_properties',
'list_labels',
'get_pages_by_label',
'list_space_labels',
],
},
},
@@ -800,6 +833,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Property Tools
'confluence_list_page_properties',
'confluence_create_page_property',
'confluence_delete_page_property',
// Search Tools
'confluence_search',
'confluence_search_in_space',
@@ -820,6 +854,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Tools
'confluence_list_labels',
'confluence_add_label',
'confluence_delete_label',
'confluence_get_pages_by_label',
'confluence_list_space_labels',
// Space Tools
'confluence_get_space',
'confluence_list_spaces',
@@ -852,6 +889,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_page_properties'
case 'create_page_property':
return 'confluence_create_page_property'
case 'delete_page_property':
return 'confluence_delete_page_property'
// Search Operations
case 'search':
return 'confluence_search'
@@ -887,6 +926,12 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_labels'
case 'add_label':
return 'confluence_add_label'
case 'delete_label':
return 'confluence_delete_label'
case 'get_pages_by_label':
return 'confluence_get_pages_by_label'
case 'list_space_labels':
return 'confluence_list_space_labels'
// Space Operations
case 'get_space':
return 'confluence_get_space'
@@ -908,7 +953,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
versionNumber,
propertyKey,
propertyValue,
propertyId,
labelPrefix,
labelId,
blogPostStatus,
purge,
bodyFormat,
@@ -959,7 +1006,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
}
}
// Operations that support cursor pagination
// Operations that support generic cursor pagination.
// get_pages_by_label and list_space_labels have dedicated handlers
// below that pass cursor along with their required params (labelId, spaceId).
const supportsCursor = [
'list_attachments',
'list_spaces',
@@ -996,6 +1045,35 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
}
}
if (operation === 'delete_page_property') {
return {
credential,
pageId: effectivePageId,
operation,
propertyId,
...rest,
}
}
if (operation === 'get_pages_by_label') {
return {
credential,
operation,
labelId,
cursor: cursor || undefined,
...rest,
}
}
if (operation === 'list_space_labels') {
return {
credential,
operation,
cursor: cursor || undefined,
...rest,
}
}
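// Illustrative sketch: with a hypothetical credential id and label id, the
// get_pages_by_label branch above yields a params object like this (any other
// configured fields arrive via ...rest, same as the generic cursor path).
const examplePagesByLabelParams = {
  credential: 'cred_abc123',
  operation: 'get_pages_by_label',
  labelId: 'lbl_42',
  cursor: undefined, // first request; later pages pass the cursor from the prior response
}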
if (operation === 'upload_attachment') {
const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
if (!normalizedFile) {
@@ -1044,7 +1122,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
attachmentComment: { type: 'string', description: 'Comment for the attachment' },
labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
propertyId: { type: 'string', description: 'Property identifier' },
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
bodyFormat: { type: 'string', description: 'Body format for comments' },
@@ -1080,6 +1160,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Results
labels: { type: 'array', description: 'List of labels' },
labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
// Space Results
spaces: { type: 'array', description: 'List of spaces' },
spaceId: { type: 'string', description: 'Space identifier' },

View File

@@ -196,6 +196,8 @@ export interface SubBlockConfig {
type: SubBlockType
mode?: 'basic' | 'advanced' | 'both' | 'trigger' // Default is 'both' if not specified. 'trigger' means only shown in trigger mode
canonicalParamId?: string
/** Controls parameter visibility in agent/tool-input context */
paramVisibility?: 'user-or-llm' | 'user-only' | 'llm-only' | 'hidden'
required?:
| boolean
| {

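// A minimal, untyped sketch of a subblock entry using the new paramVisibility
// field; the id/title/type values here are hypothetical and simply mirror the
// shape block configs use elsewhere in this diff.
const exampleSubBlock = {
  id: 'workflowId',
  title: 'Select Workflow',
  type: 'short-input',
  paramVisibility: 'user-only', // presumably: configured by the user, not offered to the LLM
}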
View File

@@ -205,6 +205,10 @@ export const CREDENTIAL_SET = {
PREFIX: 'credentialSet:',
} as const
export const CREDENTIAL = {
FOREIGN_LABEL: 'Saved by collaborator',
} as const
export function isCredentialSetValue(value: string | null | undefined): boolean {
return typeof value === 'string' && value.startsWith(CREDENTIAL_SET.PREFIX)
}
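// Usage sketch for the helpers above, with hypothetical values.
const fromCredentialSet = isCredentialSetValue('credentialSet:abc123') // true: carries the credentialSet: prefix
const fromPlainCredential = isCredentialSetValue('b1c2d3e4') // false: ordinary credential/account id
// CREDENTIAL.FOREIGN_LABEL ('Saved by collaborator') is the display-name fallback
// that useCredentialName (further down in this diff) shows when only foreign
// credential metadata is available.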

View File

@@ -1,268 +0,0 @@
'use client'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { environmentKeys } from '@/hooks/queries/environment'
import { fetchJson } from '@/hooks/selectors/helpers'
export type WorkspaceCredentialType = 'oauth' | 'env_workspace' | 'env_personal'
export type WorkspaceCredentialRole = 'admin' | 'member'
export type WorkspaceCredentialMemberStatus = 'active' | 'pending' | 'revoked'
export interface WorkspaceCredential {
id: string
workspaceId: string
type: WorkspaceCredentialType
displayName: string
providerId: string | null
accountId: string | null
envKey: string | null
envOwnerUserId: string | null
createdBy: string
createdAt: string
updatedAt: string
role?: WorkspaceCredentialRole
status?: WorkspaceCredentialMemberStatus
}
export interface WorkspaceCredentialMember {
id: string
userId: string
role: WorkspaceCredentialRole
status: WorkspaceCredentialMemberStatus
joinedAt: string | null
invitedBy: string | null
createdAt: string
updatedAt: string
userName: string | null
userEmail: string | null
userImage: string | null
}
interface CredentialListResponse {
credentials?: WorkspaceCredential[]
}
interface CredentialResponse {
credential?: WorkspaceCredential | null
}
interface MembersResponse {
members?: WorkspaceCredentialMember[]
}
export const workspaceCredentialKeys = {
all: ['workspaceCredentials'] as const,
list: (workspaceId?: string, type?: string, providerId?: string) =>
['workspaceCredentials', workspaceId ?? 'none', type ?? 'all', providerId ?? 'all'] as const,
detail: (credentialId?: string) =>
['workspaceCredentials', 'detail', credentialId ?? 'none'] as const,
members: (credentialId?: string) =>
['workspaceCredentials', 'detail', credentialId ?? 'none', 'members'] as const,
}
export function useWorkspaceCredentials(params: {
workspaceId?: string
type?: WorkspaceCredentialType
providerId?: string
enabled?: boolean
}) {
const { workspaceId, type, providerId, enabled = true } = params
return useQuery<WorkspaceCredential[]>({
queryKey: workspaceCredentialKeys.list(workspaceId, type, providerId),
queryFn: async () => {
if (!workspaceId) return []
const data = await fetchJson<CredentialListResponse>('/api/credentials', {
searchParams: {
workspaceId,
type,
providerId,
},
})
return data.credentials ?? []
},
enabled: Boolean(workspaceId) && enabled,
staleTime: 60 * 1000,
})
}
export function useWorkspaceCredential(credentialId?: string, enabled = true) {
return useQuery<WorkspaceCredential | null>({
queryKey: workspaceCredentialKeys.detail(credentialId),
queryFn: async () => {
if (!credentialId) return null
const data = await fetchJson<CredentialResponse>(`/api/credentials/${credentialId}`)
return data.credential ?? null
},
enabled: Boolean(credentialId) && enabled,
staleTime: 60 * 1000,
})
}
export function useCreateWorkspaceCredential() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: {
workspaceId: string
type: WorkspaceCredentialType
displayName?: string
providerId?: string
accountId?: string
envKey?: string
envOwnerUserId?: string
}) => {
const response = await fetch('/api/credentials', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload),
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to create credential')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.list(variables.workspaceId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.all,
})
},
})
}
export function useUpdateWorkspaceCredential() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: {
credentialId: string
displayName?: string
accountId?: string
}) => {
const response = await fetch(`/api/credentials/${payload.credentialId}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
displayName: payload.displayName,
accountId: payload.accountId,
}),
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to update credential')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.all,
})
},
})
}
export function useDeleteWorkspaceCredential() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (credentialId: string) => {
const response = await fetch(`/api/credentials/${credentialId}`, {
method: 'DELETE',
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to delete credential')
}
return response.json()
},
onSuccess: (_data, credentialId) => {
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.detail(credentialId) })
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
queryClient.invalidateQueries({ queryKey: environmentKeys.all })
},
})
}
export function useWorkspaceCredentialMembers(credentialId?: string) {
return useQuery<WorkspaceCredentialMember[]>({
queryKey: workspaceCredentialKeys.members(credentialId),
queryFn: async () => {
if (!credentialId) return []
const data = await fetchJson<MembersResponse>(`/api/credentials/${credentialId}/members`)
return data.members ?? []
},
enabled: Boolean(credentialId),
staleTime: 30 * 1000,
})
}
export function useUpsertWorkspaceCredentialMember() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: {
credentialId: string
userId: string
role: WorkspaceCredentialRole
}) => {
const response = await fetch(`/api/credentials/${payload.credentialId}/members`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
userId: payload.userId,
role: payload.role,
}),
})
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to update credential member')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.members(variables.credentialId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
})
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
},
})
}
export function useRemoveWorkspaceCredentialMember() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (payload: { credentialId: string; userId: string }) => {
const response = await fetch(
`/api/credentials/${payload.credentialId}/members?userId=${encodeURIComponent(payload.userId)}`,
{ method: 'DELETE' }
)
if (!response.ok) {
const data = await response.json()
throw new Error(data.error || 'Failed to remove credential member')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.members(variables.credentialId),
})
queryClient.invalidateQueries({
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
})
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
},
})
}

View File

@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {
const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.
Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.
Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.
Guidelines:
- Use the specific values provided (credential names, channel names, model names)

View File

@@ -169,9 +169,9 @@ export function useConnectOAuthService() {
interface DisconnectServiceParams {
provider: string
providerId?: string
providerId: string
serviceId: string
accountId?: string
accountId: string
}
/**
@@ -182,7 +182,7 @@ export function useDisconnectOAuthService() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ provider, providerId, accountId }: DisconnectServiceParams) => {
mutationFn: async ({ provider, providerId }: DisconnectServiceParams) => {
const response = await fetch('/api/auth/oauth/disconnect', {
method: 'POST',
headers: {
@@ -191,7 +191,6 @@ export function useDisconnectOAuthService() {
body: JSON.stringify({
provider,
providerId,
accountId,
}),
})
@@ -213,8 +212,7 @@ export function useDisconnectOAuthService() {
oauthConnectionsKeys.connections(),
previousServices.map((svc) => {
if (svc.id === serviceId) {
const updatedAccounts =
accountId && svc.accounts ? svc.accounts.filter((acc) => acc.id !== accountId) : []
const updatedAccounts = svc.accounts?.filter((acc) => acc.id !== accountId) || []
return {
...svc,
accounts: updatedAccounts,

View File

@@ -1,6 +1,6 @@
import { useQuery } from '@tanstack/react-query'
import type { Credential } from '@/lib/oauth'
import { CREDENTIAL_SET } from '@/executor/constants'
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSetDetail } from '@/hooks/queries/credential-sets'
import { fetchJson } from '@/hooks/selectors/helpers'
@@ -13,34 +13,15 @@ interface CredentialDetailResponse {
}
export const oauthCredentialKeys = {
list: (providerId?: string, workspaceId?: string, workflowId?: string) =>
[
'oauthCredentials',
providerId ?? 'none',
workspaceId ?? 'none',
workflowId ?? 'none',
] as const,
list: (providerId?: string) => ['oauthCredentials', providerId ?? 'none'] as const,
detail: (credentialId?: string, workflowId?: string) =>
['oauthCredentialDetail', credentialId ?? 'none', workflowId ?? 'none'] as const,
}
interface FetchOAuthCredentialsParams {
providerId: string
workspaceId?: string
workflowId?: string
}
export async function fetchOAuthCredentials(
params: FetchOAuthCredentialsParams
): Promise<Credential[]> {
const { providerId, workspaceId, workflowId } = params
export async function fetchOAuthCredentials(providerId: string): Promise<Credential[]> {
if (!providerId) return []
const data = await fetchJson<CredentialListResponse>('/api/auth/oauth/credentials', {
searchParams: {
provider: providerId,
workspaceId,
workflowId,
},
searchParams: { provider: providerId },
})
return data.credentials ?? []
}
@@ -59,44 +40,10 @@ export async function fetchOAuthCredentialDetail(
return data.credentials ?? []
}
interface UseOAuthCredentialsOptions {
enabled?: boolean
workspaceId?: string
workflowId?: string
}
function resolveOptions(
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
): Required<UseOAuthCredentialsOptions> {
if (typeof enabledOrOptions === 'boolean') {
return {
enabled: enabledOrOptions,
workspaceId: '',
workflowId: '',
}
}
return {
enabled: enabledOrOptions?.enabled ?? true,
workspaceId: enabledOrOptions?.workspaceId ?? '',
workflowId: enabledOrOptions?.workflowId ?? '',
}
}
export function useOAuthCredentials(
providerId?: string,
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
) {
const { enabled, workspaceId, workflowId } = resolveOptions(enabledOrOptions)
export function useOAuthCredentials(providerId?: string, enabled = true) {
return useQuery<Credential[]>({
queryKey: oauthCredentialKeys.list(providerId, workspaceId, workflowId),
queryFn: () =>
fetchOAuthCredentials({
providerId: providerId ?? '',
workspaceId: workspaceId || undefined,
workflowId: workflowId || undefined,
}),
queryKey: oauthCredentialKeys.list(providerId),
queryFn: () => fetchOAuthCredentials(providerId ?? ''),
enabled: Boolean(providerId) && enabled,
staleTime: 60 * 1000,
})
@@ -115,12 +62,7 @@ export function useOAuthCredentialDetail(
})
}
export function useCredentialName(
credentialId?: string,
providerId?: string,
workflowId?: string,
workspaceId?: string
) {
export function useCredentialName(credentialId?: string, providerId?: string, workflowId?: string) {
// Check if this is a credential set value
const isCredentialSet = credentialId?.startsWith(CREDENTIAL_SET.PREFIX) ?? false
const credentialSetId = isCredentialSet
@@ -135,11 +77,7 @@ export function useCredentialName(
const { data: credentials = [], isFetching: credentialsLoading } = useOAuthCredentials(
providerId,
{
enabled: Boolean(providerId) && !isCredentialSet,
workspaceId,
workflowId,
}
Boolean(providerId) && !isCredentialSet
)
const selectedCredential = credentials.find((cred) => cred.id === credentialId)
@@ -154,18 +92,18 @@ export function useCredentialName(
shouldFetchDetail
)
const detailCredential = foreignCredentials[0]
const hasForeignMeta = foreignCredentials.length > 0
const isForeignCredentialSet = isCredentialSet && !credentialSetData && !credentialSetLoading
const displayName =
credentialSetData?.name ?? selectedCredential?.name ?? detailCredential?.name ?? null
credentialSetData?.name ??
selectedCredential?.name ??
(hasForeignMeta ? CREDENTIAL.FOREIGN_LABEL : null) ??
(isForeignCredentialSet ? CREDENTIAL.FOREIGN_LABEL : null)
return {
displayName,
isLoading:
credentialsLoading ||
foreignLoading ||
(isCredentialSet && credentialSetLoading && !credentialSetData),
isLoading: credentialsLoading || foreignLoading || (isCredentialSet && credentialSetLoading),
hasForeignMeta,
}
}
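// Worked sketch of the fallback chain above with hypothetical inputs: no matching
// credential set, no credential in the user's own list, but foreign metadata found.
const credentialSetName: string | undefined = undefined
const ownCredentialName: string | undefined = undefined
const exampleHasForeignMeta = true
const exampleDisplayName =
  credentialSetName ?? ownCredentialName ?? (exampleHasForeignMeta ? CREDENTIAL.FOREIGN_LABEL : null)
// exampleDisplayName === 'Saved by collaborator'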

View File

@@ -642,6 +642,10 @@ export function useDeployChildWorkflow() {
queryClient.invalidateQueries({
queryKey: workflowKeys.deploymentStatus(variables.workflowId),
})
// Invalidate workflow state so tool input mappings refresh
queryClient.invalidateQueries({
queryKey: workflowKeys.state(variables.workflowId),
})
// Also invalidate deployment queries
queryClient.invalidateQueries({
queryKey: deploymentKeys.info(variables.workflowId),

View File

@@ -1,4 +1,4 @@
import { useCallback, useRef } from 'react'
import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import type {
BlockCompletedData,
@@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types'
const logger = createLogger('useExecutionStream')
/**
* Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
* These should be treated as clean disconnects, not execution errors.
*/
function isClientDisconnectError(error: any): boolean {
if (error.name === 'AbortError') return true
const msg = (error.message ?? '').toLowerCase()
return (
msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
)
}
/**
* Processes SSE events from a response body and invokes appropriate callbacks.
*/
@@ -121,6 +133,7 @@ export interface ExecuteStreamOptions {
parallels?: Record<string, any>
}
stopAfterBlockId?: string
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks
}
@@ -129,30 +142,40 @@ export interface ExecuteFromBlockOptions {
startBlockId: string
sourceSnapshot: SerializableExecutionState
input?: any
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks
}
export interface ReconnectStreamOptions {
workflowId: string
executionId: string
fromEventId?: number
callbacks?: ExecutionStreamCallbacks
}
/**
* Module-level map shared across all hook instances.
* Ensures ANY instance can cancel streams started by ANY other instance,
* which is critical for SPA navigation where the original hook instance unmounts
* but the SSE stream must be cancellable from the new instance.
*/
const sharedAbortControllers = new Map<string, AbortController>()
/**
* Hook for executing workflows via server-side SSE streaming.
* Supports concurrent executions via per-workflow AbortController maps.
*/
export function useExecutionStream() {
const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
new Map()
)
const execute = useCallback(async (options: ExecuteStreamOptions) => {
const { workflowId, callbacks = {}, ...payload } = options
const { workflowId, callbacks = {}, onExecutionId, ...payload } = options
const existing = abortControllersRef.current.get(workflowId)
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
abortControllersRef.current.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -177,42 +200,48 @@ export function useExecutionStream() {
throw new Error('No response body')
}
const executionId = response.headers.get('X-Execution-Id')
if (executionId) {
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
const serverExecutionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) {
onExecutionId?.(serverExecutionId)
}
const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Execution')
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Execution stream cancelled')
callbacks.onExecutionCancelled?.({ duration: 0 })
} else {
logger.error('Execution stream error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
if (isClientDisconnectError(error)) {
logger.info('Execution stream disconnected (page unload or abort)')
return
}
logger.error('Execution stream error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
throw error
} finally {
abortControllersRef.current.delete(workflowId)
currentExecutionsRef.current.delete(workflowId)
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])
const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
const {
workflowId,
startBlockId,
sourceSnapshot,
input,
onExecutionId,
callbacks = {},
} = options
const existing = abortControllersRef.current.get(workflowId)
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
abortControllersRef.current.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -246,64 +275,80 @@ export function useExecutionStream() {
throw new Error('No response body')
}
const executionId = response.headers.get('X-Execution-Id')
if (executionId) {
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
const serverExecutionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) {
onExecutionId?.(serverExecutionId)
}
const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Run-from-block')
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Run-from-block execution cancelled')
callbacks.onExecutionCancelled?.({ duration: 0 })
} else {
logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
if (isClientDisconnectError(error)) {
logger.info('Run-from-block stream disconnected (page unload or abort)')
return
}
logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
throw error
} finally {
abortControllersRef.current.delete(workflowId)
currentExecutionsRef.current.delete(workflowId)
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])
const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
{ signal: abortController.signal }
)
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
if (!response.body) throw new Error('No response body')
await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
} catch (error: any) {
if (isClientDisconnectError(error)) return
logger.error('Reconnection stream error:', error)
throw error
} finally {
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])
const cancel = useCallback((workflowId?: string) => {
if (workflowId) {
const execution = currentExecutionsRef.current.get(workflowId)
if (execution) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}
const controller = abortControllersRef.current.get(workflowId)
const controller = sharedAbortControllers.get(workflowId)
if (controller) {
controller.abort()
abortControllersRef.current.delete(workflowId)
sharedAbortControllers.delete(workflowId)
}
currentExecutionsRef.current.delete(workflowId)
} else {
for (const [, execution] of currentExecutionsRef.current) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}
for (const [, controller] of abortControllersRef.current) {
for (const [, controller] of sharedAbortControllers) {
controller.abort()
}
abortControllersRef.current.clear()
currentExecutionsRef.current.clear()
sharedAbortControllers.clear()
}
}, [])
return {
execute,
executeFromBlock,
reconnect,
cancel,
}
}
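// Usage sketch for the hook above; the workflow id is hypothetical and any
// execute options not shown in this hunk are omitted.
function useRunExample() {
  const { execute, cancel } = useExecutionStream()
  const run = async () => {
    await execute({
      workflowId: 'wf_123',
      onExecutionId: (id) => logger.info(`Server assigned execution id ${id}`),
      callbacks: {
        onExecutionError: ({ error }) => logger.error('Run failed:', error),
        onExecutionCancelled: () => logger.info('Run cancelled'),
      },
    })
  }
  // Because abort controllers live in the module-level sharedAbortControllers map,
  // cancel('wf_123') also works from any other component using this hook.
  return { run, cancel }
}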

View File

@@ -14,7 +14,7 @@ import {
oneTimeToken,
organization,
} from 'better-auth/plugins'
import { and, eq, inArray, sql } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { headers } from 'next/headers'
import Stripe from 'stripe'
import {
@@ -150,6 +150,16 @@ export const auth = betterAuth({
account: {
create: {
before: async (account) => {
// Only one credential per (userId, providerId) is allowed
// If the user reconnects (even with a different external account), delete the old one
// and let Better Auth create the new one (returning false breaks account linking flow)
const existing = await db.query.account.findFirst({
where: and(
eq(schema.account.userId, account.userId),
eq(schema.account.providerId, account.providerId)
),
})
const modifiedAccount = { ...account }
if (account.providerId === 'salesforce' && account.accessToken) {
@@ -179,148 +189,32 @@ export const auth = betterAuth({
}
}
// Handle Microsoft refresh token expiry
if (isMicrosoftProvider(account.providerId)) {
modifiedAccount.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
}
if (existing) {
// Delete the existing account so Better Auth can create the new one
// This allows account linking/re-authorization to succeed
await db.delete(schema.account).where(eq(schema.account.id, existing.id))
// Preserve the existing account ID so references (like workspace notifications) continue to work
modifiedAccount.id = existing.id
logger.info('[account.create.before] Deleted existing account for re-authorization', {
userId: account.userId,
providerId: account.providerId,
existingAccountId: existing.id,
preservingId: true,
})
// Sync webhooks for credential sets after reconnecting (in after hook)
}
return { data: modifiedAccount }
},
after: async (account) => {
/**
* Migrate credentials from stale account rows to the newly created one.
*
* Each getUserInfo appends a random UUID to the stable external ID so
* that Better Auth never blocks cross-user connections. This means
* re-connecting the same external identity creates a new row. We detect
* the stale siblings here by comparing the stable prefix (everything
* before the trailing UUID), migrate any credential FKs to the new row,
* then delete the stale rows.
*/
try {
const UUID_SUFFIX_RE = /-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/
const stablePrefix = account.accountId.replace(UUID_SUFFIX_RE, '')
if (stablePrefix && stablePrefix !== account.accountId) {
const siblings = await db
.select({ id: schema.account.id, accountId: schema.account.accountId })
.from(schema.account)
.where(
and(
eq(schema.account.userId, account.userId),
eq(schema.account.providerId, account.providerId),
sql`${schema.account.id} != ${account.id}`
)
)
const staleRows = siblings.filter(
(row) => row.accountId.replace(UUID_SUFFIX_RE, '') === stablePrefix
)
if (staleRows.length > 0) {
const staleIds = staleRows.map((row) => row.id)
await db
.update(schema.credential)
.set({ accountId: account.id })
.where(inArray(schema.credential.accountId, staleIds))
await db.delete(schema.account).where(inArray(schema.account.id, staleIds))
logger.info('[account.create.after] Migrated credentials from stale accounts', {
userId: account.userId,
providerId: account.providerId,
newAccountId: account.id,
migratedFrom: staleIds,
})
}
}
} catch (error) {
logger.error('[account.create.after] Failed to clean up stale accounts', {
userId: account.userId,
providerId: account.providerId,
error,
})
}
/**
* If a pending credential draft exists for this (userId, providerId),
* create the credential now with the user's chosen display name.
* This is deterministic — the account row is guaranteed to exist.
*/
try {
const [draft] = await db
.select()
.from(schema.pendingCredentialDraft)
.where(
and(
eq(schema.pendingCredentialDraft.userId, account.userId),
eq(schema.pendingCredentialDraft.providerId, account.providerId),
sql`${schema.pendingCredentialDraft.expiresAt} > NOW()`
)
)
.limit(1)
if (draft) {
const credentialId = crypto.randomUUID()
const now = new Date()
try {
await db.insert(schema.credential).values({
id: credentialId,
workspaceId: draft.workspaceId,
type: 'oauth',
displayName: draft.displayName,
providerId: account.providerId,
accountId: account.id,
createdBy: account.userId,
createdAt: now,
updatedAt: now,
})
await db.insert(schema.credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: account.userId,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: account.userId,
createdAt: now,
updatedAt: now,
})
logger.info('[account.create.after] Created credential from draft', {
credentialId,
displayName: draft.displayName,
providerId: account.providerId,
accountId: account.id,
})
} catch (insertError: unknown) {
const code =
insertError && typeof insertError === 'object' && 'code' in insertError
? (insertError as { code: string }).code
: undefined
if (code !== '23505') {
throw insertError
}
logger.info('[account.create.after] Credential already exists, skipping draft', {
providerId: account.providerId,
accountId: account.id,
})
}
await db
.delete(schema.pendingCredentialDraft)
.where(eq(schema.pendingCredentialDraft.id, draft.id))
}
} catch (error) {
logger.error('[account.create.after] Failed to create credential from draft', {
userId: account.userId,
providerId: account.providerId,
error,
})
}
try {
const { ensureUserStatsExists } = await import('@/lib/billing/core/usage')
await ensureUserStatsExists(account.userId)
@@ -1593,7 +1487,7 @@ export const auth = betterAuth({
})
return {
id: `${(data.user_id || data.hub_id).toString()}-${crypto.randomUUID()}`,
id: `${data.user_id || data.hub_id.toString()}-${crypto.randomUUID()}`,
name: data.user || 'HubSpot User',
email: data.user || `hubspot-${data.hub_id}@hubspot.com`,
emailVerified: true,
@@ -1647,7 +1541,7 @@ export const auth = betterAuth({
const data = await response.json()
return {
id: `${(data.user_id || data.sub).toString()}-${crypto.randomUUID()}`,
id: `${data.user_id || data.sub}-${crypto.randomUUID()}`,
name: data.name || 'Salesforce User',
email: data.email || `salesforce-${data.user_id}@salesforce.com`,
emailVerified: data.email_verified || true,
@@ -1706,7 +1600,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: `${profile.data.id.toString()}-${crypto.randomUUID()}`,
id: `${profile.data.id}-${crypto.randomUUID()}`,
name: profile.data.name || 'X User',
email: `${profile.data.username}@x.com`,
image: profile.data.profile_image_url,
@@ -1786,7 +1680,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
id: `${profile.account_id}-${crypto.randomUUID()}`,
name: profile.name || profile.display_name || 'Confluence User',
email: profile.email || `${profile.account_id}@atlassian.com`,
image: profile.picture || undefined,
@@ -1897,7 +1791,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
id: `${profile.account_id}-${crypto.randomUUID()}`,
name: profile.name || profile.display_name || 'Jira User',
email: profile.email || `${profile.account_id}@atlassian.com`,
image: profile.picture || undefined,
@@ -1947,7 +1841,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: `${data.id.toString()}-${crypto.randomUUID()}`,
id: `${data.id}-${crypto.randomUUID()}`,
name: data.email ? data.email.split('@')[0] : 'Airtable User',
email: data.email || `${data.id}@airtable.user`,
emailVerified: !!data.email,
@@ -1996,7 +1890,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: `${(profile.bot?.owner?.user?.id || profile.id).toString()}-${crypto.randomUUID()}`,
id: `${profile.bot?.owner?.user?.id || profile.id}-${crypto.randomUUID()}`,
name: profile.name || profile.bot?.owner?.user?.name || 'Notion User',
email: profile.person?.email || `${profile.id}@notion.user`,
emailVerified: !!profile.person?.email,
@@ -2063,7 +1957,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: `${data.id.toString()}-${crypto.randomUUID()}`,
id: `${data.id}-${crypto.randomUUID()}`,
name: data.name || 'Reddit User',
email: `${data.name}@reddit.user`,
image: data.icon_img || undefined,
@@ -2135,7 +2029,7 @@ export const auth = betterAuth({
const viewer = data.viewer
return {
id: `${viewer.id.toString()}-${crypto.randomUUID()}`,
id: `${viewer.id}-${crypto.randomUUID()}`,
email: viewer.email,
name: viewer.name,
emailVerified: true,
@@ -2198,7 +2092,7 @@ export const auth = betterAuth({
const data = await response.json()
return {
id: `${data.account_id.toString()}-${crypto.randomUUID()}`,
id: `${data.account_id}-${crypto.randomUUID()}`,
email: data.email,
name: data.name?.display_name || data.email,
emailVerified: data.email_verified || false,
@@ -2249,7 +2143,7 @@ export const auth = betterAuth({
const now = new Date()
return {
id: `${profile.gid.toString()}-${crypto.randomUUID()}`,
id: `${profile.gid}-${crypto.randomUUID()}`,
name: profile.name || 'Asana User',
email: profile.email || `${profile.gid}@asana.user`,
image: profile.photo?.image_128x128 || undefined,
@@ -2484,7 +2378,7 @@ export const auth = betterAuth({
const profile = await response.json()
return {
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
id: `${profile.id}-${crypto.randomUUID()}`,
name:
`${profile.first_name || ''} ${profile.last_name || ''}`.trim() || 'Zoom User',
email: profile.email || `${profile.id}@zoom.user`,
@@ -2551,7 +2445,7 @@ export const auth = betterAuth({
const profile = await response.json()
return {
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
id: `${profile.id}-${crypto.randomUUID()}`,
name: profile.display_name || 'Spotify User',
email: profile.email || `${profile.id}@spotify.user`,
emailVerified: true,

View File

@@ -1,6 +1,6 @@
import { db } from '@sim/db'
import { account, credential, credentialMember, workflow as workflowTable } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { account, workflow as workflowTable } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -12,14 +12,17 @@ export interface CredentialAccessResult {
requesterUserId?: string
credentialOwnerUserId?: string
workspaceId?: string
resolvedCredentialId?: string
}
/**
* Centralizes auth + credential membership checks for OAuth usage.
* - Workspace-scoped credential IDs enforce active credential_member access.
* - Legacy account IDs are resolved to workspace-scoped credentials when workflowId is provided.
* - Direct legacy account-ID access without workflowId is restricted to account owners only.
* Centralizes auth + collaboration rules for credential use.
* - Uses checkSessionOrInternalAuth to authenticate the caller
* - Fetches credential owner
* - Authorization rules:
* - session: allow if requester owns the credential; otherwise require workflowId and
* verify BOTH requester and owner have access to the workflow's workspace
* - internal_jwt: require workflowId (by default) and verify credential owner has access to the
* workflow's workspace (requester identity is the system/workflow)
*/
export async function authorizeCredentialUse(
request: NextRequest,
@@ -34,173 +37,71 @@ export async function authorizeCredentialUse(
return { ok: false, error: auth.error || 'Authentication required' }
}
const [workflowContext] = workflowId
? await db
.select({ workspaceId: workflowTable.workspaceId })
.from(workflowTable)
.where(eq(workflowTable.id, workflowId))
.limit(1)
: [null]
if (workflowId && (!workflowContext || !workflowContext.workspaceId)) {
return { ok: false, error: 'Workflow not found' }
}
const [platformCredential] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
type: credential.type,
accountId: credential.accountId,
})
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (platformCredential) {
if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
return { ok: false, error: 'Unsupported credential type for OAuth access' }
}
if (workflowContext && workflowContext.workspaceId !== platformCredential.workspaceId) {
return { ok: false, error: 'Credential is not accessible from this workflow workspace' }
}
const [accountRow] = await db
.select({ userId: account.userId })
.from(account)
.where(eq(account.id, platformCredential.accountId))
.limit(1)
if (!accountRow) {
return { ok: false, error: 'Credential account not found' }
}
const requesterPerm =
auth.authType === 'internal_jwt'
? null
: await getUserEntityPermissions(auth.userId, 'workspace', platformCredential.workspaceId)
if (auth.authType !== 'internal_jwt') {
const [membership] = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, platformCredential.id),
eq(credentialMember.userId, auth.userId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
if (!membership || requesterPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
}
const ownerPerm = await getUserEntityPermissions(
accountRow.userId,
'workspace',
platformCredential.workspaceId
)
if (ownerPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId: accountRow.userId,
workspaceId: platformCredential.workspaceId,
resolvedCredentialId: platformCredential.accountId,
}
}
if (workflowContext?.workspaceId) {
const [workspaceCredential] = await db
.select({
id: credential.id,
workspaceId: credential.workspaceId,
accountId: credential.accountId,
})
.from(credential)
.where(
and(
eq(credential.type, 'oauth'),
eq(credential.workspaceId, workflowContext.workspaceId),
eq(credential.accountId, credentialId)
)
)
.limit(1)
if (!workspaceCredential?.accountId) {
return { ok: false, error: 'Credential not found' }
}
const [accountRow] = await db
.select({ userId: account.userId })
.from(account)
.where(eq(account.id, workspaceCredential.accountId))
.limit(1)
if (!accountRow) {
return { ok: false, error: 'Credential account not found' }
}
if (auth.authType !== 'internal_jwt') {
const [membership] = await db
.select({ id: credentialMember.id })
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, workspaceCredential.id),
eq(credentialMember.userId, auth.userId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
if (!membership) {
return { ok: false, error: 'Unauthorized' }
}
}
const ownerPerm = await getUserEntityPermissions(
accountRow.userId,
'workspace',
workflowContext.workspaceId
)
if (ownerPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId: accountRow.userId,
workspaceId: workflowContext.workspaceId,
resolvedCredentialId: workspaceCredential.accountId,
}
}
const [legacyAccount] = await db
// Look up the credential owner
const [credRow] = await db
.select({ userId: account.userId })
.from(account)
.where(eq(account.id, credentialId))
.limit(1)
if (!legacyAccount) {
if (!credRow) {
return { ok: false, error: 'Credential not found' }
}
if (auth.authType === 'internal_jwt') {
const credentialOwnerUserId = credRow.userId
// If requester owns the credential, allow immediately
if (auth.authType !== 'internal_jwt' && auth.userId === credentialOwnerUserId) {
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId,
}
}
// For collaboration paths, workflowId is required to scope to a workspace
if (!workflowId) {
return { ok: false, error: 'workflowId is required' }
}
if (auth.userId !== legacyAccount.userId) {
const [wf] = await db
.select({ workspaceId: workflowTable.workspaceId })
.from(workflowTable)
.where(eq(workflowTable.id, workflowId))
.limit(1)
if (!wf || !wf.workspaceId) {
return { ok: false, error: 'Workflow not found' }
}
if (auth.authType === 'internal_jwt') {
// Internal calls: verify credential owner belongs to the workflow's workspace
const ownerPerm = await getUserEntityPermissions(
credentialOwnerUserId,
'workspace',
wf.workspaceId
)
if (ownerPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
return {
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId,
workspaceId: wf.workspaceId,
}
}
// Session: verify BOTH requester and owner belong to the workflow's workspace
const requesterPerm = await getUserEntityPermissions(auth.userId, 'workspace', wf.workspaceId)
const ownerPerm = await getUserEntityPermissions(
credentialOwnerUserId,
'workspace',
wf.workspaceId
)
if (requesterPerm === null || ownerPerm === null) {
return { ok: false, error: 'Unauthorized' }
}
@@ -208,7 +109,7 @@ export async function authorizeCredentialUse(
ok: true,
authType: auth.authType as CredentialAccessResult['authType'],
requesterUserId: auth.userId,
credentialOwnerUserId: legacyAccount.userId,
resolvedCredentialId: credentialId,
credentialOwnerUserId,
workspaceId: wf.workspaceId,
}
}
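// Usage sketch for an API route; the exact shape of the second argument is not
// visible in this hunk, so the { credentialId, workflowId } options object is an
// assumption based on how the function body reads those values.
async function exampleCredentialRoute(
  request: NextRequest,
  credentialId: string,
  workflowId?: string
) {
  const access = await authorizeCredentialUse(request, { credentialId, workflowId })
  if (!access.ok) {
    return new Response(access.error ?? 'Unauthorized', { status: 403 })
  }
  // access.credentialOwnerUserId identifies whose OAuth token to load, and
  // access.workspaceId records the workspace that scoped the collaboration check.
  return Response.json({
    ownerUserId: access.credentialOwnerUserId,
    workspaceId: access.workspaceId,
  })
}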

View File

@@ -20,6 +20,8 @@ export interface BuildPayloadParams {
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
commands?: string[]
chatId?: string
conversationId?: string
prefetch?: boolean
implicitFeedback?: string
}
@@ -64,6 +66,10 @@ export async function buildCopilotRequestPayload(
fileAttachments,
commands,
chatId,
conversationId,
prefetch,
conversationHistory,
implicitFeedback,
} = params
const selectedModel = options.selectedModel
@@ -154,6 +160,12 @@ export async function buildCopilotRequestPayload(
version: SIM_AGENT_VERSION,
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
...(chatId ? { chatId } : {}),
...(conversationId ? { conversationId } : {}),
...(Array.isArray(conversationHistory) && conversationHistory.length > 0
? { conversationHistory }
: {}),
...(typeof prefetch === 'boolean' ? { prefetch } : {}),
...(implicitFeedback ? { implicitFeedback } : {}),
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
...(integrationTools.length > 0 ? { integrationTools } : {}),
...(credentials ? { credentials } : {}),

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { customTools, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { and, desc, eq, isNull, or } from 'drizzle-orm'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import type {
ExecutionContext,
@@ -12,6 +12,7 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
import { env } from '@/lib/core/config/env'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
import { getTool, resolveToolId } from '@/tools/utils'
import {
executeCheckDeploymentStatus,
@@ -76,6 +77,247 @@ import {
const logger = createLogger('CopilotToolExecutor')
type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'
interface ManageCustomToolSchema {
type: 'function'
function: {
name: string
description?: string
parameters: Record<string, unknown>
}
}
interface ManageCustomToolParams {
operation?: string
toolId?: string
schema?: ManageCustomToolSchema
code?: string
title?: string
workspaceId?: string
}
async function executeManageCustomTool(
rawParams: Record<string, unknown>,
context: ExecutionContext
): Promise<ToolCallResult> {
const params = rawParams as ManageCustomToolParams
const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
const workspaceId = params.workspaceId || context.workspaceId
if (!operation) {
return { success: false, error: "Missing required 'operation' argument" }
}
try {
if (operation === 'list') {
const toolsForUser = workspaceId
? await db
.select()
.from(customTools)
.where(
or(
eq(customTools.workspaceId, workspaceId),
and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
)
)
.orderBy(desc(customTools.createdAt))
: await db
.select()
.from(customTools)
.where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
.orderBy(desc(customTools.createdAt))
return {
success: true,
output: {
success: true,
operation,
tools: toolsForUser,
count: toolsForUser.length,
},
}
}
if (operation === 'add') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'add'",
}
}
if (!params.schema || !params.code) {
return {
success: false,
error: "Both 'schema' and 'code' are required for operation 'add'",
}
}
const title = params.title || params.schema.function?.name
if (!title) {
return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
}
const resultTools = await upsertCustomTools({
tools: [
{
title,
schema: params.schema,
code: params.code,
},
],
workspaceId,
userId: context.userId,
})
const created = resultTools.find((tool) => tool.title === title)
return {
success: true,
output: {
success: true,
operation,
toolId: created?.id,
title,
message: `Created custom tool "${title}"`,
},
}
}
if (operation === 'edit') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'edit'",
}
}
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'edit'" }
}
if (!params.schema && !params.code) {
return {
success: false,
error: "At least one of 'schema' or 'code' is required for operation 'edit'",
}
}
const workspaceTool = await db
.select()
.from(customTools)
.where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
.limit(1)
const legacyTool =
workspaceTool.length === 0
? await db
.select()
.from(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.limit(1)
: []
const existing = workspaceTool[0] || legacyTool[0]
if (!existing) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
const mergedCode = params.code || existing.code
const title = params.title || mergedSchema.function?.name || existing.title
await upsertCustomTools({
tools: [
{
id: params.toolId,
title,
schema: mergedSchema,
code: mergedCode,
},
],
workspaceId,
userId: context.userId,
})
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
title,
message: `Updated custom tool "${title}"`,
},
}
}
if (operation === 'delete') {
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'delete'" }
}
const workspaceDelete =
workspaceId != null
? await db
.delete(customTools)
.where(
and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
)
.returning({ id: customTools.id })
: []
const legacyDelete =
workspaceDelete.length === 0
? await db
.delete(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.returning({ id: customTools.id })
: []
const deleted = workspaceDelete[0] || legacyDelete[0]
if (!deleted) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
message: 'Deleted custom tool',
},
}
}
return {
success: false,
error: `Unsupported operation for manage_custom_tool: ${operation}`,
}
} catch (error) {
logger.error('manage_custom_tool execution failed', {
operation,
workspaceId,
userId: context.userId,
error: error instanceof Error ? error.message : String(error),
})
return {
success: false,
error: error instanceof Error ? error.message : 'Failed to manage custom tool',
}
}
}
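// Sketch of a copilot 'add' call into the handler above; the tool schema and code
// are hypothetical, and the context is reduced to the fields this function reads
// (userId, workspaceId).
const exampleManageCustomToolArgs: Record<string, unknown> = {
  operation: 'add',
  title: 'slugify',
  schema: {
    type: 'function',
    function: {
      name: 'slugify',
      description: 'Convert a string into a URL-friendly slug',
      parameters: {
        type: 'object',
        properties: { input: { type: 'string' } },
        required: ['input'],
      },
    },
  },
  code: "return input.toLowerCase().trim().replace(/\\s+/g, '-')",
}
// executeManageCustomTool(exampleManageCustomToolArgs, context) upserts the tool via
// upsertCustomTools and resolves to roughly
// { success: true, output: { operation: 'add', toolId, title, message } }.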
const SERVER_TOOLS = new Set<string>([
'get_blocks_and_tools',
'get_blocks_metadata',
@@ -161,6 +403,19 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
}
}
},
oauth_request_access: async (p, _c) => {
const providerName = (p.providerName || p.provider_name || 'the provider') as string
return {
success: true,
output: {
success: true,
status: 'requested',
providerName,
message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
},
}
},
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
}
/**

View File

@@ -1,62 +0,0 @@
import { db } from '@sim/db'
import { credential, credentialMember } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
type ActiveCredentialMember = typeof credentialMember.$inferSelect
type CredentialRecord = typeof credential.$inferSelect
export interface CredentialActorContext {
credential: CredentialRecord | null
member: ActiveCredentialMember | null
hasWorkspaceAccess: boolean
canWriteWorkspace: boolean
isAdmin: boolean
}
/**
* Resolves user access context for a credential.
*/
export async function getCredentialActorContext(
credentialId: string,
userId: string
): Promise<CredentialActorContext> {
const [credentialRow] = await db
.select()
.from(credential)
.where(eq(credential.id, credentialId))
.limit(1)
if (!credentialRow) {
return {
credential: null,
member: null,
hasWorkspaceAccess: false,
canWriteWorkspace: false,
isAdmin: false,
}
}
const workspaceAccess = await checkWorkspaceAccess(credentialRow.workspaceId, userId)
const [memberRow] = await db
.select()
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
eq(credentialMember.userId, userId),
eq(credentialMember.status, 'active')
)
)
.limit(1)
const isAdmin = memberRow?.role === 'admin'
return {
credential: credentialRow,
member: memberRow ?? null,
hasWorkspaceAccess: workspaceAccess.hasAccess,
canWriteWorkspace: workspaceAccess.canWrite,
isAdmin,
}
}

View File

@@ -1,77 +0,0 @@
'use client'
export const PENDING_OAUTH_CREDENTIAL_DRAFT_KEY = 'sim.pending-oauth-credential-draft'
export const PENDING_CREDENTIAL_CREATE_REQUEST_KEY = 'sim.pending-credential-create-request'
export interface PendingOAuthCredentialDraft {
workspaceId: string
providerId: string
displayName: string
existingCredentialIds: string[]
existingAccountIds: string[]
requestedAt: number
}
interface PendingOAuthCredentialCreateRequest {
workspaceId: string
type: 'oauth'
providerId: string
displayName: string
serviceId: string
requiredScopes: string[]
requestedAt: number
}
interface PendingSecretCredentialCreateRequest {
workspaceId: string
type: 'env_personal' | 'env_workspace'
envKey?: string
requestedAt: number
}
export type PendingCredentialCreateRequest =
| PendingOAuthCredentialCreateRequest
| PendingSecretCredentialCreateRequest
function parseJson<T>(raw: string | null): T | null {
if (!raw) return null
try {
return JSON.parse(raw) as T
} catch {
return null
}
}
export function readPendingOAuthCredentialDraft(): PendingOAuthCredentialDraft | null {
if (typeof window === 'undefined') return null
return parseJson<PendingOAuthCredentialDraft>(
window.sessionStorage.getItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
)
}
export function writePendingOAuthCredentialDraft(payload: PendingOAuthCredentialDraft) {
if (typeof window === 'undefined') return
window.sessionStorage.setItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY, JSON.stringify(payload))
}
export function clearPendingOAuthCredentialDraft() {
if (typeof window === 'undefined') return
window.sessionStorage.removeItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
}
export function readPendingCredentialCreateRequest(): PendingCredentialCreateRequest | null {
if (typeof window === 'undefined') return null
return parseJson<PendingCredentialCreateRequest>(
window.sessionStorage.getItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
)
}
export function writePendingCredentialCreateRequest(payload: PendingCredentialCreateRequest) {
if (typeof window === 'undefined') return
window.sessionStorage.setItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY, JSON.stringify(payload))
}
export function clearPendingCredentialCreateRequest() {
if (typeof window === 'undefined') return
window.sessionStorage.removeItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
}

View File

@@ -1,356 +0,0 @@
import { db } from '@sim/db'
import { credential, credentialMember, permissions, workspace } from '@sim/db/schema'
import { and, eq, inArray, notInArray } from 'drizzle-orm'
interface AccessibleEnvCredential {
type: 'env_workspace' | 'env_personal'
envKey: string
envOwnerUserId: string | null
updatedAt: Date
}
function getPostgresErrorCode(error: unknown): string | undefined {
if (!error || typeof error !== 'object') return undefined
const err = error as { code?: string; cause?: { code?: string } }
return err.code || err.cause?.code
}
export async function getWorkspaceMemberUserIds(workspaceId: string): Promise<string[]> {
const [workspaceRows, permissionRows] = await Promise.all([
db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1),
db
.select({ userId: permissions.userId })
.from(permissions)
.where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))),
])
const workspaceRow = workspaceRows[0]
const memberIds = new Set<string>(permissionRows.map((row) => row.userId))
if (workspaceRow?.ownerId) {
memberIds.add(workspaceRow.ownerId)
}
return Array.from(memberIds)
}
export async function getUserWorkspaceIds(userId: string): Promise<string[]> {
const [permissionRows, ownedWorkspaceRows] = await Promise.all([
db
.select({ workspaceId: workspace.id })
.from(permissions)
.innerJoin(
workspace,
and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspace.id))
)
.where(eq(permissions.userId, userId)),
db.select({ workspaceId: workspace.id }).from(workspace).where(eq(workspace.ownerId, userId)),
])
const workspaceIds = new Set<string>(permissionRows.map((row) => row.workspaceId))
for (const row of ownedWorkspaceRows) {
workspaceIds.add(row.workspaceId)
}
return Array.from(workspaceIds)
}
async function upsertCredentialAdminMember(credentialId: string, adminUserId: string) {
const now = new Date()
const [existingMembership] = await db
.select({ id: credentialMember.id, joinedAt: credentialMember.joinedAt })
.from(credentialMember)
.where(
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, adminUserId))
)
.limit(1)
if (existingMembership) {
await db
.update(credentialMember)
.set({
role: 'admin',
status: 'active',
joinedAt: existingMembership.joinedAt ?? now,
invitedBy: adminUserId,
updatedAt: now,
})
.where(eq(credentialMember.id, existingMembership.id))
return
}
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: adminUserId,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: adminUserId,
createdAt: now,
updatedAt: now,
})
}
async function ensureWorkspaceCredentialMemberships(
credentialId: string,
workspaceId: string,
ownerUserId: string
) {
const workspaceMemberUserIds = await getWorkspaceMemberUserIds(workspaceId)
if (!workspaceMemberUserIds.length) return
const existingMemberships = await db
.select({
id: credentialMember.id,
userId: credentialMember.userId,
joinedAt: credentialMember.joinedAt,
})
.from(credentialMember)
.where(
and(
eq(credentialMember.credentialId, credentialId),
inArray(credentialMember.userId, workspaceMemberUserIds)
)
)
const byUserId = new Map(existingMemberships.map((row) => [row.userId, row]))
const now = new Date()
for (const memberUserId of workspaceMemberUserIds) {
const targetRole = memberUserId === ownerUserId ? 'admin' : 'member'
const existing = byUserId.get(memberUserId)
if (existing) {
await db
.update(credentialMember)
.set({
role: targetRole,
status: 'active',
joinedAt: existing.joinedAt ?? now,
invitedBy: ownerUserId,
updatedAt: now,
})
.where(eq(credentialMember.id, existing.id))
continue
}
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId: memberUserId,
role: targetRole,
status: 'active',
joinedAt: now,
invitedBy: ownerUserId,
createdAt: now,
updatedAt: now,
})
}
}
export async function syncWorkspaceEnvCredentials(params: {
workspaceId: string
envKeys: string[]
actingUserId: string
}) {
const { workspaceId, envKeys, actingUserId } = params
const [workspaceRow] = await db
.select({ ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
if (!workspaceRow) return
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
const existingCredentials = await db
.select({
id: credential.id,
envKey: credential.envKey,
})
.from(credential)
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
const existingByKey = new Map(
existingCredentials
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
.map((row) => [row.envKey, row.id])
)
const credentialIdsToEnsureMembership = new Set<string>()
const now = new Date()
for (const envKey of normalizedKeys) {
const existingId = existingByKey.get(envKey)
if (existingId) {
credentialIdsToEnsureMembership.add(existingId)
continue
}
const createdId = crypto.randomUUID()
try {
await db.insert(credential).values({
id: createdId,
workspaceId,
type: 'env_workspace',
displayName: envKey,
envKey,
createdBy: actingUserId,
createdAt: now,
updatedAt: now,
})
credentialIdsToEnsureMembership.add(createdId)
} catch (error: unknown) {
const code = getPostgresErrorCode(error)
if (code !== '23505') throw error
}
}
for (const credentialId of credentialIdsToEnsureMembership) {
await ensureWorkspaceCredentialMemberships(credentialId, workspaceId, workspaceRow.ownerId)
}
if (normalizedKeys.length > 0) {
await db
.delete(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_workspace'),
notInArray(credential.envKey, normalizedKeys)
)
)
return
}
await db
.delete(credential)
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
}
export async function syncPersonalEnvCredentialsForUser(params: {
userId: string
envKeys: string[]
}) {
const { userId, envKeys } = params
const workspaceIds = await getUserWorkspaceIds(userId)
if (!workspaceIds.length) return
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
const now = new Date()
for (const workspaceId of workspaceIds) {
const existingCredentials = await db
.select({
id: credential.id,
envKey: credential.envKey,
})
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envOwnerUserId, userId)
)
)
const existingByKey = new Map(
existingCredentials
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
.map((row) => [row.envKey, row.id])
)
for (const envKey of normalizedKeys) {
const existingId = existingByKey.get(envKey)
if (existingId) {
await upsertCredentialAdminMember(existingId, userId)
continue
}
const createdId = crypto.randomUUID()
try {
await db.insert(credential).values({
id: createdId,
workspaceId,
type: 'env_personal',
displayName: envKey,
envKey,
envOwnerUserId: userId,
createdBy: userId,
createdAt: now,
updatedAt: now,
})
await upsertCredentialAdminMember(createdId, userId)
} catch (error: unknown) {
const code = getPostgresErrorCode(error)
if (code !== '23505') throw error
}
}
if (normalizedKeys.length > 0) {
await db
.delete(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envOwnerUserId, userId),
notInArray(credential.envKey, normalizedKeys)
)
)
continue
}
await db
.delete(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'env_personal'),
eq(credential.envOwnerUserId, userId)
)
)
}
}
export async function getAccessibleEnvCredentials(
workspaceId: string,
userId: string
): Promise<AccessibleEnvCredential[]> {
const rows = await db
.select({
type: credential.type,
envKey: credential.envKey,
envOwnerUserId: credential.envOwnerUserId,
updatedAt: credential.updatedAt,
})
.from(credential)
.innerJoin(
credentialMember,
and(
eq(credentialMember.credentialId, credential.id),
eq(credentialMember.userId, userId),
eq(credentialMember.status, 'active')
)
)
.where(
and(
eq(credential.workspaceId, workspaceId),
inArray(credential.type, ['env_workspace', 'env_personal'])
)
)
return rows
.filter(
(row): row is AccessibleEnvCredential =>
(row.type === 'env_workspace' || row.type === 'env_personal') && Boolean(row.envKey)
)
.map((row) => ({
type: row.type,
envKey: row.envKey!,
envOwnerUserId: row.envOwnerUserId,
updatedAt: row.updatedAt,
}))
}

View File

@@ -1,195 +0,0 @@
import { db } from '@sim/db'
import { account, credential, credentialMember } from '@sim/db/schema'
import { and, eq, inArray } from 'drizzle-orm'
import { getServiceConfigByProviderId } from '@/lib/oauth'
interface SyncWorkspaceOAuthCredentialsForUserParams {
workspaceId: string
userId: string
}
interface SyncWorkspaceOAuthCredentialsForUserResult {
createdCredentials: number
updatedMemberships: number
}
function getPostgresErrorCode(error: unknown): string | undefined {
if (!error || typeof error !== 'object') return undefined
const err = error as { code?: string; cause?: { code?: string } }
return err.code || err.cause?.code
}
/**
* Ensures connected OAuth accounts for a user exist as workspace-scoped credentials.
*/
export async function syncWorkspaceOAuthCredentialsForUser(
params: SyncWorkspaceOAuthCredentialsForUserParams
): Promise<SyncWorkspaceOAuthCredentialsForUserResult> {
const { workspaceId, userId } = params
const userAccounts = await db
.select({
id: account.id,
providerId: account.providerId,
accountId: account.accountId,
})
.from(account)
.where(eq(account.userId, userId))
if (userAccounts.length === 0) {
return { createdCredentials: 0, updatedMemberships: 0 }
}
const accountIds = userAccounts.map((row) => row.id)
const existingCredentials = await db
.select({
id: credential.id,
displayName: credential.displayName,
providerId: credential.providerId,
accountId: credential.accountId,
})
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'oauth'),
inArray(credential.accountId, accountIds)
)
)
const now = new Date()
const userAccountById = new Map(userAccounts.map((row) => [row.id, row]))
for (const existingCredential of existingCredentials) {
if (!existingCredential.accountId) continue
const linkedAccount = userAccountById.get(existingCredential.accountId)
if (!linkedAccount) continue
const normalizedLabel =
getServiceConfigByProviderId(linkedAccount.providerId)?.name || linkedAccount.providerId
const shouldNormalizeDisplayName =
existingCredential.displayName === linkedAccount.accountId ||
existingCredential.displayName === linkedAccount.providerId
if (!shouldNormalizeDisplayName || existingCredential.displayName === normalizedLabel) {
continue
}
await db
.update(credential)
.set({
displayName: normalizedLabel,
updatedAt: now,
})
.where(eq(credential.id, existingCredential.id))
}
const existingByAccountId = new Map(
existingCredentials
.filter((row) => Boolean(row.accountId))
.map((row) => [row.accountId!, row.id])
)
let createdCredentials = 0
for (const acc of userAccounts) {
if (existingByAccountId.has(acc.id)) {
continue
}
try {
await db.insert(credential).values({
id: crypto.randomUUID(),
workspaceId,
type: 'oauth',
displayName: getServiceConfigByProviderId(acc.providerId)?.name || acc.providerId,
providerId: acc.providerId,
accountId: acc.id,
createdBy: userId,
createdAt: now,
updatedAt: now,
})
createdCredentials += 1
} catch (error) {
if (getPostgresErrorCode(error) !== '23505') {
throw error
}
}
}
const credentialRows = await db
.select({ id: credential.id, accountId: credential.accountId })
.from(credential)
.where(
and(
eq(credential.workspaceId, workspaceId),
eq(credential.type, 'oauth'),
inArray(credential.accountId, accountIds)
)
)
const credentialIdByAccountId = new Map(
credentialRows.filter((row) => Boolean(row.accountId)).map((row) => [row.accountId!, row.id])
)
const allCredentialIds = Array.from(credentialIdByAccountId.values())
if (allCredentialIds.length === 0) {
return { createdCredentials, updatedMemberships: 0 }
}
const existingMemberships = await db
.select({
id: credentialMember.id,
credentialId: credentialMember.credentialId,
joinedAt: credentialMember.joinedAt,
})
.from(credentialMember)
.where(
and(
inArray(credentialMember.credentialId, allCredentialIds),
eq(credentialMember.userId, userId)
)
)
const membershipByCredentialId = new Map(
existingMemberships.map((row) => [row.credentialId, row])
)
let updatedMemberships = 0
for (const credentialId of allCredentialIds) {
const existingMembership = membershipByCredentialId.get(credentialId)
if (existingMembership) {
await db
.update(credentialMember)
.set({
role: 'admin',
status: 'active',
joinedAt: existingMembership.joinedAt ?? now,
invitedBy: userId,
updatedAt: now,
})
.where(eq(credentialMember.id, existingMembership.id))
updatedMemberships += 1
continue
}
try {
await db.insert(credentialMember).values({
id: crypto.randomUUID(),
credentialId,
userId,
role: 'admin',
status: 'active',
joinedAt: now,
invitedBy: userId,
createdAt: now,
updatedAt: now,
})
updatedMemberships += 1
} catch (error) {
if (getPostgresErrorCode(error) !== '23505') {
throw error
}
}
}
return { createdCredentials, updatedMemberships }
}

View File

@@ -1,9 +1,8 @@
import { db } from '@sim/db'
import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray } from 'drizzle-orm'
import { eq } from 'drizzle-orm'
import { decryptSecret } from '@/lib/core/security/encryption'
import { getAccessibleEnvCredentials } from '@/lib/credentials/environment'
const logger = createLogger('EnvironmentUtils')
@@ -54,7 +53,7 @@ export async function getPersonalAndWorkspaceEnv(
conflicts: string[]
decryptionFailures: string[]
}> {
const [personalRows, workspaceRows, accessibleEnvCredentials] = await Promise.all([
const [personalRows, workspaceRows] = await Promise.all([
db.select().from(environment).where(eq(environment.userId, userId)).limit(1),
workspaceId
? db
@@ -63,69 +62,10 @@ export async function getPersonalAndWorkspaceEnv(
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
.limit(1)
: Promise.resolve([] as any[]),
workspaceId ? getAccessibleEnvCredentials(workspaceId, userId) : Promise.resolve([]),
])
const ownPersonalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
const allWorkspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
const hasCredentialFiltering = Boolean(workspaceId) && accessibleEnvCredentials.length > 0
const workspaceCredentialKeys = new Set(
accessibleEnvCredentials.filter((row) => row.type === 'env_workspace').map((row) => row.envKey)
)
const personalCredentialRows = accessibleEnvCredentials
.filter((row) => row.type === 'env_personal' && row.envOwnerUserId)
.sort((a, b) => {
const aIsRequester = a.envOwnerUserId === userId
const bIsRequester = b.envOwnerUserId === userId
if (aIsRequester && !bIsRequester) return -1
if (!aIsRequester && bIsRequester) return 1
return b.updatedAt.getTime() - a.updatedAt.getTime()
})
const selectedPersonalOwners = new Map<string, string>()
for (const row of personalCredentialRows) {
if (!selectedPersonalOwners.has(row.envKey) && row.envOwnerUserId) {
selectedPersonalOwners.set(row.envKey, row.envOwnerUserId)
}
}
const ownerUserIds = Array.from(new Set(selectedPersonalOwners.values()))
const ownerEnvironmentRows =
ownerUserIds.length > 0
? await db
.select({
userId: environment.userId,
variables: environment.variables,
})
.from(environment)
.where(inArray(environment.userId, ownerUserIds))
: []
const ownerVariablesByUserId = new Map<string, Record<string, string>>(
ownerEnvironmentRows.map((row) => [row.userId, (row.variables as Record<string, string>) || {}])
)
let personalEncrypted: Record<string, string> = ownPersonalEncrypted
let workspaceEncrypted: Record<string, string> = allWorkspaceEncrypted
if (hasCredentialFiltering) {
personalEncrypted = {}
for (const [envKey, ownerUserId] of selectedPersonalOwners.entries()) {
const ownerVariables = ownerVariablesByUserId.get(ownerUserId)
const encryptedValue = ownerVariables?.[envKey]
if (encryptedValue) {
personalEncrypted[envKey] = encryptedValue
}
}
workspaceEncrypted = Object.fromEntries(
Object.entries(allWorkspaceEncrypted).filter(([envKey]) =>
workspaceCredentialKeys.has(envKey)
)
)
}
const personalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
const workspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
const decryptionFailures: string[] = []

View File

@@ -0,0 +1,246 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
const logger = createLogger('ExecutionEventBuffer')
const REDIS_PREFIX = 'execution:stream:'
const TTL_SECONDS = 60 * 60 // 1 hour
const EVENT_LIMIT = 1000
const RESERVE_BATCH = 100
const FLUSH_INTERVAL_MS = 15
const FLUSH_MAX_BATCH = 200
function getEventsKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:events`
}
function getSeqKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:seq`
}
function getMetaKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:meta`
}
export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'
export interface ExecutionStreamMeta {
status: ExecutionStreamStatus
userId?: string
workflowId?: string
updatedAt?: string
}
export interface ExecutionEventEntry {
eventId: number
executionId: string
event: ExecutionEvent
}
export interface ExecutionEventWriter {
write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
flush: () => Promise<void>
close: () => Promise<void>
}
export async function setExecutionMeta(
executionId: string,
meta: Partial<ExecutionStreamMeta>
): Promise<void> {
const redis = getRedisClient()
if (!redis) {
logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
return
}
try {
const key = getMetaKey(executionId)
const payload: Record<string, string> = {
updatedAt: new Date().toISOString(),
}
if (meta.status) payload.status = meta.status
if (meta.userId) payload.userId = meta.userId
if (meta.workflowId) payload.workflowId = meta.workflowId
await redis.hset(key, payload)
await redis.expire(key, TTL_SECONDS)
} catch (error) {
logger.warn('Failed to update execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
}
}
export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
const redis = getRedisClient()
if (!redis) {
logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
return null
}
try {
const key = getMetaKey(executionId)
const meta = await redis.hgetall(key)
if (!meta || Object.keys(meta).length === 0) return null
return meta as unknown as ExecutionStreamMeta
} catch (error) {
logger.warn('Failed to read execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return null
}
}
export async function readExecutionEvents(
executionId: string,
afterEventId: number
): Promise<ExecutionEventEntry[]> {
const redis = getRedisClient()
if (!redis) return []
try {
const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
return raw
.map((entry) => {
try {
return JSON.parse(entry) as ExecutionEventEntry
} catch {
return null
}
})
.filter((entry): entry is ExecutionEventEntry => Boolean(entry))
} catch (error) {
logger.warn('Failed to read execution events', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return []
}
}
export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
const redis = getRedisClient()
if (!redis) {
logger.warn(
'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
{
executionId,
}
)
return {
write: async (event) => ({ eventId: 0, executionId, event }),
flush: async () => {},
close: async () => {},
}
}
let pending: ExecutionEventEntry[] = []
let nextEventId = 0
let maxReservedId = 0
let flushTimer: ReturnType<typeof setTimeout> | null = null
const scheduleFlush = () => {
if (flushTimer) return
flushTimer = setTimeout(() => {
flushTimer = null
void flush()
}, FLUSH_INTERVAL_MS)
}
const reserveIds = async (minCount: number) => {
const reserveCount = Math.max(RESERVE_BATCH, minCount)
const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
const startId = newMax - reserveCount + 1
if (nextEventId === 0 || nextEventId > maxReservedId) {
nextEventId = startId
maxReservedId = newMax
}
}
let flushPromise: Promise<void> | null = null
let closed = false
const inflightWrites = new Set<Promise<ExecutionEventEntry>>()
const doFlush = async () => {
if (pending.length === 0) return
const batch = pending
pending = []
try {
const key = getEventsKey(executionId)
const zaddArgs: (string | number)[] = []
for (const entry of batch) {
zaddArgs.push(entry.eventId, JSON.stringify(entry))
}
const pipeline = redis.pipeline()
pipeline.zadd(key, ...zaddArgs)
pipeline.expire(key, TTL_SECONDS)
pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
await pipeline.exec()
} catch (error) {
logger.warn('Failed to flush execution events', {
executionId,
batchSize: batch.length,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})
pending = batch.concat(pending)
}
}
const flush = async () => {
if (flushPromise) {
await flushPromise
return
}
flushPromise = doFlush()
try {
await flushPromise
} finally {
flushPromise = null
if (pending.length > 0) scheduleFlush()
}
}
const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
if (closed) return { eventId: 0, executionId, event }
if (nextEventId === 0 || nextEventId > maxReservedId) {
await reserveIds(1)
}
const eventId = nextEventId++
const entry: ExecutionEventEntry = { eventId, executionId, event }
pending.push(entry)
if (pending.length >= FLUSH_MAX_BATCH) {
await flush()
} else {
scheduleFlush()
}
return entry
}
const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
const p = writeCore(event)
inflightWrites.add(p)
const remove = () => inflightWrites.delete(p)
p.then(remove, remove)
return p
}
const close = async () => {
closed = true
if (flushTimer) {
clearTimeout(flushTimer)
flushTimer = null
}
if (inflightWrites.size > 0) {
await Promise.allSettled(inflightWrites)
}
if (flushPromise) {
await flushPromise
}
if (pending.length > 0) {
await doFlush()
}
}
return { write, flush, close }
}
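/*
 * A short lifecycle sketch of the buffer above, assuming an ExecutionEvent payload shaped
 * roughly like { type: string } (the real ExecutionEvent union lives in execution-events
 * and may differ): writes go through the batching writer, and a reader tails new entries
 * via readExecutionEvents using the last eventId it has seen.
 */
async function exampleExecutionStream(executionId: string) {
  await setExecutionMeta(executionId, { status: 'active' })

  const writer = createExecutionEventWriter(executionId)
  await writer.write({ type: 'block:start' } as unknown as ExecutionEvent) // assumed event shape
  await writer.write({ type: 'block:complete' } as unknown as ExecutionEvent) // assumed event shape
  await writer.close() // flushes any buffered entries before returning

  // Read everything written so far; pass the last seen eventId later to resume tailing.
  const entries = await readExecutionEvents(executionId, 0)
  const lastSeenEventId = entries.length > 0 ? entries[entries.length - 1].eventId : 0

  await setExecutionMeta(executionId, { status: 'complete' })
  return { entries, lastSeenEventId }
}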

View File

@@ -2364,6 +2364,261 @@ describe('hasWorkflowChanged', () => {
})
})
describe('Trigger Config Normalization (False Positive Prevention)', () => {
it.concurrent(
'should not detect change when deployed has null fields but current has values from triggerConfig',
() => {
// Core scenario: deployed state has null individual fields, current state has
// values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should detect change when user edits a trigger field to a different value',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
it.concurrent('should not detect change when both sides have no triggerConfig', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change when deployed has empty fields and triggerConfig populates them',
() => {
// Empty string is also treated as "empty" by normalizeTriggerConfigValues
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent('should not detect change when triggerId differs', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: 'slack_webhook' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'old payload' },
triggerInstructions_slack_webhook: { value: 'old instructions' },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'new payload' },
triggerInstructions_slack_webhook: { value: 'new instructions' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should handle mixed scenario: some fields from triggerConfig, some user-edited',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
includeFiles: { id: 'includeFiles', type: 'switch', value: false },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
includeFiles: { id: 'includeFiles', type: 'switch', value: true },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
// includeFiles changed from false to true — this IS a real change
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
})
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
it.concurrent('should not detect change when webhookId differs', () => {
const deployedState = createWorkflowState({

View File

@@ -9,6 +9,7 @@ import {
normalizeLoop,
normalizeParallel,
normalizeSubBlockValue,
normalizeTriggerConfigValues,
normalizeValue,
normalizeVariables,
sanitizeVariable,
@@ -172,14 +173,18 @@ export function generateWorkflowDiffSummary(
}
}
// Normalize trigger config values for both states before comparison
const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
// Compare subBlocks using shared helper for filtering (single source of truth)
const allSubBlockIds = filterSubBlockIds([
...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
])
for (const subId of allSubBlockIds) {
const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined
if (!currentSub || !previousSub) {
changes.push({

View File

@@ -4,10 +4,12 @@
import { describe, expect, it } from 'vitest'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
filterSubBlockIds,
normalizedStringify,
normalizeEdge,
normalizeLoop,
normalizeParallel,
normalizeTriggerConfigValues,
normalizeValue,
sanitizeInputFormat,
sanitizeTools,
@@ -584,4 +586,226 @@ describe('Workflow Normalization Utilities', () => {
expect(result2).toBe(result3)
})
})
describe('filterSubBlockIds', () => {
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
const ids = [
'signingSecret',
'samplePayload_slack_webhook',
'triggerInstructions_slack_webhook',
'webhookUrlDisplay_slack_webhook',
'botToken',
]
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
const ids = ['mySamplePayload', 'notSamplePayload']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
})
it.concurrent('should return sorted results', () => {
const ids = ['zebra', 'alpha', 'middle']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['alpha', 'middle', 'zebra'])
})
it.concurrent('should handle empty array', () => {
expect(filterSubBlockIds([])).toEqual([])
})
it.concurrent('should handle all IDs being excluded', () => {
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
const result = filterSubBlockIds(ids)
expect(result).toEqual([])
})
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['realField'])
})
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
it.concurrent('should exclude synthetic tool-input subBlock IDs', () => {
const ids = [
'toolConfig',
'toolConfig-tool-0-query',
'toolConfig-tool-0-url',
'toolConfig-tool-1-status',
'systemPrompt',
]
const result = filterSubBlockIds(ids)
expect(result).toEqual(['systemPrompt', 'toolConfig'])
})
})
describe('normalizeTriggerConfigValues', () => {
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
const subBlocks = {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent(
'should return subBlocks unchanged when triggerConfig value is not an object',
() => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
}
)
it.concurrent('should populate null individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
})
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
})
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: null, botToken: undefined },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
expect((result.botToken as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { nonExistentField: 'value123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result.nonExistentField).toBeUndefined()
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should not mutate the original subBlocks object', () => {
const original = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
normalizeTriggerConfigValues(original)
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should preserve other subBlock properties when populating value', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: {
id: 'signingSecret',
type: 'short-input',
value: null,
placeholder: 'Enter signing secret',
},
}
const result = normalizeTriggerConfigValues(subBlocks)
const normalized = result.signingSecret as Record<string, unknown>
expect(normalized.value).toBe('secret123')
expect(normalized.id).toBe('signingSecret')
expect(normalized.type).toBe('short-input')
expect(normalized.placeholder).toBe('Enter signing secret')
})
})
})

View File

@@ -411,17 +411,63 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
}
/**
* Filters subBlock IDs to exclude system and trigger runtime subBlocks.
 * Pattern for matching synthetic subBlock IDs created by ToolSubBlockRenderer.
* These IDs follow the format `{subBlockId}-tool-{index}-{paramId}` and are
* mirrors of values already stored in toolConfig.value.tools[N].params.
*/
const SYNTHETIC_TOOL_SUBBLOCK_RE = /-tool-\d+-/
/**
* Filters subBlock IDs to exclude system, trigger runtime, and synthetic tool subBlocks.
*
* @param subBlockIds - Array of subBlock IDs to filter
* @returns Filtered and sorted array of subBlock IDs
*/
export function filterSubBlockIds(subBlockIds: string[]): string[] {
return subBlockIds
.filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
.filter((id) => {
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
return false
if (SYNTHETIC_TOOL_SUBBLOCK_RE.test(id)) return false
return true
})
.sort()
}
/**
* Normalizes trigger block subBlocks by populating null/empty individual fields
* from the triggerConfig aggregate subBlock. This compensates for the runtime
* population done by populateTriggerFieldsFromConfig, ensuring consistent
* comparison between client state (with populated values) and deployed state
* (with null values from DB).
*/
export function normalizeTriggerConfigValues(
subBlocks: Record<string, unknown>
): Record<string, unknown> {
const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
const triggerConfigValue = triggerConfigSub?.value
if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
return subBlocks
}
const result = { ...subBlocks }
for (const [fieldId, configValue] of Object.entries(
triggerConfigValue as Record<string, unknown>
)) {
if (configValue === null || configValue === undefined) continue
const existingSub = result[fieldId] as Record<string, unknown> | undefined
if (
existingSub &&
(existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
) {
result[fieldId] = { ...existingSub, value: configValue }
}
}
return result
}
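/*
 * A condensed comparison sketch mirroring how generateWorkflowDiffSummary applies these
 * helpers: normalize triggerConfig-backed values on both sides first, then restrict the
 * comparison to IDs that survive filterSubBlockIds. The value check here is deliberately
 * naive for illustration; the real summary compares fully normalized subBlock values.
 */
function exampleChangedSubBlockIds(
  currentSubBlocks: Record<string, unknown>,
  previousSubBlocks: Record<string, unknown>
): string[] {
  const current = normalizeTriggerConfigValues(currentSubBlocks)
  const previous = normalizeTriggerConfigValues(previousSubBlocks)
  const ids = filterSubBlockIds([
    ...new Set([...Object.keys(current), ...Object.keys(previous)]),
  ])
  return ids.filter((id) => {
    const currentSub = current[id] as { value?: unknown } | undefined
    const previousSub = previous[id] as { value?: unknown } | undefined
    return (
      JSON.stringify(currentSub?.value ?? null) !== JSON.stringify(previousSub?.value ?? null)
    )
  })
}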
/**
* Normalizes a subBlock value with sanitization for specific subBlock types.
* Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)

View File

@@ -129,6 +129,18 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
})
},
setCurrentExecutionId: (workflowId, executionId) => {
set({
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
currentExecutionId: executionId,
}),
})
},
getCurrentExecutionId: (workflowId) => {
return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
},
clearRunPath: (workflowId) => {
set({
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {

View File

@@ -35,6 +35,8 @@ export interface WorkflowExecutionState {
lastRunPath: Map<string, BlockRunStatus>
/** Maps edge IDs to their run result from the last execution */
lastRunEdges: Map<string, EdgeRunStatus>
/** The execution ID of the currently running execution */
currentExecutionId: string | null
}
/**
@@ -54,6 +56,7 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
debugContext: null,
lastRunPath: new Map(),
lastRunEdges: new Map(),
currentExecutionId: null,
}
/**
@@ -96,6 +99,10 @@ export interface ExecutionActions {
setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
/** Clears the run path and run edges for a workflow */
clearRunPath: (workflowId: string) => void
/** Stores the current execution ID for a workflow */
setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
/** Returns the current execution ID for a workflow */
getCurrentExecutionId: (workflowId: string) => string | null
/** Resets the entire store to its initial empty state */
reset: () => void
/** Stores a serializable execution snapshot for a workflow */

View File

@@ -1,6 +1,5 @@
export type SettingsSection =
| 'general'
| 'credentials'
| 'environment'
| 'template-profile'
| 'integrations'

View File

@@ -310,6 +310,50 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
}
/**
* Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
*
* Examples:
* - claude-4.5-opus -> claude-opus-4-5
* - claude-opus-4.6 -> claude-opus-4-6
* - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
*/
function canonicalizeModelMatchKey(modelId: string): string {
if (!modelId) return modelId
const normalized = modelId.trim().toLowerCase()
const toCanonicalClaude = (tier: string, version: string): string => {
const normalizedVersion = version.replace(/\./g, '-')
return `claude-${tier}-${normalizedVersion}`
}
const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
if (tierFirstExact) {
const [, tier, version] = tierFirstExact
return toCanonicalClaude(tier, version)
}
const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
if (versionFirstExact) {
const [, version, tier] = versionFirstExact
return toCanonicalClaude(tier, version)
}
const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
if (tierFirstEmbedded) {
const [, tier, version] = tierFirstEmbedded
return toCanonicalClaude(tier, version)
}
const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
if (versionFirstEmbedded) {
const [, version, tier] = versionFirstEmbedded
return toCanonicalClaude(tier, version)
}
return normalized
}
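/*
 * Quick reference for the canonicalization above, matching the examples in its doc comment;
 * useful when tracing why two catalog entries resolve to the same match key.
 */
const exampleCanonicalMatchKeys = [
  canonicalizeModelMatchKey('claude-4.5-opus'), // 'claude-opus-4-5'
  canonicalizeModelMatchKey('claude-opus-4.6'), // 'claude-opus-4-6'
  canonicalizeModelMatchKey('anthropic.claude-opus-4-5-20251101-v1:0'), // 'claude-opus-4-5'
  canonicalizeModelMatchKey('gpt-4o'), // non-Claude IDs pass through lowercased and trimmed
]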
const MODEL_PROVIDER_PRIORITY = [
'anthropic',
'bedrock',
@@ -350,12 +394,23 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
const { provider, modelId } = parseModelKey(selectedModel)
const targetModelId = modelId || selectedModel
const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
const matches = models.filter((m) => {
const candidateModelId = parseModelKey(m.id).modelId || m.id
const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
return (
candidateModelId === targetModelId ||
m.id.endsWith(`/${targetModelId}`) ||
candidateMatchKey === targetMatchKey
)
})
if (matches.length === 0) return selectedModel
if (provider) {
const sameProvider = matches.find((m) => m.provider === provider)
const sameProvider = matches.find(
(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
)
if (sameProvider) return sameProvider.id
}
@@ -1093,11 +1148,12 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = chat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)
logger.debug('[Chat] Restoring chat config', {
chatId: chat.id,
mode: chatMode,
model: chatModel,
model: normalizedChatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1119,7 +1175,7 @@ export const useCopilotStore = create<CopilotStore>()(
showPlanTodos: false,
streamingPlanContent: planArtifact,
mode: chatMode,
selectedModel: chatModel as CopilotStore['selectedModel'],
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
suppressAutoSelect: false,
})
@@ -1292,6 +1348,10 @@ export const useCopilotStore = create<CopilotStore>()(
const refreshedConfig = updatedCurrentChat.config ?? {}
const refreshedMode = refreshedConfig.mode || get().mode
const refreshedModel = refreshedConfig.model || get().selectedModel
const normalizedRefreshedModel = normalizeSelectedModelKey(
refreshedModel,
get().availableModels
)
const toolCallsById = buildToolCallsById(normalizedMessages)
set({
@@ -1300,7 +1360,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: refreshedPlanArtifact,
mode: refreshedMode,
selectedModel: refreshedModel as CopilotStore['selectedModel'],
selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
})
}
try {
@@ -1320,11 +1380,15 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = mostRecentChat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
const normalizedChatModel = normalizeSelectedModelKey(
chatModel,
get().availableModels
)
logger.info('[Chat] Auto-selecting most recent chat with config', {
chatId: mostRecentChat.id,
mode: chatMode,
model: chatModel,
model: normalizedChatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1336,7 +1400,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: planArtifact,
mode: chatMode,
selectedModel: chatModel as CopilotStore['selectedModel'],
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
})
try {
await get().loadMessageCheckpoints(mostRecentChat.id)
@@ -2268,7 +2332,8 @@ export const useCopilotStore = create<CopilotStore>()(
},
setSelectedModel: async (model) => {
set({ selectedModel: model })
const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
},
setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
loadAvailableModels: async () => {

View File

@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
const newEntry = get().entries[0]
if (newEntry?.error) {
if (newEntry?.error && newEntry.blockType !== 'cancelled') {
notifyBlockError({
error: newEntry.error,
blockName: newEntry.blockName || 'Unknown Block',
@@ -243,6 +243,11 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
useExecutionStore.getState().clearRunPath(workflowId)
},
clearExecutionEntries: (executionId: string) =>
set((state) => ({
entries: state.entries.filter((e) => e.executionId !== executionId),
})),
exportConsoleCSV: (workflowId: string) => {
const entries = get().entries.filter((entry) => entry.workflowId === workflowId)
@@ -470,12 +475,24 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
},
merge: (persistedState, currentState) => {
const persisted = persistedState as Partial<ConsoleStore> | undefined
const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
const rawEntries = persisted?.entries ?? currentState.entries
const oneHourAgo = Date.now() - 60 * 60 * 1000
const entries = rawEntries.map((entry, index) => {
let updated = entry
if (entry.executionOrder === undefined) {
return { ...entry, executionOrder: index + 1 }
updated = { ...updated, executionOrder: index + 1 }
}
return entry
if (
entry.isRunning &&
entry.startedAt &&
new Date(entry.startedAt).getTime() < oneHourAgo
) {
updated = { ...updated, isRunning: false }
}
return updated
})
return {
...currentState,
entries,

View File

@@ -51,6 +51,7 @@ export interface ConsoleStore {
isOpen: boolean
addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
clearWorkflowConsole: (workflowId: string) => void
clearExecutionEntries: (executionId: string) => void
exportConsoleCSV: (workflowId: string) => void
getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
toggleConsole: () => void

View File

@@ -0,0 +1,114 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeleteLabelParams {
accessToken: string
domain: string
pageId: string
labelName: string
cloudId?: string
}
export interface ConfluenceDeleteLabelResponse {
success: boolean
output: {
ts: string
pageId: string
labelName: string
deleted: boolean
}
}
export const confluenceDeleteLabelTool: ToolConfig<
ConfluenceDeleteLabelParams,
ConfluenceDeleteLabelResponse
> = {
id: 'confluence_delete_label',
name: 'Confluence Delete Label',
description: 'Remove a label from a Confluence page.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Confluence page ID to remove the label from',
},
labelName: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Name of the label to remove',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/labels',
method: 'DELETE',
headers: (params: ConfluenceDeleteLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeleteLabelParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
labelName: params.labelName?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
labelName: data.labelName ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: {
type: 'string',
description: 'Page ID the label was removed from',
},
labelName: {
type: 'string',
description: 'Name of the removed label',
},
deleted: {
type: 'boolean',
description: 'Deletion status',
},
},
}

View File

@@ -0,0 +1,105 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeletePagePropertyParams {
accessToken: string
domain: string
pageId: string
propertyId: string
cloudId?: string
}
export interface ConfluenceDeletePagePropertyResponse {
success: boolean
output: {
ts: string
pageId: string
propertyId: string
deleted: boolean
}
}
export const confluenceDeletePagePropertyTool: ToolConfig<
ConfluenceDeletePagePropertyParams,
ConfluenceDeletePagePropertyResponse
> = {
id: 'confluence_delete_page_property',
name: 'Confluence Delete Page Property',
description: 'Delete a content property from a Confluence page by its property ID.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the page containing the property',
},
propertyId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the property to delete',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/page-properties',
method: 'DELETE',
headers: (params: ConfluenceDeletePagePropertyParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeletePagePropertyParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
propertyId: params.propertyId?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
propertyId: data.propertyId ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: { type: 'string', description: 'ID of the page' },
propertyId: { type: 'string', description: 'ID of the deleted property' },
deleted: { type: 'boolean', description: 'Deletion status' },
},
}

View File

@@ -0,0 +1,143 @@
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceGetPagesByLabelParams {
accessToken: string
domain: string
labelId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceGetPagesByLabelResponse {
success: boolean
output: {
ts: string
labelId: string
pages: Array<{
id: string
title: string
status: string | null
spaceId: string | null
parentId: string | null
authorId: string | null
createdAt: string | null
version: {
number: number
message?: string
createdAt?: string
} | null
}>
nextCursor: string | null
}
}
export const confluenceGetPagesByLabelTool: ToolConfig<
ConfluenceGetPagesByLabelParams,
ConfluenceGetPagesByLabelResponse
> = {
id: 'confluence_get_pages_by_label',
name: 'Confluence Get Pages by Label',
description: 'Retrieve all pages that have a specific label applied.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
labelId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the label to get pages for',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of pages to return (default: 50, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceGetPagesByLabelParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
labelId: params.labelId,
limit: String(params.limit || 50),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/pages-by-label?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceGetPagesByLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
labelId: data.labelId ?? '',
pages: data.pages ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
labelId: { type: 'string', description: 'ID of the label' },
pages: {
type: 'array',
description: 'Array of pages with this label',
items: {
type: 'object',
properties: PAGE_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}
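
The cursor plumbing above lends itself to a simple follow-the-cursor loop. The helper below is hypothetical (nothing like it appears in the diff) and assumes the config's url/headers/transformResponse hooks can be called directly against the internal proxy route:

import {
  confluenceGetPagesByLabelTool,
  type ConfluenceGetPagesByLabelParams,
  type ConfluenceGetPagesByLabelResponse,
} from '@/tools/confluence/get_pages_by_label'

type PageItem = ConfluenceGetPagesByLabelResponse['output']['pages'][number]

// Walks nextCursor until the API stops returning one, collecting every page.
async function collectPagesByLabel(base: ConfluenceGetPagesByLabelParams): Promise<PageItem[]> {
  const all: PageItem[] = []
  let cursor: string | undefined
  do {
    const params = cursor ? { ...base, cursor } : base
    // Relative URL: resolves against the app origin when run in the browser.
    const res = await fetch(confluenceGetPagesByLabelTool.request.url(params), {
      headers: confluenceGetPagesByLabelTool.request.headers(params),
    })
    const { output } = await confluenceGetPagesByLabelTool.transformResponse(res)
    all.push(...output.pages)
    cursor = output.nextCursor ?? undefined
  } while (cursor)
  return all
}

// e.g. collectPagesByLabel({ accessToken: '<token>', domain: 'x.atlassian.net', labelId: '424242' })

Each iteration reuses the tool's own URL builder, so the default limit of 50 and the optional cloudId passthrough behave exactly as defined above.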


@@ -5,11 +5,14 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'
import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost'
import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors'
import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children'
import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version'
import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
import { confluenceGetSpaceTool } from '@/tools/confluence/get_space'
import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments'
import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts'
@@ -19,6 +22,7 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels'
import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties'
import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions'
import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space'
import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'
import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces'
import { confluenceRetrieveTool } from '@/tools/confluence/retrieve'
import { confluenceSearchTool } from '@/tools/confluence/search'
@@ -78,6 +82,7 @@ export {
// Page Properties Tools
confluenceListPagePropertiesTool,
confluenceCreatePagePropertyTool,
confluenceDeletePagePropertyTool,
// Blog Post Tools
confluenceListBlogPostsTool,
confluenceGetBlogPostTool,
@@ -98,6 +103,9 @@ export {
// Label Tools
confluenceListLabelsTool,
confluenceAddLabelTool,
confluenceDeleteLabelTool,
confluenceGetPagesByLabelTool,
confluenceListSpaceLabelsTool,
// Space Tools
confluenceGetSpaceTool,
confluenceListSpacesTool,
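
For orientation, the barrel export above puts the new label tools alongside the existing ones, so callers can resolve them by their id strings. The lookup below is a hypothetical sketch: the '@/tools/confluence' barrel path and the registry object are assumptions, not shown in this diff.

import {
  confluenceAddLabelTool,
  confluenceDeleteLabelTool,
  confluenceGetPagesByLabelTool,
  confluenceListSpaceLabelsTool,
} from '@/tools/confluence' // assumed path for this barrel/index file
import type { ToolConfig } from '@/tools/types'

// Resolve a tool from the id string a block or agent selects, e.g. 'confluence_delete_label'.
const labelTools: Record<string, ToolConfig<any, any>> = Object.fromEntries(
  [
    confluenceAddLabelTool,
    confluenceDeleteLabelTool,
    confluenceGetPagesByLabelTool,
    confluenceListSpaceLabelsTool,
  ].map((tool) => [tool.id, tool])
)

labelTools.confluence_get_pages_by_label // -> confluenceGetPagesByLabelTool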


@@ -0,0 +1,134 @@
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceListSpaceLabelsParams {
accessToken: string
domain: string
spaceId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceListSpaceLabelsResponse {
success: boolean
output: {
ts: string
spaceId: string
labels: Array<{
id: string
name: string
prefix: string
}>
nextCursor: string | null
}
}
export const confluenceListSpaceLabelsTool: ToolConfig<
ConfluenceListSpaceLabelsParams,
ConfluenceListSpaceLabelsResponse
> = {
id: 'confluence_list_space_labels',
name: 'Confluence List Space Labels',
description: 'List all labels associated with a Confluence space.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
spaceId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the Confluence space to list labels from',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of labels to return (default: 25, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceListSpaceLabelsParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
spaceId: params.spaceId,
limit: String(params.limit || 25),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/space-labels?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceListSpaceLabelsParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
spaceId: data.spaceId ?? '',
labels: data.labels ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
spaceId: { type: 'string', description: 'ID of the space' },
labels: {
type: 'array',
description: 'Array of labels on the space',
items: {
type: 'object',
properties: LABEL_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}
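
As a hypothetical end-to-end sketch (none of this wiring appears in the diff), the two new label-reading tools compose naturally: list a space's labels, then feed a label id into get_pages_by_label. Calling the configs' request/transform hooks directly is an assumption here; the real executor routes through its own runner.

import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'

const common = { accessToken: '<token>', domain: 'yourcompany.atlassian.net' }

// Returns the titles of pages carrying the first label found on the space.
async function pagesForFirstSpaceLabel(spaceId: string) {
  const labelParams = { ...common, spaceId }
  const labelsRes = await fetch(confluenceListSpaceLabelsTool.request.url(labelParams), {
    headers: confluenceListSpaceLabelsTool.request.headers(labelParams),
  })
  const labels = await confluenceListSpaceLabelsTool.transformResponse(labelsRes)

  const first = labels.output.labels[0]
  if (!first) return []

  const pageParams = { ...common, labelId: first.id }
  const pagesRes = await fetch(confluenceGetPagesByLabelTool.request.url(pageParams), {
    headers: confluenceGetPagesByLabelTool.request.headers(pageParams),
  })
  const pages = await confluenceGetPagesByLabelTool.transformResponse(pagesRes)
  return pages.output.pages.map((p) => p.title)
}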


@@ -1,6 +1,7 @@
import {
buildCanonicalIndex,
type CanonicalIndex,
type CanonicalModeOverrides,
evaluateSubBlockCondition,
getCanonicalValues,
isCanonicalPair,
@@ -12,7 +13,10 @@ import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
export {
buildCanonicalIndex,
type CanonicalIndex,
type CanonicalModeOverrides,
evaluateSubBlockCondition,
isCanonicalPair,
resolveCanonicalMode,
type SubBlockCondition,
}

Some files were not shown because too many files have changed in this diff.