Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-11 23:14:58 -05:00)

Compare commits: feat/creat ... feat/mult- (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 508772cf58 | |
| | 7314675f50 | |
| | 253161afba | |
@@ -41,6 +41,9 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite

| Tastenkombination | Aktion |
|----------|--------|
| `C` | Copilot-Tab fokussieren |
| `T` | Toolbar-Tab fokussieren |
| `E` | Editor-Tab fokussieren |
| `Mod` + `F` | Toolbar-Suche fokussieren |

## Globale Navigation

@@ -43,6 +43,9 @@ These shortcuts switch between panel tabs on the right side of the canvas.

| Shortcut | Action |
|----------|--------|
| `C` | Focus Copilot tab |
| `T` | Focus Toolbar tab |
| `E` | Focus Editor tab |
| `Mod` + `F` | Focus Toolbar search |

## Global Navigation
@@ -399,28 +399,6 @@ Create a new custom property (metadata) on a Confluence page.

| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |

### `confluence_delete_page_property`

Delete a content property from a Confluence page by its property ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | The ID of the page containing the property |
| `propertyId` | string | Yes | The ID of the property to delete |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | ID of the page |
| `propertyId` | string | ID of the deleted property |
| `deleted` | boolean | Deletion status |
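For orientation, the input and output tables above can be read as the following shapes. This is an illustrative sketch only: the field names and types come from the tables, but every value is invented and nothing here is taken from the repository.

```ts
// Illustrative only: hypothetical values for confluence_delete_page_property.
const input = {
  domain: 'yourcompany.atlassian.net',
  pageId: '123456',
  propertyId: '789',
  // cloudId omitted: per the table, it is fetched using the domain when not provided
}

const expectedOutput = {
  ts: '2026-02-11T12:00:00.000Z',
  pageId: '123456',
  propertyId: '789',
  deleted: true,
}
```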
### `confluence_search`

Search for content across Confluence pages, blog posts, and other content.

@@ -894,90 +872,6 @@ Add a label to a Confluence page for organization and categorization.

| `labelName` | string | Name of the added label |
| `labelId` | string | ID of the added label |

### `confluence_delete_label`

Remove a label from a Confluence page.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | Confluence page ID to remove the label from |
| `labelName` | string | Yes | Name of the label to remove |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | Page ID the label was removed from |
| `labelName` | string | Name of the removed label |
| `deleted` | boolean | Deletion status |

### `confluence_get_pages_by_label`

Retrieve all pages that have a specific label applied.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `labelId` | string | Yes | The ID of the label to get pages for |
| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `labelId` | string | ID of the label |
| `pages` | array | Array of pages with this label |
| ↳ `id` | string | Unique page identifier |
| ↳ `title` | string | Page title |
| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) |
| ↳ `spaceId` | string | ID of the space containing the page |
| ↳ `parentId` | string | ID of the parent page \(null if top-level\) |
| ↳ `authorId` | string | Account ID of the page author |
| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
| ↳ `version` | object | Page version information |
| ↳ `number` | number | Version number |
| ↳ `message` | string | Version message |
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
| `nextCursor` | string | Cursor for fetching the next page of results |

### `confluence_list_space_labels`

List all labels associated with a Confluence space.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `spaceId` | string | ID of the space |
| `labels` | array | Array of labels on the space |
| ↳ `id` | string | Unique label identifier |
| ↳ `name` | string | Label name |
| ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) |
| `nextCursor` | string | Cursor for fetching the next page of results |

### `confluence_get_space`

Get details about a specific Confluence space.

@@ -42,6 +42,9 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo

| Atajo | Acción |
|----------|--------|
| `C` | Enfocar pestaña Copilot |
| `T` | Enfocar pestaña Barra de herramientas |
| `E` | Enfocar pestaña Editor |
| `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |

## Navegación global

@@ -42,6 +42,9 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté

| Raccourci | Action |
|----------|--------|
| `C` | Activer l'onglet Copilot |
| `T` | Activer l'onglet Barre d'outils |
| `E` | Activer l'onglet Éditeur |
| `Mod` + `F` | Activer la recherche dans la barre d'outils |

## Navigation globale

@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'

| ショートカット | 操作 |
|----------|--------|
| `C` | Copilotタブにフォーカス |
| `T` | Toolbarタブにフォーカス |
| `E` | Editorタブにフォーカス |
| `Mod` + `F` | Toolbar検索にフォーカス |

## グローバルナビゲーション

@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'

| 快捷键 | 操作 |
|----------|--------|
| `C` | 聚焦 Copilot 标签页 |
| `T` | 聚焦 Toolbar 标签页 |
| `E` | 聚焦 Editor 标签页 |
| `Mod` + `F` | 聚焦 Toolbar 搜索 |

## 全局导航
@@ -1,215 +0,0 @@

/**
 * POST /api/attribution
 *
 * Automatic UTM-based referral attribution for new signups.
 *
 * Reads the `sim_utm` cookie (set by proxy on auth pages), verifies the user
 * account was created after the cookie was set, matches a campaign by UTM
 * specificity, and atomically inserts an attribution record + applies bonus credits.
 *
 * Idempotent — the unique constraint on `userId` prevents double-attribution.
 */

import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, user, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { cookies } from 'next/headers'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'

const logger = createLogger('AttributionAPI')

const COOKIE_NAME = 'sim_utm'
const CLOCK_DRIFT_TOLERANCE_MS = 60 * 1000

const UtmCookieSchema = z.object({
  utm_source: z.string().optional(),
  utm_medium: z.string().optional(),
  utm_campaign: z.string().optional(),
  utm_content: z.string().optional(),
  referrer_url: z.string().optional(),
  landing_page: z.string().optional(),
  created_at: z.string().min(1),
})

/**
 * Finds the most specific active campaign matching the given UTM params.
 * Null fields on a campaign act as wildcards. Ties broken by newest campaign.
 */
async function findMatchingCampaign(utmData: z.infer<typeof UtmCookieSchema>) {
  const campaigns = await db
    .select()
    .from(referralCampaigns)
    .where(eq(referralCampaigns.isActive, true))

  let bestMatch: (typeof campaigns)[number] | null = null
  let bestScore = -1

  for (const campaign of campaigns) {
    let score = 0
    let mismatch = false

    const fields = [
      { campaignVal: campaign.utmSource, utmVal: utmData.utm_source },
      { campaignVal: campaign.utmMedium, utmVal: utmData.utm_medium },
      { campaignVal: campaign.utmCampaign, utmVal: utmData.utm_campaign },
      { campaignVal: campaign.utmContent, utmVal: utmData.utm_content },
    ] as const

    for (const { campaignVal, utmVal } of fields) {
      if (campaignVal === null) continue
      if (campaignVal === utmVal) {
        score++
      } else {
        mismatch = true
        break
      }
    }

    if (!mismatch && score > 0) {
      if (
        score > bestScore ||
        (score === bestScore &&
          bestMatch &&
          campaign.createdAt.getTime() > bestMatch.createdAt.getTime())
      ) {
        bestScore = score
        bestMatch = campaign
      }
    }
  }

  return bestMatch
}

export async function POST() {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const cookieStore = await cookies()
    const utmCookie = cookieStore.get(COOKIE_NAME)
    if (!utmCookie?.value) {
      return NextResponse.json({ attributed: false, reason: 'no_utm_cookie' })
    }

    let utmData: z.infer<typeof UtmCookieSchema>
    try {
      let decoded: string
      try {
        decoded = decodeURIComponent(utmCookie.value)
      } catch {
        decoded = utmCookie.value
      }
      utmData = UtmCookieSchema.parse(JSON.parse(decoded))
    } catch {
      logger.warn('Failed to parse UTM cookie', { userId: session.user.id })
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
    }

    const cookieCreatedAt = Number(utmData.created_at)
    if (!Number.isFinite(cookieCreatedAt)) {
      logger.warn('UTM cookie has invalid created_at timestamp', { userId: session.user.id })
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
    }

    const userRows = await db
      .select({ createdAt: user.createdAt })
      .from(user)
      .where(eq(user.id, session.user.id))
      .limit(1)

    if (userRows.length === 0) {
      return NextResponse.json({ error: 'User not found' }, { status: 404 })
    }

    const userCreatedAt = userRows[0].createdAt.getTime()
    if (userCreatedAt < cookieCreatedAt - CLOCK_DRIFT_TOLERANCE_MS) {
      logger.info('User account predates UTM cookie, skipping attribution', {
        userId: session.user.id,
        userCreatedAt: new Date(userCreatedAt).toISOString(),
        cookieCreatedAt: new Date(cookieCreatedAt).toISOString(),
      })
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'account_predates_cookie' })
    }

    const [existingStats] = await db
      .select({ id: userStats.id })
      .from(userStats)
      .where(eq(userStats.userId, session.user.id))
      .limit(1)

    if (!existingStats) {
      await db.insert(userStats).values({
        id: nanoid(),
        userId: session.user.id,
      })
    }

    const matchedCampaign = await findMatchingCampaign(utmData)
    if (!matchedCampaign) {
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'no_matching_campaign' })
    }

    const bonusAmount = Number(matchedCampaign.bonusCreditAmount)

    let attributed = false
    await db.transaction(async (tx) => {
      const result = await tx
        .insert(referralAttribution)
        .values({
          id: nanoid(),
          userId: session.user.id,
          campaignId: matchedCampaign.id,
          utmSource: utmData.utm_source || null,
          utmMedium: utmData.utm_medium || null,
          utmCampaign: utmData.utm_campaign || null,
          utmContent: utmData.utm_content || null,
          referrerUrl: utmData.referrer_url || null,
          landingPage: utmData.landing_page || null,
          bonusCreditAmount: bonusAmount.toString(),
        })
        .onConflictDoNothing({ target: referralAttribution.userId })
        .returning({ id: referralAttribution.id })

      if (result.length > 0) {
        await applyBonusCredits(session.user.id, bonusAmount, tx)
        attributed = true
      }
    })

    if (attributed) {
      logger.info('Referral attribution created and bonus credits applied', {
        userId: session.user.id,
        campaignId: matchedCampaign.id,
        campaignName: matchedCampaign.name,
        utmSource: utmData.utm_source,
        utmCampaign: utmData.utm_campaign,
        utmContent: utmData.utm_content,
        bonusAmount,
      })
    } else {
      logger.info('User already attributed, skipping', { userId: session.user.id })
    }

    cookieStore.delete(COOKIE_NAME)

    return NextResponse.json({
      attributed,
      bonusAmount: attributed ? bonusAmount : undefined,
    })
  } catch (error) {
    logger.error('Attribution error', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
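The campaign matching in the route deleted above treats a null campaign field as a wildcard and prefers the campaign with the most matching non-null UTM fields, breaking ties toward the newest campaign. A minimal standalone sketch of that scoring rule follows; the types and campaign data are hypothetical and simplified, not the repository's code.

```ts
// Standalone sketch of the specificity rule: null is a wildcard, any explicit
// mismatch disqualifies the campaign, otherwise more matched fields score higher.
type Utm = { source?: string; medium?: string; campaign?: string }
type Campaign = { name: string; source: string | null; medium: string | null; campaign: string | null }

function score(c: Campaign, utm: Utm): number {
  const pairs: Array<[string | null, string | undefined]> = [
    [c.source, utm.source],
    [c.medium, utm.medium],
    [c.campaign, utm.campaign],
  ]
  let s = 0
  for (const [want, got] of pairs) {
    if (want === null) continue // wildcard field
    if (want !== got) return -1 // explicit mismatch disqualifies
    s++
  }
  return s
}

const utm: Utm = { source: 'twitter', campaign: 'launch' }
const broad: Campaign = { name: 'all-twitter', source: 'twitter', medium: null, campaign: null }
const narrow: Campaign = { name: 'twitter-launch', source: 'twitter', medium: null, campaign: 'launch' }

console.log(score(broad, utm), score(narrow, utm)) // 1 2 -> the more specific campaign wins
```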
@@ -1,7 +1,7 @@

import { db } from '@sim/db'
import { account } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

@@ -31,15 +31,13 @@ export async function GET(request: NextRequest) {

    })
    .from(account)
    .where(and(...whereConditions))

  // Use the user's email as the display name (consistent with credential selector)
  const userEmail = session.user.email
    .orderBy(desc(account.updatedAt))

  const accountsWithDisplayName = accounts.map((acc) => ({
    id: acc.id,
    accountId: acc.accountId,
    providerId: acc.providerId,
    displayName: userEmail || acc.providerId,
    displayName: acc.accountId || acc.providerId,
  }))

  return NextResponse.json({ accounts: accountsWithDisplayName })
@@ -1,5 +1,5 @@

import { db } from '@sim/db'
import { account, user } from '@sim/db/schema'
import { account, credential, credentialMember, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { jwtDecode } from 'jwt-decode'

@@ -7,8 +7,10 @@ import { type NextRequest, NextResponse } from 'next/server'

import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { evaluateScopeCoverage, type OAuthProvider, parseProvider } from '@/lib/oauth'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

export const dynamic = 'force-dynamic'

@@ -18,6 +20,7 @@ const credentialsQuerySchema = z

  .object({
    provider: z.string().nullish(),
    workflowId: z.string().uuid('Workflow ID must be a valid UUID').nullish(),
    workspaceId: z.string().uuid('Workspace ID must be a valid UUID').nullish(),
    credentialId: z
      .string()
      .min(1, 'Credential ID must not be empty')

@@ -35,6 +38,79 @@ interface GoogleIdToken {

  name?: string
}

function toCredentialResponse(
  id: string,
  displayName: string,
  providerId: string,
  updatedAt: Date,
  scope: string | null
) {
  const storedScope = scope?.trim()
  const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
  const scopeEvaluation = evaluateScopeCoverage(providerId, grantedScopes)
  const [_, featureType = 'default'] = providerId.split('-')

  return {
    id,
    name: displayName,
    provider: providerId,
    lastUsed: updatedAt.toISOString(),
    isDefault: featureType === 'default',
    scopes: scopeEvaluation.grantedScopes,
    canonicalScopes: scopeEvaluation.canonicalScopes,
    missingScopes: scopeEvaluation.missingScopes,
    extraScopes: scopeEvaluation.extraScopes,
    requiresReauthorization: scopeEvaluation.requiresReauthorization,
  }
}

async function getFallbackDisplayName(
  requestId: string,
  providerParam: string | null | undefined,
  accountRow: {
    idToken: string | null
    accountId: string
    userId: string
  }
) {
  const providerForParse = (providerParam || 'google') as OAuthProvider
  const { baseProvider } = parseProvider(providerForParse)

  if (accountRow.idToken) {
    try {
      const decoded = jwtDecode<GoogleIdToken>(accountRow.idToken)
      if (decoded.email) return decoded.email
      if (decoded.name) return decoded.name
    } catch (_error) {
      logger.warn(`[${requestId}] Error decoding ID token`, {
        accountId: accountRow.accountId,
      })
    }
  }

  if (baseProvider === 'github') {
    return `${accountRow.accountId} (GitHub)`
  }

  try {
    const userRecord = await db
      .select({ email: user.email })
      .from(user)
      .where(eq(user.id, accountRow.userId))
      .limit(1)

    if (userRecord.length > 0) {
      return userRecord[0].email
    }
  } catch (_error) {
    logger.warn(`[${requestId}] Error fetching user email`, {
      userId: accountRow.userId,
    })
  }

  return `${accountRow.accountId} (${baseProvider})`
}

/**
 * Get credentials for a specific provider
 */

@@ -46,6 +122,7 @@ export async function GET(request: NextRequest) {

  const rawQuery = {
    provider: searchParams.get('provider'),
    workflowId: searchParams.get('workflowId'),
    workspaceId: searchParams.get('workspaceId'),
    credentialId: searchParams.get('credentialId'),
  }

@@ -78,7 +155,7 @@ export async function GET(request: NextRequest) {

    )
  }

  const { provider: providerParam, workflowId, credentialId } = parseResult.data
  const { provider: providerParam, workflowId, workspaceId, credentialId } = parseResult.data

  // Authenticate requester (supports session and internal JWT)
  const authResult = await checkSessionOrInternalAuth(request)

@@ -88,7 +165,7 @@ export async function GET(request: NextRequest) {

  }
  const requesterUserId = authResult.userId

  const effectiveUserId = requesterUserId
  let effectiveWorkspaceId = workspaceId ?? undefined
  if (workflowId) {
    const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,

@@ -106,101 +183,145 @@ export async function GET(request: NextRequest) {

        { status: workflowAuthorization.status }
      )
    }
    effectiveWorkspaceId = workflowAuthorization.workflow?.workspaceId || undefined
  }

  // Parse the provider to get base provider and feature type (if provider is present)
  const { baseProvider } = parseProvider((providerParam || 'google') as OAuthProvider)
  if (effectiveWorkspaceId) {
    const workspaceAccess = await checkWorkspaceAccess(effectiveWorkspaceId, requesterUserId)
    if (!workspaceAccess.hasAccess) {
      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
    }
  }

  let accountsData

  if (credentialId) {
    const [platformCredential] = await db
      .select({
        id: credential.id,
        workspaceId: credential.workspaceId,
        type: credential.type,
        displayName: credential.displayName,
        providerId: credential.providerId,
        accountId: credential.accountId,
        accountProviderId: account.providerId,
        accountScope: account.scope,
        accountUpdatedAt: account.updatedAt,
      })
      .from(credential)
      .leftJoin(account, eq(credential.accountId, account.id))
      .where(eq(credential.id, credentialId))
      .limit(1)

    if (platformCredential) {
      if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
        return NextResponse.json({ credentials: [] }, { status: 200 })
      }

      if (workflowId) {
        if (!effectiveWorkspaceId || platformCredential.workspaceId !== effectiveWorkspaceId) {
          return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
        }
      } else {
        const [membership] = await db
          .select({ id: credentialMember.id })
          .from(credentialMember)
          .where(
            and(
              eq(credentialMember.credentialId, platformCredential.id),
              eq(credentialMember.userId, requesterUserId),
              eq(credentialMember.status, 'active')
            )
          )
          .limit(1)

        if (!membership) {
          return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
        }
      }

      if (!platformCredential.accountProviderId || !platformCredential.accountUpdatedAt) {
        return NextResponse.json({ credentials: [] }, { status: 200 })
      }

      return NextResponse.json(
        {
          credentials: [
            toCredentialResponse(
              platformCredential.id,
              platformCredential.displayName,
              platformCredential.accountProviderId,
              platformCredential.accountUpdatedAt,
              platformCredential.accountScope
            ),
          ],
        },
        { status: 200 }
      )
    }
  }

  if (effectiveWorkspaceId && providerParam) {
    await syncWorkspaceOAuthCredentialsForUser({
      workspaceId: effectiveWorkspaceId,
      userId: requesterUserId,
    })

    const credentialsData = await db
      .select({
        id: credential.id,
        displayName: credential.displayName,
        providerId: account.providerId,
        scope: account.scope,
        updatedAt: account.updatedAt,
      })
      .from(credential)
      .innerJoin(account, eq(credential.accountId, account.id))
      .innerJoin(
        credentialMember,
        and(
          eq(credentialMember.credentialId, credential.id),
          eq(credentialMember.userId, requesterUserId),
          eq(credentialMember.status, 'active')
        )
      )
      .where(
        and(
          eq(credential.workspaceId, effectiveWorkspaceId),
          eq(credential.type, 'oauth'),
          eq(account.providerId, providerParam)
        )
      )

    return NextResponse.json(
      {
        credentials: credentialsData.map((row) =>
          toCredentialResponse(row.id, row.displayName, row.providerId, row.updatedAt, row.scope)
        ),
      },
      { status: 200 }
    )
  }

  if (credentialId && workflowId) {
    // When both workflowId and credentialId are provided, fetch by ID only.
    // Workspace authorization above already proves access; the credential
    // may belong to another workspace member (e.g. for display name resolution).
    accountsData = await db.select().from(account).where(eq(account.id, credentialId))
  } else if (credentialId) {
    accountsData = await db
      .select()
      .from(account)
      .where(and(eq(account.userId, effectiveUserId), eq(account.id, credentialId)))
      .where(and(eq(account.userId, requesterUserId), eq(account.id, credentialId)))
  } else {
    // Fetch all credentials for provider and effective user
    accountsData = await db
      .select()
      .from(account)
      .where(and(eq(account.userId, effectiveUserId), eq(account.providerId, providerParam!)))
      .where(and(eq(account.userId, requesterUserId), eq(account.providerId, providerParam!)))
  }

  // Transform accounts into credentials
  const credentials = await Promise.all(
    accountsData.map(async (acc) => {
      // Extract the feature type from providerId (e.g., 'google-default' -> 'default')
      const [_, featureType = 'default'] = acc.providerId.split('-')

      // Try multiple methods to get a user-friendly display name
      let displayName = ''

      // Method 1: Try to extract email from ID token (works for Google, etc.)
      if (acc.idToken) {
        try {
          const decoded = jwtDecode<GoogleIdToken>(acc.idToken)
          if (decoded.email) {
            displayName = decoded.email
          } else if (decoded.name) {
            displayName = decoded.name
          }
        } catch (_error) {
          logger.warn(`[${requestId}] Error decoding ID token`, {
            accountId: acc.id,
          })
        }
      }

      // Method 2: For GitHub, the accountId might be the username
      if (!displayName && baseProvider === 'github') {
        displayName = `${acc.accountId} (GitHub)`
      }

      // Method 3: Try to get the user's email from our database
      if (!displayName) {
        try {
          const userRecord = await db
            .select({ email: user.email })
            .from(user)
            .where(eq(user.id, acc.userId))
            .limit(1)

          if (userRecord.length > 0) {
            displayName = userRecord[0].email
          }
        } catch (_error) {
          logger.warn(`[${requestId}] Error fetching user email`, {
            userId: acc.userId,
          })
        }
      }

      // Fallback: Use accountId with provider type as context
      if (!displayName) {
        displayName = `${acc.accountId} (${baseProvider})`
      }

      const storedScope = acc.scope?.trim()
      const grantedScopes = storedScope ? storedScope.split(/[\s,]+/).filter(Boolean) : []
      const scopeEvaluation = evaluateScopeCoverage(acc.providerId, grantedScopes)

      return {
        id: acc.id,
        name: displayName,
        provider: acc.providerId,
        lastUsed: acc.updatedAt.toISOString(),
        isDefault: featureType === 'default',
        scopes: scopeEvaluation.grantedScopes,
        canonicalScopes: scopeEvaluation.canonicalScopes,
        missingScopes: scopeEvaluation.missingScopes,
        extraScopes: scopeEvaluation.extraScopes,
        requiresReauthorization: scopeEvaluation.requiresReauthorization,
      }
      const displayName = await getFallbackDisplayName(requestId, providerParam, acc)
      return toCredentialResponse(acc.id, displayName, acc.providerId, acc.updatedAt, acc.scope)
    })
  )
@@ -15,6 +15,7 @@ const logger = createLogger('OAuthDisconnectAPI')

const disconnectSchema = z.object({
  provider: z.string({ required_error: 'Provider is required' }).min(1, 'Provider is required'),
  providerId: z.string().optional(),
  accountId: z.string().optional(),
})

/**

@@ -50,15 +51,20 @@ export async function POST(request: NextRequest) {

    )
  }

  const { provider, providerId } = parseResult.data
  const { provider, providerId, accountId } = parseResult.data

  logger.info(`[${requestId}] Processing OAuth disconnect request`, {
    provider,
    hasProviderId: !!providerId,
  })

  // If a specific providerId is provided, delete only that account
  if (providerId) {
  // If a specific account row ID is provided, delete that exact account
  if (accountId) {
    await db
      .delete(account)
      .where(and(eq(account.userId, session.user.id), eq(account.id, accountId)))
  } else if (providerId) {
    // If a specific providerId is provided, delete accounts for that provider ID
    await db
      .delete(account)
      .where(and(eq(account.userId, session.user.id), eq(account.providerId, providerId)))
@@ -38,13 +38,18 @@ export async function GET(request: NextRequest) {

    return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
  }

  const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
  const resolvedCredentialId = authz.resolvedCredentialId || credentialId
  const credential = await getCredential(
    requestId,
    resolvedCredentialId,
    authz.credentialOwnerUserId
  )
  if (!credential) {
    return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
  }

  const accessToken = await refreshAccessTokenIfNeeded(
    credentialId,
    resolvedCredentialId,
    authz.credentialOwnerUserId,
    requestId
  )

@@ -37,14 +37,19 @@ export async function GET(request: NextRequest) {

    return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status })
  }

  const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
  const resolvedCredentialId = authz.resolvedCredentialId || credentialId
  const credential = await getCredential(
    requestId,
    resolvedCredentialId,
    authz.credentialOwnerUserId
  )
  if (!credential) {
    return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
  }

  // Refresh access token if needed using the utility function
  const accessToken = await refreshAccessTokenIfNeeded(
    credentialId,
    resolvedCredentialId,
    authz.credentialOwnerUserId,
    requestId
  )

@@ -119,14 +119,23 @@ export async function POST(request: NextRequest) {

    return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
  }

  const credential = await getCredential(requestId, credentialId, authz.credentialOwnerUserId)
  const resolvedCredentialId = authz.resolvedCredentialId || credentialId
  const credential = await getCredential(
    requestId,
    resolvedCredentialId,
    authz.credentialOwnerUserId
  )

  if (!credential) {
    return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
  }

  try {
    const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
    const { accessToken } = await refreshTokenIfNeeded(
      requestId,
      credential,
      resolvedCredentialId
    )

    let instanceUrl: string | undefined
    if (credential.providerId === 'salesforce' && credential.scope) {

@@ -186,13 +195,20 @@ export async function GET(request: NextRequest) {

  const { credentialId } = parseResult.data

  // For GET requests, we only support session-based authentication
  const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
  if (!auth.success || auth.authType !== 'session' || !auth.userId) {
    return NextResponse.json({ error: 'User not authenticated' }, { status: 401 })
  const authz = await authorizeCredentialUse(request, {
    credentialId,
    requireWorkflowIdForInternal: false,
  })
  if (!authz.ok || authz.authType !== 'session' || !authz.credentialOwnerUserId) {
    return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
  }

  const credential = await getCredential(requestId, credentialId, auth.userId)
  const resolvedCredentialId = authz.resolvedCredentialId || credentialId
  const credential = await getCredential(
    requestId,
    resolvedCredentialId,
    authz.credentialOwnerUserId
  )

  if (!credential) {
    return NextResponse.json({ error: 'Credential not found' }, { status: 404 })

@@ -204,7 +220,11 @@ export async function GET(request: NextRequest) {

  }

  try {
    const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
    const { accessToken } = await refreshTokenIfNeeded(
      requestId,
      credential,
      resolvedCredentialId
    )

    // For Salesforce, extract instanceUrl from the scope field
    let instanceUrl: string | undefined

@@ -50,7 +50,7 @@ describe('OAuth Utils', () => {

  describe('getCredential', () => {
    it('should return credential when found', async () => {
      const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])

      const credential = await getCredential('request-id', 'credential-id', 'test-user-id')

@@ -59,7 +59,8 @@ describe('OAuth Utils', () => {

      expect(mockDbTyped.where).toHaveBeenCalled()
      expect(mockDbTyped.limit).toHaveBeenCalledWith(1)

      expect(credential).toEqual(mockCredential)
      expect(credential).toMatchObject(mockCredential)
      expect(credential).toMatchObject({ resolvedCredentialId: 'credential-id' })
    })

    it('should return undefined when credential is not found', async () => {

@@ -152,7 +153,7 @@ describe('OAuth Utils', () => {

        providerId: 'google',
        userId: 'test-user-id',
      }
      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])

      const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')

@@ -169,7 +170,7 @@ describe('OAuth Utils', () => {

        providerId: 'google',
        userId: 'test-user-id',
      }
      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])

      mockRefreshOAuthToken.mockResolvedValueOnce({
        accessToken: 'new-token',

@@ -202,7 +203,7 @@ describe('OAuth Utils', () => {

        providerId: 'google',
        userId: 'test-user-id',
      }
      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
      mockDbTyped.limit.mockReturnValueOnce([]).mockReturnValueOnce([mockCredential])

      mockRefreshOAuthToken.mockResolvedValueOnce(null)
@@ -1,5 +1,5 @@

import { db } from '@sim/db'
import { account, credentialSetMember } from '@sim/db/schema'
import { account, credential, credentialSetMember } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, inArray } from 'drizzle-orm'
import { refreshOAuthToken } from '@/lib/oauth'

@@ -25,6 +25,28 @@ interface AccountInsertData {

  accessTokenExpiresAt?: Date
}

async function resolveOAuthAccountId(
  credentialId: string
): Promise<{ accountId: string; usedCredentialTable: boolean } | null> {
  const [credentialRow] = await db
    .select({
      type: credential.type,
      accountId: credential.accountId,
    })
    .from(credential)
    .where(eq(credential.id, credentialId))
    .limit(1)

  if (credentialRow) {
    if (credentialRow.type !== 'oauth' || !credentialRow.accountId) {
      return null
    }
    return { accountId: credentialRow.accountId, usedCredentialTable: true }
  }

  return { accountId: credentialId, usedCredentialTable: false }
}

/**
 * Safely inserts an account record, handling duplicate constraint violations gracefully.
 * If a duplicate is detected (unique constraint violation), logs a warning and returns success.

@@ -52,10 +74,16 @@ export async function safeAccountInsert(

 * Get a credential by ID and verify it belongs to the user
 */
export async function getCredential(requestId: string, credentialId: string, userId: string) {
  const resolved = await resolveOAuthAccountId(credentialId)
  if (!resolved) {
    logger.warn(`[${requestId}] Credential is not an OAuth credential`)
    return undefined
  }

  const credentials = await db
    .select()
    .from(account)
    .where(and(eq(account.id, credentialId), eq(account.userId, userId)))
    .where(and(eq(account.id, resolved.accountId), eq(account.userId, userId)))
    .limit(1)

  if (!credentials.length) {

@@ -63,7 +91,10 @@ export async function getCredential(requestId: string, credentialId: string, use

    return undefined
  }

  return credentials[0]
  return {
    ...credentials[0],
    resolvedCredentialId: resolved.accountId,
  }
}

export async function getOAuthToken(userId: string, providerId: string): Promise<string | null> {

@@ -238,7 +269,9 @@ export async function refreshAccessTokenIfNeeded(

  }

  // Update the token in the database
  await db.update(account).set(updateData).where(eq(account.id, credentialId))
  const resolvedCredentialId =
    (credential as { resolvedCredentialId?: string }).resolvedCredentialId ?? credentialId
  await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))

  logger.info(`[${requestId}] Successfully refreshed access token for credential`)
  return refreshedToken.accessToken

@@ -274,6 +307,8 @@ export async function refreshTokenIfNeeded(

  credential: any,
  credentialId: string
): Promise<{ accessToken: string; refreshed: boolean }> {
  const resolvedCredentialId = credential.resolvedCredentialId ?? credentialId

  // Decide if we should refresh: token missing OR expired
  const accessTokenExpiresAt = credential.accessTokenExpiresAt
  const refreshTokenExpiresAt = credential.refreshTokenExpiresAt

@@ -334,7 +369,7 @@ export async function refreshTokenIfNeeded(

    updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
  }

  await db.update(account).set(updateData).where(eq(account.id, credentialId))
  await db.update(account).set(updateData).where(eq(account.id, resolvedCredentialId))

  logger.info(`[${requestId}] Successfully refreshed access token`)
  return { accessToken: refreshedToken, refreshed: true }

@@ -343,7 +378,7 @@ export async function refreshTokenIfNeeded(

    `[${requestId}] Refresh attempt failed, checking if another concurrent request succeeded`
  )

  const freshCredential = await getCredential(requestId, credentialId, credential.userId)
  const freshCredential = await getCredential(requestId, resolvedCredentialId, credential.userId)
  if (freshCredential?.accessToken) {
    const freshExpiresAt = freshCredential.accessTokenExpiresAt
    const stillValid = !freshExpiresAt || freshExpiresAt > new Date()
@@ -48,16 +48,21 @@ export async function GET(request: NextRequest) {

  const shopData = await shopResponse.json()
  const shopInfo = shopData.shop
  const stableAccountId = shopInfo.id?.toString() || shopDomain

  const existing = await db.query.account.findFirst({
    where: and(eq(account.userId, session.user.id), eq(account.providerId, 'shopify')),
    where: and(
      eq(account.userId, session.user.id),
      eq(account.providerId, 'shopify'),
      eq(account.accountId, stableAccountId)
    ),
  })

  const now = new Date()

  const accountData = {
    accessToken: accessToken,
    accountId: shopInfo.id?.toString() || shopDomain,
    accountId: stableAccountId,
    scope: scope || '',
    updatedAt: now,
    idToken: shopDomain,

@@ -52,7 +52,11 @@ export async function POST(request: NextRequest) {

  const trelloUser = await userResponse.json()

  const existing = await db.query.account.findFirst({
    where: and(eq(account.userId, session.user.id), eq(account.providerId, 'trello')),
    where: and(
      eq(account.userId, session.user.id),
      eq(account.providerId, 'trello'),
      eq(account.accountId, trelloUser.id)
    ),
  })

  const now = new Date()
@@ -113,7 +113,6 @@ const ChatMessageSchema = z.object({

  workflowId: z.string().optional(),
  knowledgeId: z.string().optional(),
  blockId: z.string().optional(),
  blockIds: z.array(z.string()).optional(),
  templateId: z.string().optional(),
  executionId: z.string().optional(),
  // For workflow_block, provide both workflowId and blockId

@@ -160,20 +159,6 @@ export async function POST(req: NextRequest) {

    commands,
  } = ChatMessageSchema.parse(body)

  const normalizedContexts = Array.isArray(contexts)
    ? contexts.map((ctx) => {
        if (ctx.kind !== 'blocks') return ctx
        if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
        if (ctx.blockId) {
          return {
            ...ctx,
            blockIds: [ctx.blockId],
          }
        }
        return ctx
      })
    : contexts

  // Resolve workflowId - if not provided, use first workflow or find by name
  const resolved = await resolveWorkflowIdForUser(
    authenticatedUserId,

@@ -191,10 +176,10 @@ export async function POST(req: NextRequest) {

  const userMessageIdToUse = userMessageId || crypto.randomUUID()
  try {
    logger.info(`[${tracker.requestId}] Received chat POST`, {
      hasContexts: Array.isArray(normalizedContexts),
      contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
      contextsPreview: Array.isArray(normalizedContexts)
        ? normalizedContexts.map((c: any) => ({
      hasContexts: Array.isArray(contexts),
      contextsCount: Array.isArray(contexts) ? contexts.length : 0,
      contextsPreview: Array.isArray(contexts)
        ? contexts.map((c: any) => ({
            kind: c?.kind,
            chatId: c?.chatId,
            workflowId: c?.workflowId,

@@ -206,25 +191,17 @@ export async function POST(req: NextRequest) {

  } catch {}
  // Preprocess contexts server-side
  let agentContexts: Array<{ type: string; content: string }> = []
  if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
  if (Array.isArray(contexts) && contexts.length > 0) {
    try {
      const { processContextsServer } = await import('@/lib/copilot/process-contents')
      const processed = await processContextsServer(
        normalizedContexts as any,
        authenticatedUserId,
        message
      )
      const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
      agentContexts = processed
      logger.info(`[${tracker.requestId}] Contexts processed for request`, {
        processedCount: agentContexts.length,
        kinds: agentContexts.map((c) => c.type),
        lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
      })
      if (
        Array.isArray(normalizedContexts) &&
        normalizedContexts.length > 0 &&
        agentContexts.length === 0
      ) {
      if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
        logger.warn(
          `[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
        )

@@ -269,13 +246,11 @@ export async function POST(req: NextRequest) {

      mode,
      model: selectedModel,
      provider,
      conversationId: effectiveConversationId,
      conversationHistory,
      contexts: agentContexts,
      fileAttachments,
      commands,
      chatId: actualChatId,
      prefetch,
      implicitFeedback,
    },
    {

@@ -457,15 +432,10 @@ export async function POST(req: NextRequest) {

      content: message,
      timestamp: new Date().toISOString(),
      ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
      ...(Array.isArray(normalizedContexts) &&
        normalizedContexts.length > 0 && {
          contexts: normalizedContexts,
        }),
      ...(Array.isArray(normalizedContexts) &&
        normalizedContexts.length > 0 && {
          contentBlocks: [
            { type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
          ],
      ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
      ...(Array.isArray(contexts) &&
        contexts.length > 0 && {
          contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
        }),
    }
apps/sim/app/api/credentials/[id]/members/route.ts (new file, 194 lines)

@@ -0,0 +1,194 @@

import { db } from '@sim/db'
import { credential, credentialMember, user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'

const logger = createLogger('CredentialMembersAPI')

interface RouteContext {
  params: Promise<{ id: string }>
}

async function requireAdminMembership(credentialId: string, userId: string) {
  const [membership] = await db
    .select({ role: credentialMember.role, status: credentialMember.status })
    .from(credentialMember)
    .where(
      and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
    )
    .limit(1)

  if (!membership || membership.status !== 'active' || membership.role !== 'admin') {
    return null
  }
  return membership
}

export async function GET(_request: NextRequest, context: RouteContext) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: credentialId } = await context.params

    const [cred] = await db
      .select({ id: credential.id })
      .from(credential)
      .where(eq(credential.id, credentialId))
      .limit(1)

    if (!cred) {
      return NextResponse.json({ members: [] }, { status: 200 })
    }

    const members = await db
      .select({
        id: credentialMember.id,
        userId: credentialMember.userId,
        role: credentialMember.role,
        status: credentialMember.status,
        joinedAt: credentialMember.joinedAt,
        userName: user.name,
        userEmail: user.email,
      })
      .from(credentialMember)
      .innerJoin(user, eq(credentialMember.userId, user.id))
      .where(eq(credentialMember.credentialId, credentialId))

    return NextResponse.json({ members })
  } catch (error) {
    logger.error('Failed to fetch credential members', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

const addMemberSchema = z.object({
  userId: z.string().min(1),
  role: z.enum(['admin', 'member']).default('member'),
})

export async function POST(request: NextRequest, context: RouteContext) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: credentialId } = await context.params

    const admin = await requireAdminMembership(credentialId, session.user.id)
    if (!admin) {
      return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
    }

    const body = await request.json()
    const parsed = addMemberSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
    }

    const { userId, role } = parsed.data
    const now = new Date()

    const [existing] = await db
      .select({ id: credentialMember.id, status: credentialMember.status })
      .from(credentialMember)
      .where(
        and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, userId))
      )
      .limit(1)

    if (existing) {
      await db
        .update(credentialMember)
        .set({ role, status: 'active', updatedAt: now })
        .where(eq(credentialMember.id, existing.id))
      return NextResponse.json({ success: true })
    }

    await db.insert(credentialMember).values({
      id: crypto.randomUUID(),
      credentialId,
      userId,
      role,
      status: 'active',
      joinedAt: now,
      invitedBy: session.user.id,
      createdAt: now,
      updatedAt: now,
    })

    return NextResponse.json({ success: true }, { status: 201 })
  } catch (error) {
    logger.error('Failed to add credential member', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

export async function DELETE(request: NextRequest, context: RouteContext) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: credentialId } = await context.params
    const targetUserId = new URL(request.url).searchParams.get('userId')
    if (!targetUserId) {
      return NextResponse.json({ error: 'userId query parameter required' }, { status: 400 })
    }

    const admin = await requireAdminMembership(credentialId, session.user.id)
    if (!admin) {
      return NextResponse.json({ error: 'Admin access required' }, { status: 403 })
    }

    const [target] = await db
      .select({
        id: credentialMember.id,
        role: credentialMember.role,
        status: credentialMember.status,
      })
      .from(credentialMember)
      .where(
        and(
          eq(credentialMember.credentialId, credentialId),
          eq(credentialMember.userId, targetUserId)
        )
      )
      .limit(1)

    if (!target) {
      return NextResponse.json({ error: 'Member not found' }, { status: 404 })
    }

    if (target.role === 'admin') {
      const activeAdmins = await db
        .select({ id: credentialMember.id })
        .from(credentialMember)
        .where(
          and(
            eq(credentialMember.credentialId, credentialId),
            eq(credentialMember.role, 'admin'),
            eq(credentialMember.status, 'active')
          )
        )

      if (activeAdmins.length <= 1) {
        return NextResponse.json({ error: 'Cannot remove the last admin' }, { status: 400 })
      }
    }

    await db.delete(credentialMember).where(eq(credentialMember.id, target.id))

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Failed to remove credential member', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
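As a rough illustration of how a client might drive the members route added above, the sketch below issues the POST, GET, and DELETE calls that the handlers accept. The credential and user IDs are invented; only the URL shape, body fields, and behavior (admin membership required, last active admin cannot be removed) come from the code above.

```ts
// Hypothetical client-side usage of the credential members API; IDs are invented.
const credentialId = 'cred_123'

// Add (or re-activate) a member; the caller must hold an active admin membership.
await fetch(`/api/credentials/${credentialId}/members`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ userId: 'user_456', role: 'member' }),
})

// List current members of the credential.
const { members } = await (await fetch(`/api/credentials/${credentialId}/members`)).json()
console.log(members.length)

// Remove that member again; the route refuses to remove the last active admin.
await fetch(`/api/credentials/${credentialId}/members?userId=user_456`, { method: 'DELETE' })
```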
234  apps/sim/app/api/credentials/[id]/route.ts  Normal file
@@ -0,0 +1,234 @@
import { db } from '@sim/db'
import { credential, credentialMember, environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getCredentialActorContext } from '@/lib/credentials/access'
import {
  syncPersonalEnvCredentialsForUser,
  syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'

const logger = createLogger('CredentialByIdAPI')

const updateCredentialSchema = z
  .object({
    displayName: z.string().trim().min(1).max(255).optional(),
    accountId: z.string().trim().min(1).optional(),
  })
  .strict()
  .refine((data) => Boolean(data.displayName || data.accountId), {
    message: 'At least one field must be provided',
    path: ['displayName'],
  })

async function getCredentialResponse(credentialId: string, userId: string) {
  const [row] = await db
    .select({
      id: credential.id,
      workspaceId: credential.workspaceId,
      type: credential.type,
      displayName: credential.displayName,
      providerId: credential.providerId,
      accountId: credential.accountId,
      envKey: credential.envKey,
      envOwnerUserId: credential.envOwnerUserId,
      createdBy: credential.createdBy,
      createdAt: credential.createdAt,
      updatedAt: credential.updatedAt,
      role: credentialMember.role,
      status: credentialMember.status,
    })
    .from(credential)
    .innerJoin(
      credentialMember,
      and(eq(credentialMember.credentialId, credential.id), eq(credentialMember.userId, userId))
    )
    .where(eq(credential.id, credentialId))
    .limit(1)

  return row ?? null
}

export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const { id } = await params

  try {
    const access = await getCredentialActorContext(id, session.user.id)
    if (!access.credential) {
      return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
    }
    if (!access.hasWorkspaceAccess || !access.member) {
      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
    }

    const row = await getCredentialResponse(id, session.user.id)
    return NextResponse.json({ credential: row }, { status: 200 })
  } catch (error) {
    logger.error('Failed to fetch credential', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const { id } = await params

  try {
    const parseResult = updateCredentialSchema.safeParse(await request.json())
    if (!parseResult.success) {
      return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
    }

    const access = await getCredentialActorContext(id, session.user.id)
    if (!access.credential) {
      return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
    }
    if (!access.hasWorkspaceAccess || !access.isAdmin) {
      return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
    }

    if (access.credential.type === 'oauth') {
      return NextResponse.json(
        {
          error:
            'OAuth credential editing is disabled. Connect an account and create or use its linked credential.',
        },
        { status: 400 }
      )
    }

    return NextResponse.json(
      {
        error:
          'Environment credentials cannot be updated via this endpoint. Use the environment value editor in credentials settings.',
      },
      { status: 400 }
    )
  } catch (error) {
    logger.error('Failed to update credential', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const { id } = await params

  try {
    const access = await getCredentialActorContext(id, session.user.id)
    if (!access.credential) {
      return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
    }
    if (!access.hasWorkspaceAccess || !access.isAdmin) {
      return NextResponse.json({ error: 'Credential admin permission required' }, { status: 403 })
    }

    if (access.credential.type === 'env_personal' && access.credential.envKey) {
      const ownerUserId = access.credential.envOwnerUserId
      if (!ownerUserId) {
        return NextResponse.json({ error: 'Invalid personal secret owner' }, { status: 400 })
      }

      const [personalRow] = await db
        .select({ variables: environment.variables })
        .from(environment)
        .where(eq(environment.userId, ownerUserId))
        .limit(1)

      const current = ((personalRow?.variables as Record<string, string> | null) ?? {}) as Record<
        string,
        string
      >
      if (access.credential.envKey in current) {
        delete current[access.credential.envKey]
      }

      await db
        .insert(environment)
        .values({
          id: ownerUserId,
          userId: ownerUserId,
          variables: current,
          updatedAt: new Date(),
        })
        .onConflictDoUpdate({
          target: [environment.userId],
          set: { variables: current, updatedAt: new Date() },
        })

      await syncPersonalEnvCredentialsForUser({
        userId: ownerUserId,
        envKeys: Object.keys(current),
      })

      return NextResponse.json({ success: true }, { status: 200 })
    }

    if (access.credential.type === 'env_workspace' && access.credential.envKey) {
      const [workspaceRow] = await db
        .select({
          id: workspaceEnvironment.id,
          createdAt: workspaceEnvironment.createdAt,
          variables: workspaceEnvironment.variables,
        })
        .from(workspaceEnvironment)
        .where(eq(workspaceEnvironment.workspaceId, access.credential.workspaceId))
        .limit(1)

      const current = ((workspaceRow?.variables as Record<string, string> | null) ?? {}) as Record<
        string,
        string
      >
      if (access.credential.envKey in current) {
        delete current[access.credential.envKey]
      }

      await db
        .insert(workspaceEnvironment)
        .values({
          id: workspaceRow?.id || crypto.randomUUID(),
          workspaceId: access.credential.workspaceId,
          variables: current,
          createdAt: workspaceRow?.createdAt || new Date(),
          updatedAt: new Date(),
        })
        .onConflictDoUpdate({
          target: [workspaceEnvironment.workspaceId],
          set: { variables: current, updatedAt: new Date() },
        })

      await syncWorkspaceEnvCredentials({
        workspaceId: access.credential.workspaceId,
        envKeys: Object.keys(current),
        actingUserId: session.user.id,
      })

      return NextResponse.json({ success: true }, { status: 200 })
    }

    await db.delete(credential).where(eq(credential.id, id))
    return NextResponse.json({ success: true }, { status: 200 })
  } catch (error) {
    logger.error('Failed to delete credential', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
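Taken together, this route reads a single credential, rejects in-place edits of OAuth and env-backed credentials, and deletes a credential (removing the backing env key first when one exists). A minimal sketch of a client-side delete call, assuming the route is mounted at `/api/credentials/{id}` as the file path suggests:

```ts
// Hypothetical helper; status codes and response shapes are taken from the handlers above.
async function deleteCredential(credentialId: string): Promise<boolean> {
  const res = await fetch(`/api/credentials/${credentialId}`, { method: 'DELETE' })
  if (res.status === 403) throw new Error('Credential admin permission required')
  if (!res.ok) {
    const { error } = await res.json()
    throw new Error(error ?? 'Failed to delete credential')
  }
  const { success } = await res.json()
  return success === true
}
```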
81  apps/sim/app/api/credentials/bootstrap/route.ts  Normal file
@@ -0,0 +1,81 @@
import { db } from '@sim/db'
import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
  syncPersonalEnvCredentialsForUser,
  syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('CredentialsBootstrapAPI')

const bootstrapSchema = z.object({
  workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
})

/**
 * Ensures the current user's connected accounts and env vars are reflected as workspace credentials.
 */
export async function POST(request: NextRequest) {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  try {
    const parseResult = bootstrapSchema.safeParse(await request.json())
    if (!parseResult.success) {
      return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
    }

    const { workspaceId } = parseResult.data
    const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
    if (!workspaceAccess.hasAccess) {
      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
    }

    const [personalRow, workspaceRow] = await Promise.all([
      db
        .select({ variables: environment.variables })
        .from(environment)
        .where(eq(environment.userId, session.user.id))
        .limit(1),
      db
        .select({ variables: workspaceEnvironment.variables })
        .from(workspaceEnvironment)
        .where(eq(workspaceEnvironment.workspaceId, workspaceId))
        .limit(1),
    ])

    const personalKeys = Object.keys((personalRow[0]?.variables as Record<string, string>) || {})
    const workspaceKeys = Object.keys((workspaceRow[0]?.variables as Record<string, string>) || {})

    const [oauthSyncResult] = await Promise.all([
      syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id }),
      syncPersonalEnvCredentialsForUser({ userId: session.user.id, envKeys: personalKeys }),
      syncWorkspaceEnvCredentials({
        workspaceId,
        envKeys: workspaceKeys,
        actingUserId: session.user.id,
      }),
    ])

    return NextResponse.json({
      success: true,
      synced: {
        oauthCreated: oauthSyncResult.createdCredentials,
        oauthMembershipsUpdated: oauthSyncResult.updatedMemberships,
        personalEnvKeys: personalKeys.length,
        workspaceEnvKeys: workspaceKeys.length,
      },
    })
  } catch (error) {
    logger.error('Failed to bootstrap workspace credentials', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
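The bootstrap endpoint is effectively idempotent: it re-runs the three sync helpers and reports what they touched. A minimal sketch of a client call, assuming the route is served at `/api/credentials/bootstrap`:

```ts
// Hypothetical call, e.g. when a workspace's credentials settings panel opens.
async function bootstrapWorkspaceCredentials(workspaceId: string) {
  const res = await fetch('/api/credentials/bootstrap', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId }),
  })
  if (!res.ok) throw new Error(`Bootstrap failed with status ${res.status}`)
  // Shape mirrors the handler above: { success, synced: { oauthCreated, ... } }
  return res.json()
}
```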
73  apps/sim/app/api/credentials/draft/route.ts  Normal file
@@ -0,0 +1,73 @@
import { db } from '@sim/db'
import { pendingCredentialDraft } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, lt } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'

const logger = createLogger('CredentialDraftAPI')

const DRAFT_TTL_MS = 15 * 60 * 1000

const createDraftSchema = z.object({
  workspaceId: z.string().min(1),
  providerId: z.string().min(1),
  displayName: z.string().min(1),
})

export async function POST(request: Request) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = await request.json()
    const parsed = createDraftSchema.safeParse(body)
    if (!parsed.success) {
      return NextResponse.json({ error: 'Invalid request body' }, { status: 400 })
    }

    const { workspaceId, providerId, displayName } = parsed.data
    const userId = session.user.id
    const now = new Date()

    await db
      .delete(pendingCredentialDraft)
      .where(
        and(eq(pendingCredentialDraft.userId, userId), lt(pendingCredentialDraft.expiresAt, now))
      )

    await db
      .insert(pendingCredentialDraft)
      .values({
        id: crypto.randomUUID(),
        userId,
        workspaceId,
        providerId,
        displayName,
        expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
        createdAt: now,
      })
      .onConflictDoUpdate({
        target: [
          pendingCredentialDraft.userId,
          pendingCredentialDraft.providerId,
          pendingCredentialDraft.workspaceId,
        ],
        set: {
          displayName,
          expiresAt: new Date(now.getTime() + DRAFT_TTL_MS),
          createdAt: now,
        },
      })

    logger.info('Credential draft saved', { userId, workspaceId, providerId, displayName })

    return NextResponse.json({ success: true }, { status: 200 })
  } catch (error) {
    logger.error('Failed to save credential draft', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
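A draft records the display name a user chose before being redirected into an OAuth flow, expiring after 15 minutes and upserting on the (user, provider, workspace) key. A minimal sketch of how a client might stash a draft just before starting the provider flow, assuming the `/api/credentials/draft` path:

```ts
// Hypothetical: persist the draft, then begin the provider's OAuth consent flow (not shown here).
async function saveCredentialDraft(workspaceId: string, providerId: string, displayName: string) {
  const res = await fetch('/api/credentials/draft', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId, providerId, displayName }),
  })
  if (!res.ok) throw new Error('Failed to save credential draft')
}
```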
112  apps/sim/app/api/credentials/memberships/route.ts  Normal file
@@ -0,0 +1,112 @@
import { db } from '@sim/db'
import { credential, credentialMember } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'

const logger = createLogger('CredentialMembershipsAPI')

const leaveCredentialSchema = z.object({
  credentialId: z.string().min(1),
})

export async function GET() {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  try {
    const memberships = await db
      .select({
        membershipId: credentialMember.id,
        credentialId: credential.id,
        workspaceId: credential.workspaceId,
        type: credential.type,
        displayName: credential.displayName,
        providerId: credential.providerId,
        role: credentialMember.role,
        status: credentialMember.status,
        joinedAt: credentialMember.joinedAt,
      })
      .from(credentialMember)
      .innerJoin(credential, eq(credentialMember.credentialId, credential.id))
      .where(eq(credentialMember.userId, session.user.id))

    return NextResponse.json({ memberships }, { status: 200 })
  } catch (error) {
    logger.error('Failed to list credential memberships', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

export async function DELETE(request: NextRequest) {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  try {
    const parseResult = leaveCredentialSchema.safeParse({
      credentialId: new URL(request.url).searchParams.get('credentialId'),
    })
    if (!parseResult.success) {
      return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
    }

    const { credentialId } = parseResult.data
    const [membership] = await db
      .select()
      .from(credentialMember)
      .where(
        and(
          eq(credentialMember.credentialId, credentialId),
          eq(credentialMember.userId, session.user.id)
        )
      )
      .limit(1)

    if (!membership) {
      return NextResponse.json({ error: 'Membership not found' }, { status: 404 })
    }

    if (membership.status !== 'active') {
      return NextResponse.json({ success: true }, { status: 200 })
    }

    if (membership.role === 'admin') {
      const activeAdmins = await db
        .select({ id: credentialMember.id })
        .from(credentialMember)
        .where(
          and(
            eq(credentialMember.credentialId, credentialId),
            eq(credentialMember.role, 'admin'),
            eq(credentialMember.status, 'active')
          )
        )

      if (activeAdmins.length <= 1) {
        return NextResponse.json(
          { error: 'Cannot leave credential as the last active admin' },
          { status: 400 }
        )
      }
    }

    await db
      .update(credentialMember)
      .set({
        status: 'revoked',
        updatedAt: new Date(),
      })
      .where(eq(credentialMember.id, membership.id))

    return NextResponse.json({ success: true }, { status: 200 })
  } catch (error) {
    logger.error('Failed to leave credential', error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
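Leaving a credential revokes the caller's own membership rather than deleting the row, and the same last-active-admin guard applies. A minimal sketch of the corresponding client call, assuming the `/api/credentials/memberships` path with the `credentialId` query parameter the handler above reads:

```ts
// Hypothetical helper for a "Leave credential" action.
async function leaveCredential(credentialId: string): Promise<void> {
  const params = new URLSearchParams({ credentialId })
  const res = await fetch(`/api/credentials/memberships?${params}`, { method: 'DELETE' })
  if (res.status === 400) {
    throw new Error('Cannot leave credential as the last active admin')
  }
  if (!res.ok) throw new Error('Failed to leave credential')
}
```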
468  apps/sim/app/api/credentials/route.ts  Normal file
@@ -0,0 +1,468 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getWorkspaceMemberUserIds } from '@/lib/credentials/environment'
|
||||
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
|
||||
import { getServiceConfigByProviderId } from '@/lib/oauth'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
import { isValidEnvVarName } from '@/executor/constants'
|
||||
|
||||
const logger = createLogger('CredentialsAPI')
|
||||
|
||||
const credentialTypeSchema = z.enum(['oauth', 'env_workspace', 'env_personal'])
|
||||
|
||||
function normalizeEnvKeyInput(raw: string): string {
|
||||
const trimmed = raw.trim()
|
||||
const wrappedMatch = /^\{\{\s*([A-Za-z0-9_]+)\s*\}\}$/.exec(trimmed)
|
||||
return wrappedMatch ? wrappedMatch[1] : trimmed
|
||||
}
|
||||
|
||||
const listCredentialsSchema = z.object({
|
||||
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
|
||||
type: credentialTypeSchema.optional(),
|
||||
providerId: z.string().optional(),
|
||||
})
|
||||
|
||||
const createCredentialSchema = z
|
||||
.object({
|
||||
workspaceId: z.string().uuid('Workspace ID must be a valid UUID'),
|
||||
type: credentialTypeSchema,
|
||||
displayName: z.string().trim().min(1).max(255).optional(),
|
||||
providerId: z.string().trim().min(1).optional(),
|
||||
accountId: z.string().trim().min(1).optional(),
|
||||
envKey: z.string().trim().min(1).optional(),
|
||||
envOwnerUserId: z.string().trim().min(1).optional(),
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
if (data.type === 'oauth') {
|
||||
if (!data.accountId) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'accountId is required for oauth credentials',
|
||||
path: ['accountId'],
|
||||
})
|
||||
}
|
||||
if (!data.providerId) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'providerId is required for oauth credentials',
|
||||
path: ['providerId'],
|
||||
})
|
||||
}
|
||||
if (!data.displayName) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'displayName is required for oauth credentials',
|
||||
path: ['displayName'],
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const normalizedEnvKey = data.envKey ? normalizeEnvKeyInput(data.envKey) : ''
|
||||
if (!normalizedEnvKey) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'envKey is required for env credentials',
|
||||
path: ['envKey'],
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (!isValidEnvVarName(normalizedEnvKey)) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'envKey must contain only letters, numbers, and underscores',
|
||||
path: ['envKey'],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
interface ExistingCredentialSourceParams {
|
||||
workspaceId: string
|
||||
type: 'oauth' | 'env_workspace' | 'env_personal'
|
||||
accountId?: string | null
|
||||
envKey?: string | null
|
||||
envOwnerUserId?: string | null
|
||||
}
|
||||
|
||||
async function findExistingCredentialBySource(params: ExistingCredentialSourceParams) {
|
||||
const { workspaceId, type, accountId, envKey, envOwnerUserId } = params
|
||||
|
||||
if (type === 'oauth' && accountId) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'oauth'),
|
||||
eq(credential.accountId, accountId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
if (type === 'env_workspace' && envKey) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_workspace'),
|
||||
eq(credential.envKey, envKey)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
if (type === 'env_personal' && envKey && envOwnerUserId) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envKey, envKey),
|
||||
eq(credential.envOwnerUserId, envOwnerUserId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const rawWorkspaceId = searchParams.get('workspaceId')
|
||||
const rawType = searchParams.get('type')
|
||||
const rawProviderId = searchParams.get('providerId')
|
||||
const parseResult = listCredentialsSchema.safeParse({
|
||||
workspaceId: rawWorkspaceId?.trim(),
|
||||
type: rawType?.trim() || undefined,
|
||||
providerId: rawProviderId?.trim() || undefined,
|
||||
})
|
||||
|
||||
if (!parseResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid credential list request`, {
|
||||
workspaceId: rawWorkspaceId,
|
||||
type: rawType,
|
||||
providerId: rawProviderId,
|
||||
errors: parseResult.error.errors,
|
||||
})
|
||||
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
|
||||
}
|
||||
|
||||
const { workspaceId, type, providerId } = parseResult.data
|
||||
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
|
||||
|
||||
if (!workspaceAccess.hasAccess) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
if (!type || type === 'oauth') {
|
||||
await syncWorkspaceOAuthCredentialsForUser({ workspaceId, userId: session.user.id })
|
||||
}
|
||||
|
||||
const whereClauses = [
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credentialMember.userId, session.user.id),
|
||||
eq(credentialMember.status, 'active'),
|
||||
]
|
||||
|
||||
if (type) {
|
||||
whereClauses.push(eq(credential.type, type))
|
||||
}
|
||||
if (providerId) {
|
||||
whereClauses.push(eq(credential.providerId, providerId))
|
||||
}
|
||||
|
||||
const credentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
type: credential.type,
|
||||
displayName: credential.displayName,
|
||||
providerId: credential.providerId,
|
||||
accountId: credential.accountId,
|
||||
envKey: credential.envKey,
|
||||
envOwnerUserId: credential.envOwnerUserId,
|
||||
createdBy: credential.createdBy,
|
||||
createdAt: credential.createdAt,
|
||||
updatedAt: credential.updatedAt,
|
||||
role: credentialMember.role,
|
||||
})
|
||||
.from(credential)
|
||||
.innerJoin(
|
||||
credentialMember,
|
||||
and(
|
||||
eq(credentialMember.credentialId, credential.id),
|
||||
eq(credentialMember.userId, session.user.id),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.where(and(...whereClauses))
|
||||
|
||||
return NextResponse.json({ credentials })
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to list credentials`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const parseResult = createCredentialSchema.safeParse(body)
|
||||
|
||||
if (!parseResult.success) {
|
||||
return NextResponse.json({ error: parseResult.error.errors[0]?.message }, { status: 400 })
|
||||
}
|
||||
|
||||
const { workspaceId, type, displayName, providerId, accountId, envKey, envOwnerUserId } =
|
||||
parseResult.data
|
||||
|
||||
const workspaceAccess = await checkWorkspaceAccess(workspaceId, session.user.id)
|
||||
if (!workspaceAccess.canWrite) {
|
||||
return NextResponse.json({ error: 'Write permission required' }, { status: 403 })
|
||||
}
|
||||
|
||||
let resolvedDisplayName = displayName?.trim() ?? ''
|
||||
let resolvedProviderId: string | null = providerId ?? null
|
||||
let resolvedAccountId: string | null = accountId ?? null
|
||||
const resolvedEnvKey: string | null = envKey ? normalizeEnvKeyInput(envKey) : null
|
||||
let resolvedEnvOwnerUserId: string | null = null
|
||||
|
||||
if (type === 'oauth') {
|
||||
const [accountRow] = await db
|
||||
.select({
|
||||
id: account.id,
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
accountId: account.accountId,
|
||||
})
|
||||
.from(account)
|
||||
.where(eq(account.id, accountId!))
|
||||
.limit(1)
|
||||
|
||||
if (!accountRow) {
|
||||
return NextResponse.json({ error: 'OAuth account not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
if (accountRow.userId !== session.user.id) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Only account owners can create oauth credentials for an account' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
if (providerId !== accountRow.providerId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'providerId does not match the selected OAuth account' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
if (!resolvedDisplayName) {
|
||||
resolvedDisplayName =
|
||||
getServiceConfigByProviderId(accountRow.providerId)?.name || accountRow.providerId
|
||||
}
|
||||
} else if (type === 'env_personal') {
|
||||
resolvedEnvOwnerUserId = envOwnerUserId ?? session.user.id
|
||||
if (resolvedEnvOwnerUserId !== session.user.id) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Only the current user can create personal env credentials for themselves' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
resolvedProviderId = null
|
||||
resolvedAccountId = null
|
||||
resolvedDisplayName = resolvedEnvKey || ''
|
||||
} else {
|
||||
resolvedProviderId = null
|
||||
resolvedAccountId = null
|
||||
resolvedEnvOwnerUserId = null
|
||||
resolvedDisplayName = resolvedEnvKey || ''
|
||||
}
|
||||
|
||||
if (!resolvedDisplayName) {
|
||||
return NextResponse.json({ error: 'Display name is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const existingCredential = await findExistingCredentialBySource({
|
||||
workspaceId,
|
||||
type,
|
||||
accountId: resolvedAccountId,
|
||||
envKey: resolvedEnvKey,
|
||||
envOwnerUserId: resolvedEnvOwnerUserId,
|
||||
})
|
||||
|
||||
if (existingCredential) {
|
||||
const [membership] = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
status: credentialMember.status,
|
||||
role: credentialMember.role,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, existingCredential.id),
|
||||
eq(credentialMember.userId, session.user.id)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership || membership.status !== 'active') {
|
||||
return NextResponse.json(
|
||||
{ error: 'A credential with this source already exists in this workspace' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
type === 'oauth' &&
|
||||
membership.role === 'admin' &&
|
||||
resolvedDisplayName &&
|
||||
resolvedDisplayName !== existingCredential.displayName
|
||||
) {
|
||||
await db
|
||||
.update(credential)
|
||||
.set({
|
||||
displayName: resolvedDisplayName,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(credential.id, existingCredential.id))
|
||||
|
||||
const [updatedCredential] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(eq(credential.id, existingCredential.id))
|
||||
.limit(1)
|
||||
|
||||
return NextResponse.json(
|
||||
{ credential: updatedCredential ?? existingCredential },
|
||||
{ status: 200 }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json({ credential: existingCredential }, { status: 200 })
|
||||
}
|
||||
|
||||
const now = new Date()
|
||||
const credentialId = crypto.randomUUID()
|
||||
const [workspaceRow] = await db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
await db.transaction(async (tx) => {
|
||||
await tx.insert(credential).values({
|
||||
id: credentialId,
|
||||
workspaceId,
|
||||
type,
|
||||
displayName: resolvedDisplayName,
|
||||
providerId: resolvedProviderId,
|
||||
accountId: resolvedAccountId,
|
||||
envKey: resolvedEnvKey,
|
||||
envOwnerUserId: resolvedEnvOwnerUserId,
|
||||
createdBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
if (type === 'env_workspace' && workspaceRow?.ownerId) {
|
||||
const workspaceUserIds = await getWorkspaceMemberUserIds(workspaceId)
|
||||
if (workspaceUserIds.length > 0) {
|
||||
for (const memberUserId of workspaceUserIds) {
|
||||
await tx.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: memberUserId,
|
||||
role: memberUserId === workspaceRow.ownerId ? 'admin' : 'member',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
} else {
|
||||
await tx.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: session.user.id,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: session.user.id,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
const [created] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
return NextResponse.json({ credential: created }, { status: 201 })
|
||||
} catch (error: any) {
|
||||
if (error?.code === '23505') {
|
||||
return NextResponse.json(
|
||||
{ error: 'A credential with this source already exists' },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
if (error?.code === '23503') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid credential reference or membership target' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
if (error?.code === '23514') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Credential source data failed validation checks' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
logger.error(`[${requestId}] Credential create failure details`, {
|
||||
code: error?.code,
|
||||
detail: error?.detail,
|
||||
constraint: error?.constraint,
|
||||
table: error?.table,
|
||||
message: error?.message,
|
||||
})
|
||||
logger.error(`[${requestId}] Failed to create credential`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
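The collection route above lists a member's active credentials for a workspace and creates new ones, reusing an existing credential when the same source (OAuth account or env key) is already registered. A minimal sketch of the create call, assuming the route is served at `/api/credentials`:

```ts
// Hypothetical: register a workspace-level env credential for an existing variable.
async function createWorkspaceEnvCredential(workspaceId: string, envKey: string) {
  const res = await fetch('/api/credentials', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId, type: 'env_workspace', envKey }),
  })
  if (res.status === 409) throw new Error('A credential with this source already exists')
  if (!res.ok) throw new Error(`Create failed with status ${res.status}`)
  const { credential } = await res.json()
  return credential // 201 on create, 200 when an existing credential was reused
}
```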
@@ -7,6 +7,7 @@ import { z } from 'zod'
 import { getSession } from '@/lib/auth'
 import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { syncPersonalEnvCredentialsForUser } from '@/lib/credentials/environment'
 import type { EnvironmentVariable } from '@/stores/settings/environment'

 const logger = createLogger('EnvironmentAPI')
@@ -53,6 +54,11 @@ export async function POST(req: NextRequest) {
       },
     })

+    await syncPersonalEnvCredentialsForUser({
+      userId: session.user.id,
+      envKeys: Object.keys(variables),
+    })
+
     return NextResponse.json({ success: true })
   } catch (validationError) {
     if (validationError instanceof z.ZodError) {
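Every write to a user's personal environment now triggers the same sync the credentials routes use, so env-backed credentials track the variables that actually exist. The helper's contract, inferred from its call sites in this diff; this is a hypothetical sketch of the shape only, not the real implementation:

```ts
// Assumed shape: both call sites pass a userId plus the full list of current env keys.
interface PersonalEnvSyncInput {
  userId: string
  envKeys: string[]
}

// Hypothetical signature; the actual helper lives in '@/lib/credentials/environment'.
declare function syncPersonalEnvCredentialsForUser(input: PersonalEnvSyncInput): Promise<void>
```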
@@ -1,170 +0,0 @@
|
||||
/**
|
||||
* POST /api/referral-code/redeem
|
||||
*
|
||||
* Redeem a referral/promo code to receive bonus credits.
|
||||
*
|
||||
* Body:
|
||||
* - code: string — The referral code to redeem
|
||||
*
|
||||
* Response: { redeemed: boolean, bonusAmount?: number, error?: string }
|
||||
*
|
||||
* Constraints:
|
||||
* - Enterprise users cannot redeem codes
|
||||
* - One redemption per user, ever (unique constraint on userId)
|
||||
* - One redemption per organization for team users (partial unique on organizationId)
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { applyBonusCredits } from '@/lib/billing/credits/bonus'
|
||||
|
||||
const logger = createLogger('ReferralCodeRedemption')
|
||||
|
||||
const RedeemCodeSchema = z.object({
|
||||
code: z.string().min(1, 'Code is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: Request) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { code } = RedeemCodeSchema.parse(body)
|
||||
|
||||
const subscription = await getHighestPrioritySubscription(session.user.id)
|
||||
|
||||
if (subscription?.plan === 'enterprise') {
|
||||
return NextResponse.json({
|
||||
redeemed: false,
|
||||
error: 'Enterprise accounts cannot redeem referral codes',
|
||||
})
|
||||
}
|
||||
|
||||
const isTeam = subscription?.plan === 'team'
|
||||
const orgId = isTeam ? subscription.referenceId : null
|
||||
|
||||
const normalizedCode = code.trim().toUpperCase()
|
||||
|
||||
const [campaign] = await db
|
||||
.select()
|
||||
.from(referralCampaigns)
|
||||
.where(and(eq(referralCampaigns.code, normalizedCode), eq(referralCampaigns.isActive, true)))
|
||||
.limit(1)
|
||||
|
||||
if (!campaign) {
|
||||
logger.info('Invalid code redemption attempt', {
|
||||
userId: session.user.id,
|
||||
code: normalizedCode,
|
||||
})
|
||||
return NextResponse.json({ error: 'Invalid or expired code' }, { status: 404 })
|
||||
}
|
||||
|
||||
const [existingUserAttribution] = await db
|
||||
.select({ id: referralAttribution.id })
|
||||
.from(referralAttribution)
|
||||
.where(eq(referralAttribution.userId, session.user.id))
|
||||
.limit(1)
|
||||
|
||||
if (existingUserAttribution) {
|
||||
return NextResponse.json({
|
||||
redeemed: false,
|
||||
error: 'You have already redeemed a code',
|
||||
})
|
||||
}
|
||||
|
||||
if (orgId) {
|
||||
const [existingOrgAttribution] = await db
|
||||
.select({ id: referralAttribution.id })
|
||||
.from(referralAttribution)
|
||||
.where(eq(referralAttribution.organizationId, orgId))
|
||||
.limit(1)
|
||||
|
||||
if (existingOrgAttribution) {
|
||||
return NextResponse.json({
|
||||
redeemed: false,
|
||||
error: 'A code has already been redeemed for your organization',
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const [existingStats] = await db
|
||||
.select({ id: userStats.id })
|
||||
.from(userStats)
|
||||
.where(eq(userStats.userId, session.user.id))
|
||||
.limit(1)
|
||||
|
||||
if (!existingStats) {
|
||||
await db.insert(userStats).values({
|
||||
id: nanoid(),
|
||||
userId: session.user.id,
|
||||
})
|
||||
}
|
||||
|
||||
const bonusAmount = Number(campaign.bonusCreditAmount)
|
||||
|
||||
let redeemed = false
|
||||
await db.transaction(async (tx) => {
|
||||
const result = await tx
|
||||
.insert(referralAttribution)
|
||||
.values({
|
||||
id: nanoid(),
|
||||
userId: session.user.id,
|
||||
organizationId: orgId,
|
||||
campaignId: campaign.id,
|
||||
utmSource: null,
|
||||
utmMedium: null,
|
||||
utmCampaign: null,
|
||||
utmContent: null,
|
||||
referrerUrl: null,
|
||||
landingPage: null,
|
||||
bonusCreditAmount: bonusAmount.toString(),
|
||||
})
|
||||
.onConflictDoNothing()
|
||||
.returning({ id: referralAttribution.id })
|
||||
|
||||
if (result.length > 0) {
|
||||
await applyBonusCredits(session.user.id, bonusAmount, tx)
|
||||
redeemed = true
|
||||
}
|
||||
})
|
||||
|
||||
if (redeemed) {
|
||||
logger.info('Referral code redeemed', {
|
||||
userId: session.user.id,
|
||||
organizationId: orgId,
|
||||
code: normalizedCode,
|
||||
campaignId: campaign.id,
|
||||
campaignName: campaign.name,
|
||||
bonusAmount,
|
||||
})
|
||||
}
|
||||
|
||||
if (!redeemed) {
|
||||
return NextResponse.json({
|
||||
redeemed: false,
|
||||
error: 'You have already redeemed a code',
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
redeemed: true,
|
||||
bonusAmount,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
|
||||
}
|
||||
logger.error('Referral code redemption error', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -191,84 +191,3 @@ export async function GET(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Delete a label from a page
|
||||
export async function DELETE(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
cloudId: providedCloudId,
|
||||
pageId,
|
||||
labelName,
|
||||
} = await request.json()
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!pageId) {
|
||||
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!labelName) {
|
||||
return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
||||
if (!pageIdValidation.isValid) {
|
||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const encodedLabel = encodeURIComponent(labelName.trim())
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to delete Confluence label (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
pageId,
|
||||
labelName,
|
||||
deleted: true,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error deleting Confluence label:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,103 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluencePagesByLabelAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
const labelId = searchParams.get('labelId')
|
||||
const providedCloudId = searchParams.get('cloudId')
|
||||
const limit = searchParams.get('limit') || '50'
|
||||
const cursor = searchParams.get('cursor')
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!labelId) {
|
||||
return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
|
||||
if (!labelIdValidation.isValid) {
|
||||
return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(Number(limit), 250)))
|
||||
if (cursor) {
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const pages = (data.results || []).map((page: any) => ({
|
||||
id: page.id,
|
||||
title: page.title,
|
||||
status: page.status ?? null,
|
||||
spaceId: page.spaceId ?? null,
|
||||
parentId: page.parentId ?? null,
|
||||
authorId: page.authorId ?? null,
|
||||
createdAt: page.createdAt ?? null,
|
||||
version: page.version ?? null,
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
pages,
|
||||
labelId,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error getting pages by label:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,98 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluenceSpaceLabelsAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const domain = searchParams.get('domain')
|
||||
const accessToken = searchParams.get('accessToken')
|
||||
const spaceId = searchParams.get('spaceId')
|
||||
const providedCloudId = searchParams.get('cloudId')
|
||||
const limit = searchParams.get('limit') || '25'
|
||||
const cursor = searchParams.get('cursor')
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!spaceId) {
|
||||
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
|
||||
if (!spaceIdValidation.isValid) {
|
||||
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(Number(limit), 250)))
|
||||
if (cursor) {
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const labels = (data.results || []).map((label: any) => ({
|
||||
id: label.id,
|
||||
name: label.name,
|
||||
prefix: label.prefix || 'global',
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
labels,
|
||||
spaceId,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error listing space labels:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -66,12 +66,6 @@
|
||||
* Credits:
|
||||
* POST /api/v1/admin/credits - Issue credits to user (by userId or email)
|
||||
*
|
||||
* Referral Campaigns:
|
||||
* GET /api/v1/admin/referral-campaigns - List campaigns (?active=true/false)
|
||||
* POST /api/v1/admin/referral-campaigns - Create campaign
|
||||
* GET /api/v1/admin/referral-campaigns/:id - Get campaign details
|
||||
* PATCH /api/v1/admin/referral-campaigns/:id - Update campaign fields
|
||||
*
|
||||
* Access Control (Permission Groups):
|
||||
* GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
|
||||
* DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
|
||||
@@ -103,7 +97,6 @@ export type {
|
||||
AdminOrganization,
|
||||
AdminOrganizationBillingSummary,
|
||||
AdminOrganizationDetail,
|
||||
AdminReferralCampaign,
|
||||
AdminSeatAnalytics,
|
||||
AdminSingleResponse,
|
||||
AdminSubscription,
|
||||
@@ -118,7 +111,6 @@ export type {
|
||||
AdminWorkspaceMember,
|
||||
DbMember,
|
||||
DbOrganization,
|
||||
DbReferralCampaign,
|
||||
DbSubscription,
|
||||
DbUser,
|
||||
DbUserStats,
|
||||
@@ -147,7 +139,6 @@ export {
|
||||
parseWorkflowVariables,
|
||||
toAdminFolder,
|
||||
toAdminOrganization,
|
||||
toAdminReferralCampaign,
|
||||
toAdminSubscription,
|
||||
toAdminUser,
|
||||
toAdminWorkflow,
|
||||
|
||||
@@ -1,142 +0,0 @@
|
||||
/**
|
||||
* GET /api/v1/admin/referral-campaigns/:id
|
||||
*
|
||||
* Get a single referral campaign by ID.
|
||||
*
|
||||
* PATCH /api/v1/admin/referral-campaigns/:id
|
||||
*
|
||||
* Update campaign fields. All fields are optional.
|
||||
*
|
||||
* Body:
|
||||
* - name: string (non-empty) - Campaign name
|
||||
* - bonusCreditAmount: number (> 0) - Bonus credits in dollars
|
||||
* - isActive: boolean - Enable/disable the campaign
|
||||
* - code: string | null (min 6 chars, auto-uppercased, null to remove) - Redeemable code
|
||||
* - utmSource: string | null - UTM source match (null = wildcard)
|
||||
* - utmMedium: string | null - UTM medium match (null = wildcard)
|
||||
* - utmCampaign: string | null - UTM campaign match (null = wildcard)
|
||||
* - utmContent: string | null - UTM content match (null = wildcard)
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { referralCampaigns } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
|
||||
import {
|
||||
badRequestResponse,
|
||||
internalErrorResponse,
|
||||
notFoundResponse,
|
||||
singleResponse,
|
||||
} from '@/app/api/v1/admin/responses'
|
||||
import { toAdminReferralCampaign } from '@/app/api/v1/admin/types'
|
||||
|
||||
const logger = createLogger('AdminReferralCampaignDetailAPI')
|
||||
|
||||
interface RouteParams {
|
||||
id: string
|
||||
}
|
||||
|
||||
export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
|
||||
try {
|
||||
const { id: campaignId } = await context.params
|
||||
|
||||
const [campaign] = await db
|
||||
.select()
|
||||
.from(referralCampaigns)
|
||||
.where(eq(referralCampaigns.id, campaignId))
|
||||
.limit(1)
|
||||
|
||||
if (!campaign) {
|
||||
return notFoundResponse('Campaign')
|
||||
}
|
||||
|
||||
logger.info(`Admin API: Retrieved referral campaign ${campaignId}`)
|
||||
|
||||
return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
|
||||
} catch (error) {
|
||||
logger.error('Admin API: Failed to get referral campaign', { error })
|
||||
return internalErrorResponse('Failed to get referral campaign')
|
||||
}
|
||||
})
|
||||
|
||||
export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
|
||||
try {
|
||||
const { id: campaignId } = await context.params
|
||||
const body = await request.json()
|
||||
|
||||
const [existing] = await db
|
||||
.select()
|
||||
.from(referralCampaigns)
|
||||
.where(eq(referralCampaigns.id, campaignId))
|
||||
.limit(1)
|
||||
|
||||
if (!existing) {
|
||||
return notFoundResponse('Campaign')
|
||||
}
|
||||
|
||||
const updateData: Record<string, unknown> = { updatedAt: new Date() }
|
||||
|
||||
if (body.name !== undefined) {
|
||||
if (typeof body.name !== 'string' || body.name.trim().length === 0) {
|
||||
return badRequestResponse('name must be a non-empty string')
|
||||
}
|
||||
updateData.name = body.name.trim()
|
||||
}
|
||||
|
||||
if (body.bonusCreditAmount !== undefined) {
|
||||
if (
|
||||
typeof body.bonusCreditAmount !== 'number' ||
|
||||
!Number.isFinite(body.bonusCreditAmount) ||
|
||||
body.bonusCreditAmount <= 0
|
||||
) {
|
||||
return badRequestResponse('bonusCreditAmount must be a positive number')
|
||||
}
|
||||
updateData.bonusCreditAmount = body.bonusCreditAmount.toString()
|
||||
}
|
||||
|
||||
if (body.isActive !== undefined) {
|
||||
if (typeof body.isActive !== 'boolean') {
|
||||
return badRequestResponse('isActive must be a boolean')
|
||||
}
|
||||
updateData.isActive = body.isActive
|
||||
}
|
||||
|
||||
if (body.code !== undefined) {
|
||||
if (body.code !== null) {
|
||||
if (typeof body.code !== 'string') {
|
||||
return badRequestResponse('code must be a string or null')
|
||||
}
|
||||
if (body.code.trim().length < 6) {
|
||||
return badRequestResponse('code must be at least 6 characters')
|
||||
}
|
||||
}
|
||||
updateData.code = body.code ? body.code.trim().toUpperCase() : null
|
||||
}
|
||||
|
||||
for (const field of ['utmSource', 'utmMedium', 'utmCampaign', 'utmContent'] as const) {
|
||||
if (body[field] !== undefined) {
|
||||
if (body[field] !== null && typeof body[field] !== 'string') {
|
||||
return badRequestResponse(`${field} must be a string or null`)
|
||||
}
|
||||
updateData[field] = body[field] || null
|
||||
}
|
||||
}
|
||||
|
||||
const [updated] = await db
|
||||
.update(referralCampaigns)
|
||||
.set(updateData)
|
||||
.where(eq(referralCampaigns.id, campaignId))
|
||||
.returning()
|
||||
|
||||
logger.info(`Admin API: Updated referral campaign ${campaignId}`, {
|
||||
fields: Object.keys(updateData).filter((k) => k !== 'updatedAt'),
|
||||
})
|
||||
|
||||
return singleResponse(toAdminReferralCampaign(updated, getBaseUrl()))
|
||||
} catch (error) {
|
||||
logger.error('Admin API: Failed to update referral campaign', { error })
|
||||
return internalErrorResponse('Failed to update referral campaign')
|
||||
}
|
||||
})
|
||||
@@ -1,140 +0,0 @@
/**
 * GET /api/v1/admin/referral-campaigns
 *
 * List referral campaigns with optional filtering and pagination.
 *
 * Query Parameters:
 * - active: string (optional) - Filter by active status ('true' or 'false')
 * - limit: number (default: 50, max: 250)
 * - offset: number (default: 0)
 *
 * POST /api/v1/admin/referral-campaigns
 *
 * Create a new referral campaign.
 *
 * Body:
 * - name: string (required) - Campaign name
 * - bonusCreditAmount: number (required, > 0) - Bonus credits in dollars
 * - code: string | null (optional, min 6 chars, auto-uppercased) - Redeemable code
 * - utmSource: string | null (optional) - UTM source match (null = wildcard)
 * - utmMedium: string | null (optional) - UTM medium match (null = wildcard)
 * - utmCampaign: string | null (optional) - UTM campaign match (null = wildcard)
 * - utmContent: string | null (optional) - UTM content match (null = wildcard)
 */

import { db } from '@sim/db'
import { referralCampaigns } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { count, eq, type SQL } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {
  badRequestResponse,
  internalErrorResponse,
  listResponse,
  singleResponse,
} from '@/app/api/v1/admin/responses'
import {
  type AdminReferralCampaign,
  createPaginationMeta,
  parsePaginationParams,
  toAdminReferralCampaign,
} from '@/app/api/v1/admin/types'

const logger = createLogger('AdminReferralCampaignsAPI')

export const GET = withAdminAuth(async (request) => {
  const url = new URL(request.url)
  const { limit, offset } = parsePaginationParams(url)
  const activeFilter = url.searchParams.get('active')

  try {
    const conditions: SQL<unknown>[] = []
    if (activeFilter === 'true') {
      conditions.push(eq(referralCampaigns.isActive, true))
    } else if (activeFilter === 'false') {
      conditions.push(eq(referralCampaigns.isActive, false))
    }

    const whereClause = conditions.length > 0 ? conditions[0] : undefined
    const baseUrl = getBaseUrl()

    const [countResult, campaigns] = await Promise.all([
      db.select({ total: count() }).from(referralCampaigns).where(whereClause),
      db
        .select()
        .from(referralCampaigns)
        .where(whereClause)
        .orderBy(referralCampaigns.createdAt)
        .limit(limit)
        .offset(offset),
    ])

    const total = countResult[0].total
    const data: AdminReferralCampaign[] = campaigns.map((c) => toAdminReferralCampaign(c, baseUrl))
    const pagination = createPaginationMeta(total, limit, offset)

    logger.info(`Admin API: Listed ${data.length} referral campaigns (total: ${total})`)

    return listResponse(data, pagination)
  } catch (error) {
    logger.error('Admin API: Failed to list referral campaigns', { error })
    return internalErrorResponse('Failed to list referral campaigns')
  }
})

export const POST = withAdminAuth(async (request) => {
  try {
    const body = await request.json()
    const { name, code, utmSource, utmMedium, utmCampaign, utmContent, bonusCreditAmount } = body

    if (!name || typeof name !== 'string') {
      return badRequestResponse('name is required and must be a string')
    }

    if (
      typeof bonusCreditAmount !== 'number' ||
      !Number.isFinite(bonusCreditAmount) ||
      bonusCreditAmount <= 0
    ) {
      return badRequestResponse('bonusCreditAmount must be a positive number')
    }

    if (code !== undefined && code !== null) {
      if (typeof code !== 'string') {
        return badRequestResponse('code must be a string or null')
      }
      if (code.trim().length < 6) {
        return badRequestResponse('code must be at least 6 characters')
      }
    }

    const id = nanoid()

    const [campaign] = await db
      .insert(referralCampaigns)
      .values({
        id,
        name,
        code: code ? code.trim().toUpperCase() : null,
        utmSource: utmSource || null,
        utmMedium: utmMedium || null,
        utmCampaign: utmCampaign || null,
        utmContent: utmContent || null,
        bonusCreditAmount: bonusCreditAmount.toString(),
      })
      .returning()

    logger.info(`Admin API: Created referral campaign ${id}`, {
      name,
      code: campaign.code,
      bonusCreditAmount,
    })

    return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
  } catch (error) {
    logger.error('Admin API: Failed to create referral campaign', { error })
    return internalErrorResponse('Failed to create referral campaign')
  }
})
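For reference, a minimal sketch of how an admin client could call the POST route documented above. The path and body fields come from the route's own doc comment; the `Authorization` header name and the exact envelope returned by `singleResponse` are assumptions, since `withAdminAuth` and the response helpers are not shown in this diff.

```ts
// Sketch: create a referral campaign through the admin API (auth header assumed).
async function createReferralCampaign(adminToken: string) {
  const res = await fetch('/api/v1/admin/referral-campaigns', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${adminToken}`, // assumption: bearer-style admin token
    },
    body: JSON.stringify({
      name: 'Spring promo', // required
      bonusCreditAmount: 10, // required, dollars, must be > 0
      code: 'SPRING24', // optional, min 6 chars, stored uppercased
      utmSource: 'newsletter', // optional; omitted/null acts as a wildcard matcher
    }),
  })
  if (!res.ok) throw new Error(`Create campaign failed: ${res.status}`)
  // Envelope shape comes from singleResponse(); assumed to contain the created campaign.
  return res.json()
}
```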
@@ -8,7 +8,6 @@
import type {
  member,
  organization,
  referralCampaigns,
  subscription,
  user,
  userStats,

@@ -32,7 +31,6 @@ export type DbOrganization = InferSelectModel<typeof organization>
export type DbSubscription = InferSelectModel<typeof subscription>
export type DbMember = InferSelectModel<typeof member>
export type DbUserStats = InferSelectModel<typeof userStats>
export type DbReferralCampaign = InferSelectModel<typeof referralCampaigns>

// =============================================================================
// Pagination

@@ -648,49 +646,3 @@ export interface AdminDeployResult {
export interface AdminUndeployResult {
  isDeployed: boolean
}

// =============================================================================
// Referral Campaign Types
// =============================================================================

export interface AdminReferralCampaign {
  id: string
  name: string
  code: string | null
  utmSource: string | null
  utmMedium: string | null
  utmCampaign: string | null
  utmContent: string | null
  bonusCreditAmount: string
  isActive: boolean
  signupUrl: string | null
  createdAt: string
  updatedAt: string
}

export function toAdminReferralCampaign(
  dbCampaign: DbReferralCampaign,
  baseUrl: string
): AdminReferralCampaign {
  const utmParams = new URLSearchParams()
  if (dbCampaign.utmSource) utmParams.set('utm_source', dbCampaign.utmSource)
  if (dbCampaign.utmMedium) utmParams.set('utm_medium', dbCampaign.utmMedium)
  if (dbCampaign.utmCampaign) utmParams.set('utm_campaign', dbCampaign.utmCampaign)
  if (dbCampaign.utmContent) utmParams.set('utm_content', dbCampaign.utmContent)
  const query = utmParams.toString()

  return {
    id: dbCampaign.id,
    name: dbCampaign.name,
    code: dbCampaign.code,
    utmSource: dbCampaign.utmSource,
    utmMedium: dbCampaign.utmMedium,
    utmCampaign: dbCampaign.utmCampaign,
    utmContent: dbCampaign.utmContent,
    bonusCreditAmount: dbCampaign.bonusCreditAmount,
    isActive: dbCampaign.isActive,
    signupUrl: query ? `${baseUrl}/signup?${query}` : null,
    createdAt: dbCampaign.createdAt.toISOString(),
    updatedAt: dbCampaign.updatedAt.toISOString(),
  }
}
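As a quick illustration of the mapper above, a campaign row with UTM matchers set serializes roughly as follows. The sample values are hypothetical, and the base URL is whatever `getBaseUrl()` returns for the deployment.

```ts
// Hypothetical row, shaped like DbReferralCampaign above (extra columns omitted).
const row = {
  id: 'cmp_1',
  name: 'Spring promo',
  code: 'SPRING24',
  utmSource: 'newsletter',
  utmMedium: 'email',
  utmCampaign: null,
  utmContent: null,
  bonusCreditAmount: '10',
  isActive: true,
  createdAt: new Date('2024-03-01T00:00:00Z'),
  updatedAt: new Date('2024-03-01T00:00:00Z'),
}

// toAdminReferralCampaign(row, 'https://app.example.com').signupUrl
// => 'https://app.example.com/signup?utm_source=newsletter&utm_medium=email'
// A campaign with no UTM matchers set yields signupUrl: null.
```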
@@ -29,7 +29,7 @@ const patchBodySchema = z
    description: z
      .string()
      .trim()
      .max(2000, 'Description must be 2000 characters or less')
      .max(500, 'Description must be 500 characters or less')
      .nullable()
      .optional(),
    isActive: z.literal(true).optional(), // Set to true to activate this version
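A small standalone sketch of how the tightened limit behaves, assuming standard Zod semantics and with the surrounding schema fields omitted:

```ts
import { z } from 'zod'

// Reproduction of the description rule shown in the hunk above.
const description = z
  .string()
  .trim()
  .max(500, 'Description must be 500 characters or less')
  .nullable()
  .optional()

description.safeParse('x'.repeat(501)) // => { success: false, ... } (exceeds 500 chars)
description.safeParse('Short note') // => { success: true, data: 'Short note' }
description.safeParse(null) // => { success: true, data: null }
```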
@@ -12,7 +12,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,27 +700,15 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
  let isStreamClosed = false

  const eventWriter = createExecutionEventWriter(executionId)
  setExecutionMeta(executionId, {
    status: 'active',
    userId: actorUserId,
    workflowId,
  }).catch(() => {})

  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
      let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null

      const sendEvent = (event: ExecutionEvent) => {
        if (!isStreamClosed) {
          try {
            controller.enqueue(encodeSSEEvent(event))
          } catch {
            isStreamClosed = true
          }
        }
        if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
          eventWriter.write(event).catch(() => {})
        if (isStreamClosed) return

        try {
          controller.enqueue(encodeSSEEvent(event))
        } catch {
          isStreamClosed = true
        }
      }
@@ -841,12 +829,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
|
||||
const reader = streamingExec.stream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let chunkCount = 0
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
chunkCount++
|
||||
const chunk = decoder.decode(value, { stream: true })
|
||||
sendEvent({
|
||||
type: 'stream:chunk',
|
||||
@@ -961,7 +951,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
duration: result.metadata?.duration || 0,
|
||||
},
|
||||
})
|
||||
finalMetaStatus = 'error'
|
||||
} else {
|
||||
logger.info(`[${requestId}] Workflow execution was cancelled`)
|
||||
|
||||
@@ -974,7 +963,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
duration: result.metadata?.duration || 0,
|
||||
},
|
||||
})
|
||||
finalMetaStatus = 'cancelled'
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -998,7 +986,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
finalMetaStatus = 'complete'
|
||||
} catch (error: unknown) {
|
||||
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
|
||||
const errorMessage = isTimeout
|
||||
@@ -1030,18 +1017,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            duration: executionResult?.metadata?.duration || 0,
          },
        })
        finalMetaStatus = 'error'
      } finally {
        try {
          await eventWriter.close()
        } catch (closeError) {
          logger.warn(`[${requestId}] Failed to close event writer`, {
            error: closeError instanceof Error ? closeError.message : String(closeError),
          })
        }
        if (finalMetaStatus) {
          setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
        }
        timeoutController.cleanup()
        if (executionId) {
          await cleanupExecutionBase64Cache(executionId)

@@ -1056,7 +1032,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    },
    cancel() {
      isStreamClosed = true
      logger.info(`[${requestId}] Client disconnected from SSE stream`)
      timeoutController.cleanup()
      logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
      timeoutController.abort()
      markExecutionCancelled(executionId).catch(() => {})
    },
  })

@@ -1,170 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
  type ExecutionStreamStatus,
  getExecutionMeta,
  readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'

const logger = createLogger('ExecutionStreamReconnectAPI')

const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes

function isTerminalStatus(status: ExecutionStreamStatus): boolean {
  return status === 'complete' || status === 'error' || status === 'cancelled'
}

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

export async function GET(
  req: NextRequest,
  { params }: { params: Promise<{ id: string; executionId: string }> }
) {
  const { id: workflowId, executionId } = await params

  try {
    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,
      userId: auth.userId,
      action: 'read',
    })
    if (!workflowAuthorization.allowed) {
      return NextResponse.json(
        { error: workflowAuthorization.message || 'Access denied' },
        { status: workflowAuthorization.status }
      )
    }

    const meta = await getExecutionMeta(executionId)
    if (!meta) {
      return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
    }

    if (meta.workflowId && meta.workflowId !== workflowId) {
      return NextResponse.json(
        { error: 'Execution does not belong to this workflow' },
        { status: 403 }
      )
    }

    const fromParam = req.nextUrl.searchParams.get('from')
    const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
    const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0

    logger.info('Reconnection stream requested', {
      workflowId,
      executionId,
      fromEventId,
      metaStatus: meta.status,
    })

    const encoder = new TextEncoder()

    let closed = false

    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        let lastEventId = fromEventId
        const pollDeadline = Date.now() + MAX_POLL_DURATION_MS

        const enqueue = (text: string) => {
          if (closed) return
          try {
            controller.enqueue(encoder.encode(text))
          } catch {
            closed = true
          }
        }

        try {
          const events = await readExecutionEvents(executionId, lastEventId)
          for (const entry of events) {
            if (closed) return
            enqueue(formatSSEEvent(entry.event))
            lastEventId = entry.eventId
          }

          const currentMeta = await getExecutionMeta(executionId)
          if (!currentMeta || isTerminalStatus(currentMeta.status)) {
            enqueue('data: [DONE]\n\n')
            if (!closed) controller.close()
            return
          }

          while (!closed && Date.now() < pollDeadline) {
            await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
            if (closed) return

            const newEvents = await readExecutionEvents(executionId, lastEventId)
            for (const entry of newEvents) {
              if (closed) return
              enqueue(formatSSEEvent(entry.event))
              lastEventId = entry.eventId
            }

            const polledMeta = await getExecutionMeta(executionId)
            if (!polledMeta || isTerminalStatus(polledMeta.status)) {
              const finalEvents = await readExecutionEvents(executionId, lastEventId)
              for (const entry of finalEvents) {
                if (closed) return
                enqueue(formatSSEEvent(entry.event))
                lastEventId = entry.eventId
              }
              enqueue('data: [DONE]\n\n')
              if (!closed) controller.close()
              return
            }
          }

          if (!closed) {
            logger.warn('Reconnection stream poll deadline reached', { executionId })
            enqueue('data: [DONE]\n\n')
            controller.close()
          }
        } catch (error) {
          logger.error('Error in reconnection stream', {
            executionId,
            error: error instanceof Error ? error.message : String(error),
          })
          if (!closed) {
            try {
              controller.close()
            } catch {}
          }
        }
      },
      cancel() {
        closed = true
        logger.info('Client disconnected from reconnection stream', { executionId })
      },
    })

    return new NextResponse(stream, {
      headers: {
        ...SSE_HEADERS,
        'X-Execution-Id': executionId,
      },
    })
  } catch (error: any) {
    logger.error('Failed to start reconnection stream', {
      workflowId,
      executionId,
      error: error.message,
    })
    return NextResponse.json(
      { error: error.message || 'Failed to start reconnection stream' },
      { status: 500 }
    )
  }
}
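A hedged sketch of a client resuming an execution stream against this handler. The exact route path is an assumption (only the `{ id, executionId }` params are visible in the diff); the `from` query parameter, the SSE framing, and the `data: [DONE]` sentinel match the server code above, and the payload is assumed to be JSON emitted by `formatSSEEvent`.

```ts
// Sketch: resume an execution event stream after a disconnect.
async function resumeExecutionStream(
  workflowId: string,
  executionId: string,
  lastSeenEventId: number
) {
  // Hypothetical mount point; adjust to wherever this route actually lives.
  const url = `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${lastSeenEventId}`
  const res = await fetch(url)
  if (!res.ok || !res.body) throw new Error(`Reconnect failed: ${res.status}`)

  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })

    // SSE frames are separated by a blank line.
    const frames = buffer.split('\n\n')
    buffer = frames.pop() ?? ''
    for (const frame of frames) {
      const data = frame.replace(/^data: /, '')
      if (data === '[DONE]') return // terminal sentinel emitted by the route
      console.log('event', JSON.parse(data)) // assumes formatSSEEvent emits JSON payloads
    }
  }
}
```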
@@ -1,12 +1,14 @@
|
||||
import { db } from '@sim/db'
|
||||
import { environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment'
|
||||
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
|
||||
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('WorkspaceEnvironmentAPI')
|
||||
@@ -44,44 +46,10 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Workspace env (encrypted)
|
||||
const wsEnvRow = await db
|
||||
.select()
|
||||
.from(workspaceEnvironment)
|
||||
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
const wsEncrypted: Record<string, string> = (wsEnvRow[0]?.variables as any) || {}
|
||||
|
||||
// Personal env (encrypted)
|
||||
const personalRow = await db
|
||||
.select()
|
||||
.from(environment)
|
||||
.where(eq(environment.userId, userId))
|
||||
.limit(1)
|
||||
|
||||
const personalEncrypted: Record<string, string> = (personalRow[0]?.variables as any) || {}
|
||||
|
||||
// Decrypt both for UI
|
||||
const decryptAll = async (src: Record<string, string>) => {
|
||||
const out: Record<string, string> = {}
|
||||
for (const [k, v] of Object.entries(src)) {
|
||||
try {
|
||||
const { decrypted } = await decryptSecret(v)
|
||||
out[k] = decrypted
|
||||
} catch {
|
||||
out[k] = ''
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
const [workspaceDecrypted, personalDecrypted] = await Promise.all([
|
||||
decryptAll(wsEncrypted),
|
||||
decryptAll(personalEncrypted),
|
||||
])
|
||||
|
||||
const conflicts = Object.keys(personalDecrypted).filter((k) => k in workspaceDecrypted)
|
||||
const { workspaceDecrypted, personalDecrypted, conflicts } = await getPersonalAndWorkspaceEnv(
|
||||
userId,
|
||||
workspaceId
|
||||
)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
@@ -156,6 +124,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
set: { variables: merged, updatedAt: new Date() },
|
||||
})
|
||||
|
||||
await syncWorkspaceEnvCredentials({
|
||||
workspaceId,
|
||||
envKeys: Object.keys(merged),
|
||||
actingUserId: userId,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Workspace env PUT error`, error)
|
||||
@@ -222,6 +196,12 @@ export async function DELETE(
|
||||
set: { variables: current, updatedAt: new Date() },
|
||||
})
|
||||
|
||||
await syncWorkspaceEnvCredentials({
|
||||
workspaceId,
|
||||
envKeys: Object.keys(current),
|
||||
actingUserId: userId,
|
||||
})
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Workspace env DELETE error`, error)
|
||||
|
||||
@@ -13,6 +13,9 @@ export type CommandId =
  | 'goto-logs'
  | 'open-search'
  | 'run-workflow'
  | 'focus-copilot-tab'
  | 'focus-toolbar-tab'
  | 'focus-editor-tab'
  | 'clear-terminal-console'
  | 'focus-toolbar-search'
  | 'clear-notifications'

@@ -72,6 +75,21 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
    shortcut: 'Mod+Enter',
    allowInEditable: false,
  },
  'focus-copilot-tab': {
    id: 'focus-copilot-tab',
    shortcut: 'C',
    allowInEditable: false,
  },
  'focus-toolbar-tab': {
    id: 'focus-toolbar-tab',
    shortcut: 'T',
    allowInEditable: false,
  },
  'focus-editor-tab': {
    id: 'focus-editor-tab',
    shortcut: 'E',
    allowInEditable: false,
  },
  'clear-terminal-console': {
    id: 'clear-terminal-console',
    shortcut: 'Mod+D',

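For context, a minimal sketch of how plain-letter definitions like `'C'`, `'T'`, and `'E'` above are typically gated on `allowInEditable`. This is illustrative only and is not the repository's actual dispatcher; it just shows why single-key shortcuts must be suppressed while the user is typing.

```ts
// Illustrative-only dispatcher for a single-key panel shortcut.
function isEditableTarget(target: EventTarget | null): boolean {
  const el = target as HTMLElement | null
  return !!el && (el.isContentEditable || ['INPUT', 'TEXTAREA', 'SELECT'].includes(el.tagName))
}

window.addEventListener('keydown', (event) => {
  // Example entry mirroring the COMMAND_DEFINITIONS shape above.
  const definition = { id: 'focus-copilot-tab', shortcut: 'C', allowInEditable: false }
  if (!definition.allowInEditable && isEditableTarget(event.target)) return
  if (event.key.toUpperCase() === definition.shortcut && !event.metaKey && !event.ctrlKey) {
    // run the registered handler, e.g. setActiveTab('copilot')
  }
})
```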
@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
          className='min-h-[120px] resize-none'
          value={description}
          onChange={(e) => setDescription(e.target.value)}
          maxLength={2000}
          maxLength={500}
          disabled={isGenerating}
        />
        <div className='flex items-center justify-between'>

@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
            </p>
          )}
          {!updateMutation.error && !generateMutation.error && <div />}
          <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
          <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
        </div>
      </ModalBody>
      <ModalFooter>

@@ -57,21 +57,6 @@ export function useChangeDetection({
|
||||
}
|
||||
}
|
||||
|
||||
if (block.triggerMode) {
|
||||
const triggerConfigValue = blockSubValues?.triggerConfig
|
||||
if (
|
||||
triggerConfigValue &&
|
||||
typeof triggerConfigValue === 'object' &&
|
||||
!subBlocks.triggerConfig
|
||||
) {
|
||||
subBlocks.triggerConfig = {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: triggerConfigValue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
blocksWithSubBlocks[blockId] = {
|
||||
...block,
|
||||
subBlocks,
|
||||
|
||||
@@ -30,6 +30,7 @@ export interface OAuthRequiredModalProps {
|
||||
requiredScopes?: string[]
|
||||
serviceId: string
|
||||
newScopes?: string[]
|
||||
onConnect?: () => Promise<void> | void
|
||||
}
|
||||
|
||||
const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
@@ -314,6 +315,7 @@ export function OAuthRequiredModal({
|
||||
requiredScopes = [],
|
||||
serviceId,
|
||||
newScopes = [],
|
||||
onConnect,
|
||||
}: OAuthRequiredModalProps) {
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const { baseProvider } = parseProvider(provider)
|
||||
@@ -359,6 +361,12 @@ export function OAuthRequiredModal({
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
if (onConnect) {
|
||||
await onConnect()
|
||||
onClose()
|
||||
return
|
||||
}
|
||||
|
||||
const providerId = getProviderIdFromServiceId(serviceId)
|
||||
|
||||
logger.info('Linking OAuth2:', {
|
||||
|
||||
@@ -3,10 +3,12 @@
|
||||
import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ExternalLink, Users } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Button, Combobox } from '@/components/emcn/components'
|
||||
import { getSubscriptionStatus } from '@/lib/billing/client'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { getPollingProviderFromOAuth } from '@/lib/credential-sets/providers'
|
||||
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
|
||||
import {
|
||||
getCanonicalScopesForProvider,
|
||||
getProviderIdFromServiceId,
|
||||
@@ -18,9 +20,9 @@ import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { useCredentialSets } from '@/hooks/queries/credential-sets'
|
||||
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { useOrganizations } from '@/hooks/queries/organization'
|
||||
import { useSubscriptionData } from '@/hooks/queries/subscription'
|
||||
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
|
||||
@@ -46,6 +48,8 @@ export function CredentialSelector({
|
||||
previewValue,
|
||||
previewContextValues,
|
||||
}: CredentialSelectorProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = (params?.workspaceId as string) || ''
|
||||
const [showOAuthModal, setShowOAuthModal] = useState(false)
|
||||
const [editingValue, setEditingValue] = useState('')
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
@@ -96,53 +100,32 @@ export function CredentialSelector({
|
||||
data: credentials = [],
|
||||
isFetching: credentialsLoading,
|
||||
refetch: refetchCredentials,
|
||||
} = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
|
||||
} = useOAuthCredentials(effectiveProviderId, {
|
||||
enabled: Boolean(effectiveProviderId),
|
||||
workspaceId,
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
|
||||
const selectedCredential = useMemo(
|
||||
() => credentials.find((cred) => cred.id === selectedId),
|
||||
[credentials, selectedId]
|
||||
)
|
||||
|
||||
const shouldFetchForeignMeta =
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
Boolean(activeWorkflowId) &&
|
||||
Boolean(effectiveProviderId)
|
||||
|
||||
const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
|
||||
useOAuthCredentialDetail(
|
||||
shouldFetchForeignMeta ? selectedId : undefined,
|
||||
activeWorkflowId || undefined,
|
||||
shouldFetchForeignMeta
|
||||
)
|
||||
|
||||
const hasForeignMeta = foreignCredentials.length > 0
|
||||
const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
|
||||
|
||||
const selectedCredentialSet = useMemo(
|
||||
() => credentialSets.find((cs) => cs.id === selectedCredentialSetId),
|
||||
[credentialSets, selectedCredentialSetId]
|
||||
)
|
||||
|
||||
const isForeignCredentialSet = Boolean(isCredentialSetSelected && !selectedCredentialSet)
|
||||
|
||||
const resolvedLabel = useMemo(() => {
|
||||
if (selectedCredentialSet) return selectedCredentialSet.name
|
||||
if (isForeignCredentialSet) return CREDENTIAL.FOREIGN_LABEL
|
||||
if (selectedCredential) return selectedCredential.name
|
||||
if (isForeign) return CREDENTIAL.FOREIGN_LABEL
|
||||
return ''
|
||||
}, [selectedCredentialSet, isForeignCredentialSet, selectedCredential, isForeign])
|
||||
}, [selectedCredentialSet, selectedCredential])
|
||||
|
||||
const displayValue = isEditing ? editingValue : resolvedLabel
|
||||
|
||||
const invalidSelection =
|
||||
!isPreview &&
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
!hasForeignMeta &&
|
||||
!credentialsLoading &&
|
||||
!foreignMetaLoading
|
||||
!isPreview && Boolean(selectedId) && !selectedCredential && !credentialsLoading
|
||||
|
||||
useEffect(() => {
|
||||
if (!invalidSelection) return
|
||||
@@ -153,7 +136,7 @@ export function CredentialSelector({
|
||||
setStoreValue('')
|
||||
}, [invalidSelection, selectedId, effectiveProviderId, setStoreValue])
|
||||
|
||||
useCredentialRefreshTriggers(refetchCredentials)
|
||||
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
|
||||
|
||||
const handleOpenChange = useCallback(
|
||||
(isOpen: boolean) => {
|
||||
@@ -195,8 +178,18 @@ export function CredentialSelector({
|
||||
)
|
||||
|
||||
const handleAddCredential = useCallback(() => {
|
||||
setShowOAuthModal(true)
|
||||
}, [])
|
||||
writePendingCredentialCreateRequest({
|
||||
workspaceId,
|
||||
type: 'oauth',
|
||||
providerId: effectiveProviderId,
|
||||
displayName: '',
|
||||
serviceId,
|
||||
requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
|
||||
requestedAt: Date.now(),
|
||||
})
|
||||
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
|
||||
}, [workspaceId, effectiveProviderId, serviceId])
|
||||
|
||||
const getProviderIcon = useCallback((providerName: OAuthProvider) => {
|
||||
const { baseProvider } = parseProvider(providerName)
|
||||
@@ -251,23 +244,18 @@ export function CredentialSelector({
|
||||
label: cred.name,
|
||||
value: cred.id,
|
||||
}))
|
||||
credentialItems.push({
|
||||
label:
|
||||
credentials.length > 0
|
||||
? `Connect another ${getProviderName(provider)} account`
|
||||
: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
|
||||
if (credentialItems.length > 0) {
|
||||
groups.push({
|
||||
section: 'Personal Credential',
|
||||
items: credentialItems,
|
||||
})
|
||||
} else {
|
||||
groups.push({
|
||||
section: 'Personal Credential',
|
||||
items: [
|
||||
{
|
||||
label: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
},
|
||||
],
|
||||
})
|
||||
}
|
||||
groups.push({
|
||||
section: 'Personal Credential',
|
||||
items: credentialItems,
|
||||
})
|
||||
|
||||
return { comboboxOptions: [], comboboxGroups: groups }
|
||||
}
|
||||
@@ -277,12 +265,13 @@ export function CredentialSelector({
|
||||
value: cred.id,
|
||||
}))
|
||||
|
||||
if (credentials.length === 0) {
|
||||
options.push({
|
||||
label: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
}
|
||||
options.push({
|
||||
label:
|
||||
credentials.length > 0
|
||||
? `Connect another ${getProviderName(provider)} account`
|
||||
: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
|
||||
return { comboboxOptions: options, comboboxGroups: undefined }
|
||||
}, [
|
||||
@@ -368,7 +357,7 @@ export function CredentialSelector({
|
||||
}
|
||||
disabled={effectiveDisabled}
|
||||
editable={true}
|
||||
filterOptions={!isForeign && !isForeignCredentialSet}
|
||||
filterOptions={true}
|
||||
isLoading={credentialsLoading}
|
||||
overlayContent={overlayContent}
|
||||
className={selectedId || isCredentialSetSelected ? 'pl-[28px]' : ''}
|
||||
@@ -380,15 +369,13 @@ export function CredentialSelector({
|
||||
<span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
|
||||
Additional permissions required
|
||||
</div>
|
||||
{!isForeign && (
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -407,7 +394,11 @@ export function CredentialSelector({
|
||||
)
|
||||
}
|
||||
|
||||
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
|
||||
function useCredentialRefreshTriggers(
|
||||
refetchCredentials: () => Promise<unknown>,
|
||||
providerId: string,
|
||||
workspaceId: string
|
||||
) {
|
||||
useEffect(() => {
|
||||
const refresh = () => {
|
||||
void refetchCredentials()
|
||||
@@ -425,12 +416,29 @@ function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>
|
||||
}
|
||||
}
|
||||
|
||||
const handleCredentialsUpdated = (
|
||||
event: CustomEvent<{ providerId?: string; workspaceId?: string }>
|
||||
) => {
|
||||
if (event.detail?.providerId && event.detail.providerId !== providerId) {
|
||||
return
|
||||
}
|
||||
if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
|
||||
return
|
||||
}
|
||||
refresh()
|
||||
}
|
||||
|
||||
document.addEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.addEventListener('pageshow', handlePageShow)
|
||||
window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)
|
||||
|
||||
return () => {
|
||||
document.removeEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.removeEventListener('pageshow', handlePageShow)
|
||||
window.removeEventListener(
|
||||
'oauth-credentials-updated',
|
||||
handleCredentialsUpdated as EventListener
|
||||
)
|
||||
}
|
||||
}, [refetchCredentials])
|
||||
}, [providerId, workspaceId, refetchCredentials])
|
||||
}
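The listener registered above filters on `event.detail.providerId` and `event.detail.workspaceId`, so whatever code finishes creating or updating a credential is expected to broadcast a matching event. A minimal sketch of such a dispatch follows; the event name and detail shape are taken from the listener, while the call site is an assumption not shown in this diff.

```ts
// Sketch: notify open credential selectors that OAuth credentials changed.
function notifyCredentialsUpdated(providerId: string, workspaceId?: string) {
  window.dispatchEvent(
    new CustomEvent('oauth-credentials-updated', {
      detail: { providerId, workspaceId },
    })
  )
}
```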
|
||||
|
||||
@@ -9,6 +9,7 @@ import {
|
||||
PopoverSection,
|
||||
} from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
|
||||
import {
|
||||
usePersonalEnvironment,
|
||||
useWorkspaceEnvironment,
|
||||
@@ -168,7 +169,15 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
|
||||
}, [searchTerm])
|
||||
|
||||
const openEnvironmentSettings = () => {
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'environment' } }))
|
||||
if (workspaceId) {
|
||||
writePendingCredentialCreateRequest({
|
||||
workspaceId,
|
||||
type: 'env_personal',
|
||||
envKey: searchTerm.trim(),
|
||||
requestedAt: Date.now(),
|
||||
})
|
||||
}
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
|
||||
onClose?.()
|
||||
}
|
||||
|
||||
@@ -302,7 +311,7 @@ export const EnvVarDropdown: React.FC<EnvVarDropdownProps> = ({
|
||||
}}
|
||||
>
|
||||
<Plus className='h-3 w-3' />
|
||||
<span>Create environment variable</span>
|
||||
<span>Create Secret</span>
|
||||
</PopoverItem>
|
||||
</PopoverScrollArea>
|
||||
) : (
|
||||
|
||||
@@ -7,7 +7,6 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
@@ -125,8 +124,6 @@ export function FileSelectorInput({
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
|
||||
|
||||
const selectorResolution = useMemo<SelectorResolution | null>(() => {
|
||||
return resolveSelectorForSubBlock(subBlock, {
|
||||
workflowId: workflowIdFromUrl,
|
||||
@@ -168,7 +165,6 @@ export function FileSelectorInput({
|
||||
|
||||
const disabledReason =
|
||||
finalDisabled ||
|
||||
isForeignCredential ||
|
||||
missingCredential ||
|
||||
missingDomain ||
|
||||
missingProject ||
|
||||
|
||||
@@ -4,7 +4,6 @@ import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -47,10 +46,6 @@ export function FolderSelectorInput({
|
||||
subBlock.canonicalParamId === 'copyDestinationId' ||
|
||||
subBlock.id === 'copyDestinationFolder' ||
|
||||
subBlock.id === 'manualCopyDestinationFolder'
|
||||
const { isForeignCredential } = useForeignCredential(
|
||||
effectiveProviderId,
|
||||
(connectedCredential as string) || ''
|
||||
)
|
||||
|
||||
// Central dependsOn gating
|
||||
const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
|
||||
@@ -119,9 +114,7 @@ export function FolderSelectorInput({
|
||||
selectorContext={
|
||||
selectorResolution?.context ?? { credentialId, workflowId: activeWorkflowId || '' }
|
||||
}
|
||||
disabled={
|
||||
finalDisabled || isForeignCredential || missingCredential || !selectorResolution?.key
|
||||
}
|
||||
disabled={finalDisabled || missingCredential || !selectorResolution?.key}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue ?? null}
|
||||
placeholder={subBlock.placeholder || 'Select folder'}
|
||||
|
||||
@@ -7,7 +7,6 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
@@ -73,11 +72,6 @@ export function ProjectSelectorInput({
|
||||
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(
|
||||
effectiveProviderId,
|
||||
(connectedCredential as string) || ''
|
||||
)
|
||||
const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || ''
|
||||
const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
|
||||
disabled,
|
||||
@@ -123,7 +117,7 @@ export function ProjectSelectorInput({
|
||||
subBlock={subBlock}
|
||||
selectorKey={selectorResolution.key}
|
||||
selectorContext={selectorResolution.context}
|
||||
disabled={finalDisabled || isForeignCredential || missingCredential}
|
||||
disabled={finalDisabled || missingCredential}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue ?? null}
|
||||
placeholder={subBlock.placeholder || 'Select project'}
|
||||
|
||||
@@ -7,7 +7,6 @@ import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -87,8 +86,6 @@ export function SheetSelectorInput({
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(effectiveProviderId, normalizedCredentialId)
|
||||
|
||||
const selectorResolution = useMemo<SelectorResolution | null>(() => {
|
||||
return resolveSelectorForSubBlock(subBlock, {
|
||||
workflowId: workflowIdFromUrl,
|
||||
@@ -101,11 +98,7 @@ export function SheetSelectorInput({
|
||||
const missingSpreadsheet = !normalizedSpreadsheetId
|
||||
|
||||
const disabledReason =
|
||||
finalDisabled ||
|
||||
isForeignCredential ||
|
||||
missingCredential ||
|
||||
missingSpreadsheet ||
|
||||
!selectorResolution?.key
|
||||
finalDisabled || missingCredential || missingSpreadsheet || !selectorResolution?.key
|
||||
|
||||
if (!selectorResolution?.key) {
|
||||
return (
|
||||
|
||||
@@ -6,7 +6,6 @@ import { Tooltip } from '@/components/emcn'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { resolvePreviewContextValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/utils'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -85,11 +84,6 @@ export function SlackSelectorInput({
|
||||
? (effectiveBotToken as string) || ''
|
||||
: (effectiveCredential as string) || ''
|
||||
|
||||
const { isForeignCredential } = useForeignCredential(
|
||||
effectiveProviderId,
|
||||
(effectiveAuthMethod as string) === 'bot_token' ? '' : (effectiveCredential as string) || ''
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
const val = isPreview && previewValue !== undefined ? previewValue : storeValue
|
||||
if (typeof val === 'string') {
|
||||
@@ -99,7 +93,7 @@ export function SlackSelectorInput({
|
||||
|
||||
const requiresCredential = dependsOn.includes('credential')
|
||||
const missingCredential = !credential || credential.trim().length === 0
|
||||
const shouldForceDisable = requiresCredential && (missingCredential || isForeignCredential)
|
||||
const shouldForceDisable = requiresCredential && missingCredential
|
||||
|
||||
const context: SelectorContext = useMemo(
|
||||
() => ({
|
||||
@@ -136,7 +130,7 @@ export function SlackSelectorInput({
|
||||
subBlock={subBlock}
|
||||
selectorKey={config.selectorKey}
|
||||
selectorContext={context}
|
||||
disabled={finalDisabled || shouldForceDisable || isForeignCredential}
|
||||
disabled={finalDisabled || shouldForceDisable}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue ?? null}
|
||||
placeholder={subBlock.placeholder || config.placeholder}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import { createElement, useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { ExternalLink } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Button, Combobox } from '@/components/emcn/components'
|
||||
import { writePendingCredentialCreateRequest } from '@/lib/credentials/client-state'
|
||||
import {
|
||||
getCanonicalScopesForProvider,
|
||||
getProviderIdFromServiceId,
|
||||
@@ -10,8 +12,7 @@ import {
|
||||
parseProvider,
|
||||
} from '@/lib/oauth'
|
||||
import { OAuthRequiredModal } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/credential-selector/components/oauth-required-modal'
|
||||
import { CREDENTIAL } from '@/executor/constants'
|
||||
import { useOAuthCredentialDetail, useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { useOAuthCredentials } from '@/hooks/queries/oauth-credentials'
|
||||
import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
@@ -54,10 +55,12 @@ export function ToolCredentialSelector({
|
||||
onChange,
|
||||
provider,
|
||||
requiredScopes = [],
|
||||
label = 'Select account',
|
||||
label = 'Select credential',
|
||||
serviceId,
|
||||
disabled = false,
|
||||
}: ToolCredentialSelectorProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = (params?.workspaceId as string) || ''
|
||||
const [showOAuthModal, setShowOAuthModal] = useState(false)
|
||||
const [editingInputValue, setEditingInputValue] = useState('')
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
@@ -71,50 +74,32 @@ export function ToolCredentialSelector({
|
||||
data: credentials = [],
|
||||
isFetching: credentialsLoading,
|
||||
refetch: refetchCredentials,
|
||||
} = useOAuthCredentials(effectiveProviderId, Boolean(effectiveProviderId))
|
||||
} = useOAuthCredentials(effectiveProviderId, {
|
||||
enabled: Boolean(effectiveProviderId),
|
||||
workspaceId,
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
|
||||
const selectedCredential = useMemo(
|
||||
() => credentials.find((cred) => cred.id === selectedId),
|
||||
[credentials, selectedId]
|
||||
)
|
||||
|
||||
const shouldFetchForeignMeta =
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
Boolean(activeWorkflowId) &&
|
||||
Boolean(effectiveProviderId)
|
||||
|
||||
const { data: foreignCredentials = [], isFetching: foreignMetaLoading } =
|
||||
useOAuthCredentialDetail(
|
||||
shouldFetchForeignMeta ? selectedId : undefined,
|
||||
activeWorkflowId || undefined,
|
||||
shouldFetchForeignMeta
|
||||
)
|
||||
|
||||
const hasForeignMeta = foreignCredentials.length > 0
|
||||
const isForeign = Boolean(selectedId && !selectedCredential && hasForeignMeta)
|
||||
|
||||
const resolvedLabel = useMemo(() => {
|
||||
if (selectedCredential) return selectedCredential.name
|
||||
if (isForeign) return CREDENTIAL.FOREIGN_LABEL
|
||||
return ''
|
||||
}, [selectedCredential, isForeign])
|
||||
}, [selectedCredential])
|
||||
|
||||
const inputValue = isEditing ? editingInputValue : resolvedLabel
|
||||
|
||||
const invalidSelection =
|
||||
Boolean(selectedId) &&
|
||||
!selectedCredential &&
|
||||
!hasForeignMeta &&
|
||||
!credentialsLoading &&
|
||||
!foreignMetaLoading
|
||||
const invalidSelection = Boolean(selectedId) && !selectedCredential && !credentialsLoading
|
||||
|
||||
useEffect(() => {
|
||||
if (!invalidSelection) return
|
||||
onChange('')
|
||||
}, [invalidSelection, onChange])
|
||||
|
||||
useCredentialRefreshTriggers(refetchCredentials)
|
||||
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
|
||||
|
||||
const handleOpenChange = useCallback(
|
||||
(isOpen: boolean) => {
|
||||
@@ -142,8 +127,18 @@ export function ToolCredentialSelector({
|
||||
)
|
||||
|
||||
const handleAddCredential = useCallback(() => {
|
||||
setShowOAuthModal(true)
|
||||
}, [])
|
||||
writePendingCredentialCreateRequest({
|
||||
workspaceId,
|
||||
type: 'oauth',
|
||||
providerId: effectiveProviderId,
|
||||
displayName: '',
|
||||
serviceId,
|
||||
requiredScopes: getCanonicalScopesForProvider(effectiveProviderId),
|
||||
requestedAt: Date.now(),
|
||||
})
|
||||
|
||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'credentials' } }))
|
||||
}, [workspaceId, effectiveProviderId, serviceId])
|
||||
|
||||
const comboboxOptions = useMemo(() => {
|
||||
const options = credentials.map((cred) => ({
|
||||
@@ -151,12 +146,13 @@ export function ToolCredentialSelector({
|
||||
value: cred.id,
|
||||
}))
|
||||
|
||||
if (credentials.length === 0) {
|
||||
options.push({
|
||||
label: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
}
|
||||
options.push({
|
||||
label:
|
||||
credentials.length > 0
|
||||
? `Connect another ${getProviderName(provider)} account`
|
||||
: `Connect ${getProviderName(provider)} account`,
|
||||
value: '__connect_account__',
|
||||
})
|
||||
|
||||
return options
|
||||
}, [credentials, provider])
|
||||
@@ -206,7 +202,7 @@ export function ToolCredentialSelector({
|
||||
placeholder={label}
|
||||
disabled={disabled}
|
||||
editable={true}
|
||||
filterOptions={!isForeign}
|
||||
filterOptions={true}
|
||||
isLoading={credentialsLoading}
|
||||
overlayContent={overlayContent}
|
||||
className={selectedId ? 'pl-[28px]' : ''}
|
||||
@@ -218,15 +214,13 @@ export function ToolCredentialSelector({
|
||||
<span className='mr-[6px] inline-block h-[6px] w-[6px] rounded-[2px] bg-amber-500' />
|
||||
Additional permissions required
|
||||
</div>
|
||||
{!isForeign && (
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={() => setShowOAuthModal(true)}
|
||||
className='w-full px-[8px] py-[4px] font-medium text-[12px]'
|
||||
>
|
||||
Update access
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -245,7 +239,11 @@ export function ToolCredentialSelector({
|
||||
)
|
||||
}
|
||||
|
||||
function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>) {
|
||||
function useCredentialRefreshTriggers(
|
||||
refetchCredentials: () => Promise<unknown>,
|
||||
providerId: string,
|
||||
workspaceId: string
|
||||
) {
|
||||
useEffect(() => {
|
||||
const refresh = () => {
|
||||
void refetchCredentials()
|
||||
@@ -263,12 +261,29 @@ function useCredentialRefreshTriggers(refetchCredentials: () => Promise<unknown>
|
||||
}
|
||||
}
|
||||
|
||||
const handleCredentialsUpdated = (
|
||||
event: CustomEvent<{ providerId?: string; workspaceId?: string }>
|
||||
) => {
|
||||
if (event.detail?.providerId && event.detail.providerId !== providerId) {
|
||||
return
|
||||
}
|
||||
if (event.detail?.workspaceId && workspaceId && event.detail.workspaceId !== workspaceId) {
|
||||
return
|
||||
}
|
||||
refresh()
|
||||
}
|
||||
|
||||
document.addEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.addEventListener('pageshow', handlePageShow)
|
||||
window.addEventListener('oauth-credentials-updated', handleCredentialsUpdated as EventListener)
|
||||
|
||||
return () => {
|
||||
document.removeEventListener('visibilitychange', handleVisibilityChange)
|
||||
window.removeEventListener('pageshow', handlePageShow)
|
||||
window.removeEventListener(
|
||||
'oauth-credentials-updated',
|
||||
handleCredentialsUpdated as EventListener
|
||||
)
|
||||
}
|
||||
}, [refetchCredentials])
|
||||
}, [providerId, workspaceId, refetchCredentials])
|
||||
}
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
import { useEffect, useMemo, useState } from 'react'
|
||||
|
||||
export function useForeignCredential(
|
||||
provider: string | undefined,
|
||||
credentialId: string | undefined
|
||||
) {
|
||||
const [isForeign, setIsForeign] = useState<boolean>(false)
|
||||
const [loading, setLoading] = useState<boolean>(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
const normalizedProvider = useMemo(() => (provider || '').toString(), [provider])
|
||||
const normalizedCredentialId = useMemo(() => credentialId || '', [credentialId])
|
||||
|
||||
useEffect(() => {
|
||||
let cancelled = false
|
||||
async function check() {
|
||||
setLoading(true)
|
||||
setError(null)
|
||||
try {
|
||||
if (!normalizedProvider || !normalizedCredentialId) {
|
||||
if (!cancelled) setIsForeign(false)
|
||||
return
|
||||
}
|
||||
const res = await fetch(
|
||||
`/api/auth/oauth/credentials?provider=${encodeURIComponent(normalizedProvider)}`
|
||||
)
|
||||
if (!res.ok) {
|
||||
if (!cancelled) setIsForeign(true)
|
||||
return
|
||||
}
|
||||
const data = await res.json()
|
||||
const isOwn = (data.credentials || []).some((c: any) => c.id === normalizedCredentialId)
|
||||
if (!cancelled) setIsForeign(!isOwn)
|
||||
} catch (e) {
|
||||
if (!cancelled) {
|
||||
setIsForeign(true)
|
||||
setError((e as Error).message)
|
||||
}
|
||||
} finally {
|
||||
if (!cancelled) setLoading(false)
|
||||
}
|
||||
}
|
||||
void check()
|
||||
return () => {
|
||||
cancelled = true
|
||||
}
|
||||
}, [normalizedProvider, normalizedCredentialId])
|
||||
|
||||
return { isForeignCredential: isForeign, loading, error }
|
||||
}
|
||||
@@ -340,7 +340,13 @@ export const Panel = memo(function Panel() {
   * Register global keyboard shortcuts using the central commands registry.
   *
   * - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
   * - C: Focus Copilot tab
   * - T: Focus Toolbar tab
   * - E: Focus Editor tab
   * - Mod+F: Focus Toolbar tab and search input
   *
   * The tab-switching commands are disabled inside editable elements so typing
   * in inputs or textareas is not interrupted.
   */
  useRegisterGlobalCommands(() =>
    createCommands([

@@ -357,6 +363,33 @@ export const Panel = memo(function Panel() {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-copilot-tab',
        handler: () => {
          setActiveTab('copilot')
        },
        overrides: {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-toolbar-tab',
        handler: () => {
          setActiveTab('toolbar')
        },
        overrides: {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-editor-tab',
        handler: () => {
          setActiveTab('editor')
        },
        overrides: {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-toolbar-search',
        handler: () => {

@@ -1,4 +1,4 @@
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
@@ -46,13 +46,7 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const logger = createLogger('useWorkflowExecution')
|
||||
|
||||
/**
|
||||
* Module-level Set tracking which workflows have an active reconnection effect.
|
||||
* Prevents multiple hook instances (from different components) from starting
|
||||
* concurrent reconnection streams for the same workflow during the same mount cycle.
|
||||
*/
|
||||
const activeReconnections = new Set<string>()
|
||||
|
||||
// Debug state validation result
|
||||
interface DebugValidationResult {
|
||||
isValid: boolean
|
||||
error?: string
|
||||
@@ -60,7 +54,7 @@ interface DebugValidationResult {
|
||||
|
||||
interface BlockEventHandlerConfig {
|
||||
workflowId?: string
|
||||
executionIdRef: { current: string }
|
||||
executionId?: string
|
||||
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
|
||||
activeBlocksSet: Set<string>
|
||||
accumulatedBlockLogs: BlockLog[]
|
||||
@@ -114,15 +108,12 @@ export function useWorkflowExecution() {
|
||||
const queryClient = useQueryClient()
|
||||
const currentWorkflow = useCurrentWorkflow()
|
||||
const { activeWorkflowId, workflows } = useWorkflowRegistry()
|
||||
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
|
||||
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
|
||||
useTerminalConsoleStore()
|
||||
const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
|
||||
const { getAllVariables } = useEnvironmentStore()
|
||||
const { getVariablesByWorkflowId, variables } = useVariablesStore()
|
||||
const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
|
||||
useCurrentWorkflowExecution()
|
||||
const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
|
||||
const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
|
||||
const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
|
||||
const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
|
||||
const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
|
||||
@@ -306,7 +297,7 @@ export function useWorkflowExecution() {
|
||||
(config: BlockEventHandlerConfig) => {
|
||||
const {
|
||||
workflowId,
|
||||
executionIdRef,
|
||||
executionId,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
accumulatedBlockLogs,
|
||||
@@ -317,14 +308,6 @@ export function useWorkflowExecution() {
|
||||
onBlockCompleteCallback,
|
||||
} = config
|
||||
|
||||
/** Returns true if this execution was cancelled or superseded by another run. */
|
||||
const isStaleExecution = () =>
|
||||
!!(
|
||||
workflowId &&
|
||||
executionIdRef.current &&
|
||||
useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
|
||||
)
|
||||
|
||||
const updateActiveBlocks = (blockId: string, isActive: boolean) => {
|
||||
if (!workflowId) return
|
||||
if (isActive) {
|
||||
@@ -377,7 +360,7 @@ export function useWorkflowExecution() {
|
||||
endedAt: data.endedAt,
|
||||
workflowId,
|
||||
blockId: data.blockId,
|
||||
executionId: executionIdRef.current,
|
||||
executionId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
@@ -400,7 +383,7 @@ export function useWorkflowExecution() {
|
||||
endedAt: data.endedAt,
|
||||
workflowId,
|
||||
blockId: data.blockId,
|
||||
executionId: executionIdRef.current,
|
||||
executionId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
@@ -427,7 +410,7 @@ export function useWorkflowExecution() {
|
||||
iterationType: data.iterationType,
|
||||
iterationContainerId: data.iterationContainerId,
|
||||
},
|
||||
executionIdRef.current
|
||||
executionId
|
||||
)
|
||||
}
|
||||
|
||||
@@ -449,12 +432,11 @@ export function useWorkflowExecution() {
|
||||
iterationType: data.iterationType,
|
||||
iterationContainerId: data.iterationContainerId,
|
||||
},
|
||||
executionIdRef.current
|
||||
executionId
|
||||
)
|
||||
}
|
||||
|
||||
const onBlockStarted = (data: BlockStartedData) => {
|
||||
if (isStaleExecution()) return
|
||||
updateActiveBlocks(data.blockId, true)
|
||||
markIncomingEdges(data.blockId)
|
||||
|
||||
@@ -471,7 +453,7 @@ export function useWorkflowExecution() {
|
||||
endedAt: undefined,
|
||||
workflowId,
|
||||
blockId: data.blockId,
|
||||
executionId: executionIdRef.current,
|
||||
executionId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
isRunning: true,
|
||||
@@ -483,7 +465,6 @@ export function useWorkflowExecution() {
|
||||
}
|
||||
|
||||
const onBlockCompleted = (data: BlockCompletedData) => {
|
||||
if (isStaleExecution()) return
|
||||
updateActiveBlocks(data.blockId, false)
|
||||
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
|
||||
|
||||
@@ -514,7 +495,6 @@ export function useWorkflowExecution() {
|
||||
}
|
||||
|
||||
const onBlockError = (data: BlockErrorData) => {
|
||||
if (isStaleExecution()) return
|
||||
updateActiveBlocks(data.blockId, false)
|
||||
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
|
||||
|
||||
@@ -922,6 +902,10 @@ export function useWorkflowExecution() {
|
||||
|
||||
// Update block logs with actual stream completion times
|
||||
if (result.logs && streamCompletionTimes.size > 0) {
|
||||
const streamCompletionEndTime = new Date(
|
||||
Math.max(...Array.from(streamCompletionTimes.values()))
|
||||
).toISOString()
|
||||
|
||||
result.logs.forEach((log: BlockLog) => {
|
||||
if (streamCompletionTimes.has(log.blockId)) {
|
||||
const completionTime = streamCompletionTimes.get(log.blockId)!
|
||||
@@ -1003,6 +987,7 @@ export function useWorkflowExecution() {
|
||||
return { success: true, stream }
|
||||
}
|
||||
|
||||
// For manual (non-chat) execution
|
||||
const manualExecutionId = uuidv4()
|
||||
try {
|
||||
const result = await executeWorkflow(
|
||||
@@ -1017,10 +1002,29 @@ export function useWorkflowExecution() {
|
||||
if (result.metadata.pendingBlocks) {
|
||||
setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
|
||||
}
|
||||
} else if (result && 'success' in result) {
|
||||
setExecutionResult(result)
|
||||
// Reset execution state after successful non-debug execution
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setIsDebugging(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
|
||||
if (isChatExecution) {
|
||||
if (!result.metadata) {
|
||||
result.metadata = { duration: 0, startTime: new Date().toISOString() }
|
||||
}
|
||||
;(result.metadata as any).source = 'chat'
|
||||
}
|
||||
|
||||
// Invalidate subscription queries to update usage
|
||||
setTimeout(() => {
|
||||
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
||||
}, 1000)
|
||||
}
|
||||
return result
|
||||
} catch (error: any) {
|
||||
const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
|
||||
// Note: Error logs are already persisted server-side via execution-core.ts
|
||||
return errorResult
|
||||
}
|
||||
},
|
||||
@@ -1271,7 +1275,7 @@ export function useWorkflowExecution() {
|
||||
if (activeWorkflowId) {
|
||||
logger.info('Using server-side executor')
|
||||
|
||||
const executionIdRef = { current: '' }
|
||||
const executionId = uuidv4()
|
||||
|
||||
let executionResult: ExecutionResult = {
|
||||
success: false,
|
||||
@@ -1289,7 +1293,7 @@ export function useWorkflowExecution() {
|
||||
try {
|
||||
const blockHandlers = buildBlockEventHandlers({
|
||||
workflowId: activeWorkflowId,
|
||||
executionIdRef,
|
||||
executionId,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
accumulatedBlockLogs,
|
||||
@@ -1322,10 +1326,6 @@ export function useWorkflowExecution() {
|
||||
loops: clientWorkflowState.loops,
|
||||
parallels: clientWorkflowState.parallels,
|
||||
},
|
||||
onExecutionId: (id) => {
|
||||
executionIdRef.current = id
|
||||
setCurrentExecutionId(activeWorkflowId, id)
|
||||
},
|
||||
callbacks: {
|
||||
onExecutionStarted: (data) => {
|
||||
logger.info('Server execution started:', data)
|
||||
@@ -1368,18 +1368,6 @@ export function useWorkflowExecution() {
|
||||
},
|
||||
|
||||
onExecutionCompleted: (data) => {
|
||||
if (
|
||||
activeWorkflowId &&
|
||||
executionIdRef.current &&
|
||||
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
||||
executionIdRef.current
|
||||
)
|
||||
return
|
||||
|
||||
if (activeWorkflowId) {
|
||||
setCurrentExecutionId(activeWorkflowId, null)
|
||||
}
|
||||
|
||||
executionResult = {
|
||||
success: data.success,
|
||||
output: data.output,
|
||||
@@ -1437,33 +1425,9 @@ export function useWorkflowExecution() {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const workflowExecState = activeWorkflowId
|
||||
? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
|
||||
: null
|
||||
if (activeWorkflowId && !workflowExecState?.isDebugging) {
|
||||
setExecutionResult(executionResult)
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
setTimeout(() => {
|
||||
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
||||
}, 1000)
|
||||
}
|
||||
},
|
||||
|
||||
onExecutionError: (data) => {
|
||||
if (
|
||||
activeWorkflowId &&
|
||||
executionIdRef.current &&
|
||||
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
||||
executionIdRef.current
|
||||
)
|
||||
return
|
||||
|
||||
if (activeWorkflowId) {
|
||||
setCurrentExecutionId(activeWorkflowId, null)
|
||||
}
|
||||
|
||||
executionResult = {
|
||||
success: false,
|
||||
output: {},
|
||||
@@ -1477,53 +1441,43 @@ export function useWorkflowExecution() {
|
||||
const isPreExecutionError = accumulatedBlockLogs.length === 0
|
||||
handleExecutionErrorConsole({
|
||||
workflowId: activeWorkflowId,
|
||||
executionId: executionIdRef.current,
|
||||
executionId,
|
||||
error: data.error,
|
||||
durationMs: data.duration,
|
||||
blockLogs: accumulatedBlockLogs,
|
||||
isPreExecutionError,
|
||||
})
|
||||
|
||||
if (activeWorkflowId) {
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setIsDebugging(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
}
|
||||
},
|
||||
|
||||
onExecutionCancelled: (data) => {
|
||||
if (
|
||||
activeWorkflowId &&
|
||||
executionIdRef.current &&
|
||||
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
||||
executionIdRef.current
|
||||
)
|
||||
return
|
||||
|
||||
if (activeWorkflowId) {
|
||||
setCurrentExecutionId(activeWorkflowId, null)
|
||||
}
|
||||
|
||||
handleExecutionCancelledConsole({
|
||||
workflowId: activeWorkflowId,
|
||||
executionId: executionIdRef.current,
|
||||
executionId,
|
||||
durationMs: data?.duration,
|
||||
})
|
||||
|
||||
if (activeWorkflowId) {
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setIsDebugging(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
}
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
return executionResult
|
||||
} catch (error: any) {
|
||||
// Don't log abort errors - they're intentional user actions
|
||||
if (error.name === 'AbortError' || error.message?.includes('aborted')) {
|
||||
logger.info('Execution aborted by user')
|
||||
return executionResult
|
||||
|
||||
// Reset execution state
|
||||
if (activeWorkflowId) {
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
}
|
||||
|
||||
// Return gracefully without error
|
||||
return {
|
||||
success: false,
|
||||
output: {},
|
||||
metadata: { duration: 0 },
|
||||
logs: [],
|
||||
}
|
||||
}
|
||||
|
||||
logger.error('Server-side execution failed:', error)
|
||||
@@ -1531,6 +1485,7 @@ export function useWorkflowExecution() {
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: should never reach here
|
||||
throw new Error('Server-side execution is required')
|
||||
}
|
||||
|
||||
@@ -1762,28 +1717,25 @@ export function useWorkflowExecution() {
|
||||
* Handles cancelling the current workflow execution
|
||||
*/
|
||||
const handleCancelExecution = useCallback(() => {
|
||||
if (!activeWorkflowId) return
|
||||
logger.info('Workflow execution cancellation requested')
|
||||
|
||||
const storedExecutionId = getCurrentExecutionId(activeWorkflowId)
|
||||
// Cancel the execution stream for this workflow (server-side)
|
||||
executionStream.cancel(activeWorkflowId ?? undefined)
|
||||
|
||||
if (storedExecutionId) {
|
||||
setCurrentExecutionId(activeWorkflowId, null)
|
||||
fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
|
||||
method: 'POST',
|
||||
}).catch(() => {})
|
||||
handleExecutionCancelledConsole({
|
||||
workflowId: activeWorkflowId,
|
||||
executionId: storedExecutionId,
|
||||
})
|
||||
// Mark current chat execution as superseded so its cleanup won't affect new executions
|
||||
currentChatExecutionIdRef.current = null
|
||||
|
||||
// Mark all running entries as canceled in the terminal
|
||||
if (activeWorkflowId) {
|
||||
cancelRunningEntries(activeWorkflowId)
|
||||
|
||||
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setIsDebugging(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
}
|
||||
|
||||
executionStream.cancel(activeWorkflowId)
|
||||
currentChatExecutionIdRef.current = null
|
||||
setIsExecuting(activeWorkflowId, false)
|
||||
setIsDebugging(activeWorkflowId, false)
|
||||
setActiveBlocks(activeWorkflowId, new Set())
|
||||
|
||||
// If in debug mode, also reset debug state
|
||||
if (isDebugging) {
|
||||
resetDebugState()
|
||||
}
|
||||
@@ -1795,9 +1747,7 @@ export function useWorkflowExecution() {
|
||||
setIsDebugging,
|
||||
setActiveBlocks,
|
||||
activeWorkflowId,
|
||||
getCurrentExecutionId,
|
||||
setCurrentExecutionId,
|
||||
handleExecutionCancelledConsole,
|
||||
cancelRunningEntries,
|
||||
])
|
||||
|
||||
/**
|
||||
@@ -1897,7 +1847,7 @@ export function useWorkflowExecution() {
|
||||
}
|
||||
|
||||
setIsExecuting(workflowId, true)
|
||||
const executionIdRef = { current: '' }
|
||||
const executionId = uuidv4()
|
||||
const accumulatedBlockLogs: BlockLog[] = []
|
||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||
const executedBlockIds = new Set<string>()
|
||||
@@ -1906,7 +1856,7 @@ export function useWorkflowExecution() {
|
||||
try {
|
||||
const blockHandlers = buildBlockEventHandlers({
|
||||
workflowId,
|
||||
executionIdRef,
|
||||
executionId,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
accumulatedBlockLogs,
|
||||
@@ -1921,10 +1871,6 @@ export function useWorkflowExecution() {
|
||||
startBlockId: blockId,
|
||||
sourceSnapshot: effectiveSnapshot,
|
||||
input: workflowInput,
|
||||
onExecutionId: (id) => {
|
||||
executionIdRef.current = id
|
||||
setCurrentExecutionId(workflowId, id)
|
||||
},
|
||||
callbacks: {
|
||||
onBlockStarted: blockHandlers.onBlockStarted,
|
||||
onBlockCompleted: blockHandlers.onBlockCompleted,
|
||||
@@ -1932,6 +1878,7 @@ export function useWorkflowExecution() {
|
||||
|
||||
onExecutionCompleted: (data) => {
|
||||
if (data.success) {
|
||||
// Add the start block (trigger) to executed blocks
|
||||
executedBlockIds.add(blockId)
|
||||
|
||||
const mergedBlockStates: Record<string, BlockState> = {
|
||||
@@ -1955,10 +1902,6 @@ export function useWorkflowExecution() {
|
||||
}
|
||||
setLastExecutionSnapshot(workflowId, updatedSnapshot)
|
||||
}
|
||||
|
||||
setCurrentExecutionId(workflowId, null)
|
||||
setIsExecuting(workflowId, false)
|
||||
setActiveBlocks(workflowId, new Set())
|
||||
},
|
||||
|
||||
onExecutionError: (data) => {
|
||||
@@ -1978,27 +1921,19 @@ export function useWorkflowExecution() {
|
||||
|
||||
handleExecutionErrorConsole({
|
||||
workflowId,
|
||||
executionId: executionIdRef.current,
|
||||
executionId,
|
||||
error: data.error,
|
||||
durationMs: data.duration,
|
||||
blockLogs: accumulatedBlockLogs,
|
||||
})
|
||||
|
||||
setCurrentExecutionId(workflowId, null)
|
||||
setIsExecuting(workflowId, false)
|
||||
setActiveBlocks(workflowId, new Set())
|
||||
},
|
||||
|
||||
onExecutionCancelled: (data) => {
|
||||
handleExecutionCancelledConsole({
|
||||
workflowId,
|
||||
executionId: executionIdRef.current,
|
||||
executionId,
|
||||
durationMs: data?.duration,
|
||||
})
|
||||
|
||||
setCurrentExecutionId(workflowId, null)
|
||||
setIsExecuting(workflowId, false)
|
||||
setActiveBlocks(workflowId, new Set())
|
||||
},
|
||||
},
|
||||
})
|
||||
@@ -2007,20 +1942,14 @@ export function useWorkflowExecution() {
|
||||
logger.error('Run-from-block failed:', error)
|
||||
}
|
||||
} finally {
|
||||
const currentId = getCurrentExecutionId(workflowId)
|
||||
if (currentId === null || currentId === executionIdRef.current) {
|
||||
setCurrentExecutionId(workflowId, null)
|
||||
setIsExecuting(workflowId, false)
|
||||
setActiveBlocks(workflowId, new Set())
|
||||
}
|
||||
setIsExecuting(workflowId, false)
|
||||
setActiveBlocks(workflowId, new Set())
|
||||
}
|
||||
},
|
||||
[
|
||||
getLastExecutionSnapshot,
|
||||
setLastExecutionSnapshot,
|
||||
clearLastExecutionSnapshot,
|
||||
getCurrentExecutionId,
|
||||
setCurrentExecutionId,
|
||||
setIsExecuting,
|
||||
setActiveBlocks,
|
||||
setBlockRunStatus,
|
||||
@@ -2050,213 +1979,29 @@ export function useWorkflowExecution() {
|
||||
|
||||
const executionId = uuidv4()
|
||||
try {
|
||||
await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
|
||||
const result = await executeWorkflow(
|
||||
undefined,
|
||||
undefined,
|
||||
executionId,
|
||||
undefined,
|
||||
'manual',
|
||||
blockId
|
||||
)
|
||||
if (result && 'success' in result) {
|
||||
setExecutionResult(result)
|
||||
}
|
||||
} catch (error) {
|
||||
const errorResult = handleExecutionError(error, { executionId })
|
||||
return errorResult
|
||||
} finally {
|
||||
setCurrentExecutionId(workflowId, null)
|
||||
setIsExecuting(workflowId, false)
|
||||
setIsDebugging(workflowId, false)
|
||||
setActiveBlocks(workflowId, new Set())
|
||||
}
|
||||
},
|
||||
[
|
||||
activeWorkflowId,
|
||||
setCurrentExecutionId,
|
||||
setExecutionResult,
|
||||
setIsExecuting,
|
||||
setIsDebugging,
|
||||
setActiveBlocks,
|
||||
]
|
||||
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
if (!activeWorkflowId || !hasHydrated) return
|
||||
|
||||
const entries = useTerminalConsoleStore.getState().entries
|
||||
const runningEntries = entries.filter(
|
||||
(e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
|
||||
)
|
||||
if (runningEntries.length === 0) return
|
||||
|
||||
if (activeReconnections.has(activeWorkflowId)) return
|
||||
activeReconnections.add(activeWorkflowId)
|
||||
|
||||
executionStream.cancel(activeWorkflowId)
|
||||
|
||||
const sorted = [...runningEntries].sort((a, b) => {
|
||||
const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
|
||||
const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
|
||||
return bTime - aTime
|
||||
})
|
||||
const executionId = sorted[0].executionId!
|
||||
|
||||
const otherExecutionIds = new Set(
|
||||
sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
|
||||
)
|
||||
if (otherExecutionIds.size > 0) {
|
||||
cancelRunningEntries(activeWorkflowId)
|
||||
}
|
||||
|
||||
setCurrentExecutionId(activeWorkflowId, executionId)
|
||||
setIsExecuting(activeWorkflowId, true)
|
||||
|
||||
const workflowEdges = useWorkflowStore.getState().edges
|
||||
const activeBlocksSet = new Set<string>()
|
||||
const accumulatedBlockLogs: BlockLog[] = []
|
||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||
const executedBlockIds = new Set<string>()
|
||||
|
||||
const executionIdRef = { current: executionId }
|
||||
|
||||
const handlers = buildBlockEventHandlers({
|
||||
workflowId: activeWorkflowId,
|
||||
executionIdRef,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
accumulatedBlockLogs,
|
||||
accumulatedBlockStates,
|
||||
executedBlockIds,
|
||||
consoleMode: 'update',
|
||||
includeStartConsoleEntry: true,
|
||||
})
|
||||
|
||||
const originalEntries = entries
|
||||
.filter((e) => e.executionId === executionId)
|
||||
.map((e) => ({ ...e }))
|
||||
|
||||
let cleared = false
|
||||
let reconnectionComplete = false
|
||||
let cleanupRan = false
|
||||
const clearOnce = () => {
|
||||
if (!cleared) {
|
||||
cleared = true
|
||||
clearExecutionEntries(executionId)
|
||||
}
|
||||
}
|
||||
|
||||
const reconnectWorkflowId = activeWorkflowId
|
||||
|
||||
executionStream
|
||||
.reconnect({
|
||||
workflowId: reconnectWorkflowId,
|
||||
executionId,
|
||||
callbacks: {
|
||||
onBlockStarted: (data) => {
|
||||
clearOnce()
|
||||
handlers.onBlockStarted(data)
|
||||
},
|
||||
onBlockCompleted: (data) => {
|
||||
clearOnce()
|
||||
handlers.onBlockCompleted(data)
|
||||
},
|
||||
onBlockError: (data) => {
|
||||
clearOnce()
|
||||
handlers.onBlockError(data)
|
||||
},
|
||||
onExecutionCompleted: () => {
|
||||
const currentId = useExecutionStore
|
||||
.getState()
|
||||
.getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId !== executionId) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
clearOnce()
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
},
|
||||
onExecutionError: (data) => {
|
||||
const currentId = useExecutionStore
|
||||
.getState()
|
||||
.getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId !== executionId) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
clearOnce()
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
handleExecutionErrorConsole({
|
||||
workflowId: reconnectWorkflowId,
|
||||
executionId,
|
||||
error: data.error,
|
||||
blockLogs: accumulatedBlockLogs,
|
||||
})
|
||||
},
|
||||
onExecutionCancelled: () => {
|
||||
const currentId = useExecutionStore
|
||||
.getState()
|
||||
.getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId !== executionId) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
clearOnce()
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
handleExecutionCancelledConsole({
|
||||
workflowId: reconnectWorkflowId,
|
||||
executionId,
|
||||
})
|
||||
},
|
||||
},
|
||||
})
|
||||
.catch((error) => {
|
||||
logger.warn('Execution reconnection failed', { executionId, error })
|
||||
})
|
||||
.finally(() => {
|
||||
if (reconnectionComplete || cleanupRan) return
|
||||
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId !== executionId) return
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
clearExecutionEntries(executionId)
|
||||
for (const entry of originalEntries) {
|
||||
addConsole({
|
||||
workflowId: entry.workflowId,
|
||||
blockId: entry.blockId,
|
||||
blockName: entry.blockName,
|
||||
blockType: entry.blockType,
|
||||
executionId: entry.executionId,
|
||||
executionOrder: entry.executionOrder,
|
||||
isRunning: false,
|
||||
warning: 'Execution result unavailable — check the logs page',
|
||||
})
|
||||
}
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
})
|
||||
|
||||
return () => {
|
||||
cleanupRan = true
|
||||
executionStream.cancel(reconnectWorkflowId)
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
|
||||
if (cleared && !reconnectionComplete) {
|
||||
clearExecutionEntries(executionId)
|
||||
for (const entry of originalEntries) {
|
||||
addConsole(entry)
|
||||
}
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [activeWorkflowId, hasHydrated])
|
||||
|
||||
return {
|
||||
isExecuting,
|
||||
isDebugging,
|
||||
|
||||
@@ -473,7 +473,7 @@ function ConnectionsSection({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Environment Variables */}
|
||||
{/* Secrets */}
|
||||
{envVars.length > 0 && (
|
||||
<div className='mb-[2px] last:mb-0'>
|
||||
<div
|
||||
@@ -489,7 +489,7 @@ function ConnectionsSection({
|
||||
'text-[var(--text-secondary)] group-hover:text-[var(--text-primary)]'
|
||||
)}
|
||||
>
|
||||
Environment Variables
|
||||
Secrets
|
||||
</span>
|
||||
<ChevronDownIcon
|
||||
className={cn(
|
||||
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,17 @@
'use client'

import { CredentialsManager } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/credentials/credentials-manager'

interface CredentialsProps {
  onOpenChange?: (open: boolean) => void
  registerCloseHandler?: (handler: (open: boolean) => void) => void
  registerBeforeLeaveHandler?: (handler: (onProceed: () => void) => void) => void
}

export function Credentials(_props: CredentialsProps) {
  return (
    <div className='h-full min-h-0'>
      <CredentialsManager />
    </div>
  )
}

@@ -134,7 +134,7 @@ function WorkspaceVariableRow({
|
||||
<Trash />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Delete environment variable</Tooltip.Content>
|
||||
<Tooltip.Content>Delete secret</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
@@ -637,7 +637,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
|
||||
<Trash />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>Delete environment variable</Tooltip.Content>
|
||||
<Tooltip.Content>Delete secret</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
@@ -811,7 +811,7 @@ export function EnvironmentVariables({ registerBeforeLeaveHandler }: Environment
|
||||
filteredWorkspaceEntries.length === 0 &&
|
||||
(envVars.length > 0 || Object.keys(workspaceVars).length > 0) && (
|
||||
<div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
|
||||
No environment variables found matching "{searchTerm}"
|
||||
No secrets found matching "{searchTerm}"
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
|
||||
@@ -2,6 +2,7 @@ export { ApiKeys } from './api-keys/api-keys'
export { BYOK } from './byok/byok'
export { Copilot } from './copilot/copilot'
export { CredentialSets } from './credential-sets/credential-sets'
export { Credentials } from './credentials/credentials'
export { CustomTools } from './custom-tools/custom-tools'
export { Debug } from './debug/debug'
export { EnvironmentVariables } from './environment/environment'

@@ -1,4 +1,3 @@
export { CancelSubscription } from './cancel-subscription'
export { CreditBalance } from './credit-balance'
export { PlanCard, type PlanCardProps, type PlanFeature } from './plan-card'
export { ReferralCode } from './referral-code'

@@ -1 +0,0 @@
export { ReferralCode } from './referral-code'

@@ -1,101 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Button, Input, Label } from '@/components/emcn'
|
||||
|
||||
const logger = createLogger('ReferralCode')
|
||||
|
||||
interface ReferralCodeProps {
|
||||
onRedeemComplete?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Inline referral/promo code entry field with redeem button.
|
||||
* One-time use per account — shows success or "already redeemed" state.
|
||||
*/
|
||||
export function ReferralCode({ onRedeemComplete }: ReferralCodeProps) {
|
||||
const [code, setCode] = useState('')
|
||||
const [isRedeeming, setIsRedeeming] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [success, setSuccess] = useState<{ bonusAmount: number } | null>(null)
|
||||
|
||||
const handleRedeem = async () => {
|
||||
const trimmed = code.trim()
|
||||
if (!trimmed || isRedeeming) return
|
||||
|
||||
setIsRedeeming(true)
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/referral-code/redeem', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ code: trimmed }),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to redeem code')
|
||||
}
|
||||
|
||||
if (data.redeemed) {
|
||||
setSuccess({ bonusAmount: data.bonusAmount })
|
||||
setCode('')
|
||||
onRedeemComplete?.()
|
||||
} else {
|
||||
setError(data.error || 'Code could not be redeemed')
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Referral code redemption failed', { error: err })
|
||||
setError(err instanceof Error ? err.message : 'Failed to redeem code')
|
||||
} finally {
|
||||
setIsRedeeming(false)
|
||||
}
|
||||
}
|
||||
|
||||
if (success) {
|
||||
return (
|
||||
<div className='flex items-center justify-between'>
|
||||
<Label>Referral Code</Label>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>
|
||||
+${success.bonusAmount} credits applied
|
||||
</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex items-center justify-between gap-[12px]'>
|
||||
<Label className='shrink-0'>Referral Code</Label>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<div className='flex flex-col'>
|
||||
<Input
|
||||
type='text'
|
||||
value={code}
|
||||
onChange={(e) => {
|
||||
setCode(e.target.value)
|
||||
setError(null)
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter') handleRedeem()
|
||||
}}
|
||||
placeholder='Enter code'
|
||||
className='h-[32px] w-[140px] text-[12px]'
|
||||
disabled={isRedeeming}
|
||||
/>
|
||||
{error && <span className='mt-[4px] text-[11px] text-[var(--text-error)]'>{error}</span>}
|
||||
</div>
|
||||
<Button
|
||||
variant='active'
|
||||
className='h-[32px] shrink-0 rounded-[6px] text-[12px]'
|
||||
onClick={handleRedeem}
|
||||
disabled={isRedeeming || !code.trim()}
|
||||
>
|
||||
{isRedeeming ? 'Redeeming...' : 'Redeem'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -17,7 +17,6 @@ import {
|
||||
CancelSubscription,
|
||||
CreditBalance,
|
||||
PlanCard,
|
||||
ReferralCode,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components'
|
||||
import {
|
||||
ENTERPRISE_PLAN_FEATURES,
|
||||
@@ -550,10 +549,6 @@ export function Subscription() {
|
||||
/>
|
||||
)}
|
||||
|
||||
{!subscription.isEnterprise && (
|
||||
<ReferralCode onRedeemComplete={() => refetchSubscription()} />
|
||||
)}
|
||||
|
||||
{/* Next Billing Date - hidden from team members */}
|
||||
{subscription.isPaid &&
|
||||
subscriptionData?.data?.periodEnd &&
|
||||
|
||||
@@ -20,7 +20,6 @@ import {
|
||||
import {
|
||||
Card,
|
||||
Connections,
|
||||
FolderCode,
|
||||
HexSimple,
|
||||
Key,
|
||||
SModal,
|
||||
@@ -45,12 +44,11 @@ import {
|
||||
BYOK,
|
||||
Copilot,
|
||||
CredentialSets,
|
||||
Credentials,
|
||||
CustomTools,
|
||||
Debug,
|
||||
EnvironmentVariables,
|
||||
FileUploads,
|
||||
General,
|
||||
Integrations,
|
||||
MCP,
|
||||
Skills,
|
||||
Subscription,
|
||||
@@ -80,6 +78,7 @@ interface SettingsModalProps {
|
||||
|
||||
type SettingsSection =
|
||||
| 'general'
|
||||
| 'credentials'
|
||||
| 'environment'
|
||||
| 'template-profile'
|
||||
| 'integrations'
|
||||
@@ -156,11 +155,10 @@ const allNavigationItems: NavigationItem[] = [
|
||||
requiresHosted: true,
|
||||
requiresTeam: true,
|
||||
},
|
||||
{ id: 'integrations', label: 'Integrations', icon: Connections, section: 'tools' },
|
||||
{ id: 'credentials', label: 'Credentials', icon: Connections, section: 'tools' },
|
||||
{ id: 'custom-tools', label: 'Custom Tools', icon: Wrench, section: 'tools' },
|
||||
{ id: 'skills', label: 'Skills', icon: AgentSkillsIcon, section: 'tools' },
|
||||
{ id: 'mcp', label: 'MCP Tools', icon: McpIcon, section: 'tools' },
|
||||
{ id: 'environment', label: 'Environment', icon: FolderCode, section: 'system' },
|
||||
{ id: 'apikeys', label: 'API Keys', icon: Key, section: 'system' },
|
||||
{ id: 'workflow-mcp-servers', label: 'MCP Servers', icon: Server, section: 'system' },
|
||||
{
|
||||
@@ -256,9 +254,6 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
if (item.id === 'apikeys' && permissionConfig.hideApiKeysTab) {
|
||||
return false
|
||||
}
|
||||
if (item.id === 'environment' && permissionConfig.hideEnvironmentTab) {
|
||||
return false
|
||||
}
|
||||
if (item.id === 'files' && permissionConfig.hideFilesTab) {
|
||||
return false
|
||||
}
|
||||
@@ -324,6 +319,9 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
if (!isBillingEnabled && (activeSection === 'subscription' || activeSection === 'team')) {
|
||||
return 'general'
|
||||
}
|
||||
if (activeSection === 'environment' || activeSection === 'integrations') {
|
||||
return 'credentials'
|
||||
}
|
||||
return activeSection
|
||||
}, [activeSection])
|
||||
|
||||
@@ -342,7 +340,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
(sectionId: SettingsSection) => {
|
||||
if (sectionId === effectiveActiveSection) return
|
||||
|
||||
if (effectiveActiveSection === 'environment' && environmentBeforeLeaveHandler.current) {
|
||||
if (effectiveActiveSection === 'credentials' && environmentBeforeLeaveHandler.current) {
|
||||
environmentBeforeLeaveHandler.current(() => setActiveSection(sectionId))
|
||||
return
|
||||
}
|
||||
@@ -370,7 +368,11 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
|
||||
useEffect(() => {
|
||||
const handleOpenSettings = (event: CustomEvent<{ tab: SettingsSection }>) => {
|
||||
setActiveSection(event.detail.tab)
|
||||
if (event.detail.tab === 'environment' || event.detail.tab === 'integrations') {
|
||||
setActiveSection('credentials')
|
||||
} else {
|
||||
setActiveSection(event.detail.tab)
|
||||
}
|
||||
onOpenChange(true)
|
||||
}
|
||||
|
||||
@@ -479,13 +481,19 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
const handleDialogOpenChange = (newOpen: boolean) => {
|
||||
if (
|
||||
!newOpen &&
|
||||
effectiveActiveSection === 'environment' &&
|
||||
effectiveActiveSection === 'credentials' &&
|
||||
environmentBeforeLeaveHandler.current
|
||||
) {
|
||||
environmentBeforeLeaveHandler.current(() => onOpenChange(false))
|
||||
environmentBeforeLeaveHandler.current(() => {
|
||||
if (integrationsCloseHandler.current) {
|
||||
integrationsCloseHandler.current(newOpen)
|
||||
} else {
|
||||
onOpenChange(false)
|
||||
}
|
||||
})
|
||||
} else if (
|
||||
!newOpen &&
|
||||
effectiveActiveSection === 'integrations' &&
|
||||
effectiveActiveSection === 'credentials' &&
|
||||
integrationsCloseHandler.current
|
||||
) {
|
||||
integrationsCloseHandler.current(newOpen)
|
||||
@@ -502,7 +510,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
</VisuallyHidden.Root>
|
||||
<VisuallyHidden.Root>
|
||||
<DialogPrimitive.Description>
|
||||
Configure your workspace settings, environment variables, integrations, and preferences
|
||||
Configure your workspace settings, credentials, and preferences
|
||||
</DialogPrimitive.Description>
|
||||
</VisuallyHidden.Root>
|
||||
|
||||
@@ -539,18 +547,14 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
</SModalMainHeader>
|
||||
<SModalMainBody>
|
||||
{effectiveActiveSection === 'general' && <General onOpenChange={onOpenChange} />}
|
||||
{effectiveActiveSection === 'environment' && (
|
||||
<EnvironmentVariables
|
||||
{effectiveActiveSection === 'credentials' && (
|
||||
<Credentials
|
||||
onOpenChange={onOpenChange}
|
||||
registerCloseHandler={registerIntegrationsCloseHandler}
|
||||
registerBeforeLeaveHandler={registerEnvironmentBeforeLeaveHandler}
|
||||
/>
|
||||
)}
|
||||
{effectiveActiveSection === 'template-profile' && <TemplateProfile />}
|
||||
{effectiveActiveSection === 'integrations' && (
|
||||
<Integrations
|
||||
onOpenChange={onOpenChange}
|
||||
registerCloseHandler={registerIntegrationsCloseHandler}
|
||||
/>
|
||||
)}
|
||||
{effectiveActiveSection === 'credential-sets' && <CredentialSets />}
|
||||
{effectiveActiveSection === 'access-control' && <AccessControl />}
|
||||
{effectiveActiveSection === 'apikeys' && <ApiKeys onOpenChange={onOpenChange} />}
|
||||
|
||||
@@ -4,14 +4,12 @@ import { useEffect } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { useReferralAttribution } from '@/hooks/use-referral-attribution'
|
||||
|
||||
const logger = createLogger('WorkspacePage')
|
||||
|
||||
export default function WorkspacePage() {
|
||||
const router = useRouter()
|
||||
const { data: session, isPending } = useSession()
|
||||
useReferralAttribution()
|
||||
|
||||
useEffect(() => {
|
||||
const redirectToFirstWorkspace = async () => {
|
||||
|
||||
@@ -589,7 +589,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
|
||||
|
||||
export const scheduleExecution = task({
|
||||
id: 'schedule-execution',
|
||||
machine: 'medium-1x',
|
||||
retry: {
|
||||
maxAttempts: 1,
|
||||
},
|
||||
|
||||
@@ -669,7 +669,6 @@ async function executeWebhookJobInternal(
|
||||
|
||||
export const webhookExecution = task({
|
||||
id: 'webhook-execution',
|
||||
machine: 'medium-1x',
|
||||
retry: {
|
||||
maxAttempts: 1,
|
||||
},
|
||||
|
||||
@@ -197,6 +197,5 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
|
||||
|
||||
export const workflowExecutionTask = task({
|
||||
id: 'workflow-execution',
|
||||
machine: 'medium-1x',
|
||||
run: executeWorkflowJob,
|
||||
})
|
||||
|
||||
@@ -394,7 +394,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Page Property Operations
|
||||
{ label: 'List Page Properties', id: 'list_page_properties' },
|
||||
{ label: 'Create Page Property', id: 'create_page_property' },
|
||||
{ label: 'Delete Page Property', id: 'delete_page_property' },
|
||||
// Search Operations
|
||||
{ label: 'Search Content', id: 'search' },
|
||||
{ label: 'Search in Space', id: 'search_in_space' },
|
||||
@@ -415,9 +414,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Label Operations
|
||||
{ label: 'List Labels', id: 'list_labels' },
|
||||
{ label: 'Add Label', id: 'add_label' },
|
||||
{ label: 'Delete Label', id: 'delete_label' },
|
||||
{ label: 'Get Pages by Label', id: 'get_pages_by_label' },
|
||||
{ label: 'List Space Labels', id: 'list_space_labels' },
|
||||
// Space Operations
|
||||
{ label: 'Get Space', id: 'get_space' },
|
||||
{ label: 'List Spaces', id: 'list_spaces' },
|
||||
@@ -489,8 +485,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'search_in_space',
|
||||
'get_space',
|
||||
'list_spaces',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
not: true,
|
||||
},
|
||||
@@ -506,8 +500,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_labels',
|
||||
'upload_attachment',
|
||||
'add_label',
|
||||
'delete_label',
|
||||
'delete_page_property',
|
||||
'get_page_children',
|
||||
'get_page_ancestors',
|
||||
'list_page_versions',
|
||||
@@ -535,8 +527,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'search_in_space',
|
||||
'get_space',
|
||||
'list_spaces',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
not: true,
|
||||
},
|
||||
@@ -552,8 +542,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_labels',
|
||||
'upload_attachment',
|
||||
'add_label',
|
||||
'delete_label',
|
||||
'delete_page_property',
|
||||
'get_page_children',
|
||||
'get_page_ancestors',
|
||||
'list_page_versions',
|
||||
@@ -578,7 +566,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'search_in_space',
|
||||
'create_blogpost',
|
||||
'list_blogposts_in_space',
|
||||
'list_space_labels',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -614,14 +601,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'create_page_property' },
|
||||
},
|
||||
{
|
||||
id: 'propertyId',
|
||||
title: 'Property ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter property ID to delete',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'delete_page_property' },
|
||||
},
|
||||
{
|
||||
id: 'title',
|
||||
title: 'Title',
|
||||
@@ -715,7 +694,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter label name',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['add_label', 'delete_label'] },
|
||||
condition: { field: 'operation', value: 'add_label' },
|
||||
},
|
||||
{
|
||||
id: 'labelPrefix',
|
||||
@@ -730,14 +709,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
value: () => 'global',
|
||||
condition: { field: 'operation', value: 'add_label' },
|
||||
},
|
||||
{
|
||||
id: 'labelId',
|
||||
title: 'Label ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter label ID',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'get_pages_by_label' },
|
||||
},
|
||||
{
|
||||
id: 'blogPostStatus',
|
||||
title: 'Status',
|
||||
@@ -788,8 +759,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_page_versions',
|
||||
'list_page_properties',
|
||||
'list_labels',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -811,8 +780,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_page_versions',
|
||||
'list_page_properties',
|
||||
'list_labels',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -833,7 +800,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Property Tools
|
||||
'confluence_list_page_properties',
|
||||
'confluence_create_page_property',
|
||||
'confluence_delete_page_property',
|
||||
// Search Tools
|
||||
'confluence_search',
|
||||
'confluence_search_in_space',
|
||||
@@ -854,9 +820,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Label Tools
|
||||
'confluence_list_labels',
|
||||
'confluence_add_label',
|
||||
'confluence_delete_label',
|
||||
'confluence_get_pages_by_label',
|
||||
'confluence_list_space_labels',
|
||||
// Space Tools
|
||||
'confluence_get_space',
|
||||
'confluence_list_spaces',
|
||||
@@ -889,8 +852,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
return 'confluence_list_page_properties'
|
||||
case 'create_page_property':
|
||||
return 'confluence_create_page_property'
|
||||
case 'delete_page_property':
|
||||
return 'confluence_delete_page_property'
|
||||
// Search Operations
|
||||
case 'search':
|
||||
return 'confluence_search'
|
||||
@@ -926,12 +887,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
return 'confluence_list_labels'
|
||||
case 'add_label':
|
||||
return 'confluence_add_label'
|
||||
case 'delete_label':
|
||||
return 'confluence_delete_label'
|
||||
case 'get_pages_by_label':
|
||||
return 'confluence_get_pages_by_label'
|
||||
case 'list_space_labels':
|
||||
return 'confluence_list_space_labels'
|
||||
// Space Operations
|
||||
case 'get_space':
|
||||
return 'confluence_get_space'
|
||||
@@ -953,9 +908,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
versionNumber,
|
||||
propertyKey,
|
||||
propertyValue,
|
||||
propertyId,
|
||||
labelPrefix,
|
||||
labelId,
|
||||
blogPostStatus,
|
||||
purge,
|
||||
bodyFormat,
|
||||
@@ -1006,9 +959,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
}
|
||||
}
|
||||
|
||||
// Operations that support generic cursor pagination.
|
||||
// get_pages_by_label and list_space_labels have dedicated handlers
|
||||
// below that pass cursor along with their required params (labelId, spaceId).
|
||||
// Operations that support cursor pagination
|
||||
const supportsCursor = [
|
||||
'list_attachments',
|
||||
'list_spaces',
|
||||
@@ -1045,35 +996,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'delete_page_property') {
|
||||
return {
|
||||
credential,
|
||||
pageId: effectivePageId,
|
||||
operation,
|
||||
propertyId,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'get_pages_by_label') {
|
||||
return {
|
||||
credential,
|
||||
operation,
|
||||
labelId,
|
||||
cursor: cursor || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'list_space_labels') {
|
||||
return {
|
||||
credential,
|
||||
operation,
|
||||
cursor: cursor || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'upload_attachment') {
|
||||
const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
|
||||
if (!normalizedFile) {
|
||||
@@ -1122,9 +1044,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
|
||||
attachmentComment: { type: 'string', description: 'Comment for the attachment' },
|
||||
labelName: { type: 'string', description: 'Label name' },
|
||||
labelId: { type: 'string', description: 'Label identifier' },
|
||||
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
|
||||
propertyId: { type: 'string', description: 'Property identifier' },
|
||||
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
|
||||
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
|
||||
bodyFormat: { type: 'string', description: 'Body format for comments' },
|
||||
@@ -1160,7 +1080,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Label Results
|
||||
labels: { type: 'array', description: 'List of labels' },
|
||||
labelName: { type: 'string', description: 'Label name' },
|
||||
labelId: { type: 'string', description: 'Label identifier' },
|
||||
// Space Results
|
||||
spaces: { type: 'array', description: 'List of spaces' },
|
||||
spaceId: { type: 'string', description: 'Space identifier' },
|
||||
|
||||
@@ -205,10 +205,6 @@ export const CREDENTIAL_SET = {
|
||||
PREFIX: 'credentialSet:',
|
||||
} as const
|
||||
|
||||
export const CREDENTIAL = {
|
||||
FOREIGN_LABEL: 'Saved by collaborator',
|
||||
} as const
|
||||
|
||||
export function isCredentialSetValue(value: string | null | undefined): boolean {
|
||||
return typeof value === 'string' && value.startsWith(CREDENTIAL_SET.PREFIX)
|
||||
}
|
||||
|
||||
apps/sim/hooks/queries/credentials.ts (new file, 268 lines)
@@ -0,0 +1,268 @@
|
||||
'use client'
|
||||
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { environmentKeys } from '@/hooks/queries/environment'
|
||||
import { fetchJson } from '@/hooks/selectors/helpers'
|
||||
|
||||
export type WorkspaceCredentialType = 'oauth' | 'env_workspace' | 'env_personal'
|
||||
export type WorkspaceCredentialRole = 'admin' | 'member'
|
||||
export type WorkspaceCredentialMemberStatus = 'active' | 'pending' | 'revoked'
|
||||
|
||||
export interface WorkspaceCredential {
|
||||
id: string
|
||||
workspaceId: string
|
||||
type: WorkspaceCredentialType
|
||||
displayName: string
|
||||
providerId: string | null
|
||||
accountId: string | null
|
||||
envKey: string | null
|
||||
envOwnerUserId: string | null
|
||||
createdBy: string
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
role?: WorkspaceCredentialRole
|
||||
status?: WorkspaceCredentialMemberStatus
|
||||
}
|
||||
|
||||
export interface WorkspaceCredentialMember {
|
||||
id: string
|
||||
userId: string
|
||||
role: WorkspaceCredentialRole
|
||||
status: WorkspaceCredentialMemberStatus
|
||||
joinedAt: string | null
|
||||
invitedBy: string | null
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
userName: string | null
|
||||
userEmail: string | null
|
||||
userImage: string | null
|
||||
}
|
||||
|
||||
interface CredentialListResponse {
|
||||
credentials?: WorkspaceCredential[]
|
||||
}
|
||||
|
||||
interface CredentialResponse {
|
||||
credential?: WorkspaceCredential | null
|
||||
}
|
||||
|
||||
interface MembersResponse {
|
||||
members?: WorkspaceCredentialMember[]
|
||||
}
|
||||
|
||||
export const workspaceCredentialKeys = {
|
||||
all: ['workspaceCredentials'] as const,
|
||||
list: (workspaceId?: string, type?: string, providerId?: string) =>
|
||||
['workspaceCredentials', workspaceId ?? 'none', type ?? 'all', providerId ?? 'all'] as const,
|
||||
detail: (credentialId?: string) =>
|
||||
['workspaceCredentials', 'detail', credentialId ?? 'none'] as const,
|
||||
members: (credentialId?: string) =>
|
||||
['workspaceCredentials', 'detail', credentialId ?? 'none', 'members'] as const,
|
||||
}
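// Illustrative usage sketch, not part of the diff: the factory above produces
// hierarchical keys, so react-query prefix matching lets the mutations below
// invalidate broadly or narrowly. `someCredentialId` is a placeholder value.
// queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
// queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.detail(someCredentialId) })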
|
||||
|
||||
export function useWorkspaceCredentials(params: {
|
||||
workspaceId?: string
|
||||
type?: WorkspaceCredentialType
|
||||
providerId?: string
|
||||
enabled?: boolean
|
||||
}) {
|
||||
const { workspaceId, type, providerId, enabled = true } = params
|
||||
|
||||
return useQuery<WorkspaceCredential[]>({
|
||||
queryKey: workspaceCredentialKeys.list(workspaceId, type, providerId),
|
||||
queryFn: async () => {
|
||||
if (!workspaceId) return []
|
||||
const data = await fetchJson<CredentialListResponse>('/api/credentials', {
|
||||
searchParams: {
|
||||
workspaceId,
|
||||
type,
|
||||
providerId,
|
||||
},
|
||||
})
|
||||
return data.credentials ?? []
|
||||
},
|
||||
enabled: Boolean(workspaceId) && enabled,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
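// Illustrative usage sketch, not part of the diff: reading workspace credentials
// from a component. `activeWorkspaceId` and the logging are placeholders.
// const { data: credentials = [], isLoading } = useWorkspaceCredentials({
//   workspaceId: activeWorkspaceId,
//   type: 'oauth',
// })
// if (!isLoading) credentials.forEach((c) => console.log(c.displayName, c.providerId))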
|
||||
|
||||
export function useWorkspaceCredential(credentialId?: string, enabled = true) {
|
||||
return useQuery<WorkspaceCredential | null>({
|
||||
queryKey: workspaceCredentialKeys.detail(credentialId),
|
||||
queryFn: async () => {
|
||||
if (!credentialId) return null
|
||||
const data = await fetchJson<CredentialResponse>(`/api/credentials/${credentialId}`)
|
||||
return data.credential ?? null
|
||||
},
|
||||
enabled: Boolean(credentialId) && enabled,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
export function useCreateWorkspaceCredential() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: {
|
||||
workspaceId: string
|
||||
type: WorkspaceCredentialType
|
||||
displayName?: string
|
||||
providerId?: string
|
||||
accountId?: string
|
||||
envKey?: string
|
||||
envOwnerUserId?: string
|
||||
}) => {
|
||||
const response = await fetch('/api/credentials', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(payload),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to create credential')
|
||||
}
|
||||
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.list(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.all,
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useUpdateWorkspaceCredential() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: {
|
||||
credentialId: string
|
||||
displayName?: string
|
||||
accountId?: string
|
||||
}) => {
|
||||
const response = await fetch(`/api/credentials/${payload.credentialId}`, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
displayName: payload.displayName,
|
||||
accountId: payload.accountId,
|
||||
}),
|
||||
})
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to update credential')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.all,
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useDeleteWorkspaceCredential() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (credentialId: string) => {
|
||||
const response = await fetch(`/api/credentials/${credentialId}`, {
|
||||
method: 'DELETE',
|
||||
})
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to delete credential')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, credentialId) => {
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.detail(credentialId) })
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
|
||||
queryClient.invalidateQueries({ queryKey: environmentKeys.all })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useWorkspaceCredentialMembers(credentialId?: string) {
|
||||
return useQuery<WorkspaceCredentialMember[]>({
|
||||
queryKey: workspaceCredentialKeys.members(credentialId),
|
||||
queryFn: async () => {
|
||||
if (!credentialId) return []
|
||||
const data = await fetchJson<MembersResponse>(`/api/credentials/${credentialId}/members`)
|
||||
return data.members ?? []
|
||||
},
|
||||
enabled: Boolean(credentialId),
|
||||
staleTime: 30 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
export function useUpsertWorkspaceCredentialMember() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: {
|
||||
credentialId: string
|
||||
userId: string
|
||||
role: WorkspaceCredentialRole
|
||||
}) => {
|
||||
const response = await fetch(`/api/credentials/${payload.credentialId}/members`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
userId: payload.userId,
|
||||
role: payload.role,
|
||||
}),
|
||||
})
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to update credential member')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.members(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export function useRemoveWorkspaceCredentialMember() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async (payload: { credentialId: string; userId: string }) => {
|
||||
const response = await fetch(
|
||||
`/api/credentials/${payload.credentialId}/members?userId=${encodeURIComponent(payload.userId)}`,
|
||||
{ method: 'DELETE' }
|
||||
)
|
||||
if (!response.ok) {
|
||||
const data = await response.json()
|
||||
throw new Error(data.error || 'Failed to remove credential member')
|
||||
}
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.members(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.detail(variables.credentialId),
|
||||
})
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {

const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.

Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.
Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.

Guidelines:
- Use the specific values provided (credential names, channel names, model names)

@@ -169,9 +169,9 @@ export function useConnectOAuthService() {
|
||||
|
||||
interface DisconnectServiceParams {
|
||||
provider: string
|
||||
providerId: string
|
||||
providerId?: string
|
||||
serviceId: string
|
||||
accountId: string
|
||||
accountId?: string
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -182,7 +182,7 @@ export function useDisconnectOAuthService() {
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({ provider, providerId }: DisconnectServiceParams) => {
|
||||
mutationFn: async ({ provider, providerId, accountId }: DisconnectServiceParams) => {
|
||||
const response = await fetch('/api/auth/oauth/disconnect', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
@@ -191,6 +191,7 @@ export function useDisconnectOAuthService() {
|
||||
body: JSON.stringify({
|
||||
provider,
|
||||
providerId,
|
||||
accountId,
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -212,7 +213,8 @@ export function useDisconnectOAuthService() {
|
||||
oauthConnectionsKeys.connections(),
|
||||
previousServices.map((svc) => {
|
||||
if (svc.id === serviceId) {
|
||||
const updatedAccounts = svc.accounts?.filter((acc) => acc.id !== accountId) || []
|
||||
const updatedAccounts =
|
||||
accountId && svc.accounts ? svc.accounts.filter((acc) => acc.id !== accountId) : []
|
||||
return {
|
||||
...svc,
|
||||
accounts: updatedAccounts,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import type { Credential } from '@/lib/oauth'
|
||||
import { CREDENTIAL, CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { CREDENTIAL_SET } from '@/executor/constants'
|
||||
import { useCredentialSetDetail } from '@/hooks/queries/credential-sets'
|
||||
import { fetchJson } from '@/hooks/selectors/helpers'
|
||||
|
||||
@@ -13,15 +13,34 @@ interface CredentialDetailResponse {
|
||||
}
|
||||
|
||||
export const oauthCredentialKeys = {
|
||||
list: (providerId?: string) => ['oauthCredentials', providerId ?? 'none'] as const,
|
||||
list: (providerId?: string, workspaceId?: string, workflowId?: string) =>
|
||||
[
|
||||
'oauthCredentials',
|
||||
providerId ?? 'none',
|
||||
workspaceId ?? 'none',
|
||||
workflowId ?? 'none',
|
||||
] as const,
|
||||
detail: (credentialId?: string, workflowId?: string) =>
|
||||
['oauthCredentialDetail', credentialId ?? 'none', workflowId ?? 'none'] as const,
|
||||
}
|
||||
|
||||
export async function fetchOAuthCredentials(providerId: string): Promise<Credential[]> {
|
||||
interface FetchOAuthCredentialsParams {
|
||||
providerId: string
|
||||
workspaceId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export async function fetchOAuthCredentials(
|
||||
params: FetchOAuthCredentialsParams
|
||||
): Promise<Credential[]> {
|
||||
const { providerId, workspaceId, workflowId } = params
|
||||
if (!providerId) return []
|
||||
const data = await fetchJson<CredentialListResponse>('/api/auth/oauth/credentials', {
|
||||
searchParams: { provider: providerId },
|
||||
searchParams: {
|
||||
provider: providerId,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
},
|
||||
})
|
||||
return data.credentials ?? []
|
||||
}
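
The fetcher now takes a params object so callers can scope the credential list; both call shapes below are valid (illustrative, not part of the diff; the provider slug and ids are invented):

const unscoped = await fetchOAuthCredentials({ providerId: 'google-drive' })
const scoped = await fetchOAuthCredentials({
  providerId: 'google-drive',
  workspaceId: 'ws_123',
  workflowId: 'wf_456',
})
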
|
||||
@@ -40,10 +59,44 @@ export async function fetchOAuthCredentialDetail(
|
||||
return data.credentials ?? []
|
||||
}
|
||||
|
||||
export function useOAuthCredentials(providerId?: string, enabled = true) {
|
||||
interface UseOAuthCredentialsOptions {
|
||||
enabled?: boolean
|
||||
workspaceId?: string
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
function resolveOptions(
|
||||
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
|
||||
): Required<UseOAuthCredentialsOptions> {
|
||||
if (typeof enabledOrOptions === 'boolean') {
|
||||
return {
|
||||
enabled: enabledOrOptions,
|
||||
workspaceId: '',
|
||||
workflowId: '',
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
enabled: enabledOrOptions?.enabled ?? true,
|
||||
workspaceId: enabledOrOptions?.workspaceId ?? '',
|
||||
workflowId: enabledOrOptions?.workflowId ?? '',
|
||||
}
|
||||
}
|
||||
|
||||
export function useOAuthCredentials(
|
||||
providerId?: string,
|
||||
enabledOrOptions?: boolean | UseOAuthCredentialsOptions
|
||||
) {
|
||||
const { enabled, workspaceId, workflowId } = resolveOptions(enabledOrOptions)
|
||||
|
||||
return useQuery<Credential[]>({
|
||||
queryKey: oauthCredentialKeys.list(providerId),
|
||||
queryFn: () => fetchOAuthCredentials(providerId ?? ''),
|
||||
queryKey: oauthCredentialKeys.list(providerId, workspaceId, workflowId),
|
||||
queryFn: () =>
|
||||
fetchOAuthCredentials({
|
||||
providerId: providerId ?? '',
|
||||
workspaceId: workspaceId || undefined,
|
||||
workflowId: workflowId || undefined,
|
||||
}),
|
||||
enabled: Boolean(providerId) && enabled,
|
||||
staleTime: 60 * 1000,
|
||||
})
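
Since the second parameter accepts either the legacy boolean or an options object, existing call sites keep compiling; both forms below are valid (illustrative; the provider slug and ids are invented):

useOAuthCredentials('google-email', true)
useOAuthCredentials('google-email', {
  enabled: true,
  workspaceId: 'ws_123',
  workflowId: 'wf_456',
})
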
|
||||
@@ -62,7 +115,12 @@ export function useOAuthCredentialDetail(
|
||||
})
|
||||
}
|
||||
|
||||
export function useCredentialName(credentialId?: string, providerId?: string, workflowId?: string) {
|
||||
export function useCredentialName(
|
||||
credentialId?: string,
|
||||
providerId?: string,
|
||||
workflowId?: string,
|
||||
workspaceId?: string
|
||||
) {
|
||||
// Check if this is a credential set value
|
||||
const isCredentialSet = credentialId?.startsWith(CREDENTIAL_SET.PREFIX) ?? false
|
||||
const credentialSetId = isCredentialSet
|
||||
@@ -77,7 +135,11 @@ export function useCredentialName(credentialId?: string, providerId?: string, wo
|
||||
|
||||
const { data: credentials = [], isFetching: credentialsLoading } = useOAuthCredentials(
|
||||
providerId,
|
||||
Boolean(providerId) && !isCredentialSet
|
||||
{
|
||||
enabled: Boolean(providerId) && !isCredentialSet,
|
||||
workspaceId,
|
||||
workflowId,
|
||||
}
|
||||
)
|
||||
|
||||
const selectedCredential = credentials.find((cred) => cred.id === credentialId)
|
||||
@@ -92,18 +154,18 @@ export function useCredentialName(credentialId?: string, providerId?: string, wo
|
||||
shouldFetchDetail
|
||||
)
|
||||
|
||||
const detailCredential = foreignCredentials[0]
|
||||
const hasForeignMeta = foreignCredentials.length > 0
|
||||
const isForeignCredentialSet = isCredentialSet && !credentialSetData && !credentialSetLoading
|
||||
|
||||
const displayName =
|
||||
credentialSetData?.name ??
|
||||
selectedCredential?.name ??
|
||||
(hasForeignMeta ? CREDENTIAL.FOREIGN_LABEL : null) ??
|
||||
(isForeignCredentialSet ? CREDENTIAL.FOREIGN_LABEL : null)
|
||||
credentialSetData?.name ?? selectedCredential?.name ?? detailCredential?.name ?? null
|
||||
|
||||
return {
|
||||
displayName,
|
||||
isLoading: credentialsLoading || foreignLoading || (isCredentialSet && credentialSetLoading),
|
||||
isLoading:
|
||||
credentialsLoading ||
|
||||
foreignLoading ||
|
||||
(isCredentialSet && credentialSetLoading && !credentialSetData),
|
||||
hasForeignMeta,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useCallback } from 'react'
|
||||
import { useCallback, useRef } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type {
|
||||
BlockCompletedData,
|
||||
@@ -16,18 +16,6 @@ import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
|
||||
const logger = createLogger('useExecutionStream')
|
||||
|
||||
/**
|
||||
* Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
|
||||
* These should be treated as clean disconnects, not execution errors.
|
||||
*/
|
||||
function isClientDisconnectError(error: any): boolean {
|
||||
if (error.name === 'AbortError') return true
|
||||
const msg = (error.message ?? '').toLowerCase()
|
||||
return (
|
||||
msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes SSE events from a response body and invokes appropriate callbacks.
|
||||
*/
|
||||
@@ -133,7 +121,6 @@ export interface ExecuteStreamOptions {
|
||||
parallels?: Record<string, any>
|
||||
}
|
||||
stopAfterBlockId?: string
|
||||
onExecutionId?: (executionId: string) => void
|
||||
callbacks?: ExecutionStreamCallbacks
|
||||
}
|
||||
|
||||
@@ -142,40 +129,30 @@ export interface ExecuteFromBlockOptions {
|
||||
startBlockId: string
|
||||
sourceSnapshot: SerializableExecutionState
|
||||
input?: any
|
||||
onExecutionId?: (executionId: string) => void
|
||||
callbacks?: ExecutionStreamCallbacks
|
||||
}
|
||||
|
||||
export interface ReconnectStreamOptions {
|
||||
workflowId: string
|
||||
executionId: string
|
||||
fromEventId?: number
|
||||
callbacks?: ExecutionStreamCallbacks
|
||||
}
|
||||
|
||||
/**
|
||||
* Module-level map shared across all hook instances.
|
||||
* Ensures ANY instance can cancel streams started by ANY other instance,
|
||||
* which is critical for SPA navigation where the original hook instance unmounts
|
||||
* but the SSE stream must be cancellable from the new instance.
|
||||
*/
|
||||
const sharedAbortControllers = new Map<string, AbortController>()
|
||||
|
||||
/**
|
||||
* Hook for executing workflows via server-side SSE streaming.
|
||||
* Supports concurrent executions via per-workflow AbortController maps.
|
||||
*/
|
||||
export function useExecutionStream() {
|
||||
const execute = useCallback(async (options: ExecuteStreamOptions) => {
|
||||
const { workflowId, callbacks = {}, onExecutionId, ...payload } = options
|
||||
const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
|
||||
const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
|
||||
new Map()
|
||||
)
|
||||
|
||||
const existing = sharedAbortControllers.get(workflowId)
|
||||
const execute = useCallback(async (options: ExecuteStreamOptions) => {
|
||||
const { workflowId, callbacks = {}, ...payload } = options
|
||||
|
||||
const existing = abortControllersRef.current.get(workflowId)
|
||||
if (existing) {
|
||||
existing.abort()
|
||||
}
|
||||
|
||||
const abortController = new AbortController()
|
||||
sharedAbortControllers.set(workflowId, abortController)
|
||||
abortControllersRef.current.set(workflowId, abortController)
|
||||
currentExecutionsRef.current.delete(workflowId)
|
||||
|
||||
try {
|
||||
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
|
||||
@@ -200,48 +177,42 @@ export function useExecutionStream() {
|
||||
throw new Error('No response body')
|
||||
}
|
||||
|
||||
const serverExecutionId = response.headers.get('X-Execution-Id')
|
||||
if (serverExecutionId) {
|
||||
onExecutionId?.(serverExecutionId)
|
||||
const executionId = response.headers.get('X-Execution-Id')
|
||||
if (executionId) {
|
||||
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
await processSSEStream(reader, callbacks, 'Execution')
|
||||
} catch (error: any) {
|
||||
if (isClientDisconnectError(error)) {
|
||||
logger.info('Execution stream disconnected (page unload or abort)')
|
||||
return
|
||||
if (error.name === 'AbortError') {
|
||||
logger.info('Execution stream cancelled')
|
||||
callbacks.onExecutionCancelled?.({ duration: 0 })
|
||||
} else {
|
||||
logger.error('Execution stream error:', error)
|
||||
callbacks.onExecutionError?.({
|
||||
error: error.message || 'Unknown error',
|
||||
duration: 0,
|
||||
})
|
||||
}
|
||||
logger.error('Execution stream error:', error)
|
||||
callbacks.onExecutionError?.({
|
||||
error: error.message || 'Unknown error',
|
||||
duration: 0,
|
||||
})
|
||||
throw error
|
||||
} finally {
|
||||
if (sharedAbortControllers.get(workflowId) === abortController) {
|
||||
sharedAbortControllers.delete(workflowId)
|
||||
}
|
||||
abortControllersRef.current.delete(workflowId)
|
||||
currentExecutionsRef.current.delete(workflowId)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
|
||||
const {
|
||||
workflowId,
|
||||
startBlockId,
|
||||
sourceSnapshot,
|
||||
input,
|
||||
onExecutionId,
|
||||
callbacks = {},
|
||||
} = options
|
||||
const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
|
||||
|
||||
const existing = sharedAbortControllers.get(workflowId)
|
||||
const existing = abortControllersRef.current.get(workflowId)
|
||||
if (existing) {
|
||||
existing.abort()
|
||||
}
|
||||
|
||||
const abortController = new AbortController()
|
||||
sharedAbortControllers.set(workflowId, abortController)
|
||||
abortControllersRef.current.set(workflowId, abortController)
|
||||
currentExecutionsRef.current.delete(workflowId)
|
||||
|
||||
try {
|
||||
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
|
||||
@@ -275,80 +246,64 @@ export function useExecutionStream() {
|
||||
throw new Error('No response body')
|
||||
}
|
||||
|
||||
const serverExecutionId = response.headers.get('X-Execution-Id')
|
||||
if (serverExecutionId) {
|
||||
onExecutionId?.(serverExecutionId)
|
||||
const executionId = response.headers.get('X-Execution-Id')
|
||||
if (executionId) {
|
||||
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
await processSSEStream(reader, callbacks, 'Run-from-block')
|
||||
} catch (error: any) {
|
||||
if (isClientDisconnectError(error)) {
|
||||
logger.info('Run-from-block stream disconnected (page unload or abort)')
|
||||
return
|
||||
if (error.name === 'AbortError') {
|
||||
logger.info('Run-from-block execution cancelled')
|
||||
callbacks.onExecutionCancelled?.({ duration: 0 })
|
||||
} else {
|
||||
logger.error('Run-from-block execution error:', error)
|
||||
callbacks.onExecutionError?.({
|
||||
error: error.message || 'Unknown error',
|
||||
duration: 0,
|
||||
})
|
||||
}
|
||||
logger.error('Run-from-block execution error:', error)
|
||||
callbacks.onExecutionError?.({
|
||||
error: error.message || 'Unknown error',
|
||||
duration: 0,
|
||||
})
|
||||
throw error
|
||||
} finally {
|
||||
if (sharedAbortControllers.get(workflowId) === abortController) {
|
||||
sharedAbortControllers.delete(workflowId)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
|
||||
const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
|
||||
|
||||
const existing = sharedAbortControllers.get(workflowId)
|
||||
if (existing) {
|
||||
existing.abort()
|
||||
}
|
||||
|
||||
const abortController = new AbortController()
|
||||
sharedAbortControllers.set(workflowId, abortController)
|
||||
try {
|
||||
const response = await fetch(
|
||||
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
|
||||
{ signal: abortController.signal }
|
||||
)
|
||||
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
|
||||
if (!response.body) throw new Error('No response body')
|
||||
|
||||
await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
|
||||
} catch (error: any) {
|
||||
if (isClientDisconnectError(error)) return
|
||||
logger.error('Reconnection stream error:', error)
|
||||
throw error
|
||||
} finally {
|
||||
if (sharedAbortControllers.get(workflowId) === abortController) {
|
||||
sharedAbortControllers.delete(workflowId)
|
||||
}
|
||||
abortControllersRef.current.delete(workflowId)
|
||||
currentExecutionsRef.current.delete(workflowId)
|
||||
}
|
||||
}, [])
|
||||
|
||||
const cancel = useCallback((workflowId?: string) => {
|
||||
if (workflowId) {
|
||||
const controller = sharedAbortControllers.get(workflowId)
|
||||
const execution = currentExecutionsRef.current.get(workflowId)
|
||||
if (execution) {
|
||||
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
|
||||
method: 'POST',
|
||||
}).catch(() => {})
|
||||
}
|
||||
|
||||
const controller = abortControllersRef.current.get(workflowId)
|
||||
if (controller) {
|
||||
controller.abort()
|
||||
sharedAbortControllers.delete(workflowId)
|
||||
abortControllersRef.current.delete(workflowId)
|
||||
}
|
||||
currentExecutionsRef.current.delete(workflowId)
|
||||
} else {
|
||||
for (const [, controller] of sharedAbortControllers) {
|
||||
for (const [, execution] of currentExecutionsRef.current) {
|
||||
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
|
||||
method: 'POST',
|
||||
}).catch(() => {})
|
||||
}
|
||||
|
||||
for (const [, controller] of abortControllersRef.current) {
|
||||
controller.abort()
|
||||
}
|
||||
sharedAbortControllers.clear()
|
||||
abortControllersRef.current.clear()
|
||||
currentExecutionsRef.current.clear()
|
||||
}
|
||||
}, [])
|
||||
|
||||
return {
|
||||
execute,
|
||||
executeFromBlock,
|
||||
reconnect,
|
||||
cancel,
|
||||
}
|
||||
}
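
A minimal consumer of the hook, to show the intended call pattern (illustrative only, not part of the diff; the workflow id and callback bodies are invented, and only a subset of ExecuteStreamOptions is passed):

const { execute, cancel } = useExecutionStream()

await execute({
  workflowId: 'wf_123',
  callbacks: {
    onExecutionError: ({ error }) => console.error('run failed:', error),
    onExecutionCancelled: () => console.info('run cancelled'),
  },
})

// Later, e.g. from a stop button:
cancel('wf_123')
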
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useRef } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
|
||||
const logger = createLogger('ReferralAttribution')
|
||||
|
||||
const COOKIE_NAME = 'sim_utm'
|
||||
|
||||
const TERMINAL_REASONS = new Set([
|
||||
'account_predates_cookie',
|
||||
'invalid_cookie',
|
||||
'no_utm_cookie',
|
||||
'no_matching_campaign',
|
||||
])
|
||||
|
||||
/**
|
||||
* Fires a one-shot `POST /api/attribution` when a `sim_utm` cookie is present.
|
||||
* Retries on transient failures; stops on terminal outcomes.
|
||||
*/
|
||||
export function useReferralAttribution() {
|
||||
const calledRef = useRef(false)
|
||||
|
||||
useEffect(() => {
|
||||
if (calledRef.current) return
|
||||
if (!document.cookie.includes(COOKIE_NAME)) return
|
||||
|
||||
calledRef.current = true
|
||||
|
||||
fetch('/api/attribution', { method: 'POST' })
|
||||
.then((res) => res.json())
|
||||
.then((data) => {
|
||||
if (data.attributed) {
|
||||
logger.info('Referral attribution successful', { bonusAmount: data.bonusAmount })
|
||||
} else if (data.error || TERMINAL_REASONS.has(data.reason)) {
|
||||
logger.info('Referral attribution skipped', { reason: data.reason || data.error })
|
||||
} else {
|
||||
calledRef.current = false
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.warn('Referral attribution failed, will retry', { error: err })
|
||||
calledRef.current = false
|
||||
})
|
||||
}, [])
|
||||
}
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
oneTimeToken,
|
||||
organization,
|
||||
} from 'better-auth/plugins'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { and, eq, inArray, sql } from 'drizzle-orm'
|
||||
import { headers } from 'next/headers'
|
||||
import Stripe from 'stripe'
|
||||
import {
|
||||
@@ -150,16 +150,6 @@ export const auth = betterAuth({
|
||||
account: {
|
||||
create: {
|
||||
before: async (account) => {
|
||||
// Only one credential per (userId, providerId) is allowed
|
||||
// If user reconnects (even with a different external account), delete the old one
|
||||
// and let Better Auth create the new one (returning false breaks account linking flow)
|
||||
const existing = await db.query.account.findFirst({
|
||||
where: and(
|
||||
eq(schema.account.userId, account.userId),
|
||||
eq(schema.account.providerId, account.providerId)
|
||||
),
|
||||
})
|
||||
|
||||
const modifiedAccount = { ...account }
|
||||
|
||||
if (account.providerId === 'salesforce' && account.accessToken) {
|
||||
@@ -189,32 +179,148 @@ export const auth = betterAuth({
|
||||
}
|
||||
}
|
||||
|
||||
// Handle Microsoft refresh token expiry
|
||||
if (isMicrosoftProvider(account.providerId)) {
|
||||
modifiedAccount.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
if (existing) {
|
||||
// Delete the existing account so Better Auth can create the new one
|
||||
// This allows account linking/re-authorization to succeed
|
||||
await db.delete(schema.account).where(eq(schema.account.id, existing.id))
|
||||
|
||||
// Preserve the existing account ID so references (like workspace notifications) continue to work
|
||||
modifiedAccount.id = existing.id
|
||||
|
||||
logger.info('[account.create.before] Deleted existing account for re-authorization', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
existingAccountId: existing.id,
|
||||
preservingId: true,
|
||||
})
|
||||
|
||||
// Sync webhooks for credential sets after reconnecting (in after hook)
|
||||
}
|
||||
|
||||
return { data: modifiedAccount }
|
||||
},
|
||||
after: async (account) => {
|
||||
/**
|
||||
* Migrate credentials from stale account rows to the newly created one.
|
||||
*
|
||||
* Each getUserInfo appends a random UUID to the stable external ID so
|
||||
* that Better Auth never blocks cross-user connections. This means
|
||||
* re-connecting the same external identity creates a new row. We detect
|
||||
* the stale siblings here by comparing the stable prefix (everything
|
||||
* before the trailing UUID), migrate any credential FKs to the new row,
|
||||
* then delete the stale rows.
|
||||
*/
|
||||
try {
|
||||
const UUID_SUFFIX_RE = /-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/
|
||||
const stablePrefix = account.accountId.replace(UUID_SUFFIX_RE, '')
|
||||
|
||||
if (stablePrefix && stablePrefix !== account.accountId) {
|
||||
const siblings = await db
|
||||
.select({ id: schema.account.id, accountId: schema.account.accountId })
|
||||
.from(schema.account)
|
||||
.where(
|
||||
and(
|
||||
eq(schema.account.userId, account.userId),
|
||||
eq(schema.account.providerId, account.providerId),
|
||||
sql`${schema.account.id} != ${account.id}`
|
||||
)
|
||||
)
|
||||
|
||||
const staleRows = siblings.filter(
|
||||
(row) => row.accountId.replace(UUID_SUFFIX_RE, '') === stablePrefix
|
||||
)
|
||||
|
||||
if (staleRows.length > 0) {
|
||||
const staleIds = staleRows.map((row) => row.id)
|
||||
|
||||
await db
|
||||
.update(schema.credential)
|
||||
.set({ accountId: account.id })
|
||||
.where(inArray(schema.credential.accountId, staleIds))
|
||||
|
||||
await db.delete(schema.account).where(inArray(schema.account.id, staleIds))
|
||||
|
||||
logger.info('[account.create.after] Migrated credentials from stale accounts', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
newAccountId: account.id,
|
||||
migratedFrom: staleIds,
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[account.create.after] Failed to clean up stale accounts', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
error,
|
||||
})
|
||||
}
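
To make the stale-sibling detection concrete: the regex strips only a trailing UUID, so rows created from the same external identity reduce to the same stable prefix (the ids below are invented):

const UUID_SUFFIX_RE = /-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/

'12345-1b4e28ba-2fa1-11d2-883f-0016d3cca427'.replace(UUID_SUFFIX_RE, '') // -> '12345'
'12345-9f8c1d2e-3a4b-4c5d-8e6f-7a8b9c0d1e2f'.replace(UUID_SUFFIX_RE, '') // -> '12345'
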
|
||||
|
||||
/**
|
||||
* If a pending credential draft exists for this (userId, providerId),
|
||||
* create the credential now with the user's chosen display name.
|
||||
* This is deterministic — the account row is guaranteed to exist.
|
||||
*/
|
||||
try {
|
||||
const [draft] = await db
|
||||
.select()
|
||||
.from(schema.pendingCredentialDraft)
|
||||
.where(
|
||||
and(
|
||||
eq(schema.pendingCredentialDraft.userId, account.userId),
|
||||
eq(schema.pendingCredentialDraft.providerId, account.providerId),
|
||||
sql`${schema.pendingCredentialDraft.expiresAt} > NOW()`
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (draft) {
|
||||
const credentialId = crypto.randomUUID()
|
||||
const now = new Date()
|
||||
|
||||
try {
|
||||
await db.insert(schema.credential).values({
|
||||
id: credentialId,
|
||||
workspaceId: draft.workspaceId,
|
||||
type: 'oauth',
|
||||
displayName: draft.displayName,
|
||||
providerId: account.providerId,
|
||||
accountId: account.id,
|
||||
createdBy: account.userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
await db.insert(schema.credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: account.userId,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: account.userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
|
||||
logger.info('[account.create.after] Created credential from draft', {
|
||||
credentialId,
|
||||
displayName: draft.displayName,
|
||||
providerId: account.providerId,
|
||||
accountId: account.id,
|
||||
})
|
||||
} catch (insertError: unknown) {
|
||||
const code =
|
||||
insertError && typeof insertError === 'object' && 'code' in insertError
|
||||
? (insertError as { code: string }).code
|
||||
: undefined
|
||||
if (code !== '23505') {
|
||||
throw insertError
|
||||
}
|
||||
logger.info('[account.create.after] Credential already exists, skipping draft', {
|
||||
providerId: account.providerId,
|
||||
accountId: account.id,
|
||||
})
|
||||
}
|
||||
|
||||
await db
|
||||
.delete(schema.pendingCredentialDraft)
|
||||
.where(eq(schema.pendingCredentialDraft.id, draft.id))
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[account.create.after] Failed to create credential from draft', {
|
||||
userId: account.userId,
|
||||
providerId: account.providerId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
const { ensureUserStatsExists } = await import('@/lib/billing/core/usage')
|
||||
await ensureUserStatsExists(account.userId)
|
||||
@@ -1487,7 +1593,7 @@ export const auth = betterAuth({
|
||||
})
|
||||
|
||||
return {
|
||||
id: `${data.user_id || data.hub_id.toString()}-${crypto.randomUUID()}`,
|
||||
id: `${(data.user_id || data.hub_id).toString()}-${crypto.randomUUID()}`,
|
||||
name: data.user || 'HubSpot User',
|
||||
email: data.user || `hubspot-${data.hub_id}@hubspot.com`,
|
||||
emailVerified: true,
|
||||
@@ -1541,7 +1647,7 @@ export const auth = betterAuth({
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
id: `${data.user_id || data.sub}-${crypto.randomUUID()}`,
|
||||
id: `${(data.user_id || data.sub).toString()}-${crypto.randomUUID()}`,
|
||||
name: data.name || 'Salesforce User',
|
||||
email: data.email || `salesforce-${data.user_id}@salesforce.com`,
|
||||
emailVerified: data.email_verified || true,
|
||||
@@ -1600,7 +1706,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.data.id}-${crypto.randomUUID()}`,
|
||||
id: `${profile.data.id.toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.data.name || 'X User',
|
||||
email: `${profile.data.username}@x.com`,
|
||||
image: profile.data.profile_image_url,
|
||||
@@ -1680,7 +1786,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.account_id}-${crypto.randomUUID()}`,
|
||||
id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.display_name || 'Confluence User',
|
||||
email: profile.email || `${profile.account_id}@atlassian.com`,
|
||||
image: profile.picture || undefined,
|
||||
@@ -1791,7 +1897,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.account_id}-${crypto.randomUUID()}`,
|
||||
id: `${profile.account_id.toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.display_name || 'Jira User',
|
||||
email: profile.email || `${profile.account_id}@atlassian.com`,
|
||||
image: profile.picture || undefined,
|
||||
@@ -1841,7 +1947,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${data.id}-${crypto.randomUUID()}`,
|
||||
id: `${data.id.toString()}-${crypto.randomUUID()}`,
|
||||
name: data.email ? data.email.split('@')[0] : 'Airtable User',
|
||||
email: data.email || `${data.id}@airtable.user`,
|
||||
emailVerified: !!data.email,
|
||||
@@ -1890,7 +1996,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.bot?.owner?.user?.id || profile.id}-${crypto.randomUUID()}`,
|
||||
id: `${(profile.bot?.owner?.user?.id || profile.id).toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.name || profile.bot?.owner?.user?.name || 'Notion User',
|
||||
email: profile.person?.email || `${profile.id}@notion.user`,
|
||||
emailVerified: !!profile.person?.email,
|
||||
@@ -1957,7 +2063,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${data.id}-${crypto.randomUUID()}`,
|
||||
id: `${data.id.toString()}-${crypto.randomUUID()}`,
|
||||
name: data.name || 'Reddit User',
|
||||
email: `${data.name}@reddit.user`,
|
||||
image: data.icon_img || undefined,
|
||||
@@ -2029,7 +2135,7 @@ export const auth = betterAuth({
|
||||
const viewer = data.viewer
|
||||
|
||||
return {
|
||||
id: `${viewer.id}-${crypto.randomUUID()}`,
|
||||
id: `${viewer.id.toString()}-${crypto.randomUUID()}`,
|
||||
email: viewer.email,
|
||||
name: viewer.name,
|
||||
emailVerified: true,
|
||||
@@ -2092,7 +2198,7 @@ export const auth = betterAuth({
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
id: `${data.account_id}-${crypto.randomUUID()}`,
|
||||
id: `${data.account_id.toString()}-${crypto.randomUUID()}`,
|
||||
email: data.email,
|
||||
name: data.name?.display_name || data.email,
|
||||
emailVerified: data.email_verified || false,
|
||||
@@ -2143,7 +2249,7 @@ export const auth = betterAuth({
|
||||
const now = new Date()
|
||||
|
||||
return {
|
||||
id: `${profile.gid}-${crypto.randomUUID()}`,
|
||||
id: `${profile.gid.toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.name || 'Asana User',
|
||||
email: profile.email || `${profile.gid}@asana.user`,
|
||||
image: profile.photo?.image_128x128 || undefined,
|
||||
@@ -2378,7 +2484,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: `${profile.id}-${crypto.randomUUID()}`,
|
||||
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
|
||||
name:
|
||||
`${profile.first_name || ''} ${profile.last_name || ''}`.trim() || 'Zoom User',
|
||||
email: profile.email || `${profile.id}@zoom.user`,
|
||||
@@ -2445,7 +2551,7 @@ export const auth = betterAuth({
|
||||
const profile = await response.json()
|
||||
|
||||
return {
|
||||
id: `${profile.id}-${crypto.randomUUID()}`,
|
||||
id: `${profile.id.toString()}-${crypto.randomUUID()}`,
|
||||
name: profile.display_name || 'Spotify User',
|
||||
email: profile.email || `${profile.id}@spotify.user`,
|
||||
emailVerified: true,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, workflow as workflowTable } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { account, credential, credentialMember, workflow as workflowTable } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
@@ -12,17 +12,14 @@ export interface CredentialAccessResult {
|
||||
requesterUserId?: string
|
||||
credentialOwnerUserId?: string
|
||||
workspaceId?: string
|
||||
resolvedCredentialId?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Centralizes auth + collaboration rules for credential use.
|
||||
* - Uses checkSessionOrInternalAuth to authenticate the caller
|
||||
* - Fetches credential owner
|
||||
* - Authorization rules:
|
||||
* - session: allow if requester owns the credential; otherwise require workflowId and
|
||||
* verify BOTH requester and owner have access to the workflow's workspace
|
||||
* - internal_jwt: require workflowId (by default) and verify credential owner has access to the
|
||||
* workflow's workspace (requester identity is the system/workflow)
|
||||
* Centralizes auth + credential membership checks for OAuth usage.
|
||||
* - Workspace-scoped credential IDs enforce active credential_member access.
|
||||
* - Legacy account IDs are resolved to workspace-scoped credentials when workflowId is provided.
|
||||
* - Direct legacy account-ID access without workflowId is restricted to account owners only.
|
||||
*/
|
||||
export async function authorizeCredentialUse(
|
||||
request: NextRequest,
|
||||
@@ -37,71 +34,173 @@ export async function authorizeCredentialUse(
|
||||
return { ok: false, error: auth.error || 'Authentication required' }
|
||||
}
|
||||
|
||||
// Lookup credential owner
|
||||
const [credRow] = await db
|
||||
const [workflowContext] = workflowId
|
||||
? await db
|
||||
.select({ workspaceId: workflowTable.workspaceId })
|
||||
.from(workflowTable)
|
||||
.where(eq(workflowTable.id, workflowId))
|
||||
.limit(1)
|
||||
: [null]
|
||||
|
||||
if (workflowId && (!workflowContext || !workflowContext.workspaceId)) {
|
||||
return { ok: false, error: 'Workflow not found' }
|
||||
}
|
||||
|
||||
const [platformCredential] = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
type: credential.type,
|
||||
accountId: credential.accountId,
|
||||
})
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (platformCredential) {
|
||||
if (platformCredential.type !== 'oauth' || !platformCredential.accountId) {
|
||||
return { ok: false, error: 'Unsupported credential type for OAuth access' }
|
||||
}
|
||||
|
||||
if (workflowContext && workflowContext.workspaceId !== platformCredential.workspaceId) {
|
||||
return { ok: false, error: 'Credential is not accessible from this workflow workspace' }
|
||||
}
|
||||
|
||||
const [accountRow] = await db
|
||||
.select({ userId: account.userId })
|
||||
.from(account)
|
||||
.where(eq(account.id, platformCredential.accountId))
|
||||
.limit(1)
|
||||
|
||||
if (!accountRow) {
|
||||
return { ok: false, error: 'Credential account not found' }
|
||||
}
|
||||
|
||||
const requesterPerm =
|
||||
auth.authType === 'internal_jwt'
|
||||
? null
|
||||
: await getUserEntityPermissions(auth.userId, 'workspace', platformCredential.workspaceId)
|
||||
|
||||
if (auth.authType !== 'internal_jwt') {
|
||||
const [membership] = await db
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, platformCredential.id),
|
||||
eq(credentialMember.userId, auth.userId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership || requesterPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
}
|
||||
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
accountRow.userId,
|
||||
'workspace',
|
||||
platformCredential.workspaceId
|
||||
)
|
||||
if (ownerPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId: accountRow.userId,
|
||||
workspaceId: platformCredential.workspaceId,
|
||||
resolvedCredentialId: platformCredential.accountId,
|
||||
}
|
||||
}
|
||||
|
||||
if (workflowContext?.workspaceId) {
|
||||
const [workspaceCredential] = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
accountId: credential.accountId,
|
||||
})
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.type, 'oauth'),
|
||||
eq(credential.workspaceId, workflowContext.workspaceId),
|
||||
eq(credential.accountId, credentialId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!workspaceCredential?.accountId) {
|
||||
return { ok: false, error: 'Credential not found' }
|
||||
}
|
||||
|
||||
const [accountRow] = await db
|
||||
.select({ userId: account.userId })
|
||||
.from(account)
|
||||
.where(eq(account.id, workspaceCredential.accountId))
|
||||
.limit(1)
|
||||
|
||||
if (!accountRow) {
|
||||
return { ok: false, error: 'Credential account not found' }
|
||||
}
|
||||
|
||||
if (auth.authType !== 'internal_jwt') {
|
||||
const [membership] = await db
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, workspaceCredential.id),
|
||||
eq(credentialMember.userId, auth.userId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!membership) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
}
|
||||
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
accountRow.userId,
|
||||
'workspace',
|
||||
workflowContext.workspaceId
|
||||
)
|
||||
if (ownerPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId: accountRow.userId,
|
||||
workspaceId: workflowContext.workspaceId,
|
||||
resolvedCredentialId: workspaceCredential.accountId,
|
||||
}
|
||||
}
|
||||
|
||||
const [legacyAccount] = await db
|
||||
.select({ userId: account.userId })
|
||||
.from(account)
|
||||
.where(eq(account.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (!credRow) {
|
||||
if (!legacyAccount) {
|
||||
return { ok: false, error: 'Credential not found' }
|
||||
}
|
||||
|
||||
const credentialOwnerUserId = credRow.userId
|
||||
|
||||
// If requester owns the credential, allow immediately
|
||||
if (auth.authType !== 'internal_jwt' && auth.userId === credentialOwnerUserId) {
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId,
|
||||
}
|
||||
}
|
||||
|
||||
// For collaboration paths, workflowId is required to scope to a workspace
|
||||
if (!workflowId) {
|
||||
if (auth.authType === 'internal_jwt') {
|
||||
return { ok: false, error: 'workflowId is required' }
|
||||
}
|
||||
|
||||
const [wf] = await db
|
||||
.select({ workspaceId: workflowTable.workspaceId })
|
||||
.from(workflowTable)
|
||||
.where(eq(workflowTable.id, workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!wf || !wf.workspaceId) {
|
||||
return { ok: false, error: 'Workflow not found' }
|
||||
}
|
||||
|
||||
if (auth.authType === 'internal_jwt') {
|
||||
// Internal calls: verify credential owner belongs to the workflow's workspace
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
credentialOwnerUserId,
|
||||
'workspace',
|
||||
wf.workspaceId
|
||||
)
|
||||
if (ownerPerm === null) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId,
|
||||
workspaceId: wf.workspaceId,
|
||||
}
|
||||
}
|
||||
|
||||
// Session: verify BOTH requester and owner belong to the workflow's workspace
|
||||
const requesterPerm = await getUserEntityPermissions(auth.userId, 'workspace', wf.workspaceId)
|
||||
const ownerPerm = await getUserEntityPermissions(
|
||||
credentialOwnerUserId,
|
||||
'workspace',
|
||||
wf.workspaceId
|
||||
)
|
||||
if (requesterPerm === null || ownerPerm === null) {
|
||||
if (auth.userId !== legacyAccount.userId) {
|
||||
return { ok: false, error: 'Unauthorized' }
|
||||
}
|
||||
|
||||
@@ -109,7 +208,7 @@ export async function authorizeCredentialUse(
|
||||
ok: true,
|
||||
authType: auth.authType as CredentialAccessResult['authType'],
|
||||
requesterUserId: auth.userId,
|
||||
credentialOwnerUserId,
|
||||
workspaceId: wf.workspaceId,
|
||||
credentialOwnerUserId: legacyAccount.userId,
|
||||
resolvedCredentialId: credentialId,
|
||||
}
|
||||
}
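
For context, a sketch of how an API route might consume this helper (illustrative only; the exact params shape and the NextResponse usage are assumptions based on the checks above):

const access = await authorizeCredentialUse(request, { credentialId, workflowId })
if (!access.ok) {
  return NextResponse.json({ error: access.error }, { status: 403 })
}
// access.resolvedCredentialId is the underlying account id to use for token lookups,
// and access.workspaceId scopes any follow-up permission checks.
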
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { organization, userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import type { DbOrTx } from '@/lib/db/types'
|
||||
|
||||
const logger = createLogger('BonusCredits')
|
||||
|
||||
/**
|
||||
* Apply bonus credits to a user (e.g. referral bonuses, promotional codes).
|
||||
*
|
||||
* Detects the user's current plan and routes credits accordingly:
|
||||
* - Free/Pro: adds to `userStats.creditBalance` and increments `currentUsageLimit`
|
||||
* - Team/Enterprise: adds to `organization.creditBalance` and increments `orgUsageLimit`
|
||||
*
|
||||
* Uses direct increment (not recalculation) so it works correctly for free-tier
|
||||
* users where `setUsageLimitForCredits` would compute planBase=0 and skip the update.
|
||||
*
|
||||
* @param tx - Optional Drizzle transaction context. When provided, all DB writes
|
||||
* participate in the caller's transaction for atomicity.
|
||||
*/
|
||||
export async function applyBonusCredits(
|
||||
userId: string,
|
||||
amount: number,
|
||||
tx?: DbOrTx
|
||||
): Promise<void> {
|
||||
const dbCtx = tx ?? db
|
||||
const subscription = await getHighestPrioritySubscription(userId)
|
||||
const isTeamOrEnterprise = subscription?.plan === 'team' || subscription?.plan === 'enterprise'
|
||||
|
||||
if (isTeamOrEnterprise && subscription?.referenceId) {
|
||||
const orgId = subscription.referenceId
|
||||
|
||||
await dbCtx
|
||||
.update(organization)
|
||||
.set({
|
||||
creditBalance: sql`${organization.creditBalance} + ${amount}`,
|
||||
orgUsageLimit: sql`COALESCE(${organization.orgUsageLimit}, '0')::decimal + ${amount}`,
|
||||
})
|
||||
.where(eq(organization.id, orgId))
|
||||
|
||||
logger.info('Applied bonus credits to organization', {
|
||||
userId,
|
||||
organizationId: orgId,
|
||||
plan: subscription.plan,
|
||||
amount,
|
||||
})
|
||||
} else {
|
||||
await dbCtx
|
||||
.update(userStats)
|
||||
.set({
|
||||
creditBalance: sql`${userStats.creditBalance} + ${amount}`,
|
||||
currentUsageLimit: sql`COALESCE(${userStats.currentUsageLimit}, '0')::decimal + ${amount}`,
|
||||
})
|
||||
.where(eq(userStats.userId, userId))
|
||||
|
||||
logger.info('Applied bonus credits to user', {
|
||||
userId,
|
||||
plan: subscription?.plan || 'free',
|
||||
amount,
|
||||
})
|
||||
}
|
||||
}
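
Before its removal, the intended call pattern was to thread the caller's transaction through so the grant stays atomic with whatever triggered it (illustrative; the amount and the surrounding write are invented):

await db.transaction(async (tx) => {
  // ...record the referral redemption in the same transaction...
  await applyBonusCredits(referredUserId, 10, tx)
})
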
|
||||
@@ -20,8 +20,6 @@ export interface BuildPayloadParams {
|
||||
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
|
||||
commands?: string[]
|
||||
chatId?: string
|
||||
conversationId?: string
|
||||
prefetch?: boolean
|
||||
implicitFeedback?: string
|
||||
}
|
||||
|
||||
@@ -66,10 +64,6 @@ export async function buildCopilotRequestPayload(
|
||||
fileAttachments,
|
||||
commands,
|
||||
chatId,
|
||||
conversationId,
|
||||
prefetch,
|
||||
conversationHistory,
|
||||
implicitFeedback,
|
||||
} = params
|
||||
|
||||
const selectedModel = options.selectedModel
|
||||
@@ -160,12 +154,6 @@ export async function buildCopilotRequestPayload(
|
||||
version: SIM_AGENT_VERSION,
|
||||
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
|
||||
...(chatId ? { chatId } : {}),
|
||||
...(conversationId ? { conversationId } : {}),
|
||||
...(Array.isArray(conversationHistory) && conversationHistory.length > 0
|
||||
? { conversationHistory }
|
||||
: {}),
|
||||
...(typeof prefetch === 'boolean' ? { prefetch } : {}),
|
||||
...(implicitFeedback ? { implicitFeedback } : {}),
|
||||
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
|
||||
...(integrationTools.length > 0 ? { integrationTools } : {}),
|
||||
...(credentials ? { credentials } : {}),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { customTools, workflow } from '@sim/db/schema'
|
||||
import { workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, isNull, or } from 'drizzle-orm'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import type {
|
||||
ExecutionContext,
|
||||
@@ -12,7 +12,6 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
|
||||
import { getTool, resolveToolId } from '@/tools/utils'
|
||||
import {
|
||||
executeCheckDeploymentStatus,
|
||||
@@ -77,247 +76,6 @@ import {
|
||||
|
||||
const logger = createLogger('CopilotToolExecutor')
|
||||
|
||||
type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'
|
||||
|
||||
interface ManageCustomToolSchema {
|
||||
type: 'function'
|
||||
function: {
|
||||
name: string
|
||||
description?: string
|
||||
parameters: Record<string, unknown>
|
||||
}
|
||||
}
|
||||
|
||||
interface ManageCustomToolParams {
|
||||
operation?: string
|
||||
toolId?: string
|
||||
schema?: ManageCustomToolSchema
|
||||
code?: string
|
||||
title?: string
|
||||
workspaceId?: string
|
||||
}
|
||||
|
||||
async function executeManageCustomTool(
|
||||
rawParams: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const params = rawParams as ManageCustomToolParams
|
||||
const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
|
||||
const workspaceId = params.workspaceId || context.workspaceId
|
||||
|
||||
if (!operation) {
|
||||
return { success: false, error: "Missing required 'operation' argument" }
|
||||
}
|
||||
|
||||
try {
|
||||
if (operation === 'list') {
|
||||
const toolsForUser = workspaceId
|
||||
? await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(
|
||||
or(
|
||||
eq(customTools.workspaceId, workspaceId),
|
||||
and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
|
||||
)
|
||||
)
|
||||
.orderBy(desc(customTools.createdAt))
|
||||
: await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
|
||||
.orderBy(desc(customTools.createdAt))
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
tools: toolsForUser,
|
||||
count: toolsForUser.length,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'add') {
|
||||
if (!workspaceId) {
|
||||
return {
|
||||
success: false,
|
||||
error: "workspaceId is required for operation 'add'",
|
||||
}
|
||||
}
|
||||
if (!params.schema || !params.code) {
|
||||
return {
|
||||
success: false,
|
||||
error: "Both 'schema' and 'code' are required for operation 'add'",
|
||||
}
|
||||
}
|
||||
|
||||
const title = params.title || params.schema.function?.name
|
||||
if (!title) {
|
||||
return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
|
||||
}
|
||||
|
||||
const resultTools = await upsertCustomTools({
|
||||
tools: [
|
||||
{
|
||||
title,
|
||||
schema: params.schema,
|
||||
code: params.code,
|
||||
},
|
||||
],
|
||||
workspaceId,
|
||||
userId: context.userId,
|
||||
})
|
||||
const created = resultTools.find((tool) => tool.title === title)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
toolId: created?.id,
|
||||
title,
|
||||
message: `Created custom tool "${title}"`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'edit') {
|
||||
if (!workspaceId) {
|
||||
return {
|
||||
success: false,
|
||||
error: "workspaceId is required for operation 'edit'",
|
||||
}
|
||||
}
|
||||
if (!params.toolId) {
|
||||
return { success: false, error: "'toolId' is required for operation 'edit'" }
|
||||
}
|
||||
if (!params.schema && !params.code) {
|
||||
return {
|
||||
success: false,
|
||||
error: "At least one of 'schema' or 'code' is required for operation 'edit'",
|
||||
}
|
||||
}
|
||||
|
||||
const workspaceTool = await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
|
||||
.limit(1)
|
||||
|
||||
const legacyTool =
|
||||
workspaceTool.length === 0
|
||||
? await db
|
||||
.select()
|
||||
.from(customTools)
|
||||
.where(
|
||||
and(
|
||||
eq(customTools.id, params.toolId),
|
||||
isNull(customTools.workspaceId),
|
||||
eq(customTools.userId, context.userId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
: []
|
||||
|
||||
const existing = workspaceTool[0] || legacyTool[0]
|
||||
if (!existing) {
|
||||
return { success: false, error: `Custom tool not found: ${params.toolId}` }
|
||||
}
|
||||
|
||||
const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
|
||||
const mergedCode = params.code || existing.code
|
||||
const title = params.title || mergedSchema.function?.name || existing.title
|
||||
|
||||
await upsertCustomTools({
|
||||
tools: [
|
||||
{
|
||||
id: params.toolId,
|
||||
title,
|
||||
schema: mergedSchema,
|
||||
code: mergedCode,
|
||||
},
|
||||
],
|
||||
workspaceId,
|
||||
userId: context.userId,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
toolId: params.toolId,
|
||||
title,
|
||||
message: `Updated custom tool "${title}"`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'delete') {
|
||||
if (!params.toolId) {
|
||||
return { success: false, error: "'toolId' is required for operation 'delete'" }
|
||||
}
|
||||
|
||||
const workspaceDelete =
|
||||
workspaceId != null
|
||||
? await db
|
||||
.delete(customTools)
|
||||
.where(
|
||||
and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
|
||||
)
|
||||
.returning({ id: customTools.id })
|
||||
: []
|
||||
|
||||
const legacyDelete =
|
||||
workspaceDelete.length === 0
|
||||
? await db
|
||||
.delete(customTools)
|
||||
.where(
|
||||
and(
|
||||
eq(customTools.id, params.toolId),
|
||||
isNull(customTools.workspaceId),
|
||||
eq(customTools.userId, context.userId)
|
||||
)
|
||||
)
|
||||
.returning({ id: customTools.id })
|
||||
: []
|
||||
|
||||
const deleted = workspaceDelete[0] || legacyDelete[0]
|
||||
if (!deleted) {
|
||||
return { success: false, error: `Custom tool not found: ${params.toolId}` }
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
operation,
|
||||
toolId: params.toolId,
|
||||
message: 'Deleted custom tool',
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: `Unsupported operation for manage_custom_tool: ${operation}`,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('manage_custom_tool execution failed', {
|
||||
operation,
|
||||
workspaceId,
|
||||
userId: context.userId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to manage custom tool',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const SERVER_TOOLS = new Set<string>([
|
||||
'get_blocks_and_tools',
|
||||
'get_blocks_metadata',
|
||||
@@ -403,19 +161,6 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
||||
}
|
||||
}
|
||||
},
|
||||
oauth_request_access: async (p, _c) => {
|
||||
const providerName = (p.providerName || p.provider_name || 'the provider') as string
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
status: 'requested',
|
||||
providerName,
|
||||
message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
|
||||
},
|
||||
}
|
||||
},
|
||||
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
apps/sim/lib/credentials/access.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
import { db } from '@sim/db'
|
||||
import { credential, credentialMember } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
type ActiveCredentialMember = typeof credentialMember.$inferSelect
|
||||
type CredentialRecord = typeof credential.$inferSelect
|
||||
|
||||
export interface CredentialActorContext {
|
||||
credential: CredentialRecord | null
|
||||
member: ActiveCredentialMember | null
|
||||
hasWorkspaceAccess: boolean
|
||||
canWriteWorkspace: boolean
|
||||
isAdmin: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves user access context for a credential.
|
||||
*/
|
||||
export async function getCredentialActorContext(
|
||||
credentialId: string,
|
||||
userId: string
|
||||
): Promise<CredentialActorContext> {
|
||||
const [credentialRow] = await db
|
||||
.select()
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
|
||||
if (!credentialRow) {
|
||||
return {
|
||||
credential: null,
|
||||
member: null,
|
||||
hasWorkspaceAccess: false,
|
||||
canWriteWorkspace: false,
|
||||
isAdmin: false,
|
||||
}
|
||||
}
|
||||
|
||||
const workspaceAccess = await checkWorkspaceAccess(credentialRow.workspaceId, userId)
|
||||
const [memberRow] = await db
|
||||
.select()
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
eq(credentialMember.userId, userId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
const isAdmin = memberRow?.role === 'admin'
|
||||
|
||||
return {
|
||||
credential: credentialRow,
|
||||
member: memberRow ?? null,
|
||||
hasWorkspaceAccess: workspaceAccess.hasAccess,
|
||||
canWriteWorkspace: workspaceAccess.canWrite,
|
||||
isAdmin,
|
||||
}
|
||||
}
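
A sketch of gating an admin-only credential route with this helper (illustrative, not part of the new file; the session lookup and NextResponse usage are placeholders):

const ctx = await getCredentialActorContext(credentialId, session.user.id)
if (!ctx.credential) {
  return NextResponse.json({ error: 'Not found' }, { status: 404 })
}
if (!ctx.isAdmin) {
  return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
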
|
||||
apps/sim/lib/credentials/client-state.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
'use client'
|
||||
|
||||
export const PENDING_OAUTH_CREDENTIAL_DRAFT_KEY = 'sim.pending-oauth-credential-draft'
|
||||
export const PENDING_CREDENTIAL_CREATE_REQUEST_KEY = 'sim.pending-credential-create-request'
|
||||
|
||||
export interface PendingOAuthCredentialDraft {
|
||||
workspaceId: string
|
||||
providerId: string
|
||||
displayName: string
|
||||
existingCredentialIds: string[]
|
||||
existingAccountIds: string[]
|
||||
requestedAt: number
|
||||
}
|
||||
|
||||
interface PendingOAuthCredentialCreateRequest {
|
||||
workspaceId: string
|
||||
type: 'oauth'
|
||||
providerId: string
|
||||
displayName: string
|
||||
serviceId: string
|
||||
requiredScopes: string[]
|
||||
requestedAt: number
|
||||
}
|
||||
|
||||
interface PendingSecretCredentialCreateRequest {
|
||||
workspaceId: string
|
||||
type: 'env_personal' | 'env_workspace'
|
||||
envKey?: string
|
||||
requestedAt: number
|
||||
}
|
||||
|
||||
export type PendingCredentialCreateRequest =
|
||||
| PendingOAuthCredentialCreateRequest
|
||||
| PendingSecretCredentialCreateRequest
|
||||
|
||||
function parseJson<T>(raw: string | null): T | null {
|
||||
if (!raw) return null
|
||||
try {
|
||||
return JSON.parse(raw) as T
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export function readPendingOAuthCredentialDraft(): PendingOAuthCredentialDraft | null {
|
||||
if (typeof window === 'undefined') return null
|
||||
return parseJson<PendingOAuthCredentialDraft>(
|
||||
window.sessionStorage.getItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
|
||||
)
|
||||
}
|
||||
|
||||
export function writePendingOAuthCredentialDraft(payload: PendingOAuthCredentialDraft) {
|
||||
if (typeof window === 'undefined') return
|
||||
window.sessionStorage.setItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY, JSON.stringify(payload))
|
||||
}
|
||||
|
||||
export function clearPendingOAuthCredentialDraft() {
|
||||
if (typeof window === 'undefined') return
|
||||
window.sessionStorage.removeItem(PENDING_OAUTH_CREDENTIAL_DRAFT_KEY)
|
||||
}
|
||||
|
||||
export function readPendingCredentialCreateRequest(): PendingCredentialCreateRequest | null {
|
||||
if (typeof window === 'undefined') return null
|
||||
return parseJson<PendingCredentialCreateRequest>(
|
||||
window.sessionStorage.getItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
|
||||
)
|
||||
}
|
||||
|
||||
export function writePendingCredentialCreateRequest(payload: PendingCredentialCreateRequest) {
|
||||
if (typeof window === 'undefined') return
|
||||
window.sessionStorage.setItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY, JSON.stringify(payload))
|
||||
}
|
||||
|
||||
export function clearPendingCredentialCreateRequest() {
|
||||
if (typeof window === 'undefined') return
|
||||
window.sessionStorage.removeItem(PENDING_CREDENTIAL_CREATE_REQUEST_KEY)
|
||||
}
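
An illustrative round-trip around an OAuth redirect (not part of the new file; all values are invented):

writePendingOAuthCredentialDraft({
  workspaceId: 'ws_123',
  providerId: 'google-drive',
  displayName: 'Marketing Drive',
  existingCredentialIds: [],
  existingAccountIds: [],
  requestedAt: Date.now(),
})

// ...after the provider redirects back:
const draft = readPendingOAuthCredentialDraft()
if (draft) {
  // Reconcile the newly created credential against draft.existingCredentialIds, then:
  clearPendingOAuthCredentialDraft()
}
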
|
||||
apps/sim/lib/credentials/environment.ts (new file, 356 lines)
@@ -0,0 +1,356 @@
|
||||
import { db } from '@sim/db'
|
||||
import { credential, credentialMember, permissions, workspace } from '@sim/db/schema'
|
||||
import { and, eq, inArray, notInArray } from 'drizzle-orm'
|
||||
|
||||
interface AccessibleEnvCredential {
|
||||
type: 'env_workspace' | 'env_personal'
|
||||
envKey: string
|
||||
envOwnerUserId: string | null
|
||||
updatedAt: Date
|
||||
}
|
||||
|
||||
function getPostgresErrorCode(error: unknown): string | undefined {
|
||||
if (!error || typeof error !== 'object') return undefined
|
||||
const err = error as { code?: string; cause?: { code?: string } }
|
||||
return err.code || err.cause?.code
|
||||
}
|
||||
|
||||
export async function getWorkspaceMemberUserIds(workspaceId: string): Promise<string[]> {
|
||||
const [workspaceRows, permissionRows] = await Promise.all([
|
||||
db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1),
|
||||
db
|
||||
.select({ userId: permissions.userId })
|
||||
.from(permissions)
|
||||
.where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))),
|
||||
])
|
||||
const workspaceRow = workspaceRows[0]
|
||||
|
||||
const memberIds = new Set<string>(permissionRows.map((row) => row.userId))
|
||||
if (workspaceRow?.ownerId) {
|
||||
memberIds.add(workspaceRow.ownerId)
|
||||
}
|
||||
return Array.from(memberIds)
|
||||
}
|
||||
|
||||
export async function getUserWorkspaceIds(userId: string): Promise<string[]> {
|
||||
const [permissionRows, ownedWorkspaceRows] = await Promise.all([
|
||||
db
|
||||
.select({ workspaceId: workspace.id })
|
||||
.from(permissions)
|
||||
.innerJoin(
|
||||
workspace,
|
||||
and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspace.id))
|
||||
)
|
||||
.where(eq(permissions.userId, userId)),
|
||||
db.select({ workspaceId: workspace.id }).from(workspace).where(eq(workspace.ownerId, userId)),
|
||||
])
|
||||
|
||||
const workspaceIds = new Set<string>(permissionRows.map((row) => row.workspaceId))
|
||||
for (const row of ownedWorkspaceRows) {
|
||||
workspaceIds.add(row.workspaceId)
|
||||
}
|
||||
|
||||
return Array.from(workspaceIds)
|
||||
}
|
||||
|
||||
async function upsertCredentialAdminMember(credentialId: string, adminUserId: string) {
|
||||
const now = new Date()
|
||||
const [existingMembership] = await db
|
||||
.select({ id: credentialMember.id, joinedAt: credentialMember.joinedAt })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, adminUserId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingMembership) {
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: existingMembership.joinedAt ?? now,
|
||||
invitedBy: adminUserId,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credentialMember.id, existingMembership.id))
|
||||
return
|
||||
}
|
||||
|
||||
await db.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: adminUserId,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: adminUserId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
|
||||
async function ensureWorkspaceCredentialMemberships(
|
||||
credentialId: string,
|
||||
workspaceId: string,
|
||||
ownerUserId: string
|
||||
) {
|
||||
const workspaceMemberUserIds = await getWorkspaceMemberUserIds(workspaceId)
|
||||
if (!workspaceMemberUserIds.length) return
|
||||
|
||||
const existingMemberships = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
userId: credentialMember.userId,
|
||||
joinedAt: credentialMember.joinedAt,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
inArray(credentialMember.userId, workspaceMemberUserIds)
|
||||
)
|
||||
)
|
||||
|
||||
const byUserId = new Map(existingMemberships.map((row) => [row.userId, row]))
|
||||
const now = new Date()
|
||||
|
||||
for (const memberUserId of workspaceMemberUserIds) {
|
||||
const targetRole = memberUserId === ownerUserId ? 'admin' : 'member'
|
||||
const existing = byUserId.get(memberUserId)
|
||||
if (existing) {
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({
|
||||
role: targetRole,
|
||||
status: 'active',
|
||||
joinedAt: existing.joinedAt ?? now,
|
||||
invitedBy: ownerUserId,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credentialMember.id, existing.id))
|
||||
continue
|
||||
}
|
||||
|
||||
await db.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId: memberUserId,
|
||||
role: targetRole,
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: ownerUserId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function syncWorkspaceEnvCredentials(params: {
|
||||
workspaceId: string
|
||||
envKeys: string[]
|
||||
actingUserId: string
|
||||
}) {
|
||||
const { workspaceId, envKeys, actingUserId } = params
|
||||
const [workspaceRow] = await db
|
||||
.select({ ownerId: workspace.ownerId })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
|
||||
if (!workspaceRow) return
|
||||
|
||||
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
|
||||
const existingCredentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
envKey: credential.envKey,
|
||||
})
|
||||
.from(credential)
|
||||
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
|
||||
|
||||
const existingByKey = new Map(
|
||||
existingCredentials
|
||||
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
|
||||
.map((row) => [row.envKey, row.id])
|
||||
)
|
||||
|
||||
const credentialIdsToEnsureMembership = new Set<string>()
|
||||
const now = new Date()
|
||||
|
||||
for (const envKey of normalizedKeys) {
|
||||
const existingId = existingByKey.get(envKey)
|
||||
if (existingId) {
|
||||
credentialIdsToEnsureMembership.add(existingId)
|
||||
continue
|
||||
}
|
||||
|
||||
const createdId = crypto.randomUUID()
|
||||
try {
|
||||
await db.insert(credential).values({
|
||||
id: createdId,
|
||||
workspaceId,
|
||||
type: 'env_workspace',
|
||||
displayName: envKey,
|
||||
envKey,
|
||||
createdBy: actingUserId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
credentialIdsToEnsureMembership.add(createdId)
|
||||
} catch (error: unknown) {
|
||||
const code = getPostgresErrorCode(error)
|
||||
if (code !== '23505') throw error
|
||||
}
|
||||
}
|
||||
|
||||
for (const credentialId of credentialIdsToEnsureMembership) {
|
||||
await ensureWorkspaceCredentialMemberships(credentialId, workspaceId, workspaceRow.ownerId)
|
||||
}
|
||||
|
||||
if (normalizedKeys.length > 0) {
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_workspace'),
|
||||
notInArray(credential.envKey, normalizedKeys)
|
||||
)
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(and(eq(credential.workspaceId, workspaceId), eq(credential.type, 'env_workspace')))
|
||||
}
|
||||
|
||||
export async function syncPersonalEnvCredentialsForUser(params: {
|
||||
userId: string
|
||||
envKeys: string[]
|
||||
}) {
|
||||
const { userId, envKeys } = params
|
||||
const workspaceIds = await getUserWorkspaceIds(userId)
|
||||
if (!workspaceIds.length) return
|
||||
|
||||
const normalizedKeys = Array.from(new Set(envKeys.filter(Boolean)))
|
||||
const now = new Date()
|
||||
|
||||
for (const workspaceId of workspaceIds) {
|
||||
const existingCredentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
envKey: credential.envKey,
|
||||
})
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envOwnerUserId, userId)
|
||||
)
|
||||
)
|
||||
|
||||
const existingByKey = new Map(
|
||||
existingCredentials
|
||||
.filter((row): row is { id: string; envKey: string } => Boolean(row.envKey))
|
||||
.map((row) => [row.envKey, row.id])
|
||||
)
|
||||
|
||||
for (const envKey of normalizedKeys) {
|
||||
const existingId = existingByKey.get(envKey)
|
||||
if (existingId) {
|
||||
await upsertCredentialAdminMember(existingId, userId)
|
||||
continue
|
||||
}
|
||||
|
||||
const createdId = crypto.randomUUID()
|
||||
try {
|
||||
await db.insert(credential).values({
|
||||
id: createdId,
|
||||
workspaceId,
|
||||
type: 'env_personal',
|
||||
displayName: envKey,
|
||||
envKey,
|
||||
envOwnerUserId: userId,
|
||||
createdBy: userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
await upsertCredentialAdminMember(createdId, userId)
|
||||
} catch (error: unknown) {
|
||||
const code = getPostgresErrorCode(error)
|
||||
if (code !== '23505') throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (normalizedKeys.length > 0) {
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envOwnerUserId, userId),
|
||||
notInArray(credential.envKey, normalizedKeys)
|
||||
)
|
||||
)
|
||||
continue
|
||||
}
|
||||
|
||||
await db
|
||||
.delete(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'env_personal'),
|
||||
eq(credential.envOwnerUserId, userId)
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function getAccessibleEnvCredentials(
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
): Promise<AccessibleEnvCredential[]> {
|
||||
const rows = await db
|
||||
.select({
|
||||
type: credential.type,
|
||||
envKey: credential.envKey,
|
||||
envOwnerUserId: credential.envOwnerUserId,
|
||||
updatedAt: credential.updatedAt,
|
||||
})
|
||||
.from(credential)
|
||||
.innerJoin(
|
||||
credentialMember,
|
||||
and(
|
||||
eq(credentialMember.credentialId, credential.id),
|
||||
eq(credentialMember.userId, userId),
|
||||
eq(credentialMember.status, 'active')
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
inArray(credential.type, ['env_workspace', 'env_personal'])
|
||||
)
|
||||
)
|
||||
|
||||
return rows
|
||||
.filter(
|
||||
(row): row is AccessibleEnvCredential =>
|
||||
(row.type === 'env_workspace' || row.type === 'env_personal') && Boolean(row.envKey)
|
||||
)
|
||||
.map((row) => ({
|
||||
type: row.type,
|
||||
envKey: row.envKey!,
|
||||
envOwnerUserId: row.envOwnerUserId,
|
||||
updatedAt: row.updatedAt,
|
||||
}))
|
||||
}
|
||||
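A hedged sketch of how the sync helpers in environment.ts are typically invoked; the route handler below is an assumption, since the call sites are not shown in this diff:

import {
  getAccessibleEnvCredentials,
  syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'

// Assumed call site: run after a workspace's environment variables are saved.
export async function onWorkspaceEnvSaved(
  workspaceId: string,
  actingUserId: string,
  updatedVariables: Record<string, string>
) {
  // Mirror the current key set into env_workspace credential rows and memberships.
  await syncWorkspaceEnvCredentials({
    workspaceId,
    envKeys: Object.keys(updatedVariables),
    actingUserId,
  })

  // Read back only the env credentials this user can access in the workspace.
  return getAccessibleEnvCredentials(workspaceId, actingUserId)
}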
195
apps/sim/lib/credentials/oauth.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember } from '@sim/db/schema'
|
||||
import { and, eq, inArray } from 'drizzle-orm'
|
||||
import { getServiceConfigByProviderId } from '@/lib/oauth'
|
||||
|
||||
interface SyncWorkspaceOAuthCredentialsForUserParams {
|
||||
workspaceId: string
|
||||
userId: string
|
||||
}
|
||||
|
||||
interface SyncWorkspaceOAuthCredentialsForUserResult {
|
||||
createdCredentials: number
|
||||
updatedMemberships: number
|
||||
}
|
||||
|
||||
function getPostgresErrorCode(error: unknown): string | undefined {
|
||||
if (!error || typeof error !== 'object') return undefined
|
||||
const err = error as { code?: string; cause?: { code?: string } }
|
||||
return err.code || err.cause?.code
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures connected OAuth accounts for a user exist as workspace-scoped credentials.
|
||||
*/
|
||||
export async function syncWorkspaceOAuthCredentialsForUser(
|
||||
params: SyncWorkspaceOAuthCredentialsForUserParams
|
||||
): Promise<SyncWorkspaceOAuthCredentialsForUserResult> {
|
||||
const { workspaceId, userId } = params
|
||||
|
||||
const userAccounts = await db
|
||||
.select({
|
||||
id: account.id,
|
||||
providerId: account.providerId,
|
||||
accountId: account.accountId,
|
||||
})
|
||||
.from(account)
|
||||
.where(eq(account.userId, userId))
|
||||
|
||||
if (userAccounts.length === 0) {
|
||||
return { createdCredentials: 0, updatedMemberships: 0 }
|
||||
}
|
||||
|
||||
const accountIds = userAccounts.map((row) => row.id)
|
||||
const existingCredentials = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
displayName: credential.displayName,
|
||||
providerId: credential.providerId,
|
||||
accountId: credential.accountId,
|
||||
})
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'oauth'),
|
||||
inArray(credential.accountId, accountIds)
|
||||
)
|
||||
)
|
||||
|
||||
const now = new Date()
|
||||
const userAccountById = new Map(userAccounts.map((row) => [row.id, row]))
|
||||
for (const existingCredential of existingCredentials) {
|
||||
if (!existingCredential.accountId) continue
|
||||
const linkedAccount = userAccountById.get(existingCredential.accountId)
|
||||
if (!linkedAccount) continue
|
||||
|
||||
const normalizedLabel =
|
||||
getServiceConfigByProviderId(linkedAccount.providerId)?.name || linkedAccount.providerId
|
||||
const shouldNormalizeDisplayName =
|
||||
existingCredential.displayName === linkedAccount.accountId ||
|
||||
existingCredential.displayName === linkedAccount.providerId
|
||||
|
||||
if (!shouldNormalizeDisplayName || existingCredential.displayName === normalizedLabel) {
|
||||
continue
|
||||
}
|
||||
|
||||
await db
|
||||
.update(credential)
|
||||
.set({
|
||||
displayName: normalizedLabel,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credential.id, existingCredential.id))
|
||||
}
|
||||
|
||||
const existingByAccountId = new Map(
|
||||
existingCredentials
|
||||
.filter((row) => Boolean(row.accountId))
|
||||
.map((row) => [row.accountId!, row.id])
|
||||
)
|
||||
|
||||
let createdCredentials = 0
|
||||
|
||||
for (const acc of userAccounts) {
|
||||
if (existingByAccountId.has(acc.id)) {
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
await db.insert(credential).values({
|
||||
id: crypto.randomUUID(),
|
||||
workspaceId,
|
||||
type: 'oauth',
|
||||
displayName: getServiceConfigByProviderId(acc.providerId)?.name || acc.providerId,
|
||||
providerId: acc.providerId,
|
||||
accountId: acc.id,
|
||||
createdBy: userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
createdCredentials += 1
|
||||
} catch (error) {
|
||||
if (getPostgresErrorCode(error) !== '23505') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const credentialRows = await db
|
||||
.select({ id: credential.id, accountId: credential.accountId })
|
||||
.from(credential)
|
||||
.where(
|
||||
and(
|
||||
eq(credential.workspaceId, workspaceId),
|
||||
eq(credential.type, 'oauth'),
|
||||
inArray(credential.accountId, accountIds)
|
||||
)
|
||||
)
|
||||
|
||||
const credentialIdByAccountId = new Map(
|
||||
credentialRows.filter((row) => Boolean(row.accountId)).map((row) => [row.accountId!, row.id])
|
||||
)
|
||||
const allCredentialIds = Array.from(credentialIdByAccountId.values())
|
||||
if (allCredentialIds.length === 0) {
|
||||
return { createdCredentials, updatedMemberships: 0 }
|
||||
}
|
||||
|
||||
const existingMemberships = await db
|
||||
.select({
|
||||
id: credentialMember.id,
|
||||
credentialId: credentialMember.credentialId,
|
||||
joinedAt: credentialMember.joinedAt,
|
||||
})
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
inArray(credentialMember.credentialId, allCredentialIds),
|
||||
eq(credentialMember.userId, userId)
|
||||
)
|
||||
)
|
||||
|
||||
const membershipByCredentialId = new Map(
|
||||
existingMemberships.map((row) => [row.credentialId, row])
|
||||
)
|
||||
let updatedMemberships = 0
|
||||
|
||||
for (const credentialId of allCredentialIds) {
|
||||
const existingMembership = membershipByCredentialId.get(credentialId)
|
||||
if (existingMembership) {
|
||||
await db
|
||||
.update(credentialMember)
|
||||
.set({
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: existingMembership.joinedAt ?? now,
|
||||
invitedBy: userId,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(credentialMember.id, existingMembership.id))
|
||||
updatedMemberships += 1
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
await db.insert(credentialMember).values({
|
||||
id: crypto.randomUUID(),
|
||||
credentialId,
|
||||
userId,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: userId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
updatedMemberships += 1
|
||||
} catch (error) {
|
||||
if (getPostgresErrorCode(error) !== '23505') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { createdCredentials, updatedMemberships }
|
||||
}
|
||||
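A usage sketch for the OAuth sync helper above; where it is called from is not shown in this hunk, so the workspace-join hook below is an assumption:

import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'

// Assumed hook: when a user joins or opens a workspace, surface their
// connected OAuth accounts as workspace-scoped credentials.
export async function onWorkspaceMemberReady(workspaceId: string, userId: string) {
  const { createdCredentials, updatedMemberships } = await syncWorkspaceOAuthCredentialsForUser({
    workspaceId,
    userId,
  })
  return { createdCredentials, updatedMemberships }
}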
@@ -1,8 +1,9 @@
|
||||
import { db } from '@sim/db'
|
||||
import { environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { eq, inArray } from 'drizzle-orm'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getAccessibleEnvCredentials } from '@/lib/credentials/environment'
|
||||
|
||||
const logger = createLogger('EnvironmentUtils')
|
||||
|
||||
@@ -53,7 +54,7 @@ export async function getPersonalAndWorkspaceEnv(
|
||||
conflicts: string[]
|
||||
decryptionFailures: string[]
|
||||
}> {
|
||||
const [personalRows, workspaceRows] = await Promise.all([
|
||||
const [personalRows, workspaceRows, accessibleEnvCredentials] = await Promise.all([
|
||||
db.select().from(environment).where(eq(environment.userId, userId)).limit(1),
|
||||
workspaceId
|
||||
? db
|
||||
@@ -62,10 +63,69 @@ export async function getPersonalAndWorkspaceEnv(
|
||||
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
|
||||
.limit(1)
|
||||
: Promise.resolve([] as any[]),
|
||||
workspaceId ? getAccessibleEnvCredentials(workspaceId, userId) : Promise.resolve([]),
|
||||
])
|
||||
|
||||
const personalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
|
||||
const workspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
|
||||
const ownPersonalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
|
||||
const allWorkspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
|
||||
|
||||
const hasCredentialFiltering = Boolean(workspaceId) && accessibleEnvCredentials.length > 0
|
||||
const workspaceCredentialKeys = new Set(
|
||||
accessibleEnvCredentials.filter((row) => row.type === 'env_workspace').map((row) => row.envKey)
|
||||
)
|
||||
|
||||
const personalCredentialRows = accessibleEnvCredentials
|
||||
.filter((row) => row.type === 'env_personal' && row.envOwnerUserId)
|
||||
.sort((a, b) => {
|
||||
const aIsRequester = a.envOwnerUserId === userId
|
||||
const bIsRequester = b.envOwnerUserId === userId
|
||||
if (aIsRequester && !bIsRequester) return -1
|
||||
if (!aIsRequester && bIsRequester) return 1
|
||||
return b.updatedAt.getTime() - a.updatedAt.getTime()
|
||||
})
|
||||
|
||||
const selectedPersonalOwners = new Map<string, string>()
|
||||
for (const row of personalCredentialRows) {
|
||||
if (!selectedPersonalOwners.has(row.envKey) && row.envOwnerUserId) {
|
||||
selectedPersonalOwners.set(row.envKey, row.envOwnerUserId)
|
||||
}
|
||||
}
|
||||
|
||||
const ownerUserIds = Array.from(new Set(selectedPersonalOwners.values()))
|
||||
const ownerEnvironmentRows =
|
||||
ownerUserIds.length > 0
|
||||
? await db
|
||||
.select({
|
||||
userId: environment.userId,
|
||||
variables: environment.variables,
|
||||
})
|
||||
.from(environment)
|
||||
.where(inArray(environment.userId, ownerUserIds))
|
||||
: []
|
||||
|
||||
const ownerVariablesByUserId = new Map<string, Record<string, string>>(
|
||||
ownerEnvironmentRows.map((row) => [row.userId, (row.variables as Record<string, string>) || {}])
|
||||
)
|
||||
|
||||
let personalEncrypted: Record<string, string> = ownPersonalEncrypted
|
||||
let workspaceEncrypted: Record<string, string> = allWorkspaceEncrypted
|
||||
|
||||
if (hasCredentialFiltering) {
|
||||
personalEncrypted = {}
|
||||
for (const [envKey, ownerUserId] of selectedPersonalOwners.entries()) {
|
||||
const ownerVariables = ownerVariablesByUserId.get(ownerUserId)
|
||||
const encryptedValue = ownerVariables?.[envKey]
|
||||
if (encryptedValue) {
|
||||
personalEncrypted[envKey] = encryptedValue
|
||||
}
|
||||
}
|
||||
|
||||
workspaceEncrypted = Object.fromEntries(
|
||||
Object.entries(allWorkspaceEncrypted).filter(([envKey]) =>
|
||||
workspaceCredentialKeys.has(envKey)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
const decryptionFailures: string[] = []
|
||||
|
||||
|
||||
@@ -1,246 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
|
||||
|
||||
const logger = createLogger('ExecutionEventBuffer')
|
||||
|
||||
const REDIS_PREFIX = 'execution:stream:'
|
||||
const TTL_SECONDS = 60 * 60 // 1 hour
|
||||
const EVENT_LIMIT = 1000
|
||||
const RESERVE_BATCH = 100
|
||||
const FLUSH_INTERVAL_MS = 15
|
||||
const FLUSH_MAX_BATCH = 200
|
||||
|
||||
function getEventsKey(executionId: string) {
|
||||
return `${REDIS_PREFIX}${executionId}:events`
|
||||
}
|
||||
|
||||
function getSeqKey(executionId: string) {
|
||||
return `${REDIS_PREFIX}${executionId}:seq`
|
||||
}
|
||||
|
||||
function getMetaKey(executionId: string) {
|
||||
return `${REDIS_PREFIX}${executionId}:meta`
|
||||
}
|
||||
|
||||
export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'
|
||||
|
||||
export interface ExecutionStreamMeta {
|
||||
status: ExecutionStreamStatus
|
||||
userId?: string
|
||||
workflowId?: string
|
||||
updatedAt?: string
|
||||
}
|
||||
|
||||
export interface ExecutionEventEntry {
|
||||
eventId: number
|
||||
executionId: string
|
||||
event: ExecutionEvent
|
||||
}
|
||||
|
||||
export interface ExecutionEventWriter {
|
||||
write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
|
||||
flush: () => Promise<void>
|
||||
close: () => Promise<void>
|
||||
}
|
||||
|
||||
export async function setExecutionMeta(
|
||||
executionId: string,
|
||||
meta: Partial<ExecutionStreamMeta>
|
||||
): Promise<void> {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
|
||||
return
|
||||
}
|
||||
try {
|
||||
const key = getMetaKey(executionId)
|
||||
const payload: Record<string, string> = {
|
||||
updatedAt: new Date().toISOString(),
|
||||
}
|
||||
if (meta.status) payload.status = meta.status
|
||||
if (meta.userId) payload.userId = meta.userId
|
||||
if (meta.workflowId) payload.workflowId = meta.workflowId
|
||||
await redis.hset(key, payload)
|
||||
await redis.expire(key, TTL_SECONDS)
|
||||
} catch (error) {
|
||||
logger.warn('Failed to update execution meta', {
|
||||
executionId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
|
||||
return null
|
||||
}
|
||||
try {
|
||||
const key = getMetaKey(executionId)
|
||||
const meta = await redis.hgetall(key)
|
||||
if (!meta || Object.keys(meta).length === 0) return null
|
||||
return meta as unknown as ExecutionStreamMeta
|
||||
} catch (error) {
|
||||
logger.warn('Failed to read execution meta', {
|
||||
executionId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export async function readExecutionEvents(
|
||||
executionId: string,
|
||||
afterEventId: number
|
||||
): Promise<ExecutionEventEntry[]> {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) return []
|
||||
try {
|
||||
const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
|
||||
return raw
|
||||
.map((entry) => {
|
||||
try {
|
||||
return JSON.parse(entry) as ExecutionEventEntry
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
})
|
||||
.filter((entry): entry is ExecutionEventEntry => Boolean(entry))
|
||||
} catch (error) {
|
||||
logger.warn('Failed to read execution events', {
|
||||
executionId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn(
|
||||
'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
|
||||
{
|
||||
executionId,
|
||||
}
|
||||
)
|
||||
return {
|
||||
write: async (event) => ({ eventId: 0, executionId, event }),
|
||||
flush: async () => {},
|
||||
close: async () => {},
|
||||
}
|
||||
}
|
||||
|
||||
let pending: ExecutionEventEntry[] = []
|
||||
let nextEventId = 0
|
||||
let maxReservedId = 0
|
||||
let flushTimer: ReturnType<typeof setTimeout> | null = null
|
||||
|
||||
const scheduleFlush = () => {
|
||||
if (flushTimer) return
|
||||
flushTimer = setTimeout(() => {
|
||||
flushTimer = null
|
||||
void flush()
|
||||
}, FLUSH_INTERVAL_MS)
|
||||
}
|
||||
|
||||
const reserveIds = async (minCount: number) => {
|
||||
const reserveCount = Math.max(RESERVE_BATCH, minCount)
|
||||
const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
|
||||
const startId = newMax - reserveCount + 1
|
||||
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
||||
nextEventId = startId
|
||||
maxReservedId = newMax
|
||||
}
|
||||
}
|
||||
|
||||
let flushPromise: Promise<void> | null = null
|
||||
let closed = false
|
||||
const inflightWrites = new Set<Promise<ExecutionEventEntry>>()
|
||||
|
||||
const doFlush = async () => {
|
||||
if (pending.length === 0) return
|
||||
const batch = pending
|
||||
pending = []
|
||||
try {
|
||||
const key = getEventsKey(executionId)
|
||||
const zaddArgs: (string | number)[] = []
|
||||
for (const entry of batch) {
|
||||
zaddArgs.push(entry.eventId, JSON.stringify(entry))
|
||||
}
|
||||
const pipeline = redis.pipeline()
|
||||
pipeline.zadd(key, ...zaddArgs)
|
||||
pipeline.expire(key, TTL_SECONDS)
|
||||
pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
|
||||
pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
|
||||
await pipeline.exec()
|
||||
} catch (error) {
|
||||
logger.warn('Failed to flush execution events', {
|
||||
executionId,
|
||||
batchSize: batch.length,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
pending = batch.concat(pending)
|
||||
}
|
||||
}
|
||||
|
||||
const flush = async () => {
|
||||
if (flushPromise) {
|
||||
await flushPromise
|
||||
return
|
||||
}
|
||||
flushPromise = doFlush()
|
||||
try {
|
||||
await flushPromise
|
||||
} finally {
|
||||
flushPromise = null
|
||||
if (pending.length > 0) scheduleFlush()
|
||||
}
|
||||
}
|
||||
|
||||
const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
|
||||
if (closed) return { eventId: 0, executionId, event }
|
||||
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
||||
await reserveIds(1)
|
||||
}
|
||||
const eventId = nextEventId++
|
||||
const entry: ExecutionEventEntry = { eventId, executionId, event }
|
||||
pending.push(entry)
|
||||
if (pending.length >= FLUSH_MAX_BATCH) {
|
||||
await flush()
|
||||
} else {
|
||||
scheduleFlush()
|
||||
}
|
||||
return entry
|
||||
}
|
||||
|
||||
const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
|
||||
const p = writeCore(event)
|
||||
inflightWrites.add(p)
|
||||
const remove = () => inflightWrites.delete(p)
|
||||
p.then(remove, remove)
|
||||
return p
|
||||
}
|
||||
|
||||
const close = async () => {
|
||||
closed = true
|
||||
if (flushTimer) {
|
||||
clearTimeout(flushTimer)
|
||||
flushTimer = null
|
||||
}
|
||||
if (inflightWrites.size > 0) {
|
||||
await Promise.allSettled(inflightWrites)
|
||||
}
|
||||
if (flushPromise) {
|
||||
await flushPromise
|
||||
}
|
||||
if (pending.length > 0) {
|
||||
await doFlush()
|
||||
}
|
||||
}
|
||||
|
||||
return { write, flush, close }
|
||||
}
|
||||
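The removed buffer module above exposed a writer with a write/flush/close lifecycle. A minimal sketch of how it was meant to be driven (the module path and the executor wiring are assumptions):

import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
import {
  createExecutionEventWriter,
  setExecutionMeta,
} from '@/lib/workflows/executor/execution-event-buffer' // path assumed

export async function streamExecution(executionId: string, events: AsyncIterable<ExecutionEvent>) {
  await setExecutionMeta(executionId, { status: 'active' })
  const writer = createExecutionEventWriter(executionId)
  try {
    for await (const event of events) {
      // Each write reserves a monotonic eventId and is flushed in small batches.
      await writer.write(event)
    }
    await setExecutionMeta(executionId, { status: 'complete' })
  } finally {
    // close() waits for in-flight writes and flushes anything still pending.
    await writer.close()
  }
}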
@@ -2364,261 +2364,6 @@ describe('hasWorkflowChanged', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('Trigger Config Normalization (False Positive Prevention)', () => {
|
||||
it.concurrent(
|
||||
'should not detect change when deployed has null fields but current has values from triggerConfig',
|
||||
() => {
|
||||
// Core scenario: deployed state has null individual fields, current state has
|
||||
// values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
|
||||
const deployedState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
const currentState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent(
|
||||
'should detect change when user edits a trigger field to a different value',
|
||||
() => {
|
||||
const deployedState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'old-secret' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
const currentState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'old-secret' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should not detect change when both sides have no triggerConfig', () => {
|
||||
const deployedState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
const currentState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should not detect change when deployed has empty fields and triggerConfig populates them',
|
||||
() => {
|
||||
// Empty string is also treated as "empty" by normalizeTriggerConfigValues
|
||||
const deployedState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
const currentState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should not detect change when triggerId differs', () => {
|
||||
const deployedState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
model: { value: 'gpt-4' },
|
||||
triggerId: { value: null },
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
const currentState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
model: { value: 'gpt-4' },
|
||||
triggerId: { value: 'slack_webhook' },
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
|
||||
() => {
|
||||
const deployedState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
model: { value: 'gpt-4' },
|
||||
samplePayload_slack_webhook: { value: 'old payload' },
|
||||
triggerInstructions_slack_webhook: { value: 'old instructions' },
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
const currentState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
model: { value: 'gpt-4' },
|
||||
samplePayload_slack_webhook: { value: 'new payload' },
|
||||
triggerInstructions_slack_webhook: { value: 'new instructions' },
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent(
|
||||
'should handle mixed scenario: some fields from triggerConfig, some user-edited',
|
||||
() => {
|
||||
const deployedState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
||||
includeFiles: { id: 'includeFiles', type: 'switch', value: false },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
const currentState = createWorkflowState({
|
||||
blocks: {
|
||||
block1: createBlock('block1', {
|
||||
type: 'starter',
|
||||
subBlocks: {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
||||
includeFiles: { id: 'includeFiles', type: 'switch', value: true },
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
||||
},
|
||||
},
|
||||
}),
|
||||
},
|
||||
})
|
||||
|
||||
// includeFiles changed from false to true — this IS a real change
|
||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
|
||||
it.concurrent('should not detect change when webhookId differs', () => {
|
||||
const deployedState = createWorkflowState({
|
||||
|
||||
@@ -9,7 +9,6 @@ import {
|
||||
normalizeLoop,
|
||||
normalizeParallel,
|
||||
normalizeSubBlockValue,
|
||||
normalizeTriggerConfigValues,
|
||||
normalizeValue,
|
||||
normalizeVariables,
|
||||
sanitizeVariable,
|
||||
@@ -173,18 +172,14 @@ export function generateWorkflowDiffSummary(
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize trigger config values for both states before comparison
|
||||
const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
|
||||
const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
|
||||
|
||||
// Compare subBlocks using shared helper for filtering (single source of truth)
|
||||
const allSubBlockIds = filterSubBlockIds([
|
||||
...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
|
||||
...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
|
||||
])
|
||||
|
||||
for (const subId of allSubBlockIds) {
|
||||
const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
|
||||
const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined
|
||||
const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
|
||||
const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
|
||||
|
||||
if (!currentSub || !previousSub) {
|
||||
changes.push({
|
||||
|
||||
@@ -4,12 +4,10 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||
import {
|
||||
filterSubBlockIds,
|
||||
normalizedStringify,
|
||||
normalizeEdge,
|
||||
normalizeLoop,
|
||||
normalizeParallel,
|
||||
normalizeTriggerConfigValues,
|
||||
normalizeValue,
|
||||
sanitizeInputFormat,
|
||||
sanitizeTools,
|
||||
@@ -586,214 +584,4 @@ describe('Workflow Normalization Utilities', () => {
|
||||
expect(result2).toBe(result3)
|
||||
})
|
||||
})
|
||||
|
||||
describe('filterSubBlockIds', () => {
|
||||
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
|
||||
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['botToken', 'signingSecret'])
|
||||
})
|
||||
|
||||
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
|
||||
const ids = [
|
||||
'signingSecret',
|
||||
'samplePayload_slack_webhook',
|
||||
'triggerInstructions_slack_webhook',
|
||||
'webhookUrlDisplay_slack_webhook',
|
||||
'botToken',
|
||||
]
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['botToken', 'signingSecret'])
|
||||
})
|
||||
|
||||
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
|
||||
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['signingSecret'])
|
||||
})
|
||||
|
||||
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
|
||||
const ids = ['mySamplePayload', 'notSamplePayload']
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
|
||||
})
|
||||
|
||||
it.concurrent('should return sorted results', () => {
|
||||
const ids = ['zebra', 'alpha', 'middle']
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['alpha', 'middle', 'zebra'])
|
||||
})
|
||||
|
||||
it.concurrent('should handle empty array', () => {
|
||||
expect(filterSubBlockIds([])).toEqual([])
|
||||
})
|
||||
|
||||
it.concurrent('should handle all IDs being excluded', () => {
|
||||
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual([])
|
||||
})
|
||||
|
||||
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
|
||||
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['realField'])
|
||||
})
|
||||
|
||||
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
|
||||
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['signingSecret'])
|
||||
})
|
||||
})
|
||||
|
||||
describe('normalizeTriggerConfigValues', () => {
|
||||
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
|
||||
const subBlocks = {
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect(result).toEqual(subBlocks)
|
||||
})
|
||||
|
||||
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect(result).toEqual(subBlocks)
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should return subBlocks unchanged when triggerConfig value is not an object',
|
||||
() => {
|
||||
const subBlocks = {
|
||||
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect(result).toEqual(subBlocks)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should populate null individual fields from triggerConfig', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
||||
},
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
||||
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
|
||||
})
|
||||
|
||||
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123' },
|
||||
},
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
||||
})
|
||||
|
||||
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123' },
|
||||
},
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
||||
})
|
||||
|
||||
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'old-secret' },
|
||||
},
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
|
||||
})
|
||||
|
||||
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: null, botToken: undefined },
|
||||
},
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
||||
expect((result.botToken as Record<string, unknown>).value).toBe(null)
|
||||
})
|
||||
|
||||
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { nonExistentField: 'value123' },
|
||||
},
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
expect(result.nonExistentField).toBeUndefined()
|
||||
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
||||
})
|
||||
|
||||
it.concurrent('should not mutate the original subBlocks object', () => {
|
||||
const original = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123' },
|
||||
},
|
||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||
}
|
||||
normalizeTriggerConfigValues(original)
|
||||
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
|
||||
})
|
||||
|
||||
it.concurrent('should preserve other subBlock properties when populating value', () => {
|
||||
const subBlocks = {
|
||||
triggerConfig: {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: { signingSecret: 'secret123' },
|
||||
},
|
||||
signingSecret: {
|
||||
id: 'signingSecret',
|
||||
type: 'short-input',
|
||||
value: null,
|
||||
placeholder: 'Enter signing secret',
|
||||
},
|
||||
}
|
||||
const result = normalizeTriggerConfigValues(subBlocks)
|
||||
const normalized = result.signingSecret as Record<string, unknown>
|
||||
expect(normalized.value).toBe('secret123')
|
||||
expect(normalized.id).toBe('signingSecret')
|
||||
expect(normalized.type).toBe('short-input')
|
||||
expect(normalized.placeholder).toBe('Enter signing secret')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -418,48 +418,10 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
|
||||
*/
|
||||
export function filterSubBlockIds(subBlockIds: string[]): string[] {
|
||||
return subBlockIds
|
||||
.filter((id) => {
|
||||
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
|
||||
if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
|
||||
return false
|
||||
return true
|
||||
})
|
||||
.filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
|
||||
.sort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes trigger block subBlocks by populating null/empty individual fields
|
||||
* from the triggerConfig aggregate subBlock. This compensates for the runtime
|
||||
* population done by populateTriggerFieldsFromConfig, ensuring consistent
|
||||
* comparison between client state (with populated values) and deployed state
|
||||
* (with null values from DB).
|
||||
*/
|
||||
export function normalizeTriggerConfigValues(
|
||||
subBlocks: Record<string, unknown>
|
||||
): Record<string, unknown> {
|
||||
const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
|
||||
const triggerConfigValue = triggerConfigSub?.value
|
||||
if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
|
||||
return subBlocks
|
||||
}
|
||||
|
||||
const result = { ...subBlocks }
|
||||
for (const [fieldId, configValue] of Object.entries(
|
||||
triggerConfigValue as Record<string, unknown>
|
||||
)) {
|
||||
if (configValue === null || configValue === undefined) continue
|
||||
const existingSub = result[fieldId] as Record<string, unknown> | undefined
|
||||
if (
|
||||
existingSub &&
|
||||
(existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
|
||||
) {
|
||||
result[fieldId] = { ...existingSub, value: configValue }
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
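A small illustration of what the normalization above does to a trigger block's subBlocks (the import path is assumed; the values mirror the tests elsewhere in this diff):

import { normalizeTriggerConfigValues } from '@/lib/workflows/comparison/utils' // path assumed

const subBlocks = {
  triggerConfig: {
    id: 'triggerConfig',
    type: 'short-input',
    value: { signingSecret: 'secret123' },
  },
  signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}

// Null/empty individual fields are filled from triggerConfig; non-empty values win.
const normalized = normalizeTriggerConfigValues(subBlocks)
// (normalized.signingSecret as { value: unknown }).value === 'secret123'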
/**
|
||||
* Normalizes a subBlock value with sanitization for specific subBlock types.
|
||||
* Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)
|
||||
|
||||
@@ -137,37 +137,6 @@ function handleSecurityFiltering(request: NextRequest): NextResponse | null {
  return null
}

const UTM_KEYS = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_content'] as const
const UTM_COOKIE_NAME = 'sim_utm'
const UTM_COOKIE_MAX_AGE = 3600

/**
 * Sets a `sim_utm` cookie when UTM params are present on auth pages.
 * Captures UTM values, the HTTP Referer, landing page, and a timestamp
 * used by the attribution API to verify the user signed up after visiting the link.
 */
function setUtmCookie(request: NextRequest, response: NextResponse): void {
  const { searchParams, pathname } = request.nextUrl
  const hasUtm = UTM_KEYS.some((key) => searchParams.get(key))
  if (!hasUtm) return

  const utmData: Record<string, string> = {}
  for (const key of UTM_KEYS) {
    const value = searchParams.get(key)
    if (value) utmData[key] = value
  }
  utmData.referrer_url = request.headers.get('referer') || ''
  utmData.landing_page = pathname
  utmData.created_at = Date.now().toString()

  response.cookies.set(UTM_COOKIE_NAME, JSON.stringify(utmData), {
    path: '/',
    maxAge: UTM_COOKIE_MAX_AGE,
    sameSite: 'lax',
    httpOnly: false, // Client-side hook needs to detect cookie presence
  })
}

export async function proxy(request: NextRequest) {
  const url = request.nextUrl

@@ -183,7 +152,6 @@ export async function proxy(request: NextRequest) {
  }
  const response = NextResponse.next()
  response.headers.set('Content-Security-Policy', generateRuntimeCSP())
  setUtmCookie(request, response)
  return response
}

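Because the cookie is written with `httpOnly: false`, a client-side hook can detect and read it. A hypothetical sketch (not part of this diff; cookie encoding may differ by runtime):

// Hypothetical client-side helper: parse the sim_utm cookie written by the middleware.
export function readUtmAttribution(): Record<string, string> | null {
  if (typeof document === 'undefined') return null
  const entry = document.cookie.split('; ').find((c) => c.startsWith('sim_utm='))
  if (!entry) return null
  try {
    return JSON.parse(decodeURIComponent(entry.slice('sim_utm='.length)))
  } catch {
    return null
  }
}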
@@ -129,18 +129,6 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
|
||||
})
|
||||
},
|
||||
|
||||
setCurrentExecutionId: (workflowId, executionId) => {
|
||||
set({
|
||||
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
|
||||
currentExecutionId: executionId,
|
||||
}),
|
||||
})
|
||||
},
|
||||
|
||||
getCurrentExecutionId: (workflowId) => {
|
||||
return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
|
||||
},
|
||||
|
||||
clearRunPath: (workflowId) => {
|
||||
set({
|
||||
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
|
||||
|
||||
@@ -35,8 +35,6 @@ export interface WorkflowExecutionState {
|
||||
lastRunPath: Map<string, BlockRunStatus>
|
||||
/** Maps edge IDs to their run result from the last execution */
|
||||
lastRunEdges: Map<string, EdgeRunStatus>
|
||||
/** The execution ID of the currently running execution */
|
||||
currentExecutionId: string | null
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -56,7 +54,6 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
|
||||
debugContext: null,
|
||||
lastRunPath: new Map(),
|
||||
lastRunEdges: new Map(),
|
||||
currentExecutionId: null,
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -99,10 +96,6 @@ export interface ExecutionActions {
|
||||
setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
|
||||
/** Clears the run path and run edges for a workflow */
|
||||
clearRunPath: (workflowId: string) => void
|
||||
/** Stores the current execution ID for a workflow */
|
||||
setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
|
||||
/** Returns the current execution ID for a workflow */
|
||||
getCurrentExecutionId: (workflowId: string) => string | null
|
||||
/** Resets the entire store to its initial empty state */
|
||||
reset: () => void
|
||||
/** Stores a serializable execution snapshot for a workflow */
|
||||
|
||||
@@ -1,5 +1,6 @@
export type SettingsSection =
  | 'general'
  | 'credentials'
  | 'environment'
  | 'template-profile'
  | 'integrations'

@@ -310,50 +310,6 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
|
||||
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
|
||||
*
|
||||
* Examples:
|
||||
* - claude-4.5-opus -> claude-opus-4-5
|
||||
* - claude-opus-4.6 -> claude-opus-4-6
|
||||
* - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
|
||||
*/
|
||||
function canonicalizeModelMatchKey(modelId: string): string {
|
||||
if (!modelId) return modelId
|
||||
const normalized = modelId.trim().toLowerCase()
|
||||
|
||||
const toCanonicalClaude = (tier: string, version: string): string => {
|
||||
const normalizedVersion = version.replace(/\./g, '-')
|
||||
return `claude-${tier}-${normalizedVersion}`
|
||||
}
|
||||
|
||||
const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
|
||||
if (tierFirstExact) {
|
||||
const [, tier, version] = tierFirstExact
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
|
||||
if (versionFirstExact) {
|
||||
const [, version, tier] = versionFirstExact
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
|
||||
if (tierFirstEmbedded) {
|
||||
const [, tier, version] = tierFirstEmbedded
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
|
||||
if (versionFirstEmbedded) {
|
||||
const [, version, tier] = versionFirstEmbedded
|
||||
return toCanonicalClaude(tier, version)
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
const MODEL_PROVIDER_PRIORITY = [
|
||||
'anthropic',
|
||||
'bedrock',
|
||||
@@ -394,23 +350,12 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
|
||||
|
||||
const { provider, modelId } = parseModelKey(selectedModel)
|
||||
const targetModelId = modelId || selectedModel
|
||||
const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
|
||||
|
||||
const matches = models.filter((m) => {
|
||||
const candidateModelId = parseModelKey(m.id).modelId || m.id
|
||||
const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
|
||||
return (
|
||||
candidateModelId === targetModelId ||
|
||||
m.id.endsWith(`/${targetModelId}`) ||
|
||||
candidateMatchKey === targetMatchKey
|
||||
)
|
||||
})
|
||||
const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
|
||||
if (matches.length === 0) return selectedModel
|
||||
|
||||
if (provider) {
|
||||
const sameProvider = matches.find(
|
||||
(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
|
||||
)
|
||||
const sameProvider = matches.find((m) => m.provider === provider)
|
||||
if (sameProvider) return sameProvider.id
|
||||
}
|
||||
|
||||
@@ -1148,12 +1093,11 @@ export const useCopilotStore = create<CopilotStore>()(
      const chatConfig = chat.config ?? {}
      const chatMode = chatConfig.mode || get().mode
      const chatModel = chatConfig.model || get().selectedModel
      const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)

      logger.debug('[Chat] Restoring chat config', {
        chatId: chat.id,
        mode: chatMode,
        model: normalizedChatModel,
        model: chatModel,
        hasPlanArtifact: !!planArtifact,
      })

@@ -1175,7 +1119,7 @@ export const useCopilotStore = create<CopilotStore>()(
        showPlanTodos: false,
        streamingPlanContent: planArtifact,
        mode: chatMode,
        selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
        selectedModel: chatModel as CopilotStore['selectedModel'],
        suppressAutoSelect: false,
      })

@@ -1348,10 +1292,6 @@ export const useCopilotStore = create<CopilotStore>()(
      const refreshedConfig = updatedCurrentChat.config ?? {}
      const refreshedMode = refreshedConfig.mode || get().mode
      const refreshedModel = refreshedConfig.model || get().selectedModel
      const normalizedRefreshedModel = normalizeSelectedModelKey(
        refreshedModel,
        get().availableModels
      )
      const toolCallsById = buildToolCallsById(normalizedMessages)

      set({
@@ -1360,7 +1300,7 @@ export const useCopilotStore = create<CopilotStore>()(
        toolCallsById,
        streamingPlanContent: refreshedPlanArtifact,
        mode: refreshedMode,
        selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
        selectedModel: refreshedModel as CopilotStore['selectedModel'],
      })
    }
    try {
@@ -1380,15 +1320,11 @@ export const useCopilotStore = create<CopilotStore>()(
      const chatConfig = mostRecentChat.config ?? {}
      const chatMode = chatConfig.mode || get().mode
      const chatModel = chatConfig.model || get().selectedModel
      const normalizedChatModel = normalizeSelectedModelKey(
        chatModel,
        get().availableModels
      )

      logger.info('[Chat] Auto-selecting most recent chat with config', {
        chatId: mostRecentChat.id,
        mode: chatMode,
        model: normalizedChatModel,
        model: chatModel,
        hasPlanArtifact: !!planArtifact,
      })

@@ -1400,7 +1336,7 @@ export const useCopilotStore = create<CopilotStore>()(
        toolCallsById,
        streamingPlanContent: planArtifact,
        mode: chatMode,
        selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
        selectedModel: chatModel as CopilotStore['selectedModel'],
      })
      try {
        await get().loadMessageCheckpoints(mostRecentChat.id)
@@ -2332,8 +2268,7 @@ export const useCopilotStore = create<CopilotStore>()(
    },

    setSelectedModel: async (model) => {
      const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
      set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
      set({ selectedModel: model })
    },
    setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
    loadAvailableModels: async () => {

@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(

      const newEntry = get().entries[0]

      if (newEntry?.error && newEntry.blockType !== 'cancelled') {
      if (newEntry?.error) {
        notifyBlockError({
          error: newEntry.error,
          blockName: newEntry.blockName || 'Unknown Block',
@@ -243,11 +243,6 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
        useExecutionStore.getState().clearRunPath(workflowId)
      },

      clearExecutionEntries: (executionId: string) =>
        set((state) => ({
          entries: state.entries.filter((e) => e.executionId !== executionId),
        })),

      exportConsoleCSV: (workflowId: string) => {
        const entries = get().entries.filter((entry) => entry.workflowId === workflowId)

@@ -475,24 +470,12 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
      },
      merge: (persistedState, currentState) => {
        const persisted = persistedState as Partial<ConsoleStore> | undefined
        const rawEntries = persisted?.entries ?? currentState.entries
        const oneHourAgo = Date.now() - 60 * 60 * 1000

        const entries = rawEntries.map((entry, index) => {
          let updated = entry
        const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
          if (entry.executionOrder === undefined) {
            updated = { ...updated, executionOrder: index + 1 }
            return { ...entry, executionOrder: index + 1 }
          }
          if (
            entry.isRunning &&
            entry.startedAt &&
            new Date(entry.startedAt).getTime() < oneHourAgo
          ) {
            updated = { ...updated, isRunning: false }
          }
          return updated
          return entry
        })

        return {
          ...currentState,
          entries,

@@ -51,7 +51,6 @@ export interface ConsoleStore {
  isOpen: boolean
  addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
  clearWorkflowConsole: (workflowId: string) => void
  clearExecutionEntries: (executionId: string) => void
  exportConsoleCSV: (workflowId: string) => void
  getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
  toggleConsole: () => void

@@ -1,114 +0,0 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceDeleteLabelParams {
  accessToken: string
  domain: string
  pageId: string
  labelName: string
  cloudId?: string
}

export interface ConfluenceDeleteLabelResponse {
  success: boolean
  output: {
    ts: string
    pageId: string
    labelName: string
    deleted: boolean
  }
}

export const confluenceDeleteLabelTool: ToolConfig<
  ConfluenceDeleteLabelParams,
  ConfluenceDeleteLabelResponse
> = {
  id: 'confluence_delete_label',
  name: 'Confluence Delete Label',
  description: 'Remove a label from a Confluence page.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    pageId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'Confluence page ID to remove the label from',
    },
    labelName: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'Name of the label to remove',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: () => '/api/tools/confluence/labels',
    method: 'DELETE',
    headers: (params: ConfluenceDeleteLabelParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
    body: (params: ConfluenceDeleteLabelParams) => ({
      domain: params.domain,
      accessToken: params.accessToken,
      pageId: params.pageId?.trim(),
      labelName: params.labelName?.trim(),
      cloudId: params.cloudId,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        pageId: data.pageId ?? '',
        labelName: data.labelName ?? '',
        deleted: true,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    pageId: {
      type: 'string',
      description: 'Page ID the label was removed from',
    },
    labelName: {
      type: 'string',
      description: 'Name of the removed label',
    },
    deleted: {
      type: 'boolean',
      description: 'Deletion status',
    },
  },
}
@@ -1,105 +0,0 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceDeletePagePropertyParams {
  accessToken: string
  domain: string
  pageId: string
  propertyId: string
  cloudId?: string
}

export interface ConfluenceDeletePagePropertyResponse {
  success: boolean
  output: {
    ts: string
    pageId: string
    propertyId: string
    deleted: boolean
  }
}

export const confluenceDeletePagePropertyTool: ToolConfig<
  ConfluenceDeletePagePropertyParams,
  ConfluenceDeletePagePropertyResponse
> = {
  id: 'confluence_delete_page_property',
  name: 'Confluence Delete Page Property',
  description: 'Delete a content property from a Confluence page by its property ID.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    pageId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the page containing the property',
    },
    propertyId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the property to delete',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: () => '/api/tools/confluence/page-properties',
    method: 'DELETE',
    headers: (params: ConfluenceDeletePagePropertyParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
    body: (params: ConfluenceDeletePagePropertyParams) => ({
      domain: params.domain,
      accessToken: params.accessToken,
      pageId: params.pageId?.trim(),
      propertyId: params.propertyId?.trim(),
      cloudId: params.cloudId,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        pageId: data.pageId ?? '',
        propertyId: data.propertyId ?? '',
        deleted: true,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    pageId: { type: 'string', description: 'ID of the page' },
    propertyId: { type: 'string', description: 'ID of the deleted property' },
    deleted: { type: 'boolean', description: 'Deletion status' },
  },
}
@@ -1,143 +0,0 @@
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceGetPagesByLabelParams {
  accessToken: string
  domain: string
  labelId: string
  limit?: number
  cursor?: string
  cloudId?: string
}

export interface ConfluenceGetPagesByLabelResponse {
  success: boolean
  output: {
    ts: string
    labelId: string
    pages: Array<{
      id: string
      title: string
      status: string | null
      spaceId: string | null
      parentId: string | null
      authorId: string | null
      createdAt: string | null
      version: {
        number: number
        message?: string
        createdAt?: string
      } | null
    }>
    nextCursor: string | null
  }
}

export const confluenceGetPagesByLabelTool: ToolConfig<
  ConfluenceGetPagesByLabelParams,
  ConfluenceGetPagesByLabelResponse
> = {
  id: 'confluence_get_pages_by_label',
  name: 'Confluence Get Pages by Label',
  description: 'Retrieve all pages that have a specific label applied.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    labelId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the label to get pages for',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of pages to return (default: 50, max: 250)',
    },
    cursor: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Pagination cursor from previous response',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: (params: ConfluenceGetPagesByLabelParams) => {
      const query = new URLSearchParams({
        domain: params.domain,
        accessToken: params.accessToken,
        labelId: params.labelId,
        limit: String(params.limit || 50),
      })
      if (params.cursor) {
        query.set('cursor', params.cursor)
      }
      if (params.cloudId) {
        query.set('cloudId', params.cloudId)
      }
      return `/api/tools/confluence/pages-by-label?${query.toString()}`
    },
    method: 'GET',
    headers: (params: ConfluenceGetPagesByLabelParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        labelId: data.labelId ?? '',
        pages: data.pages ?? [],
        nextCursor: data.nextCursor ?? null,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    labelId: { type: 'string', description: 'ID of the label' },
    pages: {
      type: 'array',
      description: 'Array of pages with this label',
      items: {
        type: 'object',
        properties: PAGE_ITEM_PROPERTIES,
      },
    },
    nextCursor: {
      type: 'string',
      description: 'Cursor for fetching the next page of results',
      optional: true,
    },
  },
}