mirror of https://github.com/simstudioai/sim.git
synced 2026-02-16 01:15:26 -05:00

Compare commits: feat/sim-p ... active-exe
13 Commits

| SHA1 |
|---|
| 42d09ef8cb |
| cbb98a0868 |
| 5b0532d473 |
| 3ef6b05035 |
| 7fbbc7ba7a |
| a337aa7dfe |
| 022e84c4b1 |
| 602e371a7a |
| 9a06cae591 |
| dce47a101c |
| 1130f8ddb2 |
| fc97ce007d |
| 6c006cdfec |
@@ -1157,6 +1157,21 @@ export function AirweaveIcon(props: SVGProps<SVGSVGElement>) {
  )
}

export function GoogleBooksIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 478.633 540.068'>
      <path
        fill='#1C51A4'
        d='M449.059,218.231L245.519,99.538l-0.061,193.23c0.031,1.504-0.368,2.977-1.166,4.204c-0.798,1.258-1.565,1.995-2.915,2.547c-1.35,0.552-2.792,0.706-4.204,0.399c-1.412-0.307-2.7-1.043-3.713-2.117l-69.166-70.609l-69.381,70.179c-1.013,0.982-2.301,1.657-3.652,1.903c-1.381,0.246-2.792,0.092-4.081-0.491c-1.289-0.583-1.626-0.522-2.394-1.749c-0.767-1.197-1.197-2.608-1.197-4.081L85.031,6.007l-2.915-1.289C43.973-11.638,0,16.409,0,59.891v420.306c0,46.029,49.312,74.782,88.775,51.767l360.285-210.138C488.491,298.782,488.491,241.246,449.059,218.231z'
      />
      <path
        fill='#80D7FB'
        d='M88.805,8.124c-2.179-1.289-4.419-2.363-6.659-3.345l0.123,288.663c0,1.442,0.43,2.854,1.197,4.081c0.767,1.197,1.872,2.148,3.161,2.731c1.289,0.583,2.7,0.736,4.081,0.491c1.381-0.246,2.639-0.921,3.652-1.903l69.749-69.688l69.811,69.749c1.013,1.074,2.301,1.81,3.713,2.117c1.412,0.307,2.884,0.153,4.204-0.399c1.319-0.552,2.455-1.565,3.253-2.792c0.798-1.258,1.197-2.731,1.166-4.204V99.998L88.805,8.124z'
      />
    </svg>
  )
}

export function GoogleDocsIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
@@ -38,6 +38,7 @@ import {
  GithubIcon,
  GitLabIcon,
  GmailIcon,
  GoogleBooksIcon,
  GoogleCalendarIcon,
  GoogleDocsIcon,
  GoogleDriveIcon,
@@ -172,6 +173,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
  github_v2: GithubIcon,
  gitlab: GitLabIcon,
  gmail_v2: GmailIcon,
  google_books: GoogleBooksIcon,
  google_calendar_v2: GoogleCalendarIcon,
  google_docs: GoogleDocsIcon,
  google_drive: GoogleDriveIcon,
apps/docs/content/docs/en/tools/google_books.mdx (new file, 96 lines)
@@ -0,0 +1,96 @@
---
title: Google Books
description: Search and retrieve book information
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="google_books"
  color="#E0E0E0"
/>

## Usage Instructions

Search for books using the Google Books API. Find volumes by title, author, ISBN, or keywords, and retrieve detailed information about specific books including descriptions, ratings, and publication details.

## Tools

### `google_books_volume_search`

Search for books using the Google Books API

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Google Books API key |
| `query` | string | Yes | Search query. Supports special keywords: intitle:, inauthor:, inpublisher:, subject:, isbn: |
| `filter` | string | No | Filter results by availability \(partial, full, free-ebooks, paid-ebooks, ebooks\) |
| `printType` | string | No | Restrict to print type \(all, books, magazines\) |
| `orderBy` | string | No | Sort order \(relevance, newest\) |
| `startIndex` | number | No | Index of the first result to return \(for pagination\) |
| `maxResults` | number | No | Maximum number of results to return \(1-40\) |
| `langRestrict` | string | No | Restrict results to a specific language \(ISO 639-1 code\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `totalItems` | number | Total number of matching results |
| `volumes` | array | List of matching volumes |
| ↳ `id` | string | Volume ID |
| ↳ `title` | string | Book title |
| ↳ `subtitle` | string | Book subtitle |
| ↳ `authors` | array | List of authors |
| ↳ `publisher` | string | Publisher name |
| ↳ `publishedDate` | string | Publication date |
| ↳ `description` | string | Book description |
| ↳ `pageCount` | number | Number of pages |
| ↳ `categories` | array | Book categories |
| ↳ `averageRating` | number | Average rating \(1-5\) |
| ↳ `ratingsCount` | number | Number of ratings |
| ↳ `language` | string | Language code |
| ↳ `previewLink` | string | Link to preview on Google Books |
| ↳ `infoLink` | string | Link to info page |
| ↳ `thumbnailUrl` | string | Book cover thumbnail URL |
| ↳ `isbn10` | string | ISBN-10 identifier |
| ↳ `isbn13` | string | ISBN-13 identifier |

### `google_books_volume_details`

Get detailed information about a specific book volume

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Google Books API key |
| `volumeId` | string | Yes | The ID of the volume to retrieve |
| `projection` | string | No | Projection level \(full, lite\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Volume ID |
| `title` | string | Book title |
| `subtitle` | string | Book subtitle |
| `authors` | array | List of authors |
| `publisher` | string | Publisher name |
| `publishedDate` | string | Publication date |
| `description` | string | Book description |
| `pageCount` | number | Number of pages |
| `categories` | array | Book categories |
| `averageRating` | number | Average rating \(1-5\) |
| `ratingsCount` | number | Number of ratings |
| `language` | string | Language code |
| `previewLink` | string | Link to preview on Google Books |
| `infoLink` | string | Link to info page |
| `thumbnailUrl` | string | Book cover thumbnail URL |
| `isbn10` | string | ISBN-10 identifier |
| `isbn13` | string | ISBN-13 identifier |
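For reference, the `google_books_volume_search` tool maps onto the public Google Books `volumes` endpoint. The sketch below is illustrative only, not the block's actual implementation in this PR; it shows how the documented parameters translate into a direct call to `https://www.googleapis.com/books/v1/volumes`, including the `intitle:`/`inauthor:` query keywords from the table above.

```ts
interface VolumeSearchParams {
  apiKey: string
  query: string // supports intitle:, inauthor:, inpublisher:, subject:, isbn:
  maxResults?: number // 1-40
  orderBy?: 'relevance' | 'newest'
  langRestrict?: string // ISO 639-1 code
}

// Minimal sketch of a volume search against the public Google Books API.
async function searchVolumes(params: VolumeSearchParams) {
  const url = new URL('https://www.googleapis.com/books/v1/volumes')
  url.searchParams.set('q', params.query)
  url.searchParams.set('key', params.apiKey)
  if (params.maxResults) url.searchParams.set('maxResults', String(params.maxResults))
  if (params.orderBy) url.searchParams.set('orderBy', params.orderBy)
  if (params.langRestrict) url.searchParams.set('langRestrict', params.langRestrict)

  const res = await fetch(url)
  if (!res.ok) throw new Error(`Google Books API error: ${res.status}`)
  const data = await res.json()

  // Shape roughly mirrors the documented output above (totalItems + volumes).
  return {
    totalItems: data.totalItems as number,
    volumes: (data.items ?? []).map((item: any) => ({
      id: item.id,
      title: item.volumeInfo?.title,
      authors: item.volumeInfo?.authors ?? [],
    })),
  }
}

// e.g. searchVolumes({ apiKey, query: 'inauthor:"Ursula K. Le Guin" intitle:dispossessed' })
```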
@@ -33,6 +33,7 @@
  "github",
  "gitlab",
  "gmail",
  "google_books",
  "google_calendar",
  "google_docs",
  "google_drive",
@@ -71,6 +71,7 @@ Retrieve an object from an AWS S3 bucket
| --------- | ---- | -------- | ----------- |
| `accessKeyId` | string | Yes | Your AWS Access Key ID |
| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
| `region` | string | No | Optional region override when URL does not include region \(e.g., us-east-1, eu-west-1\) |
| `s3Uri` | string | Yes | S3 Object URL \(e.g., https://bucket.s3.region.amazonaws.com/path/to/file\) |

#### Output
@@ -79,7 +79,7 @@ Send messages to Slack channels or direct messages. Supports Slack mrkdwn format
| `channel` | string | No | Slack channel ID \(e.g., C1234567890\) |
| `dmUserId` | string | No | Slack user ID for direct messages \(e.g., U1234567890\) |
| `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) |
| `thread_ts` | string | No | Thread timestamp to reply to \(creates thread reply\) |
| `threadTs` | string | No | Thread timestamp to reply to \(creates thread reply\) |
| `files` | file[] | No | Files to attach to the message |

#### Output
@@ -13,6 +13,7 @@ BETTER_AUTH_URL=http://localhost:3000

# NextJS (Required)
NEXT_PUBLIC_APP_URL=http://localhost:3000
# INTERNAL_API_BASE_URL=http://sim-app.default.svc.cluster.local:3000 # Optional: internal URL for server-side /api self-calls; defaults to NEXT_PUBLIC_APP_URL

# Security (Required)
ENCRYPTION_KEY=your_encryption_key # Use `openssl rand -hex 32` to generate, used to encrypt environment variables
@@ -1,7 +1,7 @@
import type { Artifact, Message, PushNotificationConfig, Task, TaskState } from '@a2a-js/sdk'
import { v4 as uuidv4 } from 'uuid'
import { generateInternalToken } from '@/lib/auth/internal'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'

/** A2A v0.3 JSON-RPC method names */
export const A2A_METHODS = {
@@ -118,7 +118,7 @@ export interface ExecuteRequestResult {
export async function buildExecuteRequest(
  config: ExecuteRequestConfig
): Promise<ExecuteRequestResult> {
  const url = `${getBaseUrl()}/api/workflows/${config.workflowId}/execute`
  const url = `${getInternalApiBaseUrl()}/api/workflows/${config.workflowId}/execute`
  const headers: Record<string, string> = { 'Content-Type': 'application/json' }
  let useInternalAuth = false
apps/sim/app/api/attribution/route.ts (new file, 187 lines)
@@ -0,0 +1,187 @@
/**
 * POST /api/attribution
 *
 * Automatic UTM-based referral attribution.
 *
 * Reads the `sim_utm` cookie (set by proxy on auth pages), matches a campaign
 * by UTM specificity, and atomically inserts an attribution record + applies
 * bonus credits.
 *
 * Idempotent — the unique constraint on `userId` prevents double-attribution.
 */

import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { cookies } from 'next/headers'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'

const logger = createLogger('AttributionAPI')

const COOKIE_NAME = 'sim_utm'

const UtmCookieSchema = z.object({
  utm_source: z.string().optional(),
  utm_medium: z.string().optional(),
  utm_campaign: z.string().optional(),
  utm_content: z.string().optional(),
  referrer_url: z.string().optional(),
  landing_page: z.string().optional(),
  created_at: z.string().optional(),
})

/**
 * Finds the most specific active campaign matching the given UTM params.
 * Null fields on a campaign act as wildcards. Ties broken by newest campaign.
 */
async function findMatchingCampaign(utmData: z.infer<typeof UtmCookieSchema>) {
  const campaigns = await db
    .select()
    .from(referralCampaigns)
    .where(eq(referralCampaigns.isActive, true))

  let bestMatch: (typeof campaigns)[number] | null = null
  let bestScore = -1

  for (const campaign of campaigns) {
    let score = 0
    let mismatch = false

    const fields = [
      { campaignVal: campaign.utmSource, utmVal: utmData.utm_source },
      { campaignVal: campaign.utmMedium, utmVal: utmData.utm_medium },
      { campaignVal: campaign.utmCampaign, utmVal: utmData.utm_campaign },
      { campaignVal: campaign.utmContent, utmVal: utmData.utm_content },
    ] as const

    for (const { campaignVal, utmVal } of fields) {
      if (campaignVal === null) continue
      if (campaignVal === utmVal) {
        score++
      } else {
        mismatch = true
        break
      }
    }

    if (!mismatch && score > 0) {
      if (
        score > bestScore ||
        (score === bestScore &&
          bestMatch &&
          campaign.createdAt.getTime() > bestMatch.createdAt.getTime())
      ) {
        bestScore = score
        bestMatch = campaign
      }
    }
  }

  return bestMatch
}

export async function POST() {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const cookieStore = await cookies()
    const utmCookie = cookieStore.get(COOKIE_NAME)
    if (!utmCookie?.value) {
      return NextResponse.json({ attributed: false, reason: 'no_utm_cookie' })
    }

    let utmData: z.infer<typeof UtmCookieSchema>
    try {
      let decoded: string
      try {
        decoded = decodeURIComponent(utmCookie.value)
      } catch {
        decoded = utmCookie.value
      }
      utmData = UtmCookieSchema.parse(JSON.parse(decoded))
    } catch {
      logger.warn('Failed to parse UTM cookie', { userId: session.user.id })
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
    }

    const matchedCampaign = await findMatchingCampaign(utmData)
    if (!matchedCampaign) {
      cookieStore.delete(COOKIE_NAME)
      return NextResponse.json({ attributed: false, reason: 'no_matching_campaign' })
    }

    const bonusAmount = Number(matchedCampaign.bonusCreditAmount)

    let attributed = false
    await db.transaction(async (tx) => {
      const [existingStats] = await tx
        .select({ id: userStats.id })
        .from(userStats)
        .where(eq(userStats.userId, session.user.id))
        .limit(1)

      if (!existingStats) {
        await tx.insert(userStats).values({
          id: nanoid(),
          userId: session.user.id,
        })
      }

      const result = await tx
        .insert(referralAttribution)
        .values({
          id: nanoid(),
          userId: session.user.id,
          campaignId: matchedCampaign.id,
          utmSource: utmData.utm_source || null,
          utmMedium: utmData.utm_medium || null,
          utmCampaign: utmData.utm_campaign || null,
          utmContent: utmData.utm_content || null,
          referrerUrl: utmData.referrer_url || null,
          landingPage: utmData.landing_page || null,
          bonusCreditAmount: bonusAmount.toString(),
        })
        .onConflictDoNothing({ target: referralAttribution.userId })
        .returning({ id: referralAttribution.id })

      if (result.length > 0) {
        await applyBonusCredits(session.user.id, bonusAmount, tx)
        attributed = true
      }
    })

    if (attributed) {
      logger.info('Referral attribution created and bonus credits applied', {
        userId: session.user.id,
        campaignId: matchedCampaign.id,
        campaignName: matchedCampaign.name,
        utmSource: utmData.utm_source,
        utmCampaign: utmData.utm_campaign,
        utmContent: utmData.utm_content,
        bonusAmount,
      })
    } else {
      logger.info('User already attributed, skipping', { userId: session.user.id })
    }

    cookieStore.delete(COOKIE_NAME)

    return NextResponse.json({
      attributed,
      bonusAmount: attributed ? bonusAmount : undefined,
      reason: attributed ? undefined : 'already_attributed',
    })
  } catch (error) {
    logger.error('Attribution error', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -18,9 +18,9 @@ describe('Copilot Checkpoints Revert API Route', () => {
    setupCommonApiMocks()
    mockCryptoUuid()

    // Mock getBaseUrl to return localhost for tests
    vi.doMock('@/lib/core/utils/urls', () => ({
      getBaseUrl: vi.fn(() => 'http://localhost:3000'),
      getInternalApiBaseUrl: vi.fn(() => 'http://localhost:3000'),
      getBaseDomain: vi.fn(() => 'localhost:3000'),
      getEmailDomain: vi.fn(() => 'localhost:3000'),
    }))
@@ -11,7 +11,7 @@ import {
  createRequestTracker,
  createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { isUuidV4 } from '@/executor/constants'
@@ -99,7 +99,7 @@ export async function POST(request: NextRequest) {
    }

    const stateResponse = await fetch(
      `${getBaseUrl()}/api/workflows/${checkpoint.workflowId}/state`,
      `${getInternalApiBaseUrl()}/api/workflows/${checkpoint.workflowId}/state`,
      {
        method: 'PUT',
        headers: {
@@ -72,6 +72,7 @@ describe('MCP Serve Route', () => {
    }))
    vi.doMock('@/lib/core/utils/urls', () => ({
      getBaseUrl: () => 'http://localhost:3000',
      getInternalApiBaseUrl: () => 'http://localhost:3000',
    }))
    vi.doMock('@/lib/core/execution-limits', () => ({
      getMaxExecutionTimeout: () => 10_000,
@@ -22,7 +22,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { type AuthResult, checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('WorkflowMcpServeAPI')
@@ -285,7 +285,7 @@ async function handleToolsCall(
    )
  }

  const executeUrl = `${getBaseUrl()}/api/workflows/${tool.workflowId}/execute`
  const executeUrl = `${getInternalApiBaseUrl()}/api/workflows/${tool.workflowId}/execute`
  const headers: Record<string, string> = { 'Content-Type': 'application/json' }

  if (publicServerOwnerId) {
apps/sim/app/api/referral-code/redeem/route.ts (new file, 170 lines)
@@ -0,0 +1,170 @@
/**
 * POST /api/referral-code/redeem
 *
 * Redeem a referral/promo code to receive bonus credits.
 *
 * Body:
 * - code: string — The referral code to redeem
 *
 * Response: { redeemed: boolean, bonusAmount?: number, error?: string }
 *
 * Constraints:
 * - Enterprise users cannot redeem codes
 * - One redemption per user, ever (unique constraint on userId)
 * - One redemption per organization for team users (partial unique on organizationId)
 */

import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'

const logger = createLogger('ReferralCodeRedemption')

const RedeemCodeSchema = z.object({
  code: z.string().min(1, 'Code is required'),
})

export async function POST(request: Request) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = await request.json()
    const { code } = RedeemCodeSchema.parse(body)

    const subscription = await getHighestPrioritySubscription(session.user.id)

    if (subscription?.plan === 'enterprise') {
      return NextResponse.json({
        redeemed: false,
        error: 'Enterprise accounts cannot redeem referral codes',
      })
    }

    const isTeam = subscription?.plan === 'team'
    const orgId = isTeam ? subscription.referenceId : null

    const normalizedCode = code.trim().toUpperCase()

    const [campaign] = await db
      .select()
      .from(referralCampaigns)
      .where(and(eq(referralCampaigns.code, normalizedCode), eq(referralCampaigns.isActive, true)))
      .limit(1)

    if (!campaign) {
      logger.info('Invalid code redemption attempt', {
        userId: session.user.id,
        code: normalizedCode,
      })
      return NextResponse.json({ error: 'Invalid or expired code' }, { status: 404 })
    }

    const [existingUserAttribution] = await db
      .select({ id: referralAttribution.id })
      .from(referralAttribution)
      .where(eq(referralAttribution.userId, session.user.id))
      .limit(1)

    if (existingUserAttribution) {
      return NextResponse.json({
        redeemed: false,
        error: 'You have already redeemed a code',
      })
    }

    if (orgId) {
      const [existingOrgAttribution] = await db
        .select({ id: referralAttribution.id })
        .from(referralAttribution)
        .where(eq(referralAttribution.organizationId, orgId))
        .limit(1)

      if (existingOrgAttribution) {
        return NextResponse.json({
          redeemed: false,
          error: 'A code has already been redeemed for your organization',
        })
      }
    }

    const bonusAmount = Number(campaign.bonusCreditAmount)

    let redeemed = false
    await db.transaction(async (tx) => {
      const [existingStats] = await tx
        .select({ id: userStats.id })
        .from(userStats)
        .where(eq(userStats.userId, session.user.id))
        .limit(1)

      if (!existingStats) {
        await tx.insert(userStats).values({
          id: nanoid(),
          userId: session.user.id,
        })
      }

      const result = await tx
        .insert(referralAttribution)
        .values({
          id: nanoid(),
          userId: session.user.id,
          organizationId: orgId,
          campaignId: campaign.id,
          utmSource: null,
          utmMedium: null,
          utmCampaign: null,
          utmContent: null,
          referrerUrl: null,
          landingPage: null,
          bonusCreditAmount: bonusAmount.toString(),
        })
        .onConflictDoNothing()
        .returning({ id: referralAttribution.id })

      if (result.length > 0) {
        await applyBonusCredits(session.user.id, bonusAmount, tx)
        redeemed = true
      }
    })

    if (redeemed) {
      logger.info('Referral code redeemed', {
        userId: session.user.id,
        organizationId: orgId,
        code: normalizedCode,
        campaignId: campaign.id,
        campaignName: campaign.name,
        bonusAmount,
      })
    }

    if (!redeemed) {
      return NextResponse.json({
        redeemed: false,
        error: 'You have already redeemed a code',
      })
    }

    return NextResponse.json({
      redeemed: true,
      bonusAmount,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
    }
    logger.error('Referral code redemption error', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -6,7 +6,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import {
  type RegenerateStateInput,
  regenerateWorkflowStateIds,
@@ -115,15 +115,18 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    // Step 3: Save the workflow state using the existing state endpoint (like imports do)
    // Ensure variables in state are remapped for the new workflow as well
    const workflowStateWithVariables = { ...workflowState, variables: remappedVariables }
    const stateResponse = await fetch(`${getBaseUrl()}/api/workflows/${newWorkflowId}/state`, {
      method: 'PUT',
      headers: {
        'Content-Type': 'application/json',
        // Forward the session cookie for authentication
        cookie: request.headers.get('cookie') || '',
      },
      body: JSON.stringify(workflowStateWithVariables),
    })
    const stateResponse = await fetch(
      `${getInternalApiBaseUrl()}/api/workflows/${newWorkflowId}/state`,
      {
        method: 'PUT',
        headers: {
          'Content-Type': 'application/json',
          // Forward the session cookie for authentication
          cookie: request.headers.get('cookie') || '',
        },
        body: JSON.stringify(workflowStateWithVariables),
      }
    )

    if (!stateResponse.ok) {
      logger.error(`[${requestId}] Failed to save workflow state for template use`)
@@ -66,6 +66,12 @@
 * Credits:
 *   POST /api/v1/admin/credits - Issue credits to user (by userId or email)
 *
 * Referral Campaigns:
 *   GET /api/v1/admin/referral-campaigns - List campaigns (?active=true/false)
 *   POST /api/v1/admin/referral-campaigns - Create campaign
 *   GET /api/v1/admin/referral-campaigns/:id - Get campaign details
 *   PATCH /api/v1/admin/referral-campaigns/:id - Update campaign fields
 *
 * Access Control (Permission Groups):
 *   GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
 *   DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
@@ -97,6 +103,7 @@ export type {
  AdminOrganization,
  AdminOrganizationBillingSummary,
  AdminOrganizationDetail,
  AdminReferralCampaign,
  AdminSeatAnalytics,
  AdminSingleResponse,
  AdminSubscription,
@@ -111,6 +118,7 @@ export type {
  AdminWorkspaceMember,
  DbMember,
  DbOrganization,
  DbReferralCampaign,
  DbSubscription,
  DbUser,
  DbUserStats,
@@ -139,6 +147,7 @@ export {
  parseWorkflowVariables,
  toAdminFolder,
  toAdminOrganization,
  toAdminReferralCampaign,
  toAdminSubscription,
  toAdminUser,
  toAdminWorkflow,
apps/sim/app/api/v1/admin/referral-campaigns/[id]/route.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
/**
 * GET /api/v1/admin/referral-campaigns/:id
 *
 * Get a single referral campaign by ID.
 *
 * PATCH /api/v1/admin/referral-campaigns/:id
 *
 * Update campaign fields. All fields are optional.
 *
 * Body:
 * - name: string (non-empty) - Campaign name
 * - bonusCreditAmount: number (> 0) - Bonus credits in dollars
 * - isActive: boolean - Enable/disable the campaign
 * - code: string | null (min 6 chars, auto-uppercased, null to remove) - Redeemable code
 * - utmSource: string | null - UTM source match (null = wildcard)
 * - utmMedium: string | null - UTM medium match (null = wildcard)
 * - utmCampaign: string | null - UTM campaign match (null = wildcard)
 * - utmContent: string | null - UTM content match (null = wildcard)
 */

import { db } from '@sim/db'
import { referralCampaigns } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {
  badRequestResponse,
  internalErrorResponse,
  notFoundResponse,
  singleResponse,
} from '@/app/api/v1/admin/responses'
import { toAdminReferralCampaign } from '@/app/api/v1/admin/types'

const logger = createLogger('AdminReferralCampaignDetailAPI')

interface RouteParams {
  id: string
}

export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
  try {
    const { id: campaignId } = await context.params

    const [campaign] = await db
      .select()
      .from(referralCampaigns)
      .where(eq(referralCampaigns.id, campaignId))
      .limit(1)

    if (!campaign) {
      return notFoundResponse('Campaign')
    }

    logger.info(`Admin API: Retrieved referral campaign ${campaignId}`)

    return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
  } catch (error) {
    logger.error('Admin API: Failed to get referral campaign', { error })
    return internalErrorResponse('Failed to get referral campaign')
  }
})

export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
  try {
    const { id: campaignId } = await context.params
    const body = await request.json()

    const [existing] = await db
      .select()
      .from(referralCampaigns)
      .where(eq(referralCampaigns.id, campaignId))
      .limit(1)

    if (!existing) {
      return notFoundResponse('Campaign')
    }

    const updateData: Record<string, unknown> = { updatedAt: new Date() }

    if (body.name !== undefined) {
      if (typeof body.name !== 'string' || body.name.trim().length === 0) {
        return badRequestResponse('name must be a non-empty string')
      }
      updateData.name = body.name.trim()
    }

    if (body.bonusCreditAmount !== undefined) {
      if (
        typeof body.bonusCreditAmount !== 'number' ||
        !Number.isFinite(body.bonusCreditAmount) ||
        body.bonusCreditAmount <= 0
      ) {
        return badRequestResponse('bonusCreditAmount must be a positive number')
      }
      updateData.bonusCreditAmount = body.bonusCreditAmount.toString()
    }

    if (body.isActive !== undefined) {
      if (typeof body.isActive !== 'boolean') {
        return badRequestResponse('isActive must be a boolean')
      }
      updateData.isActive = body.isActive
    }

    if (body.code !== undefined) {
      if (body.code !== null) {
        if (typeof body.code !== 'string') {
          return badRequestResponse('code must be a string or null')
        }
        if (body.code.trim().length < 6) {
          return badRequestResponse('code must be at least 6 characters')
        }
      }
      updateData.code = body.code ? body.code.trim().toUpperCase() : null
    }

    for (const field of ['utmSource', 'utmMedium', 'utmCampaign', 'utmContent'] as const) {
      if (body[field] !== undefined) {
        if (body[field] !== null && typeof body[field] !== 'string') {
          return badRequestResponse(`${field} must be a string or null`)
        }
        updateData[field] = body[field] || null
      }
    }

    const [updated] = await db
      .update(referralCampaigns)
      .set(updateData)
      .where(eq(referralCampaigns.id, campaignId))
      .returning()

    logger.info(`Admin API: Updated referral campaign ${campaignId}`, {
      fields: Object.keys(updateData).filter((k) => k !== 'updatedAt'),
    })

    return singleResponse(toAdminReferralCampaign(updated, getBaseUrl()))
  } catch (error) {
    logger.error('Admin API: Failed to update referral campaign', { error })
    return internalErrorResponse('Failed to update referral campaign')
  }
})
apps/sim/app/api/v1/admin/referral-campaigns/route.ts (new file, 140 lines)
@@ -0,0 +1,140 @@
/**
 * GET /api/v1/admin/referral-campaigns
 *
 * List referral campaigns with optional filtering and pagination.
 *
 * Query Parameters:
 * - active: string (optional) - Filter by active status ('true' or 'false')
 * - limit: number (default: 50, max: 250)
 * - offset: number (default: 0)
 *
 * POST /api/v1/admin/referral-campaigns
 *
 * Create a new referral campaign.
 *
 * Body:
 * - name: string (required) - Campaign name
 * - bonusCreditAmount: number (required, > 0) - Bonus credits in dollars
 * - code: string | null (optional, min 6 chars, auto-uppercased) - Redeemable code
 * - utmSource: string | null (optional) - UTM source match (null = wildcard)
 * - utmMedium: string | null (optional) - UTM medium match (null = wildcard)
 * - utmCampaign: string | null (optional) - UTM campaign match (null = wildcard)
 * - utmContent: string | null (optional) - UTM content match (null = wildcard)
 */

import { db } from '@sim/db'
import { referralCampaigns } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { count, eq, type SQL } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {
  badRequestResponse,
  internalErrorResponse,
  listResponse,
  singleResponse,
} from '@/app/api/v1/admin/responses'
import {
  type AdminReferralCampaign,
  createPaginationMeta,
  parsePaginationParams,
  toAdminReferralCampaign,
} from '@/app/api/v1/admin/types'

const logger = createLogger('AdminReferralCampaignsAPI')

export const GET = withAdminAuth(async (request) => {
  const url = new URL(request.url)
  const { limit, offset } = parsePaginationParams(url)
  const activeFilter = url.searchParams.get('active')

  try {
    const conditions: SQL<unknown>[] = []
    if (activeFilter === 'true') {
      conditions.push(eq(referralCampaigns.isActive, true))
    } else if (activeFilter === 'false') {
      conditions.push(eq(referralCampaigns.isActive, false))
    }

    const whereClause = conditions.length > 0 ? conditions[0] : undefined
    const baseUrl = getBaseUrl()

    const [countResult, campaigns] = await Promise.all([
      db.select({ total: count() }).from(referralCampaigns).where(whereClause),
      db
        .select()
        .from(referralCampaigns)
        .where(whereClause)
        .orderBy(referralCampaigns.createdAt)
        .limit(limit)
        .offset(offset),
    ])

    const total = countResult[0].total
    const data: AdminReferralCampaign[] = campaigns.map((c) => toAdminReferralCampaign(c, baseUrl))
    const pagination = createPaginationMeta(total, limit, offset)

    logger.info(`Admin API: Listed ${data.length} referral campaigns (total: ${total})`)

    return listResponse(data, pagination)
  } catch (error) {
    logger.error('Admin API: Failed to list referral campaigns', { error })
    return internalErrorResponse('Failed to list referral campaigns')
  }
})

export const POST = withAdminAuth(async (request) => {
  try {
    const body = await request.json()
    const { name, code, utmSource, utmMedium, utmCampaign, utmContent, bonusCreditAmount } = body

    if (!name || typeof name !== 'string') {
      return badRequestResponse('name is required and must be a string')
    }

    if (
      typeof bonusCreditAmount !== 'number' ||
      !Number.isFinite(bonusCreditAmount) ||
      bonusCreditAmount <= 0
    ) {
      return badRequestResponse('bonusCreditAmount must be a positive number')
    }

    if (code !== undefined && code !== null) {
      if (typeof code !== 'string') {
        return badRequestResponse('code must be a string or null')
      }
      if (code.trim().length < 6) {
        return badRequestResponse('code must be at least 6 characters')
      }
    }

    const id = nanoid()

    const [campaign] = await db
      .insert(referralCampaigns)
      .values({
        id,
        name,
        code: code ? code.trim().toUpperCase() : null,
        utmSource: utmSource || null,
        utmMedium: utmMedium || null,
        utmCampaign: utmCampaign || null,
        utmContent: utmContent || null,
        bonusCreditAmount: bonusCreditAmount.toString(),
      })
      .returning()

    logger.info(`Admin API: Created referral campaign ${id}`, {
      name,
      code: campaign.code,
      bonusCreditAmount,
    })

    return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
  } catch (error) {
    logger.error('Admin API: Failed to create referral campaign', { error })
    return internalErrorResponse('Failed to create referral campaign')
  }
})
@@ -8,6 +8,7 @@
import type {
  member,
  organization,
  referralCampaigns,
  subscription,
  user,
  userStats,
@@ -31,6 +32,7 @@ export type DbOrganization = InferSelectModel<typeof organization>
export type DbSubscription = InferSelectModel<typeof subscription>
export type DbMember = InferSelectModel<typeof member>
export type DbUserStats = InferSelectModel<typeof userStats>
export type DbReferralCampaign = InferSelectModel<typeof referralCampaigns>

// =============================================================================
// Pagination
@@ -646,3 +648,49 @@ export interface AdminDeployResult {
export interface AdminUndeployResult {
  isDeployed: boolean
}

// =============================================================================
// Referral Campaign Types
// =============================================================================

export interface AdminReferralCampaign {
  id: string
  name: string
  code: string | null
  utmSource: string | null
  utmMedium: string | null
  utmCampaign: string | null
  utmContent: string | null
  bonusCreditAmount: string
  isActive: boolean
  signupUrl: string | null
  createdAt: string
  updatedAt: string
}

export function toAdminReferralCampaign(
  dbCampaign: DbReferralCampaign,
  baseUrl: string
): AdminReferralCampaign {
  const utmParams = new URLSearchParams()
  if (dbCampaign.utmSource) utmParams.set('utm_source', dbCampaign.utmSource)
  if (dbCampaign.utmMedium) utmParams.set('utm_medium', dbCampaign.utmMedium)
  if (dbCampaign.utmCampaign) utmParams.set('utm_campaign', dbCampaign.utmCampaign)
  if (dbCampaign.utmContent) utmParams.set('utm_content', dbCampaign.utmContent)
  const query = utmParams.toString()

  return {
    id: dbCampaign.id,
    name: dbCampaign.name,
    code: dbCampaign.code,
    utmSource: dbCampaign.utmSource,
    utmMedium: dbCampaign.utmMedium,
    utmCampaign: dbCampaign.utmCampaign,
    utmContent: dbCampaign.utmContent,
    bonusCreditAmount: dbCampaign.bonusCreditAmount,
    isActive: dbCampaign.isActive,
    signupUrl: query ? `${baseUrl}/signup?${query}` : null,
    createdAt: dbCampaign.createdAt.toISOString(),
    updatedAt: dbCampaign.updatedAt.toISOString(),
  }
}
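A minimal usage sketch of the mapper above. All values here are hypothetical, and `https://example.com` only stands in for whatever `getBaseUrl()` returns in a real deployment:

```ts
// Hypothetical campaign row: UTM-matched campaign with two fields set.
const campaignRow = {
  id: 'rc_1', name: 'Launch promo', code: null,
  utmSource: 'twitter', utmMedium: null, utmCampaign: 'launch', utmContent: null,
  bonusCreditAmount: '10', isActive: true,
  createdAt: new Date(), updatedAt: new Date(),
} as DbReferralCampaign

const adminView = toAdminReferralCampaign(campaignRow, 'https://example.com')
// adminView.signupUrl === 'https://example.com/signup?utm_source=twitter&utm_campaign=launch'
// A code-only campaign (all UTM fields null) would instead have signupUrl === null.
```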
@@ -238,6 +238,11 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
    finalSystemPrompt += currentTimeContext
  }

  if (generationType === 'cron-expression') {
    finalSystemPrompt +=
      '\n\nIMPORTANT: Return ONLY the raw cron expression (e.g., "0 9 * * 1-5"). Do NOT wrap it in markdown code blocks, backticks, or quotes. Do NOT include any explanation or text before or after the expression.'
  }

  if (generationType === 'json-object') {
    finalSystemPrompt +=
      '\n\nIMPORTANT: Return ONLY the raw JSON object. Do NOT wrap it in markdown code blocks (no ```json or ```). Do NOT include any explanation or text before or after the JSON. The response must start with { and end with }.'
@@ -12,7 +12,7 @@ import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/per

const logger = createLogger('WorkspaceBYOKKeysAPI')

const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral', 'exa'] as const
const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral'] as const

const UpsertKeySchema = z.object({
  providerId: z.enum(VALID_PROVIDERS),
@@ -1,7 +1,10 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { runPreDeployChecks } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-predeploy-checks'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('useDeployment')
@@ -35,6 +38,24 @@ export function useDeployment({
      return { success: true, shouldOpenModal: true }
    }

    const { blocks, edges, loops, parallels } = useWorkflowStore.getState()
    const liveBlocks = mergeSubblockState(blocks, workflowId)
    const checkResult = runPreDeployChecks({
      blocks: liveBlocks,
      edges,
      loops,
      parallels,
      workflowId,
    })
    if (!checkResult.passed) {
      addNotification({
        level: 'error',
        message: checkResult.error || 'Pre-deploy validation failed',
        workflowId,
      })
      return { success: false, shouldOpenModal: false }
    }

    setIsDeploying(true)
    try {
      const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
@@ -239,7 +239,12 @@ export const ComboBox = memo(function ComboBox({
   */
  const defaultOptionValue = useMemo(() => {
    if (defaultValue !== undefined) {
      return defaultValue
      // Validate that the default value exists in the available (filtered) options
      const defaultInOptions = evaluatedOptions.find((opt) => getOptionValue(opt) === defaultValue)
      if (defaultInOptions) {
        return defaultValue
      }
      // Default not available (e.g. provider disabled) — fall through to other fallbacks
    }

    // For model field, default to claude-sonnet-4-5 if available
@@ -4,6 +4,7 @@ import { Button, Combobox } from '@/components/emcn/components'
import {
  getCanonicalScopesForProvider,
  getProviderIdFromServiceId,
  getServiceConfigByProviderId,
  OAUTH_PROVIDERS,
  type OAuthProvider,
  type OAuthService,
@@ -26,6 +27,11 @@ const getProviderIcon = (providerName: OAuthProvider) => {
}

const getProviderName = (providerName: OAuthProvider) => {
  const serviceConfig = getServiceConfigByProviderId(providerName)
  if (serviceConfig) {
    return serviceConfig.name
  }

  const { baseProvider } = parseProvider(providerName)
  const baseProviderConfig = OAUTH_PROVIDERS[baseProvider]
@@ -54,7 +60,7 @@ export function ToolCredentialSelector({
  onChange,
  provider,
  requiredScopes = [],
  label = 'Select account',
  label,
  serviceId,
  disabled = false,
}: ToolCredentialSelectorProps) {
@@ -64,6 +70,7 @@ export function ToolCredentialSelector({
  const { activeWorkflowId } = useWorkflowRegistry()

  const selectedId = value || ''
  const effectiveLabel = label || `Select ${getProviderName(provider)} account`

  const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])

@@ -203,7 +210,7 @@ export function ToolCredentialSelector({
  selectedValue={selectedId}
  onChange={handleComboboxChange}
  onOpenChange={handleOpenChange}
  placeholder={label}
  placeholder={effectiveLabel}
  disabled={disabled}
  editable={true}
  filterOptions={!isForeign}
@@ -0,0 +1,186 @@
'use client'

import type React from 'react'
import { useRef, useState } from 'react'
import { ArrowLeftRight, ArrowUp } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { WandControlHandlers } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'

/**
 * Props for a generic parameter with label component
 */
export interface ParameterWithLabelProps {
  paramId: string
  title: string
  isRequired: boolean
  visibility: string
  wandConfig?: {
    enabled: boolean
    prompt?: string
    placeholder?: string
  }
  canonicalToggle?: {
    mode: 'basic' | 'advanced'
    disabled?: boolean
    onToggle?: () => void
  }
  disabled: boolean
  isPreview: boolean
  children: (wandControlRef: React.MutableRefObject<WandControlHandlers | null>) => React.ReactNode
}

/**
 * Generic wrapper component for parameters that manages wand state and renders label + input
 */
export function ParameterWithLabel({
  paramId,
  title,
  isRequired,
  visibility,
  wandConfig,
  canonicalToggle,
  disabled,
  isPreview,
  children,
}: ParameterWithLabelProps) {
  const [isSearchActive, setIsSearchActive] = useState(false)
  const [searchQuery, setSearchQuery] = useState('')
  const searchInputRef = useRef<HTMLInputElement>(null)
  const wandControlRef = useRef<WandControlHandlers | null>(null)

  const isWandEnabled = wandConfig?.enabled ?? false
  const showWand = isWandEnabled && !isPreview && !disabled

  const handleSearchClick = (): void => {
    setIsSearchActive(true)
    setTimeout(() => {
      searchInputRef.current?.focus()
    }, 0)
  }

  const handleSearchBlur = (): void => {
    if (!searchQuery.trim() && !wandControlRef.current?.isWandStreaming) {
      setIsSearchActive(false)
    }
  }

  const handleSearchChange = (value: string): void => {
    setSearchQuery(value)
  }

  const handleSearchSubmit = (): void => {
    if (searchQuery.trim() && wandControlRef.current) {
      wandControlRef.current.onWandTrigger(searchQuery)
      setSearchQuery('')
      setIsSearchActive(false)
    }
  }

  const handleSearchCancel = (): void => {
    setSearchQuery('')
    setIsSearchActive(false)
  }

  const isStreaming = wandControlRef.current?.isWandStreaming ?? false

  return (
    <div key={paramId} className='relative min-w-0 space-y-[6px]'>
      <div className='flex items-center justify-between gap-[6px] pl-[2px]'>
        <Label className='flex items-baseline gap-[6px] whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
          {title}
          {isRequired && visibility === 'user-only' && <span className='ml-0.5'>*</span>}
        </Label>
        <div className='flex min-w-0 flex-1 items-center justify-end gap-[6px]'>
          {showWand &&
            (!isSearchActive ? (
              <Button
                variant='active'
                className='-my-1 h-5 px-2 py-0 text-[11px]'
                onClick={handleSearchClick}
              >
                Generate
              </Button>
            ) : (
              <div className='-my-1 flex min-w-[120px] max-w-[280px] flex-1 items-center gap-[4px]'>
                <Input
                  ref={searchInputRef}
                  value={isStreaming ? 'Generating...' : searchQuery}
                  onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
                    handleSearchChange(e.target.value)
                  }
                  onBlur={(e: React.FocusEvent<HTMLInputElement>) => {
                    const relatedTarget = e.relatedTarget as HTMLElement | null
                    if (relatedTarget?.closest('button')) return
                    handleSearchBlur()
                  }}
                  onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => {
                    if (e.key === 'Enter' && searchQuery.trim() && !isStreaming) {
                      handleSearchSubmit()
                    } else if (e.key === 'Escape') {
                      handleSearchCancel()
                    }
                  }}
                  disabled={isStreaming}
                  className={cn(
                    'h-5 min-w-[80px] flex-1 text-[11px]',
                    isStreaming && 'text-muted-foreground'
                  )}
                  placeholder='Generate with AI...'
                />
                <Button
                  variant='tertiary'
                  disabled={!searchQuery.trim() || isStreaming}
                  onMouseDown={(e: React.MouseEvent) => {
                    e.preventDefault()
                    e.stopPropagation()
                  }}
                  onClick={(e: React.MouseEvent) => {
                    e.stopPropagation()
                    handleSearchSubmit()
                  }}
                  className='h-[20px] w-[20px] flex-shrink-0 p-0'
                >
                  <ArrowUp className='h-[12px] w-[12px]' />
                </Button>
              </div>
            ))}
          {canonicalToggle && !isPreview && (
            <Tooltip.Root>
              <Tooltip.Trigger asChild>
                <button
                  type='button'
                  className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
                  onClick={canonicalToggle.onToggle}
                  disabled={canonicalToggle.disabled || disabled}
                  aria-label={
                    canonicalToggle.mode === 'advanced'
                      ? 'Switch to selector'
                      : 'Switch to manual ID'
                  }
                >
                  <ArrowLeftRight
                    className={cn(
                      '!h-[12px] !w-[12px]',
                      canonicalToggle.mode === 'advanced'
                        ? 'text-[var(--text-primary)]'
                        : 'text-[var(--text-secondary)]'
                    )}
                  />
                </button>
              </Tooltip.Trigger>
              <Tooltip.Content side='top'>
                <p>
                  {canonicalToggle.mode === 'advanced'
                    ? 'Switch to selector'
                    : 'Switch to manual ID'}
                </p>
              </Tooltip.Content>
            </Tooltip.Root>
          )}
        </div>
      </div>
      <div className='relative w-full min-w-0'>{children(wandControlRef)}</div>
    </div>
  )
}
@@ -0,0 +1,105 @@
'use client'

import { useEffect, useRef } from 'react'
import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'

interface ToolSubBlockRendererProps {
  blockId: string
  subBlockId: string
  toolIndex: number
  subBlock: BlockSubBlockConfig
  effectiveParamId: string
  toolParams: Record<string, string> | undefined
  onParamChange: (toolIndex: number, paramId: string, value: string) => void
  disabled: boolean
  canonicalToggle?: {
    mode: 'basic' | 'advanced'
    disabled?: boolean
    onToggle?: () => void
  }
}

/**
 * SubBlock types whose store values are objects/arrays/non-strings.
 * tool.params stores strings (via JSON.stringify), so when syncing
 * back to the store we parse them to restore the native shape.
 */
const OBJECT_SUBBLOCK_TYPES = new Set(['file-upload', 'table', 'grouped-checkbox-list'])

/**
 * Bridges the subblock store with StoredTool.params via a synthetic store key,
 * then delegates all rendering to SubBlock for full parity.
 */
export function ToolSubBlockRenderer({
  blockId,
  subBlockId,
  toolIndex,
  subBlock,
  effectiveParamId,
  toolParams,
  onParamChange,
  disabled,
  canonicalToggle,
}: ToolSubBlockRendererProps) {
  const syntheticId = `${subBlockId}-tool-${toolIndex}-${effectiveParamId}`
  const toolParamValue = toolParams?.[effectiveParamId] ?? ''
  const isObjectType = OBJECT_SUBBLOCK_TYPES.has(subBlock.type)

  const syncedRef = useRef<string | null>(null)
  const onParamChangeRef = useRef(onParamChange)
  onParamChangeRef.current = onParamChange

  useEffect(() => {
    const unsub = useSubBlockStore.subscribe((state, prevState) => {
      const wfId = useWorkflowRegistry.getState().activeWorkflowId
      if (!wfId) return
      const newVal = state.workflowValues[wfId]?.[blockId]?.[syntheticId]
      const oldVal = prevState.workflowValues[wfId]?.[blockId]?.[syntheticId]
      if (newVal === oldVal) return
      const stringified =
        newVal == null ? '' : typeof newVal === 'string' ? newVal : JSON.stringify(newVal)
      if (stringified === syncedRef.current) return
      syncedRef.current = stringified
      onParamChangeRef.current(toolIndex, effectiveParamId, stringified)
    })
    return unsub
  }, [blockId, syntheticId, toolIndex, effectiveParamId])

  useEffect(() => {
    if (toolParamValue === syncedRef.current) return
    syncedRef.current = toolParamValue
    if (isObjectType && toolParamValue) {
      try {
        const parsed = JSON.parse(toolParamValue)
        if (typeof parsed === 'object' && parsed !== null) {
          useSubBlockStore.getState().setValue(blockId, syntheticId, parsed)
          return
        }
      } catch {}
    }
    useSubBlockStore.getState().setValue(blockId, syntheticId, toolParamValue)
  }, [toolParamValue, blockId, syntheticId, isObjectType])

  const visibility = subBlock.paramVisibility ?? 'user-or-llm'
  const isOptionalForUser = visibility !== 'user-only'

  const config = {
    ...subBlock,
    id: syntheticId,
    ...(isOptionalForUser && { required: false }),
  }

  return (
    <SubBlock
      blockId={blockId}
      config={config}
      isPreview={false}
      disabled={disabled}
      canonicalToggle={canonicalToggle}
      dependencyContext={toolParams}
    />
  )
}
@@ -2,37 +2,12 @@
 * @vitest-environment node
 */
import { describe, expect, it } from 'vitest'

interface StoredTool {
  type: string
  title?: string
  toolId?: string
  params?: Record<string, string>
  customToolId?: string
  schema?: any
  code?: string
  operation?: string
  usageControl?: 'auto' | 'force' | 'none'
}

const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
  return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}

const isCustomToolAlreadySelected = (
  selectedTools: StoredTool[],
  customToolId: string
): boolean => {
  return selectedTools.some(
    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
  )
}

const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
  return selectedTools.some(
    (tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
  )
}
import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
import {
  isCustomToolAlreadySelected,
  isMcpToolAlreadySelected,
  isWorkflowAlreadySelected,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/utils'

describe('isMcpToolAlreadySelected', () => {
  describe('basic functionality', () => {
File diff suppressed because it is too large
@@ -0,0 +1,31 @@
/**
 * Represents a tool selected and configured in the workflow
 *
 * @remarks
 * For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded.
 * Everything else (title, schema, code) is loaded dynamically from the database.
 * Legacy custom tools with inline schema/code are still supported for backwards compatibility.
 */
export interface StoredTool {
  /** Block type identifier */
  type: string
  /** Display title for the tool (optional for new custom tool format) */
  title?: string
  /** Direct tool ID for execution (optional for new custom tool format) */
  toolId?: string
  /** Parameter values configured by the user (optional for new custom tool format) */
  params?: Record<string, string>
  /** Whether the tool details are expanded in UI */
  isExpanded?: boolean
  /** Database ID for custom tools (new format - reference only) */
  customToolId?: string
  /** Tool schema for custom tools (legacy format - inline JSON schema) */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  schema?: Record<string, any>
  /** Implementation code for custom tools (legacy format - inline) */
  code?: string
  /** Selected operation for multi-operation tools */
  operation?: string
  /** Tool usage control mode for LLM */
  usageControl?: 'auto' | 'force' | 'none'
}
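For illustration, a minimal sketch of the two storage shapes the remarks describe; the IDs and values here are hypothetical, not taken from this change:

// New format: reference only, everything else is resolved from the database (hypothetical IDs)
const referenceOnly: StoredTool = { type: 'custom-tool', customToolId: 'ct_abc', usageControl: 'auto', isExpanded: false }
// Legacy format: schema and code stored inline on the tool itself
const legacyInline: StoredTool = { type: 'custom-tool', title: 'Lookup', schema: { type: 'object' }, code: 'return input', usageControl: 'auto' }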
@@ -0,0 +1,32 @@
import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'

/**
 * Checks if an MCP tool is already selected.
 */
export function isMcpToolAlreadySelected(selectedTools: StoredTool[], mcpToolId: string): boolean {
  return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}

/**
 * Checks if a custom tool is already selected.
 */
export function isCustomToolAlreadySelected(
  selectedTools: StoredTool[],
  customToolId: string
): boolean {
  return selectedTools.some(
    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
  )
}

/**
 * Checks if a workflow is already selected.
 */
export function isWorkflowAlreadySelected(
  selectedTools: StoredTool[],
  workflowId: string
): boolean {
  return selectedTools.some(
    (tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
  )
}
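A brief usage sketch of these helpers, assuming a hypothetical selection list (IDs invented for illustration):

const selectedTools: StoredTool[] = [
  { type: 'mcp', toolId: 'mcp_weather' },
  { type: 'workflow_input', title: 'Sub-workflow', params: { workflowId: 'wf_1' } },
]
isMcpToolAlreadySelected(selectedTools, 'mcp_weather') // true
isCustomToolAlreadySelected(selectedTools, 'ct_abc') // false
isWorkflowAlreadySelected(selectedTools, 'wf_2') // false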
@@ -3,7 +3,6 @@ import { isEqual } from 'lodash'
|
||||
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
|
||||
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
|
||||
import {
|
||||
CheckboxList,
|
||||
Code,
|
||||
@@ -69,13 +68,15 @@ interface SubBlockProps {
|
||||
isPreview?: boolean
|
||||
subBlockValues?: Record<string, any>
|
||||
disabled?: boolean
|
||||
fieldDiffStatus?: FieldDiffStatus
|
||||
allowExpandInPreview?: boolean
|
||||
canonicalToggle?: {
|
||||
mode: 'basic' | 'advanced'
|
||||
disabled?: boolean
|
||||
onToggle?: () => void
|
||||
}
|
||||
labelSuffix?: React.ReactNode
|
||||
/** Provides sibling values for dependency resolution in non-preview contexts (e.g. tool-input) */
|
||||
dependencyContext?: Record<string, unknown>
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -162,16 +163,14 @@ const getPreviewValue = (
|
||||
/**
|
||||
* Renders the label with optional validation and description tooltips.
|
||||
*
|
||||
* @remarks
|
||||
* Handles JSON validation indicators for code blocks and required field markers.
|
||||
* Includes inline AI generate button when wand is enabled.
|
||||
*
|
||||
* @param config - The sub-block configuration defining the label content
|
||||
* @param isValidJson - Whether the JSON content is valid (for code blocks)
|
||||
* @param subBlockValues - Current values of all subblocks for evaluating conditional requirements
|
||||
* @param wandState - Optional state and handlers for the AI wand feature
|
||||
* @param canonicalToggle - Optional canonical toggle metadata and handlers
|
||||
* @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled
|
||||
* @param wandState - State and handlers for the inline AI generate feature
|
||||
* @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
|
||||
* @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled (includes dependsOn gating)
|
||||
* @param copyState - State and handler for the copy-to-clipboard button
|
||||
* @param labelSuffix - Additional content rendered after the label text
|
||||
* @returns The label JSX element, or `null` for switch types or when no title is defined
|
||||
*/
|
||||
const renderLabel = (
|
||||
@@ -202,7 +201,8 @@ const renderLabel = (
|
||||
showCopyButton: boolean
|
||||
copied: boolean
|
||||
onCopy: () => void
|
||||
}
|
||||
},
|
||||
labelSuffix?: React.ReactNode
|
||||
): JSX.Element | null => {
|
||||
if (config.type === 'switch') return null
|
||||
if (!config.title) return null
|
||||
@@ -215,9 +215,10 @@ const renderLabel = (
|
||||
|
||||
return (
|
||||
<div className='flex items-center justify-between gap-[6px] pl-[2px]'>
|
||||
<Label className='flex items-center gap-[6px] whitespace-nowrap'>
|
||||
<Label className='flex items-baseline gap-[6px] whitespace-nowrap'>
|
||||
{config.title}
|
||||
{required && <span className='ml-0.5'>*</span>}
|
||||
{labelSuffix}
|
||||
{config.type === 'code' &&
|
||||
config.language === 'json' &&
|
||||
!isValidJson &&
|
||||
@@ -383,28 +384,25 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
|
||||
prevProps.isPreview === nextProps.isPreview &&
|
||||
valueEqual &&
|
||||
prevProps.disabled === nextProps.disabled &&
|
||||
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
|
||||
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
|
||||
canonicalToggleEqual
|
||||
canonicalToggleEqual &&
|
||||
prevProps.labelSuffix === nextProps.labelSuffix &&
|
||||
prevProps.dependencyContext === nextProps.dependencyContext
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders a single workflow sub-block input based on config.type.
|
||||
*
|
||||
* @remarks
|
||||
* Supports multiple input types including short-input, long-input, dropdown,
|
||||
* combobox, slider, table, code, switch, tool-input, and many more.
|
||||
* Handles preview mode, disabled states, and AI wand generation.
|
||||
*
|
||||
* @param blockId - The parent block identifier
|
||||
* @param config - Configuration defining the input type and properties
|
||||
* @param isPreview - Whether to render in preview mode
|
||||
* @param subBlockValues - Current values of all subblocks
|
||||
* @param disabled - Whether the input is disabled
|
||||
* @param fieldDiffStatus - Optional diff status for visual indicators
|
||||
* @param allowExpandInPreview - Whether to allow expanding in preview mode
|
||||
* @returns The rendered sub-block input component
|
||||
* @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
|
||||
* @param labelSuffix - Additional content rendered after the label text
|
||||
* @param dependencyContext - Sibling values for dependency resolution in non-preview contexts (e.g. tool-input)
|
||||
*/
|
||||
function SubBlockComponent({
|
||||
blockId,
|
||||
@@ -412,9 +410,10 @@ function SubBlockComponent({
|
||||
isPreview = false,
|
||||
subBlockValues,
|
||||
disabled = false,
|
||||
fieldDiffStatus,
|
||||
allowExpandInPreview,
|
||||
canonicalToggle,
|
||||
labelSuffix,
|
||||
dependencyContext,
|
||||
}: SubBlockProps): JSX.Element {
|
||||
const [isValidJson, setIsValidJson] = useState(true)
|
||||
const [isSearchActive, setIsSearchActive] = useState(false)
|
||||
@@ -423,7 +422,6 @@ function SubBlockComponent({
|
||||
const searchInputRef = useRef<HTMLInputElement>(null)
|
||||
const wandControlRef = useRef<WandControlHandlers | null>(null)
|
||||
|
||||
// Use webhook management hook when config has useWebhookUrl enabled
|
||||
const webhookManagement = useWebhookManagement({
|
||||
blockId,
|
||||
triggerId: undefined,
|
||||
@@ -510,10 +508,12 @@ function SubBlockComponent({
|
||||
| null
|
||||
| undefined
|
||||
|
||||
const contextValues = dependencyContext ?? (isPreview ? subBlockValues : undefined)
|
||||
|
||||
const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, {
|
||||
disabled,
|
||||
isPreview,
|
||||
previewContextValues: isPreview ? subBlockValues : undefined,
|
||||
previewContextValues: contextValues,
|
||||
})
|
||||
|
||||
const isDisabled = gatedDisabled
|
||||
@@ -797,7 +797,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -809,7 +809,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -821,7 +821,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -833,7 +833,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -845,7 +845,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -868,7 +868,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as any}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -880,7 +880,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as any}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -892,7 +892,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as any}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -917,7 +917,7 @@ function SubBlockComponent({
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as any}
|
||||
disabled={isDisabled}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -953,7 +953,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -987,7 +987,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as any}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -999,7 +999,7 @@ function SubBlockComponent({
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
previewContextValues={isPreview ? subBlockValues : undefined}
|
||||
previewContextValues={contextValues}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -1059,7 +1059,8 @@ function SubBlockComponent({
|
||||
showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
|
||||
copied,
|
||||
onCopy: handleCopy,
|
||||
}
|
||||
},
|
||||
labelSuffix
|
||||
)}
|
||||
{renderInput()}
|
||||
</div>
|
||||
|
||||
@@ -571,7 +571,6 @@ export function Editor() {
|
||||
isPreview={false}
|
||||
subBlockValues={subBlockState}
|
||||
disabled={!canEditBlock}
|
||||
fieldDiffStatus={undefined}
|
||||
allowExpandInPreview={false}
|
||||
canonicalToggle={
|
||||
isCanonicalSwap && canonicalMode && canonicalId
|
||||
@@ -635,7 +634,6 @@ export function Editor() {
|
||||
isPreview={false}
|
||||
subBlockValues={subBlockState}
|
||||
disabled={!canEditBlock}
|
||||
fieldDiffStatus={undefined}
|
||||
allowExpandInPreview={false}
|
||||
/>
|
||||
{index < advancedOnlySubBlocks.length - 1 && (
|
||||
|
||||
@@ -3,7 +3,6 @@ import {
|
||||
buildCanonicalIndex,
|
||||
evaluateSubBlockCondition,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockHiddenByHostedKey,
|
||||
isSubBlockVisibleForMode,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types'
|
||||
@@ -109,9 +108,6 @@ export function useEditorSubblockLayout(
|
||||
// Check required feature if specified - declarative feature gating
|
||||
if (!isSubBlockFeatureEnabled(block)) return false
|
||||
|
||||
// Hide tool API key fields when hosted key is available
|
||||
if (isSubBlockHiddenByHostedKey(block)) return false
|
||||
|
||||
// Special handling for trigger-config type (legacy trigger configuration UI)
|
||||
if (block.type === ('trigger-config' as SubBlockType)) {
|
||||
const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers'
|
||||
|
||||
@@ -88,21 +88,38 @@ export function useTerminalFilters() {
    let result = entries

    if (hasActiveFilters) {
      result = entries.filter((entry) => {
        // Block ID filter
        if (filters.blockIds.size > 0 && !filters.blockIds.has(entry.blockId)) {
          return false
        }
      // Determine which top-level entries pass the filters
      const visibleBlockIds = new Set<string>()
      for (const entry of entries) {
        if (entry.parentWorkflowBlockId) continue

        // Status filter
        if (filters.statuses.size > 0) {
        let passes = true
        if (filters.blockIds.size > 0 && !filters.blockIds.has(entry.blockId)) {
          passes = false
        }
        if (passes && filters.statuses.size > 0) {
          const isError = !!entry.error
          const hasStatus = isError ? filters.statuses.has('error') : filters.statuses.has('info')
          if (!hasStatus) return false
          if (!hasStatus) passes = false
        }
        if (passes) {
          visibleBlockIds.add(entry.blockId)
        }
      }

        return true
      })
      // Propagate visibility to child workflow entries (handles arbitrary nesting).
      // Keep iterating until no new children are discovered.
      let prevSize = 0
      while (visibleBlockIds.size !== prevSize) {
        prevSize = visibleBlockIds.size
        for (const entry of entries) {
          if (entry.parentWorkflowBlockId && visibleBlockIds.has(entry.parentWorkflowBlockId)) {
            visibleBlockIds.add(entry.blockId)
          }
        }
      }

      result = entries.filter((entry) => visibleBlockIds.has(entry.blockId))
    }

    // Sort by executionOrder (monotonically increasing integer from server)
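To make the propagation step concrete, a small sketch under assumed entry shapes (only the fields the loop reads are shown); the while loop reaches a fixed point once no new child block IDs are added:

// Hypothetical entries: a passing top-level parent, a direct child, and a grandchild two levels down
// entries = [
//   { blockId: 'parent' },
//   { blockId: 'child', parentWorkflowBlockId: 'parent' },
//   { blockId: 'grandchild', parentWorkflowBlockId: 'child' },
// ]
// Pass 1 adds 'child' (its parent is visible), pass 2 adds 'grandchild', pass 3 adds nothing and the loop exits,
// so visibleBlockIds ends as { 'parent', 'child', 'grandchild' }.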
@@ -339,7 +339,8 @@ const SubflowNodeRow = memo(function SubflowNodeRow({
|
||||
})
|
||||
|
||||
/**
|
||||
* Entry node component - dispatches to appropriate component based on node type
|
||||
* Entry node component - dispatches to appropriate component based on node type.
|
||||
* Handles recursive rendering for workflow nodes with arbitrarily nested children.
|
||||
*/
|
||||
const EntryNodeRow = memo(function EntryNodeRow({
|
||||
node,
|
||||
@@ -380,6 +381,98 @@ const EntryNodeRow = memo(function EntryNodeRow({
|
||||
)
|
||||
}
|
||||
|
||||
if (nodeType === 'workflow') {
|
||||
const { entry, children } = node
|
||||
const BlockIcon = getBlockIcon(entry.blockType)
|
||||
const hasError = Boolean(entry.error) || children.some((c) => c.entry.error)
|
||||
const bgColor = getBlockColor(entry.blockType)
|
||||
const nodeId = entry.id
|
||||
const isExpanded = expandedNodes.has(nodeId)
|
||||
const hasChildren = children.length > 0
|
||||
const isSelected = selectedEntryId === entry.id
|
||||
const isRunning = Boolean(entry.isRunning)
|
||||
const isCanceled = Boolean(entry.isCanceled)
|
||||
|
||||
return (
|
||||
<div className='flex min-w-0 flex-col'>
|
||||
{/* Workflow Block Header */}
|
||||
<div
|
||||
data-entry-id={entry.id}
|
||||
className={clsx(
|
||||
ROW_STYLES.base,
|
||||
'h-[26px]',
|
||||
isSelected ? ROW_STYLES.selected : ROW_STYLES.hover
|
||||
)}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
if (hasChildren) {
|
||||
onToggleNode(nodeId)
|
||||
}
|
||||
onSelectEntry(entry)
|
||||
}}
|
||||
>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
|
||||
<div
|
||||
className='flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center rounded-[4px]'
|
||||
style={{ background: bgColor }}
|
||||
>
|
||||
{BlockIcon && <BlockIcon className='h-[9px] w-[9px] text-white' />}
|
||||
</div>
|
||||
<span
|
||||
className={clsx(
|
||||
'min-w-0 truncate font-medium text-[13px]',
|
||||
hasError
|
||||
? 'text-[var(--text-error)]'
|
||||
: isSelected || isExpanded
|
||||
? 'text-[var(--text-primary)]'
|
||||
: 'text-[var(--text-tertiary)] group-hover:text-[var(--text-primary)]'
|
||||
)}
|
||||
>
|
||||
{entry.blockName}
|
||||
</span>
|
||||
{hasChildren && (
|
||||
<ChevronDown
|
||||
className={clsx(
|
||||
'h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]',
|
||||
!isExpanded && '-rotate-90'
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<span
|
||||
className={clsx(
|
||||
'flex-shrink-0 font-medium text-[13px]',
|
||||
!isRunning &&
|
||||
(isCanceled ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]')
|
||||
)}
|
||||
>
|
||||
<StatusDisplay
|
||||
isRunning={isRunning}
|
||||
isCanceled={isCanceled}
|
||||
formattedDuration={formatDuration(entry.durationMs, { precision: 2 }) ?? '-'}
|
||||
/>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Nested Child Workflow Blocks (recursive) */}
|
||||
{isExpanded && hasChildren && (
|
||||
<div className={ROW_STYLES.nested}>
|
||||
{children.map((child) => (
|
||||
<EntryNodeRow
|
||||
key={child.entry.id}
|
||||
node={child}
|
||||
selectedEntryId={selectedEntryId}
|
||||
onSelectEntry={onSelectEntry}
|
||||
expandedNodes={expandedNodes}
|
||||
onToggleNode={onToggleNode}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Regular block
|
||||
return (
|
||||
<BlockRow
|
||||
@@ -555,6 +648,8 @@ export const Terminal = memo(function Terminal() {
|
||||
const uniqueBlocks = useMemo(() => {
|
||||
const blocksMap = new Map<string, { blockId: string; blockName: string; blockType: string }>()
|
||||
allWorkflowEntries.forEach((entry) => {
|
||||
// Skip child workflow entries — they use synthetic IDs and shouldn't appear in filters
|
||||
if (entry.parentWorkflowBlockId) return
|
||||
if (!blocksMap.has(entry.blockId)) {
|
||||
blocksMap.set(entry.blockId, {
|
||||
blockId: entry.blockId,
|
||||
@@ -667,19 +762,22 @@ export const Terminal = memo(function Terminal() {
|
||||
|
||||
const newestExec = executionGroups[0]
|
||||
|
||||
// Collect all node IDs that should be expanded (subflows and their iterations)
|
||||
// Collect all expandable node IDs recursively (subflows, iterations, and workflow nodes)
|
||||
const nodeIdsToExpand: string[] = []
|
||||
for (const node of newestExec.entryTree) {
|
||||
if (node.nodeType === 'subflow' && node.children.length > 0) {
|
||||
nodeIdsToExpand.push(node.entry.id)
|
||||
// Also expand all iteration children
|
||||
for (const iterNode of node.children) {
|
||||
if (iterNode.nodeType === 'iteration') {
|
||||
nodeIdsToExpand.push(iterNode.entry.id)
|
||||
}
|
||||
const collectExpandableNodes = (nodes: EntryNode[]) => {
|
||||
for (const node of nodes) {
|
||||
if (node.children.length === 0) continue
|
||||
if (
|
||||
node.nodeType === 'subflow' ||
|
||||
node.nodeType === 'iteration' ||
|
||||
node.nodeType === 'workflow'
|
||||
) {
|
||||
nodeIdsToExpand.push(node.entry.id)
|
||||
collectExpandableNodes(node.children)
|
||||
}
|
||||
}
|
||||
}
|
||||
collectExpandableNodes(newestExec.entryTree)
|
||||
|
||||
if (nodeIdsToExpand.length > 0) {
|
||||
setExpandedNodes((prev) => {
|
||||
|
||||
@@ -120,10 +120,10 @@ export function isSubflowBlockType(blockType: string): boolean {
/**
 * Node type for the tree structure
 */
export type EntryNodeType = 'block' | 'subflow' | 'iteration'
export type EntryNodeType = 'block' | 'subflow' | 'iteration' | 'workflow'

/**
 * Entry node for tree structure - represents a block, subflow, or iteration
 * Entry node for tree structure - represents a block, subflow, iteration, or workflow
 */
export interface EntryNode {
  /** The console entry (for blocks) or synthetic entry (for subflows/iterations) */
@@ -175,12 +175,17 @@ interface IterationGroup {
 * Sorts by start time to ensure chronological order.
 */
function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
  // Separate regular blocks from iteration entries
  // Separate regular blocks from iteration entries and child workflow entries
  const regularBlocks: ConsoleEntry[] = []
  const iterationEntries: ConsoleEntry[] = []
  const childWorkflowEntries = new Map<string, ConsoleEntry[]>()

  for (const entry of entries) {
    if (entry.iterationType && entry.iterationCurrent !== undefined) {
    if (entry.parentWorkflowBlockId) {
      const existing = childWorkflowEntries.get(entry.parentWorkflowBlockId) || []
      existing.push(entry)
      childWorkflowEntries.set(entry.parentWorkflowBlockId, existing)
    } else if (entry.iterationType && entry.iterationCurrent !== undefined) {
      iterationEntries.push(entry)
    } else {
      regularBlocks.push(entry)
@@ -338,12 +343,53 @@ function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
    })
  }

  // Build nodes for regular blocks
  const regularNodes: EntryNode[] = regularBlocks.map((entry) => ({
    entry,
    children: [],
    nodeType: 'block' as const,
  }))
  /**
   * Recursively builds child nodes for workflow blocks.
   * Handles multi-level nesting where a child workflow block itself has children.
   */
  const buildWorkflowChildNodes = (parentBlockId: string): EntryNode[] => {
    const childEntries = childWorkflowEntries.get(parentBlockId)
    if (!childEntries || childEntries.length === 0) return []

    childEntries.sort((a, b) => {
      const aTime = new Date(a.startedAt || a.timestamp).getTime()
      const bTime = new Date(b.startedAt || b.timestamp).getTime()
      return aTime - bTime
    })

    return childEntries.map((child) => {
      const nestedChildren = buildWorkflowChildNodes(child.blockId)
      if (nestedChildren.length > 0) {
        return {
          entry: child,
          children: nestedChildren,
          nodeType: 'workflow' as const,
        }
      }
      return {
        entry: child,
        children: [],
        nodeType: 'block' as const,
      }
    })
  }

  // Build nodes for regular blocks, promoting workflow blocks with children to 'workflow' nodes
  const regularNodes: EntryNode[] = regularBlocks.map((entry) => {
    const childNodes = buildWorkflowChildNodes(entry.blockId)
    if (childNodes.length > 0) {
      return {
        entry,
        children: childNodes,
        nodeType: 'workflow' as const,
      }
    }
    return {
      entry,
      children: [],
      nodeType: 'block' as const,
    }
  })

  // Combine all nodes and sort by executionOrder ascending (oldest first, top-down)
  const allNodes = [...subflowNodes, ...regularNodes]
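As a rough illustration of the nesting behavior (entry shapes simplified, IDs invented):

// Suppose entries contain a top-level workflow block 'A', a child entry with parentWorkflowBlockId 'A'
// whose own blockId is 'B', and another entry with parentWorkflowBlockId 'B'.
// buildWorkflowChildNodes('A') then yields a 'workflow' node for B whose children hold the entry under B,
// and the top-level map promotes A itself to nodeType 'workflow' with that subtree as its children.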
@@ -15,7 +15,6 @@ import {
|
||||
evaluateSubBlockCondition,
|
||||
hasAdvancedValues,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockHiddenByHostedKey,
|
||||
isSubBlockVisibleForMode,
|
||||
resolveDependencyValue,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
@@ -829,7 +828,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
if (block.hidden) return false
|
||||
if (block.hideFromPreview) return false
|
||||
if (!isSubBlockFeatureEnabled(block)) return false
|
||||
if (isSubBlockHiddenByHostedKey(block)) return false
|
||||
|
||||
const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers'
|
||||
|
||||
|
||||
@@ -38,7 +38,11 @@ import { useCurrentWorkflowExecution, useExecutionStore } from '@/stores/executi
|
||||
import { useNotificationStore } from '@/stores/notifications'
|
||||
import { useVariablesStore } from '@/stores/panel'
|
||||
import { useEnvironmentStore } from '@/stores/settings/environment'
|
||||
import { useTerminalConsoleStore } from '@/stores/terminal'
|
||||
import {
|
||||
extractChildWorkflowEntries,
|
||||
hasChildTraceSpans,
|
||||
useTerminalConsoleStore,
|
||||
} from '@/stores/terminal'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
@@ -63,6 +67,7 @@ interface BlockEventHandlerConfig {
|
||||
executionIdRef: { current: string }
|
||||
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
|
||||
activeBlocksSet: Set<string>
|
||||
activeBlockRefCounts: Map<string, number>
|
||||
accumulatedBlockLogs: BlockLog[]
|
||||
accumulatedBlockStates: Map<string, BlockState>
|
||||
executedBlockIds: Set<string>
|
||||
@@ -309,6 +314,7 @@ export function useWorkflowExecution() {
|
||||
executionIdRef,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
activeBlockRefCounts,
|
||||
accumulatedBlockLogs,
|
||||
accumulatedBlockStates,
|
||||
executedBlockIds,
|
||||
@@ -328,9 +334,18 @@ export function useWorkflowExecution() {
|
||||
const updateActiveBlocks = (blockId: string, isActive: boolean) => {
|
||||
if (!workflowId) return
|
||||
if (isActive) {
|
||||
const count = activeBlockRefCounts.get(blockId) ?? 0
|
||||
activeBlockRefCounts.set(blockId, count + 1)
|
||||
activeBlocksSet.add(blockId)
|
||||
} else {
|
||||
activeBlocksSet.delete(blockId)
|
||||
const count = activeBlockRefCounts.get(blockId) ?? 1
|
||||
const next = count - 1
|
||||
if (next <= 0) {
|
||||
activeBlockRefCounts.delete(blockId)
|
||||
activeBlocksSet.delete(blockId)
|
||||
} else {
|
||||
activeBlockRefCounts.set(blockId, next)
|
||||
}
|
||||
}
|
||||
setActiveBlocks(workflowId, new Set(activeBlocksSet))
|
||||
}
|
||||
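The ref counting above appears intended to keep a block marked active while overlapping executions of the same block are still in flight; a minimal sketch of the intended bookkeeping, assuming the set and map shown in the handler config:

// Hypothetical sequence for two overlapping runs of the same block
updateActiveBlocks('block-1', true)  // count 1, block added to activeBlocksSet
updateActiveBlocks('block-1', true)  // count 2, still active
updateActiveBlocks('block-1', false) // count 1, block stays in activeBlocksSet
updateActiveBlocks('block-1', false) // count 0, block removed from activeBlocksSet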
@@ -506,6 +521,20 @@ export function useWorkflowExecution() {
|
||||
addConsoleEntry(data, data.output as NormalizedBlockOutput)
|
||||
}
|
||||
|
||||
// Extract child workflow trace spans into separate console entries
|
||||
if (data.blockType === 'workflow' && hasChildTraceSpans(data.output)) {
|
||||
const childEntries = extractChildWorkflowEntries({
|
||||
parentBlockId: data.blockId,
|
||||
executionId: executionIdRef.current,
|
||||
executionOrder: data.executionOrder,
|
||||
workflowId: workflowId!,
|
||||
childTraceSpans: data.output.childTraceSpans,
|
||||
})
|
||||
for (const entry of childEntries) {
|
||||
addConsole(entry)
|
||||
}
|
||||
}
|
||||
|
||||
if (onBlockCompleteCallback) {
|
||||
onBlockCompleteCallback(data.blockId, data.output).catch((error) => {
|
||||
logger.error('Error in onBlockComplete callback:', error)
|
||||
@@ -1280,6 +1309,7 @@ export function useWorkflowExecution() {
|
||||
}
|
||||
|
||||
const activeBlocksSet = new Set<string>()
|
||||
const activeBlockRefCounts = new Map<string, number>()
|
||||
const streamedContent = new Map<string, string>()
|
||||
const accumulatedBlockLogs: BlockLog[] = []
|
||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||
@@ -1292,6 +1322,7 @@ export function useWorkflowExecution() {
|
||||
executionIdRef,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
activeBlockRefCounts,
|
||||
accumulatedBlockLogs,
|
||||
accumulatedBlockStates,
|
||||
executedBlockIds,
|
||||
@@ -1902,6 +1933,7 @@ export function useWorkflowExecution() {
|
||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||
const executedBlockIds = new Set<string>()
|
||||
const activeBlocksSet = new Set<string>()
|
||||
const activeBlockRefCounts = new Map<string, number>()
|
||||
|
||||
try {
|
||||
const blockHandlers = buildBlockEventHandlers({
|
||||
@@ -1909,6 +1941,7 @@ export function useWorkflowExecution() {
|
||||
executionIdRef,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
activeBlockRefCounts,
|
||||
accumulatedBlockLogs,
|
||||
accumulatedBlockStates,
|
||||
executedBlockIds,
|
||||
@@ -2104,6 +2137,7 @@ export function useWorkflowExecution() {
|
||||
|
||||
const workflowEdges = useWorkflowStore.getState().edges
|
||||
const activeBlocksSet = new Set<string>()
|
||||
const activeBlockRefCounts = new Map<string, number>()
|
||||
const accumulatedBlockLogs: BlockLog[] = []
|
||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||
const executedBlockIds = new Set<string>()
|
||||
@@ -2115,6 +2149,7 @@ export function useWorkflowExecution() {
|
||||
executionIdRef,
|
||||
workflowEdges,
|
||||
activeBlocksSet,
|
||||
activeBlockRefCounts,
|
||||
accumulatedBlockLogs,
|
||||
accumulatedBlockStates,
|
||||
executedBlockIds,
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import type { ExecutionResult, StreamingExecution } from '@/executor/types'
|
||||
import { useExecutionStore } from '@/stores/execution'
|
||||
import { useTerminalConsoleStore } from '@/stores/terminal'
|
||||
import {
|
||||
extractChildWorkflowEntries,
|
||||
hasChildTraceSpans,
|
||||
useTerminalConsoleStore,
|
||||
} from '@/stores/terminal'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
@@ -39,6 +43,7 @@ export async function executeWorkflowWithFullLogging(
|
||||
const workflowEdges = useWorkflowStore.getState().edges
|
||||
|
||||
const activeBlocksSet = new Set<string>()
|
||||
const activeBlockRefCounts = new Map<string, number>()
|
||||
|
||||
const payload: any = {
|
||||
input: options.workflowInput,
|
||||
@@ -103,6 +108,8 @@ export async function executeWorkflowWithFullLogging(
|
||||
|
||||
switch (event.type) {
|
||||
case 'block:started': {
|
||||
const startCount = activeBlockRefCounts.get(event.data.blockId) ?? 0
|
||||
activeBlockRefCounts.set(event.data.blockId, startCount + 1)
|
||||
activeBlocksSet.add(event.data.blockId)
|
||||
setActiveBlocks(wfId, new Set(activeBlocksSet))
|
||||
|
||||
@@ -115,8 +122,14 @@ export async function executeWorkflowWithFullLogging(
|
||||
break
|
||||
}
|
||||
|
||||
case 'block:completed':
|
||||
activeBlocksSet.delete(event.data.blockId)
|
||||
case 'block:completed': {
|
||||
const completeCount = activeBlockRefCounts.get(event.data.blockId) ?? 1
|
||||
if (completeCount <= 1) {
|
||||
activeBlockRefCounts.delete(event.data.blockId)
|
||||
activeBlocksSet.delete(event.data.blockId)
|
||||
} else {
|
||||
activeBlockRefCounts.set(event.data.blockId, completeCount - 1)
|
||||
}
|
||||
setActiveBlocks(wfId, new Set(activeBlocksSet))
|
||||
|
||||
setBlockRunStatus(wfId, event.data.blockId, 'success')
|
||||
@@ -140,13 +153,34 @@ export async function executeWorkflowWithFullLogging(
|
||||
iterationContainerId: event.data.iterationContainerId,
|
||||
})
|
||||
|
||||
// Extract child workflow trace spans into separate console entries
|
||||
if (event.data.blockType === 'workflow' && hasChildTraceSpans(event.data.output)) {
|
||||
const childEntries = extractChildWorkflowEntries({
|
||||
parentBlockId: event.data.blockId,
|
||||
executionId,
|
||||
executionOrder: event.data.executionOrder,
|
||||
workflowId: activeWorkflowId,
|
||||
childTraceSpans: event.data.output.childTraceSpans,
|
||||
})
|
||||
for (const entry of childEntries) {
|
||||
addConsole(entry)
|
||||
}
|
||||
}
|
||||
|
||||
if (options.onBlockComplete) {
|
||||
options.onBlockComplete(event.data.blockId, event.data.output).catch(() => {})
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'block:error':
|
||||
activeBlocksSet.delete(event.data.blockId)
|
||||
case 'block:error': {
|
||||
const errorCount = activeBlockRefCounts.get(event.data.blockId) ?? 1
|
||||
if (errorCount <= 1) {
|
||||
activeBlockRefCounts.delete(event.data.blockId)
|
||||
activeBlocksSet.delete(event.data.blockId)
|
||||
} else {
|
||||
activeBlockRefCounts.set(event.data.blockId, errorCount - 1)
|
||||
}
|
||||
setActiveBlocks(wfId, new Set(activeBlocksSet))
|
||||
|
||||
setBlockRunStatus(wfId, event.data.blockId, 'error')
|
||||
@@ -171,6 +205,7 @@ export async function executeWorkflowWithFullLogging(
|
||||
iterationContainerId: event.data.iterationContainerId,
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
case 'execution:completed':
|
||||
executionResult = {
|
||||
|
||||
@@ -13,15 +13,15 @@ import {
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
} from '@/components/emcn'
|
||||
import { AnthropicIcon, ExaAIIcon, GeminiIcon, MistralIcon, OpenAIIcon } from '@/components/icons'
|
||||
import { AnthropicIcon, GeminiIcon, MistralIcon, OpenAIIcon } from '@/components/icons'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import {
|
||||
type BYOKKey,
|
||||
type BYOKProviderId,
|
||||
useBYOKKeys,
|
||||
useDeleteBYOKKey,
|
||||
useUpsertBYOKKey,
|
||||
} from '@/hooks/queries/byok-keys'
|
||||
import type { BYOKProviderId } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('BYOKSettings')
|
||||
|
||||
@@ -60,13 +60,6 @@ const PROVIDERS: {
|
||||
description: 'LLM calls and Knowledge Base OCR',
|
||||
placeholder: 'Enter your API key',
|
||||
},
|
||||
{
|
||||
id: 'exa',
|
||||
name: 'Exa',
|
||||
icon: ExaAIIcon,
|
||||
description: 'AI-powered search and research',
|
||||
placeholder: 'Enter your Exa API key',
|
||||
},
|
||||
]
|
||||
|
||||
function BYOKKeySkeleton() {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
export { CancelSubscription } from './cancel-subscription'
|
||||
export { CreditBalance } from './credit-balance'
|
||||
export { PlanCard, type PlanCardProps, type PlanFeature } from './plan-card'
|
||||
export { ReferralCode } from './referral-code'
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
export { ReferralCode } from './referral-code'
|
||||
@@ -0,0 +1,103 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Button, Input, Label } from '@/components/emcn'
|
||||
|
||||
const logger = createLogger('ReferralCode')
|
||||
|
||||
interface ReferralCodeProps {
|
||||
onRedeemComplete?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Inline referral/promo code entry field with redeem button.
|
||||
* One-time use per account — shows success or "already redeemed" state.
|
||||
*/
|
||||
export function ReferralCode({ onRedeemComplete }: ReferralCodeProps) {
|
||||
const [code, setCode] = useState('')
|
||||
const [isRedeeming, setIsRedeeming] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [success, setSuccess] = useState<{ bonusAmount: number } | null>(null)
|
||||
|
||||
const handleRedeem = async () => {
|
||||
const trimmed = code.trim()
|
||||
if (!trimmed || isRedeeming) return
|
||||
|
||||
setIsRedeeming(true)
|
||||
setError(null)
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/referral-code/redeem', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ code: trimmed }),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error || 'Failed to redeem code')
|
||||
}
|
||||
|
||||
if (data.redeemed) {
|
||||
setSuccess({ bonusAmount: data.bonusAmount })
|
||||
setCode('')
|
||||
onRedeemComplete?.()
|
||||
} else {
|
||||
setError(data.error || 'Code could not be redeemed')
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('Referral code redemption failed', { error: err })
|
||||
setError(err instanceof Error ? err.message : 'Failed to redeem code')
|
||||
} finally {
|
||||
setIsRedeeming(false)
|
||||
}
|
||||
}
|
||||
|
||||
if (success) {
|
||||
return (
|
||||
<div className='flex items-center justify-between'>
|
||||
<Label>Referral Code</Label>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>
|
||||
+${success.bonusAmount} credits applied
|
||||
</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='flex flex-col'>
|
||||
<div className='flex items-center justify-between gap-[12px]'>
|
||||
<Label className='shrink-0'>Referral Code</Label>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Input
|
||||
type='text'
|
||||
value={code}
|
||||
onChange={(e) => {
|
||||
setCode(e.target.value)
|
||||
setError(null)
|
||||
}}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter') handleRedeem()
|
||||
}}
|
||||
placeholder='Enter code'
|
||||
className='h-[32px] w-[140px] text-[12px]'
|
||||
disabled={isRedeeming}
|
||||
/>
|
||||
<Button
|
||||
variant='active'
|
||||
className='h-[32px] shrink-0 rounded-[6px] text-[12px]'
|
||||
onClick={handleRedeem}
|
||||
disabled={isRedeeming || !code.trim()}
|
||||
>
|
||||
{isRedeeming ? 'Redeeming...' : 'Redeem'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<div className='mt-[4px] min-h-[18px] text-right'>
|
||||
{error && <span className='text-[11px] text-[var(--text-error)]'>{error}</span>}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -17,6 +17,7 @@ import {
|
||||
CancelSubscription,
|
||||
CreditBalance,
|
||||
PlanCard,
|
||||
ReferralCode,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components'
|
||||
import {
|
||||
ENTERPRISE_PLAN_FEATURES,
|
||||
@@ -549,6 +550,10 @@ export function Subscription() {
|
||||
/>
|
||||
)}
|
||||
|
||||
{!subscription.isEnterprise && (
|
||||
<ReferralCode onRedeemComplete={() => refetchSubscription()} />
|
||||
)}
|
||||
|
||||
{/* Next Billing Date - hidden from team members */}
|
||||
{subscription.isPaid &&
|
||||
subscriptionData?.data?.periodEnd &&
|
||||
|
||||
@@ -4,12 +4,14 @@ import { useEffect } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { useReferralAttribution } from '@/hooks/use-referral-attribution'
|
||||
|
||||
const logger = createLogger('WorkspacePage')
|
||||
|
||||
export default function WorkspacePage() {
|
||||
const router = useRouter()
|
||||
const { data: session, isPending } = useSession()
|
||||
useReferralAttribution()
|
||||
|
||||
useEffect(() => {
|
||||
const redirectToFirstWorkspace = async () => {
|
||||
|
||||
@@ -2,11 +2,10 @@ import { createLogger } from '@sim/logger'
|
||||
import { AgentIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import { getApiKeyCondition } from '@/blocks/utils'
|
||||
import { getApiKeyCondition, getModelOptions } from '@/blocks/utils'
|
||||
import {
|
||||
getBaseModelProviders,
|
||||
getMaxTemperature,
|
||||
getProviderIcon,
|
||||
getReasoningEffortValuesForModel,
|
||||
getThinkingLevelsForModel,
|
||||
getVerbosityValuesForModel,
|
||||
@@ -18,7 +17,6 @@ import {
|
||||
providers,
|
||||
supportsTemperature,
|
||||
} from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('AgentBlock')
|
||||
@@ -121,21 +119,7 @@ Return ONLY the JSON array.`,
|
||||
placeholder: 'Type or select a model...',
|
||||
required: true,
|
||||
defaultValue: 'claude-sonnet-4-5',
|
||||
options: () => {
|
||||
const providersState = useProvidersStore.getState()
|
||||
const baseModels = providersState.providers.base.models
|
||||
const ollamaModels = providersState.providers.ollama.models
|
||||
const vllmModels = providersState.providers.vllm.models
|
||||
const openrouterModels = providersState.providers.openrouter.models
|
||||
const allModels = Array.from(
|
||||
new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
|
||||
)
|
||||
|
||||
return allModels.map((model) => {
|
||||
const icon = getProviderIcon(model)
|
||||
return { label: model, id: model, ...(icon && { icon }) }
|
||||
})
|
||||
},
|
||||
options: getModelOptions,
|
||||
},
|
||||
{
|
||||
id: 'vertexCredential',
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ChartBarIcon } from '@/components/icons'
|
||||
import type { BlockConfig, ParamType } from '@/blocks/types'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import {
|
||||
getModelOptions,
|
||||
getProviderCredentialSubBlocks,
|
||||
PROVIDER_CREDENTIAL_INPUTS,
|
||||
} from '@/blocks/utils'
|
||||
import type { ProviderId } from '@/providers/types'
|
||||
import { getBaseModelProviders, getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import { getBaseModelProviders } from '@/providers/utils'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('EvaluatorBlock')
|
||||
@@ -175,21 +178,7 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
|
||||
placeholder: 'Type or select a model...',
|
||||
required: true,
|
||||
defaultValue: 'claude-sonnet-4-5',
|
||||
options: () => {
|
||||
const providersState = useProvidersStore.getState()
|
||||
const baseModels = providersState.providers.base.models
|
||||
const ollamaModels = providersState.providers.ollama.models
|
||||
const vllmModels = providersState.providers.vllm.models
|
||||
const openrouterModels = providersState.providers.openrouter.models
|
||||
const allModels = Array.from(
|
||||
new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
|
||||
)
|
||||
|
||||
return allModels.map((model) => {
|
||||
const icon = getProviderIcon(model)
|
||||
return { label: model, id: model, ...(icon && { icon }) }
|
||||
})
|
||||
},
|
||||
options: getModelOptions,
|
||||
},
|
||||
...getProviderCredentialSubBlocks(),
|
||||
{
|
||||
|
||||
@@ -297,7 +297,6 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
placeholder: 'Enter your Exa API key',
|
||||
password: true,
|
||||
required: true,
|
||||
hideWhenHosted: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
|
||||
201
apps/sim/blocks/blocks/google_books.ts
Normal file
@@ -0,0 +1,201 @@
import { GoogleBooksIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'

export const GoogleBooksBlock: BlockConfig = {
  type: 'google_books',
  name: 'Google Books',
  description: 'Search and retrieve book information',
  authMode: AuthMode.ApiKey,
  longDescription:
    'Search for books using the Google Books API. Find volumes by title, author, ISBN, or keywords, and retrieve detailed information about specific books including descriptions, ratings, and publication details.',
  docsLink: 'https://docs.sim.ai/tools/google_books',
  category: 'tools',
  bgColor: '#E0E0E0',
  icon: GoogleBooksIcon,

  subBlocks: [
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        { label: 'Search Volumes', id: 'volume_search' },
        { label: 'Get Volume Details', id: 'volume_details' },
      ],
      value: () => 'volume_search',
    },
    {
      id: 'apiKey',
      title: 'API Key',
      type: 'short-input',
      password: true,
      placeholder: 'Enter your Google Books API key',
      required: true,
    },
    {
      id: 'query',
      title: 'Search Query',
      type: 'short-input',
      placeholder: 'e.g., intitle:harry potter inauthor:rowling',
      condition: { field: 'operation', value: 'volume_search' },
      required: { field: 'operation', value: 'volume_search' },
    },
    {
      id: 'filter',
      title: 'Filter',
      type: 'dropdown',
      options: [
        { label: 'None', id: '' },
        { label: 'Partial Preview', id: 'partial' },
        { label: 'Full Preview', id: 'full' },
        { label: 'Free eBooks', id: 'free-ebooks' },
        { label: 'Paid eBooks', id: 'paid-ebooks' },
        { label: 'All eBooks', id: 'ebooks' },
      ],
      condition: { field: 'operation', value: 'volume_search' },
      mode: 'advanced',
    },
    {
      id: 'printType',
      title: 'Print Type',
      type: 'dropdown',
      options: [
        { label: 'All', id: 'all' },
        { label: 'Books', id: 'books' },
        { label: 'Magazines', id: 'magazines' },
      ],
      value: () => 'all',
      condition: { field: 'operation', value: 'volume_search' },
      mode: 'advanced',
    },
    {
      id: 'orderBy',
      title: 'Order By',
      type: 'dropdown',
      options: [
        { label: 'Relevance', id: 'relevance' },
        { label: 'Newest', id: 'newest' },
      ],
      value: () => 'relevance',
      condition: { field: 'operation', value: 'volume_search' },
      mode: 'advanced',
    },
    {
      id: 'maxResults',
      title: 'Max Results',
      type: 'short-input',
      placeholder: 'Number of results (1-40)',
      condition: { field: 'operation', value: 'volume_search' },
      mode: 'advanced',
    },
    {
      id: 'startIndex',
      title: 'Start Index',
      type: 'short-input',
      placeholder: 'Starting index for pagination',
      condition: { field: 'operation', value: 'volume_search' },
      mode: 'advanced',
    },
    {
      id: 'langRestrict',
      title: 'Language',
      type: 'short-input',
      placeholder: 'ISO 639-1 code (e.g., en, es, fr)',
      condition: { field: 'operation', value: 'volume_search' },
      mode: 'advanced',
    },
    {
      id: 'volumeId',
      title: 'Volume ID',
      type: 'short-input',
      placeholder: 'Google Books volume ID',
      condition: { field: 'operation', value: 'volume_details' },
      required: { field: 'operation', value: 'volume_details' },
    },
    {
      id: 'projection',
      title: 'Projection',
      type: 'dropdown',
      options: [
        { label: 'Full', id: 'full' },
        { label: 'Lite', id: 'lite' },
      ],
      value: () => 'full',
      condition: { field: 'operation', value: 'volume_details' },
      mode: 'advanced',
    },
  ],

  tools: {
    access: ['google_books_volume_search', 'google_books_volume_details'],
    config: {
      tool: (params) => `google_books_${params.operation}`,
      params: (params) => {
        const { operation, ...rest } = params

        let maxResults: number | undefined
        if (params.maxResults) {
          maxResults = Number.parseInt(params.maxResults, 10)
          if (Number.isNaN(maxResults)) {
            maxResults = undefined
          }
        }

        let startIndex: number | undefined
        if (params.startIndex) {
          startIndex = Number.parseInt(params.startIndex, 10)
          if (Number.isNaN(startIndex)) {
            startIndex = undefined
          }
        }

        return {
          ...rest,
          maxResults,
          startIndex,
          filter: params.filter || undefined,
          printType: params.printType || undefined,
          orderBy: params.orderBy || undefined,
          projection: params.projection || undefined,
        }
      },
    },
  },

  inputs: {
    operation: { type: 'string', description: 'Operation to perform' },
    apiKey: { type: 'string', description: 'Google Books API key' },
    query: { type: 'string', description: 'Search query' },
    filter: { type: 'string', description: 'Filter by availability' },
    printType: { type: 'string', description: 'Print type filter' },
    orderBy: { type: 'string', description: 'Sort order' },
    maxResults: { type: 'string', description: 'Maximum number of results' },
    startIndex: { type: 'string', description: 'Starting index for pagination' },
    langRestrict: { type: 'string', description: 'Language restriction' },
    volumeId: { type: 'string', description: 'Volume ID for details' },
    projection: { type: 'string', description: 'Projection level' },
  },

  outputs: {
    totalItems: { type: 'number', description: 'Total number of matching results' },
    volumes: { type: 'json', description: 'List of matching volumes' },
    id: { type: 'string', description: 'Volume ID' },
    title: { type: 'string', description: 'Book title' },
    subtitle: { type: 'string', description: 'Book subtitle' },
    authors: { type: 'json', description: 'List of authors' },
    publisher: { type: 'string', description: 'Publisher name' },
    publishedDate: { type: 'string', description: 'Publication date' },
    description: { type: 'string', description: 'Book description' },
    pageCount: { type: 'number', description: 'Number of pages' },
    categories: { type: 'json', description: 'Book categories' },
    averageRating: { type: 'number', description: 'Average rating (1-5)' },
    ratingsCount: { type: 'number', description: 'Number of ratings' },
    language: { type: 'string', description: 'Language code' },
    previewLink: { type: 'string', description: 'Link to preview on Google Books' },
    infoLink: { type: 'string', description: 'Link to info page' },
    thumbnailUrl: { type: 'string', description: 'Book cover thumbnail URL' },
    isbn10: { type: 'string', description: 'ISBN-10 identifier' },
    isbn13: { type: 'string', description: 'ISBN-13 identifier' },
  },
}
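For example, a hypothetical search configuration would be translated by the config above roughly as follows (values illustrative only):

// user params: { operation: 'volume_search', apiKey: '...', query: 'intitle:dune', maxResults: '10', filter: '' }
// selected tool: 'google_books_volume_search'
// forwarded params: { apiKey: '...', query: 'intitle:dune', maxResults: 10, startIndex: undefined,
//                     filter: undefined, printType: undefined, orderBy: undefined, projection: undefined }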
@@ -1,8 +1,10 @@
|
||||
import { ShieldCheckIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import { getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import {
|
||||
getModelOptions,
|
||||
getProviderCredentialSubBlocks,
|
||||
PROVIDER_CREDENTIAL_INPUTS,
|
||||
} from '@/blocks/utils'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
export interface GuardrailsResponse extends ToolResponse {
|
||||
@@ -111,21 +113,7 @@ Return ONLY the regex pattern - no explanations, no quotes, no forward slashes,
|
||||
type: 'combobox',
|
||||
placeholder: 'Type or select a model...',
|
||||
required: true,
|
||||
options: () => {
|
||||
const providersState = useProvidersStore.getState()
|
||||
const baseModels = providersState.providers.base.models
|
||||
const ollamaModels = providersState.providers.ollama.models
|
||||
const vllmModels = providersState.providers.vllm.models
|
||||
const openrouterModels = providersState.providers.openrouter.models
|
||||
const allModels = Array.from(
|
||||
new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
|
||||
)
|
||||
|
||||
return allModels.map((model) => {
|
||||
const icon = getProviderIcon(model)
|
||||
return { label: model, id: model, ...(icon && { icon }) }
|
||||
})
|
||||
},
|
||||
options: getModelOptions,
|
||||
condition: {
|
||||
field: 'validationType',
|
||||
value: ['hallucination'],
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import { ConnectIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig } from '@/blocks/types'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import {
|
||||
getModelOptions,
|
||||
getProviderCredentialSubBlocks,
|
||||
PROVIDER_CREDENTIAL_INPUTS,
|
||||
} from '@/blocks/utils'
|
||||
import type { ProviderId } from '@/providers/types'
|
||||
import { getBaseModelProviders, getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers'
|
||||
import { getBaseModelProviders } from '@/providers/utils'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
interface RouterResponse extends ToolResponse {
|
||||
@@ -134,25 +137,6 @@ Respond with a JSON object containing:
|
||||
- reasoning: A brief explanation (1-2 sentences) of why you chose this route`
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to get model options for both router versions.
|
||||
*/
|
||||
const getModelOptions = () => {
|
||||
const providersState = useProvidersStore.getState()
|
||||
const baseModels = providersState.providers.base.models
|
||||
const ollamaModels = providersState.providers.ollama.models
|
||||
const vllmModels = providersState.providers.vllm.models
|
||||
const openrouterModels = providersState.providers.openrouter.models
|
||||
const allModels = Array.from(
|
||||
new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
|
||||
)
|
||||
|
||||
return allModels.map((model) => {
|
||||
const icon = getProviderIcon(model)
|
||||
return { label: model, id: model, ...(icon && { icon }) }
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Legacy Router Block (block-based routing).
|
||||
* Hidden from toolbar but still supported for existing workflows.
|
||||
|
||||
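Judging from the inline implementation removed above, the shared getModelOptions helper in @/blocks/utils presumably consolidates the provider stores into combobox options along these lines (an inferred sketch, not the actual helper):

const getModelOptions = () => {
  const { providers } = useProvidersStore.getState()
  // Merge and de-duplicate models from every provider source
  const allModels = Array.from(
    new Set([
      ...providers.base.models,
      ...providers.ollama.models,
      ...providers.vllm.models,
      ...providers.openrouter.models,
    ])
  )
  return allModels.map((model) => {
    const icon = getProviderIcon(model)
    return { label: model, id: model, ...(icon && { icon }) }
  })
}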
@@ -122,6 +122,25 @@ export const ScheduleBlock: BlockConfig = {
|
||||
required: true,
|
||||
mode: 'trigger',
|
||||
condition: { field: 'scheduleType', value: 'custom' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `You are an expert at writing cron expressions. Generate a valid cron expression based on the user's description.
|
||||
|
||||
Cron format: minute hour day-of-month month day-of-week
|
||||
- minute: 0-59
|
||||
- hour: 0-23
|
||||
- day-of-month: 1-31
|
||||
- month: 1-12
|
||||
- day-of-week: 0-7 (0 and 7 are Sunday)
|
||||
|
||||
Special characters: * (any), , (list), - (range), / (step)
|
||||
|
||||
{context}
|
||||
|
||||
Return ONLY the cron expression, nothing else. No explanation, no backticks, no quotes.`,
|
||||
placeholder: 'Describe your schedule (e.g., "every weekday at 9am")',
|
||||
generationType: 'cron-expression',
|
||||
},
|
||||
},
|
||||
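As a worked example of the format described in the prompt above, the description "every weekday at 9am" corresponds to the cron expression 0 9 * * 1-5.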
|
||||
{
|
||||
|
||||
@@ -604,7 +604,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
case 'send': {
|
||||
baseParams.text = text
|
||||
if (threadTs) {
|
||||
baseParams.thread_ts = threadTs
|
||||
baseParams.threadTs = threadTs
|
||||
}
|
||||
// files is the canonical param from attachmentFiles (basic) or files (advanced)
|
||||
const normalizedFiles = normalizeFileInput(files)
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { TranslateIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig } from '@/blocks/types'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import { getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import {
|
||||
getModelOptions,
|
||||
getProviderCredentialSubBlocks,
|
||||
PROVIDER_CREDENTIAL_INPUTS,
|
||||
} from '@/blocks/utils'
|
||||
|
||||
const getTranslationPrompt = (targetLanguage: string) =>
|
||||
`Translate the following text into ${targetLanguage || 'English'}. Output ONLY the translated text with no additional commentary, explanations, or notes.`
|
||||
@@ -38,18 +40,7 @@ export const TranslateBlock: BlockConfig = {
|
||||
type: 'combobox',
|
||||
placeholder: 'Type or select a model...',
|
||||
required: true,
|
||||
options: () => {
|
||||
const providersState = useProvidersStore.getState()
|
||||
const baseModels = providersState.providers.base.models
|
||||
const ollamaModels = providersState.providers.ollama.models
|
||||
const openrouterModels = providersState.providers.openrouter.models
|
||||
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
|
||||
|
||||
return allModels.map((model) => {
|
||||
const icon = getProviderIcon(model)
|
||||
return { label: model, id: model, ...(icon && { icon }) }
|
||||
})
|
||||
},
|
||||
options: getModelOptions,
|
||||
},
|
||||
...getProviderCredentialSubBlocks(),
|
||||
{
|
||||
|
||||
@@ -39,6 +39,7 @@ import { GitHubBlock, GitHubV2Block } from '@/blocks/blocks/github'
|
||||
import { GitLabBlock } from '@/blocks/blocks/gitlab'
|
||||
import { GmailBlock, GmailV2Block } from '@/blocks/blocks/gmail'
|
||||
import { GoogleSearchBlock } from '@/blocks/blocks/google'
|
||||
import { GoogleBooksBlock } from '@/blocks/blocks/google_books'
|
||||
import { GoogleCalendarBlock, GoogleCalendarV2Block } from '@/blocks/blocks/google_calendar'
|
||||
import { GoogleDocsBlock } from '@/blocks/blocks/google_docs'
|
||||
import { GoogleDriveBlock } from '@/blocks/blocks/google_drive'
|
||||
@@ -214,6 +215,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
gmail_v2: GmailV2Block,
|
||||
google_calendar: GoogleCalendarBlock,
|
||||
google_calendar_v2: GoogleCalendarV2Block,
|
||||
google_books: GoogleBooksBlock,
|
||||
google_docs: GoogleDocsBlock,
|
||||
google_drive: GoogleDriveBlock,
|
||||
google_forms: GoogleFormsBlock,
|
||||
|
||||
@@ -40,6 +40,7 @@ export type GenerationType =
|
||||
| 'neo4j-parameters'
|
||||
| 'timestamp'
|
||||
| 'timezone'
|
||||
| 'cron-expression'
|
||||
|
||||
export type SubBlockType =
|
||||
| 'short-input' // Single line input
|
||||
@@ -196,6 +197,8 @@ export interface SubBlockConfig {
|
||||
type: SubBlockType
|
||||
mode?: 'basic' | 'advanced' | 'both' | 'trigger' // Default is 'both' if not specified. 'trigger' means only shown in trigger mode
|
||||
canonicalParamId?: string
|
||||
/** Controls parameter visibility in agent/tool-input context */
|
||||
paramVisibility?: 'user-or-llm' | 'user-only' | 'llm-only' | 'hidden'
|
||||
required?:
|
||||
| boolean
|
||||
| {
|
||||
@@ -243,7 +246,6 @@ export interface SubBlockConfig {
|
||||
hidden?: boolean
|
||||
hideFromPreview?: boolean // Hide this subblock from the workflow block preview
|
||||
requiresFeature?: string // Environment variable name that must be truthy for this subblock to be visible
|
||||
hideWhenHosted?: boolean // Hide this subblock when running on hosted sim
|
||||
description?: string
|
||||
tooltip?: string // Tooltip text displayed via info icon next to the title
|
||||
value?: (params: Record<string, any>) => string
|
||||
|
||||
@@ -1,8 +1,32 @@
import { isHosted } from '@/lib/core/config/feature-flags'
import type { BlockOutput, OutputFieldDefinition, SubBlockConfig } from '@/blocks/types'
import { getHostedModels, getProviderFromModel, providers } from '@/providers/utils'
import {
getHostedModels,
getProviderFromModel,
getProviderIcon,
providers,
} from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'

/**
* Returns model options for combobox subblocks, combining all provider sources.
*/
export function getModelOptions() {
const providersState = useProvidersStore.getState()
const baseModels = providersState.providers.base.models
const ollamaModels = providersState.providers.ollama.models
const vllmModels = providersState.providers.vllm.models
const openrouterModels = providersState.providers.openrouter.models
const allModels = Array.from(
new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
)

return allModels.map((model) => {
const icon = getProviderIcon(model)
return { label: model, id: model, ...(icon && { icon }) }
})
}
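With the helper exported from `@/blocks/utils`, a block's model combobox can delegate option loading instead of reading the providers store directly. A minimal sketch of that wiring, mirroring the Translate block change later in this diff (the `id` shown is illustrative):

```ts
// Sketch: a combobox subBlock whose options come from the shared helper.
const modelSubBlock = {
  id: 'model', // illustrative id
  type: 'combobox',
  placeholder: 'Type or select a model...',
  required: true,
  options: getModelOptions,
}
```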

/**
* Checks if a field is included in the dependsOn config.
* Handles both simple array format and object format with all/any fields.

@@ -1157,6 +1157,21 @@ export function AirweaveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleBooksIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 478.633 540.068'>
|
||||
<path
|
||||
fill='#1C51A4'
|
||||
d='M449.059,218.231L245.519,99.538l-0.061,193.23c0.031,1.504-0.368,2.977-1.166,4.204c-0.798,1.258-1.565,1.995-2.915,2.547c-1.35,0.552-2.792,0.706-4.204,0.399c-1.412-0.307-2.7-1.043-3.713-2.117l-69.166-70.609l-69.381,70.179c-1.013,0.982-2.301,1.657-3.652,1.903c-1.381,0.246-2.792,0.092-4.081-0.491c-1.289-0.583-1.626-0.522-2.394-1.749c-0.767-1.197-1.197-2.608-1.197-4.081L85.031,6.007l-2.915-1.289C43.973-11.638,0,16.409,0,59.891v420.306c0,46.029,49.312,74.782,88.775,51.767l360.285-210.138C488.491,298.782,488.491,241.246,449.059,218.231z'
|
||||
/>
|
||||
<path
|
||||
fill='#80D7FB'
|
||||
d='M88.805,8.124c-2.179-1.289-4.419-2.363-6.659-3.345l0.123,288.663c0,1.442,0.43,2.854,1.197,4.081c0.767,1.197,1.872,2.148,3.161,2.731c1.289,0.583,2.7,0.736,4.081,0.491c1.381-0.246,2.639-0.921,3.652-1.903l69.749-69.688l69.811,69.749c1.013,1.074,2.301,1.81,3.713,2.117c1.412,0.307,2.884,0.153,4.204-0.399c1.319-0.552,2.455-1.565,3.253-2.792c0.798-1.258,1.197-2.731,1.166-4.204V99.998L88.805,8.124z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleDocsIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
|
||||
@@ -428,7 +428,7 @@ export class BlockExecutor {
|
||||
block: SerializedBlock,
|
||||
executionOrder: number
|
||||
): void {
|
||||
const blockId = node.id
|
||||
const blockId = node.metadata?.originalBlockId ?? node.id
|
||||
const blockName = block.metadata?.name ?? blockId
|
||||
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
|
||||
|
||||
@@ -456,7 +456,7 @@ export class BlockExecutor {
|
||||
executionOrder: number,
|
||||
endedAt: string
|
||||
): void {
|
||||
const blockId = node.id
|
||||
const blockId = node.metadata?.originalBlockId ?? node.id
|
||||
const blockName = block.metadata?.name ?? blockId
|
||||
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
|
||||
|
||||
|
||||
@@ -62,9 +62,12 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
await validateModelProvider(ctx.userId, model, ctx)
|
||||
|
||||
const providerId = getProviderFromModel(model)
|
||||
const formattedTools = await this.formatTools(ctx, filteredInputs.tools || [])
|
||||
const formattedTools = await this.formatTools(
|
||||
ctx,
|
||||
filteredInputs.tools || [],
|
||||
block.canonicalModes
|
||||
)
|
||||
|
||||
// Resolve skill metadata for progressive disclosure
|
||||
const skillInputs = filteredInputs.skills ?? []
|
||||
let skillMetadata: Array<{ name: string; description: string }> = []
|
||||
if (skillInputs.length > 0 && ctx.workspaceId) {
|
||||
@@ -221,7 +224,11 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
})
|
||||
}
|
||||
|
||||
private async formatTools(ctx: ExecutionContext, inputTools: ToolInput[]): Promise<any[]> {
|
||||
private async formatTools(
|
||||
ctx: ExecutionContext,
|
||||
inputTools: ToolInput[],
|
||||
canonicalModes?: Record<string, 'basic' | 'advanced'>
|
||||
): Promise<any[]> {
|
||||
if (!Array.isArray(inputTools)) return []
|
||||
|
||||
const filtered = inputTools.filter((tool) => {
|
||||
@@ -249,7 +256,7 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
if (tool.type === 'custom-tool' && (tool.schema || tool.customToolId)) {
|
||||
return await this.createCustomTool(ctx, tool)
|
||||
}
|
||||
return this.transformBlockTool(ctx, tool)
|
||||
return this.transformBlockTool(ctx, tool, canonicalModes)
|
||||
} catch (error) {
|
||||
logger.error(`[AgentHandler] Error creating tool:`, { tool, error })
|
||||
return null
|
||||
@@ -720,12 +727,17 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
private async transformBlockTool(ctx: ExecutionContext, tool: ToolInput) {
|
||||
private async transformBlockTool(
|
||||
ctx: ExecutionContext,
|
||||
tool: ToolInput,
|
||||
canonicalModes?: Record<string, 'basic' | 'advanced'>
|
||||
) {
|
||||
const transformedTool = await transformBlockTool(tool, {
|
||||
selectedOperation: tool.operation,
|
||||
getAllBlocks,
|
||||
getToolAsync: (toolId: string) => getToolAsync(toolId, ctx.workflowId),
|
||||
getTool,
|
||||
canonicalModes,
|
||||
})
|
||||
|
||||
if (transformedTool) {
|
||||
|
||||
@@ -148,4 +148,218 @@ describe('GenericBlockHandler', () => {
|
||||
)
|
||||
})
|
||||
|
||||
describe('Knowledge block cost tracking', () => {
|
||||
beforeEach(() => {
|
||||
// Set up knowledge block mock
|
||||
mockBlock = {
|
||||
...mockBlock,
|
||||
config: { tool: 'knowledge_search', params: {} },
|
||||
}
|
||||
|
||||
mockTool = {
|
||||
...mockTool,
|
||||
id: 'knowledge_search',
|
||||
name: 'Knowledge Search',
|
||||
}
|
||||
|
||||
mockGetTool.mockImplementation((toolId) => {
|
||||
if (toolId === 'knowledge_search') {
|
||||
return mockTool
|
||||
}
|
||||
return undefined
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should extract and restructure cost information from knowledge tools',
|
||||
async () => {
|
||||
const inputs = { query: 'test query' }
|
||||
const mockToolResponse = {
|
||||
success: true,
|
||||
output: {
|
||||
results: [],
|
||||
query: 'test query',
|
||||
totalResults: 0,
|
||||
cost: {
|
||||
input: 0.00001042,
|
||||
output: 0,
|
||||
total: 0.00001042,
|
||||
tokens: {
|
||||
input: 521,
|
||||
output: 0,
|
||||
total: 521,
|
||||
},
|
||||
model: 'text-embedding-3-small',
|
||||
pricing: {
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
mockExecuteTool.mockResolvedValue(mockToolResponse)
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
// Verify cost information is restructured correctly for enhanced logging
|
||||
expect(result).toEqual({
|
||||
results: [],
|
||||
query: 'test query',
|
||||
totalResults: 0,
|
||||
cost: {
|
||||
input: 0.00001042,
|
||||
output: 0,
|
||||
total: 0.00001042,
|
||||
},
|
||||
tokens: {
|
||||
input: 521,
|
||||
output: 0,
|
||||
total: 521,
|
||||
},
|
||||
model: 'text-embedding-3-small',
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should handle knowledge_upload_chunk cost information', async () => {
|
||||
// Update to upload_chunk tool
|
||||
mockBlock.config.tool = 'knowledge_upload_chunk'
|
||||
mockTool.id = 'knowledge_upload_chunk'
|
||||
mockTool.name = 'Knowledge Upload Chunk'
|
||||
|
||||
mockGetTool.mockImplementation((toolId) => {
|
||||
if (toolId === 'knowledge_upload_chunk') {
|
||||
return mockTool
|
||||
}
|
||||
return undefined
|
||||
})
|
||||
|
||||
const inputs = { content: 'test content' }
|
||||
const mockToolResponse = {
|
||||
success: true,
|
||||
output: {
|
||||
data: {
|
||||
id: 'chunk-123',
|
||||
content: 'test content',
|
||||
chunkIndex: 0,
|
||||
},
|
||||
message: 'Successfully uploaded chunk',
|
||||
documentId: 'doc-123',
|
||||
cost: {
|
||||
input: 0.00000521,
|
||||
output: 0,
|
||||
total: 0.00000521,
|
||||
tokens: {
|
||||
input: 260,
|
||||
output: 0,
|
||||
total: 260,
|
||||
},
|
||||
model: 'text-embedding-3-small',
|
||||
pricing: {
|
||||
input: 0.02,
|
||||
output: 0,
|
||||
updatedAt: '2025-07-10',
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
mockExecuteTool.mockResolvedValue(mockToolResponse)
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
// Verify cost information is restructured correctly
|
||||
expect(result).toEqual({
|
||||
data: {
|
||||
id: 'chunk-123',
|
||||
content: 'test content',
|
||||
chunkIndex: 0,
|
||||
},
|
||||
message: 'Successfully uploaded chunk',
|
||||
documentId: 'doc-123',
|
||||
cost: {
|
||||
input: 0.00000521,
|
||||
output: 0,
|
||||
total: 0.00000521,
|
||||
},
|
||||
tokens: {
|
||||
input: 260,
|
||||
output: 0,
|
||||
total: 260,
|
||||
},
|
||||
model: 'text-embedding-3-small',
|
||||
})
|
||||
})
|
||||
|
||||
it('should pass through output unchanged for knowledge tools without cost info', async () => {
|
||||
const inputs = { query: 'test query' }
|
||||
const mockToolResponse = {
|
||||
success: true,
|
||||
output: {
|
||||
results: [],
|
||||
query: 'test query',
|
||||
totalResults: 0,
|
||||
// No cost information
|
||||
},
|
||||
}
|
||||
|
||||
mockExecuteTool.mockResolvedValue(mockToolResponse)
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
// Should return original output without cost transformation
|
||||
expect(result).toEqual({
|
||||
results: [],
|
||||
query: 'test query',
|
||||
totalResults: 0,
|
||||
})
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'should process cost info for all tools (universal cost extraction)',
|
||||
async () => {
|
||||
mockBlock.config.tool = 'some_other_tool'
|
||||
mockTool.id = 'some_other_tool'
|
||||
|
||||
mockGetTool.mockImplementation((toolId) => {
|
||||
if (toolId === 'some_other_tool') {
|
||||
return mockTool
|
||||
}
|
||||
return undefined
|
||||
})
|
||||
|
||||
const inputs = { param: 'value' }
|
||||
const mockToolResponse = {
|
||||
success: true,
|
||||
output: {
|
||||
result: 'success',
|
||||
cost: {
|
||||
input: 0.001,
|
||||
output: 0.002,
|
||||
total: 0.003,
|
||||
tokens: { input: 100, output: 50, total: 150 },
|
||||
model: 'some-model',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
mockExecuteTool.mockResolvedValue(mockToolResponse)
|
||||
|
||||
const result = await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect(result).toEqual({
|
||||
result: 'success',
|
||||
cost: {
|
||||
input: 0.001,
|
||||
output: 0.002,
|
||||
total: 0.003,
|
||||
},
|
||||
tokens: { input: 100, output: 50, total: 150 },
|
||||
model: 'some-model',
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -97,7 +97,27 @@ export class GenericBlockHandler implements BlockHandler {
|
||||
throw error
|
||||
}
|
||||
|
||||
return result.output
|
||||
const output = result.output
|
||||
let cost = null
|
||||
|
||||
if (output?.cost) {
|
||||
cost = output.cost
|
||||
}
|
||||
|
||||
if (cost) {
|
||||
return {
|
||||
...output,
|
||||
cost: {
|
||||
input: cost.input,
|
||||
output: cost.output,
|
||||
total: cost.total,
|
||||
},
|
||||
tokens: cost.tokens,
|
||||
model: cost.model,
|
||||
}
|
||||
}
|
||||
|
||||
return output
|
||||
} catch (error: any) {
|
||||
if (!error.message || error.message === 'undefined (undefined)') {
|
||||
let errorMessage = `Block execution of ${tool?.name || block.config.tool} failed`
|
||||
|
||||
@@ -2,7 +2,7 @@ import { db } from '@sim/db'
|
||||
import { account } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
@@ -79,7 +79,7 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
const providerId = getProviderFromModel(routerConfig.model)
|
||||
|
||||
try {
|
||||
const url = new URL('/api/providers', getBaseUrl())
|
||||
const url = new URL('/api/providers', getInternalApiBaseUrl())
|
||||
if (ctx.userId) url.searchParams.set('userId', ctx.userId)
|
||||
|
||||
const messages = [{ role: 'user', content: routerConfig.prompt }]
|
||||
@@ -209,7 +209,7 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
const providerId = getProviderFromModel(routerConfig.model)
|
||||
|
||||
try {
|
||||
const url = new URL('/api/providers', getBaseUrl())
|
||||
const url = new URL('/api/providers', getInternalApiBaseUrl())
|
||||
if (ctx.userId) url.searchParams.set('userId', ctx.userId)
|
||||
|
||||
const messages = [{ role: 'user', content: routerConfig.context }]
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { generateInternalToken } from '@/lib/auth/internal'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getBaseUrl, getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { HTTP } from '@/executor/constants'
|
||||
|
||||
export async function buildAuthHeaders(): Promise<Record<string, string>> {
|
||||
@@ -16,7 +16,8 @@ export async function buildAuthHeaders(): Promise<Record<string, string>> {
|
||||
}
|
||||
|
||||
export function buildAPIUrl(path: string, params?: Record<string, string>): URL {
|
||||
const url = new URL(path, getBaseUrl())
|
||||
const baseUrl = path.startsWith('/api/') ? getInternalApiBaseUrl() : getBaseUrl()
|
||||
const url = new URL(path, baseUrl)
|
||||
|
||||
if (params) {
|
||||
for (const [key, value] of Object.entries(params)) {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { API_ENDPOINTS } from '@/stores/constants'
|
||||
import type { BYOKProviderId } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('BYOKKeysQueries')
|
||||
|
||||
export type BYOKProviderId = 'openai' | 'anthropic' | 'google' | 'mistral'
|
||||
|
||||
export interface BYOKKey {
|
||||
id: string
|
||||
providerId: BYOKProviderId
|
||||
|
||||
@@ -642,6 +642,10 @@ export function useDeployChildWorkflow() {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowKeys.deploymentStatus(variables.workflowId),
|
||||
})
|
||||
// Invalidate workflow state so tool input mappings refresh
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workflowKeys.state(variables.workflowId),
|
||||
})
|
||||
// Also invalidate deployment queries
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: deploymentKeys.info(variables.workflowId),
|
||||
|
||||
46
apps/sim/hooks/use-referral-attribution.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useRef } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
|
||||
const logger = createLogger('ReferralAttribution')
|
||||
|
||||
const COOKIE_NAME = 'sim_utm'
|
||||
|
||||
const TERMINAL_REASONS = new Set([
|
||||
'invalid_cookie',
|
||||
'no_utm_cookie',
|
||||
'no_matching_campaign',
|
||||
'already_attributed',
|
||||
])
|
||||
|
||||
/**
|
||||
* Fires a one-shot `POST /api/attribution` when a `sim_utm` cookie is present.
|
||||
* Retries on transient failures; stops on terminal outcomes.
|
||||
*/
|
||||
export function useReferralAttribution() {
|
||||
const calledRef = useRef(false)
|
||||
|
||||
useEffect(() => {
|
||||
if (calledRef.current) return
|
||||
if (!document.cookie.includes(COOKIE_NAME)) return
|
||||
|
||||
calledRef.current = true
|
||||
|
||||
fetch('/api/attribution', { method: 'POST' })
|
||||
.then((res) => res.json())
|
||||
.then((data) => {
|
||||
if (data.attributed) {
|
||||
logger.info('Referral attribution successful', { bonusAmount: data.bonusAmount })
|
||||
} else if (data.error || TERMINAL_REASONS.has(data.reason)) {
|
||||
logger.info('Referral attribution skipped', { reason: data.reason || data.error })
|
||||
} else {
|
||||
calledRef.current = false
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.warn('Referral attribution failed, will retry', { error: err })
|
||||
calledRef.current = false
|
||||
})
|
||||
}, [])
|
||||
}
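This diff does not show where the hook is mounted. A minimal sketch of how a client component could use it, assuming the `@/hooks` path alias (the component name and placement are hypothetical):

```tsx
'use client'

import type { ReactNode } from 'react'
import { useReferralAttribution } from '@/hooks/use-referral-attribution'

// Hypothetical mount point (not part of this diff): any client component rendered after sign-in.
export function WorkspaceShell({ children }: { children: ReactNode }) {
  // Fires POST /api/attribution once when the sim_utm cookie is present; retries on transient failures.
  useReferralAttribution()
  return <>{children}</>
}
```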
|
||||
@@ -7,10 +7,11 @@ import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getHostedModels } from '@/providers/models'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import type { BYOKProviderId } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('BYOKKeys')
|
||||
|
||||
export type BYOKProviderId = 'openai' | 'anthropic' | 'google' | 'mistral'
|
||||
|
||||
export interface BYOKKeyResult {
|
||||
apiKey: string
|
||||
isBYOK: true
|
||||
|
||||
@@ -25,9 +25,9 @@ export interface ModelUsageMetadata {
|
||||
}
|
||||
|
||||
/**
|
||||
* Metadata for 'fixed' category charges (e.g., tool cost breakdown)
|
||||
* Metadata for 'fixed' category charges (currently empty, extensible)
|
||||
*/
|
||||
export type FixedUsageMetadata = Record<string, unknown>
|
||||
export type FixedUsageMetadata = Record<string, never>
|
||||
|
||||
/**
|
||||
* Union type for all metadata types
|
||||
@@ -60,8 +60,6 @@ export interface LogFixedUsageParams {
|
||||
workspaceId?: string
|
||||
workflowId?: string
|
||||
executionId?: string
|
||||
/** Optional metadata (e.g., tool cost breakdown from API) */
|
||||
metadata?: FixedUsageMetadata
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -121,7 +119,7 @@ export async function logFixedUsage(params: LogFixedUsageParams): Promise<void>
|
||||
category: 'fixed',
|
||||
source: params.source,
|
||||
description: params.description,
|
||||
metadata: params.metadata ?? null,
|
||||
metadata: null,
|
||||
cost: params.cost.toString(),
|
||||
workspaceId: params.workspaceId ?? null,
|
||||
workflowId: params.workflowId ?? null,
|
||||
|
||||
64
apps/sim/lib/billing/credits/bonus.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { db } from '@sim/db'
|
||||
import { organization, userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import type { DbOrTx } from '@/lib/db/types'
|
||||
|
||||
const logger = createLogger('BonusCredits')
|
||||
|
||||
/**
|
||||
* Apply bonus credits to a user (e.g. referral bonuses, promotional codes).
|
||||
*
|
||||
* Detects the user's current plan and routes credits accordingly:
|
||||
* - Free/Pro: adds to `userStats.creditBalance` and increments `currentUsageLimit`
|
||||
* - Team/Enterprise: adds to `organization.creditBalance` and increments `orgUsageLimit`
|
||||
*
|
||||
* Uses direct increment (not recalculation) so it works correctly for free-tier
|
||||
* users where `setUsageLimitForCredits` would compute planBase=0 and skip the update.
|
||||
*
|
||||
* @param tx - Optional Drizzle transaction context. When provided, all DB writes
|
||||
* participate in the caller's transaction for atomicity.
|
||||
*/
|
||||
export async function applyBonusCredits(
|
||||
userId: string,
|
||||
amount: number,
|
||||
tx?: DbOrTx
|
||||
): Promise<void> {
|
||||
const dbCtx = tx ?? db
|
||||
const subscription = await getHighestPrioritySubscription(userId)
|
||||
const isTeamOrEnterprise = subscription?.plan === 'team' || subscription?.plan === 'enterprise'
|
||||
|
||||
if (isTeamOrEnterprise && subscription?.referenceId) {
|
||||
const orgId = subscription.referenceId
|
||||
|
||||
await dbCtx
|
||||
.update(organization)
|
||||
.set({
|
||||
creditBalance: sql`${organization.creditBalance} + ${amount}`,
|
||||
orgUsageLimit: sql`COALESCE(${organization.orgUsageLimit}, '0')::decimal + ${amount}`,
|
||||
})
|
||||
.where(eq(organization.id, orgId))
|
||||
|
||||
logger.info('Applied bonus credits to organization', {
|
||||
userId,
|
||||
organizationId: orgId,
|
||||
plan: subscription.plan,
|
||||
amount,
|
||||
})
|
||||
} else {
|
||||
await dbCtx
|
||||
.update(userStats)
|
||||
.set({
|
||||
creditBalance: sql`${userStats.creditBalance} + ${amount}`,
|
||||
currentUsageLimit: sql`COALESCE(${userStats.currentUsageLimit}, '0')::decimal + ${amount}`,
|
||||
})
|
||||
.where(eq(userStats.userId, userId))
|
||||
|
||||
logger.info('Applied bonus credits to user', {
|
||||
userId,
|
||||
plan: subscription?.plan || 'free',
|
||||
amount,
|
||||
})
|
||||
}
|
||||
}
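A minimal sketch of a caller, assuming a hypothetical referral flow that records the attribution and credits a $20 bonus inside one transaction (the amount and the surrounding write are assumptions, not taken from this diff):

```ts
import { db } from '@sim/db'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'

async function grantReferralBonus(referredUserId: string) {
  await db.transaction(async (tx) => {
    // ...record the attribution row here (hypothetical)...
    await applyBonusCredits(referredUserId, 20, tx) // bonus amount is illustrative
  })
}
```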
|
||||
@@ -220,6 +220,7 @@ export const env = createEnv({
|
||||
SOCKET_SERVER_URL: z.string().url().optional(), // WebSocket server URL for real-time features
|
||||
SOCKET_PORT: z.number().optional(), // Port for WebSocket server
|
||||
PORT: z.number().optional(), // Main application port
|
||||
INTERNAL_API_BASE_URL: z.string().optional(), // Optional internal base URL for server-side self-calls; must include protocol if set (e.g., http://sim-app.namespace.svc.cluster.local:3000)
|
||||
ALLOWED_ORIGINS: z.string().optional(), // CORS allowed origins
|
||||
|
||||
// OAuth Integration Credentials - All optional, enables third-party integrations
|
||||
|
||||
@@ -934,31 +934,6 @@ export const PlatformEvents = {
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Track hosted key throttled (rate limited)
|
||||
*/
|
||||
hostedKeyThrottled: (attrs: {
|
||||
toolId: string
|
||||
envVarName: string
|
||||
attempt: number
|
||||
maxRetries: number
|
||||
delayMs: number
|
||||
userId?: string
|
||||
workspaceId?: string
|
||||
workflowId?: string
|
||||
}) => {
|
||||
trackPlatformEvent('platform.hosted_key.throttled', {
|
||||
'tool.id': attrs.toolId,
|
||||
'hosted_key.env_var': attrs.envVarName,
|
||||
'throttle.attempt': attrs.attempt,
|
||||
'throttle.max_retries': attrs.maxRetries,
|
||||
'throttle.delay_ms': attrs.delayMs,
|
||||
...(attrs.userId && { 'user.id': attrs.userId }),
|
||||
...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
|
||||
...(attrs.workflowId && { 'workflow.id': attrs.workflowId }),
|
||||
})
|
||||
},
|
||||
|
||||
/**
|
||||
* Track chat deployed (workflow deployed as chat interface)
|
||||
*/
|
||||
|
||||
@@ -1,6 +1,19 @@
|
||||
import { getEnv } from '@/lib/core/config/env'
|
||||
import { isProd } from '@/lib/core/config/feature-flags'
|
||||
|
||||
function hasHttpProtocol(url: string): boolean {
|
||||
return /^https?:\/\//i.test(url)
|
||||
}
|
||||
|
||||
function normalizeBaseUrl(url: string): string {
|
||||
if (hasHttpProtocol(url)) {
|
||||
return url
|
||||
}
|
||||
|
||||
const protocol = isProd ? 'https://' : 'http://'
|
||||
return `${protocol}${url}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base URL of the application from NEXT_PUBLIC_APP_URL
|
||||
* This ensures webhooks, callbacks, and other integrations always use the correct public URL
|
||||
@@ -8,7 +21,7 @@ import { isProd } from '@/lib/core/config/feature-flags'
|
||||
* @throws Error if NEXT_PUBLIC_APP_URL is not configured
|
||||
*/
|
||||
export function getBaseUrl(): string {
|
||||
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL')
|
||||
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL')?.trim()
|
||||
|
||||
if (!baseUrl) {
|
||||
throw new Error(
|
||||
@@ -16,12 +29,26 @@ export function getBaseUrl(): string {
|
||||
)
|
||||
}
|
||||
|
||||
if (baseUrl.startsWith('http://') || baseUrl.startsWith('https://')) {
|
||||
return baseUrl
|
||||
return normalizeBaseUrl(baseUrl)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base URL used by server-side internal API calls.
|
||||
* Falls back to NEXT_PUBLIC_APP_URL when INTERNAL_API_BASE_URL is not set.
|
||||
*/
|
||||
export function getInternalApiBaseUrl(): string {
|
||||
const internalBaseUrl = getEnv('INTERNAL_API_BASE_URL')?.trim()
|
||||
if (!internalBaseUrl) {
|
||||
return getBaseUrl()
|
||||
}
|
||||
|
||||
const protocol = isProd ? 'https://' : 'http://'
|
||||
return `${protocol}${baseUrl}`
|
||||
if (!hasHttpProtocol(internalBaseUrl)) {
|
||||
throw new Error(
|
||||
'INTERNAL_API_BASE_URL must include protocol (http:// or https://), e.g. http://sim-app.default.svc.cluster.local:3000'
|
||||
)
|
||||
}
|
||||
|
||||
return internalBaseUrl
|
||||
}
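To summarize the resolution order, a short behavior sketch under assumed environment values (the hostnames are examples, not from this diff):

```ts
// NEXT_PUBLIC_APP_URL=sim.example.com              -> getBaseUrl() === 'https://sim.example.com' in prod
//                                                     ('http://sim.example.com' otherwise)
// INTERNAL_API_BASE_URL unset                      -> getInternalApiBaseUrl() === getBaseUrl()
// INTERNAL_API_BASE_URL=http://sim-app:3000        -> getInternalApiBaseUrl() === 'http://sim-app:3000'
// INTERNAL_API_BASE_URL=sim-app:3000 (no protocol) -> getInternalApiBaseUrl() throws
const searchUrl = `${getInternalApiBaseUrl()}/api/knowledge/search`
```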
|
||||
|
||||
/**
|
||||
|
||||
@@ -2,7 +2,7 @@ import { db } from '@sim/db'
|
||||
import { account } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { executeProviderRequest } from '@/providers'
|
||||
import { getProviderFromModel } from '@/providers/utils'
|
||||
@@ -61,7 +61,7 @@ async function queryKnowledgeBase(
|
||||
})
|
||||
|
||||
// Call the knowledge base search API directly
|
||||
const searchUrl = `${getBaseUrl()}/api/knowledge/search`
|
||||
const searchUrl = `${getInternalApiBaseUrl()}/api/knowledge/search`
|
||||
|
||||
const response = await fetch(searchUrl, {
|
||||
method: 'POST',
|
||||
|
||||
@@ -539,8 +539,8 @@ async function executeMistralOCRRequest(
|
||||
const isInternalRoute = url.startsWith('/')
|
||||
|
||||
if (isInternalRoute) {
|
||||
const { getBaseUrl } = await import('@/lib/core/utils/urls')
|
||||
url = `${getBaseUrl()}${url}`
|
||||
const { getInternalApiBaseUrl } = await import('@/lib/core/utils/urls')
|
||||
url = `${getInternalApiBaseUrl()}${url}`
|
||||
}
|
||||
|
||||
let headers =
|
||||
|
||||
@@ -11,7 +11,7 @@ import { and, eq, isNull, or, sql } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { isOrganizationOnTeamOrEnterprisePlan } from '@/lib/billing'
|
||||
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getOAuthToken, refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import type { GmailAttachment } from '@/tools/gmail/types'
|
||||
import { downloadAttachments, extractAttachmentInfo } from '@/tools/gmail/utils'
|
||||
@@ -691,7 +691,7 @@ async function processEmails(
|
||||
`[${requestId}] Sending ${config.includeRawEmail ? 'simplified + raw' : 'simplified'} email payload for ${email.id}`
|
||||
)
|
||||
|
||||
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
|
||||
@@ -7,7 +7,7 @@ import type { FetchMessageObject, MailboxLockObject } from 'imapflow'
|
||||
import { ImapFlow } from 'imapflow'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
||||
|
||||
const logger = createLogger('ImapPollingService')
|
||||
@@ -639,7 +639,7 @@ async function processEmails(
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
|
||||
@@ -12,7 +12,7 @@ import { htmlToText } from 'html-to-text'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { isOrganizationOnTeamOrEnterprisePlan } from '@/lib/billing'
|
||||
import { pollingIdempotency } from '@/lib/core/idempotency'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getOAuthToken, refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
||||
|
||||
@@ -601,7 +601,7 @@ async function processOutlookEmails(
|
||||
`[${requestId}] Processing email: ${email.subject} from ${email.from?.emailAddress?.address}`
|
||||
)
|
||||
|
||||
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
||||
|
||||
const logger = createLogger('RssPollingService')
|
||||
@@ -376,7 +376,7 @@ async function processRssItems(
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||
|
||||
const response = await fetch(webhookUrl, {
|
||||
method: 'POST',
|
||||
|
||||
@@ -645,6 +645,18 @@ describe('Workflow Normalization Utilities', () => {
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['signingSecret'])
|
||||
})
|
||||
|
||||
it.concurrent('should exclude synthetic tool-input subBlock IDs', () => {
|
||||
const ids = [
|
||||
'toolConfig',
|
||||
'toolConfig-tool-0-query',
|
||||
'toolConfig-tool-0-url',
|
||||
'toolConfig-tool-1-status',
|
||||
'systemPrompt',
|
||||
]
|
||||
const result = filterSubBlockIds(ids)
|
||||
expect(result).toEqual(['systemPrompt', 'toolConfig'])
|
||||
})
|
||||
})
|
||||
|
||||
describe('normalizeTriggerConfigValues', () => {
|
||||
|
||||
@@ -411,7 +411,14 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
|
||||
}
|
||||
|
||||
/**
|
||||
* Filters subBlock IDs to exclude system and trigger runtime subBlocks.
|
||||
* Pattern matching synthetic subBlock IDs created by ToolSubBlockRenderer.
|
||||
* These IDs follow the format `{subBlockId}-tool-{index}-{paramId}` and are
|
||||
* mirrors of values already stored in toolConfig.value.tools[N].params.
|
||||
*/
|
||||
const SYNTHETIC_TOOL_SUBBLOCK_RE = /-tool-\d+-/
|
||||
|
||||
/**
|
||||
* Filters subBlock IDs to exclude system, trigger runtime, and synthetic tool subBlocks.
|
||||
*
|
||||
* @param subBlockIds - Array of subBlock IDs to filter
|
||||
* @returns Filtered and sorted array of subBlock IDs
|
||||
@@ -422,6 +429,7 @@ export function filterSubBlockIds(subBlockIds: string[]): string[] {
|
||||
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
|
||||
if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
|
||||
return false
|
||||
if (SYNTHETIC_TOOL_SUBBLOCK_RE.test(id)) return false
|
||||
return true
|
||||
})
|
||||
.sort()
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
|
||||
export type CanonicalMode = 'basic' | 'advanced'
|
||||
@@ -271,12 +270,3 @@ export function isSubBlockFeatureEnabled(subBlock: SubBlockConfig): boolean {
|
||||
if (!subBlock.requiresFeature) return true
|
||||
return isTruthy(getEnv(subBlock.requiresFeature))
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a subblock should be hidden because we're running on hosted Sim.
|
||||
* Used for tool API key fields that should be hidden when Sim provides hosted keys.
|
||||
*/
|
||||
export function isSubBlockHiddenByHostedKey(subBlock: SubBlockConfig): boolean {
|
||||
if (!subBlock.hideWhenHosted) return false
|
||||
return isHosted
|
||||
}
|
||||
|
||||
@@ -112,6 +112,8 @@ export interface ProviderToolConfig {
|
||||
required: string[]
|
||||
}
|
||||
usageControl?: ToolUsageControl
|
||||
/** Block-level params transformer — converts SubBlock values to tool-ready params */
|
||||
paramsTransform?: (params: Record<string, any>) => Record<string, any>
|
||||
}
|
||||
|
||||
export interface Message {
|
||||
|
||||
@@ -4,6 +4,12 @@ import type { ChatCompletionChunk } from 'openai/resources/chat/completions'
|
||||
import type { CompletionUsage } from 'openai/resources/completions'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
type CanonicalGroup,
|
||||
getCanonicalValues,
|
||||
isCanonicalPair,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { isCustomTool } from '@/executor/constants'
|
||||
import {
|
||||
getComputerUseModels,
|
||||
@@ -437,9 +443,10 @@ export async function transformBlockTool(
|
||||
getAllBlocks: () => any[]
|
||||
getTool: (toolId: string) => any
|
||||
getToolAsync?: (toolId: string) => Promise<any>
|
||||
canonicalModes?: Record<string, 'basic' | 'advanced'>
|
||||
}
|
||||
): Promise<ProviderToolConfig | null> {
|
||||
const { selectedOperation, getAllBlocks, getTool, getToolAsync } = options
|
||||
const { selectedOperation, getAllBlocks, getTool, getToolAsync, canonicalModes } = options
|
||||
|
||||
const blockDef = getAllBlocks().find((b: any) => b.type === block.type)
|
||||
if (!blockDef) {
|
||||
@@ -516,12 +523,66 @@ export async function transformBlockTool(
|
||||
uniqueToolId = `${toolConfig.id}_${userProvidedParams.knowledgeBaseId}`
|
||||
}
|
||||
|
||||
const blockParamsFn = blockDef?.tools?.config?.params as
|
||||
| ((p: Record<string, any>) => Record<string, any>)
|
||||
| undefined
|
||||
const blockInputDefs = blockDef?.inputs as Record<string, any> | undefined
|
||||
|
||||
const canonicalGroups: CanonicalGroup[] = blockDef?.subBlocks
|
||||
? Object.values(buildCanonicalIndex(blockDef.subBlocks).groupsById).filter(isCanonicalPair)
|
||||
: []
|
||||
|
||||
const needsTransform = blockParamsFn || blockInputDefs || canonicalGroups.length > 0
|
||||
const paramsTransform = needsTransform
|
||||
? (params: Record<string, any>): Record<string, any> => {
|
||||
let result = { ...params }
|
||||
|
||||
for (const group of canonicalGroups) {
|
||||
const { basicValue, advancedValue } = getCanonicalValues(group, result)
|
||||
const scopedKey = `${block.type}:${group.canonicalId}`
|
||||
const pairMode = canonicalModes?.[scopedKey] ?? 'basic'
|
||||
const chosen = pairMode === 'advanced' ? advancedValue : basicValue
|
||||
|
||||
const sourceIds = [group.basicId, ...group.advancedIds].filter(Boolean) as string[]
|
||||
sourceIds.forEach((id) => delete result[id])
|
||||
|
||||
if (chosen !== undefined) {
|
||||
result[group.canonicalId] = chosen
|
||||
}
|
||||
}
|
||||
|
||||
if (blockParamsFn) {
|
||||
const transformed = blockParamsFn(result)
|
||||
result = { ...result, ...transformed }
|
||||
}
|
||||
|
||||
if (blockInputDefs) {
|
||||
for (const [key, schema] of Object.entries(blockInputDefs)) {
|
||||
const value = result[key]
|
||||
if (typeof value === 'string' && value.trim().length > 0) {
|
||||
const inputType = typeof schema === 'object' ? schema.type : schema
|
||||
if (inputType === 'json' || inputType === 'array') {
|
||||
try {
|
||||
result[key] = JSON.parse(value.trim())
|
||||
} catch {
|
||||
// Not valid JSON — keep as string
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
: undefined
|
||||
|
||||
return {
|
||||
id: uniqueToolId,
|
||||
name: toolName,
|
||||
description: toolDescription,
|
||||
params: userProvidedParams,
|
||||
parameters: llmSchema,
|
||||
paramsTransform,
|
||||
}
|
||||
}
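To make the generated `paramsTransform` concrete, a small hypothetical walk-through (the subBlock ids and block type below are invented for illustration; only the `block.type:canonicalId` key format comes from the code above):

```ts
// Hypothetical canonical pair: basic subBlock 'messageBasic' and advanced
// subBlock 'messageAdvanced' collapse into canonical id 'message'.
const canonicalModes = { 'slack_v2:message': 'advanced' as const }

// paramsTransform({ messageBasic: 'hi', messageAdvanced: '<b>hi</b>' })
//   deletes both source keys and keeps the value for the selected mode:
//   -> { message: '<b>hi</b>' }
```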
|
||||
|
||||
@@ -1028,7 +1089,11 @@ export function getMaxOutputTokensForModel(model: string): number {
|
||||
* Prepare tool execution parameters, separating tool parameters from system parameters
|
||||
*/
|
||||
export function prepareToolExecution(
|
||||
tool: { params?: Record<string, any>; parameters?: Record<string, any> },
|
||||
tool: {
|
||||
params?: Record<string, any>
|
||||
parameters?: Record<string, any>
|
||||
paramsTransform?: (params: Record<string, any>) => Record<string, any>
|
||||
},
|
||||
llmArgs: Record<string, any>,
|
||||
request: {
|
||||
workflowId?: string
|
||||
@@ -1045,8 +1110,15 @@ export function prepareToolExecution(
|
||||
toolParams: Record<string, any>
|
||||
executionParams: Record<string, any>
|
||||
} {
|
||||
// Use centralized merge logic from tools/params
|
||||
const toolParams = mergeToolParameters(tool.params || {}, llmArgs) as Record<string, any>
|
||||
let toolParams = mergeToolParameters(tool.params || {}, llmArgs) as Record<string, any>
|
||||
|
||||
if (tool.paramsTransform) {
|
||||
try {
|
||||
toolParams = tool.paramsTransform(toolParams)
|
||||
} catch (err) {
|
||||
logger.warn('paramsTransform failed, using raw params', { error: err })
|
||||
}
|
||||
}
|
||||
|
||||
const executionParams = {
|
||||
...toolParams,
|
||||
|
||||
@@ -30,8 +30,8 @@ export const vertexProvider: ProviderConfig = {
|
||||
executeRequest: async (
|
||||
request: ProviderRequest
|
||||
): Promise<ProviderResponse | StreamingExecution> => {
|
||||
const vertexProject = env.VERTEX_PROJECT || request.vertexProject
|
||||
const vertexLocation = env.VERTEX_LOCATION || request.vertexLocation || 'us-central1'
|
||||
const vertexProject = request.vertexProject || env.VERTEX_PROJECT
|
||||
const vertexLocation = request.vertexLocation || env.VERTEX_LOCATION || 'us-central1'
|
||||
|
||||
if (!vertexProject) {
|
||||
throw new Error(
|
||||
|
||||
@@ -137,6 +137,36 @@ function handleSecurityFiltering(request: NextRequest): NextResponse | null {
|
||||
return null
|
||||
}
|
||||
|
||||
const UTM_KEYS = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_content'] as const
|
||||
const UTM_COOKIE_NAME = 'sim_utm'
|
||||
const UTM_COOKIE_MAX_AGE = 3600
|
||||
|
||||
/**
|
||||
* Sets a `sim_utm` cookie when UTM params are present on auth pages.
|
||||
* Captures UTM values, the HTTP Referer, landing page, and a timestamp.
|
||||
*/
|
||||
function setUtmCookie(request: NextRequest, response: NextResponse): void {
|
||||
const { searchParams, pathname } = request.nextUrl
|
||||
const hasUtm = UTM_KEYS.some((key) => searchParams.get(key))
|
||||
if (!hasUtm) return
|
||||
|
||||
const utmData: Record<string, string> = {}
|
||||
for (const key of UTM_KEYS) {
|
||||
const value = searchParams.get(key)
|
||||
if (value) utmData[key] = value
|
||||
}
|
||||
utmData.referrer_url = request.headers.get('referer') || ''
|
||||
utmData.landing_page = pathname
|
||||
utmData.created_at = Date.now().toString()
|
||||
|
||||
response.cookies.set(UTM_COOKIE_NAME, JSON.stringify(utmData), {
|
||||
path: '/',
|
||||
maxAge: UTM_COOKIE_MAX_AGE,
|
||||
sameSite: 'lax',
|
||||
httpOnly: false, // Client-side hook needs to detect cookie presence
|
||||
})
|
||||
}
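As a concrete example, a request to `/signup?utm_source=twitter&utm_campaign=launch` arriving with a Twitter referer would produce roughly this cookie (the values are illustrative):

```ts
// Illustrative payload for /signup?utm_source=twitter&utm_campaign=launch
const exampleSimUtmCookie = {
  utm_source: 'twitter',
  utm_campaign: 'launch',
  referrer_url: 'https://twitter.com/', // from the Referer header
  landing_page: '/signup',
  created_at: '1718000000000', // Date.now().toString() at capture time
}
// Written as: sim_utm=<JSON.stringify(exampleSimUtmCookie)>; Path=/; Max-Age=3600; SameSite=Lax
// (not httpOnly, so the client-side useReferralAttribution hook can detect it)
```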
|
||||
|
||||
export async function proxy(request: NextRequest) {
|
||||
const url = request.nextUrl
|
||||
|
||||
@@ -148,10 +178,13 @@ export async function proxy(request: NextRequest) {
|
||||
|
||||
if (url.pathname === '/login' || url.pathname === '/signup') {
|
||||
if (hasActiveSession) {
|
||||
return NextResponse.redirect(new URL('/workspace', request.url))
|
||||
const redirect = NextResponse.redirect(new URL('/workspace', request.url))
|
||||
setUtmCookie(request, redirect)
|
||||
return redirect
|
||||
}
|
||||
const response = NextResponse.next()
|
||||
response.headers.set('Content-Security-Policy', generateRuntimeCSP())
|
||||
setUtmCookie(request, response)
|
||||
return response
|
||||
}
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ import {
|
||||
isCanonicalPair,
|
||||
isNonEmptyValue,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockHiddenByHostedKey,
|
||||
resolveCanonicalMode,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { getBlock } from '@/blocks'
|
||||
@@ -50,7 +49,6 @@ function shouldSerializeSubBlock(
|
||||
canonicalModeOverrides?: CanonicalModeOverrides
|
||||
): boolean {
|
||||
if (!isSubBlockFeatureEnabled(subBlockConfig)) return false
|
||||
if (isSubBlockHiddenByHostedKey(subBlockConfig)) return false
|
||||
|
||||
if (subBlockConfig.mode === 'trigger') {
|
||||
if (!isTriggerContext && !isTriggerCategory) return false
|
||||
@@ -282,7 +280,7 @@ export class Serializer {
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
const serialized: SerializedBlock = {
|
||||
id: block.id,
|
||||
position: block.position,
|
||||
config: {
|
||||
@@ -302,6 +300,12 @@ export class Serializer {
|
||||
},
|
||||
enabled: block.enabled,
|
||||
}
|
||||
|
||||
if (block.data?.canonicalModes) {
|
||||
serialized.canonicalModes = block.data.canonicalModes as Record<string, 'basic' | 'advanced'>
|
||||
}
|
||||
|
||||
return serialized
|
||||
}
|
||||
|
||||
private extractParams(block: BlockState): Record<string, any> {
|
||||
|
||||
@@ -38,6 +38,8 @@ export interface SerializedBlock {
|
||||
color?: string
|
||||
}
|
||||
enabled: boolean
|
||||
/** Canonical mode overrides from block.data (used by agent handler for tool param resolution) */
|
||||
canonicalModes?: Record<string, 'basic' | 'advanced'>
|
||||
}
|
||||
|
||||
export interface SerializedLoop {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
export { indexedDBStorage } from './storage'
|
||||
export { useTerminalConsoleStore } from './store'
|
||||
export type { ConsoleEntry, ConsoleStore, ConsoleUpdate } from './types'
|
||||
export { extractChildWorkflowEntries, hasChildTraceSpans } from './utils'
|
||||
|
||||
@@ -224,7 +224,11 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
|
||||
|
||||
const newEntry = get().entries[0]
|
||||
|
||||
if (newEntry?.error && newEntry.blockType !== 'cancelled') {
|
||||
if (
|
||||
newEntry?.error &&
|
||||
newEntry.blockType !== 'cancelled' &&
|
||||
!newEntry.parentWorkflowBlockId
|
||||
) {
|
||||
notifyBlockError({
|
||||
error: newEntry.error,
|
||||
blockName: newEntry.blockName || 'Unknown Block',
|
||||
@@ -249,7 +253,9 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
|
||||
})),
|
||||
|
||||
exportConsoleCSV: (workflowId: string) => {
|
||||
const entries = get().entries.filter((entry) => entry.workflowId === workflowId)
|
||||
const entries = get().entries.filter(
|
||||
(entry) => entry.workflowId === workflowId && !entry.parentWorkflowBlockId
|
||||
)
|
||||
|
||||
if (entries.length === 0) {
|
||||
return
|
||||
|
||||
@@ -22,6 +22,7 @@ export interface ConsoleEntry {
|
||||
iterationTotal?: number
|
||||
iterationType?: SubflowType
|
||||
iterationContainerId?: string
|
||||
parentWorkflowBlockId?: string
|
||||
isRunning?: boolean
|
||||
isCanceled?: boolean
|
||||
}
|
||||
@@ -44,6 +45,7 @@ export interface ConsoleUpdate {
|
||||
iterationTotal?: number
|
||||
iterationType?: SubflowType
|
||||
iterationContainerId?: string
|
||||
parentWorkflowBlockId?: string
|
||||
}
|
||||
|
||||
export interface ConsoleStore {
|
||||
|
||||
78
apps/sim/stores/terminal/console/utils.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import type { TraceSpan } from '@/lib/logs/types'
|
||||
import type { ConsoleEntry } from '@/stores/terminal/console/types'
|
||||
|
||||
/**
|
||||
* Parameters for extracting child workflow entries from trace spans
|
||||
*/
|
||||
interface ExtractChildWorkflowEntriesParams {
|
||||
parentBlockId: string
|
||||
executionId: string
|
||||
executionOrder: number
|
||||
workflowId: string
|
||||
childTraceSpans: TraceSpan[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts child workflow trace spans into console entry payloads.
|
||||
* Handles recursive nesting for multi-level child workflows by flattening
|
||||
* nested children with a parent block ID chain.
|
||||
*/
|
||||
export function extractChildWorkflowEntries(
|
||||
params: ExtractChildWorkflowEntriesParams
|
||||
): Omit<ConsoleEntry, 'id' | 'timestamp'>[] {
|
||||
const { parentBlockId, executionId, executionOrder, workflowId, childTraceSpans } = params
|
||||
const entries: Omit<ConsoleEntry, 'id' | 'timestamp'>[] = []
|
||||
|
||||
for (const span of childTraceSpans) {
|
||||
if (!span.blockId) continue
|
||||
|
||||
const childBlockId = `child-${parentBlockId}-${span.blockId}`
|
||||
|
||||
entries.push({
|
||||
blockId: childBlockId,
|
||||
blockName: span.name || 'Unknown Block',
|
||||
blockType: span.type || 'unknown',
|
||||
parentWorkflowBlockId: parentBlockId,
|
||||
input: span.input || {},
|
||||
output: (span.output || {}) as ConsoleEntry['output'],
|
||||
durationMs: span.duration,
|
||||
startedAt: span.startTime,
|
||||
endedAt: span.endTime,
|
||||
success: span.status !== 'error',
|
||||
error:
|
||||
span.status === 'error'
|
||||
? (span.output?.error as string) || `${span.name || 'Block'} failed`
|
||||
: undefined,
|
||||
executionId,
|
||||
executionOrder,
|
||||
workflowId,
|
||||
})
|
||||
|
||||
// Recursively extract nested child workflow spans
|
||||
if (span.children && span.children.length > 0 && span.type === 'workflow') {
|
||||
const nestedEntries = extractChildWorkflowEntries({
|
||||
parentBlockId: childBlockId,
|
||||
executionId,
|
||||
executionOrder,
|
||||
workflowId,
|
||||
childTraceSpans: span.children,
|
||||
})
|
||||
entries.push(...nestedEntries)
|
||||
}
|
||||
}
|
||||
|
||||
return entries
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a block completed event output contains child trace spans
|
||||
*/
|
||||
export function hasChildTraceSpans(output: unknown): output is Record<string, unknown> & {
|
||||
childTraceSpans: TraceSpan[]
|
||||
} {
|
||||
return (
|
||||
output !== null &&
|
||||
typeof output === 'object' &&
|
||||
Array.isArray((output as Record<string, unknown>).childTraceSpans)
|
||||
)
|
||||
}
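A sketch of how a caller might feed these helpers when a workflow block finishes (the event shape and the final sink are hypothetical; the store-side wiring is not part of this diff):

```ts
// Hypothetical block-completed handler.
function onBlockCompleted(event: { blockId: string; executionId: string; output: unknown }) {
  if (!hasChildTraceSpans(event.output)) return

  const childEntries = extractChildWorkflowEntries({
    parentBlockId: event.blockId,
    executionId: event.executionId,
    executionOrder: 0, // hypothetical
    workflowId: 'wf_123', // hypothetical
    childTraceSpans: event.output.childTraceSpans,
  })

  // e.g. push each entry into the terminal console store (sink shown as a placeholder)
  childEntries.forEach((entry) => console.log(entry))
}
```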
|
||||
@@ -1,4 +1,4 @@
|
||||
export type { ConsoleEntry, ConsoleStore, ConsoleUpdate } from './console'
|
||||
export { useTerminalConsoleStore } from './console'
|
||||
export { extractChildWorkflowEntries, hasChildTraceSpans, useTerminalConsoleStore } from './console'
|
||||
export { useTerminalStore } from './store'
|
||||
export type { TerminalState } from './types'
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { ExaAnswerParams, ExaAnswerResponse } from '@/tools/exa/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('ExaAnswerTool')
|
||||
|
||||
export const answerTool: ToolConfig<ExaAnswerParams, ExaAnswerResponse> = {
|
||||
id: 'exa_answer',
|
||||
name: 'Exa Answer',
|
||||
@@ -30,24 +27,6 @@ export const answerTool: ToolConfig<ExaAnswerParams, ExaAnswerResponse> = {
|
||||
description: 'Exa AI API Key',
|
||||
},
|
||||
},
|
||||
hosting: {
|
||||
envKeys: ['EXA_API_KEY_1', 'EXA_API_KEY_2', 'EXA_API_KEY_3'],
|
||||
apiKeyParam: 'apiKey',
|
||||
byokProviderId: 'exa',
|
||||
pricing: {
|
||||
type: 'custom',
|
||||
getCost: (_params, output) => {
|
||||
// Use _costDollars from Exa API response (internal field, stripped from final output)
|
||||
const costDollars = output._costDollars as { total?: number } | undefined
|
||||
if (costDollars?.total) {
|
||||
return { cost: costDollars.total, metadata: { costDollars } }
|
||||
}
|
||||
// Fallback: $5/1000 requests
|
||||
logger.warn('Exa answer response missing costDollars, using fallback pricing')
|
||||
return 0.005
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://api.exa.ai/answer',
|
||||
@@ -82,7 +61,6 @@ export const answerTool: ToolConfig<ExaAnswerParams, ExaAnswerResponse> = {
|
||||
url: citation.url,
|
||||
text: citation.text || '',
|
||||
})) || [],
|
||||
_costDollars: data.costDollars,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { ExaFindSimilarLinksParams, ExaFindSimilarLinksResponse } from '@/tools/exa/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('ExaFindSimilarLinksTool')
|
||||
|
||||
export const findSimilarLinksTool: ToolConfig<
|
||||
ExaFindSimilarLinksParams,
|
||||
ExaFindSimilarLinksResponse
|
||||
@@ -79,26 +76,6 @@ export const findSimilarLinksTool: ToolConfig<
|
||||
description: 'Exa AI API Key',
|
||||
},
|
||||
},
|
||||
hosting: {
|
||||
envKeys: ['EXA_API_KEY_1', 'EXA_API_KEY_2', 'EXA_API_KEY_3'],
|
||||
apiKeyParam: 'apiKey',
|
||||
byokProviderId: 'exa',
|
||||
pricing: {
|
||||
type: 'custom',
|
||||
getCost: (_params, output) => {
|
||||
// Use _costDollars from Exa API response (internal field, stripped from final output)
|
||||
const costDollars = output._costDollars as { total?: number } | undefined
|
||||
if (costDollars?.total) {
|
||||
return { cost: costDollars.total, metadata: { costDollars } }
|
||||
}
|
||||
// Fallback: $5/1000 (1-25 results) or $25/1000 (26-100 results)
|
||||
logger.warn('Exa find_similar_links response missing costDollars, using fallback pricing')
|
||||
const similarLinks = output.similarLinks as unknown[] | undefined
|
||||
const resultCount = similarLinks?.length || 0
|
||||
return resultCount <= 25 ? 0.005 : 0.025
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://api.exa.ai/findSimilar',
|
||||
@@ -163,7 +140,6 @@ export const findSimilarLinksTool: ToolConfig<
|
||||
highlights: result.highlights,
|
||||
score: result.score || 0,
|
||||
})),
|
||||
_costDollars: data.costDollars,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { ExaGetContentsParams, ExaGetContentsResponse } from '@/tools/exa/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('ExaGetContentsTool')
|
||||
|
||||
export const getContentsTool: ToolConfig<ExaGetContentsParams, ExaGetContentsResponse> = {
|
||||
id: 'exa_get_contents',
|
||||
name: 'Exa Get Contents',
|
||||
@@ -64,25 +61,6 @@ export const getContentsTool: ToolConfig<ExaGetContentsParams, ExaGetContentsRes
|
||||
description: 'Exa AI API Key',
|
||||
},
|
||||
},
|
||||
hosting: {
|
||||
envKeys: ['EXA_API_KEY_1', 'EXA_API_KEY_2', 'EXA_API_KEY_3'],
|
||||
apiKeyParam: 'apiKey',
|
||||
byokProviderId: 'exa',
|
||||
pricing: {
|
||||
type: 'custom',
|
||||
getCost: (_params, output) => {
|
||||
// Use _costDollars from Exa API response (internal field, stripped from final output)
|
||||
const costDollars = output._costDollars as { total?: number } | undefined
|
||||
if (costDollars?.total) {
|
||||
return { cost: costDollars.total, metadata: { costDollars } }
|
||||
}
|
||||
// Fallback: $1/1000 pages
|
||||
logger.warn('Exa get_contents response missing costDollars, using fallback pricing')
|
||||
const results = output.results as unknown[] | undefined
|
||||
return (results?.length || 0) * 0.001
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://api.exa.ai/contents',
|
||||
@@ -154,7 +132,6 @@ export const getContentsTool: ToolConfig<ExaGetContentsParams, ExaGetContentsRes
|
||||
summary: result.summary || '',
|
||||
highlights: result.highlights,
|
||||
})),
|
||||
_costDollars: data.costDollars,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@@ -34,26 +34,6 @@ export const researchTool: ToolConfig<ExaResearchParams, ExaResearchResponse> =
      description: 'Exa AI API Key',
    },
  },
  hosting: {
    envKeys: ['EXA_API_KEY_1', 'EXA_API_KEY_2', 'EXA_API_KEY_3'],
    apiKeyParam: 'apiKey',
    byokProviderId: 'exa',
    pricing: {
      type: 'custom',
      getCost: (params, output) => {
        // Use _costDollars from Exa API response (internal field, stripped from final output)
        const costDollars = output._costDollars as { total?: number } | undefined
        if (costDollars?.total) {
          return { cost: costDollars.total, metadata: { costDollars } }
        }

        // Fallback to estimate if cost not available
        logger.warn('Exa research response missing costDollars, using fallback pricing')
        const model = params.model || 'exa-research'
        return model === 'exa-research-pro' ? 0.055 : 0.03
      },
    },
  },

  request: {
    url: 'https://api.exa.ai/research/v1',
@@ -131,8 +111,6 @@ export const researchTool: ToolConfig<ExaResearchParams, ExaResearchResponse> =
        score: 1.0,
      },
    ],
    // Include cost breakdown for pricing calculation (internal field, stripped from final output)
    _costDollars: taskData.costDollars,
  }
  return result
}

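// Illustrative sketch (not part of this diff): the model-based fallback the research
// pricing block above applies when costDollars is missing. Function name is assumed.
function estimateResearchCost(model?: string): number {
  const resolved = model || 'exa-research'
  return resolved === 'exa-research-pro' ? 0.055 : 0.03 // flat per-task estimates
}
// e.g. estimateResearchCost() === 0.03, estimateResearchCost('exa-research-pro') === 0.055
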
@@ -1,9 +1,6 @@
import { createLogger } from '@sim/logger'
import type { ExaSearchParams, ExaSearchResponse } from '@/tools/exa/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('ExaSearchTool')

export const searchTool: ToolConfig<ExaSearchParams, ExaSearchResponse> = {
  id: 'exa_search',
  name: 'Exa Search',
@@ -89,31 +86,6 @@ export const searchTool: ToolConfig<ExaSearchParams, ExaSearchResponse> = {
      description: 'Exa AI API Key',
    },
  },
  hosting: {
    envKeys: ['EXA_API_KEY_1', 'EXA_API_KEY_2', 'EXA_API_KEY_3'],
    apiKeyParam: 'apiKey',
    byokProviderId: 'exa',
    pricing: {
      type: 'custom',
      getCost: (params, output) => {
        // Use _costDollars from Exa API response (internal field, stripped from final output)
        const costDollars = output._costDollars as { total?: number } | undefined
        if (costDollars?.total) {
          return { cost: costDollars.total, metadata: { costDollars } }
        }

        // Fallback: estimate based on search type and result count
        logger.warn('Exa search response missing costDollars, using fallback pricing')
        const isDeepSearch = params.type === 'neural'
        if (isDeepSearch) {
          return 0.015
        }
        const results = output.results as unknown[] | undefined
        const resultCount = results?.length || 0
        return resultCount <= 25 ? 0.005 : 0.025
      },
    },
  },

  request: {
    url: 'https://api.exa.ai/search',
@@ -195,7 +167,6 @@ export const searchTool: ToolConfig<ExaSearchParams, ExaSearchResponse> = {
        highlights: result.highlights,
        score: result.score,
      })),
      _costDollars: data.costDollars,
    },
  }
  },

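// Illustrative sketch (not part of this diff): the tiered fallback the search pricing
// block above applies when costDollars is missing. Function name is assumed.
function estimateSearchCost(searchType: string | undefined, resultCount: number): number {
  if (searchType === 'neural') return 0.015  // deep (neural) search: flat estimate
  return resultCount <= 25 ? 0.005 : 0.025   // $5/1k requests for 1-25 results, $25/1k above
}
// e.g. estimateSearchCost('keyword', 10) === 0.005
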
@@ -6,11 +6,6 @@ export interface ExaBaseParams {
  apiKey: string
}

/** Cost breakdown returned by Exa API responses */
export interface ExaCostDollars {
  total: number
}

// Search tool types
export interface ExaSearchParams extends ExaBaseParams {
  query: string
@@ -55,7 +50,6 @@ export interface ExaSearchResult {
export interface ExaSearchResponse extends ToolResponse {
  output: {
    results: ExaSearchResult[]
    costDollars?: ExaCostDollars
  }
}

@@ -84,7 +78,6 @@ export interface ExaGetContentsResult {
export interface ExaGetContentsResponse extends ToolResponse {
  output: {
    results: ExaGetContentsResult[]
    costDollars?: ExaCostDollars
  }
}

@@ -127,7 +120,6 @@ export interface ExaSimilarLink {
export interface ExaFindSimilarLinksResponse extends ToolResponse {
  output: {
    similarLinks: ExaSimilarLink[]
    costDollars?: ExaCostDollars
  }
}

@@ -145,7 +137,6 @@ export interface ExaAnswerResponse extends ToolResponse {
      url: string
      text: string
    }[]
    costDollars?: ExaCostDollars
  }
}

@@ -167,7 +158,6 @@ export interface ExaResearchResponse extends ToolResponse {
      author?: string
      score: number
    }[]
    costDollars?: ExaCostDollars
  }
}

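// Illustrative sketch (not part of this diff): minimal shape of a tool response carrying
// the optional cost breakdown removed above. Field names mirror the interfaces in this
// hunk; the interface names and concrete values are invented for illustration.
interface SketchCostDollars { total: number }
interface SketchSearchResponse { output: { results: unknown[]; costDollars?: SketchCostDollars } }

const sketch: SketchSearchResponse = {
  output: { results: [], costDollars: { total: 0.005 } },
}
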
@@ -95,7 +95,7 @@ export const fileParserTool: ToolConfig<FileParserInput, FileParserOutput> = {
    filePath: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      visibility: 'hidden',
      description: 'Path to the file(s). Can be a single path, URL, or an array of paths.',
    },
    file: {

Some files were not shown because too many files have changed in this diff.