Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-14 00:15:09 -05:00)

Compare commits: cursor/cop...fix/copilo (2 commits)

Commits:
- 04820a0075
- 09c9477091
@@ -1157,21 +1157,6 @@ export function AirweaveIcon(props: SVGProps<SVGSVGElement>) {
   )
 }
 
-export function GoogleBooksIcon(props: SVGProps<SVGSVGElement>) {
-  return (
-    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 478.633 540.068'>
-      <path
-        fill='#1C51A4'
-        d='M449.059,218.231L245.519,99.538l-0.061,193.23c0.031,1.504-0.368,2.977-1.166,4.204c-0.798,1.258-1.565,1.995-2.915,2.547c-1.35,0.552-2.792,0.706-4.204,0.399c-1.412-0.307-2.7-1.043-3.713-2.117l-69.166-70.609l-69.381,70.179c-1.013,0.982-2.301,1.657-3.652,1.903c-1.381,0.246-2.792,0.092-4.081-0.491c-1.289-0.583-1.626-0.522-2.394-1.749c-0.767-1.197-1.197-2.608-1.197-4.081L85.031,6.007l-2.915-1.289C43.973-11.638,0,16.409,0,59.891v420.306c0,46.029,49.312,74.782,88.775,51.767l360.285-210.138C488.491,298.782,488.491,241.246,449.059,218.231z'
-      />
-      <path
-        fill='#80D7FB'
-        d='M88.805,8.124c-2.179-1.289-4.419-2.363-6.659-3.345l0.123,288.663c0,1.442,0.43,2.854,1.197,4.081c0.767,1.197,1.872,2.148,3.161,2.731c1.289,0.583,2.7,0.736,4.081,0.491c1.381-0.246,2.639-0.921,3.652-1.903l69.749-69.688l69.811,69.749c1.013,1.074,2.301,1.81,3.713,2.117c1.412,0.307,2.884,0.153,4.204-0.399c1.319-0.552,2.455-1.565,3.253-2.792c0.798-1.258,1.197-2.731,1.166-4.204V99.998L88.805,8.124z'
-      />
-    </svg>
-  )
-}
-
 export function GoogleDocsIcon(props: SVGProps<SVGSVGElement>) {
   return (
     <svg
@@ -38,7 +38,6 @@ import {
   GithubIcon,
   GitLabIcon,
   GmailIcon,
-  GoogleBooksIcon,
   GoogleCalendarIcon,
   GoogleDocsIcon,
   GoogleDriveIcon,
@@ -173,7 +172,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   github_v2: GithubIcon,
   gitlab: GitLabIcon,
   gmail_v2: GmailIcon,
-  google_books: GoogleBooksIcon,
   google_calendar_v2: GoogleCalendarIcon,
   google_docs: GoogleDocsIcon,
   google_drive: GoogleDriveIcon,
@@ -1,96 +0,0 @@
----
-title: Google Books
-description: Search and retrieve book information
----
-
-import { BlockInfoCard } from "@/components/ui/block-info-card"
-
-<BlockInfoCard
-  type="google_books"
-  color="#FFFFFF"
-/>
-
-## Usage Instructions
-
-Search for books using the Google Books API. Find volumes by title, author, ISBN, or keywords, and retrieve detailed information about specific books including descriptions, ratings, and publication details.
-
-
-
-## Tools
-
-### `google_books_volume_search`
-
-Search for books using the Google Books API
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `apiKey` | string | Yes | Google Books API key |
-| `query` | string | Yes | Search query. Supports special keywords: intitle:, inauthor:, inpublisher:, subject:, isbn: |
-| `filter` | string | No | Filter results by availability \(partial, full, free-ebooks, paid-ebooks, ebooks\) |
-| `printType` | string | No | Restrict to print type \(all, books, magazines\) |
-| `orderBy` | string | No | Sort order \(relevance, newest\) |
-| `startIndex` | number | No | Index of the first result to return \(for pagination\) |
-| `maxResults` | number | No | Maximum number of results to return \(1-40\) |
-| `langRestrict` | string | No | Restrict results to a specific language \(ISO 639-1 code\) |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `totalItems` | number | Total number of matching results |
-| `volumes` | array | List of matching volumes |
-| ↳ `id` | string | Volume ID |
-| ↳ `title` | string | Book title |
-| ↳ `subtitle` | string | Book subtitle |
-| ↳ `authors` | array | List of authors |
-| ↳ `publisher` | string | Publisher name |
-| ↳ `publishedDate` | string | Publication date |
-| ↳ `description` | string | Book description |
-| ↳ `pageCount` | number | Number of pages |
-| ↳ `categories` | array | Book categories |
-| ↳ `averageRating` | number | Average rating \(1-5\) |
-| ↳ `ratingsCount` | number | Number of ratings |
-| ↳ `language` | string | Language code |
-| ↳ `previewLink` | string | Link to preview on Google Books |
-| ↳ `infoLink` | string | Link to info page |
-| ↳ `thumbnailUrl` | string | Book cover thumbnail URL |
-| ↳ `isbn10` | string | ISBN-10 identifier |
-| ↳ `isbn13` | string | ISBN-13 identifier |
-
-### `google_books_volume_details`
-
-Get detailed information about a specific book volume
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `apiKey` | string | Yes | Google Books API key |
-| `volumeId` | string | Yes | The ID of the volume to retrieve |
-| `projection` | string | No | Projection level \(full, lite\) |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `id` | string | Volume ID |
-| `title` | string | Book title |
-| `subtitle` | string | Book subtitle |
-| `authors` | array | List of authors |
-| `publisher` | string | Publisher name |
-| `publishedDate` | string | Publication date |
-| `description` | string | Book description |
-| `pageCount` | number | Number of pages |
-| `categories` | array | Book categories |
-| `averageRating` | number | Average rating \(1-5\) |
-| `ratingsCount` | number | Number of ratings |
-| `language` | string | Language code |
-| `previewLink` | string | Link to preview on Google Books |
-| `infoLink` | string | Link to info page |
-| `thumbnailUrl` | string | Book cover thumbnail URL |
-| `isbn10` | string | ISBN-10 identifier |
-| `isbn13` | string | ISBN-13 identifier |
-
-
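The removed Google Books block documented above maps onto the public Google Books volumes API. As a rough, hedged sketch only (the block's real implementation is not part of this diff, and searchVolumes below is a hypothetical helper), a volume search built from the documented input parameters might look like:

// Hypothetical sketch, assuming the documented parameters are forwarded to the
// public Google Books endpoint (https://www.googleapis.com/books/v1/volumes).
interface VolumeSearchParams {
  apiKey: string
  query: string // supports intitle:, inauthor:, inpublisher:, subject:, isbn:
  filter?: 'partial' | 'full' | 'free-ebooks' | 'paid-ebooks' | 'ebooks'
  printType?: 'all' | 'books' | 'magazines'
  orderBy?: 'relevance' | 'newest'
  startIndex?: number
  maxResults?: number // 1-40
  langRestrict?: string // ISO 639-1 language code
}

async function searchVolumes(params: VolumeSearchParams) {
  const url = new URL('https://www.googleapis.com/books/v1/volumes')
  url.searchParams.set('q', params.query)
  url.searchParams.set('key', params.apiKey)
  if (params.filter) url.searchParams.set('filter', params.filter)
  if (params.printType) url.searchParams.set('printType', params.printType)
  if (params.orderBy) url.searchParams.set('orderBy', params.orderBy)
  if (params.startIndex !== undefined) url.searchParams.set('startIndex', String(params.startIndex))
  if (params.maxResults !== undefined) url.searchParams.set('maxResults', String(params.maxResults))
  if (params.langRestrict) url.searchParams.set('langRestrict', params.langRestrict)

  const res = await fetch(url)
  if (!res.ok) throw new Error(`Google Books request failed: ${res.status}`)
  return res.json() // { totalItems, items: [...] }
}

// Example: books with "workflow" in the title, newest first.
// await searchVolumes({ apiKey: 'YOUR_KEY', query: 'intitle:workflow', orderBy: 'newest', maxResults: 10 })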
@@ -33,7 +33,6 @@
   "github",
   "gitlab",
   "gmail",
-  "google_books",
   "google_calendar",
   "google_docs",
   "google_drive",
@@ -13,7 +13,6 @@ BETTER_AUTH_URL=http://localhost:3000
 
 # NextJS (Required)
 NEXT_PUBLIC_APP_URL=http://localhost:3000
-# INTERNAL_API_BASE_URL=http://sim-app.default.svc.cluster.local:3000 # Optional: internal URL for server-side /api self-calls; defaults to NEXT_PUBLIC_APP_URL
 
 # Security (Required)
 ENCRYPTION_KEY=your_encryption_key # Use `openssl rand -hex 32` to generate, used to encrypt environment variables
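The deleted INTERNAL_API_BASE_URL entry above describes the intent of the now-removed helper: prefer an explicit in-cluster URL for server-side /api self-calls and fall back to the public app URL. A minimal sketch of that behaviour, assuming only what the comment states (the real implementation in '@/lib/core/utils/urls' is not shown here):

// Hypothetical sketch based on the .env comment; not the repository's actual code.
function resolveInternalApiBaseUrl(): string {
  return (
    process.env.INTERNAL_API_BASE_URL || // e.g. http://sim-app.default.svc.cluster.local:3000
    process.env.NEXT_PUBLIC_APP_URL || // public base URL, e.g. http://localhost:3000
    'http://localhost:3000'
  )
}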
@@ -1,7 +1,7 @@
 import type { Artifact, Message, PushNotificationConfig, Task, TaskState } from '@a2a-js/sdk'
 import { v4 as uuidv4 } from 'uuid'
 import { generateInternalToken } from '@/lib/auth/internal'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 
 /** A2A v0.3 JSON-RPC method names */
 export const A2A_METHODS = {
@@ -118,7 +118,7 @@ export interface ExecuteRequestResult {
 export async function buildExecuteRequest(
   config: ExecuteRequestConfig
 ): Promise<ExecuteRequestResult> {
-  const url = `${getInternalApiBaseUrl()}/api/workflows/${config.workflowId}/execute`
+  const url = `${getBaseUrl()}/api/workflows/${config.workflowId}/execute`
   const headers: Record<string, string> = { 'Content-Type': 'application/json' }
   let useInternalAuth = false
 
@@ -1,187 +0,0 @@
-/**
- * POST /api/attribution
- *
- * Automatic UTM-based referral attribution.
- *
- * Reads the `sim_utm` cookie (set by proxy on auth pages), matches a campaign
- * by UTM specificity, and atomically inserts an attribution record + applies
- * bonus credits.
- *
- * Idempotent — the unique constraint on `userId` prevents double-attribution.
- */
-
-import { db } from '@sim/db'
-import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { eq } from 'drizzle-orm'
-import { nanoid } from 'nanoid'
-import { cookies } from 'next/headers'
-import { NextResponse } from 'next/server'
-import { z } from 'zod'
-import { getSession } from '@/lib/auth'
-import { applyBonusCredits } from '@/lib/billing/credits/bonus'
-
-const logger = createLogger('AttributionAPI')
-
-const COOKIE_NAME = 'sim_utm'
-
-const UtmCookieSchema = z.object({
-  utm_source: z.string().optional(),
-  utm_medium: z.string().optional(),
-  utm_campaign: z.string().optional(),
-  utm_content: z.string().optional(),
-  referrer_url: z.string().optional(),
-  landing_page: z.string().optional(),
-  created_at: z.string().optional(),
-})
-
-/**
- * Finds the most specific active campaign matching the given UTM params.
- * Null fields on a campaign act as wildcards. Ties broken by newest campaign.
- */
-async function findMatchingCampaign(utmData: z.infer<typeof UtmCookieSchema>) {
-  const campaigns = await db
-    .select()
-    .from(referralCampaigns)
-    .where(eq(referralCampaigns.isActive, true))
-
-  let bestMatch: (typeof campaigns)[number] | null = null
-  let bestScore = -1
-
-  for (const campaign of campaigns) {
-    let score = 0
-    let mismatch = false
-
-    const fields = [
-      { campaignVal: campaign.utmSource, utmVal: utmData.utm_source },
-      { campaignVal: campaign.utmMedium, utmVal: utmData.utm_medium },
-      { campaignVal: campaign.utmCampaign, utmVal: utmData.utm_campaign },
-      { campaignVal: campaign.utmContent, utmVal: utmData.utm_content },
-    ] as const
-
-    for (const { campaignVal, utmVal } of fields) {
-      if (campaignVal === null) continue
-      if (campaignVal === utmVal) {
-        score++
-      } else {
-        mismatch = true
-        break
-      }
-    }
-
-    if (!mismatch && score > 0) {
-      if (
-        score > bestScore ||
-        (score === bestScore &&
-          bestMatch &&
-          campaign.createdAt.getTime() > bestMatch.createdAt.getTime())
-      ) {
-        bestScore = score
-        bestMatch = campaign
-      }
-    }
-  }
-
-  return bestMatch
-}
-
-export async function POST() {
-  try {
-    const session = await getSession()
-    if (!session?.user?.id) {
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const cookieStore = await cookies()
-    const utmCookie = cookieStore.get(COOKIE_NAME)
-    if (!utmCookie?.value) {
-      return NextResponse.json({ attributed: false, reason: 'no_utm_cookie' })
-    }
-
-    let utmData: z.infer<typeof UtmCookieSchema>
-    try {
-      let decoded: string
-      try {
-        decoded = decodeURIComponent(utmCookie.value)
-      } catch {
-        decoded = utmCookie.value
-      }
-      utmData = UtmCookieSchema.parse(JSON.parse(decoded))
-    } catch {
-      logger.warn('Failed to parse UTM cookie', { userId: session.user.id })
-      cookieStore.delete(COOKIE_NAME)
-      return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
-    }
-
-    const matchedCampaign = await findMatchingCampaign(utmData)
-    if (!matchedCampaign) {
-      cookieStore.delete(COOKIE_NAME)
-      return NextResponse.json({ attributed: false, reason: 'no_matching_campaign' })
-    }
-
-    const bonusAmount = Number(matchedCampaign.bonusCreditAmount)
-
-    let attributed = false
-    await db.transaction(async (tx) => {
-      const [existingStats] = await tx
-        .select({ id: userStats.id })
-        .from(userStats)
-        .where(eq(userStats.userId, session.user.id))
-        .limit(1)
-
-      if (!existingStats) {
-        await tx.insert(userStats).values({
-          id: nanoid(),
-          userId: session.user.id,
-        })
-      }
-
-      const result = await tx
-        .insert(referralAttribution)
-        .values({
-          id: nanoid(),
-          userId: session.user.id,
-          campaignId: matchedCampaign.id,
-          utmSource: utmData.utm_source || null,
-          utmMedium: utmData.utm_medium || null,
-          utmCampaign: utmData.utm_campaign || null,
-          utmContent: utmData.utm_content || null,
-          referrerUrl: utmData.referrer_url || null,
-          landingPage: utmData.landing_page || null,
-          bonusCreditAmount: bonusAmount.toString(),
-        })
-        .onConflictDoNothing({ target: referralAttribution.userId })
-        .returning({ id: referralAttribution.id })
-
-      if (result.length > 0) {
-        await applyBonusCredits(session.user.id, bonusAmount, tx)
-        attributed = true
-      }
-    })
-
-    if (attributed) {
-      logger.info('Referral attribution created and bonus credits applied', {
-        userId: session.user.id,
-        campaignId: matchedCampaign.id,
-        campaignName: matchedCampaign.name,
-        utmSource: utmData.utm_source,
-        utmCampaign: utmData.utm_campaign,
-        utmContent: utmData.utm_content,
-        bonusAmount,
-      })
-    } else {
-      logger.info('User already attributed, skipping', { userId: session.user.id })
-    }
-
-    cookieStore.delete(COOKIE_NAME)
-
-    return NextResponse.json({
-      attributed,
-      bonusAmount: attributed ? bonusAmount : undefined,
-      reason: attributed ? undefined : 'already_attributed',
-    })
-  } catch (error) {
-    logger.error('Attribution error', { error })
-    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
-  }
-}
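The deleted findMatchingCampaign above scores each active campaign by how many of its non-null UTM fields equal the cookie values; any non-null field that differs disqualifies the campaign, and ties go to the newest campaign. A small worked example with hypothetical data (not taken from the repository):

// Cookie: { utm_source: 'twitter', utm_medium: 'social', utm_campaign: 'launch' }
// Campaign A { utmSource: 'twitter', utmMedium: null,     utmCampaign: null } -> score 1
// Campaign B { utmSource: 'twitter', utmMedium: 'social', utmCampaign: null } -> score 2 (selected)
// Campaign C { utmSource: 'twitter', utmMedium: 'email',  utmCampaign: null } -> mismatch, skipped
// If A and B both scored 2, the one with the later createdAt would win.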
@@ -4,10 +4,20 @@
  * @vitest-environment node
  */
 
-import { databaseMock, loggerMock } from '@sim/testing'
+import { loggerMock } from '@sim/testing'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-vi.mock('@sim/db', () => databaseMock)
+vi.mock('@sim/db', () => ({
+  db: {
+    select: vi.fn().mockReturnThis(),
+    from: vi.fn().mockReturnThis(),
+    where: vi.fn().mockReturnThis(),
+    limit: vi.fn().mockReturnValue([]),
+    update: vi.fn().mockReturnThis(),
+    set: vi.fn().mockReturnThis(),
+    orderBy: vi.fn().mockReturnThis(),
+  },
+}))
 
 vi.mock('@/lib/oauth/oauth', () => ({
   refreshOAuthToken: vi.fn(),
@@ -24,36 +34,13 @@ import {
   refreshTokenIfNeeded,
 } from '@/app/api/auth/oauth/utils'
 
-const mockDb = db as any
+const mockDbTyped = db as any
 const mockRefreshOAuthToken = refreshOAuthToken as any
 
-/**
- * Creates a chainable mock for db.select() calls.
- * Returns a nested chain: select() -> from() -> where() -> limit() / orderBy()
- */
-function mockSelectChain(limitResult: unknown[]) {
-  const mockLimit = vi.fn().mockReturnValue(limitResult)
-  const mockOrderBy = vi.fn().mockReturnValue(limitResult)
-  const mockWhere = vi.fn().mockReturnValue({ limit: mockLimit, orderBy: mockOrderBy })
-  const mockFrom = vi.fn().mockReturnValue({ where: mockWhere })
-  mockDb.select.mockReturnValueOnce({ from: mockFrom })
-  return { mockFrom, mockWhere, mockLimit }
-}
-
-/**
- * Creates a chainable mock for db.update() calls.
- * Returns a nested chain: update() -> set() -> where()
- */
-function mockUpdateChain() {
-  const mockWhere = vi.fn().mockResolvedValue({})
-  const mockSet = vi.fn().mockReturnValue({ where: mockWhere })
-  mockDb.update.mockReturnValueOnce({ set: mockSet })
-  return { mockSet, mockWhere }
-}
-
 describe('OAuth Utils', () => {
   beforeEach(() => {
     vi.clearAllMocks()
+    mockDbTyped.limit.mockReturnValue([])
   })
 
   afterEach(() => {
@@ -63,20 +50,20 @@ describe('OAuth Utils', () => {
   describe('getCredential', () => {
     it('should return credential when found', async () => {
       const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
-      const { mockFrom, mockWhere, mockLimit } = mockSelectChain([mockCredential])
+      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
 
       const credential = await getCredential('request-id', 'credential-id', 'test-user-id')
 
-      expect(mockDb.select).toHaveBeenCalled()
-      expect(mockFrom).toHaveBeenCalled()
-      expect(mockWhere).toHaveBeenCalled()
-      expect(mockLimit).toHaveBeenCalledWith(1)
+      expect(mockDbTyped.select).toHaveBeenCalled()
+      expect(mockDbTyped.from).toHaveBeenCalled()
+      expect(mockDbTyped.where).toHaveBeenCalled()
+      expect(mockDbTyped.limit).toHaveBeenCalledWith(1)
 
       expect(credential).toEqual(mockCredential)
     })
 
     it('should return undefined when credential is not found', async () => {
-      mockSelectChain([])
+      mockDbTyped.limit.mockReturnValueOnce([])
 
       const credential = await getCredential('request-id', 'nonexistent-id', 'test-user-id')
 
@@ -115,12 +102,11 @@ describe('OAuth Utils', () => {
         refreshToken: 'new-refresh-token',
       })
 
-      mockUpdateChain()
-
       const result = await refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
 
       expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
-      expect(mockDb.update).toHaveBeenCalled()
+      expect(mockDbTyped.update).toHaveBeenCalled()
+      expect(mockDbTyped.set).toHaveBeenCalled()
       expect(result).toEqual({ accessToken: 'new-token', refreshed: true })
     })
 
@@ -166,7 +152,7 @@ describe('OAuth Utils', () => {
         providerId: 'google',
        userId: 'test-user-id',
       }
-      mockSelectChain([mockCredential])
+      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
 
       const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
 
@@ -183,8 +169,7 @@ describe('OAuth Utils', () => {
         providerId: 'google',
         userId: 'test-user-id',
       }
-      mockSelectChain([mockCredential])
-      mockUpdateChain()
+      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
 
       mockRefreshOAuthToken.mockResolvedValueOnce({
         accessToken: 'new-token',
@@ -195,12 +180,13 @@ describe('OAuth Utils', () => {
       const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
 
       expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
-      expect(mockDb.update).toHaveBeenCalled()
+      expect(mockDbTyped.update).toHaveBeenCalled()
+      expect(mockDbTyped.set).toHaveBeenCalled()
      expect(token).toBe('new-token')
     })
 
     it('should return null if credential not found', async () => {
-      mockSelectChain([])
+      mockDbTyped.limit.mockReturnValueOnce([])
 
       const token = await refreshAccessTokenIfNeeded('nonexistent-id', 'test-user-id', 'request-id')
 
@@ -216,7 +202,7 @@ describe('OAuth Utils', () => {
         providerId: 'google',
         userId: 'test-user-id',
       }
-      mockSelectChain([mockCredential])
+      mockDbTyped.limit.mockReturnValueOnce([mockCredential])
 
       mockRefreshOAuthToken.mockResolvedValueOnce(null)
 
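The inline '@sim/db' mock introduced above relies on a standard vitest idiom: mockReturnThis() makes every chained builder call return the same mock object, so a Drizzle-style query chain ends at whatever the final stub returns. A standalone sketch of the pattern (illustrative only, not code from the repository):

import { expect, vi } from 'vitest'

// Each chained call returns the mock object itself; limit() returns the stubbed rows.
const db = {
  select: vi.fn().mockReturnThis(),
  from: vi.fn().mockReturnThis(),
  where: vi.fn().mockReturnThis(),
  limit: vi.fn().mockReturnValue([{ id: 'credential-id' }]),
}

const rows = db.select().from('account').where('id = ?').limit(1)
// rows is [{ id: 'credential-id' }], and every step remains assertable:
expect(db.where).toHaveBeenCalled()
expect(db.limit).toHaveBeenCalledWith(1)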
@@ -18,9 +18,9 @@ describe('Copilot Checkpoints Revert API Route', () => {
     setupCommonApiMocks()
     mockCryptoUuid()
 
+    // Mock getBaseUrl to return localhost for tests
     vi.doMock('@/lib/core/utils/urls', () => ({
       getBaseUrl: vi.fn(() => 'http://localhost:3000'),
-      getInternalApiBaseUrl: vi.fn(() => 'http://localhost:3000'),
       getBaseDomain: vi.fn(() => 'localhost:3000'),
       getEmailDomain: vi.fn(() => 'localhost:3000'),
     }))
@@ -11,7 +11,7 @@ import {
   createRequestTracker,
   createUnauthorizedResponse,
 } from '@/lib/copilot/request-helpers'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
 import { isUuidV4 } from '@/executor/constants'
 
@@ -99,7 +99,7 @@ export async function POST(request: NextRequest) {
     }
 
     const stateResponse = await fetch(
-      `${getInternalApiBaseUrl()}/api/workflows/${checkpoint.workflowId}/state`,
+      `${getBaseUrl()}/api/workflows/${checkpoint.workflowId}/state`,
       {
         method: 'PUT',
         headers: {
@@ -4,12 +4,16 @@
  *
  * @vitest-environment node
  */
-import { createEnvMock, databaseMock, loggerMock } from '@sim/testing'
+import { createEnvMock, createMockLogger } from '@sim/testing'
 import { beforeEach, describe, expect, it, vi } from 'vitest'
 
+const loggerMock = vi.hoisted(() => ({
+  createLogger: () => createMockLogger(),
+}))
+
 vi.mock('drizzle-orm')
 vi.mock('@sim/logger', () => loggerMock)
-vi.mock('@sim/db', () => databaseMock)
+vi.mock('@sim/db')
 vi.mock('@/lib/knowledge/documents/utils', () => ({
   retryWithExponentialBackoff: (fn: any) => fn(),
 }))
@@ -72,7 +72,6 @@ describe('MCP Serve Route', () => {
     }))
     vi.doMock('@/lib/core/utils/urls', () => ({
       getBaseUrl: () => 'http://localhost:3000',
-      getInternalApiBaseUrl: () => 'http://localhost:3000',
     }))
     vi.doMock('@/lib/core/execution-limits', () => ({
       getMaxExecutionTimeout: () => 10_000,
@@ -22,7 +22,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { type AuthResult, checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateInternalToken } from '@/lib/auth/internal'
 import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
 
 const logger = createLogger('WorkflowMcpServeAPI')
@@ -285,7 +285,7 @@ async function handleToolsCall(
     )
   }
 
-  const executeUrl = `${getInternalApiBaseUrl()}/api/workflows/${tool.workflowId}/execute`
+  const executeUrl = `${getBaseUrl()}/api/workflows/${tool.workflowId}/execute`
   const headers: Record<string, string> = { 'Content-Type': 'application/json' }
 
   if (publicServerOwnerId) {
@@ -1,170 +0,0 @@
-/**
- * POST /api/referral-code/redeem
- *
- * Redeem a referral/promo code to receive bonus credits.
- *
- * Body:
- * - code: string — The referral code to redeem
- *
- * Response: { redeemed: boolean, bonusAmount?: number, error?: string }
- *
- * Constraints:
- * - Enterprise users cannot redeem codes
- * - One redemption per user, ever (unique constraint on userId)
- * - One redemption per organization for team users (partial unique on organizationId)
- */
-
-import { db } from '@sim/db'
-import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { and, eq } from 'drizzle-orm'
-import { nanoid } from 'nanoid'
-import { NextResponse } from 'next/server'
-import { z } from 'zod'
-import { getSession } from '@/lib/auth'
-import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
-import { applyBonusCredits } from '@/lib/billing/credits/bonus'
-
-const logger = createLogger('ReferralCodeRedemption')
-
-const RedeemCodeSchema = z.object({
-  code: z.string().min(1, 'Code is required'),
-})
-
-export async function POST(request: Request) {
-  try {
-    const session = await getSession()
-    if (!session?.user?.id) {
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const body = await request.json()
-    const { code } = RedeemCodeSchema.parse(body)
-
-    const subscription = await getHighestPrioritySubscription(session.user.id)
-
-    if (subscription?.plan === 'enterprise') {
-      return NextResponse.json({
-        redeemed: false,
-        error: 'Enterprise accounts cannot redeem referral codes',
-      })
-    }
-
-    const isTeam = subscription?.plan === 'team'
-    const orgId = isTeam ? subscription.referenceId : null
-
-    const normalizedCode = code.trim().toUpperCase()
-
-    const [campaign] = await db
-      .select()
-      .from(referralCampaigns)
-      .where(and(eq(referralCampaigns.code, normalizedCode), eq(referralCampaigns.isActive, true)))
-      .limit(1)
-
-    if (!campaign) {
-      logger.info('Invalid code redemption attempt', {
-        userId: session.user.id,
-        code: normalizedCode,
-      })
-      return NextResponse.json({ error: 'Invalid or expired code' }, { status: 404 })
-    }
-
-    const [existingUserAttribution] = await db
-      .select({ id: referralAttribution.id })
-      .from(referralAttribution)
-      .where(eq(referralAttribution.userId, session.user.id))
-      .limit(1)
-
-    if (existingUserAttribution) {
-      return NextResponse.json({
-        redeemed: false,
-        error: 'You have already redeemed a code',
-      })
-    }
-
-    if (orgId) {
-      const [existingOrgAttribution] = await db
-        .select({ id: referralAttribution.id })
-        .from(referralAttribution)
-        .where(eq(referralAttribution.organizationId, orgId))
-        .limit(1)
-
-      if (existingOrgAttribution) {
-        return NextResponse.json({
-          redeemed: false,
-          error: 'A code has already been redeemed for your organization',
-        })
-      }
-    }
-
-    const bonusAmount = Number(campaign.bonusCreditAmount)
-
-    let redeemed = false
-    await db.transaction(async (tx) => {
-      const [existingStats] = await tx
-        .select({ id: userStats.id })
-        .from(userStats)
-        .where(eq(userStats.userId, session.user.id))
-        .limit(1)
-
-      if (!existingStats) {
-        await tx.insert(userStats).values({
-          id: nanoid(),
-          userId: session.user.id,
-        })
-      }
-
-      const result = await tx
-        .insert(referralAttribution)
-        .values({
-          id: nanoid(),
-          userId: session.user.id,
-          organizationId: orgId,
-          campaignId: campaign.id,
-          utmSource: null,
-          utmMedium: null,
-          utmCampaign: null,
-          utmContent: null,
-          referrerUrl: null,
-          landingPage: null,
-          bonusCreditAmount: bonusAmount.toString(),
-        })
-        .onConflictDoNothing()
-        .returning({ id: referralAttribution.id })
-
-      if (result.length > 0) {
-        await applyBonusCredits(session.user.id, bonusAmount, tx)
-        redeemed = true
-      }
-    })
-
-    if (redeemed) {
-      logger.info('Referral code redeemed', {
-        userId: session.user.id,
-        organizationId: orgId,
-        code: normalizedCode,
-        campaignId: campaign.id,
-        campaignName: campaign.name,
-        bonusAmount,
-      })
-    }
-
-    if (!redeemed) {
-      return NextResponse.json({
-        redeemed: false,
-        error: 'You have already redeemed a code',
-      })
-    }
-
-    return NextResponse.json({
-      redeemed: true,
-      bonusAmount,
-    })
-  } catch (error) {
-    if (error instanceof z.ZodError) {
-      return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
-    }
-    logger.error('Referral code redemption error', { error })
-    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
-  }
-}
@@ -3,14 +3,17 @@
  *
  * @vitest-environment node
  */
-import { databaseMock, loggerMock } from '@sim/testing'
+import { loggerMock } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission } = vi.hoisted(() => ({
-  mockGetSession: vi.fn(),
-  mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
-}))
+const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission, mockDbSelect, mockDbUpdate } =
+  vi.hoisted(() => ({
+    mockGetSession: vi.fn(),
+    mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
+    mockDbSelect: vi.fn(),
+    mockDbUpdate: vi.fn(),
+  }))
 
 vi.mock('@/lib/auth', () => ({
   getSession: mockGetSession,
@@ -20,7 +23,12 @@ vi.mock('@/lib/workflows/utils', () => ({
   authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
 }))
 
-vi.mock('@sim/db', () => databaseMock)
+vi.mock('@sim/db', () => ({
+  db: {
+    select: mockDbSelect,
+    update: mockDbUpdate,
+  },
+}))
 
 vi.mock('@sim/db/schema', () => ({
   workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
@@ -51,9 +59,6 @@ function createParams(id: string): { params: Promise<{ id: string }> } {
   return { params: Promise.resolve({ id }) }
 }
 
-const mockDbSelect = databaseMock.db.select as ReturnType<typeof vi.fn>
-const mockDbUpdate = databaseMock.db.update as ReturnType<typeof vi.fn>
-
 function mockDbChain(selectResults: unknown[][]) {
   let selectCallIndex = 0
   mockDbSelect.mockImplementation(() => ({
@@ -3,14 +3,17 @@
  *
  * @vitest-environment node
  */
-import { databaseMock, loggerMock } from '@sim/testing'
+import { loggerMock } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 
-const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission } = vi.hoisted(() => ({
-  mockGetSession: vi.fn(),
-  mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
-}))
+const { mockGetSession, mockAuthorizeWorkflowByWorkspacePermission, mockDbSelect } = vi.hoisted(
+  () => ({
+    mockGetSession: vi.fn(),
+    mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
+    mockDbSelect: vi.fn(),
+  })
+)
 
 vi.mock('@/lib/auth', () => ({
   getSession: mockGetSession,
@@ -20,7 +23,11 @@ vi.mock('@/lib/workflows/utils', () => ({
   authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
 }))
 
-vi.mock('@sim/db', () => databaseMock)
+vi.mock('@sim/db', () => ({
+  db: {
+    select: mockDbSelect,
+  },
+}))
 
 vi.mock('@sim/db/schema', () => ({
   workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
@@ -55,8 +62,6 @@ function createRequest(url: string): NextRequest {
   return new NextRequest(new URL(url), { method: 'GET' })
 }
 
-const mockDbSelect = databaseMock.db.select as ReturnType<typeof vi.fn>
-
 function mockDbChain(results: any[]) {
   let callIndex = 0
   mockDbSelect.mockImplementation(() => ({
@@ -6,7 +6,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { v4 as uuidv4 } from 'uuid'
 import { getSession } from '@/lib/auth'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import {
   type RegenerateStateInput,
   regenerateWorkflowStateIds,
@@ -115,18 +115,15 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
     // Step 3: Save the workflow state using the existing state endpoint (like imports do)
     // Ensure variables in state are remapped for the new workflow as well
     const workflowStateWithVariables = { ...workflowState, variables: remappedVariables }
-    const stateResponse = await fetch(
-      `${getInternalApiBaseUrl()}/api/workflows/${newWorkflowId}/state`,
-      {
-        method: 'PUT',
-        headers: {
-          'Content-Type': 'application/json',
-          // Forward the session cookie for authentication
-          cookie: request.headers.get('cookie') || '',
-        },
-        body: JSON.stringify(workflowStateWithVariables),
-      }
-    )
+    const stateResponse = await fetch(`${getBaseUrl()}/api/workflows/${newWorkflowId}/state`, {
+      method: 'PUT',
+      headers: {
+        'Content-Type': 'application/json',
+        // Forward the session cookie for authentication
+        cookie: request.headers.get('cookie') || '',
+      },
+      body: JSON.stringify(workflowStateWithVariables),
+    })
 
     if (!stateResponse.ok) {
       logger.error(`[${requestId}] Failed to save workflow state for template use`)
@@ -66,12 +66,6 @@
  * Credits:
  * POST /api/v1/admin/credits - Issue credits to user (by userId or email)
  *
- * Referral Campaigns:
- * GET /api/v1/admin/referral-campaigns - List campaigns (?active=true/false)
- * POST /api/v1/admin/referral-campaigns - Create campaign
- * GET /api/v1/admin/referral-campaigns/:id - Get campaign details
- * PATCH /api/v1/admin/referral-campaigns/:id - Update campaign fields
- *
  * Access Control (Permission Groups):
  * GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
  * DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
@@ -103,7 +97,6 @@ export type {
   AdminOrganization,
   AdminOrganizationBillingSummary,
   AdminOrganizationDetail,
-  AdminReferralCampaign,
   AdminSeatAnalytics,
   AdminSingleResponse,
   AdminSubscription,
@@ -118,7 +111,6 @@ export type {
   AdminWorkspaceMember,
   DbMember,
   DbOrganization,
-  DbReferralCampaign,
   DbSubscription,
   DbUser,
   DbUserStats,
@@ -147,7 +139,6 @@ export {
   parseWorkflowVariables,
   toAdminFolder,
   toAdminOrganization,
-  toAdminReferralCampaign,
   toAdminSubscription,
   toAdminUser,
   toAdminWorkflow,
@@ -1,142 +0,0 @@
-/**
- * GET /api/v1/admin/referral-campaigns/:id
- *
- * Get a single referral campaign by ID.
- *
- * PATCH /api/v1/admin/referral-campaigns/:id
- *
- * Update campaign fields. All fields are optional.
- *
- * Body:
- * - name: string (non-empty) - Campaign name
- * - bonusCreditAmount: number (> 0) - Bonus credits in dollars
- * - isActive: boolean - Enable/disable the campaign
- * - code: string | null (min 6 chars, auto-uppercased, null to remove) - Redeemable code
- * - utmSource: string | null - UTM source match (null = wildcard)
- * - utmMedium: string | null - UTM medium match (null = wildcard)
- * - utmCampaign: string | null - UTM campaign match (null = wildcard)
- * - utmContent: string | null - UTM content match (null = wildcard)
- */
-
-import { db } from '@sim/db'
-import { referralCampaigns } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { eq } from 'drizzle-orm'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
-import {
-  badRequestResponse,
-  internalErrorResponse,
-  notFoundResponse,
-  singleResponse,
-} from '@/app/api/v1/admin/responses'
-import { toAdminReferralCampaign } from '@/app/api/v1/admin/types'
-
-const logger = createLogger('AdminReferralCampaignDetailAPI')
-
-interface RouteParams {
-  id: string
-}
-
-export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
-  try {
-    const { id: campaignId } = await context.params
-
-    const [campaign] = await db
-      .select()
-      .from(referralCampaigns)
-      .where(eq(referralCampaigns.id, campaignId))
-      .limit(1)
-
-    if (!campaign) {
-      return notFoundResponse('Campaign')
-    }
-
-    logger.info(`Admin API: Retrieved referral campaign ${campaignId}`)
-
-    return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
-  } catch (error) {
-    logger.error('Admin API: Failed to get referral campaign', { error })
-    return internalErrorResponse('Failed to get referral campaign')
-  }
-})
-
-export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
-  try {
-    const { id: campaignId } = await context.params
-    const body = await request.json()
-
-    const [existing] = await db
-      .select()
-      .from(referralCampaigns)
-      .where(eq(referralCampaigns.id, campaignId))
-      .limit(1)
-
-    if (!existing) {
-      return notFoundResponse('Campaign')
-    }
-
-    const updateData: Record<string, unknown> = { updatedAt: new Date() }
-
-    if (body.name !== undefined) {
-      if (typeof body.name !== 'string' || body.name.trim().length === 0) {
-        return badRequestResponse('name must be a non-empty string')
-      }
-      updateData.name = body.name.trim()
-    }
-
-    if (body.bonusCreditAmount !== undefined) {
-      if (
-        typeof body.bonusCreditAmount !== 'number' ||
-        !Number.isFinite(body.bonusCreditAmount) ||
-        body.bonusCreditAmount <= 0
-      ) {
-        return badRequestResponse('bonusCreditAmount must be a positive number')
-      }
-      updateData.bonusCreditAmount = body.bonusCreditAmount.toString()
-    }
-
-    if (body.isActive !== undefined) {
-      if (typeof body.isActive !== 'boolean') {
-        return badRequestResponse('isActive must be a boolean')
-      }
-      updateData.isActive = body.isActive
-    }
-
-    if (body.code !== undefined) {
-      if (body.code !== null) {
-        if (typeof body.code !== 'string') {
-          return badRequestResponse('code must be a string or null')
-        }
-        if (body.code.trim().length < 6) {
-          return badRequestResponse('code must be at least 6 characters')
-        }
-      }
-      updateData.code = body.code ? body.code.trim().toUpperCase() : null
-    }
-
-    for (const field of ['utmSource', 'utmMedium', 'utmCampaign', 'utmContent'] as const) {
-      if (body[field] !== undefined) {
-        if (body[field] !== null && typeof body[field] !== 'string') {
-          return badRequestResponse(`${field} must be a string or null`)
-        }
-        updateData[field] = body[field] || null
-      }
-    }
-
-    const [updated] = await db
-      .update(referralCampaigns)
-      .set(updateData)
-      .where(eq(referralCampaigns.id, campaignId))
-      .returning()
-
-    logger.info(`Admin API: Updated referral campaign ${campaignId}`, {
-      fields: Object.keys(updateData).filter((k) => k !== 'updatedAt'),
-    })
-
-    return singleResponse(toAdminReferralCampaign(updated, getBaseUrl()))
-  } catch (error) {
-    logger.error('Admin API: Failed to update referral campaign', { error })
-    return internalErrorResponse('Failed to update referral campaign')
-  }
-})
@@ -1,140 +0,0 @@
-/**
- * GET /api/v1/admin/referral-campaigns
- *
- * List referral campaigns with optional filtering and pagination.
- *
- * Query Parameters:
- * - active: string (optional) - Filter by active status ('true' or 'false')
- * - limit: number (default: 50, max: 250)
- * - offset: number (default: 0)
- *
- * POST /api/v1/admin/referral-campaigns
- *
- * Create a new referral campaign.
- *
- * Body:
- * - name: string (required) - Campaign name
- * - bonusCreditAmount: number (required, > 0) - Bonus credits in dollars
- * - code: string | null (optional, min 6 chars, auto-uppercased) - Redeemable code
- * - utmSource: string | null (optional) - UTM source match (null = wildcard)
- * - utmMedium: string | null (optional) - UTM medium match (null = wildcard)
- * - utmCampaign: string | null (optional) - UTM campaign match (null = wildcard)
- * - utmContent: string | null (optional) - UTM content match (null = wildcard)
- */
-
-import { db } from '@sim/db'
-import { referralCampaigns } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { count, eq, type SQL } from 'drizzle-orm'
-import { nanoid } from 'nanoid'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { withAdminAuth } from '@/app/api/v1/admin/middleware'
-import {
-  badRequestResponse,
-  internalErrorResponse,
-  listResponse,
-  singleResponse,
-} from '@/app/api/v1/admin/responses'
-import {
-  type AdminReferralCampaign,
-  createPaginationMeta,
-  parsePaginationParams,
-  toAdminReferralCampaign,
-} from '@/app/api/v1/admin/types'
-
-const logger = createLogger('AdminReferralCampaignsAPI')
-
-export const GET = withAdminAuth(async (request) => {
-  const url = new URL(request.url)
-  const { limit, offset } = parsePaginationParams(url)
-  const activeFilter = url.searchParams.get('active')
-
-  try {
-    const conditions: SQL<unknown>[] = []
-    if (activeFilter === 'true') {
-      conditions.push(eq(referralCampaigns.isActive, true))
-    } else if (activeFilter === 'false') {
-      conditions.push(eq(referralCampaigns.isActive, false))
-    }
-
-    const whereClause = conditions.length > 0 ? conditions[0] : undefined
-    const baseUrl = getBaseUrl()
-
-    const [countResult, campaigns] = await Promise.all([
-      db.select({ total: count() }).from(referralCampaigns).where(whereClause),
-      db
-        .select()
-        .from(referralCampaigns)
-        .where(whereClause)
-        .orderBy(referralCampaigns.createdAt)
-        .limit(limit)
-        .offset(offset),
-    ])
-
-    const total = countResult[0].total
-    const data: AdminReferralCampaign[] = campaigns.map((c) => toAdminReferralCampaign(c, baseUrl))
-    const pagination = createPaginationMeta(total, limit, offset)
-
-    logger.info(`Admin API: Listed ${data.length} referral campaigns (total: ${total})`)
-
-    return listResponse(data, pagination)
-  } catch (error) {
-    logger.error('Admin API: Failed to list referral campaigns', { error })
-    return internalErrorResponse('Failed to list referral campaigns')
-  }
-})
-
-export const POST = withAdminAuth(async (request) => {
-  try {
-    const body = await request.json()
-    const { name, code, utmSource, utmMedium, utmCampaign, utmContent, bonusCreditAmount } = body
-
-    if (!name || typeof name !== 'string') {
-      return badRequestResponse('name is required and must be a string')
-    }
-
-    if (
-      typeof bonusCreditAmount !== 'number' ||
-      !Number.isFinite(bonusCreditAmount) ||
-      bonusCreditAmount <= 0
-    ) {
-      return badRequestResponse('bonusCreditAmount must be a positive number')
-    }
-
-    if (code !== undefined && code !== null) {
-      if (typeof code !== 'string') {
-        return badRequestResponse('code must be a string or null')
-      }
-      if (code.trim().length < 6) {
-        return badRequestResponse('code must be at least 6 characters')
-      }
-    }
-
-    const id = nanoid()
-
-    const [campaign] = await db
-      .insert(referralCampaigns)
-      .values({
-        id,
-        name,
-        code: code ? code.trim().toUpperCase() : null,
-        utmSource: utmSource || null,
-        utmMedium: utmMedium || null,
-        utmCampaign: utmCampaign || null,
-        utmContent: utmContent || null,
-        bonusCreditAmount: bonusCreditAmount.toString(),
-      })
-      .returning()
-
-    logger.info(`Admin API: Created referral campaign ${id}`, {
-      name,
-      code: campaign.code,
-      bonusCreditAmount,
-    })
-
-    return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
-  } catch (error) {
-    logger.error('Admin API: Failed to create referral campaign', { error })
-    return internalErrorResponse('Failed to create referral campaign')
-  }
-})
@@ -8,7 +8,6 @@
 import type {
   member,
   organization,
-  referralCampaigns,
   subscription,
   user,
   userStats,
@@ -32,7 +31,6 @@ export type DbOrganization = InferSelectModel<typeof organization>
 export type DbSubscription = InferSelectModel<typeof subscription>
 export type DbMember = InferSelectModel<typeof member>
 export type DbUserStats = InferSelectModel<typeof userStats>
-export type DbReferralCampaign = InferSelectModel<typeof referralCampaigns>
 
 // =============================================================================
 // Pagination
@@ -648,49 +646,3 @@ export interface AdminDeployResult {
 export interface AdminUndeployResult {
   isDeployed: boolean
 }
-
-// =============================================================================
-// Referral Campaign Types
-// =============================================================================
-
-export interface AdminReferralCampaign {
-  id: string
-  name: string
-  code: string | null
-  utmSource: string | null
-  utmMedium: string | null
-  utmCampaign: string | null
-  utmContent: string | null
-  bonusCreditAmount: string
-  isActive: boolean
-  signupUrl: string | null
-  createdAt: string
-  updatedAt: string
-}
-
-export function toAdminReferralCampaign(
-  dbCampaign: DbReferralCampaign,
-  baseUrl: string
-): AdminReferralCampaign {
-  const utmParams = new URLSearchParams()
-  if (dbCampaign.utmSource) utmParams.set('utm_source', dbCampaign.utmSource)
-  if (dbCampaign.utmMedium) utmParams.set('utm_medium', dbCampaign.utmMedium)
-  if (dbCampaign.utmCampaign) utmParams.set('utm_campaign', dbCampaign.utmCampaign)
-  if (dbCampaign.utmContent) utmParams.set('utm_content', dbCampaign.utmContent)
-  const query = utmParams.toString()
-
-  return {
-    id: dbCampaign.id,
-    name: dbCampaign.name,
-    code: dbCampaign.code,
-    utmSource: dbCampaign.utmSource,
-    utmMedium: dbCampaign.utmMedium,
-    utmCampaign: dbCampaign.utmCampaign,
-    utmContent: dbCampaign.utmContent,
-    bonusCreditAmount: dbCampaign.bonusCreditAmount,
-    isActive: dbCampaign.isActive,
-    signupUrl: query ? `${baseUrl}/signup?${query}` : null,
-    createdAt: dbCampaign.createdAt.toISOString(),
-    updatedAt: dbCampaign.updatedAt.toISOString(),
-  }
-}
@@ -5,7 +5,7 @@
  * @vitest-environment node
  */

-import { loggerMock, setupGlobalFetchMock } from '@sim/testing'
+import { loggerMock } from '@sim/testing'
 import { NextRequest } from 'next/server'
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

@@ -284,7 +289,9 @@ describe('Workflow By ID API Route', () => {
       where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]),
     })

-    setupGlobalFetchMock({ ok: true })
+    global.fetch = vi.fn().mockResolvedValue({
+      ok: true,
+    })

     const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
       method: 'DELETE',
@@ -329,7 +331,9 @@ describe('Workflow By ID API Route', () => {
       where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]),
     })

-    setupGlobalFetchMock({ ok: true })
+    global.fetch = vi.fn().mockResolvedValue({
+      ok: true,
+    })

     const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
       method: 'DELETE',
@@ -131,8 +131,10 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     resumeActiveStream,
   })

-  // Handle scroll management
-  const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage)
+  // Handle scroll management (80px stickiness for copilot)
+  const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage, {
+    stickinessThreshold: 40,
+  })

   // Handle chat history grouping
   const { groupedChats, handleHistoryDropdownOpen: handleHistoryDropdownOpenHook } = useChatHistory(
@@ -1,10 +1,7 @@
 import { useCallback, useState } from 'react'
 import { createLogger } from '@sim/logger'
-import { runPreDeployChecks } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/deploy/hooks/use-predeploy-checks'
 import { useNotificationStore } from '@/stores/notifications'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-import { mergeSubblockState } from '@/stores/workflows/utils'
-import { useWorkflowStore } from '@/stores/workflows/workflow/store'

 const logger = createLogger('useDeployment')

@@ -38,24 +35,6 @@ export function useDeployment({
       return { success: true, shouldOpenModal: true }
     }

-    const { blocks, edges, loops, parallels } = useWorkflowStore.getState()
-    const liveBlocks = mergeSubblockState(blocks, workflowId)
-    const checkResult = runPreDeployChecks({
-      blocks: liveBlocks,
-      edges,
-      loops,
-      parallels,
-      workflowId,
-    })
-    if (!checkResult.passed) {
-      addNotification({
-        level: 'error',
-        message: checkResult.error || 'Pre-deploy validation failed',
-        workflowId,
-      })
-      return { success: false, shouldOpenModal: false }
-    }
-
     setIsDeploying(true)
     try {
       const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
@@ -239,12 +239,7 @@ export const ComboBox = memo(function ComboBox({
   */
  const defaultOptionValue = useMemo(() => {
    if (defaultValue !== undefined) {
-      // Validate that the default value exists in the available (filtered) options
-      const defaultInOptions = evaluatedOptions.find((opt) => getOptionValue(opt) === defaultValue)
-      if (defaultInOptions) {
-        return defaultValue
-      }
-      // Default not available (e.g. provider disabled) — fall through to other fallbacks
+      return defaultValue
    }

    // For model field, default to claude-sonnet-4-5 if available
@@ -4,7 +4,6 @@ import { Button, Combobox } from '@/components/emcn/components'
 import {
   getCanonicalScopesForProvider,
   getProviderIdFromServiceId,
-  getServiceConfigByProviderId,
   OAUTH_PROVIDERS,
   type OAuthProvider,
   type OAuthService,
@@ -27,11 +26,6 @@ const getProviderIcon = (providerName: OAuthProvider) => {
 }

 const getProviderName = (providerName: OAuthProvider) => {
-  const serviceConfig = getServiceConfigByProviderId(providerName)
-  if (serviceConfig) {
-    return serviceConfig.name
-  }
-
   const { baseProvider } = parseProvider(providerName)
   const baseProviderConfig = OAUTH_PROVIDERS[baseProvider]

@@ -60,7 +54,7 @@ export function ToolCredentialSelector({
   onChange,
   provider,
   requiredScopes = [],
-  label,
+  label = 'Select account',
   serviceId,
   disabled = false,
 }: ToolCredentialSelectorProps) {
@@ -70,7 +64,6 @@ export function ToolCredentialSelector({
   const { activeWorkflowId } = useWorkflowRegistry()

   const selectedId = value || ''
-  const effectiveLabel = label || `Select ${getProviderName(provider)} account`

   const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])

@@ -210,7 +203,7 @@ export function ToolCredentialSelector({
       selectedValue={selectedId}
       onChange={handleComboboxChange}
       onOpenChange={handleOpenChange}
-      placeholder={effectiveLabel}
+      placeholder={label}
       disabled={disabled}
       editable={true}
       filterOptions={!isForeign}
@@ -1,186 +0,0 @@
-'use client'
-
-import type React from 'react'
-import { useRef, useState } from 'react'
-import { ArrowLeftRight, ArrowUp } from 'lucide-react'
-import { Button, Input, Label, Tooltip } from '@/components/emcn'
-import { cn } from '@/lib/core/utils/cn'
-import type { WandControlHandlers } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
-
-/**
- * Props for a generic parameter with label component
- */
-export interface ParameterWithLabelProps {
-  paramId: string
-  title: string
-  isRequired: boolean
-  visibility: string
-  wandConfig?: {
-    enabled: boolean
-    prompt?: string
-    placeholder?: string
-  }
-  canonicalToggle?: {
-    mode: 'basic' | 'advanced'
-    disabled?: boolean
-    onToggle?: () => void
-  }
-  disabled: boolean
-  isPreview: boolean
-  children: (wandControlRef: React.MutableRefObject<WandControlHandlers | null>) => React.ReactNode
-}
-
-/**
- * Generic wrapper component for parameters that manages wand state and renders label + input
- */
-export function ParameterWithLabel({
-  paramId,
-  title,
-  isRequired,
-  visibility,
-  wandConfig,
-  canonicalToggle,
-  disabled,
-  isPreview,
-  children,
-}: ParameterWithLabelProps) {
-  const [isSearchActive, setIsSearchActive] = useState(false)
-  const [searchQuery, setSearchQuery] = useState('')
-  const searchInputRef = useRef<HTMLInputElement>(null)
-  const wandControlRef = useRef<WandControlHandlers | null>(null)
-
-  const isWandEnabled = wandConfig?.enabled ?? false
-  const showWand = isWandEnabled && !isPreview && !disabled
-
-  const handleSearchClick = (): void => {
-    setIsSearchActive(true)
-    setTimeout(() => {
-      searchInputRef.current?.focus()
-    }, 0)
-  }
-
-  const handleSearchBlur = (): void => {
-    if (!searchQuery.trim() && !wandControlRef.current?.isWandStreaming) {
-      setIsSearchActive(false)
-    }
-  }
-
-  const handleSearchChange = (value: string): void => {
-    setSearchQuery(value)
-  }
-
-  const handleSearchSubmit = (): void => {
-    if (searchQuery.trim() && wandControlRef.current) {
-      wandControlRef.current.onWandTrigger(searchQuery)
-      setSearchQuery('')
-      setIsSearchActive(false)
-    }
-  }
-
-  const handleSearchCancel = (): void => {
-    setSearchQuery('')
-    setIsSearchActive(false)
-  }
-
-  const isStreaming = wandControlRef.current?.isWandStreaming ?? false
-
-  return (
-    <div key={paramId} className='relative min-w-0 space-y-[6px]'>
-      <div className='flex items-center justify-between gap-[6px] pl-[2px]'>
-        <Label className='flex items-baseline gap-[6px] whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
-          {title}
-          {isRequired && visibility === 'user-only' && <span className='ml-0.5'>*</span>}
-        </Label>
-        <div className='flex min-w-0 flex-1 items-center justify-end gap-[6px]'>
-          {showWand &&
-            (!isSearchActive ? (
-              <Button
-                variant='active'
-                className='-my-1 h-5 px-2 py-0 text-[11px]'
-                onClick={handleSearchClick}
-              >
-                Generate
-              </Button>
-            ) : (
-              <div className='-my-1 flex min-w-[120px] max-w-[280px] flex-1 items-center gap-[4px]'>
-                <Input
-                  ref={searchInputRef}
-                  value={isStreaming ? 'Generating...' : searchQuery}
-                  onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
-                    handleSearchChange(e.target.value)
-                  }
-                  onBlur={(e: React.FocusEvent<HTMLInputElement>) => {
-                    const relatedTarget = e.relatedTarget as HTMLElement | null
-                    if (relatedTarget?.closest('button')) return
-                    handleSearchBlur()
-                  }}
-                  onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => {
-                    if (e.key === 'Enter' && searchQuery.trim() && !isStreaming) {
-                      handleSearchSubmit()
-                    } else if (e.key === 'Escape') {
-                      handleSearchCancel()
-                    }
-                  }}
-                  disabled={isStreaming}
-                  className={cn(
-                    'h-5 min-w-[80px] flex-1 text-[11px]',
-                    isStreaming && 'text-muted-foreground'
-                  )}
-                  placeholder='Generate with AI...'
-                />
-                <Button
-                  variant='tertiary'
-                  disabled={!searchQuery.trim() || isStreaming}
-                  onMouseDown={(e: React.MouseEvent) => {
-                    e.preventDefault()
-                    e.stopPropagation()
-                  }}
-                  onClick={(e: React.MouseEvent) => {
-                    e.stopPropagation()
-                    handleSearchSubmit()
-                  }}
-                  className='h-[20px] w-[20px] flex-shrink-0 p-0'
-                >
-                  <ArrowUp className='h-[12px] w-[12px]' />
-                </Button>
-              </div>
-            ))}
-          {canonicalToggle && !isPreview && (
-            <Tooltip.Root>
-              <Tooltip.Trigger asChild>
-                <button
-                  type='button'
-                  className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
-                  onClick={canonicalToggle.onToggle}
-                  disabled={canonicalToggle.disabled || disabled}
-                  aria-label={
-                    canonicalToggle.mode === 'advanced'
-                      ? 'Switch to selector'
-                      : 'Switch to manual ID'
-                  }
-                >
-                  <ArrowLeftRight
-                    className={cn(
-                      '!h-[12px] !w-[12px]',
-                      canonicalToggle.mode === 'advanced'
-                        ? 'text-[var(--text-primary)]'
-                        : 'text-[var(--text-secondary)]'
-                    )}
-                  />
-                </button>
-              </Tooltip.Trigger>
-              <Tooltip.Content side='top'>
-                <p>
-                  {canonicalToggle.mode === 'advanced'
-                    ? 'Switch to selector'
-                    : 'Switch to manual ID'}
-                </p>
-              </Tooltip.Content>
-            </Tooltip.Root>
-          )}
-        </div>
-      </div>
-      <div className='relative w-full min-w-0'>{children(wandControlRef)}</div>
-    </div>
-  )
-}
@@ -1,114 +0,0 @@
-'use client'
-
-import { useEffect, useRef } from 'react'
-import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
-import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
-import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
-
-interface ToolSubBlockRendererProps {
-  blockId: string
-  subBlockId: string
-  toolIndex: number
-  subBlock: BlockSubBlockConfig
-  effectiveParamId: string
-  toolParams: Record<string, string> | undefined
-  onParamChange: (toolIndex: number, paramId: string, value: string) => void
-  disabled: boolean
-  canonicalToggle?: {
-    mode: 'basic' | 'advanced'
-    disabled?: boolean
-    onToggle?: () => void
-  }
-}
-
-/**
- * SubBlock types whose store values are objects/arrays/non-strings.
- * tool.params stores strings (via JSON.stringify), so when syncing
- * back to the store we parse them to restore the native shape.
- */
-const OBJECT_SUBBLOCK_TYPES = new Set(['file-upload', 'table', 'grouped-checkbox-list'])
-
-/**
- * Bridges the subblock store with StoredTool.params via a synthetic store key,
- * then delegates all rendering to SubBlock for full parity.
- */
-export function ToolSubBlockRenderer({
-  blockId,
-  subBlockId,
-  toolIndex,
-  subBlock,
-  effectiveParamId,
-  toolParams,
-  onParamChange,
-  disabled,
-  canonicalToggle,
-}: ToolSubBlockRendererProps) {
-  const syntheticId = `${subBlockId}-tool-${toolIndex}-${effectiveParamId}`
-  const [storeValue, setStoreValue] = useSubBlockValue(blockId, syntheticId)
-
-  const toolParamValue = toolParams?.[effectiveParamId] ?? ''
-  const isObjectType = OBJECT_SUBBLOCK_TYPES.has(subBlock.type)
-
-  const lastPushedToStoreRef = useRef<string | null>(null)
-  const lastPushedToParamsRef = useRef<string | null>(null)
-
-  useEffect(() => {
-    if (!toolParamValue && lastPushedToStoreRef.current === null) {
-      lastPushedToStoreRef.current = toolParamValue
-      lastPushedToParamsRef.current = toolParamValue
-      return
-    }
-    if (toolParamValue !== lastPushedToStoreRef.current) {
-      lastPushedToStoreRef.current = toolParamValue
-      lastPushedToParamsRef.current = toolParamValue
-
-      if (isObjectType && typeof toolParamValue === 'string' && toolParamValue) {
-        try {
-          const parsed = JSON.parse(toolParamValue)
-          if (typeof parsed === 'object' && parsed !== null) {
-            setStoreValue(parsed)
-            return
-          }
-        } catch {
-          // Not valid JSON — fall through to set as string
-        }
-      }
-      setStoreValue(toolParamValue)
-    }
-  }, [toolParamValue, setStoreValue, isObjectType])
-
-  useEffect(() => {
-    if (storeValue == null && lastPushedToParamsRef.current === null) return
-    const stringValue =
-      storeValue == null
-        ? ''
-        : typeof storeValue === 'string'
-          ? storeValue
-          : JSON.stringify(storeValue)
-    if (stringValue !== lastPushedToParamsRef.current) {
-      lastPushedToParamsRef.current = stringValue
-      lastPushedToStoreRef.current = stringValue
-      onParamChange(toolIndex, effectiveParamId, stringValue)
-    }
-  }, [storeValue, toolIndex, effectiveParamId, onParamChange])
-
-  const visibility = subBlock.paramVisibility ?? 'user-or-llm'
-  const isOptionalForUser = visibility !== 'user-only'
-
-  const config = {
-    ...subBlock,
-    id: syntheticId,
-    ...(isOptionalForUser && { required: false }),
-  }
-
-  return (
-    <SubBlock
-      blockId={blockId}
-      config={config}
-      isPreview={false}
-      disabled={disabled}
-      canonicalToggle={canonicalToggle}
-      dependencyContext={toolParams}
-    />
-  )
-}
@@ -2,12 +2,37 @@
  * @vitest-environment node
  */
 import { describe, expect, it } from 'vitest'
-import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
-import {
-  isCustomToolAlreadySelected,
-  isMcpToolAlreadySelected,
-  isWorkflowAlreadySelected,
-} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/utils'
+
+interface StoredTool {
+  type: string
+  title?: string
+  toolId?: string
+  params?: Record<string, string>
+  customToolId?: string
+  schema?: any
+  code?: string
+  operation?: string
+  usageControl?: 'auto' | 'force' | 'none'
+}
+
+const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
+  return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
+}
+
+const isCustomToolAlreadySelected = (
+  selectedTools: StoredTool[],
+  customToolId: string
+): boolean => {
+  return selectedTools.some(
+    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
+  )
+}
+
+const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
+  return selectedTools.some(
+    (tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
+  )
+}

 describe('isMcpToolAlreadySelected', () => {
   describe('basic functionality', () => {
File diff suppressed because it is too large
@@ -1,31 +0,0 @@
-/**
- * Represents a tool selected and configured in the workflow
- *
- * @remarks
- * For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded.
- * Everything else (title, schema, code) is loaded dynamically from the database.
- * Legacy custom tools with inline schema/code are still supported for backwards compatibility.
- */
-export interface StoredTool {
-  /** Block type identifier */
-  type: string
-  /** Display title for the tool (optional for new custom tool format) */
-  title?: string
-  /** Direct tool ID for execution (optional for new custom tool format) */
-  toolId?: string
-  /** Parameter values configured by the user (optional for new custom tool format) */
-  params?: Record<string, string>
-  /** Whether the tool details are expanded in UI */
-  isExpanded?: boolean
-  /** Database ID for custom tools (new format - reference only) */
-  customToolId?: string
-  /** Tool schema for custom tools (legacy format - inline JSON schema) */
-  // eslint-disable-next-line @typescript-eslint/no-explicit-any
-  schema?: Record<string, any>
-  /** Implementation code for custom tools (legacy format - inline) */
-  code?: string
-  /** Selected operation for multi-operation tools */
-  operation?: string
-  /** Tool usage control mode for LLM */
-  usageControl?: 'auto' | 'force' | 'none'
-}
@@ -1,32 +0,0 @@
-import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
-
-/**
- * Checks if an MCP tool is already selected.
- */
-export function isMcpToolAlreadySelected(selectedTools: StoredTool[], mcpToolId: string): boolean {
-  return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
-}
-
-/**
- * Checks if a custom tool is already selected.
- */
-export function isCustomToolAlreadySelected(
-  selectedTools: StoredTool[],
-  customToolId: string
-): boolean {
-  return selectedTools.some(
-    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
-  )
-}
-
-/**
- * Checks if a workflow is already selected.
- */
-export function isWorkflowAlreadySelected(
-  selectedTools: StoredTool[],
-  workflowId: string
-): boolean {
-  return selectedTools.some(
-    (tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
-  )
-}
@@ -3,6 +3,7 @@ import { isEqual } from 'lodash'
 import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
 import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
 import { cn } from '@/lib/core/utils/cn'
+import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
 import {
   CheckboxList,
   Code,
@@ -68,15 +69,13 @@ interface SubBlockProps {
   isPreview?: boolean
   subBlockValues?: Record<string, any>
   disabled?: boolean
+  fieldDiffStatus?: FieldDiffStatus
   allowExpandInPreview?: boolean
   canonicalToggle?: {
     mode: 'basic' | 'advanced'
     disabled?: boolean
     onToggle?: () => void
   }
-  labelSuffix?: React.ReactNode
-  /** Provides sibling values for dependency resolution in non-preview contexts (e.g. tool-input) */
-  dependencyContext?: Record<string, unknown>
 }

 /**
@@ -163,14 +162,16 @@
 /**
  * Renders the label with optional validation and description tooltips.
  *
+ * @remarks
+ * Handles JSON validation indicators for code blocks and required field markers.
+ * Includes inline AI generate button when wand is enabled.
+ *
  * @param config - The sub-block configuration defining the label content
  * @param isValidJson - Whether the JSON content is valid (for code blocks)
  * @param subBlockValues - Current values of all subblocks for evaluating conditional requirements
- * @param wandState - State and handlers for the inline AI generate feature
- * @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
- * @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled (includes dependsOn gating)
- * @param copyState - State and handler for the copy-to-clipboard button
- * @param labelSuffix - Additional content rendered after the label text
+ * @param wandState - Optional state and handlers for the AI wand feature
+ * @param canonicalToggle - Optional canonical toggle metadata and handlers
+ * @param canonicalToggleIsDisabled - Whether the canonical toggle is disabled
  * @returns The label JSX element, or `null` for switch types or when no title is defined
  */
 const renderLabel = (
@@ -201,8 +202,7 @@
     showCopyButton: boolean
     copied: boolean
     onCopy: () => void
-  },
-  labelSuffix?: React.ReactNode
+  }
 ): JSX.Element | null => {
   if (config.type === 'switch') return null
   if (!config.title) return null
@@ -215,10 +215,9 @@

   return (
     <div className='flex items-center justify-between gap-[6px] pl-[2px]'>
-      <Label className='flex items-baseline gap-[6px] whitespace-nowrap'>
+      <Label className='flex items-center gap-[6px] whitespace-nowrap'>
         {config.title}
         {required && <span className='ml-0.5'>*</span>}
-        {labelSuffix}
         {config.type === 'code' &&
           config.language === 'json' &&
           !isValidJson &&
@@ -384,25 +383,28 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
     prevProps.isPreview === nextProps.isPreview &&
     valueEqual &&
     prevProps.disabled === nextProps.disabled &&
+    prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
     prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    canonicalToggleEqual &&
-    prevProps.labelSuffix === nextProps.labelSuffix &&
-    prevProps.dependencyContext === nextProps.dependencyContext
+    canonicalToggleEqual
   )
 }

 /**
  * Renders a single workflow sub-block input based on config.type.
  *
+ * @remarks
+ * Supports multiple input types including short-input, long-input, dropdown,
+ * combobox, slider, table, code, switch, tool-input, and many more.
+ * Handles preview mode, disabled states, and AI wand generation.
+ *
  * @param blockId - The parent block identifier
  * @param config - Configuration defining the input type and properties
  * @param isPreview - Whether to render in preview mode
  * @param subBlockValues - Current values of all subblocks
  * @param disabled - Whether the input is disabled
+ * @param fieldDiffStatus - Optional diff status for visual indicators
  * @param allowExpandInPreview - Whether to allow expanding in preview mode
- * @param canonicalToggle - Metadata and handlers for the basic/advanced mode toggle
- * @param labelSuffix - Additional content rendered after the label text
- * @param dependencyContext - Sibling values for dependency resolution in non-preview contexts (e.g. tool-input)
+ * @returns The rendered sub-block input component
  */
 function SubBlockComponent({
   blockId,
@@ -410,10 +412,9 @@ function SubBlockComponent({
   isPreview = false,
   subBlockValues,
   disabled = false,
+  fieldDiffStatus,
   allowExpandInPreview,
   canonicalToggle,
-  labelSuffix,
-  dependencyContext,
 }: SubBlockProps): JSX.Element {
   const [isValidJson, setIsValidJson] = useState(true)
   const [isSearchActive, setIsSearchActive] = useState(false)
@@ -422,6 +423,7 @@
   const searchInputRef = useRef<HTMLInputElement>(null)
   const wandControlRef = useRef<WandControlHandlers | null>(null)

+  // Use webhook management hook when config has useWebhookUrl enabled
   const webhookManagement = useWebhookManagement({
     blockId,
     triggerId: undefined,
@@ -508,12 +510,10 @@
     | null
     | undefined

-  const contextValues = dependencyContext ?? (isPreview ? subBlockValues : undefined)
-
   const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, {
     disabled,
     isPreview,
-    previewContextValues: contextValues,
+    previewContextValues: isPreview ? subBlockValues : undefined,
   })

   const isDisabled = gatedDisabled
@@ -797,7 +797,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -809,7 +809,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -821,7 +821,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -833,7 +833,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -845,7 +845,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -868,7 +868,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue as any}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -880,7 +880,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue as any}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -892,7 +892,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue as any}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -917,7 +917,7 @@
           isPreview={isPreview}
           previewValue={previewValue as any}
           disabled={isDisabled}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -953,7 +953,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -987,7 +987,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue as any}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -999,7 +999,7 @@
           disabled={isDisabled}
           isPreview={isPreview}
           previewValue={previewValue}
-          previewContextValues={contextValues}
+          previewContextValues={isPreview ? subBlockValues : undefined}
         />
       )

@@ -1059,8 +1059,7 @@
           showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
           copied,
           onCopy: handleCopy,
-        },
-        labelSuffix
+        }
       )}
       {renderInput()}
     </div>
@@ -571,6 +571,7 @@ export function Editor() {
               isPreview={false}
               subBlockValues={subBlockState}
               disabled={!canEditBlock}
+              fieldDiffStatus={undefined}
               allowExpandInPreview={false}
               canonicalToggle={
                 isCanonicalSwap && canonicalMode && canonicalId
@@ -634,6 +635,7 @@ export function Editor() {
               isPreview={false}
               subBlockValues={subBlockState}
               disabled={!canEditBlock}
+              fieldDiffStatus={undefined}
               allowExpandInPreview={false}
             />
             {index < advancedOnlySubBlocks.length - 1 && (
@@ -16,7 +16,7 @@ interface UseScrollManagementOptions {
  /**
   * Distance from bottom (in pixels) within which auto-scroll stays active
   * @remarks Lower values = less sticky (user can scroll away easier)
-   * @defaultValue 30
+   * @defaultValue 100
   */
  stickinessThreshold?: number
 }
@@ -41,7 +41,7 @@ export function useScrollManagement(
  const lastScrollTopRef = useRef(0)

  const scrollBehavior = options?.behavior ?? 'smooth'
-  const stickinessThreshold = options?.stickinessThreshold ?? 30
+  const stickinessThreshold = options?.stickinessThreshold ?? 100

  /** Scrolls the container to the bottom */
  const scrollToBottom = useCallback(() => {
@@ -1,4 +1,3 @@
 export { CancelSubscription } from './cancel-subscription'
 export { CreditBalance } from './credit-balance'
 export { PlanCard, type PlanCardProps, type PlanFeature } from './plan-card'
-export { ReferralCode } from './referral-code'
@@ -1 +0,0 @@
-export { ReferralCode } from './referral-code'
@@ -1,103 +0,0 @@
-'use client'
-
-import { useState } from 'react'
-import { createLogger } from '@sim/logger'
-import { Button, Input, Label } from '@/components/emcn'
-
-const logger = createLogger('ReferralCode')
-
-interface ReferralCodeProps {
-  onRedeemComplete?: () => void
-}
-
-/**
- * Inline referral/promo code entry field with redeem button.
- * One-time use per account — shows success or "already redeemed" state.
- */
-export function ReferralCode({ onRedeemComplete }: ReferralCodeProps) {
-  const [code, setCode] = useState('')
-  const [isRedeeming, setIsRedeeming] = useState(false)
-  const [error, setError] = useState<string | null>(null)
-  const [success, setSuccess] = useState<{ bonusAmount: number } | null>(null)
-
-  const handleRedeem = async () => {
-    const trimmed = code.trim()
-    if (!trimmed || isRedeeming) return
-
-    setIsRedeeming(true)
-    setError(null)
-
-    try {
-      const response = await fetch('/api/referral-code/redeem', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ code: trimmed }),
-      })
-
-      const data = await response.json()
-
-      if (!response.ok) {
-        throw new Error(data.error || 'Failed to redeem code')
-      }
-
-      if (data.redeemed) {
-        setSuccess({ bonusAmount: data.bonusAmount })
-        setCode('')
-        onRedeemComplete?.()
-      } else {
-        setError(data.error || 'Code could not be redeemed')
-      }
-    } catch (err) {
-      logger.error('Referral code redemption failed', { error: err })
-      setError(err instanceof Error ? err.message : 'Failed to redeem code')
-    } finally {
-      setIsRedeeming(false)
-    }
-  }
-
-  if (success) {
-    return (
-      <div className='flex items-center justify-between'>
-        <Label>Referral Code</Label>
-        <span className='text-[12px] text-[var(--text-secondary)]'>
-          +${success.bonusAmount} credits applied
-        </span>
-      </div>
-    )
-  }
-
-  return (
-    <div className='flex flex-col'>
-      <div className='flex items-center justify-between gap-[12px]'>
-        <Label className='shrink-0'>Referral Code</Label>
-        <div className='flex items-center gap-[8px]'>
-          <Input
-            type='text'
-            value={code}
-            onChange={(e) => {
-              setCode(e.target.value)
-              setError(null)
-            }}
-            onKeyDown={(e) => {
-              if (e.key === 'Enter') handleRedeem()
-            }}
-            placeholder='Enter code'
-            className='h-[32px] w-[140px] text-[12px]'
-            disabled={isRedeeming}
-          />
-          <Button
-            variant='active'
-            className='h-[32px] shrink-0 rounded-[6px] text-[12px]'
-            onClick={handleRedeem}
-            disabled={isRedeeming || !code.trim()}
-          >
-            {isRedeeming ? 'Redeeming...' : 'Redeem'}
-          </Button>
-        </div>
-      </div>
-      <div className='mt-[4px] min-h-[18px] text-right'>
-        {error && <span className='text-[11px] text-[var(--text-error)]'>{error}</span>}
-      </div>
-    </div>
-  )
-}
@@ -17,7 +17,6 @@ import {
   CancelSubscription,
   CreditBalance,
   PlanCard,
-  ReferralCode,
 } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components'
 import {
   ENTERPRISE_PLAN_FEATURES,
@@ -550,10 +549,6 @@ export function Subscription() {
             />
           )}

-          {!subscription.isEnterprise && (
-            <ReferralCode onRedeemComplete={() => refetchSubscription()} />
-          )}
-
          {/* Next Billing Date - hidden from team members */}
          {subscription.isPaid &&
            subscriptionData?.data?.periodEnd &&
@@ -4,14 +4,12 @@ import { useEffect } from 'react'
 import { createLogger } from '@sim/logger'
 import { useRouter } from 'next/navigation'
 import { useSession } from '@/lib/auth/auth-client'
-import { useReferralAttribution } from '@/hooks/use-referral-attribution'

 const logger = createLogger('WorkspacePage')

 export default function WorkspacePage() {
   const router = useRouter()
   const { data: session, isPending } = useSession()
-  useReferralAttribution()

   useEffect(() => {
     const redirectToFirstWorkspace = async () => {
@@ -2,10 +2,11 @@ import { createLogger } from '@sim/logger'
 import { AgentIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
 import { AuthMode } from '@/blocks/types'
-import { getApiKeyCondition, getModelOptions } from '@/blocks/utils'
+import { getApiKeyCondition } from '@/blocks/utils'
 import {
   getBaseModelProviders,
   getMaxTemperature,
+  getProviderIcon,
   getReasoningEffortValuesForModel,
   getThinkingLevelsForModel,
   getVerbosityValuesForModel,
@@ -17,6 +18,7 @@ import {
   providers,
   supportsTemperature,
 } from '@/providers/utils'
+import { useProvidersStore } from '@/stores/providers'
 import type { ToolResponse } from '@/tools/types'

 const logger = createLogger('AgentBlock')
@@ -119,7 +121,21 @@ Return ONLY the JSON array.`,
      placeholder: 'Type or select a model...',
      required: true,
      defaultValue: 'claude-sonnet-4-5',
-      options: getModelOptions,
+      options: () => {
+        const providersState = useProvidersStore.getState()
+        const baseModels = providersState.providers.base.models
+        const ollamaModels = providersState.providers.ollama.models
+        const vllmModels = providersState.providers.vllm.models
+        const openrouterModels = providersState.providers.openrouter.models
+        const allModels = Array.from(
+          new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
+        )
+
+        return allModels.map((model) => {
+          const icon = getProviderIcon(model)
+          return { label: model, id: model, ...(icon && { icon }) }
+        })
+      },
    },
    {
      id: 'vertexCredential',
@@ -1,13 +1,10 @@
 import { createLogger } from '@sim/logger'
 import { ChartBarIcon } from '@/components/icons'
 import type { BlockConfig, ParamType } from '@/blocks/types'
-import {
-  getModelOptions,
-  getProviderCredentialSubBlocks,
-  PROVIDER_CREDENTIAL_INPUTS,
-} from '@/blocks/utils'
+import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
 import type { ProviderId } from '@/providers/types'
-import { getBaseModelProviders } from '@/providers/utils'
+import { getBaseModelProviders, getProviderIcon } from '@/providers/utils'
+import { useProvidersStore } from '@/stores/providers/store'
 import type { ToolResponse } from '@/tools/types'

 const logger = createLogger('EvaluatorBlock')
@@ -178,7 +175,21 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
      placeholder: 'Type or select a model...',
      required: true,
      defaultValue: 'claude-sonnet-4-5',
-      options: getModelOptions,
+      options: () => {
+        const providersState = useProvidersStore.getState()
+        const baseModels = providersState.providers.base.models
+        const ollamaModels = providersState.providers.ollama.models
+        const vllmModels = providersState.providers.vllm.models
+        const openrouterModels = providersState.providers.openrouter.models
+        const allModels = Array.from(
+          new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
+        )
+
+        return allModels.map((model) => {
+          const icon = getProviderIcon(model)
+          return { label: model, id: model, ...(icon && { icon }) }
+        })
+      },
    },
    ...getProviderCredentialSubBlocks(),
    {
@@ -1,201 +0,0 @@
-import { GoogleBooksIcon } from '@/components/icons'
-import type { BlockConfig } from '@/blocks/types'
-import { AuthMode } from '@/blocks/types'
-
-export const GoogleBooksBlock: BlockConfig = {
-type: 'google_books',
-name: 'Google Books',
-description: 'Search and retrieve book information',
-authMode: AuthMode.ApiKey,
-longDescription:
-'Search for books using the Google Books API. Find volumes by title, author, ISBN, or keywords, and retrieve detailed information about specific books including descriptions, ratings, and publication details.',
-docsLink: 'https://docs.sim.ai/tools/google_books',
-category: 'tools',
-bgColor: '#E0E0E0',
-icon: GoogleBooksIcon,
-
-subBlocks: [
-{
-id: 'operation',
-title: 'Operation',
-type: 'dropdown',
-options: [
-{ label: 'Search Volumes', id: 'volume_search' },
-{ label: 'Get Volume Details', id: 'volume_details' },
-],
-value: () => 'volume_search',
-},
-{
-id: 'apiKey',
-title: 'API Key',
-type: 'short-input',
-password: true,
-placeholder: 'Enter your Google Books API key',
-required: true,
-},
-{
-id: 'query',
-title: 'Search Query',
-type: 'short-input',
-placeholder: 'e.g., intitle:harry potter inauthor:rowling',
-condition: { field: 'operation', value: 'volume_search' },
-required: { field: 'operation', value: 'volume_search' },
-},
-{
-id: 'filter',
-title: 'Filter',
-type: 'dropdown',
-options: [
-{ label: 'None', id: '' },
-{ label: 'Partial Preview', id: 'partial' },
-{ label: 'Full Preview', id: 'full' },
-{ label: 'Free eBooks', id: 'free-ebooks' },
-{ label: 'Paid eBooks', id: 'paid-ebooks' },
-{ label: 'All eBooks', id: 'ebooks' },
-],
-condition: { field: 'operation', value: 'volume_search' },
-mode: 'advanced',
-},
-{
-id: 'printType',
-title: 'Print Type',
-type: 'dropdown',
-options: [
-{ label: 'All', id: 'all' },
-{ label: 'Books', id: 'books' },
-{ label: 'Magazines', id: 'magazines' },
-],
-value: () => 'all',
-condition: { field: 'operation', value: 'volume_search' },
-mode: 'advanced',
-},
-{
-id: 'orderBy',
-title: 'Order By',
-type: 'dropdown',
-options: [
-{ label: 'Relevance', id: 'relevance' },
-{ label: 'Newest', id: 'newest' },
-],
-value: () => 'relevance',
-condition: { field: 'operation', value: 'volume_search' },
-mode: 'advanced',
-},
-{
-id: 'maxResults',
-title: 'Max Results',
-type: 'short-input',
-placeholder: 'Number of results (1-40)',
-condition: { field: 'operation', value: 'volume_search' },
-mode: 'advanced',
-},
-{
-id: 'startIndex',
-title: 'Start Index',
-type: 'short-input',
-placeholder: 'Starting index for pagination',
-condition: { field: 'operation', value: 'volume_search' },
-mode: 'advanced',
-},
-{
-id: 'langRestrict',
-title: 'Language',
-type: 'short-input',
-placeholder: 'ISO 639-1 code (e.g., en, es, fr)',
-condition: { field: 'operation', value: 'volume_search' },
-mode: 'advanced',
-},
-{
-id: 'volumeId',
-title: 'Volume ID',
-type: 'short-input',
-placeholder: 'Google Books volume ID',
-condition: { field: 'operation', value: 'volume_details' },
-required: { field: 'operation', value: 'volume_details' },
-},
-{
-id: 'projection',
-title: 'Projection',
-type: 'dropdown',
-options: [
-{ label: 'Full', id: 'full' },
-{ label: 'Lite', id: 'lite' },
-],
-value: () => 'full',
-condition: { field: 'operation', value: 'volume_details' },
-mode: 'advanced',
-},
-],
-
-tools: {
-access: ['google_books_volume_search', 'google_books_volume_details'],
-config: {
-tool: (params) => `google_books_${params.operation}`,
-params: (params) => {
-const { operation, ...rest } = params
-
-let maxResults: number | undefined
-if (params.maxResults) {
-maxResults = Number.parseInt(params.maxResults, 10)
-if (Number.isNaN(maxResults)) {
-maxResults = undefined
-}
-}
-
-let startIndex: number | undefined
-if (params.startIndex) {
-startIndex = Number.parseInt(params.startIndex, 10)
-if (Number.isNaN(startIndex)) {
-startIndex = undefined
-}
-}
-
-return {
-...rest,
-maxResults,
-startIndex,
-filter: params.filter || undefined,
-printType: params.printType || undefined,
-orderBy: params.orderBy || undefined,
-projection: params.projection || undefined,
-}
-},
-},
-},
-
-inputs: {
-operation: { type: 'string', description: 'Operation to perform' },
-apiKey: { type: 'string', description: 'Google Books API key' },
-query: { type: 'string', description: 'Search query' },
-filter: { type: 'string', description: 'Filter by availability' },
-printType: { type: 'string', description: 'Print type filter' },
-orderBy: { type: 'string', description: 'Sort order' },
-maxResults: { type: 'string', description: 'Maximum number of results' },
-startIndex: { type: 'string', description: 'Starting index for pagination' },
-langRestrict: { type: 'string', description: 'Language restriction' },
-volumeId: { type: 'string', description: 'Volume ID for details' },
-projection: { type: 'string', description: 'Projection level' },
-},
-
-outputs: {
-totalItems: { type: 'number', description: 'Total number of matching results' },
-volumes: { type: 'json', description: 'List of matching volumes' },
-id: { type: 'string', description: 'Volume ID' },
-title: { type: 'string', description: 'Book title' },
-subtitle: { type: 'string', description: 'Book subtitle' },
-authors: { type: 'json', description: 'List of authors' },
-publisher: { type: 'string', description: 'Publisher name' },
-publishedDate: { type: 'string', description: 'Publication date' },
-description: { type: 'string', description: 'Book description' },
-pageCount: { type: 'number', description: 'Number of pages' },
-categories: { type: 'json', description: 'Book categories' },
-averageRating: { type: 'number', description: 'Average rating (1-5)' },
-ratingsCount: { type: 'number', description: 'Number of ratings' },
-language: { type: 'string', description: 'Language code' },
-previewLink: { type: 'string', description: 'Link to preview on Google Books' },
-infoLink: { type: 'string', description: 'Link to info page' },
-thumbnailUrl: { type: 'string', description: 'Book cover thumbnail URL' },
-isbn10: { type: 'string', description: 'ISBN-10 identifier' },
-isbn13: { type: 'string', description: 'ISBN-13 identifier' },
-},
-}
@@ -1,10 +1,8 @@
 import { ShieldCheckIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
-import {
-getModelOptions,
-getProviderCredentialSubBlocks,
-PROVIDER_CREDENTIAL_INPUTS,
-} from '@/blocks/utils'
+import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
+import { getProviderIcon } from '@/providers/utils'
+import { useProvidersStore } from '@/stores/providers/store'
 import type { ToolResponse } from '@/tools/types'
 
 export interface GuardrailsResponse extends ToolResponse {
@@ -113,7 +111,21 @@ Return ONLY the regex pattern - no explanations, no quotes, no forward slashes,
 type: 'combobox',
 placeholder: 'Type or select a model...',
 required: true,
-options: getModelOptions,
+options: () => {
+const providersState = useProvidersStore.getState()
+const baseModels = providersState.providers.base.models
+const ollamaModels = providersState.providers.ollama.models
+const vllmModels = providersState.providers.vllm.models
+const openrouterModels = providersState.providers.openrouter.models
+const allModels = Array.from(
+new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
+)
+
+return allModels.map((model) => {
+const icon = getProviderIcon(model)
+return { label: model, id: model, ...(icon && { icon }) }
+})
+},
 condition: {
 field: 'validationType',
 value: ['hallucination'],
@@ -1,12 +1,9 @@
 import { ConnectIcon } from '@/components/icons'
 import { AuthMode, type BlockConfig } from '@/blocks/types'
-import {
-getModelOptions,
-getProviderCredentialSubBlocks,
-PROVIDER_CREDENTIAL_INPUTS,
-} from '@/blocks/utils'
+import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
 import type { ProviderId } from '@/providers/types'
-import { getBaseModelProviders } from '@/providers/utils'
+import { getBaseModelProviders, getProviderIcon } from '@/providers/utils'
+import { useProvidersStore } from '@/stores/providers'
 import type { ToolResponse } from '@/tools/types'
 
 interface RouterResponse extends ToolResponse {
@@ -137,6 +134,25 @@ Respond with a JSON object containing:
 - reasoning: A brief explanation (1-2 sentences) of why you chose this route`
 }
 
+/**
+ * Helper to get model options for both router versions.
+ */
+const getModelOptions = () => {
+const providersState = useProvidersStore.getState()
+const baseModels = providersState.providers.base.models
+const ollamaModels = providersState.providers.ollama.models
+const vllmModels = providersState.providers.vllm.models
+const openrouterModels = providersState.providers.openrouter.models
+const allModels = Array.from(
+new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
+)
+
+return allModels.map((model) => {
+const icon = getProviderIcon(model)
+return { label: model, id: model, ...(icon && { icon }) }
+})
+}
+
 /**
  * Legacy Router Block (block-based routing).
  * Hidden from toolbar but still supported for existing workflows.
@@ -58,16 +58,6 @@ export const S3Block: BlockConfig<S3Response> = {
 },
 required: true,
 },
-{
-id: 'getObjectRegion',
-title: 'AWS Region',
-type: 'short-input',
-placeholder: 'Used when S3 URL does not include region',
-condition: {
-field: 'operation',
-value: ['get_object'],
-},
-},
 {
 id: 'bucketName',
 title: 'Bucket Name',
@@ -301,11 +291,34 @@ export const S3Block: BlockConfig<S3Response> = {
 if (!params.s3Uri) {
 throw new Error('S3 Object URL is required')
 }
-return {
-accessKeyId: params.accessKeyId,
-secretAccessKey: params.secretAccessKey,
-region: params.getObjectRegion || params.region,
-s3Uri: params.s3Uri,
+// Parse S3 URI for get_object
+try {
+const url = new URL(params.s3Uri)
+const hostname = url.hostname
+const bucketName = hostname.split('.')[0]
+const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/)
+const region = regionMatch ? regionMatch[1] : params.region
+const objectKey = url.pathname.startsWith('/')
+? url.pathname.substring(1)
+: url.pathname
+
+if (!bucketName || !objectKey) {
+throw new Error('Could not parse S3 URL')
+}
+
+return {
+accessKeyId: params.accessKeyId,
+secretAccessKey: params.secretAccessKey,
+region,
+bucketName,
+objectKey,
+s3Uri: params.s3Uri,
+}
+} catch (_error) {
+throw new Error(
+'Invalid S3 Object URL format. Expected: https://bucket-name.s3.region.amazonaws.com/path/to/file'
+)
 }
 }
 
@@ -388,7 +401,6 @@ export const S3Block: BlockConfig<S3Response> = {
 acl: { type: 'string', description: 'Access control list' },
 // Download inputs
 s3Uri: { type: 'string', description: 'S3 object URL' },
-getObjectRegion: { type: 'string', description: 'Optional AWS region override for downloads' },
 // List inputs
 prefix: { type: 'string', description: 'Prefix filter' },
 maxKeys: { type: 'number', description: 'Maximum results' },
@@ -1,10 +1,8 @@
 import { TranslateIcon } from '@/components/icons'
 import { AuthMode, type BlockConfig } from '@/blocks/types'
-import {
-getModelOptions,
-getProviderCredentialSubBlocks,
-PROVIDER_CREDENTIAL_INPUTS,
-} from '@/blocks/utils'
+import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
+import { getProviderIcon } from '@/providers/utils'
+import { useProvidersStore } from '@/stores/providers/store'
 
 const getTranslationPrompt = (targetLanguage: string) =>
 `Translate the following text into ${targetLanguage || 'English'}. Output ONLY the translated text with no additional commentary, explanations, or notes.`
@@ -40,7 +38,18 @@ export const TranslateBlock: BlockConfig = {
 type: 'combobox',
 placeholder: 'Type or select a model...',
 required: true,
-options: getModelOptions,
+options: () => {
+const providersState = useProvidersStore.getState()
+const baseModels = providersState.providers.base.models
+const ollamaModels = providersState.providers.ollama.models
+const openrouterModels = providersState.providers.openrouter.models
+const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
+
+return allModels.map((model) => {
+const icon = getProviderIcon(model)
+return { label: model, id: model, ...(icon && { icon }) }
+})
+},
 },
 ...getProviderCredentialSubBlocks(),
 {
@@ -39,7 +39,6 @@ import { GitHubBlock, GitHubV2Block } from '@/blocks/blocks/github'
 import { GitLabBlock } from '@/blocks/blocks/gitlab'
 import { GmailBlock, GmailV2Block } from '@/blocks/blocks/gmail'
 import { GoogleSearchBlock } from '@/blocks/blocks/google'
-import { GoogleBooksBlock } from '@/blocks/blocks/google_books'
 import { GoogleCalendarBlock, GoogleCalendarV2Block } from '@/blocks/blocks/google_calendar'
 import { GoogleDocsBlock } from '@/blocks/blocks/google_docs'
 import { GoogleDriveBlock } from '@/blocks/blocks/google_drive'
@@ -215,7 +214,6 @@ export const registry: Record<string, BlockConfig> = {
 gmail_v2: GmailV2Block,
 google_calendar: GoogleCalendarBlock,
 google_calendar_v2: GoogleCalendarV2Block,
-google_books: GoogleBooksBlock,
 google_docs: GoogleDocsBlock,
 google_drive: GoogleDriveBlock,
 google_forms: GoogleFormsBlock,
@@ -196,8 +196,6 @@ export interface SubBlockConfig {
 type: SubBlockType
 mode?: 'basic' | 'advanced' | 'both' | 'trigger' // Default is 'both' if not specified. 'trigger' means only shown in trigger mode
 canonicalParamId?: string
-/** Controls parameter visibility in agent/tool-input context */
-paramVisibility?: 'user-or-llm' | 'user-only' | 'llm-only' | 'hidden'
 required?:
 | boolean
 | {
@@ -1,32 +1,8 @@
 import { isHosted } from '@/lib/core/config/feature-flags'
 import type { BlockOutput, OutputFieldDefinition, SubBlockConfig } from '@/blocks/types'
-import {
-getHostedModels,
-getProviderFromModel,
-getProviderIcon,
-providers,
-} from '@/providers/utils'
+import { getHostedModels, getProviderFromModel, providers } from '@/providers/utils'
 import { useProvidersStore } from '@/stores/providers/store'
 
-/**
- * Returns model options for combobox subblocks, combining all provider sources.
- */
-export function getModelOptions() {
-const providersState = useProvidersStore.getState()
-const baseModels = providersState.providers.base.models
-const ollamaModels = providersState.providers.ollama.models
-const vllmModels = providersState.providers.vllm.models
-const openrouterModels = providersState.providers.openrouter.models
-const allModels = Array.from(
-new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
-)
-
-return allModels.map((model) => {
-const icon = getProviderIcon(model)
-return { label: model, id: model, ...(icon && { icon }) }
-})
-}
-
 /**
  * Checks if a field is included in the dependsOn config.
  * Handles both simple array format and object format with all/any fields.
@@ -1,4 +1,3 @@
-import { setupGlobalFetchMock } from '@sim/testing'
 import { afterEach, beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
 import { getAllBlocks } from '@/blocks'
 import { BlockType, isMcpTool } from '@/executor/constants'
@@ -62,30 +61,6 @@ vi.mock('@/providers', () => ({
 }),
 }))
 
-vi.mock('@/executor/utils/http', () => ({
-buildAuthHeaders: vi.fn().mockResolvedValue({ 'Content-Type': 'application/json' }),
-buildAPIUrl: vi.fn((path: string, params?: Record<string, string>) => {
-const url = new URL(path, 'http://localhost:3000')
-if (params) {
-for (const [key, value] of Object.entries(params)) {
-if (value !== undefined && value !== null) {
-url.searchParams.set(key, value)
-}
-}
-}
-return url
-}),
-extractAPIErrorMessage: vi.fn(async (response: Response) => {
-const defaultMessage = `API request failed with status ${response.status}`
-try {
-const errorData = await response.json()
-return errorData.error || defaultMessage
-} catch {
-return defaultMessage
-}
-}),
-}))
-
 vi.mock('@sim/db', () => ({
 db: {
 select: vi.fn().mockReturnValue({
@@ -109,7 +84,7 @@ vi.mock('@sim/db/schema', () => ({
 },
 }))
 
-setupGlobalFetchMock()
+global.fetch = Object.assign(vi.fn(), { preconnect: vi.fn() }) as typeof fetch
 
 const mockGetAllBlocks = getAllBlocks as Mock
 const mockExecuteTool = executeTool as Mock
@@ -1926,301 +1901,5 @@ describe('AgentBlockHandler', () => {
 
 expect(discoveryCalls[0].url).toContain('serverId=mcp-legacy-server')
 })
-
-describe('customToolId resolution - DB as source of truth', () => {
-const staleInlineSchema = {
-function: {
-name: 'formatReport',
-description: 'Formats a report',
-parameters: {
-type: 'object',
-properties: {
-title: { type: 'string', description: 'Report title' },
-content: { type: 'string', description: 'Report content' },
-},
-required: ['title', 'content'],
-},
-},
-}
-
-const dbSchema = {
-function: {
-name: 'formatReport',
-description: 'Formats a report',
-parameters: {
-type: 'object',
-properties: {
-title: { type: 'string', description: 'Report title' },
-content: { type: 'string', description: 'Report content' },
-format: { type: 'string', description: 'Output format' },
-},
-required: ['title', 'content', 'format'],
-},
-},
-}
-
-const staleInlineCode = 'return { title, content };'
-const dbCode = 'return { title, content, format };'
-
-function mockFetchForCustomTool(toolId: string) {
-mockFetch.mockImplementation((url: string) => {
-if (typeof url === 'string' && url.includes('/api/tools/custom')) {
-return Promise.resolve({
-ok: true,
-headers: { get: () => null },
-json: () =>
-Promise.resolve({
-data: [
-{
-id: toolId,
-title: 'formatReport',
-schema: dbSchema,
-code: dbCode,
-},
-],
-}),
-})
-}
-return Promise.resolve({
-ok: true,
-headers: { get: () => null },
-json: () => Promise.resolve({}),
-})
-})
-}
-
-function mockFetchFailure() {
-mockFetch.mockImplementation((url: string) => {
-if (typeof url === 'string' && url.includes('/api/tools/custom')) {
-return Promise.resolve({
-ok: false,
-status: 500,
-headers: { get: () => null },
-json: () => Promise.resolve({}),
-})
-}
-return Promise.resolve({
-ok: true,
-headers: { get: () => null },
-json: () => Promise.resolve({}),
-})
-})
-}
-
-beforeEach(() => {
-Object.defineProperty(global, 'window', {
-value: undefined,
-writable: true,
-configurable: true,
-})
-})
-
-it('should always fetch latest schema from DB when customToolId is present', async () => {
-const toolId = 'custom-tool-123'
-mockFetchForCustomTool(toolId)
-
-const inputs = {
-model: 'gpt-4o',
-userPrompt: 'Format a report',
-apiKey: 'test-api-key',
-tools: [
-{
-type: 'custom-tool',
-customToolId: toolId,
-title: 'formatReport',
-schema: staleInlineSchema,
-code: staleInlineCode,
-usageControl: 'auto' as const,
-},
-],
-}
-
-mockGetProviderFromModel.mockReturnValue('openai')
-
-await handler.execute(mockContext, mockBlock, inputs)
-
-expect(mockExecuteProviderRequest).toHaveBeenCalled()
-const providerCall = mockExecuteProviderRequest.mock.calls[0]
-const tools = providerCall[1].tools
-
-expect(tools.length).toBe(1)
-// DB schema wins over stale inline — includes format param
-expect(tools[0].parameters.required).toContain('format')
-expect(tools[0].parameters.properties).toHaveProperty('format')
-})
-
-it('should fetch from DB when customToolId has no inline schema', async () => {
-const toolId = 'custom-tool-123'
-mockFetchForCustomTool(toolId)
-
-const inputs = {
-model: 'gpt-4o',
-userPrompt: 'Format a report',
-apiKey: 'test-api-key',
-tools: [
-{
-type: 'custom-tool',
-customToolId: toolId,
-usageControl: 'auto' as const,
-},
-],
-}
-
-mockGetProviderFromModel.mockReturnValue('openai')
-
-await handler.execute(mockContext, mockBlock, inputs)
-
-expect(mockExecuteProviderRequest).toHaveBeenCalled()
-const providerCall = mockExecuteProviderRequest.mock.calls[0]
-const tools = providerCall[1].tools
-
-expect(tools.length).toBe(1)
-expect(tools[0].name).toBe('formatReport')
-expect(tools[0].parameters.required).toContain('format')
-})
-
-it('should fall back to inline schema when DB fetch fails and inline exists', async () => {
-mockFetchFailure()
-
-const inputs = {
-model: 'gpt-4o',
-userPrompt: 'Format a report',
-apiKey: 'test-api-key',
-tools: [
-{
-type: 'custom-tool',
-customToolId: 'custom-tool-123',
-title: 'formatReport',
-schema: staleInlineSchema,
-code: staleInlineCode,
-usageControl: 'auto' as const,
-},
-],
-}
-
-mockGetProviderFromModel.mockReturnValue('openai')
-
-await handler.execute(mockContext, mockBlock, inputs)
-
-expect(mockExecuteProviderRequest).toHaveBeenCalled()
-const providerCall = mockExecuteProviderRequest.mock.calls[0]
-const tools = providerCall[1].tools
-
-expect(tools.length).toBe(1)
-expect(tools[0].name).toBe('formatReport')
-expect(tools[0].parameters.required).not.toContain('format')
-})
-
-it('should return null when DB fetch fails and no inline schema exists', async () => {
-mockFetchFailure()
-
-const inputs = {
-model: 'gpt-4o',
-userPrompt: 'Format a report',
-apiKey: 'test-api-key',
-tools: [
-{
-type: 'custom-tool',
-customToolId: 'custom-tool-123',
-usageControl: 'auto' as const,
-},
-],
-}
-
-mockGetProviderFromModel.mockReturnValue('openai')
-
-await handler.execute(mockContext, mockBlock, inputs)
-
-expect(mockExecuteProviderRequest).toHaveBeenCalled()
-const providerCall = mockExecuteProviderRequest.mock.calls[0]
-const tools = providerCall[1].tools
-
-expect(tools.length).toBe(0)
-})
-
-it('should use DB code for executeFunction when customToolId resolves', async () => {
-const toolId = 'custom-tool-123'
-mockFetchForCustomTool(toolId)
-
-let capturedTools: any[] = []
-Promise.all = vi.fn().mockImplementation((promises: Promise<any>[]) => {
-const result = originalPromiseAll.call(Promise, promises)
-result.then((tools: any[]) => {
-if (tools?.length) {
-capturedTools = tools.filter((t) => t !== null)
-}
-})
-return result
-})
-
-const inputs = {
-model: 'gpt-4o',
-userPrompt: 'Format a report',
-apiKey: 'test-api-key',
-tools: [
-{
-type: 'custom-tool',
-customToolId: toolId,
-title: 'formatReport',
-schema: staleInlineSchema,
-code: staleInlineCode,
-usageControl: 'auto' as const,
-},
-],
-}
-
-mockGetProviderFromModel.mockReturnValue('openai')
-
-await handler.execute(mockContext, mockBlock, inputs)
-
-expect(capturedTools.length).toBe(1)
-expect(typeof capturedTools[0].executeFunction).toBe('function')
-
-await capturedTools[0].executeFunction({ title: 'Q1', format: 'pdf' })
-
-expect(mockExecuteTool).toHaveBeenCalledWith(
-'function_execute',
-expect.objectContaining({
-code: dbCode,
-}),
-false,
-expect.any(Object)
-)
-})
-
-it('should not fetch from DB when no customToolId is present', async () => {
-const inputs = {
-model: 'gpt-4o',
-userPrompt: 'Use the tool',
-apiKey: 'test-api-key',
-tools: [
-{
-type: 'custom-tool',
-title: 'formatReport',
-schema: staleInlineSchema,
-code: staleInlineCode,
-usageControl: 'auto' as const,
-},
-],
-}
-
-mockGetProviderFromModel.mockReturnValue('openai')
-
-await handler.execute(mockContext, mockBlock, inputs)
-
-const customToolFetches = mockFetch.mock.calls.filter(
-(call: any[]) => typeof call[0] === 'string' && call[0].includes('/api/tools/custom')
-)
-expect(customToolFetches.length).toBe(0)
-
-expect(mockExecuteProviderRequest).toHaveBeenCalled()
-const providerCall = mockExecuteProviderRequest.mock.calls[0]
-const tools = providerCall[1].tools
-
-expect(tools.length).toBe(1)
-expect(tools[0].name).toBe('formatReport')
-expect(tools[0].parameters.required).not.toContain('format')
-})
-})
 })
 })
@@ -62,12 +62,9 @@ export class AgentBlockHandler implements BlockHandler {
 await validateModelProvider(ctx.userId, model, ctx)
 
 const providerId = getProviderFromModel(model)
-const formattedTools = await this.formatTools(
-ctx,
-filteredInputs.tools || [],
-block.canonicalModes
-)
+const formattedTools = await this.formatTools(ctx, filteredInputs.tools || [])
 
+// Resolve skill metadata for progressive disclosure
 const skillInputs = filteredInputs.skills ?? []
 let skillMetadata: Array<{ name: string; description: string }> = []
 if (skillInputs.length > 0 && ctx.workspaceId) {
@@ -224,11 +221,7 @@ export class AgentBlockHandler implements BlockHandler {
 })
 }
 
-private async formatTools(
-ctx: ExecutionContext,
-inputTools: ToolInput[],
-canonicalModes?: Record<string, 'basic' | 'advanced'>
-): Promise<any[]> {
+private async formatTools(ctx: ExecutionContext, inputTools: ToolInput[]): Promise<any[]> {
 if (!Array.isArray(inputTools)) return []
 
 const filtered = inputTools.filter((tool) => {
@@ -256,7 +249,7 @@ export class AgentBlockHandler implements BlockHandler {
 if (tool.type === 'custom-tool' && (tool.schema || tool.customToolId)) {
 return await this.createCustomTool(ctx, tool)
 }
-return this.transformBlockTool(ctx, tool, canonicalModes)
+return this.transformBlockTool(ctx, tool)
 } catch (error) {
 logger.error(`[AgentHandler] Error creating tool:`, { tool, error })
 return null
@@ -279,16 +272,15 @@ export class AgentBlockHandler implements BlockHandler {
 let code = tool.code
 let title = tool.title
 
-if (tool.customToolId) {
+if (tool.customToolId && !schema) {
 const resolved = await this.fetchCustomToolById(ctx, tool.customToolId)
-if (resolved) {
-schema = resolved.schema
-code = resolved.code
-title = resolved.title
-} else if (!schema) {
+if (!resolved) {
 logger.error(`Custom tool not found: ${tool.customToolId}`)
 return null
 }
+schema = resolved.schema
+code = resolved.code
+title = resolved.title
 }
 
 if (!schema?.function) {
@@ -727,17 +719,12 @@ export class AgentBlockHandler implements BlockHandler {
 }
 }
 
-private async transformBlockTool(
-ctx: ExecutionContext,
-tool: ToolInput,
-canonicalModes?: Record<string, 'basic' | 'advanced'>
-) {
+private async transformBlockTool(ctx: ExecutionContext, tool: ToolInput) {
 const transformedTool = await transformBlockTool(tool, {
 selectedOperation: tool.operation,
 getAllBlocks,
 getToolAsync: (toolId: string) => getToolAsync(toolId, ctx.workflowId),
 getTool,
-canonicalModes,
 })
 
 if (transformedTool) {
@@ -2,7 +2,7 @@ import { db } from '@sim/db'
 import { account } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { eq } from 'drizzle-orm'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
 import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
 import type { BlockOutput } from '@/blocks/types'
@@ -79,7 +79,7 @@ export class RouterBlockHandler implements BlockHandler {
 const providerId = getProviderFromModel(routerConfig.model)
 
 try {
-const url = new URL('/api/providers', getInternalApiBaseUrl())
+const url = new URL('/api/providers', getBaseUrl())
 if (ctx.userId) url.searchParams.set('userId', ctx.userId)
 
 const messages = [{ role: 'user', content: routerConfig.prompt }]
@@ -209,7 +209,7 @@ export class RouterBlockHandler implements BlockHandler {
 const providerId = getProviderFromModel(routerConfig.model)
 
 try {
-const url = new URL('/api/providers', getInternalApiBaseUrl())
+const url = new URL('/api/providers', getBaseUrl())
 if (ctx.userId) url.searchParams.set('userId', ctx.userId)
 
 const messages = [{ role: 'user', content: routerConfig.context }]
@@ -1,4 +1,3 @@
-import { setupGlobalFetchMock } from '@sim/testing'
 import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
 import { BlockType } from '@/executor/constants'
 import { WorkflowBlockHandler } from '@/executor/handlers/workflow/workflow-handler'
@@ -10,7 +9,7 @@ vi.mock('@/lib/auth/internal', () => ({
 }))
 
 // Mock fetch globally
-setupGlobalFetchMock()
+global.fetch = vi.fn()
 
 describe('WorkflowBlockHandler', () => {
 let handler: WorkflowBlockHandler
@@ -1,5 +1,5 @@
 import { generateInternalToken } from '@/lib/auth/internal'
-import { getBaseUrl, getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { HTTP } from '@/executor/constants'
 
 export async function buildAuthHeaders(): Promise<Record<string, string>> {
@@ -16,8 +16,7 @@ export async function buildAuthHeaders(): Promise<Record<string, string>> {
 }
 
 export function buildAPIUrl(path: string, params?: Record<string, string>): URL {
-const baseUrl = path.startsWith('/api/') ? getInternalApiBaseUrl() : getBaseUrl()
-const url = new URL(path, baseUrl)
+const url = new URL(path, getBaseUrl())
 
 if (params) {
 for (const [key, value] of Object.entries(params)) {
@@ -642,10 +642,6 @@ export function useDeployChildWorkflow() {
 queryClient.invalidateQueries({
 queryKey: workflowKeys.deploymentStatus(variables.workflowId),
 })
-// Invalidate workflow state so tool input mappings refresh
-queryClient.invalidateQueries({
-queryKey: workflowKeys.state(variables.workflowId),
-})
 // Also invalidate deployment queries
 queryClient.invalidateQueries({
 queryKey: deploymentKeys.info(variables.workflowId),
@@ -1,46 +0,0 @@
-'use client'
-
-import { useEffect, useRef } from 'react'
-import { createLogger } from '@sim/logger'
-
-const logger = createLogger('ReferralAttribution')
-
-const COOKIE_NAME = 'sim_utm'
-
-const TERMINAL_REASONS = new Set([
-'invalid_cookie',
-'no_utm_cookie',
-'no_matching_campaign',
-'already_attributed',
-])
-
-/**
- * Fires a one-shot `POST /api/attribution` when a `sim_utm` cookie is present.
- * Retries on transient failures; stops on terminal outcomes.
- */
-export function useReferralAttribution() {
-const calledRef = useRef(false)
-
-useEffect(() => {
-if (calledRef.current) return
-if (!document.cookie.includes(COOKIE_NAME)) return
-
-calledRef.current = true
-
-fetch('/api/attribution', { method: 'POST' })
-.then((res) => res.json())
-.then((data) => {
-if (data.attributed) {
-logger.info('Referral attribution successful', { bonusAmount: data.bonusAmount })
-} else if (data.error || TERMINAL_REASONS.has(data.reason)) {
-logger.info('Referral attribution skipped', { reason: data.reason || data.error })
-} else {
-calledRef.current = false
-}
-})
-.catch((err) => {
-logger.warn('Referral attribution failed, will retry', { error: err })
-calledRef.current = false
-})
-}, [])
-}
@@ -1,64 +0,0 @@
-import { db } from '@sim/db'
-import { organization, userStats } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { eq, sql } from 'drizzle-orm'
-import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
-import type { DbOrTx } from '@/lib/db/types'
-
-const logger = createLogger('BonusCredits')
-
-/**
- * Apply bonus credits to a user (e.g. referral bonuses, promotional codes).
- *
- * Detects the user's current plan and routes credits accordingly:
- * - Free/Pro: adds to `userStats.creditBalance` and increments `currentUsageLimit`
- * - Team/Enterprise: adds to `organization.creditBalance` and increments `orgUsageLimit`
- *
- * Uses direct increment (not recalculation) so it works correctly for free-tier
- * users where `setUsageLimitForCredits` would compute planBase=0 and skip the update.
- *
- * @param tx - Optional Drizzle transaction context. When provided, all DB writes
- * participate in the caller's transaction for atomicity.
- */
-export async function applyBonusCredits(
-userId: string,
-amount: number,
-tx?: DbOrTx
-): Promise<void> {
-const dbCtx = tx ?? db
-const subscription = await getHighestPrioritySubscription(userId)
-const isTeamOrEnterprise = subscription?.plan === 'team' || subscription?.plan === 'enterprise'
-
-if (isTeamOrEnterprise && subscription?.referenceId) {
-const orgId = subscription.referenceId
-
-await dbCtx
-.update(organization)
-.set({
-creditBalance: sql`${organization.creditBalance} + ${amount}`,
-orgUsageLimit: sql`COALESCE(${organization.orgUsageLimit}, '0')::decimal + ${amount}`,
-})
-.where(eq(organization.id, orgId))
-
-logger.info('Applied bonus credits to organization', {
-userId,
-organizationId: orgId,
-plan: subscription.plan,
-amount,
-})
-} else {
-await dbCtx
-.update(userStats)
-.set({
-creditBalance: sql`${userStats.creditBalance} + ${amount}`,
-currentUsageLimit: sql`COALESCE(${userStats.currentUsageLimit}, '0')::decimal + ${amount}`,
-})
-.where(eq(userStats.userId, userId))
-
-logger.info('Applied bonus credits to user', {
-userId,
-plan: subscription?.plan || 'free',
-amount,
-})
-}
-}
@@ -220,7 +220,6 @@ export const env = createEnv({
 SOCKET_SERVER_URL: z.string().url().optional(), // WebSocket server URL for real-time features
 SOCKET_PORT: z.number().optional(), // Port for WebSocket server
 PORT: z.number().optional(), // Main application port
-INTERNAL_API_BASE_URL: z.string().optional(), // Optional internal base URL for server-side self-calls; must include protocol if set (e.g., http://sim-app.namespace.svc.cluster.local:3000)
 ALLOWED_ORIGINS: z.string().optional(), // CORS allowed origins
 
 // OAuth Integration Credentials - All optional, enables third-party integrations
@@ -1,19 +1,6 @@
 import { getEnv } from '@/lib/core/config/env'
 import { isProd } from '@/lib/core/config/feature-flags'
 
-function hasHttpProtocol(url: string): boolean {
-return /^https?:\/\//i.test(url)
-}
-
-function normalizeBaseUrl(url: string): string {
-if (hasHttpProtocol(url)) {
-return url
-}
-
-const protocol = isProd ? 'https://' : 'http://'
-return `${protocol}${url}`
-}
-
 /**
  * Returns the base URL of the application from NEXT_PUBLIC_APP_URL
  * This ensures webhooks, callbacks, and other integrations always use the correct public URL
@@ -21,7 +8,7 @@ function normalizeBaseUrl(url: string): string {
  * @throws Error if NEXT_PUBLIC_APP_URL is not configured
  */
 export function getBaseUrl(): string {
-const baseUrl = getEnv('NEXT_PUBLIC_APP_URL')?.trim()
+const baseUrl = getEnv('NEXT_PUBLIC_APP_URL')
 
 if (!baseUrl) {
 throw new Error(
@@ -29,26 +16,12 @@ export function getBaseUrl(): string {
 )
 }
 
-return normalizeBaseUrl(baseUrl)
-}
-
-/**
- * Returns the base URL used by server-side internal API calls.
- * Falls back to NEXT_PUBLIC_APP_URL when INTERNAL_API_BASE_URL is not set.
- */
-export function getInternalApiBaseUrl(): string {
-const internalBaseUrl = getEnv('INTERNAL_API_BASE_URL')?.trim()
-if (!internalBaseUrl) {
-return getBaseUrl()
+if (baseUrl.startsWith('http://') || baseUrl.startsWith('https://')) {
+return baseUrl
 }
 
-if (!hasHttpProtocol(internalBaseUrl)) {
-throw new Error(
-'INTERNAL_API_BASE_URL must include protocol (http:// or https://), e.g. http://sim-app.default.svc.cluster.local:3000'
-)
-}
-
-return internalBaseUrl
+const protocol = isProd ? 'https://' : 'http://'
+return `${protocol}${baseUrl}`
 }
 
 /**
@@ -2,7 +2,7 @@ import { db } from '@sim/db'
 import { account } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { eq } from 'drizzle-orm'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
 import { executeProviderRequest } from '@/providers'
 import { getProviderFromModel } from '@/providers/utils'
@@ -61,7 +61,7 @@ async function queryKnowledgeBase(
 })
 
 // Call the knowledge base search API directly
-const searchUrl = `${getInternalApiBaseUrl()}/api/knowledge/search`
+const searchUrl = `${getBaseUrl()}/api/knowledge/search`
 
 const response = await fetch(searchUrl, {
 method: 'POST',
@@ -539,8 +539,8 @@ async function executeMistralOCRRequest(
 const isInternalRoute = url.startsWith('/')
 
 if (isInternalRoute) {
-const { getInternalApiBaseUrl } = await import('@/lib/core/utils/urls')
-url = `${getInternalApiBaseUrl()}${url}`
+const { getBaseUrl } = await import('@/lib/core/utils/urls')
+url = `${getBaseUrl()}${url}`
 }
 
 let headers =
@@ -1,4 +1,4 @@
-import { createEnvMock, loggerMock } from '@sim/testing'
+import { createEnvMock, createMockLogger } from '@sim/testing'
 import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
 
 /**
@@ -10,6 +10,10 @@ import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
 * mock functions can intercept.
 */
 
+const loggerMock = vi.hoisted(() => ({
+createLogger: () => createMockLogger(),
+}))
+
 const mockSend = vi.fn()
 const mockBatchSend = vi.fn()
 const mockAzureBeginSend = vi.fn()
@@ -1,8 +1,20 @@
-import { createEnvMock, databaseMock, loggerMock } from '@sim/testing'
+import { createEnvMock, createMockLogger } from '@sim/testing'
 import { beforeEach, describe, expect, it, vi } from 'vitest'
 import type { EmailType } from '@/lib/messaging/email/mailer'
 
-vi.mock('@sim/db', () => databaseMock)
+const loggerMock = vi.hoisted(() => ({
+createLogger: () => createMockLogger(),
+}))
+
+const mockDb = vi.hoisted(() => ({
+select: vi.fn(),
+insert: vi.fn(),
+update: vi.fn(),
+}))
+
+vi.mock('@sim/db', () => ({
+db: mockDb,
+}))
 
 vi.mock('@sim/db/schema', () => ({
 user: { id: 'id', email: 'email' },
@@ -18,8 +30,6 @@ vi.mock('drizzle-orm', () => ({
 eq: vi.fn((a, b) => ({ type: 'eq', left: a, right: b })),
 }))
 
-const mockDb = databaseMock.db as Record<string, ReturnType<typeof vi.fn>>
-
 vi.mock('@/lib/core/config/env', () => createEnvMock({ BETTER_AUTH_SECRET: 'test-secret-key' }))
 
 vi.mock('@sim/logger', () => loggerMock)
@@ -11,7 +11,7 @@ import { and, eq, isNull, or, sql } from 'drizzle-orm'
 import { nanoid } from 'nanoid'
 import { isOrganizationOnTeamOrEnterprisePlan } from '@/lib/billing'
 import { pollingIdempotency } from '@/lib/core/idempotency/service'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { getOAuthToken, refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
 import type { GmailAttachment } from '@/tools/gmail/types'
 import { downloadAttachments, extractAttachmentInfo } from '@/tools/gmail/utils'
@@ -691,7 +691,7 @@ async function processEmails(
 `[${requestId}] Sending ${config.includeRawEmail ? 'simplified + raw' : 'simplified'} email payload for ${email.id}`
 )
 
-const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
+const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
 
 const response = await fetch(webhookUrl, {
 method: 'POST',
@@ -7,7 +7,7 @@ import type { FetchMessageObject, MailboxLockObject } from 'imapflow'
|
|||||||
import { ImapFlow } from 'imapflow'
|
import { ImapFlow } from 'imapflow'
|
||||||
import { nanoid } from 'nanoid'
|
import { nanoid } from 'nanoid'
|
||||||
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
||||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
||||||
|
|
||||||
const logger = createLogger('ImapPollingService')
|
const logger = createLogger('ImapPollingService')
|
||||||
@@ -639,7 +639,7 @@ async function processEmails(
|
|||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
}
|
}
|
||||||
|
|
||||||
const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||||
|
|
||||||
const response = await fetch(webhookUrl, {
|
const response = await fetch(webhookUrl, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import { htmlToText } from 'html-to-text'
|
|||||||
import { nanoid } from 'nanoid'
|
import { nanoid } from 'nanoid'
|
||||||
import { isOrganizationOnTeamOrEnterprisePlan } from '@/lib/billing'
|
import { isOrganizationOnTeamOrEnterprisePlan } from '@/lib/billing'
|
||||||
import { pollingIdempotency } from '@/lib/core/idempotency'
|
import { pollingIdempotency } from '@/lib/core/idempotency'
|
||||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { getOAuthToken, refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
import { getOAuthToken, refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||||
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
||||||
|
|
||||||
@@ -601,7 +601,7 @@ async function processOutlookEmails(
|
|||||||
`[${requestId}] Processing email: ${email.subject} from ${email.from?.emailAddress?.address}`
|
`[${requestId}] Processing email: ${email.subject} from ${email.from?.emailAddress?.address}`
|
||||||
)
|
)
|
||||||
|
|
||||||
const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||||
|
|
||||||
const response = await fetch(webhookUrl, {
|
const response = await fetch(webhookUrl, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import {
|
|||||||
secureFetchWithPinnedIP,
|
secureFetchWithPinnedIP,
|
||||||
validateUrlWithDNS,
|
validateUrlWithDNS,
|
||||||
} from '@/lib/core/security/input-validation.server'
|
} from '@/lib/core/security/input-validation.server'
|
||||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
||||||
|
|
||||||
const logger = createLogger('RssPollingService')
|
const logger = createLogger('RssPollingService')
|
||||||
@@ -376,7 +376,7 @@ async function processRssItems(
|
|||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
}
|
}
|
||||||
|
|
||||||
const webhookUrl = `${getInternalApiBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhookData.path}`
|
||||||
|
|
||||||
const response = await fetch(webhookUrl, {
|
const response = await fetch(webhookUrl, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
|
|||||||
@@ -645,18 +645,6 @@ describe('Workflow Normalization Utilities', () => {
|
|||||||
const result = filterSubBlockIds(ids)
|
const result = filterSubBlockIds(ids)
|
||||||
expect(result).toEqual(['signingSecret'])
|
expect(result).toEqual(['signingSecret'])
|
||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('should exclude synthetic tool-input subBlock IDs', () => {
|
|
||||||
const ids = [
|
|
||||||
'toolConfig',
|
|
||||||
'toolConfig-tool-0-query',
|
|
||||||
'toolConfig-tool-0-url',
|
|
||||||
'toolConfig-tool-1-status',
|
|
||||||
'systemPrompt',
|
|
||||||
]
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['systemPrompt', 'toolConfig'])
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('normalizeTriggerConfigValues', () => {
|
describe('normalizeTriggerConfigValues', () => {
|
||||||
|
|||||||
@@ -411,14 +411,7 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Pattern matching synthetic subBlock IDs created by ToolSubBlockRenderer.
|
* Filters subBlock IDs to exclude system and trigger runtime subBlocks.
|
||||||
* These IDs follow the format `{subBlockId}-tool-{index}-{paramId}` and are
|
|
||||||
* mirrors of values already stored in toolConfig.value.tools[N].params.
|
|
||||||
*/
|
|
||||||
const SYNTHETIC_TOOL_SUBBLOCK_RE = /-tool-\d+-/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filters subBlock IDs to exclude system, trigger runtime, and synthetic tool subBlocks.
|
|
||||||
*
|
*
|
||||||
* @param subBlockIds - Array of subBlock IDs to filter
|
* @param subBlockIds - Array of subBlock IDs to filter
|
||||||
* @returns Filtered and sorted array of subBlock IDs
|
* @returns Filtered and sorted array of subBlock IDs
|
||||||
@@ -429,7 +422,6 @@ export function filterSubBlockIds(subBlockIds: string[]): string[] {
|
|||||||
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
|
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
|
||||||
if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
|
if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
|
||||||
return false
|
return false
|
||||||
if (SYNTHETIC_TOOL_SUBBLOCK_RE.test(id)) return false
|
|
||||||
return true
|
return true
|
||||||
})
|
})
|
||||||
.sort()
|
.sort()
|
||||||
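
The two hunks above remove the synthetic tool-input filtering from filterSubBlockIds: the regex /-tool-\d+-/ and the test that expected IDs such as 'toolConfig-tool-0-query' to be excluded are both dropped on the right-hand side, so those mirrored IDs are now only excluded if one of the remaining system or trigger filters happens to match them. Reduced to the synthetic-ID part, the removed behaviour was:

    // What the left-hand (removed) code did, reduced to the synthetic-ID check;
    // the surrounding system/trigger filters are unchanged and omitted here.
    const SYNTHETIC_TOOL_SUBBLOCK_RE = /-tool-\d+-/
    const dropSyntheticToolIds = (ids: string[]): string[] =>
      ids.filter((id) => !SYNTHETIC_TOOL_SUBBLOCK_RE.test(id)).sort()

    // dropSyntheticToolIds(['toolConfig', 'toolConfig-tool-0-query', 'systemPrompt'])
    //   => ['systemPrompt', 'toolConfig']  (the expectation of the deleted test)
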
|
|||||||
@@ -1,11 +1,18 @@
|
|||||||
/**
|
/**
|
||||||
* @vitest-environment node
|
* @vitest-environment node
|
||||||
*/
|
*/
|
||||||
import { loggerMock } from '@sim/testing'
|
|
||||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
|
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||||
|
|
||||||
vi.mock('@sim/logger', () => loggerMock)
|
// Mock all external dependencies before imports
|
||||||
|
vi.mock('@sim/logger', () => ({
|
||||||
|
createLogger: () => ({
|
||||||
|
info: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
}),
|
||||||
|
}))
|
||||||
|
|
||||||
vi.mock('@/stores/workflows/workflow/store', () => ({
|
vi.mock('@/stores/workflows/workflow/store', () => ({
|
||||||
useWorkflowStore: {
|
useWorkflowStore: {
|
||||||
|
|||||||
@@ -14,15 +14,22 @@ import {
|
|||||||
databaseMock,
|
databaseMock,
|
||||||
expectWorkflowAccessDenied,
|
expectWorkflowAccessDenied,
|
||||||
expectWorkflowAccessGranted,
|
expectWorkflowAccessGranted,
|
||||||
mockAuth,
|
|
||||||
} from '@sim/testing'
|
} from '@sim/testing'
|
||||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
const mockDb = databaseMock.db
|
vi.mock('@sim/db', () => databaseMock)
|
||||||
|
|
||||||
|
// Mock the auth module
|
||||||
|
vi.mock('@/lib/auth', () => ({
|
||||||
|
getSession: vi.fn(),
|
||||||
|
}))
|
||||||
|
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { getSession } from '@/lib/auth'
|
||||||
|
// Import after mocks are set up
|
||||||
|
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
describe('validateWorkflowPermissions', () => {
|
describe('validateWorkflowPermissions', () => {
|
||||||
const auth = mockAuth()
|
|
||||||
|
|
||||||
const mockSession = createSession({ userId: 'user-1', email: 'user1@test.com' })
|
const mockSession = createSession({ userId: 'user-1', email: 'user1@test.com' })
|
||||||
const mockWorkflow = createWorkflowRecord({
|
const mockWorkflow = createWorkflowRecord({
|
||||||
id: 'wf-1',
|
id: 'wf-1',
|
||||||
@@ -35,17 +42,13 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.resetModules()
|
|
||||||
vi.clearAllMocks()
|
vi.clearAllMocks()
|
||||||
|
|
||||||
vi.doMock('@sim/db', () => databaseMock)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('authentication', () => {
|
describe('authentication', () => {
|
||||||
it('should return 401 when no session exists', async () => {
|
it('should return 401 when no session exists', async () => {
|
||||||
auth.setUnauthenticated()
|
vi.mocked(getSession).mockResolvedValue(null)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 401)
|
expectWorkflowAccessDenied(result, 401)
|
||||||
@@ -53,9 +56,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('should return 401 when session has no user id', async () => {
|
it('should return 401 when session has no user id', async () => {
|
||||||
auth.mockGetSession.mockResolvedValue({ user: {} } as any)
|
vi.mocked(getSession).mockResolvedValue({ user: {} } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 401)
|
expectWorkflowAccessDenied(result, 401)
|
||||||
@@ -64,14 +66,14 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
|
|
||||||
describe('workflow not found', () => {
|
describe('workflow not found', () => {
|
||||||
it('should return 404 when workflow does not exist', async () => {
|
it('should return 404 when workflow does not exist', async () => {
|
||||||
auth.mockGetSession.mockResolvedValue(mockSession as any)
|
vi.mocked(getSession).mockResolvedValue(mockSession as any)
|
||||||
|
|
||||||
|
// Mock workflow query to return empty
|
||||||
const mockLimit = vi.fn().mockResolvedValue([])
|
const mockLimit = vi.fn().mockResolvedValue([])
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('non-existent', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('non-existent', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 404)
|
expectWorkflowAccessDenied(result, 404)
|
||||||
@@ -81,42 +83,43 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
|
|
||||||
describe('owner access', () => {
|
describe('owner access', () => {
|
||||||
it('should deny access to workflow owner without workspace permissions for read action', async () => {
|
it('should deny access to workflow owner without workspace permissions for read action', async () => {
|
||||||
auth.setAuthenticated({ id: 'owner-1', email: 'owner-1@test.com' })
|
const ownerSession = createSession({ userId: 'owner-1' })
|
||||||
|
vi.mocked(getSession).mockResolvedValue(ownerSession as any)
|
||||||
|
|
||||||
|
// Mock workflow query
|
||||||
const mockLimit = vi.fn().mockResolvedValue([mockWorkflow])
|
const mockLimit = vi.fn().mockResolvedValue([mockWorkflow])
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should deny access to workflow owner without workspace permissions for write action', async () => {
|
it('should deny access to workflow owner without workspace permissions for write action', async () => {
|
||||||
auth.setAuthenticated({ id: 'owner-1', email: 'owner-1@test.com' })
|
const ownerSession = createSession({ userId: 'owner-1' })
|
||||||
|
vi.mocked(getSession).mockResolvedValue(ownerSession as any)
|
||||||
|
|
||||||
const mockLimit = vi.fn().mockResolvedValue([mockWorkflow])
|
const mockLimit = vi.fn().mockResolvedValue([mockWorkflow])
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should deny access to workflow owner without workspace permissions for admin action', async () => {
|
it('should deny access to workflow owner without workspace permissions for admin action', async () => {
|
||||||
auth.setAuthenticated({ id: 'owner-1', email: 'owner-1@test.com' })
|
const ownerSession = createSession({ userId: 'owner-1' })
|
||||||
|
vi.mocked(getSession).mockResolvedValue(ownerSession as any)
|
||||||
|
|
||||||
const mockLimit = vi.fn().mockResolvedValue([mockWorkflow])
|
const mockLimit = vi.fn().mockResolvedValue([mockWorkflow])
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'admin')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'admin')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
@@ -125,10 +128,11 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
|
|
||||||
describe('workspace member access with permissions', () => {
|
describe('workspace member access with permissions', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
auth.mockGetSession.mockResolvedValue(mockSession as any)
|
vi.mocked(getSession).mockResolvedValue(mockSession as any)
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should grant read access to user with read permission', async () => {
|
it('should grant read access to user with read permission', async () => {
|
||||||
|
// First call: workflow query, second call: workspace owner, third call: permission
|
||||||
let callCount = 0
|
let callCount = 0
|
||||||
const mockLimit = vi.fn().mockImplementation(() => {
|
const mockLimit = vi.fn().mockImplementation(() => {
|
||||||
callCount++
|
callCount++
|
||||||
@@ -137,9 +141,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessGranted(result)
|
expectWorkflowAccessGranted(result)
|
||||||
@@ -154,9 +157,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
@@ -172,9 +174,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
||||||
|
|
||||||
expectWorkflowAccessGranted(result)
|
expectWorkflowAccessGranted(result)
|
||||||
@@ -189,9 +190,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'write')
|
||||||
|
|
||||||
expectWorkflowAccessGranted(result)
|
expectWorkflowAccessGranted(result)
|
||||||
@@ -206,9 +206,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'admin')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'admin')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
@@ -224,9 +223,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'admin')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'admin')
|
||||||
|
|
||||||
expectWorkflowAccessGranted(result)
|
expectWorkflowAccessGranted(result)
|
||||||
@@ -235,19 +233,18 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
|
|
||||||
describe('no workspace permission', () => {
|
describe('no workspace permission', () => {
|
||||||
it('should deny access to user without any workspace permission', async () => {
|
it('should deny access to user without any workspace permission', async () => {
|
||||||
auth.mockGetSession.mockResolvedValue(mockSession as any)
|
vi.mocked(getSession).mockResolvedValue(mockSession as any)
|
||||||
|
|
||||||
let callCount = 0
|
let callCount = 0
|
||||||
const mockLimit = vi.fn().mockImplementation(() => {
|
const mockLimit = vi.fn().mockImplementation(() => {
|
||||||
callCount++
|
callCount++
|
||||||
if (callCount === 1) return Promise.resolve([mockWorkflow])
|
if (callCount === 1) return Promise.resolve([mockWorkflow])
|
||||||
return Promise.resolve([])
|
return Promise.resolve([]) // No permission record
|
||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
@@ -262,14 +259,13 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
workspaceId: null,
|
workspaceId: null,
|
||||||
})
|
})
|
||||||
|
|
||||||
auth.mockGetSession.mockResolvedValue(mockSession as any)
|
vi.mocked(getSession).mockResolvedValue(mockSession as any)
|
||||||
|
|
||||||
const mockLimit = vi.fn().mockResolvedValue([workflowWithoutWorkspace])
|
const mockLimit = vi.fn().mockResolvedValue([workflowWithoutWorkspace])
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-2', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('wf-2', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
@@ -282,14 +278,13 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
workspaceId: null,
|
workspaceId: null,
|
||||||
})
|
})
|
||||||
|
|
||||||
auth.mockGetSession.mockResolvedValue(mockSession as any)
|
vi.mocked(getSession).mockResolvedValue(mockSession as any)
|
||||||
|
|
||||||
const mockLimit = vi.fn().mockResolvedValue([workflowWithoutWorkspace])
|
const mockLimit = vi.fn().mockResolvedValue([workflowWithoutWorkspace])
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-2', 'req-1', 'read')
|
const result = await validateWorkflowPermissions('wf-2', 'req-1', 'read')
|
||||||
|
|
||||||
expectWorkflowAccessDenied(result, 403)
|
expectWorkflowAccessDenied(result, 403)
|
||||||
@@ -298,7 +293,7 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
|
|
||||||
describe('default action', () => {
|
describe('default action', () => {
|
||||||
it('should default to read action when not specified', async () => {
|
it('should default to read action when not specified', async () => {
|
||||||
auth.mockGetSession.mockResolvedValue(mockSession as any)
|
vi.mocked(getSession).mockResolvedValue(mockSession as any)
|
||||||
|
|
||||||
let callCount = 0
|
let callCount = 0
|
||||||
const mockLimit = vi.fn().mockImplementation(() => {
|
const mockLimit = vi.fn().mockImplementation(() => {
|
||||||
@@ -308,9 +303,8 @@ describe('validateWorkflowPermissions', () => {
|
|||||||
})
|
})
|
||||||
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
const mockWhere = vi.fn(() => ({ limit: mockLimit }))
|
||||||
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
const mockFrom = vi.fn(() => ({ where: mockWhere }))
|
||||||
vi.mocked(mockDb.select).mockReturnValue({ from: mockFrom } as any)
|
vi.mocked(db.select).mockReturnValue({ from: mockFrom } as any)
|
||||||
|
|
||||||
const { validateWorkflowPermissions } = await import('@/lib/workflows/utils')
|
|
||||||
const result = await validateWorkflowPermissions('wf-1', 'req-1')
|
const result = await validateWorkflowPermissions('wf-1', 'req-1')
|
||||||
|
|
||||||
expectWorkflowAccessGranted(result)
|
expectWorkflowAccessGranted(result)
|
||||||
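
The validateWorkflowPermissions test diff above swaps the mockAuth()/setAuthenticated helper, vi.resetModules, vi.doMock and the per-test dynamic imports for a static import of the module under test plus direct vi.mocked(getSession) control. A condensed sketch of the right-hand pattern:

    import { getSession } from '@/lib/auth'
    import { vi } from 'vitest'

    vi.mock('@/lib/auth', () => ({ getSession: vi.fn() }))

    // Per test case:
    vi.mocked(getSession).mockResolvedValue(null)                  // unauthenticated
    // vi.mocked(getSession).mockResolvedValue(mockSession as any) // authenticated paths

Dropping vi.resetModules means the module under test is evaluated once against a single set of mocks, which is presumably why each test configures behaviour per call rather than re-importing the module.
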
|
|||||||
@@ -1,7 +1,17 @@
|
|||||||
import { databaseMock, drizzleOrmMock } from '@sim/testing'
|
import { drizzleOrmMock } from '@sim/testing/mocks'
|
||||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
vi.mock('@sim/db', () => databaseMock)
|
vi.mock('@sim/db', () => ({
|
||||||
|
db: {
|
||||||
|
select: vi.fn(),
|
||||||
|
from: vi.fn(),
|
||||||
|
where: vi.fn(),
|
||||||
|
limit: vi.fn(),
|
||||||
|
innerJoin: vi.fn(),
|
||||||
|
leftJoin: vi.fn(),
|
||||||
|
orderBy: vi.fn(),
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
|
||||||
vi.mock('@sim/db/schema', () => ({
|
vi.mock('@sim/db/schema', () => ({
|
||||||
permissions: {
|
permissions: {
|
||||||
|
|||||||
@@ -112,8 +112,6 @@ export interface ProviderToolConfig {
|
|||||||
required: string[]
|
required: string[]
|
||||||
}
|
}
|
||||||
usageControl?: ToolUsageControl
|
usageControl?: ToolUsageControl
|
||||||
/** Block-level params transformer — converts SubBlock values to tool-ready params */
|
|
||||||
paramsTransform?: (params: Record<string, any>) => Record<string, any>
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface Message {
|
export interface Message {
|
||||||
|
|||||||
@@ -4,12 +4,6 @@ import type { ChatCompletionChunk } from 'openai/resources/chat/completions'
|
|||||||
import type { CompletionUsage } from 'openai/resources/completions'
|
import type { CompletionUsage } from 'openai/resources/completions'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||||
import {
|
|
||||||
buildCanonicalIndex,
|
|
||||||
type CanonicalGroup,
|
|
||||||
getCanonicalValues,
|
|
||||||
isCanonicalPair,
|
|
||||||
} from '@/lib/workflows/subblocks/visibility'
|
|
||||||
import { isCustomTool } from '@/executor/constants'
|
import { isCustomTool } from '@/executor/constants'
|
||||||
import {
|
import {
|
||||||
getComputerUseModels,
|
getComputerUseModels,
|
||||||
@@ -443,10 +437,9 @@ export async function transformBlockTool(
|
|||||||
getAllBlocks: () => any[]
|
getAllBlocks: () => any[]
|
||||||
getTool: (toolId: string) => any
|
getTool: (toolId: string) => any
|
||||||
getToolAsync?: (toolId: string) => Promise<any>
|
getToolAsync?: (toolId: string) => Promise<any>
|
||||||
canonicalModes?: Record<string, 'basic' | 'advanced'>
|
|
||||||
}
|
}
|
||||||
): Promise<ProviderToolConfig | null> {
|
): Promise<ProviderToolConfig | null> {
|
||||||
const { selectedOperation, getAllBlocks, getTool, getToolAsync, canonicalModes } = options
|
const { selectedOperation, getAllBlocks, getTool, getToolAsync } = options
|
||||||
|
|
||||||
const blockDef = getAllBlocks().find((b: any) => b.type === block.type)
|
const blockDef = getAllBlocks().find((b: any) => b.type === block.type)
|
||||||
if (!blockDef) {
|
if (!blockDef) {
|
||||||
@@ -523,66 +516,12 @@ export async function transformBlockTool(
|
|||||||
uniqueToolId = `${toolConfig.id}_${userProvidedParams.knowledgeBaseId}`
|
uniqueToolId = `${toolConfig.id}_${userProvidedParams.knowledgeBaseId}`
|
||||||
}
|
}
|
||||||
|
|
||||||
const blockParamsFn = blockDef?.tools?.config?.params as
|
|
||||||
| ((p: Record<string, any>) => Record<string, any>)
|
|
||||||
| undefined
|
|
||||||
const blockInputDefs = blockDef?.inputs as Record<string, any> | undefined
|
|
||||||
|
|
||||||
const canonicalGroups: CanonicalGroup[] = blockDef?.subBlocks
|
|
||||||
? Object.values(buildCanonicalIndex(blockDef.subBlocks).groupsById).filter(isCanonicalPair)
|
|
||||||
: []
|
|
||||||
|
|
||||||
const needsTransform = blockParamsFn || blockInputDefs || canonicalGroups.length > 0
|
|
||||||
const paramsTransform = needsTransform
|
|
||||||
? (params: Record<string, any>): Record<string, any> => {
|
|
||||||
let result = { ...params }
|
|
||||||
|
|
||||||
for (const group of canonicalGroups) {
|
|
||||||
const { basicValue, advancedValue } = getCanonicalValues(group, result)
|
|
||||||
const scopedKey = `${block.type}:${group.canonicalId}`
|
|
||||||
const pairMode = canonicalModes?.[scopedKey] ?? 'basic'
|
|
||||||
const chosen = pairMode === 'advanced' ? advancedValue : basicValue
|
|
||||||
|
|
||||||
const sourceIds = [group.basicId, ...group.advancedIds].filter(Boolean) as string[]
|
|
||||||
sourceIds.forEach((id) => delete result[id])
|
|
||||||
|
|
||||||
if (chosen !== undefined) {
|
|
||||||
result[group.canonicalId] = chosen
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (blockParamsFn) {
|
|
||||||
const transformed = blockParamsFn(result)
|
|
||||||
result = { ...result, ...transformed }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (blockInputDefs) {
|
|
||||||
for (const [key, schema] of Object.entries(blockInputDefs)) {
|
|
||||||
const value = result[key]
|
|
||||||
if (typeof value === 'string' && value.trim().length > 0) {
|
|
||||||
const inputType = typeof schema === 'object' ? schema.type : schema
|
|
||||||
if (inputType === 'json' || inputType === 'array') {
|
|
||||||
try {
|
|
||||||
result[key] = JSON.parse(value.trim())
|
|
||||||
} catch {
|
|
||||||
// Not valid JSON — keep as string
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
: undefined
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id: uniqueToolId,
|
id: uniqueToolId,
|
||||||
name: toolName,
|
name: toolName,
|
||||||
description: toolDescription,
|
description: toolDescription,
|
||||||
params: userProvidedParams,
|
params: userProvidedParams,
|
||||||
parameters: llmSchema,
|
parameters: llmSchema,
|
||||||
paramsTransform,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1089,11 +1028,7 @@ export function getMaxOutputTokensForModel(model: string): number {
|
|||||||
* Prepare tool execution parameters, separating tool parameters from system parameters
|
* Prepare tool execution parameters, separating tool parameters from system parameters
|
||||||
*/
|
*/
|
||||||
export function prepareToolExecution(
|
export function prepareToolExecution(
|
||||||
tool: {
|
tool: { params?: Record<string, any>; parameters?: Record<string, any> },
|
||||||
params?: Record<string, any>
|
|
||||||
parameters?: Record<string, any>
|
|
||||||
paramsTransform?: (params: Record<string, any>) => Record<string, any>
|
|
||||||
},
|
|
||||||
llmArgs: Record<string, any>,
|
llmArgs: Record<string, any>,
|
||||||
request: {
|
request: {
|
||||||
workflowId?: string
|
workflowId?: string
|
||||||
@@ -1110,15 +1045,8 @@ export function prepareToolExecution(
|
|||||||
toolParams: Record<string, any>
|
toolParams: Record<string, any>
|
||||||
executionParams: Record<string, any>
|
executionParams: Record<string, any>
|
||||||
} {
|
} {
|
||||||
let toolParams = mergeToolParameters(tool.params || {}, llmArgs) as Record<string, any>
|
// Use centralized merge logic from tools/params
|
||||||
|
const toolParams = mergeToolParameters(tool.params || {}, llmArgs) as Record<string, any>
|
||||||
if (tool.paramsTransform) {
|
|
||||||
try {
|
|
||||||
toolParams = tool.paramsTransform(toolParams)
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn('paramsTransform failed, using raw params', { error: err })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const executionParams = {
|
const executionParams = {
|
||||||
...toolParams,
|
...toolParams,
|
||||||
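
The provider hunks above remove the optional paramsTransform hook from ProviderToolConfig, the code in transformBlockTool that built it (canonical basic/advanced pair resolution, the block-level params function, and JSON-parsing of string inputs declared as 'json' or 'array'), and its application in prepareToolExecution; the Serializer and SerializedBlock hunks further down drop the matching canonicalModes field. After the change, the merged tool params go to execution untransformed. The shape of the removed hook, reduced to its essentials:

    type ToolParams = Record<string, any>
    type ParamsTransform = (params: ToolParams) => ToolParams

    // How the left-hand side applied the (now removed) hook inside prepareToolExecution.
    function applyParamsTransform(params: ToolParams, transform?: ParamsTransform): ToolParams {
      if (!transform) return params
      try {
        // Collapsed basic/advanced canonical pairs and parsed JSON-ish string inputs.
        return transform(params)
      } catch {
        // The removed code logged a warning and fell back to the raw params.
        return params
      }
    }
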
|
|||||||
@@ -30,8 +30,8 @@ export const vertexProvider: ProviderConfig = {
|
|||||||
executeRequest: async (
|
executeRequest: async (
|
||||||
request: ProviderRequest
|
request: ProviderRequest
|
||||||
): Promise<ProviderResponse | StreamingExecution> => {
|
): Promise<ProviderResponse | StreamingExecution> => {
|
||||||
const vertexProject = request.vertexProject || env.VERTEX_PROJECT
|
const vertexProject = env.VERTEX_PROJECT || request.vertexProject
|
||||||
const vertexLocation = request.vertexLocation || env.VERTEX_LOCATION || 'us-central1'
|
const vertexLocation = env.VERTEX_LOCATION || request.vertexLocation || 'us-central1'
|
||||||
|
|
||||||
if (!vertexProject) {
|
if (!vertexProject) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
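
The Vertex provider hunk above flips the precedence of the project and location settings: the left-hand side lets a per-request value override the environment, while the right-hand side lets the environment variable win, with 'us-central1' still the final fallback for the location. A small illustration of the difference when both sources are set:

    const env = { VERTEX_PROJECT: 'env-project' }
    const request = { vertexProject: 'request-project' }

    const leftProject = request.vertexProject || env.VERTEX_PROJECT   // 'request-project'
    const rightProject = env.VERTEX_PROJECT || request.vertexProject  // 'env-project'
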
|
|||||||
@@ -137,36 +137,6 @@ function handleSecurityFiltering(request: NextRequest): NextResponse | null {
|
|||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
|
|
||||||
const UTM_KEYS = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_content'] as const
|
|
||||||
const UTM_COOKIE_NAME = 'sim_utm'
|
|
||||||
const UTM_COOKIE_MAX_AGE = 3600
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sets a `sim_utm` cookie when UTM params are present on auth pages.
|
|
||||||
* Captures UTM values, the HTTP Referer, landing page, and a timestamp.
|
|
||||||
*/
|
|
||||||
function setUtmCookie(request: NextRequest, response: NextResponse): void {
|
|
||||||
const { searchParams, pathname } = request.nextUrl
|
|
||||||
const hasUtm = UTM_KEYS.some((key) => searchParams.get(key))
|
|
||||||
if (!hasUtm) return
|
|
||||||
|
|
||||||
const utmData: Record<string, string> = {}
|
|
||||||
for (const key of UTM_KEYS) {
|
|
||||||
const value = searchParams.get(key)
|
|
||||||
if (value) utmData[key] = value
|
|
||||||
}
|
|
||||||
utmData.referrer_url = request.headers.get('referer') || ''
|
|
||||||
utmData.landing_page = pathname
|
|
||||||
utmData.created_at = Date.now().toString()
|
|
||||||
|
|
||||||
response.cookies.set(UTM_COOKIE_NAME, JSON.stringify(utmData), {
|
|
||||||
path: '/',
|
|
||||||
maxAge: UTM_COOKIE_MAX_AGE,
|
|
||||||
sameSite: 'lax',
|
|
||||||
httpOnly: false, // Client-side hook needs to detect cookie presence
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function proxy(request: NextRequest) {
|
export async function proxy(request: NextRequest) {
|
||||||
const url = request.nextUrl
|
const url = request.nextUrl
|
||||||
|
|
||||||
@@ -178,13 +148,10 @@ export async function proxy(request: NextRequest) {
|
|||||||
|
|
||||||
if (url.pathname === '/login' || url.pathname === '/signup') {
|
if (url.pathname === '/login' || url.pathname === '/signup') {
|
||||||
if (hasActiveSession) {
|
if (hasActiveSession) {
|
||||||
const redirect = NextResponse.redirect(new URL('/workspace', request.url))
|
return NextResponse.redirect(new URL('/workspace', request.url))
|
||||||
setUtmCookie(request, redirect)
|
|
||||||
return redirect
|
|
||||||
}
|
}
|
||||||
const response = NextResponse.next()
|
const response = NextResponse.next()
|
||||||
response.headers.set('Content-Security-Policy', generateRuntimeCSP())
|
response.headers.set('Content-Security-Policy', generateRuntimeCSP())
|
||||||
setUtmCookie(request, response)
|
|
||||||
return response
|
return response
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -280,7 +280,7 @@ export class Serializer {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
const serialized: SerializedBlock = {
|
return {
|
||||||
id: block.id,
|
id: block.id,
|
||||||
position: block.position,
|
position: block.position,
|
||||||
config: {
|
config: {
|
||||||
@@ -300,12 +300,6 @@ export class Serializer {
|
|||||||
},
|
},
|
||||||
enabled: block.enabled,
|
enabled: block.enabled,
|
||||||
}
|
}
|
||||||
|
|
||||||
if (block.data?.canonicalModes) {
|
|
||||||
serialized.canonicalModes = block.data.canonicalModes as Record<string, 'basic' | 'advanced'>
|
|
||||||
}
|
|
||||||
|
|
||||||
return serialized
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private extractParams(block: BlockState): Record<string, any> {
|
private extractParams(block: BlockState): Record<string, any> {
|
||||||
|
|||||||
@@ -38,8 +38,6 @@ export interface SerializedBlock {
|
|||||||
color?: string
|
color?: string
|
||||||
}
|
}
|
||||||
enabled: boolean
|
enabled: boolean
|
||||||
/** Canonical mode overrides from block.data (used by agent handler for tool param resolution) */
|
|
||||||
canonicalModes?: Record<string, 'basic' | 'advanced'>
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface SerializedLoop {
|
export interface SerializedLoop {
|
||||||
|
|||||||
@@ -95,7 +95,7 @@ export const fileParserTool: ToolConfig<FileParserInput, FileParserOutput> = {
|
|||||||
filePath: {
|
filePath: {
|
||||||
type: 'string',
|
type: 'string',
|
||||||
required: false,
|
required: false,
|
||||||
visibility: 'hidden',
|
visibility: 'user-only',
|
||||||
description: 'Path to the file(s). Can be a single path, URL, or an array of paths.',
|
description: 'Path to the file(s). Can be a single path, URL, or an array of paths.',
|
||||||
},
|
},
|
||||||
file: {
|
file: {
|
||||||
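
The fileParserTool hunk above changes the filePath parameter's visibility from 'hidden' to 'user-only'. Judging only from the other values used in this compare ('user-only' on API keys, 'user-or-llm' on query-style params), 'user-only' presumably surfaces the field to the user while keeping it away from the LLM, whereas 'hidden' exposed it to neither; the exact semantics of the visibility enum are not defined anywhere in this diff. The changed field, side by side:

    const filePathLeft = { type: 'string', required: false, visibility: 'hidden' }     // '-' side
    const filePathRight = { type: 'string', required: false, visibility: 'user-only' } // '+' side
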
|
|||||||
@@ -1,3 +0,0 @@
|
|||||||
export * from './types'
|
|
||||||
export { googleBooksVolumeDetailsTool } from './volume_details'
|
|
||||||
export { googleBooksVolumeSearchTool } from './volume_search'
|
|
||||||
@@ -1,124 +0,0 @@
|
|||||||
import type { ToolResponse } from '@/tools/types'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Raw volume item from Google Books API search response
|
|
||||||
*/
|
|
||||||
export interface GoogleBooksVolumeItem {
|
|
||||||
id: string
|
|
||||||
volumeInfo: {
|
|
||||||
title?: string
|
|
||||||
subtitle?: string
|
|
||||||
authors?: string[]
|
|
||||||
publisher?: string
|
|
||||||
publishedDate?: string
|
|
||||||
description?: string
|
|
||||||
pageCount?: number
|
|
||||||
categories?: string[]
|
|
||||||
averageRating?: number
|
|
||||||
ratingsCount?: number
|
|
||||||
language?: string
|
|
||||||
previewLink?: string
|
|
||||||
infoLink?: string
|
|
||||||
imageLinks?: {
|
|
||||||
thumbnail?: string
|
|
||||||
smallThumbnail?: string
|
|
||||||
}
|
|
||||||
industryIdentifiers?: Array<{
|
|
||||||
type: string
|
|
||||||
identifier: string
|
|
||||||
}>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Raw volume response from Google Books API details endpoint
|
|
||||||
*/
|
|
||||||
export interface GoogleBooksVolumeResponse {
|
|
||||||
id: string
|
|
||||||
volumeInfo: {
|
|
||||||
title?: string
|
|
||||||
subtitle?: string
|
|
||||||
authors?: string[]
|
|
||||||
publisher?: string
|
|
||||||
publishedDate?: string
|
|
||||||
description?: string
|
|
||||||
pageCount?: number
|
|
||||||
categories?: string[]
|
|
||||||
averageRating?: number
|
|
||||||
ratingsCount?: number
|
|
||||||
language?: string
|
|
||||||
previewLink?: string
|
|
||||||
infoLink?: string
|
|
||||||
imageLinks?: {
|
|
||||||
thumbnail?: string
|
|
||||||
smallThumbnail?: string
|
|
||||||
}
|
|
||||||
industryIdentifiers?: Array<{
|
|
||||||
type: string
|
|
||||||
identifier: string
|
|
||||||
}>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Volume information structure shared between search and details responses
|
|
||||||
*/
|
|
||||||
export interface VolumeInfo {
|
|
||||||
id: string
|
|
||||||
title: string
|
|
||||||
subtitle: string | null
|
|
||||||
authors: string[]
|
|
||||||
publisher: string | null
|
|
||||||
publishedDate: string | null
|
|
||||||
description: string | null
|
|
||||||
pageCount: number | null
|
|
||||||
categories: string[]
|
|
||||||
averageRating: number | null
|
|
||||||
ratingsCount: number | null
|
|
||||||
language: string | null
|
|
||||||
previewLink: string | null
|
|
||||||
infoLink: string | null
|
|
||||||
thumbnailUrl: string | null
|
|
||||||
isbn10: string | null
|
|
||||||
isbn13: string | null
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parameters for searching volumes
|
|
||||||
*/
|
|
||||||
export interface GoogleBooksVolumeSearchParams {
|
|
||||||
apiKey: string
|
|
||||||
query: string
|
|
||||||
filter?: 'partial' | 'full' | 'free-ebooks' | 'paid-ebooks' | 'ebooks'
|
|
||||||
printType?: 'all' | 'books' | 'magazines'
|
|
||||||
orderBy?: 'relevance' | 'newest'
|
|
||||||
startIndex?: number
|
|
||||||
maxResults?: number
|
|
||||||
langRestrict?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response from volume search
|
|
||||||
*/
|
|
||||||
export interface GoogleBooksVolumeSearchResponse extends ToolResponse {
|
|
||||||
output: {
|
|
||||||
totalItems: number
|
|
||||||
volumes: VolumeInfo[]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parameters for getting volume details
|
|
||||||
*/
|
|
||||||
export interface GoogleBooksVolumeDetailsParams {
|
|
||||||
apiKey: string
|
|
||||||
volumeId: string
|
|
||||||
projection?: 'full' | 'lite'
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Response from volume details
|
|
||||||
*/
|
|
||||||
export interface GoogleBooksVolumeDetailsResponse extends ToolResponse {
|
|
||||||
output: VolumeInfo
|
|
||||||
}
|
|
||||||
@@ -1,172 +0,0 @@
|
|||||||
import type {
|
|
||||||
GoogleBooksVolumeDetailsParams,
|
|
||||||
GoogleBooksVolumeDetailsResponse,
|
|
||||||
GoogleBooksVolumeResponse,
|
|
||||||
} from '@/tools/google_books/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export const googleBooksVolumeDetailsTool: ToolConfig<
|
|
||||||
GoogleBooksVolumeDetailsParams,
|
|
||||||
GoogleBooksVolumeDetailsResponse
|
|
||||||
> = {
|
|
||||||
id: 'google_books_volume_details',
|
|
||||||
name: 'Google Books Volume Details',
|
|
||||||
description: 'Get detailed information about a specific book volume',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
params: {
|
|
||||||
apiKey: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Google Books API key',
|
|
||||||
},
|
|
||||||
volumeId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the volume to retrieve',
|
|
||||||
},
|
|
||||||
projection: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Projection level (full, lite)',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: (params) => {
|
|
||||||
const url = new URL(`https://www.googleapis.com/books/v1/volumes/${params.volumeId.trim()}`)
|
|
||||||
url.searchParams.set('key', params.apiKey.trim())
|
|
||||||
|
|
||||||
if (params.projection) {
|
|
||||||
url.searchParams.set('projection', params.projection)
|
|
||||||
}
|
|
||||||
|
|
||||||
return url.toString()
|
|
||||||
},
|
|
||||||
method: 'GET',
|
|
||||||
headers: () => ({
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data: GoogleBooksVolumeResponse = await response.json()
|
|
||||||
|
|
||||||
if (!data.volumeInfo) {
|
|
||||||
throw new Error('Volume not found')
|
|
||||||
}
|
|
||||||
|
|
||||||
const info = data.volumeInfo
|
|
||||||
const identifiers = info.industryIdentifiers ?? []
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
id: data.id,
|
|
||||||
title: info.title ?? '',
|
|
||||||
subtitle: info.subtitle ?? null,
|
|
||||||
authors: info.authors ?? [],
|
|
||||||
publisher: info.publisher ?? null,
|
|
||||||
publishedDate: info.publishedDate ?? null,
|
|
||||||
description: info.description ?? null,
|
|
||||||
pageCount: info.pageCount ?? null,
|
|
||||||
categories: info.categories ?? [],
|
|
||||||
averageRating: info.averageRating ?? null,
|
|
||||||
ratingsCount: info.ratingsCount ?? null,
|
|
||||||
language: info.language ?? null,
|
|
||||||
previewLink: info.previewLink ?? null,
|
|
||||||
infoLink: info.infoLink ?? null,
|
|
||||||
thumbnailUrl: info.imageLinks?.thumbnail ?? info.imageLinks?.smallThumbnail ?? null,
|
|
||||||
isbn10: identifiers.find((id) => id.type === 'ISBN_10')?.identifier ?? null,
|
|
||||||
isbn13: identifiers.find((id) => id.type === 'ISBN_13')?.identifier ?? null,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
id: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Volume ID',
|
|
||||||
},
|
|
||||||
title: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Book title',
|
|
||||||
},
|
|
||||||
subtitle: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Book subtitle',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
authors: {
|
|
||||||
type: 'array',
|
|
||||||
description: 'List of authors',
|
|
||||||
},
|
|
||||||
publisher: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Publisher name',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
publishedDate: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Publication date',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
description: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Book description',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
pageCount: {
|
|
||||||
type: 'number',
|
|
||||||
description: 'Number of pages',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
categories: {
|
|
||||||
type: 'array',
|
|
||||||
description: 'Book categories',
|
|
||||||
},
|
|
||||||
averageRating: {
|
|
||||||
type: 'number',
|
|
||||||
description: 'Average rating (1-5)',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
ratingsCount: {
|
|
||||||
type: 'number',
|
|
||||||
description: 'Number of ratings',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
language: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Language code',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
previewLink: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Link to preview on Google Books',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
infoLink: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Link to info page',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
thumbnailUrl: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Book cover thumbnail URL',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
isbn10: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'ISBN-10 identifier',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
isbn13: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'ISBN-13 identifier',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,176 +0,0 @@
|
|||||||
import type {
|
|
||||||
GoogleBooksVolumeItem,
|
|
||||||
GoogleBooksVolumeSearchParams,
|
|
||||||
GoogleBooksVolumeSearchResponse,
|
|
||||||
VolumeInfo,
|
|
||||||
} from '@/tools/google_books/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
function extractVolumeInfo(item: GoogleBooksVolumeItem): VolumeInfo {
|
|
||||||
const info = item.volumeInfo
|
|
||||||
const identifiers = info.industryIdentifiers ?? []
|
|
||||||
|
|
||||||
return {
|
|
||||||
id: item.id,
|
|
||||||
title: info.title ?? '',
|
|
||||||
subtitle: info.subtitle ?? null,
|
|
||||||
authors: info.authors ?? [],
|
|
||||||
publisher: info.publisher ?? null,
|
|
||||||
publishedDate: info.publishedDate ?? null,
|
|
||||||
description: info.description ?? null,
|
|
||||||
pageCount: info.pageCount ?? null,
|
|
||||||
categories: info.categories ?? [],
|
|
||||||
averageRating: info.averageRating ?? null,
|
|
||||||
ratingsCount: info.ratingsCount ?? null,
|
|
||||||
language: info.language ?? null,
|
|
||||||
previewLink: info.previewLink ?? null,
|
|
||||||
infoLink: info.infoLink ?? null,
|
|
||||||
thumbnailUrl: info.imageLinks?.thumbnail ?? info.imageLinks?.smallThumbnail ?? null,
|
|
||||||
isbn10: identifiers.find((id) => id.type === 'ISBN_10')?.identifier ?? null,
|
|
||||||
isbn13: identifiers.find((id) => id.type === 'ISBN_13')?.identifier ?? null,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const googleBooksVolumeSearchTool: ToolConfig<
|
|
||||||
GoogleBooksVolumeSearchParams,
|
|
||||||
GoogleBooksVolumeSearchResponse
|
|
||||||
> = {
|
|
||||||
id: 'google_books_volume_search',
|
|
||||||
name: 'Google Books Volume Search',
|
|
||||||
description: 'Search for books using the Google Books API',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
params: {
|
|
||||||
apiKey: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Google Books API key',
|
|
||||||
},
|
|
||||||
query: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description:
|
|
||||||
'Search query. Supports special keywords: intitle:, inauthor:, inpublisher:, subject:, isbn:',
|
|
||||||
},
|
|
||||||
filter: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description:
|
|
||||||
'Filter results by availability (partial, full, free-ebooks, paid-ebooks, ebooks)',
|
|
||||||
},
|
|
||||||
printType: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Restrict to print type (all, books, magazines)',
|
|
||||||
},
|
|
||||||
orderBy: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Sort order (relevance, newest)',
|
|
||||||
    },
    startIndex: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Index of the first result to return (for pagination)',
    },
    maxResults: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of results to return (1-40)',
    },
    langRestrict: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Restrict results to a specific language (ISO 639-1 code)',
    },
  },

  request: {
    url: (params) => {
      const url = new URL('https://www.googleapis.com/books/v1/volumes')
      url.searchParams.set('q', params.query.trim())
      url.searchParams.set('key', params.apiKey.trim())

      if (params.filter) {
        url.searchParams.set('filter', params.filter)
      }
      if (params.printType) {
        url.searchParams.set('printType', params.printType)
      }
      if (params.orderBy) {
        url.searchParams.set('orderBy', params.orderBy)
      }
      if (params.startIndex !== undefined) {
        url.searchParams.set('startIndex', String(params.startIndex))
      }
      if (params.maxResults !== undefined) {
        url.searchParams.set('maxResults', String(params.maxResults))
      }
      if (params.langRestrict) {
        url.searchParams.set('langRestrict', params.langRestrict)
      }

      return url.toString()
    },
    method: 'GET',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    const items: GoogleBooksVolumeItem[] = data.items ?? []
    const volumes = items.map(extractVolumeInfo)

    return {
      success: true,
      output: {
        totalItems: data.totalItems ?? 0,
        volumes,
      },
    }
  },

  outputs: {
    totalItems: {
      type: 'number',
      description: 'Total number of matching results',
    },
    volumes: {
      type: 'array',
      description: 'List of matching volumes',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Volume ID' },
          title: { type: 'string', description: 'Book title' },
          subtitle: { type: 'string', description: 'Book subtitle' },
          authors: { type: 'array', description: 'List of authors' },
          publisher: { type: 'string', description: 'Publisher name' },
          publishedDate: { type: 'string', description: 'Publication date' },
          description: { type: 'string', description: 'Book description' },
          pageCount: { type: 'number', description: 'Number of pages' },
          categories: { type: 'array', description: 'Book categories' },
          averageRating: { type: 'number', description: 'Average rating (1-5)' },
          ratingsCount: { type: 'number', description: 'Number of ratings' },
          language: { type: 'string', description: 'Language code' },
          previewLink: { type: 'string', description: 'Link to preview on Google Books' },
          infoLink: { type: 'string', description: 'Link to info page' },
          thumbnailUrl: { type: 'string', description: 'Book cover thumbnail URL' },
          isbn10: { type: 'string', description: 'ISBN-10 identifier' },
          isbn13: { type: 'string', description: 'ISBN-13 identifier' },
        },
      },
    },
  },
}
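For reference, a minimal sketch (not from the repository) of exercising the same Google Books volumes endpoint that the removed tool's request.url builder assembles above; the search string, maxResults value, and environment variable name are placeholders.

// Sketch only: mirrors the query parameters the removed tool sets above.
async function searchVolumes() {
  const url = new URL('https://www.googleapis.com/books/v1/volumes')
  url.searchParams.set('q', 'intitle:refactoring') // placeholder query
  url.searchParams.set('maxResults', '5')
  url.searchParams.set('langRestrict', 'en')
  url.searchParams.set('key', process.env.GOOGLE_BOOKS_API_KEY ?? '') // hypothetical env var name

  const response = await fetch(url.toString(), {
    headers: { 'Content-Type': 'application/json' },
  })
  const data = await response.json()
  console.log(data.totalItems, data.items?.[0]?.volumeInfo?.title)
}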
@@ -6,7 +6,7 @@ import {
   validateUrlWithDNS,
 } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl, getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { parseMcpToolId } from '@/lib/mcp/utils'
 import { isCustomTool, isMcpTool } from '@/executor/constants'
 import { resolveSkillContent } from '@/executor/handlers/agent/skills-resolver'
@@ -285,7 +285,7 @@ export async function executeTool(
       `[${requestId}] Tool ${toolId} needs access token for credential: ${contextParams.credential}`
     )
     try {
-      const baseUrl = getInternalApiBaseUrl()
+      const baseUrl = getBaseUrl()

       const workflowId = contextParams._context?.workflowId
       const userId = contextParams._context?.userId
@@ -597,12 +597,12 @@ async function executeToolRequest(
   const requestParams = formatRequestParams(tool, params)

   try {
+    const baseUrl = getBaseUrl()
     const endpointUrl =
       typeof tool.request.url === 'function' ? tool.request.url(params) : tool.request.url
-    const isInternalRoute = endpointUrl.startsWith('/api/')
-    const baseUrl = isInternalRoute ? getInternalApiBaseUrl() : getBaseUrl()

     const fullUrlObj = new URL(endpointUrl, baseUrl)
+    const isInternalRoute = endpointUrl.startsWith('/api/')

     if (isInternalRoute) {
       const workflowId = params._context?.workflowId
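The hunk above replaces the per-route choice between getInternalApiBaseUrl() and getBaseUrl() with a single getBaseUrl() call made before the endpoint URL is resolved. A small sketch of the standard WHATWG URL resolution this relies on; the base URL below is a made-up placeholder, not a value from the repository.

// Relative internal routes resolve against whatever base is supplied:
new URL('/api/tools/custom', 'https://sim.example.com').toString()
// -> 'https://sim.example.com/api/tools/custom'

// Absolute tool endpoints ignore the base argument entirely:
new URL('https://www.googleapis.com/books/v1/volumes', 'https://sim.example.com').toString()
// -> 'https://www.googleapis.com/books/v1/volumes'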
@@ -922,7 +922,7 @@ async function executeMcpTool(

   const { serverId, toolName } = parseMcpToolId(toolId)

-  const baseUrl = getInternalApiBaseUrl()
+  const baseUrl = getBaseUrl()

   const headers: Record<string, string> = { 'Content-Type': 'application/json' }

@@ -36,7 +36,7 @@ export const jiraAddAttachmentTool: ToolConfig<JiraAddAttachmentParams, JiraAddA
     files: {
       type: 'file[]',
       required: true,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'Files to attach to the Jira issue',
     },
     cloudId: {
@@ -35,7 +35,7 @@ export const linearCreateAttachmentTool: ToolConfig<
     file: {
       type: 'file',
       required: false,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'File to attach',
     },
     title: {
@@ -1,5 +1,5 @@
 import { createLogger } from '@sim/logger'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import type { BaseImageRequestBody } from '@/tools/openai/types'
 import type { ToolConfig } from '@/tools/types'

@@ -122,7 +122,7 @@ export const imageTool: ToolConfig = {
     if (imageUrl && !base64Image) {
       try {
         logger.info('Fetching image from URL via proxy...')
-        const baseUrl = getInternalApiBaseUrl()
+        const baseUrl = getBaseUrl()
         const proxyUrl = new URL('/api/tools/image', baseUrl)
         proxyUrl.searchParams.append('url', imageUrl)

@@ -1,7 +1,6 @@
 import {
   buildCanonicalIndex,
   type CanonicalIndex,
-  type CanonicalModeOverrides,
   evaluateSubBlockCondition,
   getCanonicalValues,
   isCanonicalPair,
@@ -13,10 +12,7 @@ import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
 export {
   buildCanonicalIndex,
   type CanonicalIndex,
-  type CanonicalModeOverrides,
   evaluateSubBlockCondition,
-  isCanonicalPair,
-  resolveCanonicalMode,
   type SubBlockCondition,
 }

@@ -1,17 +1,13 @@
 import { createLogger } from '@sim/logger'
 import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
 import {
-  buildCanonicalIndex,
-  type CanonicalModeOverrides,
   evaluateSubBlockCondition,
-  isCanonicalPair,
-  resolveCanonicalMode,
   type SubBlockCondition,
 } from '@/lib/workflows/subblocks/visibility'
-import type { SubBlockConfig as BlockSubBlockConfig, GenerationType } from '@/blocks/types'
+import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
 import { safeAssign } from '@/tools/safe-assign'
 import { isEmptyTagValue } from '@/tools/shared/tags'
-import type { OAuthConfig, ParameterVisibility, ToolConfig } from '@/tools/types'
+import type { ParameterVisibility, ToolConfig } from '@/tools/types'
 import { getTool } from '@/tools/utils'

 const logger = createLogger('ToolsParams')
@@ -68,14 +64,6 @@ export interface UIComponentConfig {
   mode?: 'basic' | 'advanced' | 'both' | 'trigger'
   /** The actual subblock ID this config was derived from */
   actualSubBlockId?: string
-  /** Wand configuration for AI assistance */
-  wandConfig?: {
-    enabled: boolean
-    prompt: string
-    generationType?: GenerationType
-    placeholder?: string
-    maintainHistory?: boolean
-  }
 }

 export interface SubBlockConfig {
@@ -339,7 +327,6 @@ export function getToolParametersConfig(
       canonicalParamId: subBlock.canonicalParamId,
       mode: subBlock.mode,
       actualSubBlockId: subBlock.id,
-      wandConfig: subBlock.wandConfig,
     }
   }
 }
@@ -825,200 +812,3 @@ export function formatParameterLabel(paramId: string): string {
   // Simple case - just capitalize first letter
   return paramId.charAt(0).toUpperCase() + paramId.slice(1)
 }
-
-/**
- * SubBlock IDs that are "structural" — they control tool routing or auth,
- * not user-facing parameters. These are excluded from tool-input rendering
- * unless they have an explicit paramVisibility set.
- */
-const STRUCTURAL_SUBBLOCK_IDS = new Set(['operation', 'authMethod', 'destinationType'])
-
-/**
- * SubBlock types that represent auth/credential inputs handled separately
- * by the tool-input OAuth credential selector.
- */
-const AUTH_SUBBLOCK_TYPES = new Set(['oauth-input'])
-
-/**
- * SubBlock types that should never appear in tool-input context.
- */
-const EXCLUDED_SUBBLOCK_TYPES = new Set([
-  'tool-input',
-  'skill-input',
-  'condition-input',
-  'eval-input',
-  'webhook-config',
-  'schedule-info',
-  'trigger-save',
-  'input-format',
-  'response-format',
-  'mcp-server-selector',
-  'mcp-tool-selector',
-  'mcp-dynamic-args',
-  'input-mapping',
-  'variables-input',
-  'messages-input',
-  'router-input',
-  'text',
-])
-
-export interface SubBlocksForToolInput {
-  toolConfig: ToolConfig
-  subBlocks: BlockSubBlockConfig[]
-  oauthConfig?: OAuthConfig
-}
-
-/**
- * Returns filtered SubBlockConfig[] for rendering in tool-input context.
- * Uses subblock definitions as the primary source of UI metadata,
- * getting all features (wandConfig, rich conditions, dependsOn, etc.) for free.
- *
- * For blocks without paramVisibility annotations, falls back to inferring
- * visibility from the tool's param definitions.
- */
-export function getSubBlocksForToolInput(
-  toolId: string,
-  blockType: string,
-  currentValues?: Record<string, unknown>,
-  canonicalModeOverrides?: CanonicalModeOverrides
-): SubBlocksForToolInput | null {
-  try {
-    const toolConfig = getTool(toolId)
-    if (!toolConfig) {
-      logger.warn(`Tool not found: ${toolId}`)
-      return null
-    }
-
-    const blockConfigs = getBlockConfigurations()
-    const blockConfig = blockConfigs[blockType]
-    if (!blockConfig?.subBlocks?.length) {
-      return null
-    }
-
-    const allSubBlocks = blockConfig.subBlocks as BlockSubBlockConfig[]
-    const canonicalIndex = buildCanonicalIndex(allSubBlocks)
-
-    // Build values for condition evaluation
-    const values = currentValues || {}
-    const valuesWithOperation = { ...values }
-    if (valuesWithOperation.operation === undefined) {
-      const parts = toolId.split('_')
-      valuesWithOperation.operation =
-        parts.length >= 3 ? parts.slice(2).join('_') : parts[parts.length - 1]
-    }
-
-    // Build a map of tool param IDs to their resolved visibility
-    const toolParamVisibility: Record<string, ParameterVisibility> = {}
-    for (const [paramId, param] of Object.entries(toolConfig.params || {})) {
-      toolParamVisibility[paramId] =
-        param.visibility ?? (param.required ? 'user-or-llm' : 'user-only')
-    }
-
-    // Track which canonical groups we've already included (to avoid duplicates)
-    const includedCanonicalIds = new Set<string>()
-
-    const filtered: BlockSubBlockConfig[] = []
-
-    for (const sb of allSubBlocks) {
-      // Skip excluded types
-      if (EXCLUDED_SUBBLOCK_TYPES.has(sb.type)) continue
-
-      // Skip trigger-mode-only subblocks
-      if (sb.mode === 'trigger') continue
-
-      // Determine the effective param ID (canonical or subblock id)
-      const effectiveParamId = sb.canonicalParamId || sb.id
-
-      // Resolve paramVisibility: explicit > inferred from tool params > skip
-      let visibility = sb.paramVisibility
-      if (!visibility) {
-        // Infer from structural checks
-        if (STRUCTURAL_SUBBLOCK_IDS.has(sb.id)) {
-          visibility = 'hidden'
-        } else if (AUTH_SUBBLOCK_TYPES.has(sb.type)) {
-          visibility = 'hidden'
-        } else if (
-          sb.password &&
-          (sb.id === 'botToken' || sb.id === 'accessToken' || sb.id === 'apiKey')
-        ) {
-          // Auth tokens without explicit paramVisibility are hidden
-          // (they're handled by the OAuth credential selector or structurally)
-          // But only if they don't have a matching tool param
-          if (!(sb.id in toolParamVisibility)) {
-            visibility = 'hidden'
-          } else {
-            visibility = toolParamVisibility[sb.id] || 'user-or-llm'
-          }
-        } else if (effectiveParamId in toolParamVisibility) {
-          // Fallback: infer from tool param visibility
-          visibility = toolParamVisibility[effectiveParamId]
-        } else if (sb.id in toolParamVisibility) {
-          visibility = toolParamVisibility[sb.id]
-        } else if (sb.canonicalParamId) {
-          // SubBlock has a canonicalParamId that doesn't directly match a tool param.
-          // This means the block's params() function transforms it before sending to the tool
-          // (e.g. listFolderId → folderId). These are user-facing inputs, default to user-or-llm.
-          visibility = 'user-or-llm'
-        } else {
-          // SubBlock has no corresponding tool param — skip it
-          continue
-        }
-      }
-
-      // Filter by visibility: exclude hidden and llm-only
-      if (visibility === 'hidden' || visibility === 'llm-only') continue
-
-      // Evaluate condition against current values
-      if (sb.condition) {
-        const conditionMet = evaluateSubBlockCondition(
-          sb.condition as SubBlockCondition,
-          valuesWithOperation
-        )
-        if (!conditionMet) continue
-      }
-
-      // Handle canonical pairs: only include the active mode variant
-      const canonicalId = canonicalIndex.canonicalIdBySubBlockId[sb.id]
-      if (canonicalId) {
-        const group = canonicalIndex.groupsById[canonicalId]
-        if (group && isCanonicalPair(group)) {
-          if (includedCanonicalIds.has(canonicalId)) continue
-          includedCanonicalIds.add(canonicalId)
-
-          // Determine active mode
-          const mode = resolveCanonicalMode(group, valuesWithOperation, canonicalModeOverrides)
-          if (mode === 'advanced') {
-            // Find the advanced variant
-            const advancedSb = allSubBlocks.find((s) => group.advancedIds.includes(s.id))
-            if (advancedSb) {
-              filtered.push({ ...advancedSb, paramVisibility: visibility })
-            }
-          } else {
-            // Include basic variant (current sb if it's the basic one)
-            if (group.basicId === sb.id) {
-              filtered.push({ ...sb, paramVisibility: visibility })
-            } else {
-              const basicSb = allSubBlocks.find((s) => s.id === group.basicId)
-              if (basicSb) {
-                filtered.push({ ...basicSb, paramVisibility: visibility })
-              }
-            }
-          }
-          continue
-        }
-      }
-
-      // Non-canonical, non-hidden, condition-passing subblock
-      filtered.push({ ...sb, paramVisibility: visibility })
-    }
-
-    return {
-      toolConfig,
-      subBlocks: filtered,
-      oauthConfig: toolConfig.oauth,
-    }
-  } catch (error) {
-    logger.error('Error getting subblocks for tool input:', error)
-    return null
-  }
-}
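A hypothetical call site for the removed getSubBlocksForToolInput helper, inferred only from the signature and return shape shown in the removed lines above; the tool ID, block type, and values object are illustrative placeholders, not entries taken from the registry.

// Illustrative only: 'jira_add_attachment', 'jira', and the values are assumptions.
const resolved = getSubBlocksForToolInput('jira_add_attachment', 'jira', { operation: 'add_attachment' })
if (resolved) {
  for (const sb of resolved.subBlocks) {
    // Each returned subblock carries the visibility resolved by the helper.
    console.log(sb.id, sb.paramVisibility)
  }
}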
@@ -18,7 +18,7 @@ export const pulseParserTool: ToolConfig<PulseParserInput, PulseParserOutput> =
     file: {
       type: 'file',
       required: false,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'Document file to be processed',
     },
     fileUpload: {
@@ -268,7 +268,7 @@ export const pulseParserV2Tool: ToolConfig<PulseParserV2Input, PulseParserOutput
     file: {
       type: 'file',
       required: true,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'Document to be processed',
     },
     pages: pulseParserTool.params.pages,
@@ -22,7 +22,7 @@ export const reductoParserTool: ToolConfig<ReductoParserInput, ReductoParserOutp
     file: {
       type: 'file',
       required: false,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'Document file to be processed',
     },
     fileUpload: {
@@ -196,7 +196,7 @@ export const reductoParserV2Tool: ToolConfig<ReductoParserV2Input, ReductoParser
     file: {
       type: 'file',
       required: true,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'PDF document to be processed',
     },
     pages: reductoParserTool.params.pages,
@@ -526,7 +526,6 @@ import {
   gmailUnarchiveV2Tool,
 } from '@/tools/gmail'
 import { googleSearchTool } from '@/tools/google'
-import { googleBooksVolumeDetailsTool, googleBooksVolumeSearchTool } from '@/tools/google_books'
 import {
   googleCalendarCreateTool,
   googleCalendarCreateV2Tool,
@@ -2557,8 +2556,6 @@ export const tools: Record<string, ToolConfig> = {
   google_docs_read: googleDocsReadTool,
   google_docs_write: googleDocsWriteTool,
   google_docs_create: googleDocsCreateTool,
-  google_books_volume_search: googleBooksVolumeSearchTool,
-  google_books_volume_details: googleBooksVolumeDetailsTool,
   google_maps_air_quality: googleMapsAirQualityTool,
   google_maps_directions: googleMapsDirectionsTool,
   google_maps_distance_matrix: googleMapsDistanceMatrixTool,
@@ -26,13 +26,6 @@ export const s3GetObjectTool: ToolConfig = {
       visibility: 'user-only',
       description: 'Your AWS Secret Access Key',
     },
-    region: {
-      type: 'string',
-      required: false,
-      visibility: 'user-only',
-      description:
-        'Optional region override when URL does not include region (e.g., us-east-1, eu-west-1)',
-    },
     s3Uri: {
       type: 'string',
       required: true,
@@ -44,7 +37,7 @@ export const s3GetObjectTool: ToolConfig = {
   request: {
     url: (params) => {
       try {
-        const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri, params.region)
+        const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri)

         params.bucketName = bucketName
         params.region = region
@@ -53,7 +46,7 @@ export const s3GetObjectTool: ToolConfig = {
         return `https://${bucketName}.s3.${region}.amazonaws.com/${encodeS3PathComponent(objectKey)}`
       } catch (_error) {
         throw new Error(
-          'Invalid S3 Object URL. Use a valid S3 URL and optionally provide region if the URL omits it.'
+          'Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file'
         )
       }
     },
@@ -62,7 +55,7 @@ export const s3GetObjectTool: ToolConfig = {
     try {
       // Parse S3 URI if not already parsed
       if (!params.bucketName || !params.region || !params.objectKey) {
-        const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri, params.region)
+        const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri)
         params.bucketName = bucketName
         params.region = region
         params.objectKey = objectKey
@@ -109,7 +102,7 @@ export const s3GetObjectTool: ToolConfig = {
   transformResponse: async (response: Response, params) => {
     // Parse S3 URI if not already parsed
     if (!params.bucketName || !params.region || !params.objectKey) {
-      const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri, params.region)
+      const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri)
       params.bucketName = bucketName
       params.region = region
       params.objectKey = objectKey
@@ -20,10 +20,7 @@ export function getSignatureKey(
   return kSigning
 }

-export function parseS3Uri(
-  s3Uri: string,
-  fallbackRegion?: string
-): {
+export function parseS3Uri(s3Uri: string): {
   bucketName: string
   region: string
   objectKey: string
@@ -31,55 +28,10 @@ export function parseS3Uri(
   try {
     const url = new URL(s3Uri)
     const hostname = url.hostname
-    const normalizedPath = url.pathname.startsWith('/') ? url.pathname.slice(1) : url.pathname
-    const virtualHostedDualstackMatch = hostname.match(
-      /^(.+)\.s3\.dualstack\.([^.]+)\.amazonaws\.com(?:\.cn)?$/
-    )
-    const virtualHostedRegionalMatch = hostname.match(
-      /^(.+)\.s3[.-]([^.]+)\.amazonaws\.com(?:\.cn)?$/
-    )
-    const virtualHostedGlobalMatch = hostname.match(/^(.+)\.s3\.amazonaws\.com(?:\.cn)?$/)
-
-    const pathStyleDualstackMatch = hostname.match(
-      /^s3\.dualstack\.([^.]+)\.amazonaws\.com(?:\.cn)?$/
-    )
-    const pathStyleRegionalMatch = hostname.match(/^s3[.-]([^.]+)\.amazonaws\.com(?:\.cn)?$/)
-    const pathStyleGlobalMatch = hostname.match(/^s3\.amazonaws\.com(?:\.cn)?$/)
-
-    const isPathStyleHost = Boolean(
-      pathStyleDualstackMatch || pathStyleRegionalMatch || pathStyleGlobalMatch
-    )
-
-    const firstSlashIndex = normalizedPath.indexOf('/')
-    const pathStyleBucketName =
-      firstSlashIndex === -1 ? normalizedPath : normalizedPath.slice(0, firstSlashIndex)
-    const pathStyleObjectKey =
-      firstSlashIndex === -1 ? '' : normalizedPath.slice(firstSlashIndex + 1)
-
-    const bucketName = isPathStyleHost
-      ? pathStyleBucketName
-      : (virtualHostedDualstackMatch?.[1] ??
-        virtualHostedRegionalMatch?.[1] ??
-        virtualHostedGlobalMatch?.[1] ??
-        '')
-
-    const rawObjectKey = isPathStyleHost ? pathStyleObjectKey : normalizedPath
-    const objectKey = (() => {
-      try {
-        return decodeURIComponent(rawObjectKey)
-      } catch {
-        return rawObjectKey
-      }
-    })()
-
-    const normalizedFallbackRegion = fallbackRegion?.trim()
-    const regionFromHost =
-      virtualHostedDualstackMatch?.[2] ??
-      virtualHostedRegionalMatch?.[2] ??
-      pathStyleDualstackMatch?.[1] ??
-      pathStyleRegionalMatch?.[1]
-    const region = regionFromHost || normalizedFallbackRegion || 'us-east-1'
+    const bucketName = hostname.split('.')[0]
+    const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/)
+    const region = regionMatch ? regionMatch[1] : 'us-east-1'
+    const objectKey = url.pathname.startsWith('/') ? url.pathname.substring(1) : url.pathname

     if (!bucketName || !objectKey) {
       throw new Error('Invalid S3 URI format')
@@ -88,7 +40,7 @@ export function parseS3Uri(
     return { bucketName, region, objectKey }
   } catch (_error) {
     throw new Error(
-      'Invalid S3 Object URL format. Expected S3 virtual-hosted or path-style URL with object key.'
+      'Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file'
     )
   }
 }
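A worked sketch of what the simplified parseS3Uri on the added side of the hunks above returns, and of the path-style case that the removed branch handled explicitly; the URLs below are illustrative, not taken from the repository or its tests.

// Virtual-hosted-style URL: bucket from the first hostname label, region from the s3.<region> segment.
parseS3Uri('https://my-bucket.s3.us-west-2.amazonaws.com/reports/2024/q1.pdf')
// -> { bucketName: 'my-bucket', region: 'us-west-2', objectKey: 'reports/2024/q1.pdf' }

// Path-style URL: the removed code split the bucket out of the path; the simplified
// version takes the first hostname label, so 's3' comes back as the bucket name.
parseS3Uri('https://s3.us-west-2.amazonaws.com/my-bucket/report.pdf')
// -> { bucketName: 's3', region: 'us-west-2', objectKey: 'my-bucket/report.pdf' }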
@@ -53,7 +53,7 @@ export const sftpUploadTool: ToolConfig<SftpUploadParams, SftpUploadResult> = {
     files: {
       type: 'file[]',
       required: false,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'Files to upload',
     },
     fileContent: {
@@ -1,6 +1,6 @@
 import { createLogger } from '@sim/logger'
 import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
-import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
+import { getBaseUrl } from '@/lib/core/utils/urls'
 import { AGENT, isCustomTool } from '@/executor/constants'
 import { getCustomTool } from '@/hooks/queries/custom-tools'
 import { useEnvironmentStore } from '@/stores/settings/environment'
@@ -373,7 +373,7 @@ async function fetchCustomToolFromAPI(
   const identifier = customToolId.replace('custom_', '')

   try {
-    const baseUrl = getInternalApiBaseUrl()
+    const baseUrl = getBaseUrl()
     const url = new URL('/api/tools/custom', baseUrl)

     if (workflowId) {
@@ -106,7 +106,7 @@ export const visionToolV2: ToolConfig<VisionV2Params, VisionResponse> = {
     imageFile: {
       type: 'file',
       required: true,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'Image file to analyze',
     },
     model: visionTool.params.model,
@@ -27,7 +27,7 @@ export const uploadMediaTool: ToolConfig<WordPressUploadMediaParams, WordPressUp
     file: {
       type: 'file',
       required: false,
-      visibility: 'user-only',
+      visibility: 'hidden',
       description: 'File to upload (UserFile object)',
     },
     filename: {
Some files were not shown because too many files have changed in this diff.