Compare commits

...

10 Commits

Author SHA1 Message Date
waleed  609a8a53b0  fix hasChanges logic  2026-01-19 16:39:10 -08:00
waleed  b46f760247  added new rate limit category, ack PR comments  2026-01-19 16:05:51 -08:00
waleed  a2c794a77e  added ability to edit parameter and workflow descriptions  2026-01-19 15:45:30 -08:00
waleed  530a3292a3  feat(api): added workflows api route for dynamic discovery  2026-01-19 15:10:47 -08:00
Waleed  81cbfe7af4  feat(browseruse): upgraded browseruse endpoints to v2 (#2890)  2026-01-19 14:47:19 -08:00
Waleed  739341b08e  improvement(router): add resizable textareas for router conditions (#2888)  2026-01-19 13:59:13 -08:00
Waleed  3c43779ba3  feat(search): added operations to search modal in main app, updated retrieval in docs to use RRF (#2889)  2026-01-19 13:57:56 -08:00
Waleed  1861f77283  feat(terminal): add fix in copilot for errors (#2885)  2026-01-19 13:42:34 -08:00
Vikhyath Mondreti  72c2ba7443  fix(linear): team selector in tool input (#2886)  2026-01-19 12:40:45 -08:00
Waleed  037dad6975  fix(undo-redo): preserve subblock values during undo/redo cycles (#2884)  2026-01-19 12:19:51 -08:00
        * fix(undo-redo): preserve subblock values during undo/redo cycles
        * added tests
31 changed files with 1676 additions and 238 deletions

View File

@@ -86,27 +86,112 @@ export async function GET(request: NextRequest) {
       )
       .limit(candidateLimit)

-    const seenIds = new Set<string>()
-    const mergedResults = []
-    for (let i = 0; i < Math.max(vectorResults.length, keywordResults.length); i++) {
-      if (i < vectorResults.length && !seenIds.has(vectorResults[i].chunkId)) {
-        mergedResults.push(vectorResults[i])
-        seenIds.add(vectorResults[i].chunkId)
-      }
-      if (i < keywordResults.length && !seenIds.has(keywordResults[i].chunkId)) {
-        mergedResults.push(keywordResults[i])
-        seenIds.add(keywordResults[i].chunkId)
-      }
-    }
-    const filteredResults = mergedResults.slice(0, limit)
-    const searchResults = filteredResults.map((result) => {
+    const knownLocales = ['en', 'es', 'fr', 'de', 'ja', 'zh']
+
+    const vectorRankMap = new Map<string, number>()
+    vectorResults.forEach((r, idx) => vectorRankMap.set(r.chunkId, idx + 1))
+
+    const keywordRankMap = new Map<string, number>()
+    keywordResults.forEach((r, idx) => keywordRankMap.set(r.chunkId, idx + 1))
+
+    const allChunkIds = new Set([
+      ...vectorResults.map((r) => r.chunkId),
+      ...keywordResults.map((r) => r.chunkId),
+    ])
+
+    const k = 60
+    type ResultWithRRF = (typeof vectorResults)[0] & { rrfScore: number }
+    const scoredResults: ResultWithRRF[] = []
+
+    for (const chunkId of allChunkIds) {
+      const vectorRank = vectorRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
+      const keywordRank = keywordRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
+      const rrfScore = 1 / (k + vectorRank) + 1 / (k + keywordRank)
+
+      const result =
+        vectorResults.find((r) => r.chunkId === chunkId) ||
+        keywordResults.find((r) => r.chunkId === chunkId)
+      if (result) {
+        scoredResults.push({ ...result, rrfScore })
+      }
+    }
+
+    scoredResults.sort((a, b) => b.rrfScore - a.rrfScore)
+
+    const localeFilteredResults = scoredResults.filter((result) => {
+      const firstPart = result.sourceDocument.split('/')[0]
+      if (knownLocales.includes(firstPart)) {
+        return firstPart === locale
+      }
+      return locale === 'en'
+    })
+
+    const queryLower = query.toLowerCase()
+    const getTitleBoost = (result: ResultWithRRF): number => {
+      const fileName = result.sourceDocument
+        .replace('.mdx', '')
+        .split('/')
+        .pop()
+        ?.toLowerCase()
+        ?.replace(/_/g, ' ')
+      if (fileName === queryLower) return 0.01
+      if (fileName?.includes(queryLower)) return 0.005
+      return 0
+    }
+
+    localeFilteredResults.sort((a, b) => {
+      return b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a))
+    })
+
+    const pageMap = new Map<string, ResultWithRRF>()
+    for (const result of localeFilteredResults) {
+      const pageKey = result.sourceDocument
+      const existing = pageMap.get(pageKey)
+      if (!existing || result.rrfScore > existing.rrfScore) {
+        pageMap.set(pageKey, result)
+      }
+    }
+
+    const deduplicatedResults = Array.from(pageMap.values())
+      .sort((a, b) => b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a)))
+      .slice(0, limit)
+
+    const searchResults = deduplicatedResults.map((result) => {
       const title = result.headerText || result.sourceDocument.replace('.mdx', '')
       const pathParts = result.sourceDocument
         .replace('.mdx', '')
         .split('/')
-        .map((part) => part.charAt(0).toUpperCase() + part.slice(1))
+        .filter((part) => part !== 'index' && !knownLocales.includes(part))
+        .map((part) => {
+          return part
+            .replace(/_/g, ' ')
+            .split(' ')
+            .map((word) => {
+              const acronyms = [
+                'api',
+                'mcp',
+                'sdk',
+                'url',
+                'http',
+                'json',
+                'xml',
+                'html',
+                'css',
+                'ai',
+              ]
+              if (acronyms.includes(word.toLowerCase())) {
+                return word.toUpperCase()
+              }
+              return word.charAt(0).toUpperCase() + word.slice(1)
+            })
+            .join(' ')
+        })

       return {
         id: result.chunkId,
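Note on the new ranking: Reciprocal Rank Fusion scores each chunk as 1 / (k + vectorRank) + 1 / (k + keywordRank) with k = 60, so a chunk near the top of either list rises even when the other retriever misses it entirely. A minimal standalone TypeScript sketch of just the fusion step (the ranks below are made up, not the route's actual data):

// Illustration only: mirrors the rrfScore computation in the hunk above.
const K = 60

function rrfScore(vectorRank?: number, keywordRank?: number, k = K): number {
  const v = vectorRank === undefined ? Number.POSITIVE_INFINITY : vectorRank
  const w = keywordRank === undefined ? Number.POSITIVE_INFINITY : keywordRank
  return 1 / (k + v) + 1 / (k + w)
}

// A chunk ranked 1st by vector search and 3rd by keyword search
// outranks one ranked 2nd by vector search but absent from keyword results.
console.log(rrfScore(1, 3)) // ≈ 0.0323
console.log(rrfScore(2))    // ≈ 0.0161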

View File

@@ -1739,12 +1739,12 @@ export function BrowserUseIcon(props: SVGProps<SVGSVGElement>) {
       {...props}
       version='1.0'
       xmlns='http://www.w3.org/2000/svg'
-      width='150pt'
-      height='150pt'
+      width='28'
+      height='28'
       viewBox='0 0 150 150'
       preserveAspectRatio='xMidYMid meet'
     >
-      <g transform='translate(0,150) scale(0.05,-0.05)' fill='#000000' stroke='none'>
+      <g transform='translate(0,150) scale(0.05,-0.05)' fill='currentColor' stroke='none'>
        <path
          d='M786 2713 c-184 -61 -353 -217 -439 -405 -76 -165 -65 -539 19 -666
          l57 -85 -48 -124 c-203 -517 -79 -930 346 -1155 159 -85 441 -71 585 28 l111

View File

@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
 <BlockInfoCard
   type="browser_use"
-  color="#E0E0E0"
+  color="#181C1E"
 />
 {/* MANUAL-CONTENT-START:intro */}

View File

@@ -52,6 +52,15 @@ Read content from a Google Slides presentation
 | --------- | ---- | ----------- |
 | `slides` | json | Array of slides with their content |
 | `metadata` | json | Presentation metadata including ID, title, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `title` | string | The presentation title |
+| ↳ `pageSize` | object | Presentation page size |
+| ↳ `width` | json | Page width as a Dimension object |
+| ↳ `height` | json | Page height as a Dimension object |
+| ↳ `width` | json | Page width as a Dimension object |
+| ↳ `height` | json | Page height as a Dimension object |
+| ↳ `mimeType` | string | The mime type of the presentation |
+| ↳ `url` | string | URL to open the presentation |
 ### `google_slides_write`
@@ -71,6 +80,10 @@ Write or update content in a Google Slides presentation
 | --------- | ---- | ----------- |
 | `updatedContent` | boolean | Indicates if presentation content was updated successfully |
 | `metadata` | json | Updated presentation metadata including ID, title, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `title` | string | The presentation title |
+| ↳ `mimeType` | string | The mime type of the presentation |
+| ↳ `url` | string | URL to open the presentation |
 ### `google_slides_create`
@@ -90,6 +103,10 @@ Create a new Google Slides presentation
 | Parameter | Type | Description |
 | --------- | ---- | ----------- |
 | `metadata` | json | Created presentation metadata including ID, title, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `title` | string | The presentation title |
+| ↳ `mimeType` | string | The mime type of the presentation |
+| ↳ `url` | string | URL to open the presentation |
 ### `google_slides_replace_all_text`
@@ -111,6 +128,10 @@ Find and replace all occurrences of text throughout a Google Slides presentation
 | --------- | ---- | ----------- |
 | `occurrencesChanged` | number | Number of text occurrences that were replaced |
 | `metadata` | json | Operation metadata including presentation ID and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `findText` | string | The text that was searched for |
+| ↳ `replaceText` | string | The text that replaced the matches |
+| ↳ `url` | string | URL to open the presentation |
 ### `google_slides_add_slide`
@@ -131,6 +152,10 @@ Add a new slide to a Google Slides presentation with a specified layout
 | --------- | ---- | ----------- |
 | `slideId` | string | The object ID of the newly created slide |
 | `metadata` | json | Operation metadata including presentation ID, layout, and URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `layout` | string | The layout used for the new slide |
+| ↳ `insertionIndex` | number | The zero-based index where the slide was inserted |
+| ↳ `url` | string | URL to open the presentation |
 ### `google_slides_add_image`
@@ -154,6 +179,10 @@ Insert an image into a specific slide in a Google Slides presentation
 | --------- | ---- | ----------- |
 | `imageId` | string | The object ID of the newly created image |
 | `metadata` | json | Operation metadata including presentation ID and image URL |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `pageObjectId` | string | The page object ID where the image was inserted |
+| ↳ `imageUrl` | string | The source image URL |
+| ↳ `url` | string | URL to open the presentation |
 ### `google_slides_get_thumbnail`
@@ -176,6 +205,10 @@ Generate a thumbnail image of a specific slide in a Google Slides presentation
 | `width` | number | Width of the thumbnail in pixels |
 | `height` | number | Height of the thumbnail in pixels |
 | `metadata` | json | Operation metadata including presentation ID and page object ID |
+| ↳ `presentationId` | string | The presentation ID |
+| ↳ `pageObjectId` | string | The page object ID for the thumbnail |
+| ↳ `thumbnailSize` | string | The requested thumbnail size |
+| ↳ `mimeType` | string | The thumbnail MIME type |
 ### `google_slides_get_page`

View File

@@ -19,7 +19,7 @@ export interface RateLimitResult {
 export async function checkRateLimit(
   request: NextRequest,
-  endpoint: 'logs' | 'logs-detail' = 'logs'
+  endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' = 'logs'
 ): Promise<RateLimitResult> {
   try {
     const auth = await authenticateV1Request(request)

View File

@@ -0,0 +1,102 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowBlocks } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
const logger = createLogger('V1WorkflowDetailsAPI')
export const revalidate = 0
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const rateLimit = await checkRateLimit(request, 'workflow-detail')
if (!rateLimit.allowed) {
return createRateLimitResponse(rateLimit)
}
const userId = rateLimit.userId!
const { id } = await params
logger.info(`[${requestId}] Fetching workflow details for ${id}`, { userId })
const rows = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
color: workflow.color,
folderId: workflow.folderId,
workspaceId: workflow.workspaceId,
isDeployed: workflow.isDeployed,
deployedAt: workflow.deployedAt,
runCount: workflow.runCount,
lastRunAt: workflow.lastRunAt,
variables: workflow.variables,
createdAt: workflow.createdAt,
updatedAt: workflow.updatedAt,
})
.from(workflow)
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, userId)
)
)
.where(eq(workflow.id, id))
.limit(1)
const workflowData = rows[0]
if (!workflowData) {
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
}
const blockRows = await db
.select({
id: workflowBlocks.id,
type: workflowBlocks.type,
subBlocks: workflowBlocks.subBlocks,
})
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, id))
const blocksRecord = Object.fromEntries(
blockRows.map((block) => [block.id, { type: block.type, subBlocks: block.subBlocks }])
)
const inputs = extractInputFieldsFromBlocks(blocksRecord)
const response = {
id: workflowData.id,
name: workflowData.name,
description: workflowData.description,
color: workflowData.color,
folderId: workflowData.folderId,
workspaceId: workflowData.workspaceId,
isDeployed: workflowData.isDeployed,
deployedAt: workflowData.deployedAt?.toISOString() || null,
runCount: workflowData.runCount,
lastRunAt: workflowData.lastRunAt?.toISOString() || null,
variables: workflowData.variables || {},
inputs,
createdAt: workflowData.createdAt.toISOString(),
updatedAt: workflowData.updatedAt.toISOString(),
}
const limits = await getUserLimits(userId)
const apiResponse = createApiResponse({ data: response }, limits, rateLimit)
return NextResponse.json(apiResponse.body, { headers: apiResponse.headers })
} catch (error: unknown) {
const message = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] Workflow details fetch error`, { error: message })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -0,0 +1,184 @@
import { db } from '@sim/db'
import { permissions, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, gt, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
const logger = createLogger('V1WorkflowsAPI')
export const dynamic = 'force-dynamic'
export const revalidate = 0
const QueryParamsSchema = z.object({
workspaceId: z.string(),
folderId: z.string().optional(),
deployedOnly: z.coerce.boolean().optional().default(false),
limit: z.coerce.number().min(1).max(100).optional().default(50),
cursor: z.string().optional(),
})
interface CursorData {
sortOrder: number
createdAt: string
id: string
}
function encodeCursor(data: CursorData): string {
return Buffer.from(JSON.stringify(data)).toString('base64')
}
function decodeCursor(cursor: string): CursorData | null {
try {
return JSON.parse(Buffer.from(cursor, 'base64').toString())
} catch {
return null
}
}
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const rateLimit = await checkRateLimit(request, 'workflows')
if (!rateLimit.allowed) {
return createRateLimitResponse(rateLimit)
}
const userId = rateLimit.userId!
const { searchParams } = new URL(request.url)
const rawParams = Object.fromEntries(searchParams.entries())
const validationResult = QueryParamsSchema.safeParse(rawParams)
if (!validationResult.success) {
return NextResponse.json(
{ error: 'Invalid parameters', details: validationResult.error.errors },
{ status: 400 }
)
}
const params = validationResult.data
logger.info(`[${requestId}] Fetching workflows for workspace ${params.workspaceId}`, {
userId,
filters: {
folderId: params.folderId,
deployedOnly: params.deployedOnly,
},
})
const conditions = [
eq(workflow.workspaceId, params.workspaceId),
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, params.workspaceId),
eq(permissions.userId, userId),
]
if (params.folderId) {
conditions.push(eq(workflow.folderId, params.folderId))
}
if (params.deployedOnly) {
conditions.push(eq(workflow.isDeployed, true))
}
if (params.cursor) {
const cursorData = decodeCursor(params.cursor)
if (cursorData) {
const cursorCondition = or(
gt(workflow.sortOrder, cursorData.sortOrder),
and(
eq(workflow.sortOrder, cursorData.sortOrder),
gt(workflow.createdAt, new Date(cursorData.createdAt))
),
and(
eq(workflow.sortOrder, cursorData.sortOrder),
eq(workflow.createdAt, new Date(cursorData.createdAt)),
gt(workflow.id, cursorData.id)
)
)
if (cursorCondition) {
conditions.push(cursorCondition)
}
}
}
const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]
const rows = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
color: workflow.color,
folderId: workflow.folderId,
workspaceId: workflow.workspaceId,
isDeployed: workflow.isDeployed,
deployedAt: workflow.deployedAt,
runCount: workflow.runCount,
lastRunAt: workflow.lastRunAt,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
updatedAt: workflow.updatedAt,
})
.from(workflow)
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, params.workspaceId),
eq(permissions.userId, userId)
)
)
.where(and(...conditions))
.orderBy(...orderByClause)
.limit(params.limit + 1)
const hasMore = rows.length > params.limit
const data = rows.slice(0, params.limit)
let nextCursor: string | undefined
if (hasMore && data.length > 0) {
const lastWorkflow = data[data.length - 1]
nextCursor = encodeCursor({
sortOrder: lastWorkflow.sortOrder,
createdAt: lastWorkflow.createdAt.toISOString(),
id: lastWorkflow.id,
})
}
const formattedWorkflows = data.map((w) => ({
id: w.id,
name: w.name,
description: w.description,
color: w.color,
folderId: w.folderId,
workspaceId: w.workspaceId,
isDeployed: w.isDeployed,
deployedAt: w.deployedAt?.toISOString() || null,
runCount: w.runCount,
lastRunAt: w.lastRunAt?.toISOString() || null,
createdAt: w.createdAt.toISOString(),
updatedAt: w.updatedAt.toISOString(),
}))
const limits = await getUserLimits(userId)
const response = createApiResponse(
{
data: formattedWorkflows,
nextCursor,
},
limits,
rateLimit
)
return NextResponse.json(response.body, { headers: response.headers })
} catch (error: unknown) {
const message = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] Workflows fetch error`, { error: message })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
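A note on the pagination scheme in this route: the cursor is keyset-based, a base64-encoded JSON tuple of the last row's (sortOrder, createdAt, id), and the or()/and() chain above expresses "strictly after that tuple" in lexicographic order. A plain-TypeScript sketch of the equivalent comparison (standalone; field names follow the CursorData interface above):

interface Key {
  sortOrder: number
  createdAt: string // ISO timestamp, so string comparison matches chronological order
  id: string
}

// True when `row` sorts strictly after `cursor` under ORDER BY sortOrder, createdAt, id.
function isAfterCursor(row: Key, cursor: Key): boolean {
  if (row.sortOrder !== cursor.sortOrder) return row.sortOrder > cursor.sortOrder
  if (row.createdAt !== cursor.createdAt) return row.createdAt > cursor.createdAt
  return row.id > cursor.id
}

// Example: same sortOrder and createdAt, so the tie is broken by id.
const cursor = { sortOrder: 1, createdAt: '2026-01-19T23:00:00.000Z', id: 'a' }
console.log(isAfterCursor({ sortOrder: 1, createdAt: '2026-01-19T23:00:00.000Z', id: 'b' }, cursor)) // true

Fetching limit + 1 rows and slicing back down to limit is what lets the route detect hasMore without a separate count query.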

View File

@@ -452,39 +452,6 @@ console.log(limits);`
           </div>
         )}

-        {/* <div>
-          <div className='mb-[6.5px] flex items-center justify-between'>
-            <Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
-              URL
-            </Label>
-            <Tooltip.Root>
-              <Tooltip.Trigger asChild>
-                <Button
-                  variant='ghost'
-                  onClick={() => handleCopy('endpoint', info.endpoint)}
-                  aria-label='Copy endpoint'
-                  className='!p-1.5 -my-1.5'
-                >
-                  {copied.endpoint ? (
-                    <Check className='h-3 w-3' />
-                  ) : (
-                    <Clipboard className='h-3 w-3' />
-                  )}
-                </Button>
-              </Tooltip.Trigger>
-              <Tooltip.Content>
-                <span>{copied.endpoint ? 'Copied' : 'Copy'}</span>
-              </Tooltip.Content>
-            </Tooltip.Root>
-          </div>
-          <Code.Viewer
-            code={info.endpoint}
-            language='javascript'
-            wrapText
-            className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
-          />
-        </div> */}

        <div>
          <div className='mb-[6.5px] flex items-center justify-between'>
            <Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>

View File

@@ -0,0 +1,260 @@
'use client'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import {
Badge,
Button,
Input,
Label,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
} from '@/components/emcn'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import type { InputFormatField } from '@/lib/workflows/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
type NormalizedField = InputFormatField & { name: string }
interface ApiInfoModalProps {
open: boolean
onOpenChange: (open: boolean) => void
workflowId: string
}
export function ApiInfoModal({ open, onOpenChange, workflowId }: ApiInfoModalProps) {
const blocks = useWorkflowStore((state) => state.blocks)
const setValue = useSubBlockStore((state) => state.setValue)
const subBlockValues = useSubBlockStore((state) =>
workflowId ? (state.workflowValues[workflowId] ?? {}) : {}
)
const workflowMetadata = useWorkflowRegistry((state) =>
workflowId ? state.workflows[workflowId] : undefined
)
const updateWorkflow = useWorkflowRegistry((state) => state.updateWorkflow)
const [description, setDescription] = useState('')
const [paramDescriptions, setParamDescriptions] = useState<Record<string, string>>({})
const [isSaving, setIsSaving] = useState(false)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const initialDescriptionRef = useRef('')
const initialParamDescriptionsRef = useRef<Record<string, string>>({})
const starterBlockId = useMemo(() => {
for (const [blockId, block] of Object.entries(blocks)) {
if (!block || typeof block !== 'object') continue
const blockType = (block as { type?: string }).type
if (blockType && isValidStartBlockType(blockType)) {
return blockId
}
}
return null
}, [blocks])
const inputFormat = useMemo((): NormalizedField[] => {
if (!starterBlockId) return []
const storeValue = subBlockValues[starterBlockId]?.inputFormat
const normalized = normalizeInputFormatValue(storeValue) as NormalizedField[]
if (normalized.length > 0) return normalized
const startBlock = blocks[starterBlockId]
const blockValue = startBlock?.subBlocks?.inputFormat?.value
return normalizeInputFormatValue(blockValue) as NormalizedField[]
}, [starterBlockId, subBlockValues, blocks])
useEffect(() => {
if (open) {
const normalizedDesc = workflowMetadata?.description?.toLowerCase().trim()
const isDefaultDescription =
!workflowMetadata?.description ||
workflowMetadata.description === workflowMetadata.name ||
normalizedDesc === 'new workflow' ||
normalizedDesc === 'your first workflow - start building here!'
const initialDescription = isDefaultDescription ? '' : workflowMetadata?.description || ''
setDescription(initialDescription)
initialDescriptionRef.current = initialDescription
const descriptions: Record<string, string> = {}
for (const field of inputFormat) {
if (field.description) {
descriptions[field.name] = field.description
}
}
setParamDescriptions(descriptions)
initialParamDescriptionsRef.current = { ...descriptions }
}
}, [open, workflowMetadata, inputFormat])
const hasChanges = useMemo(() => {
if (description !== initialDescriptionRef.current) return true
for (const field of inputFormat) {
const currentValue = (paramDescriptions[field.name] || '').trim()
const initialValue = (initialParamDescriptionsRef.current[field.name] || '').trim()
if (currentValue !== initialValue) return true
}
return false
}, [description, paramDescriptions, inputFormat])
const handleParamDescriptionChange = (fieldName: string, value: string) => {
setParamDescriptions((prev) => ({
...prev,
[fieldName]: value,
}))
}
const handleCloseAttempt = useCallback(() => {
if (hasChanges && !isSaving) {
setShowUnsavedChangesAlert(true)
} else {
onOpenChange(false)
}
}, [hasChanges, isSaving, onOpenChange])
const handleDiscardChanges = useCallback(() => {
setShowUnsavedChangesAlert(false)
setDescription(initialDescriptionRef.current)
setParamDescriptions({ ...initialParamDescriptionsRef.current })
onOpenChange(false)
}, [onOpenChange])
const handleSave = useCallback(async () => {
if (!workflowId) return
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
if (activeWorkflowId !== workflowId) {
return
}
setIsSaving(true)
try {
if (description.trim() !== (workflowMetadata?.description || '')) {
updateWorkflow(workflowId, { description: description.trim() || 'New workflow' })
}
if (starterBlockId) {
const updatedValue = inputFormat.map((field) => ({
...field,
description: paramDescriptions[field.name]?.trim() || undefined,
}))
setValue(starterBlockId, 'inputFormat', updatedValue)
}
onOpenChange(false)
} finally {
setIsSaving(false)
}
}, [
workflowId,
description,
workflowMetadata,
updateWorkflow,
starterBlockId,
inputFormat,
paramDescriptions,
setValue,
onOpenChange,
])
return (
<>
<Modal open={open} onOpenChange={(openState) => !openState && handleCloseAttempt()}>
<ModalContent className='max-w-[480px]'>
<ModalHeader>
<span>Edit API Info</span>
</ModalHeader>
<ModalBody className='space-y-[12px]'>
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
Description
</Label>
<Textarea
placeholder='Describe what this workflow API does...'
className='min-h-[80px] resize-none'
value={description}
onChange={(e) => setDescription(e.target.value)}
/>
</div>
{inputFormat.length > 0 && (
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
Parameters ({inputFormat.length})
</Label>
<div className='flex flex-col gap-[8px]'>
{inputFormat.map((field) => (
<div
key={field.name}
className='overflow-hidden rounded-[4px] border border-[var(--border-1)]'
>
<div className='flex items-center justify-between bg-[var(--surface-4)] px-[10px] py-[5px]'>
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
{field.name}
</span>
<Badge size='sm'>{field.type || 'string'}</Badge>
</div>
</div>
<div className='border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
<div className='flex flex-col gap-[6px]'>
<Label className='text-[13px]'>Description</Label>
<Input
value={paramDescriptions[field.name] || ''}
onChange={(e) =>
handleParamDescriptionChange(field.name, e.target.value)
}
placeholder={`Enter description for ${field.name}`}
/>
</div>
</div>
</div>
))}
</div>
</div>
)}
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={handleCloseAttempt} disabled={isSaving}>
Cancel
</Button>
<Button variant='tertiary' onClick={handleSave} disabled={isSaving || !hasChanges}>
{isSaving ? 'Saving...' : 'Save'}
</Button>
</ModalFooter>
</ModalContent>
</Modal>
<Modal open={showUnsavedChangesAlert} onOpenChange={setShowUnsavedChangesAlert}>
<ModalContent className='max-w-[400px]'>
<ModalHeader>
<span>Unsaved Changes</span>
</ModalHeader>
<ModalBody>
<p className='text-[14px] text-[var(--text-secondary)]'>
You have unsaved changes. Are you sure you want to discard them?
</p>
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={() => setShowUnsavedChangesAlert(false)}>
Keep Editing
</Button>
<Button variant='destructive' onClick={handleDiscardChanges}>
Discard Changes
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</>
)
}

View File

@@ -43,6 +43,7 @@ import type { WorkflowState } from '@/stores/workflows/workflow/types'
 import { A2aDeploy } from './components/a2a/a2a'
 import { ApiDeploy } from './components/api/api'
 import { ChatDeploy, type ExistingChat } from './components/chat/chat'
+import { ApiInfoModal } from './components/general/components/api-info-modal'
 import { GeneralDeploy } from './components/general/general'
 import { McpDeploy } from './components/mcp/mcp'
 import { TemplateDeploy } from './components/template/template'
@@ -110,6 +111,7 @@ export function DeployModal({
   const [chatSuccess, setChatSuccess] = useState(false)
   const [isCreateKeyModalOpen, setIsCreateKeyModalOpen] = useState(false)
+  const [isApiInfoModalOpen, setIsApiInfoModalOpen] = useState(false)
   const userPermissions = useUserPermissionsContext()
   const canManageWorkspaceKeys = userPermissions.canAdmin
   const { config: permissionConfig } = usePermissionConfig()
@@ -389,11 +391,6 @@ export function DeployModal({
     form?.requestSubmit()
   }, [])

-  const handleA2aFormSubmit = useCallback(() => {
-    const form = document.getElementById('a2a-deploy-form') as HTMLFormElement
-    form?.requestSubmit()
-  }, [])

   const handleA2aPublish = useCallback(() => {
     const form = document.getElementById('a2a-deploy-form')
     const publishTrigger = form?.querySelector('[data-a2a-publish-trigger]') as HTMLButtonElement
@@ -594,7 +591,11 @@ export function DeployModal({
         )}
         {activeTab === 'api' && (
           <ModalFooter className='items-center justify-between'>
-            <div />
+            <div>
+              <Button variant='default' onClick={() => setIsApiInfoModalOpen(true)}>
+                Edit API Info
+              </Button>
+            </div>
             <div className='flex items-center gap-2'>
               <Button
                 variant='tertiary'
@@ -880,6 +881,14 @@ export function DeployModal({
         canManageWorkspaceKeys={canManageWorkspaceKeys}
         defaultKeyType={defaultKeyType}
       />
+      {workflowId && (
+        <ApiInfoModal
+          open={isApiInfoModalOpen}
+          onOpenChange={setIsApiInfoModalOpen}
+          workflowId={workflowId}
+        />
+      )}
     </>
   )
 }

View File

@@ -1,7 +1,7 @@
 import type { ReactElement } from 'react'
 import { useEffect, useRef, useState } from 'react'
 import { createLogger } from '@sim/logger'
-import { ChevronDown, ChevronUp, Plus } from 'lucide-react'
+import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
 import { useParams } from 'next/navigation'
 import Editor from 'react-simple-code-editor'
 import { useUpdateNodeInternals } from 'reactflow'
@@ -39,6 +39,16 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
 const logger = createLogger('ConditionInput')

+/**
+ * Default height for router textareas in pixels
+ */
+const ROUTER_DEFAULT_HEIGHT_PX = 100
+
+/**
+ * Minimum height for router textareas in pixels
+ */
+const ROUTER_MIN_HEIGHT_PX = 80
+
 /**
  * Represents a single conditional block (if/else if/else).
  */
@@ -743,6 +753,61 @@ export function ConditionInput({
     }
   }, [conditionalBlocks, isRouterMode])

+  // State for tracking individual router textarea heights
+  const [routerHeights, setRouterHeights] = useState<{ [key: string]: number }>({})
+  const isResizing = useRef(false)
+
+  /**
+   * Gets the height for a specific router block, returning default if not set.
+   *
+   * @param blockId - ID of the router block
+   * @returns Height in pixels
+   */
+  const getRouterHeight = (blockId: string): number => {
+    return routerHeights[blockId] ?? ROUTER_DEFAULT_HEIGHT_PX
+  }
+
+  /**
+   * Handles mouse-based resize for router textareas.
+   *
+   * @param e - Mouse event from the resize handle
+   * @param blockId - ID of the block being resized
+   */
+  const startRouterResize = (e: React.MouseEvent, blockId: string) => {
+    if (isPreview || disabled) return
+    e.preventDefault()
+    e.stopPropagation()
+    isResizing.current = true
+
+    const startY = e.clientY
+    const startHeight = getRouterHeight(blockId)
+
+    const handleMouseMove = (moveEvent: MouseEvent) => {
+      if (!isResizing.current) return
+      const deltaY = moveEvent.clientY - startY
+      const newHeight = Math.max(ROUTER_MIN_HEIGHT_PX, startHeight + deltaY)
+
+      // Update the textarea height directly for smooth resizing
+      const textarea = inputRefs.current.get(blockId)
+      if (textarea) {
+        textarea.style.height = `${newHeight}px`
+      }
+
+      // Update state to keep track
+      setRouterHeights((prev) => ({ ...prev, [blockId]: newHeight }))
+    }
+
+    const handleMouseUp = () => {
+      isResizing.current = false
+      document.removeEventListener('mousemove', handleMouseMove)
+      document.removeEventListener('mouseup', handleMouseUp)
+    }
+
+    document.addEventListener('mousemove', handleMouseMove)
+    document.addEventListener('mouseup', handleMouseUp)
+  }
+
   // Show loading or empty state if not ready or no blocks
   if (!isReady || conditionalBlocks.length === 0) {
     return (
@@ -907,10 +972,24 @@ export function ConditionInput({
                 }}
                 placeholder='Describe when this route should be taken...'
                 disabled={disabled || isPreview}
-                className='min-h-[60px] resize-none rounded-none border-0 px-3 py-2 text-sm placeholder:text-muted-foreground/50 focus-visible:ring-0 focus-visible:ring-offset-0'
-                rows={2}
+                className='min-h-[100px] resize-none rounded-none border-0 px-3 py-2 text-sm placeholder:text-muted-foreground/50 focus-visible:ring-0 focus-visible:ring-offset-0'
+                rows={4}
+                style={{ height: `${getRouterHeight(block.id)}px` }}
               />
+              {/* Custom resize handle */}
+              {!isPreview && !disabled && (
+                <div
+                  className='absolute right-1 bottom-1 flex h-4 w-4 cursor-ns-resize items-center justify-center rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] dark:bg-[var(--surface-5)]'
+                  onMouseDown={(e) => startRouterResize(e, block.id)}
+                  onDragStart={(e) => {
+                    e.preventDefault()
+                  }}
+                >
+                  <ChevronsUpDown className='h-3 w-3 text-[var(--text-muted)]' />
+                </div>
+              )}
               {block.showEnvVars && (
                 <EnvVarDropdown
                   visible={block.showEnvVars}

View File

@@ -234,48 +234,45 @@ export function LongInput({
   }, [value])

   // Handle resize functionality
-  const startResize = useCallback(
-    (e: React.MouseEvent) => {
-      e.preventDefault()
-      e.stopPropagation()
-      isResizing.current = true
+  const startResize = (e: React.MouseEvent) => {
+    e.preventDefault()
+    e.stopPropagation()
+    isResizing.current = true

    const startY = e.clientY
    const startHeight = height

    const handleMouseMove = (moveEvent: MouseEvent) => {
      if (!isResizing.current) return
      const deltaY = moveEvent.clientY - startY
      const newHeight = Math.max(MIN_HEIGHT_PX, startHeight + deltaY)

      if (textareaRef.current && overlayRef.current) {
        textareaRef.current.style.height = `${newHeight}px`
        overlayRef.current.style.height = `${newHeight}px`
      }
      if (containerRef.current) {
        containerRef.current.style.height = `${newHeight}px`
      }

      // Keep React state in sync so parent layouts (e.g., Editor) update during drag
      setHeight(newHeight)
    }

    const handleMouseUp = () => {
      if (textareaRef.current) {
        const finalHeight = Number.parseInt(textareaRef.current.style.height, 10) || height
        setHeight(finalHeight)
      }
      isResizing.current = false
      document.removeEventListener('mousemove', handleMouseMove)
      document.removeEventListener('mouseup', handleMouseUp)
    }

    document.addEventListener('mousemove', handleMouseMove)
    document.addEventListener('mouseup', handleMouseUp)
-    },
-    [height]
-  )
+  }

   // Expose wand control handlers to parent via ref
   useImperativeHandle(

View File

@@ -2069,6 +2069,7 @@ export const ToolInput = memo(function ToolInput({
   placeholder: uiComponent.placeholder,
   requiredScopes: uiComponent.requiredScopes,
   dependsOn: uiComponent.dependsOn,
+  canonicalParamId: uiComponent.canonicalParamId ?? param.id,
 }}
 onProjectSelect={onChange}
 disabled={disabled}

View File

@@ -34,6 +34,7 @@ interface LogRowContextMenuProps {
   onCopyRunId: (runId: string) => void
   onClearFilters: () => void
   onClearConsole: () => void
+  onFixInCopilot: (entry: ConsoleEntry) => void
   hasActiveFilters: boolean
 }
@@ -54,6 +55,7 @@ export function LogRowContextMenu({
   onCopyRunId,
   onClearFilters,
   onClearConsole,
+  onFixInCopilot,
   hasActiveFilters,
 }: LogRowContextMenuProps) {
   const hasRunId = entry?.executionId != null
@@ -96,6 +98,21 @@ export function LogRowContextMenu({
         </>
       )}

+      {/* Fix in Copilot - only for error rows */}
+      {entry && !entry.success && (
+        <>
+          <PopoverItem
+            onClick={() => {
+              onFixInCopilot(entry)
+              onClose()
+            }}
+          >
+            Fix in Copilot
+          </PopoverItem>
+          <PopoverDivider />
+        </>
+      )}
+
       {/* Filter actions */}
       {entry && (
         <>

View File

@@ -54,6 +54,7 @@ import { useShowTrainingControls } from '@/hooks/queries/general-settings'
 import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
 import { OUTPUT_PANEL_WIDTH, TERMINAL_HEIGHT } from '@/stores/constants'
 import { useCopilotTrainingStore } from '@/stores/copilot-training/store'
+import { openCopilotWithMessage } from '@/stores/notifications/utils'
 import type { ConsoleEntry } from '@/stores/terminal'
 import { useTerminalConsoleStore, useTerminalStore } from '@/stores/terminal'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -226,7 +227,6 @@ const isEventFromEditableElement = (e: KeyboardEvent): boolean => {
     return false
   }

-  // Check target and walk up ancestors in case editors render nested elements
   let el: HTMLElement | null = target
   while (el) {
     if (isEditable(el)) return true
@@ -1159,6 +1159,17 @@ export const Terminal = memo(function Terminal() {
     clearCurrentWorkflowConsole()
   }, [clearCurrentWorkflowConsole])

+  const handleFixInCopilot = useCallback(
+    (entry: ConsoleEntry) => {
+      const errorMessage = entry.error ? String(entry.error) : 'Unknown error'
+      const blockName = entry.blockName || 'Unknown Block'
+      const message = `${errorMessage}\n\nError in ${blockName}.\n\nPlease fix this.`
+      openCopilotWithMessage(message)
+      closeLogRowMenu()
+    },
+    [closeLogRowMenu]
+  )
+
   const handleTrainingClick = useCallback(
     (e: React.MouseEvent) => {
       e.stopPropagation()
@@ -1949,6 +1960,7 @@ export const Terminal = memo(function Terminal() {
       closeLogRowMenu()
     }}
     onClearConsole={handleClearConsoleFromMenu}
+    onFixInCopilot={handleFixInCopilot}
     hasActiveFilters={hasActiveFilters}
   />
 </>

View File

@@ -692,7 +692,8 @@ const WorkflowContent = React.memo(() => {
       parentId?: string,
       extent?: 'parent',
       autoConnectEdge?: Edge,
-      triggerMode?: boolean
+      triggerMode?: boolean,
+      presetSubBlockValues?: Record<string, unknown>
     ) => {
       setPendingSelection([id])
       setSelectedEdges(new Map())
@@ -722,6 +723,14 @@ const WorkflowContent = React.memo(() => {
         }
       }

+      // Apply preset subblock values (e.g., from tool-operation search)
+      if (presetSubBlockValues) {
+        if (!subBlockValues[id]) {
+          subBlockValues[id] = {}
+        }
+        Object.assign(subBlockValues[id], presetSubBlockValues)
+      }
+
      collaborativeBatchAddBlocks(
        [block],
        autoConnectEdge ? [autoConnectEdge] : [],
@@ -1489,7 +1498,7 @@ const WorkflowContent = React.memo(() => {
       return
     }

-    const { type, enableTriggerMode } = event.detail
+    const { type, enableTriggerMode, presetOperation } = event.detail
     if (!type) return
     if (type === 'connectionBlock') return
@@ -1552,7 +1561,8 @@ const WorkflowContent = React.memo(() => {
       undefined,
       undefined,
       autoConnectEdge,
-      enableTriggerMode
+      enableTriggerMode,
+      presetOperation ? { operation: presetOperation } : undefined
     )
   }
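For context, the presetOperation handled here comes from the search-modal change later in this compare: picking a tool operation dispatches the same add-block-from-toolbar CustomEvent with a presetOperation field, which this handler forwards as { operation: presetOperation } so the new block lands with its operation subblock preselected. A minimal dispatch sketch (the type and operation ids below are hypothetical, not necessarily real registry identifiers):

// Hypothetical ids, for illustration only.
window.dispatchEvent(
  new CustomEvent('add-block-from-toolbar', {
    detail: {
      type: 'google_slides',
      presetOperation: 'google_slides_add_slide',
    },
  })
)
// The listener above then passes { operation: 'google_slides_add_slide' } as the
// preset subblock values when it creates the block on the canvas.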

View File

@@ -8,6 +8,7 @@ import { useParams, useRouter } from 'next/navigation'
 import { Dialog, DialogPortal, DialogTitle } from '@/components/ui/dialog'
 import { useBrandConfig } from '@/lib/branding/branding'
 import { cn } from '@/lib/core/utils/cn'
+import { getToolOperationsIndex } from '@/lib/search/tool-operations'
 import { getTriggersForSidebar, hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
 import { searchItems } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/search-modal/search-utils'
 import { SIDEBAR_SCROLL_EVENT } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'
@@ -81,10 +82,12 @@ type SearchItem = {
   color?: string
   href?: string
   shortcut?: string
-  type: 'block' | 'trigger' | 'tool' | 'workflow' | 'workspace' | 'page' | 'doc'
+  type: 'block' | 'trigger' | 'tool' | 'tool-operation' | 'workflow' | 'workspace' | 'page' | 'doc'
   isCurrent?: boolean
   blockType?: string
   config?: any
+  operationId?: string
+  aliases?: string[]
 }

 interface SearchResultItemProps {
@@ -101,7 +104,11 @@ const SearchResultItem = memo(function SearchResultItem({
   onItemClick,
 }: SearchResultItemProps) {
   const Icon = item.icon
-  const showColoredIcon = item.type === 'block' || item.type === 'trigger' || item.type === 'tool'
+  const showColoredIcon =
+    item.type === 'block' ||
+    item.type === 'trigger' ||
+    item.type === 'tool' ||
+    item.type === 'tool-operation'
   const isWorkflow = item.type === 'workflow'
   const isWorkspace = item.type === 'workspace'
@@ -278,6 +285,24 @@ export const SearchModal = memo(function SearchModal({
     )
   }, [open, isOnWorkflowPage, filterBlocks])

+  const toolOperations = useMemo(() => {
+    if (!open || !isOnWorkflowPage) return []
+
+    const allowedBlockTypes = new Set(tools.map((t) => t.type))
+
+    return getToolOperationsIndex()
+      .filter((op) => allowedBlockTypes.has(op.blockType))
+      .map((op) => ({
+        id: op.id,
+        name: `${op.serviceName}: ${op.operationName}`,
+        icon: op.icon,
+        bgColor: op.bgColor,
+        blockType: op.blockType,
+        operationId: op.operationId,
+        aliases: op.aliases,
+      }))
+  }, [open, isOnWorkflowPage, tools])
+
   const pages = useMemo(
     (): PageItem[] => [
       {
@@ -396,6 +421,19 @@ export const SearchModal = memo(function SearchModal({
       })
     })

+    toolOperations.forEach((op) => {
+      items.push({
+        id: op.id,
+        name: op.name,
+        icon: op.icon,
+        bgColor: op.bgColor,
+        type: 'tool-operation',
+        blockType: op.blockType,
+        operationId: op.operationId,
+        aliases: op.aliases,
+      })
+    })
+
     docs.forEach((doc) => {
       items.push({
         id: doc.id,
@@ -407,10 +445,10 @@ export const SearchModal = memo(function SearchModal({
     })

     return items
-  }, [workspaces, workflows, pages, blocks, triggers, tools, docs])
+  }, [workspaces, workflows, pages, blocks, triggers, tools, toolOperations, docs])

   const sectionOrder = useMemo<SearchItem['type'][]>(
-    () => ['block', 'tool', 'trigger', 'workflow', 'workspace', 'page', 'doc'],
+    () => ['block', 'tool', 'tool-operation', 'trigger', 'workflow', 'workspace', 'page', 'doc'],
     []
   )
@@ -457,6 +495,7 @@ export const SearchModal = memo(function SearchModal({
       page: [],
       trigger: [],
       block: [],
+      'tool-operation': [],
       tool: [],
       doc: [],
     }
@@ -512,6 +551,17 @@ export const SearchModal = memo(function SearchModal({
           window.dispatchEvent(event)
         }
         break
+      case 'tool-operation':
+        if (item.blockType && item.operationId) {
+          const event = new CustomEvent('add-block-from-toolbar', {
+            detail: {
+              type: item.blockType,
+              presetOperation: item.operationId,
+            },
+          })
+          window.dispatchEvent(event)
+        }
+        break
       case 'workspace':
         if (item.isCurrent) {
           break
@@ -592,6 +642,7 @@ export const SearchModal = memo(function SearchModal({
     page: 'Pages',
     trigger: 'Triggers',
     block: 'Blocks',
+    'tool-operation': 'Tool Operations',
     tool: 'Tools',
     doc: 'Docs',
   }
} }

View File

@@ -8,17 +8,19 @@ export interface SearchableItem {
   name: string
   description?: string
   type: string
+  aliases?: string[]
   [key: string]: any
 }

 export interface SearchResult<T extends SearchableItem> {
   item: T
   score: number
-  matchType: 'exact' | 'prefix' | 'word-boundary' | 'substring' | 'description'
+  matchType: 'exact' | 'prefix' | 'alias' | 'word-boundary' | 'substring' | 'description'
 }

 const SCORE_EXACT_MATCH = 10000
 const SCORE_PREFIX_MATCH = 5000
+const SCORE_ALIAS_MATCH = 3000
 const SCORE_WORD_BOUNDARY = 1000
 const SCORE_SUBSTRING_MATCH = 100
 const DESCRIPTION_WEIGHT = 0.3
@@ -67,6 +69,39 @@ function calculateFieldScore(
   return { score: 0, matchType: null }
 }

+/**
+ * Check if query matches any alias in the item's aliases array
+ * Returns the alias score if a match is found, 0 otherwise
+ */
+function calculateAliasScore(
+  query: string,
+  aliases?: string[]
+): { score: number; matchType: 'alias' | null } {
+  if (!aliases || aliases.length === 0) {
+    return { score: 0, matchType: null }
+  }
+
+  const normalizedQuery = query.toLowerCase().trim()
+
+  for (const alias of aliases) {
+    const normalizedAlias = alias.toLowerCase().trim()
+
+    if (normalizedAlias === normalizedQuery) {
+      return { score: SCORE_ALIAS_MATCH, matchType: 'alias' }
+    }
+
+    if (normalizedAlias.startsWith(normalizedQuery)) {
+      return { score: SCORE_ALIAS_MATCH * 0.8, matchType: 'alias' }
+    }
+
+    if (normalizedQuery.includes(normalizedAlias) || normalizedAlias.includes(normalizedQuery)) {
+      return { score: SCORE_ALIAS_MATCH * 0.6, matchType: 'alias' }
+    }
+  }
+
+  return { score: 0, matchType: null }
+}
+
 /**
  * Search items using tiered matching algorithm
  * Returns items sorted by relevance (highest score first)
@@ -90,15 +125,20 @@ export function searchItems<T extends SearchableItem>(
       ? calculateFieldScore(normalizedQuery, item.description)
       : { score: 0, matchType: null }

+    const aliasMatch = calculateAliasScore(normalizedQuery, item.aliases)
+
     const nameScore = nameMatch.score
     const descScore = descMatch.score * DESCRIPTION_WEIGHT
+    const aliasScore = aliasMatch.score

-    const bestScore = Math.max(nameScore, descScore)
+    const bestScore = Math.max(nameScore, descScore, aliasScore)

     if (bestScore > 0) {
       let matchType: SearchResult<T>['matchType'] = 'substring'
-      if (nameScore >= descScore) {
+      if (nameScore >= descScore && nameScore >= aliasScore) {
         matchType = nameMatch.matchType || 'substring'
+      } else if (aliasScore >= descScore) {
+        matchType = 'alias'
       } else {
         matchType = 'description'
       }
@@ -125,6 +165,8 @@ export function getMatchTypeLabel(matchType: SearchResult<any>['matchType']): st
     case 'exact':
       return 'Exact match'
     case 'prefix':
       return 'Starts with'
+    case 'alias':
+      return 'Similar to'
     case 'word-boundary':
       return 'Word match'
     case 'substring':
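One behavioral detail of calculateAliasScore worth noting: aliases are checked in array order and the first alias that matches at any tier wins, so an earlier loose substring match returns before a later, stronger prefix match is even considered. A small standalone illustration with made-up aliases (constant as defined above):

const SCORE_ALIAS_MATCH = 3000

// Same tier logic as calculateAliasScore, minus the matchType plumbing.
function aliasScore(query: string, aliases: string[]): number {
  const q = query.toLowerCase().trim()
  for (const alias of aliases) {
    const a = alias.toLowerCase().trim()
    if (a === q) return SCORE_ALIAS_MATCH
    if (a.startsWith(q)) return SCORE_ALIAS_MATCH * 0.8
    if (q.includes(a) || a.includes(q)) return SCORE_ALIAS_MATCH * 0.6
  }
  return 0
}

// 'spreadsheet' is checked first and matches only as a substring (1800),
// even though 'sheets' later in the list would have matched as a prefix (2400).
console.log(aliasScore('sheet', ['spreadsheet', 'sheets'])) // 1800
console.log(aliasScore('sheet', ['sheets', 'spreadsheet'])) // 2400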

View File

@@ -11,7 +11,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
     'Integrate Browser Use into the workflow. Can navigate the web and perform actions as if a real user was interacting with the browser.',
   docsLink: 'https://docs.sim.ai/tools/browser_use',
   category: 'tools',
-  bgColor: '#E0E0E0',
+  bgColor: '#181C1E',
   icon: BrowserUseIcon,
   subBlocks: [
     {

View File

@@ -1739,12 +1739,12 @@ export function BrowserUseIcon(props: SVGProps<SVGSVGElement>) {
       {...props}
       version='1.0'
       xmlns='http://www.w3.org/2000/svg'
-      width='150pt'
-      height='150pt'
+      width='28'
+      height='28'
      viewBox='0 0 150 150'
      preserveAspectRatio='xMidYMid meet'
     >
-      <g transform='translate(0,150) scale(0.05,-0.05)' fill='#000000' stroke='none'>
+      <g transform='translate(0,150) scale(0.05,-0.05)' fill='currentColor' stroke='none'>
        <path
          d='M786 2713 c-184 -61 -353 -217 -439 -405 -76 -165 -65 -539 19 -666
          l57 -85 -48 -124 c-203 -517 -79 -930 346 -1155 159 -85 441 -71 585 28 l111

View File

@@ -203,10 +203,11 @@ function resolveProjectSelector(
 ): SelectorResolution {
   const serviceId = subBlock.serviceId
   const context = buildBaseContext(args)
+  const selectorId = subBlock.canonicalParamId ?? subBlock.id

   switch (serviceId) {
     case 'linear': {
-      const key: SelectorKey = subBlock.id === 'teamId' ? 'linear.teams' : 'linear.projects'
+      const key: SelectorKey = selectorId === 'teamId' ? 'linear.teams' : 'linear.projects'
       return { key, context, allowSearch: true }
     }
     case 'jira':

View File

@@ -21,6 +21,8 @@ import {
type BatchToggleEnabledOperation, type BatchToggleEnabledOperation,
type BatchToggleHandlesOperation, type BatchToggleHandlesOperation,
type BatchUpdateParentOperation, type BatchUpdateParentOperation,
captureLatestEdges,
captureLatestSubBlockValues,
createOperationEntry, createOperationEntry,
runWithUndoRedoRecordingSuspended, runWithUndoRedoRecordingSuspended,
type UpdateParentOperation, type UpdateParentOperation,
@@ -28,7 +30,6 @@ import {
} from '@/stores/undo-redo' } from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types' import type { BlockState } from '@/stores/workflows/workflow/types'
@@ -445,34 +446,19 @@ export function useUndoRedo() {
break
}
- const latestEdges = useWorkflowStore
- .getState()
- .edges.filter(
- (e) => existingBlockIds.includes(e.source) || existingBlockIds.includes(e.target)
- )
const latestEdges = captureLatestEdges(
useWorkflowStore.getState().edges,
existingBlockIds
)
batchRemoveOp.data.edgeSnapshots = latestEdges
- const latestSubBlockValues: Record<string, Record<string, unknown>> = {}
- existingBlockIds.forEach((blockId) => {
- const merged = mergeSubblockState(
- useWorkflowStore.getState().blocks,
- activeWorkflowId,
- blockId
- )
- const block = merged[blockId]
- if (block?.subBlocks) {
- const values: Record<string, unknown> = {}
- Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => {
- if (subBlock.value !== null && subBlock.value !== undefined) {
- values[subBlockId] = subBlock.value
- }
- })
- if (Object.keys(values).length > 0) {
- latestSubBlockValues[blockId] = values
- }
- }
- })
const latestSubBlockValues = captureLatestSubBlockValues(
useWorkflowStore.getState().blocks,
activeWorkflowId,
existingBlockIds
)
batchRemoveOp.data.subBlockValues = latestSubBlockValues
;(entry.operation as BatchAddBlocksOperation).data.subBlockValues = latestSubBlockValues
addToQueue({
id: opId,
@@ -1153,6 +1139,20 @@ export function useUndoRedo() {
break
}
const latestEdges = captureLatestEdges(
useWorkflowStore.getState().edges,
existingBlockIds
)
batchOp.data.edgeSnapshots = latestEdges
const latestSubBlockValues = captureLatestSubBlockValues(
useWorkflowStore.getState().blocks,
activeWorkflowId,
existingBlockIds
)
batchOp.data.subBlockValues = latestSubBlockValues
;(entry.inverse as BatchAddBlocksOperation).data.subBlockValues = latestSubBlockValues
addToQueue({
id: opId,
operation: {

View File

@@ -29,13 +29,11 @@ export class DocsChunker {
private readonly baseUrl: string
constructor(options: DocsChunkerOptions = {}) {
- // Use the existing TextChunker for chunking logic
this.textChunker = new TextChunker({
chunkSize: options.chunkSize ?? 300, // Max 300 tokens per chunk
minCharactersPerChunk: options.minCharactersPerChunk ?? 1,
chunkOverlap: options.chunkOverlap ?? 50,
})
- // Use localhost docs in development, production docs otherwise
this.baseUrl = options.baseUrl ?? 'https://docs.sim.ai'
}
@@ -74,24 +72,18 @@ export class DocsChunker {
const content = await fs.readFile(filePath, 'utf-8')
const relativePath = path.relative(basePath, filePath)
- // Parse frontmatter and content
const { data: frontmatter, content: markdownContent } = this.parseFrontmatter(content)
- // Extract headers from the content
const headers = this.extractHeaders(markdownContent)
- // Generate document URL
const documentUrl = this.generateDocumentUrl(relativePath)
- // Split content into chunks
const textChunks = await this.splitContent(markdownContent)
- // Generate embeddings for all chunks at once (batch processing)
logger.info(`Generating embeddings for ${textChunks.length} chunks in ${relativePath}`)
const embeddings = textChunks.length > 0 ? await generateEmbeddings(textChunks) : []
const embeddingModel = 'text-embedding-3-small'
- // Convert to DocChunk objects with header context and embeddings
const chunks: DocChunk[] = []
let currentPosition = 0
@@ -100,7 +92,6 @@ export class DocsChunker {
const chunkStart = currentPosition
const chunkEnd = currentPosition + chunkText.length
- // Find the most relevant header for this chunk
const relevantHeader = this.findRelevantHeader(headers, chunkStart)
const chunk: DocChunk = {
@@ -186,11 +177,21 @@ export class DocsChunker {
/**
* Generate document URL from relative path
* Handles index.mdx files specially - they are served at the parent directory path
*/
private generateDocumentUrl(relativePath: string): string {
// Convert file path to URL path
// e.g., "tools/knowledge.mdx" -> "/tools/knowledge"
// e.g., "triggers/index.mdx" -> "/triggers" (NOT "/triggers/index")
- const urlPath = relativePath.replace(/\.mdx$/, '').replace(/\\/g, '/') // Handle Windows paths
let urlPath = relativePath.replace(/\.mdx$/, '').replace(/\\/g, '/') // Handle Windows paths
// In fumadocs, index.mdx files are served at the parent directory path
// e.g., "triggers/index" -> "triggers"
if (urlPath.endsWith('/index')) {
urlPath = urlPath.slice(0, -6) // Remove "/index"
} else if (urlPath === 'index') {
urlPath = '' // Root index.mdx
}
return `${this.baseUrl}/${urlPath}`
}
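A hedged spot-check of the URL mapping above (the import path and the bracket-notation access to the private method are assumptions made for this sketch, not part of the PR):

import { describe, expect, it } from 'vitest'
import { DocsChunker } from './docs-chunker' // assumed module path

describe('generateDocumentUrl (illustrative)', () => {
  it('maps regular pages and index pages to fumadocs-style URLs', () => {
    const chunker = new DocsChunker()
    expect(chunker['generateDocumentUrl']('tools/knowledge.mdx')).toBe('https://docs.sim.ai/tools/knowledge')
    expect(chunker['generateDocumentUrl']('triggers/index.mdx')).toBe('https://docs.sim.ai/triggers')
    expect(chunker['generateDocumentUrl']('index.mdx')).toBe('https://docs.sim.ai/')
  })
})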
@@ -201,7 +202,6 @@ export class DocsChunker {
private findRelevantHeader(headers: HeaderInfo[], position: number): HeaderInfo | null {
if (headers.length === 0) return null
- // Find the last header that comes before this position
let relevantHeader: HeaderInfo | null = null
for (const header of headers) {
@@ -219,23 +219,18 @@ export class DocsChunker {
* Split content into chunks using the existing TextChunker with table awareness
*/
private async splitContent(content: string): Promise<string[]> {
- // Clean the content first
const cleanedContent = this.cleanContent(content)
- // Detect table boundaries to avoid splitting them
const tableBoundaries = this.detectTableBoundaries(cleanedContent)
- // Use the existing TextChunker
const chunks = await this.textChunker.chunk(cleanedContent)
- // Post-process chunks to ensure tables aren't split
const processedChunks = this.mergeTableChunks(
chunks.map((chunk) => chunk.text),
tableBoundaries,
cleanedContent
)
- // Ensure no chunk exceeds 300 tokens
const finalChunks = this.enforceSizeLimit(processedChunks)
return finalChunks
@@ -273,7 +268,6 @@ export class DocsChunker {
const [, frontmatterText, markdownContent] = match
const data: Frontmatter = {}
- // Simple YAML parsing for title and description
const lines = frontmatterText.split('\n')
for (const line of lines) {
const colonIndex = line.indexOf(':')
@@ -294,7 +288,6 @@ export class DocsChunker {
* Estimate token count (rough approximation)
*/
private estimateTokens(text: string): number {
- // Rough approximation: 1 token ≈ 4 characters
return Math.ceil(text.length / 4)
}
@@ -311,17 +304,13 @@ export class DocsChunker {
for (let i = 0; i < lines.length; i++) {
const line = lines[i].trim()
- // Detect table start (markdown table row with pipes)
if (line.includes('|') && line.split('|').length >= 3 && !inTable) {
- // Check if next line is table separator (contains dashes and pipes)
const nextLine = lines[i + 1]?.trim()
if (nextLine?.includes('|') && nextLine.includes('-')) {
inTable = true
tableStart = i
}
- }
- // Detect table end (empty line or non-table content)
- else if (inTable && (!line.includes('|') || line === '' || line.startsWith('#'))) {
} else if (inTable && (!line.includes('|') || line === '' || line.startsWith('#'))) {
tables.push({
start: this.getCharacterPosition(lines, tableStart),
end: this.getCharacterPosition(lines, i - 1) + lines[i - 1]?.length || 0,
@@ -330,7 +319,6 @@ export class DocsChunker {
}
}
- // Handle table at end of content
if (inTable && tableStart >= 0) {
tables.push({
start: this.getCharacterPosition(lines, tableStart),
@@ -367,7 +355,6 @@ export class DocsChunker {
const chunkStart = originalContent.indexOf(chunk, currentPosition)
const chunkEnd = chunkStart + chunk.length
- // Check if this chunk intersects with any table
const intersectsTable = tableBoundaries.some(
(table) =>
(chunkStart >= table.start && chunkStart <= table.end) ||
@@ -376,7 +363,6 @@ export class DocsChunker {
)
if (intersectsTable) {
- // Find which table(s) this chunk intersects with
const affectedTables = tableBoundaries.filter(
(table) =>
(chunkStart >= table.start && chunkStart <= table.end) ||
@@ -384,12 +370,10 @@ export class DocsChunker {
(chunkStart <= table.start && chunkEnd >= table.end)
)
- // Create a chunk that includes the complete table(s)
const minStart = Math.min(chunkStart, ...affectedTables.map((t) => t.start))
const maxEnd = Math.max(chunkEnd, ...affectedTables.map((t) => t.end))
const completeChunk = originalContent.slice(minStart, maxEnd)
- // Only add if we haven't already included this content
if (!mergedChunks.some((existing) => existing.includes(completeChunk.trim()))) {
mergedChunks.push(completeChunk.trim())
}
@@ -400,7 +384,7 @@ export class DocsChunker {
currentPosition = chunkEnd
}
- return mergedChunks.filter((chunk) => chunk.length > 50) // Filter out tiny chunks
return mergedChunks.filter((chunk) => chunk.length > 50)
}
/**
@@ -413,10 +397,8 @@ export class DocsChunker {
const tokens = this.estimateTokens(chunk)
if (tokens <= 300) {
- // Chunk is within limit
finalChunks.push(chunk)
} else {
- // Chunk is too large - split it
const lines = chunk.split('\n')
let currentChunk = ''
@@ -426,7 +408,6 @@ export class DocsChunker {
if (this.estimateTokens(testChunk) <= 300) {
currentChunk = testChunk
} else {
- // Adding this line would exceed limit
if (currentChunk.trim()) {
finalChunks.push(currentChunk.trim())
}
@@ -434,7 +415,6 @@ export class DocsChunker {
}
}
- // Add final chunk if it has content
if (currentChunk.trim()) {
finalChunks.push(currentChunk.trim())
}

View File

@@ -326,32 +326,32 @@ export const env = createEnv({
NEXT_PUBLIC_E2B_ENABLED: z.string().optional(),
NEXT_PUBLIC_COPILOT_TRAINING_ENABLED: z.string().optional(),
NEXT_PUBLIC_ENABLE_PLAYGROUND: z.string().optional(), // Enable component playground at /playground
NEXT_PUBLIC_DOCUMENTATION_URL: z.string().url().optional(), // Custom documentation URL
NEXT_PUBLIC_TERMS_URL: z.string().url().optional(), // Custom terms of service URL
NEXT_PUBLIC_PRIVACY_URL: z.string().url().optional(), // Custom privacy policy URL
// Theme Customization
NEXT_PUBLIC_BRAND_PRIMARY_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Primary brand color (hex format, e.g., "#701ffc")
NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Primary brand hover state (hex format)
NEXT_PUBLIC_BRAND_ACCENT_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Accent brand color (hex format)
NEXT_PUBLIC_BRAND_ACCENT_HOVER_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Accent brand hover state (hex format)
NEXT_PUBLIC_BRAND_BACKGROUND_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Brand background color (hex format)
// Feature Flags
NEXT_PUBLIC_TRIGGER_DEV_ENABLED: z.boolean().optional(), // Client-side gate for async executions UI
NEXT_PUBLIC_SSO_ENABLED: z.boolean().optional(), // Enable SSO login UI components
NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED: z.boolean().optional(), // Enable credential sets (email polling) on self-hosted
NEXT_PUBLIC_ACCESS_CONTROL_ENABLED: z.boolean().optional(), // Enable access control (permission groups) on self-hosted
NEXT_PUBLIC_ORGANIZATIONS_ENABLED: z.boolean().optional(), // Enable organizations on self-hosted (bypasses plan requirements)
NEXT_PUBLIC_DISABLE_INVITATIONS: z.boolean().optional(), // Disable workspace invitations globally (for self-hosted deployments)
NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED: z.boolean().optional().default(true), // Control visibility of email/password login forms
},
// Variables available on both server and client
shared: {
NODE_ENV: z.enum(['development', 'test', 'production']).optional(), // Runtime environment
NEXT_TELEMETRY_DISABLED: z.string().optional(), // Disable Next.js telemetry collection
},
experimental__runtimeEnv: {

View File

@@ -0,0 +1,193 @@
import type { ComponentType } from 'react'
import { getAllBlocks } from '@/blocks'
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
/**
* Represents a searchable tool operation extracted from block configurations.
* Each operation maps to a specific tool that can be invoked when the block
* is configured with that operation selected.
*/
export interface ToolOperationItem {
/** Unique identifier combining block type and operation ID (e.g., "slack_send") */
id: string
/** The block type this operation belongs to (e.g., "slack") */
blockType: string
/** The operation dropdown value (e.g., "send") */
operationId: string
/** Human-readable service name from the block (e.g., "Slack") */
serviceName: string
/** Human-readable operation name from the dropdown label (e.g., "Send Message") */
operationName: string
/** The block's icon component */
icon: ComponentType<{ className?: string }>
/** The block's background color */
bgColor: string
/** Search aliases for common synonyms */
aliases: string[]
}
/**
* Maps common action verbs to their synonyms for better search matching.
* When a user searches for "post message", it should match "send message".
* Based on analysis of 1000+ tool operations in the codebase.
*/
const ACTION_VERB_ALIASES: Record<string, string[]> = {
get: ['read', 'fetch', 'retrieve', 'load', 'obtain'],
read: ['get', 'fetch', 'retrieve', 'load'],
create: ['make', 'new', 'add', 'generate', 'insert'],
add: ['create', 'insert', 'append', 'include'],
update: ['edit', 'modify', 'change', 'patch', 'set'],
set: ['update', 'configure', 'assign'],
delete: ['remove', 'trash', 'destroy', 'erase'],
remove: ['delete', 'clear', 'drop', 'unset'],
list: ['show', 'display', 'view', 'browse', 'enumerate'],
search: ['find', 'query', 'lookup', 'locate'],
query: ['search', 'find', 'lookup'],
send: ['post', 'write', 'deliver', 'transmit', 'publish'],
write: ['send', 'post', 'compose'],
download: ['export', 'save', 'pull', 'fetch'],
upload: ['import', 'push', 'transfer', 'attach'],
execute: ['run', 'invoke', 'trigger', 'perform', 'start'],
check: ['verify', 'validate', 'test', 'inspect'],
cancel: ['abort', 'stop', 'terminate', 'revoke'],
archive: ['store', 'backup', 'preserve'],
copy: ['duplicate', 'clone', 'replicate'],
move: ['transfer', 'relocate', 'migrate'],
share: ['publish', 'distribute', 'broadcast'],
}
/**
* Generates search aliases for an operation name by finding synonyms
* for action verbs in the operation name.
*/
function generateAliases(operationName: string): string[] {
const aliases: string[] = []
const lowerName = operationName.toLowerCase()
for (const [verb, synonyms] of Object.entries(ACTION_VERB_ALIASES)) {
if (lowerName.includes(verb)) {
for (const synonym of synonyms) {
aliases.push(lowerName.replace(verb, synonym))
}
}
}
return aliases
}
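For a concrete sense of the expansion, here is what generateAliases yields for two typical dropdown labels, following the verb table above (a sketch; the outputs track whatever synonyms the table defines):

// Follows directly from ACTION_VERB_ALIASES and generateAliases above.
generateAliases('Send Message')
// -> ['post message', 'write message', 'deliver message', 'transmit message', 'publish message']

generateAliases('List Channels')
// -> ['show channels', 'display channels', 'view channels', 'browse channels', 'enumerate channels']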
/**
* Extracts the operation dropdown subblock from a block's configuration.
* Returns null if no operation dropdown exists.
*/
function findOperationDropdown(block: BlockConfig): SubBlockConfig | null {
return (
block.subBlocks.find(
(sb) => sb.id === 'operation' && sb.type === 'dropdown' && Array.isArray(sb.options)
) ?? null
)
}
/**
* Resolves the tool ID for a given operation using the block's tool config.
* Falls back to checking tools.access if no config.tool function exists.
*/
function resolveToolId(block: BlockConfig, operationId: string): string | null {
if (!block.tools) return null
if (block.tools.config?.tool) {
try {
return block.tools.config.tool({ operation: operationId })
} catch {
return null
}
}
if (block.tools.access?.length === 1) {
return block.tools.access[0]
}
return null
}
/**
* Builds an index of all tool operations from the block registry.
* This index is used by the search modal to enable operation-level discovery.
*
* The function iterates through all blocks that have:
* 1. A tools.access array (indicating they use tools)
* 2. An "operation" dropdown subblock with options
*
* For each operation option, it creates a ToolOperationItem that maps
* the operation to its corresponding tool.
*/
export function buildToolOperationsIndex(): ToolOperationItem[] {
const operations: ToolOperationItem[] = []
const allBlocks = getAllBlocks()
for (const block of allBlocks) {
if (!block.tools?.access?.length || block.hideFromToolbar) {
continue
}
if (block.category !== 'tools') {
continue
}
const operationDropdown = findOperationDropdown(block)
if (!operationDropdown) {
continue
}
const options =
typeof operationDropdown.options === 'function'
? operationDropdown.options()
: operationDropdown.options
if (!options) continue
for (const option of options) {
if (!resolveToolId(block, option.id)) continue
const operationName = option.label
const aliases = generateAliases(operationName)
operations.push({
id: `${block.type}_${option.id}`,
blockType: block.type,
operationId: option.id,
serviceName: block.name,
operationName,
icon: block.icon,
bgColor: block.bgColor,
aliases,
})
}
}
return operations
}
/**
* Cached operations index to avoid rebuilding on every search.
* The index is built lazily on first access.
*/
let cachedOperations: ToolOperationItem[] | null = null
/**
* Returns the tool operations index, building it if necessary.
* The index is cached after first build since block registry is static.
*/
export function getToolOperationsIndex(): ToolOperationItem[] {
if (!cachedOperations) {
cachedOperations = buildToolOperationsIndex()
}
return cachedOperations
}
/**
* Clears the cached operations index.
* Useful for testing or if blocks are dynamically modified.
*/
export function clearToolOperationsCache(): void {
cachedOperations = null
}
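A minimal sketch of how the cached index might be consumed by the search modal; the substring matching here is illustrative rather than the app's actual ranking logic, and the import path is an assumption:

import { getToolOperationsIndex, type ToolOperationItem } from '@/lib/tool-operations' // assumed path

function matchOperations(query: string): ToolOperationItem[] {
  const q = query.trim().toLowerCase()
  if (!q) return []
  return getToolOperationsIndex().filter((op) => {
    // Match against "Service Operation" plus every generated alias.
    const haystack = [`${op.serviceName} ${op.operationName}`.toLowerCase(), ...op.aliases]
    return haystack.some((text) => text.includes(q))
  })
}

// "post message" matches a "Send Message" operation through its generated aliases.
matchOperations('post message')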

View File

@@ -7,6 +7,7 @@ import type { InputFormatField } from '@/lib/workflows/types'
export interface WorkflowInputField {
name: string
type: string
description?: string
}
/**
@@ -37,7 +38,7 @@ export function extractInputFieldsFromBlocks(
if (Array.isArray(inputFormat)) {
return inputFormat
.filter(
- (field: unknown): field is { name: string; type?: string } =>
(field: unknown): field is { name: string; type?: string; description?: string } =>
typeof field === 'object' &&
field !== null &&
'name' in field &&
@@ -47,6 +48,7 @@ export function extractInputFieldsFromBlocks(
.map((field) => ({
name: field.name,
type: field.type || 'string',
...(field.description && { description: field.description }),
}))
}
@@ -57,7 +59,7 @@ export function extractInputFieldsFromBlocks(
if (Array.isArray(legacyFormat)) {
return legacyFormat
.filter(
- (field: unknown): field is { name: string; type?: string } =>
(field: unknown): field is { name: string; type?: string; description?: string } =>
typeof field === 'object' &&
field !== null &&
'name' in field &&
@@ -67,6 +69,7 @@ export function extractInputFieldsFromBlocks(
.map((field) => ({
name: field.name,
type: field.type || 'string',
...(field.description && { description: field.description }),
}))
}
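To show what the optional description adds, a small hedged example of an input format array and the fields extracted from it (the field values are invented for illustration):

// Illustrative input format entries; only entries with a description keep that key.
const inputFormat: Array<{ name: string; type?: string; description?: string }> = [
  { name: 'customerId', type: 'string', description: 'Internal customer identifier' },
  { name: 'limit', type: 'number' },
]

const fields: WorkflowInputField[] = inputFormat.map((field) => ({
  name: field.name,
  type: field.type || 'string',
  ...(field.description && { description: field.description }),
}))
// -> [{ name: 'customerId', type: 'string', description: 'Internal customer identifier' },
//     { name: 'limit', type: 'number' }]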

View File

@@ -0,0 +1,394 @@
/**
* @vitest-environment node
*/
import type { Edge } from 'reactflow'
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import type { BlockState } from '@/stores/workflows/workflow/types'
vi.mock('@/stores/workflows/utils', () => ({
mergeSubblockState: vi.fn(),
}))
import { mergeSubblockState } from '@/stores/workflows/utils'
import { captureLatestEdges, captureLatestSubBlockValues } from './utils'
const mockMergeSubblockState = mergeSubblockState as Mock
describe('captureLatestEdges', () => {
const createEdge = (id: string, source: string, target: string): Edge => ({
id,
source,
target,
})
it('should return edges where blockId is the source', () => {
const edges = [
createEdge('edge-1', 'block-1', 'block-2'),
createEdge('edge-2', 'block-3', 'block-4'),
]
const result = captureLatestEdges(edges, ['block-1'])
expect(result).toEqual([createEdge('edge-1', 'block-1', 'block-2')])
})
it('should return edges where blockId is the target', () => {
const edges = [
createEdge('edge-1', 'block-1', 'block-2'),
createEdge('edge-2', 'block-3', 'block-4'),
]
const result = captureLatestEdges(edges, ['block-2'])
expect(result).toEqual([createEdge('edge-1', 'block-1', 'block-2')])
})
it('should return edges for multiple blocks', () => {
const edges = [
createEdge('edge-1', 'block-1', 'block-2'),
createEdge('edge-2', 'block-3', 'block-4'),
createEdge('edge-3', 'block-2', 'block-5'),
]
const result = captureLatestEdges(edges, ['block-1', 'block-2'])
expect(result).toHaveLength(2)
expect(result).toContainEqual(createEdge('edge-1', 'block-1', 'block-2'))
expect(result).toContainEqual(createEdge('edge-3', 'block-2', 'block-5'))
})
it('should return empty array when no edges match', () => {
const edges = [
createEdge('edge-1', 'block-1', 'block-2'),
createEdge('edge-2', 'block-3', 'block-4'),
]
const result = captureLatestEdges(edges, ['block-99'])
expect(result).toEqual([])
})
it('should return empty array when blockIds is empty', () => {
const edges = [
createEdge('edge-1', 'block-1', 'block-2'),
createEdge('edge-2', 'block-3', 'block-4'),
]
const result = captureLatestEdges(edges, [])
expect(result).toEqual([])
})
it('should return edge when block has both source and target edges', () => {
const edges = [
createEdge('edge-1', 'block-1', 'block-2'),
createEdge('edge-2', 'block-2', 'block-3'),
createEdge('edge-3', 'block-4', 'block-2'),
]
const result = captureLatestEdges(edges, ['block-2'])
expect(result).toHaveLength(3)
expect(result).toContainEqual(createEdge('edge-1', 'block-1', 'block-2'))
expect(result).toContainEqual(createEdge('edge-2', 'block-2', 'block-3'))
expect(result).toContainEqual(createEdge('edge-3', 'block-4', 'block-2'))
})
it('should handle empty edges array', () => {
const result = captureLatestEdges([], ['block-1'])
expect(result).toEqual([])
})
it('should not duplicate edges when block appears in multiple blockIds', () => {
const edges = [createEdge('edge-1', 'block-1', 'block-2')]
const result = captureLatestEdges(edges, ['block-1', 'block-2'])
expect(result).toHaveLength(1)
expect(result).toContainEqual(createEdge('edge-1', 'block-1', 'block-2'))
})
})
describe('captureLatestSubBlockValues', () => {
const workflowId = 'wf-test'
const createBlockState = (
id: string,
subBlocks: Record<string, { id: string; type: string; value: unknown }>
): BlockState =>
({
id,
type: 'function',
name: 'Test Block',
position: { x: 0, y: 0 },
subBlocks: Object.fromEntries(
Object.entries(subBlocks).map(([subId, sb]) => [
subId,
{ id: sb.id, type: sb.type, value: sb.value },
])
),
outputs: {},
enabled: true,
}) as BlockState
beforeEach(() => {
vi.clearAllMocks()
})
it('should capture single block with single subblock value', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'console.log("hello")' },
}),
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({
'block-1': { code: 'console.log("hello")' },
})
})
it('should capture single block with multiple subblock values', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'test code' },
model: { id: 'model', type: 'dropdown', value: 'gpt-4' },
temperature: { id: 'temperature', type: 'slider', value: 0.7 },
}),
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({
'block-1': {
code: 'test code',
model: 'gpt-4',
temperature: 0.7,
},
})
})
it('should capture multiple blocks with values', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'code 1' },
}),
'block-2': createBlockState('block-2', {
prompt: { id: 'prompt', type: 'long-input', value: 'hello world' },
}),
}
mockMergeSubblockState.mockImplementation((_blocks, _wfId, blockId) => {
if (blockId === 'block-1') return { 'block-1': blocks['block-1'] }
if (blockId === 'block-2') return { 'block-2': blocks['block-2'] }
return {}
})
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1', 'block-2'])
expect(result).toEqual({
'block-1': { code: 'code 1' },
'block-2': { prompt: 'hello world' },
})
})
it('should skip null values', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'valid code' },
empty: { id: 'empty', type: 'short-input', value: null },
}),
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({
'block-1': { code: 'valid code' },
})
expect(result['block-1']).not.toHaveProperty('empty')
})
it('should skip undefined values', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'valid code' },
empty: { id: 'empty', type: 'short-input', value: undefined },
}),
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({
'block-1': { code: 'valid code' },
})
})
it('should return empty object for block with no subBlocks', () => {
const blocks: Record<string, BlockState> = {
'block-1': {
id: 'block-1',
type: 'function',
name: 'Test Block',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
} as BlockState,
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({})
})
it('should return empty object for non-existent blockId', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'test' },
}),
}
mockMergeSubblockState.mockReturnValue({})
const result = captureLatestSubBlockValues(blocks, workflowId, ['non-existent'])
expect(result).toEqual({})
})
it('should return empty object when blockIds is empty', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'test' },
}),
}
const result = captureLatestSubBlockValues(blocks, workflowId, [])
expect(result).toEqual({})
expect(mockMergeSubblockState).not.toHaveBeenCalled()
})
it('should handle various value types (string, number, array)', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
text: { id: 'text', type: 'short-input', value: 'string value' },
number: { id: 'number', type: 'slider', value: 42 },
array: {
id: 'array',
type: 'table',
value: [
['a', 'b'],
['c', 'd'],
],
},
}),
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({
'block-1': {
text: 'string value',
number: 42,
array: [
['a', 'b'],
['c', 'd'],
],
},
})
})
it('should only capture values for blockIds in the list', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: 'code 1' },
}),
'block-2': createBlockState('block-2', {
code: { id: 'code', type: 'code', value: 'code 2' },
}),
'block-3': createBlockState('block-3', {
code: { id: 'code', type: 'code', value: 'code 3' },
}),
}
mockMergeSubblockState.mockImplementation((_blocks, _wfId, blockId) => {
if (blockId === 'block-1') return { 'block-1': blocks['block-1'] }
if (blockId === 'block-3') return { 'block-3': blocks['block-3'] }
return {}
})
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1', 'block-3'])
expect(result).toEqual({
'block-1': { code: 'code 1' },
'block-3': { code: 'code 3' },
})
expect(result).not.toHaveProperty('block-2')
})
it('should handle block without subBlocks property', () => {
const blocks: Record<string, BlockState> = {
'block-1': {
id: 'block-1',
type: 'function',
name: 'Test Block',
position: { x: 0, y: 0 },
outputs: {},
enabled: true,
} as BlockState,
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({})
})
it('should handle empty string values', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
code: { id: 'code', type: 'code', value: '' },
}),
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({
'block-1': { code: '' },
})
})
it('should handle zero numeric values', () => {
const blocks: Record<string, BlockState> = {
'block-1': createBlockState('block-1', {
temperature: { id: 'temperature', type: 'slider', value: 0 },
}),
}
mockMergeSubblockState.mockReturnValue(blocks)
const result = captureLatestSubBlockValues(blocks, workflowId, ['block-1'])
expect(result).toEqual({
'block-1': { temperature: 0 },
})
})
})

View File

@@ -1,3 +1,4 @@
import type { Edge } from 'reactflow'
import { UNDO_REDO_OPERATIONS } from '@/socket/constants'
import type {
BatchAddBlocksOperation,
@@ -9,6 +10,8 @@ import type {
Operation,
OperationEntry,
} from '@/stores/undo-redo/types'
import { mergeSubblockState } from '@/stores/workflows/utils'
import type { BlockState } from '@/stores/workflows/workflow/types'
export function createOperationEntry(operation: Operation, inverse: Operation): OperationEntry {
return {
@@ -170,3 +173,31 @@ export function createInverseOperation(operation: Operation): Operation {
}
}
}
export function captureLatestEdges(edges: Edge[], blockIds: string[]): Edge[] {
return edges.filter((e) => blockIds.includes(e.source) || blockIds.includes(e.target))
}
export function captureLatestSubBlockValues(
blocks: Record<string, BlockState>,
workflowId: string,
blockIds: string[]
): Record<string, Record<string, unknown>> {
const values: Record<string, Record<string, unknown>> = {}
blockIds.forEach((blockId) => {
const merged = mergeSubblockState(blocks, workflowId, blockId)
const block = merged[blockId]
if (block?.subBlocks) {
const blockValues: Record<string, unknown> = {}
Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
blockValues[subBlockId] = subBlock.value
}
})
if (Object.keys(blockValues).length > 0) {
values[blockId] = blockValues
}
}
})
return values
}
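A short hedged sketch of how these helpers are intended to be used when recording a snapshot for an undo entry (the import paths and the shape of the snapshot object are assumptions made for the sketch):

import type { Edge } from 'reactflow'
import { captureLatestEdges, captureLatestSubBlockValues } from '@/stores/undo-redo/utils' // assumed path
import type { BlockState } from '@/stores/workflows/workflow/types'

// Capture only what the removed blocks touch, so redo can restore wiring and values.
function snapshotForBlockRemoval(
  edges: Edge[],
  blocks: Record<string, BlockState>,
  workflowId: string,
  blockIds: string[]
) {
  return {
    edgeSnapshots: captureLatestEdges(edges, blockIds),
    subBlockValues: captureLatestSubBlockValues(blocks, workflowId, blockIds),
  }
}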

View File

@@ -46,11 +46,11 @@ export const runTaskTool: ToolConfig<BrowserUseRunTaskParams, BrowserUseRunTaskR
},
},
request: {
- url: 'https://api.browser-use.com/api/v1/run-task',
url: 'https://api.browser-use.com/api/v2/tasks',
method: 'POST',
headers: (params) => ({
'Content-Type': 'application/json',
- Authorization: `Bearer ${params.apiKey}`,
'X-Browser-Use-API-Key': params.apiKey,
}),
body: (params) => {
const requestBody: Record<string, any> = {
@@ -121,12 +121,15 @@ export const runTaskTool: ToolConfig<BrowserUseRunTaskParams, BrowserUseRunTaskR
let liveUrlLogged = false
try {
- const initialTaskResponse = await fetch(`https://api.browser-use.com/api/v1/task/${taskId}`, {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${params.apiKey}`,
- },
- })
const initialTaskResponse = await fetch(
`https://api.browser-use.com/api/v2/tasks/${taskId}`,
{
method: 'GET',
headers: {
'X-Browser-Use-API-Key': params.apiKey,
},
}
)
if (initialTaskResponse.ok) {
const initialTaskData = await initialTaskResponse.json()
@@ -145,60 +148,36 @@ export const runTaskTool: ToolConfig<BrowserUseRunTaskParams, BrowserUseRunTaskR
while (elapsedTime < MAX_POLL_TIME_MS) {
try {
- const statusResponse = await fetch(
- `https://api.browser-use.com/api/v1/task/${taskId}/status`,
- {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${params.apiKey}`,
- },
- }
- )
const statusResponse = await fetch(`https://api.browser-use.com/api/v2/tasks/${taskId}`, {
method: 'GET',
headers: {
'X-Browser-Use-API-Key': params.apiKey,
},
})
if (!statusResponse.ok) {
throw new Error(`Failed to get task status: ${statusResponse.statusText}`)
}
- const status = await statusResponse.json()
const taskData = await statusResponse.json()
const status = taskData.status
logger.info(`BrowserUse task ${taskId} status: ${status}`)
if (['finished', 'failed', 'stopped'].includes(status)) {
- const taskResponse = await fetch(`https://api.browser-use.com/api/v1/task/${taskId}`, {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${params.apiKey}`,
- },
- })
- if (taskResponse.ok) {
- const taskData = await taskResponse.json()
- result.output = {
- id: taskId,
- success: status === 'finished',
- output: taskData.output,
- steps: taskData.steps || [],
- }
- }
result.output = {
id: taskId,
success: status === 'finished',
output: taskData.output ?? null,
steps: taskData.steps || [],
}
return result
}
- if (!liveUrlLogged && status === 'running') {
- const taskResponse = await fetch(`https://api.browser-use.com/api/v1/task/${taskId}`, {
- method: 'GET',
- headers: {
- Authorization: `Bearer ${params.apiKey}`,
- },
- })
- if (taskResponse.ok) {
- const taskData = await taskResponse.json()
- if (taskData.live_url) {
- logger.info(`BrowserUse task ${taskId} running with live URL: ${taskData.live_url}`)
- liveUrlLogged = true
- }
- }
- }
if (!liveUrlLogged && status === 'running' && taskData.live_url) {
logger.info(`BrowserUse task ${taskId} running with live URL: ${taskData.live_url}`)
liveUrlLogged = true
}
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
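Condensed for readability, the v2 polling flow above boils down to the sketch below; the interval and attempt cap are placeholders, and the taskId is whatever the POST to /api/v2/tasks returned:

// Sketch only: poll a BrowserUse v2 task until it reaches a terminal state.
async function pollBrowserUseTask(taskId: string, apiKey: string) {
  const POLL_INTERVAL_MS = 5_000 // placeholder
  const MAX_ATTEMPTS = 60 // placeholder cap instead of a wall-clock timeout

  for (let attempt = 0; attempt < MAX_ATTEMPTS; attempt++) {
    const res = await fetch(`https://api.browser-use.com/api/v2/tasks/${taskId}`, {
      method: 'GET',
      headers: { 'X-Browser-Use-API-Key': apiKey },
    })
    if (!res.ok) throw new Error(`Failed to get task status: ${res.statusText}`)

    const taskData = await res.json()
    if (['finished', 'failed', 'stopped'].includes(taskData.status)) {
      return {
        id: taskId,
        success: taskData.status === 'finished',
        output: taskData.output ?? null,
        steps: taskData.steps || [],
      }
    }
    await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
  }
  throw new Error(`BrowserUse task ${taskId} did not reach a terminal state in time`)
}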

View File

@@ -10,6 +10,7 @@
"@octokit/rest": "^21.0.0", "@octokit/rest": "^21.0.0",
"@tailwindcss/typography": "0.5.19", "@tailwindcss/typography": "0.5.19",
"drizzle-kit": "^0.31.4", "drizzle-kit": "^0.31.4",
"glob": "13.0.0",
"husky": "9.1.7", "husky": "9.1.7",
"lint-staged": "16.0.0", "lint-staged": "16.0.0",
"turbo": "2.7.4", "turbo": "2.7.4",
@@ -2237,7 +2238,7 @@
"github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="], "github-slugger": ["github-slugger@2.0.0", "", {}, "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw=="],
"glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", "jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="], "glob": ["glob@13.0.0", "", { "dependencies": { "minimatch": "^10.1.1", "minipass": "^7.1.2", "path-scurry": "^2.0.0" } }, "sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA=="],
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
@@ -2539,7 +2540,7 @@
"loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="], "loupe": ["loupe@3.2.1", "", {}, "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ=="],
"lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], "lru-cache": ["lru-cache@11.2.4", "", {}, "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg=="],
"lru.min": ["lru.min@1.1.3", "", {}, "sha512-Lkk/vx6ak3rYkRR0Nhu4lFUT2VDnQSxBe8Hbl7f36358p6ow8Bnvr8lrLt98H8J1aGxfhbX4Fs5tYg2+FTwr5Q=="], "lru.min": ["lru.min@1.1.3", "", {}, "sha512-Lkk/vx6ak3rYkRR0Nhu4lFUT2VDnQSxBe8Hbl7f36358p6ow8Bnvr8lrLt98H8J1aGxfhbX4Fs5tYg2+FTwr5Q=="],
@@ -2699,7 +2700,7 @@
"minimal-polyfills": ["minimal-polyfills@2.2.3", "", {}, "sha512-oxdmJ9cL+xV72h0xYxp4tP2d5/fTBpP45H8DIOn9pASuF8a3IYTf+25fMGDYGiWW+MFsuog6KD6nfmhZJQ+uUw=="], "minimal-polyfills": ["minimal-polyfills@2.2.3", "", {}, "sha512-oxdmJ9cL+xV72h0xYxp4tP2d5/fTBpP45H8DIOn9pASuF8a3IYTf+25fMGDYGiWW+MFsuog6KD6nfmhZJQ+uUw=="],
"minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], "minimatch": ["minimatch@10.1.1", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ=="],
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="], "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
@@ -3691,6 +3692,8 @@
"@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], "@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="],
"@babel/helper-compilation-targets/lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],
"@babel/helper-compilation-targets/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], "@babel/helper-compilation-targets/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="],
"@better-auth/sso/jose": ["jose@6.1.3", "", {}, "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ=="], "@better-auth/sso/jose": ["jose@6.1.3", "", {}, "sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ=="],
@@ -3953,6 +3956,8 @@
"dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], "dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
"e2b/glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", "jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="],
"engine.io/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="], "engine.io/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"engine.io/ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="], "engine.io/ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="],
@@ -3993,8 +3998,6 @@
"get-uri/data-uri-to-buffer": ["data-uri-to-buffer@6.0.2", "", {}, "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw=="], "get-uri/data-uri-to-buffer": ["data-uri-to-buffer@6.0.2", "", {}, "sha512-7hvf7/GW8e86rW0ptuwS3OcBGDjIi6SZva7hCyWC0yYry2cOPmLIjXAUHI6DK2HsnwJd9ifmt57i8eV2n4YNpw=="],
"glob/minimatch": ["minimatch@10.1.1", "", { "dependencies": { "@isaacs/brace-expansion": "^5.0.0" } }, "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ=="],
"gray-matter/js-yaml": ["js-yaml@3.14.2", "", { "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg=="], "gray-matter/js-yaml": ["js-yaml@3.14.2", "", { "dependencies": { "argparse": "^1.0.7", "esprima": "^4.0.0" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg=="],
"groq-sdk/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="], "groq-sdk/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="],
@@ -4043,8 +4046,6 @@
"log-update/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], "log-update/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="],
"lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
"mammoth/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="], "mammoth/argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="],
"mdast-util-find-and-replace/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="], "mdast-util-find-and-replace/escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw=="],
@@ -4083,8 +4084,6 @@
"parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="], "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="],
"path-scurry/lru-cache": ["lru-cache@11.2.4", "", {}, "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg=="],
"pdf-lib/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], "pdf-lib/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="],
"pino/thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="], "pino/thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
@@ -4113,6 +4112,8 @@
"react-email/commander": ["commander@13.1.0", "", {}, "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw=="], "react-email/commander": ["commander@13.1.0", "", {}, "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw=="],
"react-email/glob": ["glob@11.1.0", "", { "dependencies": { "foreground-child": "^3.3.1", "jackspeak": "^4.1.1", "minimatch": "^10.1.1", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^2.0.0" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw=="],
"react-promise-suspense/fast-deep-equal": ["fast-deep-equal@2.0.1", "", {}, "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w=="], "react-promise-suspense/fast-deep-equal": ["fast-deep-equal@2.0.1", "", {}, "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w=="],
"readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], "readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="],
@@ -4171,6 +4172,8 @@
"test-exclude/glob": ["glob@10.5.0", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg=="], "test-exclude/glob": ["glob@10.5.0", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg=="],
"test-exclude/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
"thriftrw/long": ["long@2.4.0", "", {}, "sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ=="], "thriftrw/long": ["long@2.4.0", "", {}, "sha512-ijUtjmO/n2A5PaosNG9ZGDsQ3vxJg7ZW8vsY8Kp0f2yIZWhSJvjmegV7t+9RPQKxKrvj8yKGehhS+po14hPLGQ=="],
"tsyringe/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="], "tsyringe/tslib": ["tslib@1.14.1", "", {}, "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="],
@@ -4249,6 +4252,8 @@
"@aws-sdk/middleware-sdk-s3/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.969.0", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-BSe4Lx/qdRQQdX8cSSI7Et20vqBspzAjBy8ZmXVoyLkol3y4sXBXzn+BiLtR+oh60ExQn6o2DU4QjdOZbXaKIQ=="], "@aws-sdk/middleware-sdk-s3/@aws-sdk/core/@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.969.0", "", { "dependencies": { "@smithy/types": "^4.12.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-BSe4Lx/qdRQQdX8cSSI7Et20vqBspzAjBy8ZmXVoyLkol3y4sXBXzn+BiLtR+oh60ExQn6o2DU4QjdOZbXaKIQ=="],
"@babel/helper-compilation-targets/lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
"@browserbasehq/sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="], "@browserbasehq/sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"@browserbasehq/sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="], "@browserbasehq/sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
@@ -4579,6 +4584,8 @@
"rimraf/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], "rimraf/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="],
"rimraf/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
"rimraf/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], "rimraf/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="],
"sim/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="], "sim/@types/node/undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],

View File

@@ -39,6 +39,7 @@
"@octokit/rest": "^21.0.0", "@octokit/rest": "^21.0.0",
"@tailwindcss/typography": "0.5.19", "@tailwindcss/typography": "0.5.19",
"drizzle-kit": "^0.31.4", "drizzle-kit": "^0.31.4",
"glob": "13.0.0",
"husky": "9.1.7", "husky": "9.1.7",
"lint-staged": "16.0.0", "lint-staged": "16.0.0",
"turbo": "2.7.4" "turbo": "2.7.4"