Compare commits

...

11 Commits

Author SHA1 Message Date
Waleed
af592349d3 v0.5.99: local dev improvements, live workflow logs in terminal 2026-02-23 00:24:49 -08:00
Waleed
69ec70af13 feat(terminal): expandable child workflow blocks in console (#3306)
* feat(terminal): expandable child workflow blocks in console

* fix(terminal): cycle guard in collectWorkflowDescendants, workflow node running/canceled state

* fix(terminal): expand workflow blocks nested inside loop/parallel iterations

* fix(terminal): prevent child block mixing across loop iterations for workflow blocks

* ack PR comments, remove extraneous logs

* feat(terminal): real-time child workflow block propagation in console

* fix(terminal): align parallel guard in WorkflowBlockHandler.getIterationContext with BlockExecutor

* fix(terminal): fire onChildWorkflowInstanceReady regardless of nodeMetadata presence

* fix(terminal): use shared isWorkflowBlockType from executor/constants
2026-02-23 00:17:44 -08:00
Waleed
687c12528b fix(parallel): correct active state pulsing and duration display for parallel subflow blocks (#3305)
* fix(executor): resolve block ID for parallel subflow active state

* fix timing for parallel block

* refactor(parallel): extract shared updateActiveBlockRefCount helper

* fix(parallel): error-sticky block run status to prevent branch success masking failure

* Revert "fix(parallel): error-sticky block run status to prevent branch success masking failure"

This reverts commit 9c087cd466.
2026-02-22 15:03:33 -08:00
Waleed
996dc96d6e fix(security): allow HTTP for localhost and loopback addresses (#3304)
* fix(security): allow localhost HTTP without weakening SSRF protections

* fix(security): remove extraneous comments and fix failing SSRF test

* fix(security): derive isLocalhost from hostname not resolved IP in validateUrlWithDNS

* fix(security): verify resolved IP is loopback when hostname is localhost in validateUrlWithDNS

---------

Co-authored-by: aayush598 <aayushgid598@gmail.com>
2026-02-22 14:58:11 -08:00
Waleed
0d86ea01f0 v0.5.98: change detection improvements, rate limit and code execution fixes, removed retired models, hex integration 2026-02-21 18:07:40 -08:00
Waleed
04286fc16b fix(hex): scope param renames to their respective operations (#3295) 2026-02-21 17:53:04 -08:00
Waleed
c52f78c840 fix(models): remove retired claude-3-7-sonnet and update default models (#3292) 2026-02-21 16:44:54 -08:00
Waleed
e318bf2e65 feat(tools): added hex (#3293)
* feat(tools): added hex

* update tool names
2026-02-21 16:44:39 -08:00
Waleed
4913799a27 feat(oauth): add CIMD support for client metadata discovery (#3285)
* feat(oauth): add CIMD support for client metadata discovery

* fix(oauth): add response size limit, redirect_uri and logo_uri validation to CIMD

- Add maxResponseBytes (256KB) to prevent oversized responses
- Validate redirect_uri schemes (https/http only) and reject commas
- Validate logo_uri requires HTTPS, silently drop invalid logos

* fix(oauth): add explicit userId null for CIMD client insert

* fix(oauth): fix redirect_uri error handling, skip upsert on cache hit

- Move scheme check outside try/catch so specific error isn't swallowed
- Return fromCache flag from resolveClientMetadata to skip redundant DB writes

* fix(oauth): evict CIMD cache on upsert failure to allow retry
2026-02-21 14:38:05 -08:00
Waleed
ccb4f5956d fix(redis): prevent false rate limits and code execution failures during Redis outages (#3289) 2026-02-21 12:20:19 -08:00
Vikhyath Mondreti
2a6d4fcb96 fix(deploy): reuse subblock merge helper in use change detection hook (#3287)
* fix(workflow-changes): change detection logic divergence

* use shared helper
2026-02-21 07:57:11 -08:00
64 changed files with 4358 additions and 232 deletions

View File

@@ -5819,3 +5819,15 @@ export function RedisIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
/**
 * Hex logo icon rendered as an inline SVG.
 *
 * Spreads all incoming SVG props onto the root `<svg>` element so callers can
 * control sizing, className, aria attributes, etc. The viewBox matches the
 * original Hex logo artboard (1450.3 x 600).
 */
export function HexIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1450.3 600'>
      {/* Single compound path drawing the "HEX" letterforms in brand purple */}
      <path
        fill='#5F509D'
        fillRule='evenodd'
        d='m250.11,0v199.49h-50V0H0v600h200.11v-300.69h50v300.69h200.18V0h-200.18Zm249.9,0v600h450.29v-250.23h-200.2v149h-50v-199.46h250.2V0h-450.29Zm200.09,199.49v-99.49h50v99.49h-50Zm550.02,0V0h200.18v150l-100,100.09,100,100.09v249.82h-200.18v-300.69h-50v300.69h-200.11v-249.82l100.11-100.09-100.11-100.09V0h200.11v199.49h50Z'
      />
    </svg>
  )
}

View File

@@ -54,6 +54,7 @@ import {
GrafanaIcon,
GrainIcon,
GreptileIcon,
HexIcon,
HubspotIcon,
HuggingFaceIcon,
HunterIOIcon,
@@ -196,6 +197,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
grafana: GrafanaIcon,
grain: GrainIcon,
greptile: GreptileIcon,
hex: HexIcon,
hubspot: HubspotIcon,
huggingface: HuggingFaceIcon,
hunter: HunterIOIcon,

View File

@@ -0,0 +1,459 @@
---
title: Hex
description: Run and manage Hex projects
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="hex"
color="#F5E6FF"
/>
{/* MANUAL-CONTENT-START:intro */}
[Hex](https://hex.tech/) is a collaborative platform for analytics and data science that allows you to build, run, and share interactive data projects and notebooks. Hex lets teams work together on data exploration, transformation, and visualization, making it easy to turn analysis into shareable insights.
With Hex, you can:
- **Create and run powerful notebooks**: Blend SQL, Python, and visualizations in a single, interactive workspace.
- **Collaborate and share**: Work together with teammates in real time and publish interactive data apps for broader audiences.
- **Automate and orchestrate workflows**: Schedule notebook runs, parameterize runs with inputs, and automate data tasks.
- **Visualize and communicate results**: Turn analysis results into dashboards or interactive apps that anyone can use.
- **Integrate with your data stack**: Connect easily to data warehouses, APIs, and other sources.
The Sim Hex integration allows your AI agents or workflows to:
- List, get, and manage Hex projects directly from Sim.
- Trigger and monitor notebook runs, check their statuses, or cancel them as part of larger automation flows.
- Retrieve run results and use them within Sim-powered processes and decision-making.
- Leverage Hex's interactive analytics capabilities right inside your automated Sim workflows.
Whether you're empowering analysts, automating reporting, or embedding actionable data into your processes, Hex and Sim provide a seamless way to operationalize analytics and bring data-driven insights to your team.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Hex into your workflow. Run projects, check run status, manage collections and groups, list users, and view data connections. Requires a Hex API token.
## Tools
### `hex_cancel_run`
Cancel an active Hex project run.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project |
| `runId` | string | Yes | The UUID of the run to cancel |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the run was successfully cancelled |
| `projectId` | string | Project UUID |
| `runId` | string | Run UUID that was cancelled |
### `hex_create_collection`
Create a new collection in the Hex workspace to organize projects.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `name` | string | Yes | Name for the new collection |
| `description` | string | No | Optional description for the collection |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Newly created collection UUID |
| `name` | string | Collection name |
| `description` | string | Collection description |
| `creator` | object | Collection creator |
| ↳ `email` | string | Creator email |
| ↳ `id` | string | Creator UUID |
### `hex_get_collection`
Retrieve details for a specific Hex collection by its ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `collectionId` | string | Yes | The UUID of the collection |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Collection UUID |
| `name` | string | Collection name |
| `description` | string | Collection description |
| `creator` | object | Collection creator |
| ↳ `email` | string | Creator email |
| ↳ `id` | string | Creator UUID |
### `hex_get_data_connection`
Retrieve details for a specific data connection including type, description, and configuration flags.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `dataConnectionId` | string | Yes | The UUID of the data connection |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Connection UUID |
| `name` | string | Connection name |
| `type` | string | Connection type \(e.g., snowflake, postgres, bigquery\) |
| `description` | string | Connection description |
| `connectViaSsh` | boolean | Whether SSH tunneling is enabled |
| `includeMagic` | boolean | Whether Magic AI features are enabled |
| `allowWritebackCells` | boolean | Whether writeback cells are allowed |
### `hex_get_group`
Retrieve details for a specific Hex group.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `groupId` | string | Yes | The UUID of the group |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Group UUID |
| `name` | string | Group name |
| `createdAt` | string | Creation timestamp |
### `hex_get_project`
Get metadata and details for a specific Hex project by its ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Project UUID |
| `title` | string | Project title |
| `description` | string | Project description |
| `status` | object | Project status |
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
| `type` | string | Project type \(PROJECT or COMPONENT\) |
| `creator` | object | Project creator |
| ↳ `email` | string | Creator email |
| `owner` | object | Project owner |
| ↳ `email` | string | Owner email |
| `categories` | array | Project categories |
| ↳ `name` | string | Category name |
| ↳ `description` | string | Category description |
| `lastEditedAt` | string | ISO 8601 last edited timestamp |
| `lastPublishedAt` | string | ISO 8601 last published timestamp |
| `createdAt` | string | ISO 8601 creation timestamp |
| `archivedAt` | string | ISO 8601 archived timestamp |
| `trashedAt` | string | ISO 8601 trashed timestamp |
### `hex_get_project_runs`
Retrieve API-triggered runs for a Hex project with optional filtering by status and pagination.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project |
| `limit` | number | No | Maximum number of runs to return \(1-100, default: 25\) |
| `offset` | number | No | Offset for paginated results \(default: 0\) |
| `statusFilter` | string | No | Filter by run status: PENDING, RUNNING, ERRORED, COMPLETED, KILLED, UNABLE_TO_ALLOCATE_KERNEL |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `runs` | array | List of project runs |
| ↳ `projectId` | string | Project UUID |
| ↳ `runId` | string | Run UUID |
| ↳ `runUrl` | string | URL to view the run |
| ↳ `status` | string | Run status \(PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL\) |
| ↳ `startTime` | string | Run start time |
| ↳ `endTime` | string | Run end time |
| ↳ `elapsedTime` | number | Elapsed time in seconds |
| ↳ `traceId` | string | Trace ID |
| ↳ `projectVersion` | number | Project version number |
| `total` | number | Total number of runs returned |
| `traceId` | string | Top-level trace ID |
### `hex_get_queried_tables`
Return the warehouse tables queried by a Hex project, including data connection and table names.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project |
| `limit` | number | No | Maximum number of tables to return \(1-100\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tables` | array | List of warehouse tables queried by the project |
| ↳ `dataConnectionId` | string | Data connection UUID |
| ↳ `dataConnectionName` | string | Data connection name |
| ↳ `tableName` | string | Table name |
| `total` | number | Total number of tables returned |
### `hex_get_run_status`
Check the status of a Hex project run by its run ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project |
| `runId` | string | Yes | The UUID of the run to check |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `projectId` | string | Project UUID |
| `runId` | string | Run UUID |
| `runUrl` | string | URL to view the run |
| `status` | string | Run status \(PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL\) |
| `startTime` | string | ISO 8601 run start time |
| `endTime` | string | ISO 8601 run end time |
| `elapsedTime` | number | Elapsed time in seconds |
| `traceId` | string | Trace ID for debugging |
| `projectVersion` | number | Project version number |
### `hex_list_collections`
List all collections in the Hex workspace.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `limit` | number | No | Maximum number of collections to return \(1-500, default: 25\) |
| `sortBy` | string | No | Sort by field: NAME |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `collections` | array | List of collections |
| ↳ `id` | string | Collection UUID |
| ↳ `name` | string | Collection name |
| ↳ `description` | string | Collection description |
| ↳ `creator` | object | Collection creator |
| ↳ `email` | string | Creator email |
| ↳ `id` | string | Creator UUID |
| `total` | number | Total number of collections returned |
### `hex_list_data_connections`
List all data connections in the Hex workspace (e.g., Snowflake, PostgreSQL, BigQuery).
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `limit` | number | No | Maximum number of connections to return \(1-500, default: 25\) |
| `sortBy` | string | No | Sort by field: CREATED_AT or NAME |
| `sortDirection` | string | No | Sort direction: ASC or DESC |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `connections` | array | List of data connections |
| ↳ `id` | string | Connection UUID |
| ↳ `name` | string | Connection name |
| ↳ `type` | string | Connection type \(e.g., athena, bigquery, databricks, postgres, redshift, snowflake\) |
| ↳ `description` | string | Connection description |
| ↳ `connectViaSsh` | boolean | Whether SSH tunneling is enabled |
| ↳ `includeMagic` | boolean | Whether Magic AI features are enabled |
| ↳ `allowWritebackCells` | boolean | Whether writeback cells are allowed |
| `total` | number | Total number of connections returned |
### `hex_list_groups`
List all groups in the Hex workspace with optional sorting.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `limit` | number | No | Maximum number of groups to return \(1-500, default: 25\) |
| `sortBy` | string | No | Sort by field: CREATED_AT or NAME |
| `sortDirection` | string | No | Sort direction: ASC or DESC |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `groups` | array | List of workspace groups |
| ↳ `id` | string | Group UUID |
| ↳ `name` | string | Group name |
| ↳ `createdAt` | string | Creation timestamp |
| `total` | number | Total number of groups returned |
### `hex_list_projects`
List all projects in your Hex workspace with optional filtering by status.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `limit` | number | No | Maximum number of projects to return \(1-100\) |
| `includeArchived` | boolean | No | Include archived projects in results |
| `statusFilter` | string | No | Filter by status: PUBLISHED, DRAFT, or ALL |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `projects` | array | List of Hex projects |
| ↳ `id` | string | Project UUID |
| ↳ `title` | string | Project title |
| ↳ `description` | string | Project description |
| ↳ `status` | object | Project status |
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
| ↳ `type` | string | Project type \(PROJECT or COMPONENT\) |
| ↳ `creator` | object | Project creator |
| ↳ `email` | string | Creator email |
| ↳ `owner` | object | Project owner |
| ↳ `email` | string | Owner email |
| ↳ `lastEditedAt` | string | Last edited timestamp |
| ↳ `lastPublishedAt` | string | Last published timestamp |
| ↳ `createdAt` | string | Creation timestamp |
| ↳ `archivedAt` | string | Archived timestamp |
| `total` | number | Total number of projects returned |
### `hex_list_users`
List all users in the Hex workspace with optional filtering and sorting.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `limit` | number | No | Maximum number of users to return \(1-100, default: 25\) |
| `sortBy` | string | No | Sort by field: NAME or EMAIL |
| `sortDirection` | string | No | Sort direction: ASC or DESC |
| `groupId` | string | No | Filter users by group UUID |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `users` | array | List of workspace users |
| ↳ `id` | string | User UUID |
| ↳ `name` | string | User name |
| ↳ `email` | string | User email |
| ↳ `role` | string | User role \(ADMIN, MANAGER, EDITOR, EXPLORER, MEMBER, GUEST, EMBEDDED_USER, ANONYMOUS\) |
| `total` | number | Total number of users returned |
### `hex_run_project`
Execute a published Hex project. Optionally pass input parameters and control caching behavior.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project to run |
| `inputParams` | json | No | JSON object of input parameters for the project \(e.g., \{"date": "2024-01-01"\}\) |
| `dryRun` | boolean | No | If true, perform a dry run without executing the project |
| `updateCache` | boolean | No | \(Deprecated\) If true, update the cached results after execution |
| `updatePublishedResults` | boolean | No | If true, update the published app results after execution |
| `useCachedSqlResults` | boolean | No | If true, use cached SQL results instead of re-running queries |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `projectId` | string | Project UUID |
| `runId` | string | Run UUID |
| `runUrl` | string | URL to view the run |
| `runStatusUrl` | string | URL to check run status |
| `traceId` | string | Trace ID for debugging |
| `projectVersion` | number | Project version number |
### `hex_update_project`
Update a Hex project status label (e.g., endorsement or custom workspace statuses).
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project to update |
| `status` | string | Yes | New project status name \(custom workspace status label\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Project UUID |
| `title` | string | Project title |
| `description` | string | Project description |
| `status` | object | Updated project status |
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
| `type` | string | Project type \(PROJECT or COMPONENT\) |
| `creator` | object | Project creator |
| ↳ `email` | string | Creator email |
| `owner` | object | Project owner |
| ↳ `email` | string | Owner email |
| `categories` | array | Project categories |
| ↳ `name` | string | Category name |
| ↳ `description` | string | Category description |
| `lastEditedAt` | string | Last edited timestamp |
| `lastPublishedAt` | string | Last published timestamp |
| `createdAt` | string | Creation timestamp |
| `archivedAt` | string | Archived timestamp |
| `trashedAt` | string | Trashed timestamp |

View File

@@ -49,6 +49,7 @@
"grafana",
"grain",
"greptile",
"hex",
"hubspot",
"huggingface",
"hunter",

View File

@@ -46,7 +46,7 @@ export default function OAuthConsentPage() {
return
}
fetch(`/api/auth/oauth2/client/${clientId}`, { credentials: 'include' })
fetch(`/api/auth/oauth2/client/${encodeURIComponent(clientId)}`, { credentials: 'include' })
.then(async (res) => {
if (!res.ok) return
const data = await res.json()
@@ -164,13 +164,12 @@ export default function OAuthConsentPage() {
<div className='flex flex-col items-center justify-center'>
<div className='mb-6 flex items-center gap-4'>
{clientInfo?.icon ? (
<Image
<img
src={clientInfo.icon}
alt={clientName ?? 'Application'}
width={48}
height={48}
className='rounded-[10px]'
unoptimized
/>
) : (
<div className='flex h-12 w-12 items-center justify-center rounded-[10px] bg-muted font-medium text-[18px] text-muted-foreground'>

View File

@@ -211,7 +211,7 @@ describe('Function Execute API Route', () => {
it.concurrent('should block SSRF attacks through secure fetch wrapper', async () => {
expect(validateProxyUrl('http://169.254.169.254/latest/meta-data/').isValid).toBe(false)
expect(validateProxyUrl('http://127.0.0.1:8080/admin').isValid).toBe(false)
expect(validateProxyUrl('http://127.0.0.1:8080/admin').isValid).toBe(true)
expect(validateProxyUrl('http://192.168.1.1/config').isValid).toBe(false)
expect(validateProxyUrl('http://10.0.0.1/internal').isValid).toBe(false)
})

View File

@@ -165,7 +165,7 @@ export async function POST(request: NextRequest) {
}
const modelName =
provider === 'anthropic' ? 'anthropic/claude-3-7-sonnet-latest' : 'openai/gpt-4.1'
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
try {
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })

View File

@@ -101,7 +101,7 @@ export async function POST(request: NextRequest) {
try {
const modelName =
provider === 'anthropic' ? 'anthropic/claude-3-7-sonnet-latest' : 'openai/gpt-4.1'
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })

View File

@@ -38,6 +38,7 @@ import { executeWorkflowJob, type WorkflowExecutionPayload } from '@/background/
import { normalizeName } from '@/executor/constants'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type {
ChildWorkflowContext,
ExecutionMetadata,
IterationContext,
SerializableExecutionState,
@@ -742,7 +743,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
blockName: string,
blockType: string,
executionOrder: number,
iterationContext?: IterationContext
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => {
logger.info(`[${requestId}] 🔷 onBlockStart called:`, { blockId, blockName, blockType })
sendEvent({
@@ -761,6 +763,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
iterationType: iterationContext.iterationType,
iterationContainerId: iterationContext.iterationContainerId,
}),
...(childWorkflowContext && {
childWorkflowBlockId: childWorkflowContext.parentBlockId,
childWorkflowName: childWorkflowContext.workflowName,
}),
},
})
}
@@ -770,9 +776,20 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
blockName: string,
blockType: string,
callbackData: any,
iterationContext?: IterationContext
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => {
const hasError = callbackData.output?.error
const childWorkflowData = childWorkflowContext
? {
childWorkflowBlockId: childWorkflowContext.parentBlockId,
childWorkflowName: childWorkflowContext.workflowName,
}
: {}
const instanceData = callbackData.childWorkflowInstanceId
? { childWorkflowInstanceId: callbackData.childWorkflowInstanceId }
: {}
if (hasError) {
logger.info(`[${requestId}] ✗ onBlockComplete (error) called:`, {
@@ -802,6 +819,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
iterationType: iterationContext.iterationType,
iterationContainerId: iterationContext.iterationContainerId,
}),
...childWorkflowData,
...instanceData,
},
})
} else {
@@ -831,6 +850,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
iterationType: iterationContext.iterationType,
iterationContainerId: iterationContext.iterationContainerId,
}),
...childWorkflowData,
...instanceData,
},
})
}
@@ -898,12 +919,34 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
selectedOutputs
)
// Emits a `block:childWorkflowStarted` SSE event as soon as a child workflow
// execution instance is created, so the client terminal can link the parent
// workflow block to the child run before any of its blocks complete.
// NOTE(review): relies on `sendEvent`, `executionId`, and `workflowId` from
// the enclosing handler scope (not visible in this hunk).
const onChildWorkflowInstanceReady = (
  blockId: string,
  childWorkflowInstanceId: string,
  iterationContext?: IterationContext
) => {
  sendEvent({
    type: 'block:childWorkflowStarted',
    timestamp: new Date().toISOString(),
    executionId,
    workflowId,
    data: {
      blockId,
      childWorkflowInstanceId,
      // Only attach iteration info when the parent block lives inside a
      // loop/parallel container, so the client can scope the child to the
      // correct iteration row.
      ...(iterationContext && {
        iterationCurrent: iterationContext.iterationCurrent,
        iterationContainerId: iterationContext.iterationContainerId,
      }),
    },
  })
}
const result = await executeWorkflowCore({
snapshot,
callbacks: {
onBlockStart,
onBlockComplete,
onStream,
onChildWorkflowInstanceReady,
},
loggingSession,
abortSignal: timeoutController.signal,

View File

@@ -1,5 +1,6 @@
import { useMemo } from 'react'
import { hasWorkflowChanged } from '@/lib/workflows/comparison'
import { mergeSubblockStateWithValues } from '@/lib/workflows/subblocks'
import { useVariablesStore } from '@/stores/panel/variables/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -42,44 +43,10 @@ export function useChangeDetection({
const currentState = useMemo((): WorkflowState | null => {
if (!workflowId) return null
const blocksWithSubBlocks: WorkflowState['blocks'] = {}
for (const [blockId, block] of Object.entries(blocks)) {
const blockSubValues = subBlockValues?.[blockId] || {}
const subBlocks: Record<string, any> = {}
if (block.subBlocks) {
for (const [subId, subBlock] of Object.entries(block.subBlocks)) {
const storedValue = blockSubValues[subId]
subBlocks[subId] = {
...subBlock,
value: storedValue !== undefined ? storedValue : subBlock.value,
}
}
}
if (block.triggerMode) {
const triggerConfigValue = blockSubValues?.triggerConfig
if (
triggerConfigValue &&
typeof triggerConfigValue === 'object' &&
!subBlocks.triggerConfig
) {
subBlocks.triggerConfig = {
id: 'triggerConfig',
type: 'short-input',
value: triggerConfigValue,
}
}
}
blocksWithSubBlocks[blockId] = {
...block,
subBlocks,
}
}
const mergedBlocks = mergeSubblockStateWithValues(blocks, subBlockValues ?? {})
return {
blocks: blocksWithSubBlocks,
blocks: mergedBlocks,
edges,
loops,
parallels,

View File

@@ -41,6 +41,7 @@ import {
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/hooks'
import { ROW_STYLES } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
import {
collectExpandableNodeIds,
type EntryNode,
type ExecutionGroup,
flattenBlockEntriesOnly,
@@ -67,6 +68,21 @@ const MIN_HEIGHT = TERMINAL_HEIGHT.MIN
const DEFAULT_EXPANDED_HEIGHT = TERMINAL_HEIGHT.DEFAULT
const MIN_OUTPUT_PANEL_WIDTH_PX = OUTPUT_PANEL_WIDTH.MIN
/** Returns true if any node in the subtree has an error */
function hasErrorInTree(nodes: EntryNode[]): boolean {
  for (const node of nodes) {
    // Short-circuit on the first erroring node, depth-first.
    if (node.entry.error || hasErrorInTree(node.children)) return true
  }
  return false
}
/** Returns true if any node in the subtree is currently running */
function hasRunningInTree(nodes: EntryNode[]): boolean {
  for (const node of nodes) {
    // Short-circuit on the first running node, depth-first.
    if (node.entry.isRunning || hasRunningInTree(node.children)) return true
  }
  return false
}
/** Returns true if any node in the subtree was canceled */
function hasCanceledInTree(nodes: EntryNode[]): boolean {
  for (const node of nodes) {
    // Short-circuit on the first canceled node, depth-first.
    if (node.entry.isCanceled || hasCanceledInTree(node.children)) return true
  }
  return false
}
/**
* Block row component for displaying actual block entries
*/
@@ -338,6 +354,122 @@ const SubflowNodeRow = memo(function SubflowNodeRow({
)
})
/**
 * Workflow node component - shows workflow block header with nested child blocks.
 *
 * Renders a single collapsible row for a child-workflow block in the terminal
 * console. Clicking the header selects the entry and, when children exist,
 * toggles expansion. Children are rendered through `EntryNodeRow` so nested
 * loop/parallel structures inside the child workflow display correctly.
 */
const WorkflowNodeRow = memo(function WorkflowNodeRow({
  node,
  selectedEntryId,
  onSelectEntry,
  expandedNodes,
  onToggleNode,
}: {
  node: EntryNode
  selectedEntryId: string | null
  onSelectEntry: (entry: ConsoleEntry) => void
  expandedNodes: Set<string>
  onToggleNode: (nodeId: string) => void
}) {
  const { entry, children } = node
  const BlockIcon = getBlockIcon(entry.blockType)
  const bgColor = getBlockColor(entry.blockType)
  const nodeId = entry.id
  const isExpanded = expandedNodes.has(nodeId)
  const hasChildren = children.length > 0
  const isSelected = selectedEntryId === entry.id
  // Error state bubbles up from any descendant so a collapsed header still
  // signals a failure somewhere inside the child workflow.
  const hasError = useMemo(
    () => Boolean(entry.error) || hasErrorInTree(children),
    [entry.error, children]
  )
  // Running state likewise considers the whole subtree, not just this entry.
  const hasRunningDescendant = useMemo(
    () => Boolean(entry.isRunning) || hasRunningInTree(children),
    [entry.isRunning, children]
  )
  // Canceled display is suppressed while anything in the subtree is still
  // running, so "canceled" never flashes alongside an active spinner.
  const hasCanceledDescendant = useMemo(
    () => (Boolean(entry.isCanceled) || hasCanceledInTree(children)) && !hasRunningDescendant,
    [entry.isCanceled, children, hasRunningDescendant]
  )
  return (
    <div className='flex min-w-0 flex-col'>
      {/* Workflow Block Header */}
      <div
        className={clsx(
          ROW_STYLES.base,
          'h-[26px]',
          isSelected ? ROW_STYLES.selected : ROW_STYLES.hover
        )}
        onClick={(e) => {
          e.stopPropagation()
          {/* Select only when not already selected; toggle expansion on every click */}
          if (!isSelected) onSelectEntry(entry)
          if (hasChildren) onToggleNode(nodeId)
        }}
      >
        <div className='flex min-w-0 flex-1 items-center gap-[8px]'>
          <div
            className='flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center rounded-[4px]'
            style={{ background: bgColor }}
          >
            {BlockIcon && <BlockIcon className='h-[9px] w-[9px] text-white' />}
          </div>
          <span
            className={clsx(
              'min-w-0 truncate font-medium text-[13px]',
              hasError
                ? 'text-[var(--text-error)]'
                : isSelected || isExpanded
                  ? 'text-[var(--text-primary)]'
                  : 'text-[var(--text-tertiary)] group-hover:text-[var(--text-primary)]'
            )}
          >
            {entry.blockName}
          </span>
          {hasChildren && (
            <ChevronDown
              className={clsx(
                'h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]',
                !isExpanded && '-rotate-90'
              )}
            />
          )}
        </div>
        <span
          className={clsx(
            'flex-shrink-0 font-medium text-[13px]',
            !hasRunningDescendant &&
              (hasCanceledDescendant
                ? 'text-[var(--text-secondary)]'
                : 'text-[var(--text-tertiary)]')
          )}
        >
          <StatusDisplay
            isRunning={hasRunningDescendant}
            isCanceled={hasCanceledDescendant}
            formattedDuration={formatDuration(entry.durationMs, { precision: 2 }) ?? '-'}
          />
        </span>
      </div>
      {/* Nested Child Blocks — rendered through EntryNodeRow for full loop/parallel support */}
      {isExpanded && hasChildren && (
        <div className={ROW_STYLES.nested}>
          {children.map((child) => (
            <EntryNodeRow
              key={child.entry.id}
              node={child}
              selectedEntryId={selectedEntryId}
              onSelectEntry={onSelectEntry}
              expandedNodes={expandedNodes}
              onToggleNode={onToggleNode}
            />
          ))}
        </div>
      )}
    </div>
  )
})
/**
* Entry node component - dispatches to appropriate component based on node type
*/
@@ -368,6 +500,18 @@ const EntryNodeRow = memo(function EntryNodeRow({
)
}
if (nodeType === 'workflow') {
return (
<WorkflowNodeRow
node={node}
selectedEntryId={selectedEntryId}
onSelectEntry={onSelectEntry}
expandedNodes={expandedNodes}
onToggleNode={onToggleNode}
/>
)
}
if (nodeType === 'iteration') {
return (
<IterationNodeRow
@@ -659,27 +803,15 @@ export const Terminal = memo(function Terminal() {
])
/**
* Auto-expand subflows and iterations when new entries arrive.
* Auto-expand subflows, iterations, and workflow nodes when new entries arrive.
* Recursively walks the full tree so nested nodes (e.g. a workflow block inside
* a loop iteration) are also expanded automatically.
* This always runs regardless of autoSelectEnabled - new runs should always be visible.
*/
useEffect(() => {
if (executionGroups.length === 0) return
const newestExec = executionGroups[0]
// Collect all node IDs that should be expanded (subflows and their iterations)
const nodeIdsToExpand: string[] = []
for (const node of newestExec.entryTree) {
if (node.nodeType === 'subflow' && node.children.length > 0) {
nodeIdsToExpand.push(node.entry.id)
// Also expand all iteration children
for (const iterNode of node.children) {
if (iterNode.nodeType === 'iteration') {
nodeIdsToExpand.push(iterNode.entry.id)
}
}
}
}
const nodeIdsToExpand = collectExpandableNodeIds(executionGroups[0].entryTree)
if (nodeIdsToExpand.length > 0) {
setExpandedNodes((prev) => {

View File

@@ -1,6 +1,14 @@
import type React from 'react'
import { AlertTriangleIcon, BanIcon, RepeatIcon, SplitIcon, XCircleIcon } from 'lucide-react'
import {
AlertTriangleIcon,
BanIcon,
NetworkIcon,
RepeatIcon,
SplitIcon,
XCircleIcon,
} from 'lucide-react'
import { getBlock } from '@/blocks'
import { isWorkflowBlockType } from '@/executor/constants'
import { TERMINAL_BLOCK_COLUMN_WIDTH } from '@/stores/constants'
import type { ConsoleEntry } from '@/stores/terminal'
@@ -12,6 +20,8 @@ const SUBFLOW_COLORS = {
parallel: '#FEE12B',
} as const
const WORKFLOW_COLOR = '#8b5cf6'
/**
* Special block type colors for errors and system messages
*/
@@ -41,6 +51,10 @@ export function getBlockIcon(
return SplitIcon
}
if (blockType === 'workflow') {
return NetworkIcon
}
if (blockType === 'error') {
return XCircleIcon
}
@@ -71,6 +85,9 @@ export function getBlockColor(blockType: string): string {
if (blockType === 'parallel') {
return SUBFLOW_COLORS.parallel
}
if (blockType === 'workflow') {
return WORKFLOW_COLOR
}
// Special block types for errors and system messages
if (blockType === 'error') {
return SPECIAL_BLOCK_COLORS.error
@@ -120,7 +137,7 @@ export function isSubflowBlockType(blockType: string): boolean {
/**
* Node type for the tree structure
*/
export type EntryNodeType = 'block' | 'subflow' | 'iteration'
export type EntryNodeType = 'block' | 'subflow' | 'iteration' | 'workflow'
/**
* Entry node for tree structure - represents a block, subflow, or iteration
@@ -168,6 +185,36 @@ interface IterationGroup {
startTimeMs: number
}
/**
 * Recursively collects all descendant console entries owned by a workflow
 * block, including entries of nested child workflows at any depth, so the
 * tree can be built correctly for deeply-nested child workflows.
 *
 * @param instanceKey - Unique key for this workflow invocation
 * @param workflowChildGroups - Child entries grouped by parent workflow key
 * @param visited - Keys already expanded; guards against cycles
 */
function collectWorkflowDescendants(
  instanceKey: string,
  workflowChildGroups: Map<string, ConsoleEntry[]>,
  visited: Set<string> = new Set()
): ConsoleEntry[] {
  // A workflow that (transitively) invokes itself would otherwise recurse forever.
  if (visited.has(instanceKey)) return []
  visited.add(instanceKey)

  const direct = workflowChildGroups.get(instanceKey) ?? []
  const descendants: ConsoleEntry[] = direct.slice()
  for (const child of direct) {
    if (!isWorkflowBlockType(child.blockType)) continue
    // Prefer childWorkflowInstanceId (unique per invocation) so children from
    // different loop iterations of the same workflow block stay separated.
    const childKey = child.childWorkflowInstanceId ?? child.blockId
    descendants.push(...collectWorkflowDescendants(childKey, workflowChildGroups, visited))
  }
  return descendants
}
/**
* Builds a tree structure from flat entries.
* Groups iteration entries by (iterationType, iterationContainerId, iterationCurrent), showing all blocks
@@ -175,18 +222,37 @@ interface IterationGroup {
* Sorts by start time to ensure chronological order.
*/
function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
// Separate regular blocks from iteration entries
// Separate entries into three buckets:
// 1. Iteration entries (loop/parallel children)
// 2. Workflow child entries (blocks inside a child workflow)
// 3. Regular blocks
const regularBlocks: ConsoleEntry[] = []
const iterationEntries: ConsoleEntry[] = []
const workflowChildEntries: ConsoleEntry[] = []
for (const entry of entries) {
if (entry.iterationType && entry.iterationCurrent !== undefined) {
if (entry.childWorkflowBlockId) {
// Child workflow entries take priority over iteration classification
workflowChildEntries.push(entry)
} else if (entry.iterationType && entry.iterationCurrent !== undefined) {
iterationEntries.push(entry)
} else {
regularBlocks.push(entry)
}
}
// Group workflow child entries by the parent workflow block ID
const workflowChildGroups = new Map<string, ConsoleEntry[]>()
for (const entry of workflowChildEntries) {
const parentId = entry.childWorkflowBlockId!
const group = workflowChildGroups.get(parentId)
if (group) {
group.push(entry)
} else {
workflowChildGroups.set(parentId, [entry])
}
}
// Group iteration entries by (iterationType, iterationContainerId, iterationCurrent)
const iterationGroupsMap = new Map<string, IterationGroup>()
for (const entry of iterationEntries) {
@@ -261,6 +327,9 @@ function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
...allBlocks.map((b) => new Date(b.endedAt || b.timestamp).getTime())
)
const totalDuration = allBlocks.reduce((sum, b) => sum + (b.durationMs || 0), 0)
// Parallel branches run concurrently — use wall-clock time. Loop iterations run serially — use sum.
const subflowDuration =
iterationType === 'parallel' ? subflowEndMs - subflowStartMs : totalDuration
// Create synthetic subflow parent entry
// Use the minimum executionOrder from all child blocks for proper ordering
@@ -276,7 +345,7 @@ function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
startedAt: new Date(subflowStartMs).toISOString(),
executionOrder: subflowExecutionOrder,
endedAt: new Date(subflowEndMs).toISOString(),
durationMs: totalDuration,
durationMs: subflowDuration,
success: !allBlocks.some((b) => b.error),
}
@@ -291,6 +360,9 @@ function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
...iterBlocks.map((b) => new Date(b.endedAt || b.timestamp).getTime())
)
const iterDuration = iterBlocks.reduce((sum, b) => sum + (b.durationMs || 0), 0)
// Parallel branches run concurrently — use wall-clock time. Loop iterations run serially — use sum.
const iterDisplayDuration =
iterationType === 'parallel' ? iterEndMs - iterStartMs : iterDuration
// Use the minimum executionOrder from blocks in this iteration
const iterExecutionOrder = Math.min(...iterBlocks.map((b) => b.executionOrder))
@@ -305,7 +377,7 @@ function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
startedAt: new Date(iterStartMs).toISOString(),
executionOrder: iterExecutionOrder,
endedAt: new Date(iterEndMs).toISOString(),
durationMs: iterDuration,
durationMs: iterDisplayDuration,
success: !iterBlocks.some((b) => b.error),
iterationCurrent: iterGroup.iterationCurrent,
iterationTotal: iterGroup.iterationTotal,
@@ -313,12 +385,24 @@ function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
iterationContainerId: iterGroup.iterationContainerId,
}
// Block nodes within this iteration
const blockNodes: EntryNode[] = iterBlocks.map((block) => ({
entry: block,
children: [],
nodeType: 'block' as const,
}))
// Block nodes within this iteration — workflow blocks get their full subtree
const blockNodes: EntryNode[] = iterBlocks.map((block) => {
if (isWorkflowBlockType(block.blockType)) {
const instanceKey = block.childWorkflowInstanceId ?? block.blockId
const allDescendants = collectWorkflowDescendants(instanceKey, workflowChildGroups)
const rawChildren = allDescendants.map((c) => ({
...c,
childWorkflowBlockId:
c.childWorkflowBlockId === instanceKey ? undefined : c.childWorkflowBlockId,
}))
return {
entry: block,
children: buildEntryTree(rawChildren),
nodeType: 'workflow' as const,
}
}
return { entry: block, children: [], nodeType: 'block' as const }
})
return {
entry: syntheticIteration,
@@ -338,19 +422,61 @@ function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
})
}
// Build nodes for regular blocks
const regularNodes: EntryNode[] = regularBlocks.map((entry) => ({
// Build workflow nodes for regular blocks that are workflow block types
const workflowNodes: EntryNode[] = []
const remainingRegularBlocks: ConsoleEntry[] = []
for (const block of regularBlocks) {
if (isWorkflowBlockType(block.blockType)) {
const instanceKey = block.childWorkflowInstanceId ?? block.blockId
const allDescendants = collectWorkflowDescendants(instanceKey, workflowChildGroups)
const rawChildren = allDescendants.map((c) => ({
...c,
childWorkflowBlockId:
c.childWorkflowBlockId === instanceKey ? undefined : c.childWorkflowBlockId,
}))
const children = buildEntryTree(rawChildren)
workflowNodes.push({ entry: block, children, nodeType: 'workflow' as const })
} else {
remainingRegularBlocks.push(block)
}
}
// Build nodes for remaining regular blocks
const regularNodes: EntryNode[] = remainingRegularBlocks.map((entry) => ({
entry,
children: [],
nodeType: 'block' as const,
}))
// Combine all nodes and sort by executionOrder ascending (oldest first, top-down)
const allNodes = [...subflowNodes, ...regularNodes]
const allNodes = [...subflowNodes, ...workflowNodes, ...regularNodes]
allNodes.sort((a, b) => a.entry.executionOrder - b.entry.executionOrder)
return allNodes
}
/**
 * Recursively collects IDs of all nodes that should be auto-expanded.
 * Subflow, iteration, and workflow nodes with at least one child qualify;
 * descendants are always walked so deeply-nested containers are found too.
 */
export function collectExpandableNodeIds(nodes: EntryNode[]): string[] {
  const expandable: string[] = []
  const visit = (node: EntryNode): void => {
    if (node.children.length === 0) return
    const isContainer =
      node.nodeType === 'subflow' ||
      node.nodeType === 'iteration' ||
      node.nodeType === 'workflow'
    if (isContainer) expandable.push(node.entry.id)
    node.children.forEach(visit)
  }
  nodes.forEach(visit)
  return expandable
}
/**
* Groups console entries by execution ID and builds a tree structure.
* Pre-computes timestamps for efficient sorting.
@@ -458,7 +584,7 @@ export function flattenBlockEntriesOnly(
): NavigableBlockEntry[] {
const result: NavigableBlockEntry[] = []
for (const node of nodes) {
if (node.nodeType === 'block') {
if (node.nodeType === 'block' || node.nodeType === 'workflow') {
result.push({
entry: node.entry,
executionId,

View File

@@ -20,6 +20,7 @@ import {
TriggerUtils,
} from '@/lib/workflows/triggers/triggers'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
import { updateActiveBlockRefCount } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-execution-utils'
import { getBlock } from '@/blocks'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type {
@@ -63,6 +64,7 @@ interface BlockEventHandlerConfig {
executionIdRef: { current: string }
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
activeBlocksSet: Set<string>
activeBlockRefCounts: Map<string, number>
accumulatedBlockLogs: BlockLog[]
accumulatedBlockStates: Map<string, BlockState>
executedBlockIds: Set<string>
@@ -309,6 +311,7 @@ export function useWorkflowExecution() {
executionIdRef,
workflowEdges,
activeBlocksSet,
activeBlockRefCounts,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
@@ -327,11 +330,7 @@ export function useWorkflowExecution() {
const updateActiveBlocks = (blockId: string, isActive: boolean) => {
if (!workflowId) return
if (isActive) {
activeBlocksSet.add(blockId)
} else {
activeBlocksSet.delete(blockId)
}
updateActiveBlockRefCount(activeBlockRefCounts, activeBlocksSet, blockId, isActive)
setActiveBlocks(workflowId, new Set(activeBlocksSet))
}
@@ -384,6 +383,9 @@ export function useWorkflowExecution() {
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
childWorkflowBlockId: data.childWorkflowBlockId,
childWorkflowName: data.childWorkflowName,
childWorkflowInstanceId: data.childWorkflowInstanceId,
})
}
@@ -407,6 +409,9 @@ export function useWorkflowExecution() {
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
childWorkflowBlockId: data.childWorkflowBlockId,
childWorkflowName: data.childWorkflowName,
childWorkflowInstanceId: data.childWorkflowInstanceId,
})
}
@@ -426,6 +431,9 @@ export function useWorkflowExecution() {
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
childWorkflowBlockId: data.childWorkflowBlockId,
childWorkflowName: data.childWorkflowName,
childWorkflowInstanceId: data.childWorkflowInstanceId,
},
executionIdRef.current
)
@@ -448,6 +456,9 @@ export function useWorkflowExecution() {
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
childWorkflowBlockId: data.childWorkflowBlockId,
childWorkflowName: data.childWorkflowName,
childWorkflowInstanceId: data.childWorkflowInstanceId,
},
executionIdRef.current
)
@@ -479,6 +490,8 @@ export function useWorkflowExecution() {
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
childWorkflowBlockId: data.childWorkflowBlockId,
childWorkflowName: data.childWorkflowName,
})
}
@@ -536,7 +549,27 @@ export function useWorkflowExecution() {
}
}
return { onBlockStarted, onBlockCompleted, onBlockError }
const onBlockChildWorkflowStarted = (data: {
blockId: string
childWorkflowInstanceId: string
iterationCurrent?: number
iterationContainerId?: string
}) => {
if (isStaleExecution()) return
updateConsole(
data.blockId,
{
childWorkflowInstanceId: data.childWorkflowInstanceId,
...(data.iterationCurrent !== undefined && { iterationCurrent: data.iterationCurrent }),
...(data.iterationContainerId !== undefined && {
iterationContainerId: data.iterationContainerId,
}),
},
executionIdRef.current
)
}
return { onBlockStarted, onBlockCompleted, onBlockError, onBlockChildWorkflowStarted }
},
[addConsole, setActiveBlocks, setBlockRunStatus, setEdgeRunStatus, updateConsole]
)
@@ -1280,6 +1313,7 @@ export function useWorkflowExecution() {
}
const activeBlocksSet = new Set<string>()
const activeBlockRefCounts = new Map<string, number>()
const streamedContent = new Map<string, string>()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
@@ -1292,6 +1326,7 @@ export function useWorkflowExecution() {
executionIdRef,
workflowEdges,
activeBlocksSet,
activeBlockRefCounts,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
@@ -1334,6 +1369,7 @@ export function useWorkflowExecution() {
onBlockStarted: blockHandlers.onBlockStarted,
onBlockCompleted: blockHandlers.onBlockCompleted,
onBlockError: blockHandlers.onBlockError,
onBlockChildWorkflowStarted: blockHandlers.onBlockChildWorkflowStarted,
onStreamChunk: (data) => {
const existing = streamedContent.get(data.blockId) || ''
@@ -1902,6 +1938,7 @@ export function useWorkflowExecution() {
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
const activeBlocksSet = new Set<string>()
const activeBlockRefCounts = new Map<string, number>()
try {
const blockHandlers = buildBlockEventHandlers({
@@ -1909,6 +1946,7 @@ export function useWorkflowExecution() {
executionIdRef,
workflowEdges,
activeBlocksSet,
activeBlockRefCounts,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
@@ -1929,6 +1967,7 @@ export function useWorkflowExecution() {
onBlockStarted: blockHandlers.onBlockStarted,
onBlockCompleted: blockHandlers.onBlockCompleted,
onBlockError: blockHandlers.onBlockError,
onBlockChildWorkflowStarted: blockHandlers.onBlockChildWorkflowStarted,
onExecutionCompleted: (data) => {
if (data.success) {
@@ -2104,6 +2143,7 @@ export function useWorkflowExecution() {
const workflowEdges = useWorkflowStore.getState().edges
const activeBlocksSet = new Set<string>()
const activeBlockRefCounts = new Map<string, number>()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
@@ -2115,6 +2155,7 @@ export function useWorkflowExecution() {
executionIdRef,
workflowEdges,
activeBlocksSet,
activeBlockRefCounts,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
@@ -2155,6 +2196,10 @@ export function useWorkflowExecution() {
clearOnce()
handlers.onBlockError(data)
},
onBlockChildWorkflowStarted: (data) => {
clearOnce()
handlers.onBlockChildWorkflowStarted(data)
},
onExecutionCompleted: () => {
const currentId = useExecutionStore
.getState()

View File

@@ -5,6 +5,30 @@ import { useTerminalConsoleStore } from '@/stores/terminal'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/**
 * Updates the active blocks set and per-block reference counts for a single block.
 * With parallel branches the same block may start several times; ref counting
 * keeps it in the active set until every start has been matched by a
 * completion or error.
 *
 * @param refCounts - Outstanding activation count per block ID
 * @param activeSet - Set of block IDs currently rendered as active
 * @param blockId - The block whose state changed
 * @param isActive - True on start, false on completion/error
 */
export function updateActiveBlockRefCount(
  refCounts: Map<string, number>,
  activeSet: Set<string>,
  blockId: string,
  isActive: boolean
): void {
  if (!isActive) {
    // Missing counter is treated as 1 so an unmatched deactivation clears state.
    const remaining = (refCounts.get(blockId) ?? 1) - 1
    if (remaining > 0) {
      refCounts.set(blockId, remaining)
    } else {
      refCounts.delete(blockId)
      activeSet.delete(blockId)
    }
    return
  }
  refCounts.set(blockId, (refCounts.get(blockId) ?? 0) + 1)
  activeSet.add(blockId)
}
export interface WorkflowExecutionOptions {
workflowInput?: any
onStream?: (se: StreamingExecution) => Promise<void>
@@ -39,6 +63,7 @@ export async function executeWorkflowWithFullLogging(
const workflowEdges = useWorkflowStore.getState().edges
const activeBlocksSet = new Set<string>()
const activeBlockRefCounts = new Map<string, number>()
const payload: any = {
input: options.workflowInput,
@@ -103,7 +128,12 @@ export async function executeWorkflowWithFullLogging(
switch (event.type) {
case 'block:started': {
activeBlocksSet.add(event.data.blockId)
updateActiveBlockRefCount(
activeBlockRefCounts,
activeBlocksSet,
event.data.blockId,
true
)
setActiveBlocks(wfId, new Set(activeBlocksSet))
const incomingEdges = workflowEdges.filter(
@@ -115,8 +145,13 @@ export async function executeWorkflowWithFullLogging(
break
}
case 'block:completed':
activeBlocksSet.delete(event.data.blockId)
case 'block:completed': {
updateActiveBlockRefCount(
activeBlockRefCounts,
activeBlocksSet,
event.data.blockId,
false
)
setActiveBlocks(wfId, new Set(activeBlocksSet))
setBlockRunStatus(wfId, event.data.blockId, 'success')
@@ -138,15 +173,24 @@ export async function executeWorkflowWithFullLogging(
iterationTotal: event.data.iterationTotal,
iterationType: event.data.iterationType,
iterationContainerId: event.data.iterationContainerId,
childWorkflowBlockId: event.data.childWorkflowBlockId,
childWorkflowName: event.data.childWorkflowName,
childWorkflowInstanceId: event.data.childWorkflowInstanceId,
})
if (options.onBlockComplete) {
options.onBlockComplete(event.data.blockId, event.data.output).catch(() => {})
}
break
}
case 'block:error':
activeBlocksSet.delete(event.data.blockId)
case 'block:error': {
updateActiveBlockRefCount(
activeBlockRefCounts,
activeBlocksSet,
event.data.blockId,
false
)
setActiveBlocks(wfId, new Set(activeBlocksSet))
setBlockRunStatus(wfId, event.data.blockId, 'error')
@@ -169,8 +213,30 @@ export async function executeWorkflowWithFullLogging(
iterationTotal: event.data.iterationTotal,
iterationType: event.data.iterationType,
iterationContainerId: event.data.iterationContainerId,
childWorkflowBlockId: event.data.childWorkflowBlockId,
childWorkflowName: event.data.childWorkflowName,
childWorkflowInstanceId: event.data.childWorkflowInstanceId,
})
break
}
case 'block:childWorkflowStarted': {
const { updateConsole } = useTerminalConsoleStore.getState()
updateConsole(
event.data.blockId,
{
childWorkflowInstanceId: event.data.childWorkflowInstanceId,
...(event.data.iterationCurrent !== undefined && {
iterationCurrent: event.data.iterationCurrent,
}),
...(event.data.iterationContainerId !== undefined && {
iterationContainerId: event.data.iterationContainerId,
}),
},
executionId
)
break
}
case 'execution:completed':
executionResult = {

View File

@@ -33,6 +33,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
type: 'dropdown',
options: [
{ label: 'Browser Use LLM', id: 'browser-use-llm' },
{ label: 'Browser Use 2.0', id: 'browser-use-2.0' },
{ label: 'GPT-4o', id: 'gpt-4o' },
{ label: 'GPT-4o Mini', id: 'gpt-4o-mini' },
{ label: 'GPT-4.1', id: 'gpt-4.1' },
@@ -42,6 +43,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
{ label: 'Gemini 2.5 Flash', id: 'gemini-2.5-flash' },
{ label: 'Gemini 2.5 Pro', id: 'gemini-2.5-pro' },
{ label: 'Gemini 3 Pro Preview', id: 'gemini-3-pro-preview' },
{ label: 'Gemini 3 Flash Preview', id: 'gemini-3-flash-preview' },
{ label: 'Gemini Flash Latest', id: 'gemini-flash-latest' },
{ label: 'Gemini Flash Lite Latest', id: 'gemini-flash-lite-latest' },
{ label: 'Claude 3.7 Sonnet', id: 'claude-3-7-sonnet-20250219' },

View File

@@ -0,0 +1,446 @@
import { HexIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import type { HexResponse } from '@/tools/hex/types'
export const HexBlock: BlockConfig<HexResponse> = {
type: 'hex',
name: 'Hex',
description: 'Run and manage Hex projects',
longDescription:
'Integrate Hex into your workflow. Run projects, check run status, manage collections and groups, list users, and view data connections. Requires a Hex API token.',
docsLink: 'https://docs.sim.ai/tools/hex',
category: 'tools',
bgColor: '#F5E6FF',
icon: HexIcon,
authMode: AuthMode.ApiKey,
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
options: [
{ label: 'Run Project', id: 'run_project' },
{ label: 'Get Run Status', id: 'get_run_status' },
{ label: 'Get Project Runs', id: 'get_project_runs' },
{ label: 'Cancel Run', id: 'cancel_run' },
{ label: 'List Projects', id: 'list_projects' },
{ label: 'Get Project', id: 'get_project' },
{ label: 'Update Project', id: 'update_project' },
{ label: 'Get Queried Tables', id: 'get_queried_tables' },
{ label: 'List Users', id: 'list_users' },
{ label: 'List Groups', id: 'list_groups' },
{ label: 'Get Group', id: 'get_group' },
{ label: 'List Collections', id: 'list_collections' },
{ label: 'Get Collection', id: 'get_collection' },
{ label: 'Create Collection', id: 'create_collection' },
{ label: 'List Data Connections', id: 'list_data_connections' },
{ label: 'Get Data Connection', id: 'get_data_connection' },
],
value: () => 'run_project',
},
{
id: 'projectId',
title: 'Project ID',
type: 'short-input',
placeholder: 'Enter project UUID',
condition: {
field: 'operation',
value: [
'run_project',
'get_run_status',
'get_project_runs',
'cancel_run',
'get_project',
'update_project',
'get_queried_tables',
],
},
required: {
field: 'operation',
value: [
'run_project',
'get_run_status',
'get_project_runs',
'cancel_run',
'get_project',
'update_project',
'get_queried_tables',
],
},
},
{
id: 'runId',
title: 'Run ID',
type: 'short-input',
placeholder: 'Enter run UUID',
condition: { field: 'operation', value: ['get_run_status', 'cancel_run'] },
required: { field: 'operation', value: ['get_run_status', 'cancel_run'] },
},
{
id: 'inputParams',
title: 'Input Parameters',
type: 'code',
placeholder: '{"param_name": "value"}',
condition: { field: 'operation', value: 'run_project' },
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert at creating Hex project input parameters.
Generate ONLY the raw JSON object based on the user's request.
The output MUST be a single, valid JSON object, starting with { and ending with }.
Current parameters: {context}
Do not include any explanations, markdown formatting, or other text outside the JSON object.
The keys should match the input parameter names defined in the Hex project.
Example:
{
"date_range": "2024-01-01",
"department": "engineering",
"include_inactive": false
}`,
placeholder: 'Describe the input parameters you need...',
generationType: 'json-object',
},
},
{
id: 'projectStatus',
title: 'Status',
type: 'short-input',
placeholder: 'Enter status name (e.g., custom workspace status label)',
condition: { field: 'operation', value: 'update_project' },
required: { field: 'operation', value: 'update_project' },
},
{
id: 'runStatusFilter',
title: 'Status Filter',
type: 'dropdown',
options: [
{ label: 'All', id: '' },
{ label: 'Pending', id: 'PENDING' },
{ label: 'Running', id: 'RUNNING' },
{ label: 'Completed', id: 'COMPLETED' },
{ label: 'Errored', id: 'ERRORED' },
{ label: 'Killed', id: 'KILLED' },
],
value: () => '',
condition: { field: 'operation', value: 'get_project_runs' },
},
{
id: 'groupIdInput',
title: 'Group ID',
type: 'short-input',
placeholder: 'Enter group UUID',
condition: { field: 'operation', value: 'get_group' },
required: { field: 'operation', value: 'get_group' },
},
{
id: 'collectionId',
title: 'Collection ID',
type: 'short-input',
placeholder: 'Enter collection UUID',
condition: { field: 'operation', value: 'get_collection' },
required: { field: 'operation', value: 'get_collection' },
},
{
id: 'collectionName',
title: 'Collection Name',
type: 'short-input',
placeholder: 'Enter collection name',
condition: { field: 'operation', value: 'create_collection' },
required: { field: 'operation', value: 'create_collection' },
},
{
id: 'collectionDescription',
title: 'Description',
type: 'long-input',
placeholder: 'Optional description for the collection',
condition: { field: 'operation', value: 'create_collection' },
},
{
id: 'dataConnectionId',
title: 'Data Connection ID',
type: 'short-input',
placeholder: 'Enter data connection UUID',
condition: { field: 'operation', value: 'get_data_connection' },
required: { field: 'operation', value: 'get_data_connection' },
},
{
id: 'apiKey',
title: 'API Key',
type: 'short-input',
placeholder: 'Enter your Hex API token',
password: true,
required: true,
},
// Advanced fields
{
id: 'dryRun',
title: 'Dry Run',
type: 'switch',
condition: { field: 'operation', value: 'run_project' },
mode: 'advanced',
},
{
id: 'updateCache',
title: 'Update Cache',
type: 'switch',
condition: { field: 'operation', value: 'run_project' },
mode: 'advanced',
},
{
id: 'updatePublishedResults',
title: 'Update Published Results',
type: 'switch',
condition: { field: 'operation', value: 'run_project' },
mode: 'advanced',
},
{
id: 'useCachedSqlResults',
title: 'Use Cached SQL Results',
type: 'switch',
condition: { field: 'operation', value: 'run_project' },
mode: 'advanced',
},
{
id: 'limit',
title: 'Limit',
type: 'short-input',
placeholder: '25',
condition: {
field: 'operation',
value: [
'list_projects',
'get_project_runs',
'get_queried_tables',
'list_users',
'list_groups',
'list_collections',
'list_data_connections',
],
},
mode: 'advanced',
},
{
id: 'offset',
title: 'Offset',
type: 'short-input',
placeholder: '0',
condition: { field: 'operation', value: 'get_project_runs' },
mode: 'advanced',
},
{
id: 'includeArchived',
title: 'Include Archived',
type: 'switch',
condition: { field: 'operation', value: 'list_projects' },
mode: 'advanced',
},
{
id: 'statusFilter',
title: 'Status Filter',
type: 'dropdown',
options: [
{ label: 'All', id: '' },
{ label: 'Published', id: 'PUBLISHED' },
{ label: 'Draft', id: 'DRAFT' },
],
value: () => '',
condition: { field: 'operation', value: 'list_projects' },
mode: 'advanced',
},
{
id: 'groupId',
title: 'Filter by Group',
type: 'short-input',
placeholder: 'Group UUID (optional)',
condition: { field: 'operation', value: 'list_users' },
mode: 'advanced',
},
],
tools: {
access: [
'hex_cancel_run',
'hex_create_collection',
'hex_get_collection',
'hex_get_data_connection',
'hex_get_group',
'hex_get_project',
'hex_get_project_runs',
'hex_get_queried_tables',
'hex_get_run_status',
'hex_list_collections',
'hex_list_data_connections',
'hex_list_groups',
'hex_list_projects',
'hex_list_users',
'hex_run_project',
'hex_update_project',
],
config: {
tool: (params) => {
switch (params.operation) {
case 'run_project':
return 'hex_run_project'
case 'get_run_status':
return 'hex_get_run_status'
case 'get_project_runs':
return 'hex_get_project_runs'
case 'cancel_run':
return 'hex_cancel_run'
case 'list_projects':
return 'hex_list_projects'
case 'get_project':
return 'hex_get_project'
case 'update_project':
return 'hex_update_project'
case 'get_queried_tables':
return 'hex_get_queried_tables'
case 'list_users':
return 'hex_list_users'
case 'list_groups':
return 'hex_list_groups'
case 'get_group':
return 'hex_get_group'
case 'list_collections':
return 'hex_list_collections'
case 'get_collection':
return 'hex_get_collection'
case 'create_collection':
return 'hex_create_collection'
case 'list_data_connections':
return 'hex_list_data_connections'
case 'get_data_connection':
return 'hex_get_data_connection'
default:
return 'hex_run_project'
}
},
/**
 * Maps UI parameter names to the tool's expected parameter names.
 * Only non-empty values are forwarded; `limit` applies to every operation,
 * while the remaining mappings are operation-specific.
 */
params: (params) => {
  const mapped: Record<string, unknown> = {}
  const operation = params.operation

  if (params.limit) {
    mapped.limit = Number(params.limit)
  }

  switch (operation) {
    case 'get_project_runs':
      if (params.offset) mapped.offset = Number(params.offset)
      if (params.runStatusFilter) mapped.statusFilter = params.runStatusFilter
      break
    case 'update_project':
      if (params.projectStatus) mapped.status = params.projectStatus
      break
    case 'get_group':
      if (params.groupIdInput) mapped.groupId = params.groupIdInput
      break
    case 'list_users':
      if (params.groupId) mapped.groupId = params.groupId
      break
    case 'create_collection':
      if (params.collectionName) mapped.name = params.collectionName
      if (params.collectionDescription) mapped.description = params.collectionDescription
      break
  }

  return mapped
},
},
},
inputs: {
operation: { type: 'string', description: 'Operation to perform' },
apiKey: { type: 'string', description: 'Hex API token' },
projectId: { type: 'string', description: 'Project UUID' },
runId: { type: 'string', description: 'Run UUID' },
inputParams: { type: 'json', description: 'Input parameters for project run' },
dryRun: { type: 'boolean', description: 'Perform a dry run without executing the project' },
updateCache: {
type: 'boolean',
description: '(Deprecated) Update cached results after execution',
},
updatePublishedResults: {
type: 'boolean',
description: 'Update published app results after execution',
},
useCachedSqlResults: {
type: 'boolean',
description: 'Use cached SQL results instead of re-running queries',
},
projectStatus: {
type: 'string',
description: 'New project status name (custom workspace status label)',
},
limit: { type: 'number', description: 'Max number of results to return' },
offset: { type: 'number', description: 'Offset for paginated results' },
includeArchived: { type: 'boolean', description: 'Include archived projects' },
statusFilter: { type: 'string', description: 'Filter projects by status' },
runStatusFilter: { type: 'string', description: 'Filter runs by status' },
groupId: { type: 'string', description: 'Filter users by group UUID' },
groupIdInput: { type: 'string', description: 'Group UUID for get group' },
collectionId: { type: 'string', description: 'Collection UUID' },
collectionName: { type: 'string', description: 'Collection name' },
collectionDescription: { type: 'string', description: 'Collection description' },
dataConnectionId: { type: 'string', description: 'Data connection UUID' },
},
outputs: {
// Run creation outputs
projectId: { type: 'string', description: 'Project UUID' },
runId: { type: 'string', description: 'Run UUID' },
runUrl: { type: 'string', description: 'URL to view the run' },
runStatusUrl: { type: 'string', description: 'URL to check run status' },
projectVersion: { type: 'number', description: 'Project version number' },
// Run status outputs
status: {
type: 'json',
description: 'Project status object ({ name }) or run status string',
},
startTime: { type: 'string', description: 'Run start time' },
endTime: { type: 'string', description: 'Run end time' },
elapsedTime: { type: 'number', description: 'Elapsed time in seconds' },
traceId: { type: 'string', description: 'Trace ID for debugging' },
// Project outputs
id: { type: 'string', description: 'Resource ID' },
title: { type: 'string', description: 'Project title' },
name: { type: 'string', description: 'Resource name' },
description: { type: 'string', description: 'Resource description' },
type: { type: 'string', description: 'Project type (PROJECT or COMPONENT)' },
createdAt: { type: 'string', description: 'Creation timestamp' },
updatedAt: { type: 'string', description: 'Last update timestamp' },
lastEditedAt: { type: 'string', description: 'Last edited timestamp' },
lastPublishedAt: { type: 'string', description: 'Last published timestamp' },
archivedAt: { type: 'string', description: 'Archived timestamp' },
trashedAt: { type: 'string', description: 'Trashed timestamp' },
// List outputs
projects: {
type: 'json',
description: 'List of projects with id, title, status, type, creator, owner, createdAt',
},
runs: {
type: 'json',
description:
'List of runs with runId, status, runUrl, startTime, endTime, elapsedTime, projectVersion',
},
users: { type: 'json', description: 'List of users with id, name, email, role' },
groups: { type: 'json', description: 'List of groups with id, name, createdAt' },
collections: {
type: 'json',
description: 'List of collections with id, name, description, creator',
},
connections: {
type: 'json',
description:
'List of data connections with id, name, type, description, connectViaSsh, includeMagic, allowWritebackCells',
},
tables: {
type: 'json',
description: 'List of queried tables with dataConnectionId, dataConnectionName, tableName',
},
categories: {
type: 'json',
description: 'Project categories with name and description',
},
creator: { type: 'json', description: 'Creator details ({ email, id })' },
owner: { type: 'json', description: 'Owner details ({ email })' },
total: { type: 'number', description: 'Total results returned' },
// Cancel output
success: { type: 'boolean', description: 'Whether the operation succeeded' },
// Data connection flags
connectViaSsh: { type: 'boolean', description: 'SSH tunneling enabled' },
includeMagic: { type: 'boolean', description: 'Magic AI features enabled' },
allowWritebackCells: { type: 'boolean', description: 'Writeback cells allowed' },
},
}

View File

@@ -55,6 +55,7 @@ import { GrafanaBlock } from '@/blocks/blocks/grafana'
import { GrainBlock } from '@/blocks/blocks/grain'
import { GreptileBlock } from '@/blocks/blocks/greptile'
import { GuardrailsBlock } from '@/blocks/blocks/guardrails'
import { HexBlock } from '@/blocks/blocks/hex'
import { HubSpotBlock } from '@/blocks/blocks/hubspot'
import { HuggingFaceBlock } from '@/blocks/blocks/huggingface'
import { HumanInTheLoopBlock } from '@/blocks/blocks/human_in_the_loop'
@@ -240,6 +241,7 @@ export const registry: Record<string, BlockConfig> = {
grain: GrainBlock,
greptile: GreptileBlock,
guardrails: GuardrailsBlock,
hex: HexBlock,
hubspot: HubSpotBlock,
huggingface: HuggingFaceBlock,
human_in_the_loop: HumanInTheLoopBlock,

View File

@@ -5819,3 +5819,15 @@ export function RedisIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
/** Hex brand logo rendered as an inline SVG. */
export function HexIcon(props: SVGProps<SVGSVGElement>) {
  // Single compound path tracing the "HEX" letterforms.
  const hexLogoPath =
    'm250.11,0v199.49h-50V0H0v600h200.11v-300.69h50v300.69h200.18V0h-200.18Zm249.9,0v600h450.29v-250.23h-200.2v149h-50v-199.46h250.2V0h-450.29Zm200.09,199.49v-99.49h50v99.49h-50Zm550.02,0V0h200.18v150l-100,100.09,100,100.09v249.82h-200.18v-300.69h-50v300.69h-200.11v-249.82l100.11-100.09-100.11-100.09V0h200.11v199.49h50Z'
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1450.3 600'>
      <path fill='#5F509D' fillRule='evenodd' d={hexLogoPath} />
    </svg>
  )
}

View File

@@ -159,6 +159,7 @@ export const DEFAULTS = {
MAX_FOREACH_ITEMS: 1000,
MAX_PARALLEL_BRANCHES: 20,
MAX_WORKFLOW_DEPTH: 10,
MAX_SSE_CHILD_DEPTH: 3,
EXECUTION_TIME: 0,
TOKENS: {
PROMPT: 0,

View File

@@ -7,6 +7,7 @@ interface ChildWorkflowErrorOptions {
childTraceSpans?: TraceSpan[]
executionResult?: ExecutionResult
childWorkflowSnapshotId?: string
childWorkflowInstanceId?: string
cause?: Error
}
@@ -18,6 +19,8 @@ export class ChildWorkflowError extends Error {
readonly childWorkflowName: string
readonly executionResult?: ExecutionResult
readonly childWorkflowSnapshotId?: string
/** Per-invocation unique ID used to correlate child block events with this workflow block. */
readonly childWorkflowInstanceId?: string
constructor(options: ChildWorkflowErrorOptions) {
super(options.message, { cause: options.cause })
@@ -26,6 +29,7 @@ export class ChildWorkflowError extends Error {
this.childTraceSpans = options.childTraceSpans ?? []
this.executionResult = options.executionResult
this.childWorkflowSnapshotId = options.childWorkflowSnapshotId
this.childWorkflowInstanceId = options.childWorkflowInstanceId
}
static isChildWorkflowError(error: unknown): error is ChildWorkflowError {

View File

@@ -166,6 +166,9 @@ export class BlockExecutor {
this.state.setBlockOutput(node.id, normalizedOutput, duration)
if (!isSentinel && blockLog) {
const childWorkflowInstanceId = normalizedOutput._childWorkflowInstanceId as
| string
| undefined
const displayOutput = filterOutputForLog(block.metadata?.id || '', normalizedOutput, {
block,
})
@@ -178,7 +181,8 @@ export class BlockExecutor {
duration,
blockLog.startedAt,
blockLog.executionOrder,
blockLog.endedAt
blockLog.endedAt,
childWorkflowInstanceId
)
}
@@ -204,6 +208,8 @@ export class BlockExecutor {
parallelId?: string
branchIndex?: number
branchTotal?: number
originalBlockId?: string
isLoopNode?: boolean
} {
const metadata = node?.metadata ?? {}
return {
@@ -212,6 +218,8 @@ export class BlockExecutor {
parallelId: metadata.parallelId,
branchIndex: metadata.branchIndex,
branchTotal: metadata.branchTotal,
originalBlockId: metadata.originalBlockId,
isLoopNode: metadata.isLoopNode,
}
}
@@ -276,6 +284,9 @@ export class BlockExecutor {
)
if (!isSentinel && blockLog) {
const childWorkflowInstanceId = ChildWorkflowError.isChildWorkflowError(error)
? error.childWorkflowInstanceId
: undefined
const displayOutput = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
this.callOnBlockComplete(
ctx,
@@ -286,7 +297,8 @@ export class BlockExecutor {
duration,
blockLog.startedAt,
blockLog.executionOrder,
blockLog.endedAt
blockLog.endedAt,
childWorkflowInstanceId
)
}
@@ -428,7 +440,7 @@ export class BlockExecutor {
block: SerializedBlock,
executionOrder: number
): void {
const blockId = node.id
const blockId = node.metadata?.originalBlockId ?? node.id
const blockName = block.metadata?.name ?? blockId
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
@@ -440,7 +452,8 @@ export class BlockExecutor {
blockName,
blockType,
executionOrder,
iterationContext
iterationContext,
ctx.childWorkflowContext
)
}
}
@@ -454,9 +467,10 @@ export class BlockExecutor {
duration: number,
startedAt: string,
executionOrder: number,
endedAt: string
endedAt: string,
childWorkflowInstanceId?: string
): void {
const blockId = node.id
const blockId = node.metadata?.originalBlockId ?? node.id
const blockName = block.metadata?.name ?? blockId
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
@@ -474,8 +488,10 @@ export class BlockExecutor {
startedAt,
executionOrder,
endedAt,
childWorkflowInstanceId,
},
iterationContext
iterationContext,
ctx.childWorkflowContext
)
}
}

View File

@@ -322,7 +322,9 @@ export class DAGExecutor {
onStream: this.contextExtensions.onStream,
onBlockStart: this.contextExtensions.onBlockStart,
onBlockComplete: this.contextExtensions.onBlockComplete,
onChildWorkflowInstanceReady: this.contextExtensions.onChildWorkflowInstanceReady,
abortSignal: this.contextExtensions.abortSignal,
childWorkflowContext: this.contextExtensions.childWorkflowContext,
includeFileBase64: this.contextExtensions.includeFileBase64,
base64MaxBytes: this.contextExtensions.base64MaxBytes,
runFromBlockContext: overrides?.runFromBlockContext,

View File

@@ -54,6 +54,17 @@ export interface IterationContext {
iterationContainerId?: string
}
export interface ChildWorkflowContext {
/** The workflow block's ID in the parent execution */
parentBlockId: string
/** Display name of the child workflow */
workflowName: string
/** Child workflow ID */
workflowId: string
/** Nesting depth (1 = first level child) */
depth: number
}
export interface ExecutionCallbacks {
onStream?: (streamingExec: any) => Promise<void>
onBlockStart?: (
@@ -61,15 +72,23 @@ export interface ExecutionCallbacks {
blockName: string,
blockType: string,
executionOrder: number,
iterationContext?: IterationContext
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => Promise<void>
onBlockComplete?: (
blockId: string,
blockName: string,
blockType: string,
output: any,
iterationContext?: IterationContext
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => Promise<void>
/** Fires immediately after instanceId is generated, before child execution begins. */
onChildWorkflowInstanceReady?: (
blockId: string,
childWorkflowInstanceId: string,
iterationContext?: IterationContext
) => void
}
export interface ContextExtensions {
@@ -105,7 +124,8 @@ export interface ContextExtensions {
blockName: string,
blockType: string,
executionOrder: number,
iterationContext?: IterationContext
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => Promise<void>
onBlockComplete?: (
blockId: string,
@@ -118,10 +138,23 @@ export interface ContextExtensions {
startedAt: string
executionOrder: number
endedAt: string
/** Per-invocation unique ID linking this workflow block execution to its child block events. */
childWorkflowInstanceId?: string
},
iterationContext?: IterationContext
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => Promise<void>
/** Context identifying this execution as a child of a workflow block */
childWorkflowContext?: ChildWorkflowContext
/** Fires immediately after instanceId is generated, before child execution begins. */
onChildWorkflowInstanceReady?: (
blockId: string,
childWorkflowInstanceId: string,
iterationContext?: IterationContext
) => void
/**
* Run-from-block configuration. When provided, executor runs in partial
* execution mode starting from the specified block.

View File

@@ -6,6 +6,7 @@ import type { BlockOutput } from '@/blocks/types'
import { Executor } from '@/executor'
import { BlockType, DEFAULTS, HTTP } from '@/executor/constants'
import { ChildWorkflowError } from '@/executor/errors/child-workflow-error'
import type { IterationContext } from '@/executor/execution/types'
import type {
BlockHandler,
ExecutionContext,
@@ -44,6 +45,40 @@ export class WorkflowBlockHandler implements BlockHandler {
ctx: ExecutionContext,
block: SerializedBlock,
inputs: Record<string, any>
): Promise<BlockOutput | StreamingExecution> {
return this._executeCore(ctx, block, inputs)
}
async executeWithNode(
ctx: ExecutionContext,
block: SerializedBlock,
inputs: Record<string, any>,
nodeMetadata: {
nodeId: string
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
originalBlockId?: string
isLoopNode?: boolean
}
): Promise<BlockOutput | StreamingExecution> {
return this._executeCore(ctx, block, inputs, nodeMetadata)
}
private async _executeCore(
ctx: ExecutionContext,
block: SerializedBlock,
inputs: Record<string, any>,
nodeMetadata?: {
nodeId: string
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
originalBlockId?: string
isLoopNode?: boolean
}
): Promise<BlockOutput | StreamingExecution> {
logger.info(`Executing workflow block: ${block.id}`)
@@ -58,6 +93,10 @@ export class WorkflowBlockHandler implements BlockHandler {
const workflowMetadata = workflows[workflowId]
let childWorkflowName = workflowMetadata?.name || workflowId
// Unique ID per invocation — used to correlate child block events with this specific
// workflow block execution, preventing cross-iteration child mixing in loop contexts.
const instanceId = crypto.randomUUID()
let childWorkflowSnapshotId: string | undefined
try {
const currentDepth = (ctx.workflowId?.split('_sub_').length || 1) - 1
@@ -115,6 +154,19 @@ export class WorkflowBlockHandler implements BlockHandler {
)
childWorkflowSnapshotId = childSnapshotResult.snapshot.id
const childDepth = (ctx.childWorkflowContext?.depth ?? 0) + 1
const shouldPropagateCallbacks = childDepth <= DEFAULTS.MAX_SSE_CHILD_DEPTH
if (shouldPropagateCallbacks) {
const effectiveBlockId = nodeMetadata
? (nodeMetadata.originalBlockId ?? nodeMetadata.nodeId)
: block.id
const iterationContext = nodeMetadata
? this.getIterationContext(ctx, nodeMetadata)
: undefined
ctx.onChildWorkflowInstanceReady?.(effectiveBlockId, instanceId, iterationContext)
}
const subExecutor = new Executor({
workflow: childWorkflow.serializedState,
workflowInput: childWorkflowInput,
@@ -127,6 +179,18 @@ export class WorkflowBlockHandler implements BlockHandler {
userId: ctx.userId,
executionId: ctx.executionId,
abortSignal: ctx.abortSignal,
...(shouldPropagateCallbacks && {
onBlockStart: ctx.onBlockStart,
onBlockComplete: ctx.onBlockComplete,
onStream: ctx.onStream as ((streamingExecution: unknown) => Promise<void>) | undefined,
onChildWorkflowInstanceReady: ctx.onChildWorkflowInstanceReady,
childWorkflowContext: {
parentBlockId: instanceId,
workflowName: childWorkflowName,
workflowId,
depth: childDepth,
},
}),
},
})
@@ -148,6 +212,7 @@ export class WorkflowBlockHandler implements BlockHandler {
workflowId,
childWorkflowName,
duration,
instanceId,
childTraceSpans,
childWorkflowSnapshotId
)
@@ -183,11 +248,46 @@ export class WorkflowBlockHandler implements BlockHandler {
childTraceSpans,
executionResult,
childWorkflowSnapshotId,
childWorkflowInstanceId: instanceId,
cause: error instanceof Error ? error : undefined,
})
}
}
private getIterationContext(
ctx: ExecutionContext,
nodeMetadata: {
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
isLoopNode?: boolean
}
): IterationContext | undefined {
if (nodeMetadata.branchIndex !== undefined && nodeMetadata.branchTotal !== undefined) {
return {
iterationCurrent: nodeMetadata.branchIndex,
iterationTotal: nodeMetadata.branchTotal,
iterationType: 'parallel',
iterationContainerId: nodeMetadata.parallelId,
}
}
if (nodeMetadata.isLoopNode && nodeMetadata.loopId) {
const loopScope = ctx.loopExecutions?.get(nodeMetadata.loopId)
if (loopScope && loopScope.iteration !== undefined) {
return {
iterationCurrent: loopScope.iteration,
iterationTotal: loopScope.maxIterations,
iterationType: 'loop',
iterationContainerId: nodeMetadata.loopId,
}
}
}
return undefined
}
/**
* Builds a cleaner error message for nested workflow errors.
* Parses nested error messages to extract workflow chain and root error.
@@ -525,6 +625,7 @@ export class WorkflowBlockHandler implements BlockHandler {
childWorkflowId: string,
childWorkflowName: string,
duration: number,
instanceId: string,
childTraceSpans?: WorkflowTraceSpan[],
childWorkflowSnapshotId?: string
): BlockOutput {
@@ -538,6 +639,7 @@ export class WorkflowBlockHandler implements BlockHandler {
childWorkflowName,
childTraceSpans: childTraceSpans || [],
childWorkflowSnapshotId,
childWorkflowInstanceId: instanceId,
})
}
@@ -548,6 +650,7 @@ export class WorkflowBlockHandler implements BlockHandler {
...(childWorkflowSnapshotId ? { childWorkflowSnapshotId } : {}),
result,
childTraceSpans: childTraceSpans || [],
_childWorkflowInstanceId: instanceId,
} as Record<string, any>
}
}

View File

@@ -1,7 +1,11 @@
import type { TraceSpan } from '@/lib/logs/types'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import type { BlockOutput } from '@/blocks/types'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type {
ChildWorkflowContext,
IterationContext,
SerializableExecutionState,
} from '@/executor/execution/types'
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
@@ -239,15 +243,29 @@ export interface ExecutionContext {
blockId: string,
blockName: string,
blockType: string,
executionOrder: number
executionOrder: number,
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => Promise<void>
onBlockComplete?: (
blockId: string,
blockName: string,
blockType: string,
output: any
output: any,
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => Promise<void>
/** Context identifying this execution as a child of a workflow block */
childWorkflowContext?: ChildWorkflowContext
/** Fires immediately after instanceId is generated, before child execution begins. */
onChildWorkflowInstanceReady?: (
blockId: string,
childWorkflowInstanceId: string,
iterationContext?: IterationContext
) => void
/**
* AbortSignal for cancellation support.
* When the signal is aborted, execution should stop gracefully.
@@ -350,6 +368,8 @@ export interface BlockHandler {
parallelId?: string
branchIndex?: number
branchTotal?: number
originalBlockId?: string
isLoopNode?: boolean
}
) => Promise<BlockOutput | StreamingExecution>
}

View File

@@ -1,6 +1,7 @@
import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import type {
BlockChildWorkflowStartedData,
BlockCompletedData,
BlockErrorData,
BlockStartedData,
@@ -83,6 +84,9 @@ async function processSSEStream(
case 'block:error':
callbacks.onBlockError?.(event.data)
break
case 'block:childWorkflowStarted':
callbacks.onBlockChildWorkflowStarted?.(event.data)
break
case 'stream:chunk':
callbacks.onStreamChunk?.(event.data)
break
@@ -110,6 +114,7 @@ export interface ExecutionStreamCallbacks {
onBlockStarted?: (data: BlockStartedData) => void
onBlockCompleted?: (data: BlockCompletedData) => void
onBlockError?: (data: BlockErrorData) => void
onBlockChildWorkflowStarted?: (data: BlockChildWorkflowStartedData) => void
onStreamChunk?: (data: StreamChunkData) => void
onStreamDone?: (data: StreamDoneData) => void
}

View File

@@ -25,6 +25,12 @@ import {
renderPasswordResetEmail,
renderWelcomeEmail,
} from '@/components/emails'
import {
evictCachedMetadata,
isMetadataUrl,
resolveClientMetadata,
upsertCimdClient,
} from '@/lib/auth/cimd'
import { sendPlanWelcomeEmail } from '@/lib/billing'
import { authorizeSubscriptionReference } from '@/lib/billing/authorization'
import { handleNewUser } from '@/lib/billing/core/usage'
@@ -541,6 +547,28 @@ export const auth = betterAuth({
}
}
if (ctx.path === '/oauth2/authorize' || ctx.path === '/oauth2/token') {
const clientId = (ctx.query?.client_id ?? ctx.body?.client_id) as string | undefined
if (clientId && isMetadataUrl(clientId)) {
try {
const { metadata, fromCache } = await resolveClientMetadata(clientId)
if (!fromCache) {
try {
await upsertCimdClient(metadata)
} catch (upsertErr) {
evictCachedMetadata(clientId)
throw upsertErr
}
}
} catch (err) {
logger.warn('CIMD resolution failed', {
clientId,
error: err instanceof Error ? err.message : String(err),
})
}
}
}
return
}),
},
@@ -560,6 +588,9 @@ export const auth = betterAuth({
allowDynamicClientRegistration: true,
useJWTPlugin: true,
scopes: ['openid', 'profile', 'email', 'offline_access', 'mcp:tools'],
metadata: {
client_id_metadata_document_supported: true,
} as Record<string, unknown>,
}),
oneTimeToken({
expiresIn: 24 * 60 * 60, // 24 hours - Socket.IO handles connection persistence with heartbeats

168
apps/sim/lib/auth/cimd.ts Normal file
View File

@@ -0,0 +1,168 @@
import { randomUUID } from 'node:crypto'
import { db } from '@sim/db'
import { oauthApplication } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
const logger = createLogger('cimd')
/**
 * OAuth Client ID Metadata Document (CIMD): a JSON document hosted at the
 * client_id URL describing a public OAuth client.
 */
interface ClientMetadataDocument {
  /** Must exactly equal the URL the document was fetched from (validated). */
  client_id: string
  /** Human-readable client name; required by validation. */
  client_name: string
  /** Optional logo; silently dropped unless it is a valid https URL. */
  logo_uri?: string
  /** Allowed redirect URIs; must be non-empty, http(s)-schemed, and comma-free. */
  redirect_uris: string[]
  client_uri?: string
  policy_uri?: string
  tos_uri?: string
  contacts?: string[]
  scope?: string
}
/**
 * A client_id is treated as a CIMD document reference when it is an https URL.
 */
export function isMetadataUrl(clientId: string): boolean {
  const httpsPrefix = 'https://'
  return clientId.slice(0, httpsPrefix.length) === httpsPrefix
}
/**
 * Fetches a Client ID Metadata Document from `url` and validates it.
 *
 * Enforces: https-only document URL, size/timeout-limited SSRF-safe fetch,
 * client_id/url equality, at least one http(s) comma-free redirect_uri, and a
 * non-empty client_name. A non-https logo_uri is dropped rather than rejected.
 *
 * @throws Error describing the first validation failure encountered.
 */
async function fetchClientMetadata(url: string): Promise<ClientMetadataDocument> {
  const parsedUrl = new URL(url)
  if (parsedUrl.protocol !== 'https:') {
    throw new Error('CIMD URL must use HTTPS')
  }

  // SSRF-safe fetch with a hard timeout and a response size cap.
  const res = await secureFetchWithValidation(url, {
    headers: { Accept: 'application/json' },
    timeout: 5000,
    maxResponseBytes: 256 * 1024,
  })
  if (!res.ok) {
    throw new Error(`CIMD fetch failed: ${res.status} ${res.statusText}`)
  }

  // Guard against non-object JSON bodies (null, arrays, primitives) so
  // property access below fails with a clear error instead of a TypeError.
  const body: unknown = await res.json()
  if (typeof body !== 'object' || body === null || Array.isArray(body)) {
    throw new Error('CIMD document must be a JSON object')
  }
  const doc = body as ClientMetadataDocument

  if (doc.client_id !== url) {
    throw new Error(`CIMD client_id mismatch: document has "${doc.client_id}", expected "${url}"`)
  }
  if (!Array.isArray(doc.redirect_uris) || doc.redirect_uris.length === 0) {
    throw new Error('CIMD document must contain at least one redirect_uri')
  }
  for (const uri of doc.redirect_uris) {
    let redirectUrl: URL
    try {
      redirectUrl = new URL(uri)
    } catch {
      throw new Error(`Invalid redirect_uri: ${uri}`)
    }
    if (redirectUrl.protocol !== 'https:' && redirectUrl.protocol !== 'http:') {
      throw new Error(`Invalid redirect_uri scheme: ${redirectUrl.protocol}`)
    }
    // Redirect URIs are persisted comma-joined elsewhere in this module;
    // an embedded comma would corrupt that stored list.
    if (uri.includes(',')) {
      throw new Error(`redirect_uri must not contain commas: ${uri}`)
    }
  }

  // logo_uri is optional cosmetic data: drop it on any problem, don't fail.
  if (doc.logo_uri) {
    try {
      const logoParsed = new URL(doc.logo_uri)
      if (logoParsed.protocol !== 'https:') {
        doc.logo_uri = undefined
      }
    } catch {
      doc.logo_uri = undefined
    }
  }

  if (!doc.client_name || typeof doc.client_name !== 'string') {
    throw new Error('CIMD document must contain a client_name')
  }
  return doc
}
/** How long a successfully fetched document may be served from cache. */
const CACHE_TTL_MS = 5 * 60 * 1000
/** How long a fetch/validation failure suppresses refetch attempts. */
const NEGATIVE_CACHE_TTL_MS = 60 * 1000

// In-memory, per-process caches keyed by the CIMD document URL.
const cache = new Map<string, { doc: ClientMetadataDocument; expiresAt: number }>()
const failureCache = new Map<string, { error: string; expiresAt: number }>()
// Coalesces concurrent fetches of the same URL into one request.
const inflight = new Map<string, Promise<ClientMetadataDocument>>()

interface ResolveResult {
  metadata: ClientMetadataDocument
  /** True when served from the positive cache (no fresh fetch occurred). */
  fromCache: boolean
}
/**
 * Resolves a CIMD document URL to validated client metadata.
 *
 * Serves from a positive cache while fresh, fails fast from a negative cache
 * after a recent failure, and deduplicates concurrent fetches of the same URL.
 * Callers that join an in-flight fetch receive fromCache: false, the same as
 * the initiating caller.
 *
 * NOTE(review): entries are only replaced when the same URL is re-requested;
 * URLs never seen again are not evicted, so these maps can grow with the
 * number of distinct client_id URLs — confirm this is acceptable here.
 */
export async function resolveClientMetadata(url: string): Promise<ResolveResult> {
  const cached = cache.get(url)
  if (cached && Date.now() < cached.expiresAt) {
    return { metadata: cached.doc, fromCache: true }
  }

  // Negative cache: rethrow the recorded failure without hitting the network.
  const failed = failureCache.get(url)
  if (failed && Date.now() < failed.expiresAt) {
    throw new Error(failed.error)
  }

  // Join an already-running fetch for this URL instead of starting another.
  const pending = inflight.get(url)
  if (pending) {
    return pending.then((doc) => ({ metadata: doc, fromCache: false }))
  }

  const promise = fetchClientMetadata(url)
    .then((doc) => {
      // Success: refresh the positive cache and clear any stale failure entry.
      cache.set(url, { doc, expiresAt: Date.now() + CACHE_TTL_MS })
      failureCache.delete(url)
      return doc
    })
    .catch((err) => {
      // Failure: record it so retries are suppressed for NEGATIVE_CACHE_TTL_MS.
      const message = err instanceof Error ? err.message : String(err)
      failureCache.set(url, { error: message, expiresAt: Date.now() + NEGATIVE_CACHE_TTL_MS })
      throw err
    })
    .finally(() => {
      inflight.delete(url)
    })

  inflight.set(url, promise)
  return promise.then((doc) => ({ metadata: doc, fromCache: false }))
}
/**
 * Removes a URL from the positive metadata cache so the next resolution
 * refetches the document. Does not touch the negative (failure) cache.
 */
export function evictCachedMetadata(url: string): void {
  cache.delete(url)
}
/**
 * Inserts or refreshes the oauthApplication row for a CIMD-registered client,
 * keyed by clientId (the metadata document URL).
 *
 * On conflict, mutable fields (name, icon, redirect URLs, updatedAt) are
 * refreshed while id, createdAt, and userId are preserved.
 */
export async function upsertCimdClient(metadata: ClientMetadataDocument): Promise<void> {
  const now = new Date()
  // Stored as one comma-joined column; fetch-time validation rejects URIs
  // containing commas, so the join is unambiguous.
  const redirectURLs = metadata.redirect_uris.join(',')
  await db
    .insert(oauthApplication)
    .values({
      id: randomUUID(),
      clientId: metadata.client_id,
      name: metadata.client_name,
      icon: metadata.logo_uri ?? null,
      redirectURLs,
      // CIMD clients are always public: no client secret is stored.
      type: 'public',
      clientSecret: null,
      userId: null,
      createdAt: now,
      updatedAt: now,
    })
    .onConflictDoUpdate({
      target: oauthApplication.clientId,
      set: {
        name: metadata.client_name,
        icon: metadata.logo_uri ?? null,
        redirectURLs,
        type: 'public',
        clientSecret: null,
        updatedAt: now,
      },
    })
  logger.info('Upserted CIMD client', {
    clientId: metadata.client_id,
    name: metadata.client_name,
  })
}

View File

@@ -0,0 +1,176 @@
import { createEnvMock, createMockRedis, loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
const mockRedisInstance = createMockRedis()
vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/core/config/env', () => createEnvMock({ REDIS_URL: 'redis://localhost:6379' }))
vi.mock('ioredis', () => ({
default: vi.fn(() => mockRedisInstance),
}))
describe('redis config', () => {
beforeEach(() => {
  // Fresh mocks + fake timers so each test drives the PING interval manually.
  vi.clearAllMocks()
  vi.useFakeTimers()
})

afterEach(() => {
  vi.useRealTimers()
  // Reset the module registry so each test re-imports a fresh ./redis singleton.
  vi.resetModules()
})
describe('onRedisReconnect', () => {
  it('should register and invoke reconnect listeners', async () => {
    const { onRedisReconnect, getRedisClient } = await import('./redis')
    const listener = vi.fn()
    onRedisReconnect(listener)
    getRedisClient()

    // Three consecutive PING failures (one per 30s tick) force a reconnect,
    // which should notify the registered listener exactly once.
    mockRedisInstance.ping.mockRejectedValue(new Error('ETIMEDOUT'))
    await vi.advanceTimersByTimeAsync(30_000)
    await vi.advanceTimersByTimeAsync(30_000)
    await vi.advanceTimersByTimeAsync(30_000)

    expect(listener).toHaveBeenCalledTimes(1)
  })

  it('should not invoke listeners when PINGs succeed', async () => {
    const { onRedisReconnect, getRedisClient } = await import('./redis')
    const listener = vi.fn()
    onRedisReconnect(listener)
    getRedisClient()

    // Healthy PINGs across several intervals: no reconnect, no notifications.
    mockRedisInstance.ping.mockResolvedValue('PONG')
    await vi.advanceTimersByTimeAsync(30_000)
    await vi.advanceTimersByTimeAsync(30_000)
    await vi.advanceTimersByTimeAsync(30_000)

    expect(listener).not.toHaveBeenCalled()
  })

  it('should reset failure count on successful PING', async () => {
    const { onRedisReconnect, getRedisClient } = await import('./redis')
    const listener = vi.fn()
    onRedisReconnect(listener)
    getRedisClient()

    // 2 failures then a success — should reset counter
    mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
    await vi.advanceTimersByTimeAsync(30_000)
    mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
    await vi.advanceTimersByTimeAsync(30_000)
    mockRedisInstance.ping.mockResolvedValueOnce('PONG')
    await vi.advanceTimersByTimeAsync(30_000)

    // 2 more failures — should NOT trigger reconnect (counter was reset)
    mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
    await vi.advanceTimersByTimeAsync(30_000)
    mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
    await vi.advanceTimersByTimeAsync(30_000)

    expect(listener).not.toHaveBeenCalled()
  })

  it('should call disconnect(true) after 3 consecutive PING failures', async () => {
    const { getRedisClient } = await import('./redis')
    getRedisClient()

    mockRedisInstance.ping.mockRejectedValue(new Error('ETIMEDOUT'))
    await vi.advanceTimersByTimeAsync(30_000)
    await vi.advanceTimersByTimeAsync(30_000)
    // Two failures is below the threshold — no forced disconnect yet.
    expect(mockRedisInstance.disconnect).not.toHaveBeenCalled()
    // Third failure trips the threshold and forces disconnect(true).
    await vi.advanceTimersByTimeAsync(30_000)
    expect(mockRedisInstance.disconnect).toHaveBeenCalledWith(true)
  })

  it('should handle listener errors gracefully without breaking health check', async () => {
    const { onRedisReconnect, getRedisClient } = await import('./redis')
    // A throwing listener must not prevent later listeners from running.
    const badListener = vi.fn(() => {
      throw new Error('listener crashed')
    })
    const goodListener = vi.fn()
    onRedisReconnect(badListener)
    onRedisReconnect(goodListener)
    getRedisClient()

    mockRedisInstance.ping.mockRejectedValue(new Error('timeout'))
    await vi.advanceTimersByTimeAsync(30_000)
    await vi.advanceTimersByTimeAsync(30_000)
    await vi.advanceTimersByTimeAsync(30_000)

    expect(badListener).toHaveBeenCalledTimes(1)
    expect(goodListener).toHaveBeenCalledTimes(1)
  })
})
describe('closeRedisConnection', () => {
  it('should clear the PING interval', async () => {
    const { getRedisClient, closeRedisConnection } = await import('./redis')
    getRedisClient()
    mockRedisInstance.quit.mockResolvedValue('OK')
    await closeRedisConnection()

    // After closing, PING failures should not trigger disconnect
    mockRedisInstance.ping.mockRejectedValue(new Error('timeout'))
    await vi.advanceTimersByTimeAsync(30_000 * 5)
    expect(mockRedisInstance.disconnect).not.toHaveBeenCalled()
  })
})
describe('retryStrategy', () => {
  /**
   * Re-imports ./redis with an ioredis mock that captures the constructor
   * config, and returns the retryStrategy function it was configured with.
   */
  async function captureRetryStrategy(): Promise<(times: number) => number> {
    vi.resetModules()
    vi.doMock('@sim/logger', () => loggerMock)
    vi.doMock('@/lib/core/config/env', () =>
      createEnvMock({ REDIS_URL: 'redis://localhost:6379' })
    )
    let capturedConfig: Record<string, unknown> = {}
    vi.doMock('ioredis', () => ({
      default: vi.fn((_url: string, config: Record<string, unknown>) => {
        capturedConfig = config
        return { ping: vi.fn(), on: vi.fn() }
      }),
    }))
    const { getRedisClient } = await import('./redis')
    getRedisClient()
    return capturedConfig.retryStrategy as (times: number) => number
  }

  it('should use exponential backoff with jitter', async () => {
    const retryStrategy = await captureRetryStrategy()
    expect(retryStrategy).toBeDefined()

    // Base for attempt 1: min(1000 * 2^0, 10000) = 1000, jitter up to 300
    const delay1 = retryStrategy(1)
    expect(delay1).toBeGreaterThanOrEqual(1000)
    expect(delay1).toBeLessThanOrEqual(1300)

    // Base for attempt 3: min(1000 * 2^2, 10000) = 4000, jitter up to 1200
    const delay3 = retryStrategy(3)
    expect(delay3).toBeGreaterThanOrEqual(4000)
    expect(delay3).toBeLessThanOrEqual(5200)

    // Base for attempt 5: min(1000 * 2^4, 10000) = 10000, jitter up to 3000
    const delay5 = retryStrategy(5)
    expect(delay5).toBeGreaterThanOrEqual(10000)
    expect(delay5).toBeLessThanOrEqual(13000)
  })

  it('should cap at 30s for attempts beyond 10', async () => {
    const retryStrategy = await captureRetryStrategy()
    expect(retryStrategy(11)).toBe(30000)
    expect(retryStrategy(100)).toBe(30000)
  })
})
})

View File

@@ -7,6 +7,63 @@ const logger = createLogger('Redis')
const redisUrl = env.REDIS_URL
let globalRedisClient: Redis | null = null
let pingFailures = 0
let pingInterval: NodeJS.Timeout | null = null
let pingInFlight = false
const PING_INTERVAL_MS = 30_000
const MAX_PING_FAILURES = 3
/** Callbacks invoked when the PING health check forces a reconnect. */
const reconnectListeners: Array<() => void> = []
/**
 * Subscribe to forced-reconnect events emitted by the PING health check.
 * Typical use: invalidating caches or adapters that captured a reference
 * to the old Redis client and must be rebuilt after reconnection.
 */
export function onRedisReconnect(cb: () => void): void {
  reconnectListeners.push(cb)
}
/**
 * Start a background health check that PINGs Redis every PING_INTERVAL_MS.
 * After MAX_PING_FAILURES consecutive failures it notifies all reconnect
 * listeners and force-disconnects the client. Idempotent: a second call
 * while an interval is already active is a no-op.
 */
function startPingHealthCheck(redis: Redis): void {
  if (pingInterval) return
  pingInterval = setInterval(async () => {
    // Guard against overlapping checks when a PING hangs longer than the interval.
    if (pingInFlight) return
    pingInFlight = true
    try {
      await redis.ping()
      pingFailures = 0
    } catch (error) {
      pingFailures++
      logger.warn('Redis PING failed', {
        consecutiveFailures: pingFailures,
        error: error instanceof Error ? error.message : String(error),
      })
      if (pingFailures >= MAX_PING_FAILURES) {
        // Derive the threshold in the message from MAX_PING_FAILURES so the
        // log cannot drift out of sync if the constant changes.
        logger.error(
          `Redis PING failed ${MAX_PING_FAILURES} consecutive times — forcing reconnect`,
          { consecutiveFailures: pingFailures }
        )
        pingFailures = 0
        // A throwing listener must not prevent the remaining listeners or
        // the forced disconnect below from running.
        for (const cb of reconnectListeners) {
          try {
            cb()
          } catch (cbError) {
            logger.error('Redis reconnect listener error', { error: cbError })
          }
        }
        try {
          // disconnect(true) asks ioredis to re-establish the connection — TODO
          // confirm against the pinned ioredis version's documented semantics.
          redis.disconnect(true)
        } catch (disconnectError) {
          logger.error('Error during forced Redis disconnect', { error: disconnectError })
        }
      }
    } finally {
      pingInFlight = false
    }
  }, PING_INTERVAL_MS)
}
/**
* Get a Redis client instance.
@@ -35,8 +92,10 @@ export function getRedisClient(): Redis | null {
logger.error(`Redis reconnection attempt ${times}`, { nextRetryMs: 30000 })
return 30000
}
const delay = Math.min(times * 500, 5000)
logger.warn(`Redis reconnecting`, { attempt: times, nextRetryMs: delay })
const base = Math.min(1000 * 2 ** (times - 1), 10000)
const jitter = Math.random() * base * 0.3
const delay = Math.round(base + jitter)
logger.warn('Redis reconnecting', { attempt: times, nextRetryMs: delay })
return delay
},
@@ -54,6 +113,8 @@ export function getRedisClient(): Redis | null {
globalRedisClient.on('close', () => logger.warn('Redis connection closed'))
globalRedisClient.on('end', () => logger.error('Redis connection ended'))
startPingHealthCheck(globalRedisClient)
return globalRedisClient
} catch (error) {
logger.error('Failed to initialize Redis client', { error })
@@ -118,6 +179,11 @@ export async function releaseLock(lockKey: string, value: string): Promise<boole
* Use for graceful shutdown.
*/
export async function closeRedisConnection(): Promise<void> {
if (pingInterval) {
clearInterval(pingInterval)
pingInterval = null
}
if (globalRedisClient) {
try {
await globalRedisClient.quit()

View File

@@ -172,7 +172,7 @@ describe('RateLimiter', () => {
)
})
it('should deny on storage error (fail closed)', async () => {
it('should allow on storage error (fail open)', async () => {
mockAdapter.consumeTokens.mockRejectedValue(new Error('Storage error'))
const result = await rateLimiter.checkRateLimitWithSubscription(
@@ -182,8 +182,8 @@ describe('RateLimiter', () => {
false
)
expect(result.allowed).toBe(false)
expect(result.remaining).toBe(0)
expect(result.allowed).toBe(true)
expect(result.remaining).toBe(1)
})
it('should work for all non-manual trigger types', async () => {

View File

@@ -100,17 +100,16 @@ export class RateLimiter {
retryAfterMs: result.retryAfterMs,
}
} catch (error) {
logger.error('Rate limit storage error - failing closed (denying request)', {
logger.error('Rate limit storage error - failing open (allowing request)', {
error: error instanceof Error ? error.message : String(error),
userId,
triggerType,
isAsync,
})
return {
allowed: false,
remaining: 0,
allowed: true,
remaining: 1,
resetAt: new Date(Date.now() + RATE_LIMIT_WINDOW_MS),
retryAfterMs: RATE_LIMIT_WINDOW_MS,
}
}
}

View File

@@ -0,0 +1,129 @@
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@sim/logger', () => loggerMock)
// Callbacks registered via onRedisReconnect are collected here so tests can
// simulate a forced Redis reconnect by invoking them directly.
const reconnectCallbacks: Array<() => void> = []
// NOTE: vi.mock calls are hoisted above imports by vitest — keep these
// factories self-contained (they may only reference hoist-safe bindings).
vi.mock('@/lib/core/config/redis', () => ({
getRedisClient: vi.fn(() => null),
onRedisReconnect: vi.fn((cb: () => void) => {
reconnectCallbacks.push(cb)
}),
}))
vi.mock('@/lib/core/storage', () => ({
getStorageMethod: vi.fn(() => 'db'),
}))
// Token-bucket constructors are replaced with tagged stubs so tests can
// assert which adapter the factory chose via toEqual({ type: ... }).
vi.mock('./db-token-bucket', () => ({
DbTokenBucket: vi.fn(() => ({ type: 'db' })),
}))
vi.mock('./redis-token-bucket', () => ({
RedisTokenBucket: vi.fn(() => ({ type: 'redis' })),
}))
// Exercises the storage-adapter factory: method selection (redis vs db),
// fallback when Redis is unavailable, adapter caching, and cache
// invalidation through the onRedisReconnect listener.
describe('rate limit storage factory', () => {
beforeEach(() => {
vi.clearAllMocks()
reconnectCallbacks.length = 0
})
afterEach(() => {
// Drop the module cache so each test re-imports a fresh factory module.
vi.resetModules()
})
it('should fall back to DbTokenBucket when Redis is configured but client unavailable', async () => {
const { getStorageMethod } = await import('@/lib/core/storage')
vi.mocked(getStorageMethod).mockReturnValue('redis')
const { getRedisClient } = await import('@/lib/core/config/redis')
vi.mocked(getRedisClient).mockReturnValue(null)
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
resetStorageAdapter()
const adapter = createStorageAdapter()
expect(adapter).toEqual({ type: 'db' })
})
it('should use RedisTokenBucket when Redis client is available', async () => {
const { getStorageMethod } = await import('@/lib/core/storage')
vi.mocked(getStorageMethod).mockReturnValue('redis')
const { getRedisClient } = await import('@/lib/core/config/redis')
// Minimal stand-in for a live Redis client; only its presence matters here.
vi.mocked(getRedisClient).mockReturnValue({ ping: vi.fn() } as never)
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
resetStorageAdapter()
const adapter = createStorageAdapter()
expect(adapter).toEqual({ type: 'redis' })
})
it('should use DbTokenBucket when storage method is db', async () => {
const { getStorageMethod } = await import('@/lib/core/storage')
vi.mocked(getStorageMethod).mockReturnValue('db')
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
resetStorageAdapter()
const adapter = createStorageAdapter()
expect(adapter).toEqual({ type: 'db' })
})
it('should cache the adapter and return same instance', async () => {
const { getStorageMethod } = await import('@/lib/core/storage')
vi.mocked(getStorageMethod).mockReturnValue('db')
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
resetStorageAdapter()
const adapter1 = createStorageAdapter()
const adapter2 = createStorageAdapter()
expect(adapter1).toBe(adapter2)
})
it('should register a reconnect listener that resets cached adapter', async () => {
const { getStorageMethod } = await import('@/lib/core/storage')
vi.mocked(getStorageMethod).mockReturnValue('db')
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
resetStorageAdapter()
const adapter1 = createStorageAdapter()
// Simulate Redis reconnect — should reset cached adapter
expect(reconnectCallbacks.length).toBeGreaterThan(0)
reconnectCallbacks[0]()
// Next call should create a fresh adapter
const adapter2 = createStorageAdapter()
expect(adapter2).not.toBe(adapter1)
})
it('should re-evaluate storage on next call after reconnect resets cache', async () => {
const { getStorageMethod } = await import('@/lib/core/storage')
const { getRedisClient } = await import('@/lib/core/config/redis')
// Start with Redis unavailable — falls back to DB
vi.mocked(getStorageMethod).mockReturnValue('redis')
vi.mocked(getRedisClient).mockReturnValue(null)
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
resetStorageAdapter()
const adapter1 = createStorageAdapter()
expect(adapter1).toEqual({ type: 'db' })
// Simulate reconnect
reconnectCallbacks[0]()
// Now Redis is available
vi.mocked(getRedisClient).mockReturnValue({ ping: vi.fn() } as never)
const adapter2 = createStorageAdapter()
expect(adapter2).toEqual({ type: 'redis' })
})
})

View File

@@ -1,5 +1,5 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import { getRedisClient, onRedisReconnect } from '@/lib/core/config/redis'
import { getStorageMethod, type StorageMethod } from '@/lib/core/storage'
import type { RateLimitStorageAdapter } from './adapter'
import { DbTokenBucket } from './db-token-bucket'
@@ -8,21 +8,33 @@ import { RedisTokenBucket } from './redis-token-bucket'
const logger = createLogger('RateLimitStorage')
let cachedAdapter: RateLimitStorageAdapter | null = null
let reconnectListenerRegistered = false
export function createStorageAdapter(): RateLimitStorageAdapter {
if (cachedAdapter) {
return cachedAdapter
}
if (!reconnectListenerRegistered) {
onRedisReconnect(() => {
cachedAdapter = null
})
reconnectListenerRegistered = true
}
const storageMethod = getStorageMethod()
if (storageMethod === 'redis') {
const redis = getRedisClient()
if (!redis) {
throw new Error('Redis configured but client unavailable')
logger.warn(
'Redis configured but client unavailable - falling back to PostgreSQL for rate limiting'
)
cachedAdapter = new DbTokenBucket()
} else {
logger.info('Rate limiting: Using Redis')
cachedAdapter = new RedisTokenBucket(redis)
}
logger.info('Rate limiting: Using Redis')
cachedAdapter = new RedisTokenBucket(redis)
} else {
logger.info('Rate limiting: Using PostgreSQL')
cachedAdapter = new DbTokenBucket()

View File

@@ -64,10 +64,31 @@ export async function validateUrlWithDNS(
const parsedUrl = new URL(url!)
const hostname = parsedUrl.hostname
try {
const { address } = await dns.lookup(hostname)
const hostnameLower = hostname.toLowerCase()
const cleanHostname =
hostnameLower.startsWith('[') && hostnameLower.endsWith(']')
? hostnameLower.slice(1, -1)
: hostnameLower
if (isPrivateOrReservedIP(address)) {
let isLocalhost = cleanHostname === 'localhost'
if (ipaddr.isValid(cleanHostname)) {
const processedIP = ipaddr.process(cleanHostname).toString()
if (processedIP === '127.0.0.1' || processedIP === '::1') {
isLocalhost = true
}
}
try {
const { address } = await dns.lookup(cleanHostname, { verbatim: true })
const resolvedIsLoopback =
ipaddr.isValid(address) &&
(() => {
const ip = ipaddr.process(address).toString()
return ip === '127.0.0.1' || ip === '::1'
})()
if (isPrivateOrReservedIP(address) && !(isLocalhost && resolvedIsLoopback)) {
logger.warn('URL resolves to blocked IP address', {
paramName,
hostname,
@@ -189,8 +210,6 @@ export async function secureFetchWithPinnedIP(
const agent = isHttps ? new https.Agent(agentOptions) : new http.Agent(agentOptions)
// Remove accept-encoding since Node.js http/https doesn't auto-decompress
// Headers are lowercase due to Web Headers API normalization in executeToolRequest
const { 'accept-encoding': _, ...sanitizedHeaders } = options.headers ?? {}
const requestOptions: http.RequestOptions = {
@@ -200,7 +219,7 @@ export async function secureFetchWithPinnedIP(
method: options.method || 'GET',
headers: sanitizedHeaders,
agent,
timeout: options.timeout || 300000, // Default 5 minutes
timeout: options.timeout || 300000,
}
const protocol = isHttps ? https : http

View File

@@ -569,10 +569,28 @@ describe('validateUrlWithDNS', () => {
expect(result.error).toContain('https://')
})
it('should reject localhost URLs', async () => {
it('should accept https localhost URLs', async () => {
const result = await validateUrlWithDNS('https://localhost/api')
expect(result.isValid).toBe(false)
expect(result.error).toContain('localhost')
expect(result.isValid).toBe(true)
expect(result.resolvedIP).toBeDefined()
})
it('should accept http localhost URLs', async () => {
const result = await validateUrlWithDNS('http://localhost/api')
expect(result.isValid).toBe(true)
expect(result.resolvedIP).toBeDefined()
})
it('should accept IPv4 loopback URLs', async () => {
const result = await validateUrlWithDNS('http://127.0.0.1/api')
expect(result.isValid).toBe(true)
expect(result.resolvedIP).toBeDefined()
})
it('should accept IPv6 loopback URLs', async () => {
const result = await validateUrlWithDNS('http://[::1]/api')
expect(result.isValid).toBe(true)
expect(result.resolvedIP).toBeDefined()
})
it('should reject private IP URLs', async () => {
@@ -898,17 +916,37 @@ describe('validateExternalUrl', () => {
expect(result.isValid).toBe(false)
expect(result.error).toContain('valid URL')
})
})
it.concurrent('should reject localhost', () => {
describe('localhost and loopback addresses', () => {
it.concurrent('should accept https localhost', () => {
const result = validateExternalUrl('https://localhost/api')
expect(result.isValid).toBe(false)
expect(result.error).toContain('localhost')
expect(result.isValid).toBe(true)
})
it.concurrent('should reject 127.0.0.1', () => {
it.concurrent('should accept http localhost', () => {
const result = validateExternalUrl('http://localhost/api')
expect(result.isValid).toBe(true)
})
it.concurrent('should accept https 127.0.0.1', () => {
const result = validateExternalUrl('https://127.0.0.1/api')
expect(result.isValid).toBe(false)
expect(result.error).toContain('private IP')
expect(result.isValid).toBe(true)
})
it.concurrent('should accept http 127.0.0.1', () => {
const result = validateExternalUrl('http://127.0.0.1/api')
expect(result.isValid).toBe(true)
})
it.concurrent('should accept https IPv6 loopback', () => {
const result = validateExternalUrl('https://[::1]/api')
expect(result.isValid).toBe(true)
})
it.concurrent('should accept http IPv6 loopback', () => {
const result = validateExternalUrl('http://[::1]/api')
expect(result.isValid).toBe(true)
})
it.concurrent('should reject 0.0.0.0', () => {
@@ -989,9 +1027,9 @@ describe('validateImageUrl', () => {
expect(result.isValid).toBe(true)
})
it.concurrent('should reject localhost URLs', () => {
it.concurrent('should accept localhost URLs', () => {
const result = validateImageUrl('https://localhost/image.png')
expect(result.isValid).toBe(false)
expect(result.isValid).toBe(true)
})
it.concurrent('should use imageUrl as default param name', () => {

View File

@@ -89,9 +89,9 @@ export function validatePathSegment(
const pathTraversalPatterns = [
'..',
'./',
'.\\.', // Windows path traversal
'%2e%2e', // URL encoded ..
'%252e%252e', // Double URL encoded ..
'.\\.',
'%2e%2e',
'%252e%252e',
'..%2f',
'..%5c',
'%2e%2e%2f',
@@ -391,7 +391,6 @@ export function validateHostname(
const lowerHostname = hostname.toLowerCase()
// Block localhost
if (lowerHostname === 'localhost') {
logger.warn('Hostname is localhost', { paramName })
return {
@@ -400,7 +399,6 @@ export function validateHostname(
}
}
// Use ipaddr.js to check if hostname is an IP and if it's private/reserved
if (ipaddr.isValid(lowerHostname)) {
if (isPrivateOrReservedIP(lowerHostname)) {
logger.warn('Hostname matches blocked IP range', {
@@ -414,7 +412,6 @@ export function validateHostname(
}
}
// Basic hostname format validation
const hostnamePattern =
/^[a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?(\.[a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?)*$/i
@@ -460,10 +457,7 @@ export function validateFileExtension(
}
}
// Remove leading dot if present
const ext = extension.startsWith('.') ? extension.slice(1) : extension
// Normalize to lowercase
const normalizedExt = ext.toLowerCase()
if (!allowedExtensions.map((e) => e.toLowerCase()).includes(normalizedExt)) {
@@ -515,7 +509,6 @@ export function validateMicrosoftGraphId(
}
}
// Check for path traversal patterns (../)
const pathTraversalPatterns = [
'../',
'..\\',
@@ -525,7 +518,7 @@ export function validateMicrosoftGraphId(
'%2e%2e%5c',
'%2e%2e\\',
'..%5c',
'%252e%252e%252f', // double encoded
'%252e%252e%252f',
]
const lowerValue = value.toLowerCase()
@@ -542,7 +535,6 @@ export function validateMicrosoftGraphId(
}
}
// Check for control characters and null bytes
if (/[\x00-\x1f\x7f]/.test(value) || value.includes('%00')) {
logger.warn('Control characters in Microsoft Graph ID', { paramName })
return {
@@ -551,7 +543,6 @@ export function validateMicrosoftGraphId(
}
}
// Check for newlines (which could be used for header injection)
if (value.includes('\n') || value.includes('\r')) {
return {
isValid: false,
@@ -559,8 +550,6 @@ export function validateMicrosoftGraphId(
}
}
// Microsoft Graph IDs can contain many characters, but not suspicious patterns
// We've blocked path traversal, so allow the rest
return { isValid: true, sanitized: value }
}
@@ -583,7 +572,6 @@ export function validateJiraCloudId(
value: string | null | undefined,
paramName = 'cloudId'
): ValidationResult {
// Jira cloud IDs are alphanumeric with hyphens (UUID-like)
return validatePathSegment(value, {
paramName,
allowHyphens: true,
@@ -612,7 +600,6 @@ export function validateJiraIssueKey(
value: string | null | undefined,
paramName = 'issueKey'
): ValidationResult {
// Jira issue keys: letters, numbers, hyphens (PROJECT-123 format)
return validatePathSegment(value, {
paramName,
allowHyphens: true,
@@ -653,7 +640,6 @@ export function validateExternalUrl(
}
}
// Must be a valid URL
let parsedUrl: URL
try {
parsedUrl = new URL(url)
@@ -664,28 +650,29 @@ export function validateExternalUrl(
}
}
// Only allow https protocol
if (parsedUrl.protocol !== 'https:') {
const protocol = parsedUrl.protocol
const hostname = parsedUrl.hostname.toLowerCase()
const cleanHostname =
hostname.startsWith('[') && hostname.endsWith(']') ? hostname.slice(1, -1) : hostname
let isLocalhost = cleanHostname === 'localhost'
if (ipaddr.isValid(cleanHostname)) {
const processedIP = ipaddr.process(cleanHostname).toString()
if (processedIP === '127.0.0.1' || processedIP === '::1') {
isLocalhost = true
}
}
if (protocol !== 'https:' && !(protocol === 'http:' && isLocalhost)) {
return {
isValid: false,
error: `${paramName} must use https:// protocol`,
}
}
// Block private IP ranges and localhost
const hostname = parsedUrl.hostname.toLowerCase()
// Block localhost
if (hostname === 'localhost') {
return {
isValid: false,
error: `${paramName} cannot point to localhost`,
}
}
// Use ipaddr.js to check if hostname is an IP and if it's private/reserved
if (ipaddr.isValid(hostname)) {
if (isPrivateOrReservedIP(hostname)) {
if (!isLocalhost && ipaddr.isValid(cleanHostname)) {
if (isPrivateOrReservedIP(cleanHostname)) {
return {
isValid: false,
error: `${paramName} cannot point to private IP addresses`,

View File

@@ -1,4 +1,5 @@
import { EventEmitter } from 'node:events'
import { createEnvMock, loggerMock } from '@sim/testing'
import { afterEach, describe, expect, it, vi } from 'vitest'
type MockProc = EventEmitter & {
@@ -130,13 +131,7 @@ async function loadExecutionModule(options: {
return next() as any
})
vi.doMock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
vi.doMock('@sim/logger', () => loggerMock)
const secureFetchMock = vi.fn(
options.secureFetchImpl ??
@@ -154,8 +149,12 @@ async function loadExecutionModule(options: {
secureFetchWithValidation: secureFetchMock,
}))
vi.doMock('@/lib/core/config/env', () => ({
env: {
vi.doMock('@/lib/core/utils/logging', () => ({
sanitizeUrlForLog: vi.fn((url: string) => url),
}))
vi.doMock('@/lib/core/config/env', () =>
createEnvMock({
IVM_POOL_SIZE: '1',
IVM_MAX_CONCURRENT: '100',
IVM_MAX_PER_WORKER: '100',
@@ -168,8 +167,8 @@ async function loadExecutionModule(options: {
IVM_DISTRIBUTED_LEASE_MIN_TTL_MS: '1000',
IVM_QUEUE_TIMEOUT_MS: '1000',
...(options.envOverrides ?? {}),
},
}))
})
)
const redisEval = options.redisEvalImpl ? vi.fn(options.redisEvalImpl) : undefined
vi.doMock('@/lib/core/config/redis', () => ({
@@ -319,7 +318,7 @@ describe('isolated-vm scheduler', () => {
expect(result.error?.message).toContain('Too many concurrent')
})
it('fails closed when Redis is configured but unavailable', async () => {
it('falls back to local execution when Redis is configured but unavailable', async () => {
const { executeInIsolatedVM } = await loadExecutionModule({
envOverrides: {
REDIS_URL: 'redis://localhost:6379',
@@ -328,7 +327,7 @@ describe('isolated-vm scheduler', () => {
})
const result = await executeInIsolatedVM({
code: 'return "blocked"',
code: 'return "ok"',
params: {},
envVars: {},
contextVariables: {},
@@ -337,10 +336,11 @@ describe('isolated-vm scheduler', () => {
ownerKey: 'user:redis-down',
})
expect(result.error?.message).toContain('temporarily unavailable')
expect(result.error).toBeUndefined()
expect(result.result).toBe('ok')
})
it('fails closed when Redis lease evaluation errors', async () => {
it('falls back to local execution when Redis lease evaluation errors', async () => {
const { executeInIsolatedVM } = await loadExecutionModule({
envOverrides: {
REDIS_URL: 'redis://localhost:6379',
@@ -356,7 +356,7 @@ describe('isolated-vm scheduler', () => {
})
const result = await executeInIsolatedVM({
code: 'return "blocked"',
code: 'return "ok"',
params: {},
envVars: {},
contextVariables: {},
@@ -365,7 +365,8 @@ describe('isolated-vm scheduler', () => {
ownerKey: 'user:redis-error',
})
expect(result.error?.message).toContain('temporarily unavailable')
expect(result.error).toBeUndefined()
expect(result.result).toBe('ok')
})
it('applies weighted owner scheduling when draining queued executions', async () => {

View File

@@ -619,7 +619,6 @@ function cleanupWorker(workerId: number) {
workerInfo.activeExecutions = 0
workers.delete(workerId)
logger.info('Worker removed from pool', { workerId, poolSize: workers.size })
}
function resetWorkerIdleTimeout(workerId: number) {
@@ -635,7 +634,6 @@ function resetWorkerIdleTimeout(workerId: number) {
workerInfo.idleTimeout = setTimeout(() => {
const w = workers.get(workerId)
if (w && w.activeExecutions === 0) {
logger.info('Cleaning up idle worker', { workerId })
cleanupWorker(workerId)
}
}, WORKER_IDLE_TIMEOUT_MS)
@@ -987,15 +985,8 @@ export async function executeInIsolatedVM(
}
}
if (leaseAcquireResult === 'unavailable') {
maybeCleanupOwner(ownerKey)
return {
result: null,
stdout: '',
error: {
message: 'Code execution is temporarily unavailable. Please try again in a moment.',
name: 'Error',
},
}
logger.warn('Distributed lease unavailable, falling back to local execution', { ownerKey })
// Continue execution — local pool still enforces per-process concurrency limits
}
let settled = false

View File

@@ -20,6 +20,7 @@ import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { Executor } from '@/executor'
import type { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type {
ChildWorkflowContext,
ContextExtensions,
ExecutionCallbacks,
IterationContext,
@@ -128,7 +129,7 @@ export async function executeWorkflowCore(
const { metadata, workflow, input, workflowVariables, selectedOutputs } = snapshot
const { requestId, workflowId, userId, triggerType, executionId, triggerBlockId, useDraftState } =
metadata
const { onBlockStart, onBlockComplete, onStream } = callbacks
const { onBlockStart, onBlockComplete, onStream, onChildWorkflowInstanceReady } = callbacks
const providedWorkspaceId = metadata.workspaceId
if (!providedWorkspaceId) {
@@ -287,11 +288,19 @@ export async function executeWorkflowCore(
startedAt: string
endedAt: string
},
iterationContext?: IterationContext
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => {
await loggingSession.onBlockComplete(blockId, blockName, blockType, output)
if (onBlockComplete) {
await onBlockComplete(blockId, blockName, blockType, output, iterationContext)
await onBlockComplete(
blockId,
blockName,
blockType,
output,
iterationContext,
childWorkflowContext
)
}
}
@@ -320,6 +329,7 @@ export async function executeWorkflowCore(
includeFileBase64,
base64MaxBytes,
stopAfterBlockId: resolvedStopAfterBlockId,
onChildWorkflowInstanceReady,
}
const executorInstance = new Executor({

View File

@@ -1,3 +1,4 @@
import type { ChildWorkflowContext, IterationContext } from '@/executor/execution/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'
export type ExecutionEventType =
@@ -8,6 +9,7 @@ export type ExecutionEventType =
| 'block:started'
| 'block:completed'
| 'block:error'
| 'block:childWorkflowStarted'
| 'stream:chunk'
| 'stream:done'
@@ -81,6 +83,8 @@ export interface BlockStartedEvent extends BaseExecutionEvent {
iterationTotal?: number
iterationType?: SubflowType
iterationContainerId?: string
childWorkflowBlockId?: string
childWorkflowName?: string
}
}
@@ -104,6 +108,10 @@ export interface BlockCompletedEvent extends BaseExecutionEvent {
iterationTotal?: number
iterationType?: SubflowType
iterationContainerId?: string
childWorkflowBlockId?: string
childWorkflowName?: string
/** Per-invocation unique ID for correlating child block events with this workflow block. */
childWorkflowInstanceId?: string
}
}
@@ -127,6 +135,26 @@ export interface BlockErrorEvent extends BaseExecutionEvent {
iterationTotal?: number
iterationType?: SubflowType
iterationContainerId?: string
childWorkflowBlockId?: string
childWorkflowName?: string
/** Per-invocation unique ID for correlating child block events with this workflow block. */
childWorkflowInstanceId?: string
}
}
/**
* Block child workflow started event — fires when a workflow block generates its instanceId,
* before child execution begins. Allows clients to pre-associate the running entry with
* the instanceId so child block events can be correlated in real-time.
*/
export interface BlockChildWorkflowStartedEvent extends BaseExecutionEvent {
type: 'block:childWorkflowStarted'
workflowId: string
data: {
blockId: string
childWorkflowInstanceId: string
iterationCurrent?: number
iterationContainerId?: string
}
}
@@ -164,6 +192,7 @@ export type ExecutionEvent =
| BlockStartedEvent
| BlockCompletedEvent
| BlockErrorEvent
| BlockChildWorkflowStartedEvent
| StreamChunkEvent
| StreamDoneEvent
@@ -174,6 +203,7 @@ export type ExecutionCancelledData = ExecutionCancelledEvent['data']
export type BlockStartedData = BlockStartedEvent['data']
export type BlockCompletedData = BlockCompletedEvent['data']
export type BlockErrorData = BlockErrorEvent['data']
export type BlockChildWorkflowStartedData = BlockChildWorkflowStartedEvent['data']
export type StreamChunkData = StreamChunkEvent['data']
export type StreamDoneData = StreamDoneEvent['data']
@@ -222,12 +252,8 @@ export function createSSECallbacks(options: SSECallbackOptions) {
blockName: string,
blockType: string,
executionOrder: number,
iterationContext?: {
iterationCurrent: number
iterationTotal?: number
iterationType: string
iterationContainerId?: string
}
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => {
sendEvent({
type: 'block:started',
@@ -242,9 +268,13 @@ export function createSSECallbacks(options: SSECallbackOptions) {
...(iterationContext && {
iterationCurrent: iterationContext.iterationCurrent,
iterationTotal: iterationContext.iterationTotal,
iterationType: iterationContext.iterationType as any,
iterationType: iterationContext.iterationType,
iterationContainerId: iterationContext.iterationContainerId,
}),
...(childWorkflowContext && {
childWorkflowBlockId: childWorkflowContext.parentBlockId,
childWorkflowName: childWorkflowContext.workflowName,
}),
},
})
}
@@ -260,23 +290,30 @@ export function createSSECallbacks(options: SSECallbackOptions) {
startedAt: string
executionOrder: number
endedAt: string
childWorkflowInstanceId?: string
},
iterationContext?: {
iterationCurrent: number
iterationTotal?: number
iterationType: string
iterationContainerId?: string
}
iterationContext?: IterationContext,
childWorkflowContext?: ChildWorkflowContext
) => {
const hasError = callbackData.output?.error
const iterationData = iterationContext
? {
iterationCurrent: iterationContext.iterationCurrent,
iterationTotal: iterationContext.iterationTotal,
iterationType: iterationContext.iterationType as any,
iterationType: iterationContext.iterationType,
iterationContainerId: iterationContext.iterationContainerId,
}
: {}
const childWorkflowData = childWorkflowContext
? {
childWorkflowBlockId: childWorkflowContext.parentBlockId,
childWorkflowName: childWorkflowContext.workflowName,
}
: {}
const instanceData = callbackData.childWorkflowInstanceId
? { childWorkflowInstanceId: callbackData.childWorkflowInstanceId }
: {}
if (hasError) {
sendEvent({
@@ -295,6 +332,8 @@ export function createSSECallbacks(options: SSECallbackOptions) {
executionOrder: callbackData.executionOrder,
endedAt: callbackData.endedAt,
...iterationData,
...childWorkflowData,
...instanceData,
},
})
} else {
@@ -314,6 +353,8 @@ export function createSSECallbacks(options: SSECallbackOptions) {
executionOrder: callbackData.executionOrder,
endedAt: callbackData.endedAt,
...iterationData,
...childWorkflowData,
...instanceData,
},
})
}
@@ -352,5 +393,26 @@ export function createSSECallbacks(options: SSECallbackOptions) {
}
}
return { sendEvent, onBlockStart, onBlockComplete, onStream }
const onChildWorkflowInstanceReady = (
blockId: string,
childWorkflowInstanceId: string,
iterationContext?: IterationContext
) => {
sendEvent({
type: 'block:childWorkflowStarted',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: {
blockId,
childWorkflowInstanceId,
...(iterationContext && {
iterationCurrent: iterationContext.iterationCurrent,
iterationContainerId: iterationContext.iterationContainerId,
}),
},
})
}
return { sendEvent, onBlockStart, onBlockComplete, onStream, onChildWorkflowInstanceReady }
}

View File

@@ -467,25 +467,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
},
contextWindow: 200000,
},
{
id: 'claude-3-7-sonnet-latest',
pricing: {
input: 3.0,
cachedInput: 0.3,
output: 15.0,
updatedAt: '2026-02-05',
},
capabilities: {
temperature: { min: 0, max: 1 },
computerUse: true,
maxOutputTokens: 64000,
thinking: {
levels: ['low', 'medium', 'high'],
default: 'high',
},
},
contextWindow: 200000,
},
],
},
'azure-openai': {

View File

@@ -183,7 +183,6 @@ describe('Model Capabilities', () => {
'gemini-2.5-flash',
'claude-sonnet-4-0',
'claude-opus-4-0',
'claude-3-7-sonnet-latest',
'grok-3-latest',
'grok-3-fast-latest',
'deepseek-v3',
@@ -260,7 +259,6 @@ describe('Model Capabilities', () => {
const modelsRange01 = [
'claude-sonnet-4-0',
'claude-opus-4-0',
'claude-3-7-sonnet-latest',
'grok-3-latest',
'grok-3-fast-latest',
]

View File

@@ -420,6 +420,18 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
updatedEntry.iterationContainerId = update.iterationContainerId
}
if (update.childWorkflowBlockId !== undefined) {
updatedEntry.childWorkflowBlockId = update.childWorkflowBlockId
}
if (update.childWorkflowName !== undefined) {
updatedEntry.childWorkflowName = update.childWorkflowName
}
if (update.childWorkflowInstanceId !== undefined) {
updatedEntry.childWorkflowInstanceId = update.childWorkflowInstanceId
}
return updatedEntry
})

View File

@@ -24,6 +24,12 @@ export interface ConsoleEntry {
iterationContainerId?: string
isRunning?: boolean
isCanceled?: boolean
/** ID of the workflow block in the parent execution that spawned this child block */
childWorkflowBlockId?: string
/** Display name of the child workflow this block belongs to */
childWorkflowName?: string
/** Per-invocation unique ID linking this workflow block to its child block events */
childWorkflowInstanceId?: string
}
export interface ConsoleUpdate {
@@ -44,6 +50,9 @@ export interface ConsoleUpdate {
iterationTotal?: number
iterationType?: SubflowType
iterationContainerId?: string
childWorkflowBlockId?: string
childWorkflowName?: string
childWorkflowInstanceId?: string
}
export interface ConsoleStore {

View File

@@ -0,0 +1,70 @@
import type { HexCancelRunParams, HexCancelRunResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Hex Cancel Run — cancels an active Hex project run.
 *
 * Sends DELETE /api/v1/projects/{projectId}/runs/{runId}; the Hex API
 * responds with 204 No Content when the cancellation succeeds.
 */
export const cancelRunTool: ToolConfig<HexCancelRunParams, HexCancelRunResponse> = {
  id: 'hex_cancel_run',
  name: 'Hex Cancel Run',
  description: 'Cancel an active Hex project run.',
  version: '1.0.0',

  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project',
    },
    runId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the run to cancel',
    },
  },

  request: {
    url: (params) =>
      `https://app.hex.tech/api/v1/projects/${params.projectId}/runs/${params.runId}`,
    method: 'DELETE',
    headers: (params) => ({
      Authorization: `Bearer ${params.apiKey}`,
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response: Response, params) => {
    // Response.ok is true for every 2xx status, including the 204 No Content
    // the API returns on success, so the previous explicit 204 check was
    // redundant and has been folded into the single ok check.
    if (response.ok) {
      return {
        success: true,
        output: {
          success: true,
          projectId: params?.projectId ?? '',
          runId: params?.runId ?? '',
        },
      }
    }
    // Failure bodies may be empty or non-JSON; fall back to an empty object
    // so the error-message lookup below cannot throw.
    const data = await response.json().catch(() => ({}))
    return {
      success: false,
      output: {
        success: false,
        projectId: params?.projectId ?? '',
        runId: params?.runId ?? '',
      },
      error: (data as Record<string, string>).message ?? 'Failed to cancel run',
    }
  },

  outputs: {
    success: { type: 'boolean', description: 'Whether the run was successfully cancelled' },
    projectId: { type: 'string', description: 'Project UUID' },
    runId: { type: 'string', description: 'Run UUID that was cancelled' },
  },
}

View File

@@ -0,0 +1,78 @@
import type { HexCreateCollectionParams, HexCreateCollectionResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Hex Create Collection — creates a new collection for organizing projects.
 * POST /api/v1/collections with a required name and optional description.
 */
export const createCollectionTool: ToolConfig<
  HexCreateCollectionParams,
  HexCreateCollectionResponse
> = {
  id: 'hex_create_collection',
  name: 'Hex Create Collection',
  description: 'Create a new collection in the Hex workspace to organize projects.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    name: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'Name for the new collection',
    },
    description: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Optional description for the collection',
    },
  },
  request: {
    url: 'https://app.hex.tech/api/v1/collections',
    method: 'POST',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
    body: (params) => {
      // Omit the description key entirely when none was provided.
      const payload: Record<string, unknown> = { name: params.name }
      if (params.description) {
        payload.description = params.description
      }
      return payload
    },
  },
  transformResponse: async (response: Response) => {
    // NOTE(review): assumes the tool runner surfaces non-2xx responses before
    // this transform runs — confirm; this transform always reports success.
    const payload = await response.json()
    const creator = payload.creator
      ? { email: payload.creator.email ?? null, id: payload.creator.id ?? null }
      : null
    return {
      success: true,
      output: {
        id: payload.id ?? null,
        name: payload.name ?? null,
        description: payload.description ?? null,
        creator,
      },
    }
  },
  outputs: {
    id: { type: 'string', description: 'Newly created collection UUID' },
    name: { type: 'string', description: 'Collection name' },
    description: { type: 'string', description: 'Collection description', optional: true },
    creator: {
      type: 'object',
      description: 'Collection creator',
      optional: true,
      properties: {
        email: { type: 'string', description: 'Creator email' },
        id: { type: 'string', description: 'Creator UUID' },
      },
    },
  },
}

View File

@@ -0,0 +1,64 @@
import type { HexGetCollectionParams, HexGetCollectionResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/** Hex Get Collection — fetches one collection's details by its UUID. */
export const getCollectionTool: ToolConfig<HexGetCollectionParams, HexGetCollectionResponse> = {
  id: 'hex_get_collection',
  name: 'Hex Get Collection',
  description: 'Retrieve details for a specific Hex collection by its ID.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    collectionId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the collection',
    },
  },
  request: {
    url: (params) => {
      return `https://app.hex.tech/api/v1/collections/${params.collectionId}`
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // Normalize missing optional fields to null for a stable output shape.
    const creator = payload.creator
      ? { email: payload.creator.email ?? null, id: payload.creator.id ?? null }
      : null
    return {
      success: true,
      output: {
        id: payload.id ?? null,
        name: payload.name ?? null,
        description: payload.description ?? null,
        creator,
      },
    }
  },
  outputs: {
    id: { type: 'string', description: 'Collection UUID' },
    name: { type: 'string', description: 'Collection name' },
    description: { type: 'string', description: 'Collection description', optional: true },
    creator: {
      type: 'object',
      description: 'Collection creator',
      optional: true,
      properties: {
        email: { type: 'string', description: 'Creator email' },
        id: { type: 'string', description: 'Creator UUID' },
      },
    },
  },
}

View File

@@ -0,0 +1,76 @@
import type { HexGetDataConnectionParams, HexGetDataConnectionResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Hex Get Data Connection — fetches one data connection's details,
 * including its type and configuration flags.
 */
export const getDataConnectionTool: ToolConfig<
  HexGetDataConnectionParams,
  HexGetDataConnectionResponse
> = {
  id: 'hex_get_data_connection',
  name: 'Hex Get Data Connection',
  description:
    'Retrieve details for a specific data connection including type, description, and configuration flags.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    dataConnectionId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the data connection',
    },
  },
  request: {
    url: (params) => {
      return `https://app.hex.tech/api/v1/data-connections/${params.dataConnectionId}`
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // Missing/undefined fields are coerced to null for a stable shape.
    return {
      success: true,
      output: {
        id: payload.id ?? null,
        name: payload.name ?? null,
        type: payload.type ?? null,
        description: payload.description ?? null,
        connectViaSsh: payload.connectViaSsh ?? null,
        includeMagic: payload.includeMagic ?? null,
        allowWritebackCells: payload.allowWritebackCells ?? null,
      },
    }
  },
  outputs: {
    id: { type: 'string', description: 'Connection UUID' },
    name: { type: 'string', description: 'Connection name' },
    type: { type: 'string', description: 'Connection type (e.g., snowflake, postgres, bigquery)' },
    description: { type: 'string', description: 'Connection description', optional: true },
    connectViaSsh: {
      type: 'boolean',
      description: 'Whether SSH tunneling is enabled',
      optional: true,
    },
    includeMagic: {
      type: 'boolean',
      description: 'Whether Magic AI features are enabled',
      optional: true,
    },
    allowWritebackCells: {
      type: 'boolean',
      description: 'Whether writeback cells are allowed',
      optional: true,
    },
  },
}

View File

@@ -0,0 +1,52 @@
import type { HexGetGroupParams, HexGetGroupResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/** Hex Get Group — fetches one workspace group's details by its UUID. */
export const getGroupTool: ToolConfig<HexGetGroupParams, HexGetGroupResponse> = {
  id: 'hex_get_group',
  name: 'Hex Get Group',
  description: 'Retrieve details for a specific Hex group.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    groupId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the group',
    },
  },
  request: {
    url: (params) => {
      return `https://app.hex.tech/api/v1/groups/${params.groupId}`
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    return {
      success: true,
      output: {
        id: payload.id ?? null,
        name: payload.name ?? null,
        createdAt: payload.createdAt ?? null,
      },
    }
  },
  outputs: {
    id: { type: 'string', description: 'Group UUID' },
    name: { type: 'string', description: 'Group name' },
    createdAt: { type: 'string', description: 'Creation timestamp' },
  },
}

View File

@@ -0,0 +1,78 @@
import type { HexGetProjectParams, HexGetProjectResponse } from '@/tools/hex/types'
import { HEX_PROJECT_OUTPUT_PROPERTIES } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/** Hex Get Project — fetches one project's metadata by its UUID. */
export const getProjectTool: ToolConfig<HexGetProjectParams, HexGetProjectResponse> = {
  id: 'hex_get_project',
  name: 'Hex Get Project',
  description: 'Get metadata and details for a specific Hex project by its ID.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project',
    },
  },
  request: {
    url: (params) => {
      return `https://app.hex.tech/api/v1/projects/${params.projectId}`
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // Optional nested objects collapse to null; categories default to [].
    const categories = Array.isArray(payload.categories)
      ? payload.categories.map((category: Record<string, string>) => ({
          name: category.name ?? null,
          description: category.description ?? null,
        }))
      : []
    return {
      success: true,
      output: {
        id: payload.id ?? null,
        title: payload.title ?? null,
        description: payload.description ?? null,
        status: payload.status ? { name: payload.status.name ?? null } : null,
        type: payload.type ?? null,
        creator: payload.creator ? { email: payload.creator.email ?? null } : null,
        owner: payload.owner ? { email: payload.owner.email ?? null } : null,
        categories,
        lastEditedAt: payload.lastEditedAt ?? null,
        lastPublishedAt: payload.lastPublishedAt ?? null,
        createdAt: payload.createdAt ?? null,
        archivedAt: payload.archivedAt ?? null,
        trashedAt: payload.trashedAt ?? null,
      },
    }
  },
  // Output schema is shared with the other project tools via
  // HEX_PROJECT_OUTPUT_PROPERTIES to keep field docs consistent.
  outputs: {
    id: HEX_PROJECT_OUTPUT_PROPERTIES.id,
    title: HEX_PROJECT_OUTPUT_PROPERTIES.title,
    description: HEX_PROJECT_OUTPUT_PROPERTIES.description,
    status: HEX_PROJECT_OUTPUT_PROPERTIES.status,
    type: HEX_PROJECT_OUTPUT_PROPERTIES.type,
    creator: HEX_PROJECT_OUTPUT_PROPERTIES.creator,
    owner: HEX_PROJECT_OUTPUT_PROPERTIES.owner,
    categories: HEX_PROJECT_OUTPUT_PROPERTIES.categories,
    lastEditedAt: HEX_PROJECT_OUTPUT_PROPERTIES.lastEditedAt,
    lastPublishedAt: HEX_PROJECT_OUTPUT_PROPERTIES.lastPublishedAt,
    createdAt: HEX_PROJECT_OUTPUT_PROPERTIES.createdAt,
    archivedAt: HEX_PROJECT_OUTPUT_PROPERTIES.archivedAt,
    trashedAt: HEX_PROJECT_OUTPUT_PROPERTIES.trashedAt,
  },
}

View File

@@ -0,0 +1,115 @@
import type { HexGetProjectRunsParams, HexGetProjectRunsResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Hex Get Project Runs — lists API-triggered runs for a project, with
 * optional status filtering and limit/offset pagination.
 */
export const getProjectRunsTool: ToolConfig<HexGetProjectRunsParams, HexGetProjectRunsResponse> = {
  id: 'hex_get_project_runs',
  name: 'Hex Get Project Runs',
  description:
    'Retrieve API-triggered runs for a Hex project with optional filtering by status and pagination.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of runs to return (1-100, default: 25)',
    },
    offset: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Offset for paginated results (default: 0)',
    },
    statusFilter: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description:
        'Filter by run status: PENDING, RUNNING, ERRORED, COMPLETED, KILLED, UNABLE_TO_ALLOCATE_KERNEL',
    },
  },
  request: {
    url: (params) => {
      // Build the query string only from the optional params that were set.
      const query = new URLSearchParams()
      if (params.limit) query.set('limit', String(params.limit))
      if (params.offset) query.set('offset', String(params.offset))
      if (params.statusFilter) query.set('statusFilter', params.statusFilter)
      const queryString = query.toString()
      const base = `https://app.hex.tech/api/v1/projects/${params.projectId}/runs`
      return queryString ? `${base}?${queryString}` : base
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // The API may return a bare array or a { runs } wrapper object.
    const runs = Array.isArray(payload) ? payload : (payload.runs ?? [])
    return {
      success: true,
      output: {
        runs: runs.map((run: Record<string, unknown>) => ({
          projectId: (run.projectId as string) ?? null,
          runId: (run.runId as string) ?? null,
          runUrl: (run.runUrl as string) ?? null,
          status: (run.status as string) ?? null,
          startTime: (run.startTime as string) ?? null,
          endTime: (run.endTime as string) ?? null,
          elapsedTime: (run.elapsedTime as number) ?? null,
          traceId: (run.traceId as string) ?? null,
          projectVersion: (run.projectVersion as number) ?? null,
        })),
        total: runs.length,
        traceId: payload.traceId ?? null,
      },
    }
  },
  outputs: {
    runs: {
      type: 'array',
      description: 'List of project runs',
      items: {
        type: 'object',
        properties: {
          projectId: { type: 'string', description: 'Project UUID' },
          runId: { type: 'string', description: 'Run UUID' },
          runUrl: { type: 'string', description: 'URL to view the run', optional: true },
          status: {
            type: 'string',
            description:
              'Run status (PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL)',
          },
          startTime: { type: 'string', description: 'Run start time', optional: true },
          endTime: { type: 'string', description: 'Run end time', optional: true },
          elapsedTime: { type: 'number', description: 'Elapsed time in seconds', optional: true },
          traceId: { type: 'string', description: 'Trace ID', optional: true },
          projectVersion: {
            type: 'number',
            description: 'Project version number',
            optional: true,
          },
        },
      },
    },
    total: { type: 'number', description: 'Total number of runs returned' },
    traceId: { type: 'string', description: 'Top-level trace ID', optional: true },
  },
}

View File

@@ -0,0 +1,81 @@
import type { HexGetQueriedTablesParams, HexGetQueriedTablesResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Hex Get Queried Tables — lists the warehouse tables a project queries,
 * each with its data connection id and name.
 */
export const getQueriedTablesTool: ToolConfig<
  HexGetQueriedTablesParams,
  HexGetQueriedTablesResponse
> = {
  id: 'hex_get_queried_tables',
  name: 'Hex Get Queried Tables',
  description:
    'Return the warehouse tables queried by a Hex project, including data connection and table names.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of tables to return (1-100)',
    },
  },
  request: {
    url: (params) => {
      const query = new URLSearchParams()
      if (params.limit) query.set('limit', String(params.limit))
      const queryString = query.toString()
      const base = `https://app.hex.tech/api/v1/projects/${params.projectId}/queriedTables`
      return queryString ? `${base}?${queryString}` : base
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // The API may return a bare array or a paginated { values } wrapper.
    const tables = Array.isArray(payload) ? payload : (payload.values ?? [])
    return {
      success: true,
      output: {
        tables: tables.map((table: Record<string, unknown>) => ({
          dataConnectionId: (table.dataConnectionId as string) ?? null,
          dataConnectionName: (table.dataConnectionName as string) ?? null,
          tableName: (table.tableName as string) ?? null,
        })),
        total: tables.length,
      },
    }
  },
  outputs: {
    tables: {
      type: 'array',
      description: 'List of warehouse tables queried by the project',
      items: {
        type: 'object',
        properties: {
          dataConnectionId: { type: 'string', description: 'Data connection UUID' },
          dataConnectionName: { type: 'string', description: 'Data connection name' },
          tableName: { type: 'string', description: 'Table name' },
        },
      },
    },
    total: { type: 'number', description: 'Total number of tables returned' },
  },
}

View File

@@ -0,0 +1,72 @@
import type { HexGetRunStatusParams, HexGetRunStatusResponse } from '@/tools/hex/types'
import { HEX_RUN_STATUS_OUTPUT_PROPERTIES } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/** Hex Get Run Status — checks the state of one project run by run ID. */
export const getRunStatusTool: ToolConfig<HexGetRunStatusParams, HexGetRunStatusResponse> = {
  id: 'hex_get_run_status',
  name: 'Hex Get Run Status',
  description: 'Check the status of a Hex project run by its run ID.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project',
    },
    runId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the run to check',
    },
  },
  request: {
    url: (params) => {
      return `https://app.hex.tech/api/v1/projects/${params.projectId}/runs/${params.runId}`
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    return {
      success: true,
      output: {
        projectId: payload.projectId ?? null,
        runId: payload.runId ?? null,
        runUrl: payload.runUrl ?? null,
        status: payload.status ?? null,
        startTime: payload.startTime ?? null,
        endTime: payload.endTime ?? null,
        elapsedTime: payload.elapsedTime ?? null,
        traceId: payload.traceId ?? null,
        projectVersion: payload.projectVersion ?? null,
      },
    }
  },
  // Output schema is shared via HEX_RUN_STATUS_OUTPUT_PROPERTIES so run
  // status fields are documented identically across the Hex tools.
  outputs: {
    projectId: HEX_RUN_STATUS_OUTPUT_PROPERTIES.projectId,
    runId: HEX_RUN_STATUS_OUTPUT_PROPERTIES.runId,
    runUrl: HEX_RUN_STATUS_OUTPUT_PROPERTIES.runUrl,
    status: HEX_RUN_STATUS_OUTPUT_PROPERTIES.status,
    startTime: HEX_RUN_STATUS_OUTPUT_PROPERTIES.startTime,
    endTime: HEX_RUN_STATUS_OUTPUT_PROPERTIES.endTime,
    elapsedTime: HEX_RUN_STATUS_OUTPUT_PROPERTIES.elapsedTime,
    traceId: HEX_RUN_STATUS_OUTPUT_PROPERTIES.traceId,
    projectVersion: HEX_RUN_STATUS_OUTPUT_PROPERTIES.projectVersion,
  },
}

View File

@@ -0,0 +1,33 @@
// Barrel module for the Hex tools: each tool is re-exported under its
// hex-prefixed public alias. Direct re-export syntax replaces the
// import-then-`export const` pairs while keeping the same named exports.
export { cancelRunTool as hexCancelRunTool } from '@/tools/hex/cancel_run'
export { createCollectionTool as hexCreateCollectionTool } from '@/tools/hex/create_collection'
export { getCollectionTool as hexGetCollectionTool } from '@/tools/hex/get_collection'
export { getDataConnectionTool as hexGetDataConnectionTool } from '@/tools/hex/get_data_connection'
export { getGroupTool as hexGetGroupTool } from '@/tools/hex/get_group'
export { getProjectTool as hexGetProjectTool } from '@/tools/hex/get_project'
export { getProjectRunsTool as hexGetProjectRunsTool } from '@/tools/hex/get_project_runs'
export { getQueriedTablesTool as hexGetQueriedTablesTool } from '@/tools/hex/get_queried_tables'
export { getRunStatusTool as hexGetRunStatusTool } from '@/tools/hex/get_run_status'
export { listCollectionsTool as hexListCollectionsTool } from '@/tools/hex/list_collections'
export { listDataConnectionsTool as hexListDataConnectionsTool } from '@/tools/hex/list_data_connections'
export { listGroupsTool as hexListGroupsTool } from '@/tools/hex/list_groups'
export { listProjectsTool as hexListProjectsTool } from '@/tools/hex/list_projects'
export { listUsersTool as hexListUsersTool } from '@/tools/hex/list_users'
export { runProjectTool as hexRunProjectTool } from '@/tools/hex/run_project'
export { updateProjectTool as hexUpdateProjectTool } from '@/tools/hex/update_project'

View File

@@ -0,0 +1,94 @@
import type { HexListCollectionsParams, HexListCollectionsResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/** Hex List Collections — lists all collections in the workspace. */
export const listCollectionsTool: ToolConfig<HexListCollectionsParams, HexListCollectionsResponse> =
  {
    id: 'hex_list_collections',
    name: 'Hex List Collections',
    description: 'List all collections in the Hex workspace.',
    version: '1.0.0',
    params: {
      apiKey: {
        type: 'string',
        required: true,
        visibility: 'user-only',
        description: 'Hex API token (Personal or Workspace)',
      },
      limit: {
        type: 'number',
        required: false,
        visibility: 'user-or-llm',
        description: 'Maximum number of collections to return (1-500, default: 25)',
      },
      sortBy: {
        type: 'string',
        required: false,
        visibility: 'user-only',
        description: 'Sort by field: NAME',
      },
    },
    request: {
      url: (params) => {
        const query = new URLSearchParams()
        if (params.limit) query.set('limit', String(params.limit))
        if (params.sortBy) query.set('sortBy', params.sortBy)
        const queryString = query.toString()
        return queryString
          ? `https://app.hex.tech/api/v1/collections?${queryString}`
          : 'https://app.hex.tech/api/v1/collections'
      },
      method: 'GET',
      headers: (params) => {
        return {
          Authorization: `Bearer ${params.apiKey}`,
          'Content-Type': 'application/json',
        }
      },
    },
    transformResponse: async (response: Response) => {
      const payload = await response.json()
      // The API may return a bare array or a paginated { values } wrapper.
      const collections = Array.isArray(payload) ? payload : (payload.values ?? [])
      const mapped = collections.map((collection: Record<string, unknown>) => {
        const creator = collection.creator
          ? {
              email: (collection.creator as Record<string, string>).email ?? null,
              id: (collection.creator as Record<string, string>).id ?? null,
            }
          : null
        return {
          id: (collection.id as string) ?? null,
          name: (collection.name as string) ?? null,
          description: (collection.description as string) ?? null,
          creator,
        }
      })
      return {
        success: true,
        output: {
          collections: mapped,
          total: mapped.length,
        },
      }
    },
    outputs: {
      collections: {
        type: 'array',
        description: 'List of collections',
        items: {
          type: 'object',
          properties: {
            id: { type: 'string', description: 'Collection UUID' },
            name: { type: 'string', description: 'Collection name' },
            description: { type: 'string', description: 'Collection description', optional: true },
            creator: {
              type: 'object',
              description: 'Collection creator',
              optional: true,
              properties: {
                email: { type: 'string', description: 'Creator email' },
                id: { type: 'string', description: 'Creator UUID' },
              },
            },
          },
        },
      },
      total: { type: 'number', description: 'Total number of collections returned' },
    },
  }

View File

@@ -0,0 +1,116 @@
import type {
HexListDataConnectionsParams,
HexListDataConnectionsResponse,
} from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Hex List Data Connections — lists the workspace's warehouse connections
 * (Snowflake, PostgreSQL, BigQuery, etc.) with optional sorting.
 */
export const listDataConnectionsTool: ToolConfig<
  HexListDataConnectionsParams,
  HexListDataConnectionsResponse
> = {
  id: 'hex_list_data_connections',
  name: 'Hex List Data Connections',
  description:
    'List all data connections in the Hex workspace (e.g., Snowflake, PostgreSQL, BigQuery).',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of connections to return (1-500, default: 25)',
    },
    sortBy: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Sort by field: CREATED_AT or NAME',
    },
    sortDirection: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Sort direction: ASC or DESC',
    },
  },
  request: {
    url: (params) => {
      const query = new URLSearchParams()
      if (params.limit) query.set('limit', String(params.limit))
      if (params.sortBy) query.set('sortBy', params.sortBy)
      if (params.sortDirection) query.set('sortDirection', params.sortDirection)
      const queryString = query.toString()
      return queryString
        ? `https://app.hex.tech/api/v1/data-connections?${queryString}`
        : 'https://app.hex.tech/api/v1/data-connections'
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // The API may return a bare array or a paginated { values } wrapper.
    const connections = Array.isArray(payload) ? payload : (payload.values ?? [])
    return {
      success: true,
      output: {
        connections: connections.map((connection: Record<string, unknown>) => ({
          id: (connection.id as string) ?? null,
          name: (connection.name as string) ?? null,
          type: (connection.type as string) ?? null,
          description: (connection.description as string) ?? null,
          connectViaSsh: (connection.connectViaSsh as boolean) ?? null,
          includeMagic: (connection.includeMagic as boolean) ?? null,
          allowWritebackCells: (connection.allowWritebackCells as boolean) ?? null,
        })),
        total: connections.length,
      },
    }
  },
  outputs: {
    connections: {
      type: 'array',
      description: 'List of data connections',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Connection UUID' },
          name: { type: 'string', description: 'Connection name' },
          type: {
            type: 'string',
            description:
              'Connection type (e.g., athena, bigquery, databricks, postgres, redshift, snowflake)',
          },
          description: { type: 'string', description: 'Connection description', optional: true },
          connectViaSsh: {
            type: 'boolean',
            description: 'Whether SSH tunneling is enabled',
            optional: true,
          },
          includeMagic: {
            type: 'boolean',
            description: 'Whether Magic AI features are enabled',
            optional: true,
          },
          allowWritebackCells: {
            type: 'boolean',
            description: 'Whether writeback cells are allowed',
            optional: true,
          },
        },
      },
    },
    total: { type: 'number', description: 'Total number of connections returned' },
  },
}

View File

@@ -0,0 +1,85 @@
import type { HexListGroupsParams, HexListGroupsResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/** Hex List Groups — lists workspace groups with optional sorting. */
export const listGroupsTool: ToolConfig<HexListGroupsParams, HexListGroupsResponse> = {
  id: 'hex_list_groups',
  name: 'Hex List Groups',
  description: 'List all groups in the Hex workspace with optional sorting.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of groups to return (1-500, default: 25)',
    },
    sortBy: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Sort by field: CREATED_AT or NAME',
    },
    sortDirection: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Sort direction: ASC or DESC',
    },
  },
  request: {
    url: (params) => {
      const query = new URLSearchParams()
      if (params.limit) query.set('limit', String(params.limit))
      if (params.sortBy) query.set('sortBy', params.sortBy)
      if (params.sortDirection) query.set('sortDirection', params.sortDirection)
      const queryString = query.toString()
      return queryString
        ? `https://app.hex.tech/api/v1/groups?${queryString}`
        : 'https://app.hex.tech/api/v1/groups'
    },
    method: 'GET',
    headers: (params) => {
      return {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
    },
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // The API may return a bare array or a paginated { values } wrapper.
    const groups = Array.isArray(payload) ? payload : (payload.values ?? [])
    return {
      success: true,
      output: {
        groups: groups.map((group: Record<string, unknown>) => ({
          id: (group.id as string) ?? null,
          name: (group.name as string) ?? null,
          createdAt: (group.createdAt as string) ?? null,
        })),
        total: groups.length,
      },
    }
  },
  outputs: {
    groups: {
      type: 'array',
      description: 'List of workspace groups',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Group UUID' },
          name: { type: 'string', description: 'Group name' },
          createdAt: { type: 'string', description: 'Creation timestamp' },
        },
      },
    },
    total: { type: 'number', description: 'Total number of groups returned' },
  },
}

View File

@@ -0,0 +1,138 @@
import type { HexListProjectsParams, HexListProjectsResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Hex List Projects — lists workspace projects with optional archived
 * inclusion and status filtering.
 */
export const listProjectsTool: ToolConfig<HexListProjectsParams, HexListProjectsResponse> = {
  id: 'hex_list_projects',
  name: 'Hex List Projects',
  description: 'List all projects in your Hex workspace with optional filtering by status.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of projects to return (1-100)',
    },
    includeArchived: {
      type: 'boolean',
      required: false,
      visibility: 'user-only',
      description: 'Include archived projects in results',
    },
    statusFilter: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Filter by status: PUBLISHED, DRAFT, or ALL',
    },
  },
  request: {
    url: (params) => {
      const searchParams = new URLSearchParams()
      if (params.limit) searchParams.set('limit', String(params.limit))
      if (params.includeArchived) searchParams.set('includeArchived', 'true')
      // The Hex API expects repeated 'statuses[]' keys for status filters.
      if (params.statusFilter) searchParams.append('statuses[]', params.statusFilter)
      const qs = searchParams.toString()
      return `https://app.hex.tech/api/v1/projects${qs ? `?${qs}` : ''}`
    },
    method: 'GET',
    headers: (params) => ({
      Authorization: `Bearer ${params.apiKey}`,
      'Content-Type': 'application/json',
    }),
  },
  transformResponse: async (response: Response) => {
    const data = await response.json()
    // The API may return a bare array or a paginated { values } wrapper.
    const projects = Array.isArray(data) ? data : (data.values ?? [])
    return {
      success: true,
      output: {
        projects: projects.map((p: Record<string, unknown>) => ({
          id: (p.id as string) ?? null,
          title: (p.title as string) ?? null,
          description: (p.description as string) ?? null,
          status: p.status ? { name: (p.status as Record<string, string>).name ?? null } : null,
          type: (p.type as string) ?? null,
          creator: p.creator
            ? { email: (p.creator as Record<string, string>).email ?? null }
            : null,
          owner: p.owner ? { email: (p.owner as Record<string, string>).email ?? null } : null,
          categories: Array.isArray(p.categories)
            ? (p.categories as Array<Record<string, string>>).map((c) => ({
                name: c.name ?? null,
                description: c.description ?? null,
              }))
            : [],
          lastEditedAt: (p.lastEditedAt as string) ?? null,
          lastPublishedAt: (p.lastPublishedAt as string) ?? null,
          createdAt: (p.createdAt as string) ?? null,
          archivedAt: (p.archivedAt as string) ?? null,
          trashedAt: (p.trashedAt as string) ?? null,
        })),
        total: projects.length,
      },
    }
  },
  outputs: {
    projects: {
      type: 'array',
      description: 'List of Hex projects',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Project UUID' },
          title: { type: 'string', description: 'Project title' },
          description: { type: 'string', description: 'Project description', optional: true },
          status: {
            type: 'object',
            description: 'Project status',
            properties: {
              name: { type: 'string', description: 'Status name (e.g., PUBLISHED, DRAFT)' },
            },
          },
          type: { type: 'string', description: 'Project type (PROJECT or COMPONENT)' },
          creator: {
            type: 'object',
            description: 'Project creator',
            optional: true,
            properties: {
              email: { type: 'string', description: 'Creator email' },
            },
          },
          owner: {
            type: 'object',
            description: 'Project owner',
            optional: true,
            properties: {
              email: { type: 'string', description: 'Owner email' },
            },
          },
          lastEditedAt: {
            type: 'string',
            description: 'Last edited timestamp',
            optional: true,
          },
          lastPublishedAt: {
            type: 'string',
            description: 'Last published timestamp',
            optional: true,
          },
          createdAt: { type: 'string', description: 'Creation timestamp' },
          archivedAt: { type: 'string', description: 'Archived timestamp', optional: true },
          // trashedAt is emitted by transformResponse but was missing from the
          // documented output schema; added for parity with hex_get_project.
          trashedAt: { type: 'string', description: 'Trashed timestamp', optional: true },
        },
      },
    },
    total: { type: 'number', description: 'Total number of projects returned' },
  },
}

View File

@@ -0,0 +1,98 @@
import type { HexListUsersParams, HexListUsersResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Lists the users of a Hex workspace via GET /api/v1/users.
 * Optional limit, sort, and group filters are forwarded as query parameters
 * only when the caller supplied them; the response is normalized into a flat
 * array of { id, name, email, role } records plus a total count.
 */
export const listUsersTool: ToolConfig<HexListUsersParams, HexListUsersResponse> = {
  id: 'hex_list_users',
  name: 'Hex List Users',
  description: 'List all users in the Hex workspace with optional filtering and sorting.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of users to return (1-100, default: 25)',
    },
    sortBy: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Sort by field: NAME or EMAIL',
    },
    sortDirection: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Sort direction: ASC or DESC',
    },
    groupId: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Filter users by group UUID',
    },
  },
  request: {
    url: (params) => {
      const base = 'https://app.hex.tech/api/v1/users'
      const query = new URLSearchParams()
      // Append only the filters the caller actually provided.
      if (params.limit) query.set('limit', String(params.limit))
      if (params.sortBy) query.set('sortBy', params.sortBy)
      if (params.sortDirection) query.set('sortDirection', params.sortDirection)
      if (params.groupId) query.set('groupId', params.groupId)
      const encoded = query.toString()
      return encoded.length > 0 ? `${base}?${encoded}` : base
    },
    method: 'GET',
    headers: (params) => ({
      Authorization: `Bearer ${params.apiKey}`,
      'Content-Type': 'application/json',
    }),
  },
  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // The API may return a bare array or a paginated { values: [...] } envelope.
    const rawUsers: Array<Record<string, unknown>> = Array.isArray(payload)
      ? payload
      : (payload.values ?? [])
    const users = rawUsers.map((entry) => ({
      id: (entry.id as string) ?? null,
      name: (entry.name as string) ?? null,
      email: (entry.email as string) ?? null,
      role: (entry.role as string) ?? null,
    }))
    return {
      success: true,
      output: {
        users,
        total: users.length,
      },
    }
  },
  outputs: {
    users: {
      type: 'array',
      description: 'List of workspace users',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'User UUID' },
          name: { type: 'string', description: 'User name' },
          email: { type: 'string', description: 'User email' },
          role: {
            type: 'string',
            description:
              'User role (ADMIN, MANAGER, EDITOR, EXPLORER, MEMBER, GUEST, EMBEDDED_USER, ANONYMOUS)',
          },
        },
      },
    },
    total: { type: 'number', description: 'Total number of users returned' },
  },
}

View File

@@ -0,0 +1,108 @@
import type { HexRunProjectParams, HexRunProjectResponse } from '@/tools/hex/types'
import { HEX_RUN_OUTPUT_PROPERTIES } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Triggers a run of a published Hex project via POST /api/v1/projects/{id}/runs.
 * Input parameters and cache-control flags are included in the request body
 * only when the caller explicitly supplied them, so the payload never carries
 * undefined fields.
 */
export const runProjectTool: ToolConfig<HexRunProjectParams, HexRunProjectResponse> = {
  id: 'hex_run_project',
  name: 'Hex Run Project',
  description:
    'Execute a published Hex project. Optionally pass input parameters and control caching behavior.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project to run',
    },
    inputParams: {
      type: 'json',
      required: false,
      visibility: 'user-or-llm',
      description: 'JSON object of input parameters for the project (e.g., {"date": "2024-01-01"})',
    },
    dryRun: {
      type: 'boolean',
      required: false,
      visibility: 'user-only',
      description: 'If true, perform a dry run without executing the project',
    },
    updateCache: {
      type: 'boolean',
      required: false,
      visibility: 'user-only',
      description: '(Deprecated) If true, update the cached results after execution',
    },
    updatePublishedResults: {
      type: 'boolean',
      required: false,
      visibility: 'user-only',
      description: 'If true, update the published app results after execution',
    },
    useCachedSqlResults: {
      type: 'boolean',
      required: false,
      visibility: 'user-only',
      description: 'If true, use cached SQL results instead of re-running queries',
    },
  },
  request: {
    url: (params) => `https://app.hex.tech/api/v1/projects/${params.projectId}/runs`,
    method: 'POST',
    headers: (params) => ({
      Authorization: `Bearer ${params.apiKey}`,
      'Content-Type': 'application/json',
    }),
    body: (params) => {
      const payload: Record<string, unknown> = {}
      if (params.inputParams) {
        // Accept either a pre-parsed object or a JSON string.
        payload.inputParams =
          typeof params.inputParams === 'string'
            ? JSON.parse(params.inputParams)
            : params.inputParams
      }
      // Copy each optional flag only when it was explicitly set (false is a valid value).
      const optionalFlags = [
        'dryRun',
        'updateCache',
        'updatePublishedResults',
        'useCachedSqlResults',
      ] as const
      for (const flag of optionalFlags) {
        const value = (params as Record<string, unknown>)[flag]
        if (value !== undefined) payload[flag] = value
      }
      return payload
    },
  },
  transformResponse: async (response: Response) => {
    const data = await response.json()
    const { projectId, runId, runUrl, runStatusUrl, traceId, projectVersion } = data
    return {
      success: true,
      output: {
        projectId: projectId ?? null,
        runId: runId ?? null,
        runUrl: runUrl ?? null,
        runStatusUrl: runStatusUrl ?? null,
        traceId: traceId ?? null,
        projectVersion: projectVersion ?? null,
      },
    }
  },
  outputs: {
    projectId: HEX_RUN_OUTPUT_PROPERTIES.projectId,
    runId: HEX_RUN_OUTPUT_PROPERTIES.runId,
    runUrl: HEX_RUN_OUTPUT_PROPERTIES.runUrl,
    runStatusUrl: HEX_RUN_OUTPUT_PROPERTIES.runStatusUrl,
    traceId: HEX_RUN_OUTPUT_PROPERTIES.traceId,
    projectVersion: HEX_RUN_OUTPUT_PROPERTIES.projectVersion,
  },
}

429
apps/sim/tools/hex/types.ts Normal file
View File

@@ -0,0 +1,429 @@
import type { OutputProperty, ToolResponse } from '@/tools/types'
/**
* Shared output property definitions for Hex API responses.
* Based on Hex API documentation: https://learn.hex.tech/docs/api/api-reference
*/
/**
 * Output definition for project items returned by the Hex API.
 * The status field is an object with a name property (e.g., { name: "PUBLISHED" }).
 * The type field is a ProjectTypeApiEnum (PROJECT or COMPONENT).
 * Shared by the list/get/update project tools so their `outputs` stay consistent.
 */
export const HEX_PROJECT_OUTPUT_PROPERTIES = {
  id: { type: 'string', description: 'Project UUID' },
  title: { type: 'string', description: 'Project title' },
  description: { type: 'string', description: 'Project description', optional: true },
  status: {
    type: 'object',
    description: 'Project status',
    properties: {
      name: {
        type: 'string',
        description: 'Status name (e.g., PUBLISHED, DRAFT)',
      },
    },
  },
  type: {
    type: 'string',
    description: 'Project type (PROJECT or COMPONENT)',
  },
  // Creator/owner are reduced to their email address by the transform functions.
  creator: {
    type: 'object',
    description: 'Project creator',
    optional: true,
    properties: {
      email: { type: 'string', description: 'Creator email' },
    },
  },
  owner: {
    type: 'object',
    description: 'Project owner',
    optional: true,
    properties: {
      email: { type: 'string', description: 'Owner email' },
    },
  },
  categories: {
    type: 'array',
    description: 'Project categories',
    optional: true,
    items: {
      type: 'object',
      properties: {
        name: { type: 'string', description: 'Category name' },
        description: { type: 'string', description: 'Category description' },
      },
    },
  },
  // Timestamp fields; all except createdAt may be absent on a given project.
  lastEditedAt: { type: 'string', description: 'ISO 8601 last edited timestamp', optional: true },
  lastPublishedAt: {
    type: 'string',
    description: 'ISO 8601 last published timestamp',
    optional: true,
  },
  createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
  archivedAt: { type: 'string', description: 'ISO 8601 archived timestamp', optional: true },
  trashedAt: { type: 'string', description: 'ISO 8601 trashed timestamp', optional: true },
  // `satisfies` validates the shape against OutputProperty while `as const`
  // preserves the literal types of each entry.
} as const satisfies Record<string, OutputProperty>
/**
 * Output definition for run creation responses.
 * POST /v1/projects/{projectId}/runs returns projectVersion but no status.
 * Used by the hex_run_project tool's `outputs` declaration.
 */
export const HEX_RUN_OUTPUT_PROPERTIES = {
  projectId: { type: 'string', description: 'Project UUID' },
  runId: { type: 'string', description: 'Run UUID' },
  runUrl: { type: 'string', description: 'URL to view the run' },
  runStatusUrl: { type: 'string', description: 'URL to check run status' },
  traceId: { type: 'string', description: 'Trace ID for debugging', optional: true },
  projectVersion: { type: 'number', description: 'Project version number', optional: true },
} as const satisfies Record<string, OutputProperty>
/**
 * Output definition for run status responses.
 * GET /v1/projects/{projectId}/runs/{runId} returns full run details.
 * Unlike run creation, this includes the status and timing fields.
 */
export const HEX_RUN_STATUS_OUTPUT_PROPERTIES = {
  projectId: { type: 'string', description: 'Project UUID' },
  runId: { type: 'string', description: 'Run UUID' },
  runUrl: { type: 'string', description: 'URL to view the run' },
  status: {
    type: 'string',
    description:
      'Run status (PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL)',
  },
  // Timing fields are only populated once the run has actually started/finished.
  startTime: { type: 'string', description: 'ISO 8601 run start time', optional: true },
  endTime: { type: 'string', description: 'ISO 8601 run end time', optional: true },
  elapsedTime: { type: 'number', description: 'Elapsed time in seconds', optional: true },
  traceId: { type: 'string', description: 'Trace ID for debugging', optional: true },
  projectVersion: { type: 'number', description: 'Project version number', optional: true },
} as const satisfies Record<string, OutputProperty>
/** Parameters for the hex_list_projects tool. */
export interface HexListProjectsParams {
  apiKey: string
  limit?: number
  includeArchived?: boolean
  statusFilter?: string
}
/** Response for hex_list_projects: normalized project summaries plus a count. */
export interface HexListProjectsResponse extends ToolResponse {
  output: {
    projects: Array<{
      id: string
      title: string
      description: string | null
      status: { name: string } | null
      type: string
      creator: { email: string } | null
      owner: { email: string } | null
      categories: Array<{ name: string; description: string }>
      lastEditedAt: string | null
      lastPublishedAt: string | null
      createdAt: string
      archivedAt: string | null
      trashedAt: string | null
    }>
    total: number
  }
}
/** Parameters for the hex_get_project tool. */
export interface HexGetProjectParams {
  apiKey: string
  projectId: string
}
/** Response for hex_get_project: a single normalized project record. */
export interface HexGetProjectResponse extends ToolResponse {
  output: {
    id: string
    title: string
    description: string | null
    status: { name: string } | null
    type: string
    creator: { email: string } | null
    owner: { email: string } | null
    categories: Array<{ name: string; description: string }>
    lastEditedAt: string | null
    lastPublishedAt: string | null
    createdAt: string
    archivedAt: string | null
    trashedAt: string | null
  }
}
/** Parameters for the hex_run_project tool. inputParams is a JSON string or object. */
export interface HexRunProjectParams {
  apiKey: string
  projectId: string
  inputParams?: string
  dryRun?: boolean
  updateCache?: boolean
  updatePublishedResults?: boolean
  useCachedSqlResults?: boolean
}
/** Response for hex_run_project: identifiers/URLs for the newly created run. */
export interface HexRunProjectResponse extends ToolResponse {
  output: {
    projectId: string
    runId: string
    runUrl: string
    runStatusUrl: string
    traceId: string | null
    projectVersion: number | null
  }
}
/** Parameters for the hex_get_run_status tool. */
export interface HexGetRunStatusParams {
  apiKey: string
  projectId: string
  runId: string
}
/** Response for hex_get_run_status: run state and timing details. */
export interface HexGetRunStatusResponse extends ToolResponse {
  output: {
    projectId: string
    runId: string
    runUrl: string | null
    status: string
    startTime: string | null
    endTime: string | null
    elapsedTime: number | null
    traceId: string | null
    projectVersion: number | null
  }
}
/** Parameters for the hex_cancel_run tool. */
export interface HexCancelRunParams {
  apiKey: string
  projectId: string
  runId: string
}
/** Response for hex_cancel_run: echoes the identifiers of the cancelled run. */
export interface HexCancelRunResponse extends ToolResponse {
  output: {
    success: boolean
    projectId: string
    runId: string
  }
}
/** Parameters for the hex_get_project_runs tool. */
export interface HexGetProjectRunsParams {
  apiKey: string
  projectId: string
  limit?: number
  offset?: number
  statusFilter?: string
}
/** Response for hex_get_project_runs: a page of run summaries. */
export interface HexGetProjectRunsResponse extends ToolResponse {
  output: {
    runs: Array<{
      projectId: string
      runId: string
      runUrl: string | null
      status: string
      startTime: string | null
      endTime: string | null
      elapsedTime: number | null
      traceId: string | null
      projectVersion: number | null
    }>
    total: number
    traceId: string | null
  }
}
/** Parameters for the hex_update_project tool; status is a workspace status label. */
export interface HexUpdateProjectParams {
  apiKey: string
  projectId: string
  status: string
}
/** Response for hex_update_project: the project record after the update. */
export interface HexUpdateProjectResponse extends ToolResponse {
  output: {
    id: string
    title: string
    description: string | null
    status: { name: string } | null
    type: string
    creator: { email: string } | null
    owner: { email: string } | null
    categories: Array<{ name: string; description: string }>
    lastEditedAt: string | null
    lastPublishedAt: string | null
    createdAt: string
    archivedAt: string | null
    trashedAt: string | null
  }
}
/** Parameters for the hex_list_users tool. */
export interface HexListUsersParams {
  apiKey: string
  limit?: number
  sortBy?: string
  sortDirection?: string
  groupId?: string
}
/** Response for hex_list_users: normalized workspace user records. */
export interface HexListUsersResponse extends ToolResponse {
  output: {
    users: Array<{
      id: string
      name: string
      email: string
      role: string
    }>
    total: number
  }
}
/** Parameters for the hex_list_collections tool. */
export interface HexListCollectionsParams {
  apiKey: string
  limit?: number
  sortBy?: string
}
/** Response for hex_list_collections. */
export interface HexListCollectionsResponse extends ToolResponse {
  output: {
    collections: Array<{
      id: string
      name: string
      description: string | null
      creator: { email: string; id: string } | null
    }>
    total: number
  }
}
/** Parameters for the hex_list_data_connections tool. */
export interface HexListDataConnectionsParams {
  apiKey: string
  limit?: number
  sortBy?: string
  sortDirection?: string
}
/** Response for hex_list_data_connections. */
export interface HexListDataConnectionsResponse extends ToolResponse {
  output: {
    connections: Array<{
      id: string
      name: string
      type: string
      description: string | null
      connectViaSsh: boolean | null
      includeMagic: boolean | null
      allowWritebackCells: boolean | null
    }>
    total: number
  }
}
/** Parameters for the hex_get_queried_tables tool. */
export interface HexGetQueriedTablesParams {
  apiKey: string
  projectId: string
  limit?: number
}
/** Response for hex_get_queried_tables: tables a project queries. */
export interface HexGetQueriedTablesResponse extends ToolResponse {
  output: {
    tables: Array<{
      dataConnectionId: string | null
      dataConnectionName: string | null
      tableName: string | null
    }>
    total: number
  }
}
/** Parameters for the hex_list_groups tool. */
export interface HexListGroupsParams {
  apiKey: string
  limit?: number
  sortBy?: string
  sortDirection?: string
}
/** Response for hex_list_groups. */
export interface HexListGroupsResponse extends ToolResponse {
  output: {
    groups: Array<{
      id: string
      name: string
      createdAt: string | null
    }>
    total: number
  }
}
/** Parameters for the hex_get_group tool. */
export interface HexGetGroupParams {
  apiKey: string
  groupId: string
}
/** Response for hex_get_group: a single group record. */
export interface HexGetGroupResponse extends ToolResponse {
  output: {
    id: string
    name: string
    createdAt: string | null
  }
}
/** Parameters for the hex_get_data_connection tool. */
export interface HexGetDataConnectionParams {
  apiKey: string
  dataConnectionId: string
}
/** Response for hex_get_data_connection: a single connection record. */
export interface HexGetDataConnectionResponse extends ToolResponse {
  output: {
    id: string
    name: string
    type: string
    description: string | null
    connectViaSsh: boolean | null
    includeMagic: boolean | null
    allowWritebackCells: boolean | null
  }
}
/** Parameters for the hex_get_collection tool. */
export interface HexGetCollectionParams {
  apiKey: string
  collectionId: string
}
/** Response for hex_get_collection: a single collection record. */
export interface HexGetCollectionResponse extends ToolResponse {
  output: {
    id: string
    name: string
    description: string | null
    creator: { email: string; id: string } | null
  }
}
/** Parameters for the hex_create_collection tool. */
export interface HexCreateCollectionParams {
  apiKey: string
  name: string
  description?: string
}
/** Response for hex_create_collection: the newly created collection. */
export interface HexCreateCollectionResponse extends ToolResponse {
  output: {
    id: string
    name: string
    description: string | null
    creator: { email: string; id: string } | null
  }
}
/** Union of every Hex tool response, for call sites that dispatch on tool id. */
export type HexResponse =
  | HexListProjectsResponse
  | HexGetProjectResponse
  | HexRunProjectResponse
  | HexGetRunStatusResponse
  | HexCancelRunResponse
  | HexGetProjectRunsResponse
  | HexUpdateProjectResponse
  | HexListUsersResponse
  | HexListCollectionsResponse
  | HexListDataConnectionsResponse
  | HexGetQueriedTablesResponse
  | HexListGroupsResponse
  | HexGetGroupResponse
  | HexGetDataConnectionResponse
  | HexGetCollectionResponse
  | HexCreateCollectionResponse

View File

@@ -0,0 +1,118 @@
import type { HexUpdateProjectParams, HexUpdateProjectResponse } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Updates a Hex project's status label via PATCH /api/v1/projects/{id}.
 * The request body carries only the new status; the response is the updated
 * project record, normalized so every missing field becomes null.
 */
export const updateProjectTool: ToolConfig<HexUpdateProjectParams, HexUpdateProjectResponse> = {
  id: 'hex_update_project',
  name: 'Hex Update Project',
  description:
    'Update a Hex project status label (e.g., endorsement or custom workspace statuses).',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project to update',
    },
    status: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'New project status name (custom workspace status label)',
    },
  },
  request: {
    url: (params) => `https://app.hex.tech/api/v1/projects/${params.projectId}`,
    method: 'PATCH',
    headers: (params) => ({
      Authorization: `Bearer ${params.apiKey}`,
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      status: params.status,
    }),
  },
  transformResponse: async (response: Response) => {
    const data = await response.json()
    // Normalize the categories list; anything non-array collapses to [].
    const rawCategories = Array.isArray(data.categories) ? data.categories : []
    const categories = rawCategories.map((c: Record<string, string>) => ({
      name: c.name ?? null,
      description: c.description ?? null,
    }))
    return {
      success: true,
      output: {
        id: data.id ?? null,
        title: data.title ?? null,
        description: data.description ?? null,
        status: data.status ? { name: data.status.name ?? null } : null,
        type: data.type ?? null,
        creator: data.creator ? { email: data.creator.email ?? null } : null,
        owner: data.owner ? { email: data.owner.email ?? null } : null,
        categories,
        lastEditedAt: data.lastEditedAt ?? null,
        lastPublishedAt: data.lastPublishedAt ?? null,
        createdAt: data.createdAt ?? null,
        archivedAt: data.archivedAt ?? null,
        trashedAt: data.trashedAt ?? null,
      },
    }
  },
  outputs: {
    id: { type: 'string', description: 'Project UUID' },
    title: { type: 'string', description: 'Project title' },
    description: { type: 'string', description: 'Project description', optional: true },
    status: {
      type: 'object',
      description: 'Updated project status',
      properties: {
        name: { type: 'string', description: 'Status name (e.g., PUBLISHED, DRAFT)' },
      },
    },
    type: { type: 'string', description: 'Project type (PROJECT or COMPONENT)' },
    creator: {
      type: 'object',
      description: 'Project creator',
      optional: true,
      properties: {
        email: { type: 'string', description: 'Creator email' },
      },
    },
    owner: {
      type: 'object',
      description: 'Project owner',
      optional: true,
      properties: {
        email: { type: 'string', description: 'Owner email' },
      },
    },
    categories: {
      type: 'array',
      description: 'Project categories',
      optional: true,
      items: {
        type: 'object',
        properties: {
          name: { type: 'string', description: 'Category name' },
          description: { type: 'string', description: 'Category description' },
        },
      },
    },
    lastEditedAt: { type: 'string', description: 'Last edited timestamp', optional: true },
    lastPublishedAt: { type: 'string', description: 'Last published timestamp', optional: true },
    createdAt: { type: 'string', description: 'Creation timestamp' },
    archivedAt: { type: 'string', description: 'Archived timestamp', optional: true },
    trashedAt: { type: 'string', description: 'Trashed timestamp', optional: true },
  },
}

View File

@@ -723,6 +723,24 @@ import {
greptileStatusTool,
} from '@/tools/greptile'
import { guardrailsValidateTool } from '@/tools/guardrails'
import {
hexCancelRunTool,
hexCreateCollectionTool,
hexGetCollectionTool,
hexGetDataConnectionTool,
hexGetGroupTool,
hexGetProjectRunsTool,
hexGetProjectTool,
hexGetQueriedTablesTool,
hexGetRunStatusTool,
hexListCollectionsTool,
hexListDataConnectionsTool,
hexListGroupsTool,
hexListProjectsTool,
hexListUsersTool,
hexRunProjectTool,
hexUpdateProjectTool,
} from '@/tools/hex'
import { httpRequestTool, webhookRequestTool } from '@/tools/http'
import {
hubspotCreateCompanyTool,
@@ -2058,6 +2076,22 @@ export const tools: Record<string, ToolConfig> = {
grafana_create_folder: grafanaCreateFolderTool,
google_search: googleSearchTool,
guardrails_validate: guardrailsValidateTool,
hex_cancel_run: hexCancelRunTool,
hex_create_collection: hexCreateCollectionTool,
hex_get_collection: hexGetCollectionTool,
hex_get_data_connection: hexGetDataConnectionTool,
hex_get_group: hexGetGroupTool,
hex_get_project: hexGetProjectTool,
hex_get_project_runs: hexGetProjectRunsTool,
hex_get_queried_tables: hexGetQueriedTablesTool,
hex_get_run_status: hexGetRunStatusTool,
hex_list_collections: hexListCollectionsTool,
hex_list_data_connections: hexListDataConnectionsTool,
hex_list_groups: hexListGroupsTool,
hex_list_projects: hexListProjectsTool,
hex_list_users: hexListUsersTool,
hex_run_project: hexRunProjectTool,
hex_update_project: hexUpdateProjectTool,
jina_read_url: jinaReadUrlTool,
jina_search: jinaSearchTool,
linkup_search: linkupSearchTool,