Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-22 03:01:08 -05:00)

Compare commits: 22 commits
- 0d86ea01f0
- 04286fc16b
- c52f78c840
- e318bf2e65
- 4913799a27
- ccb4f5956d
- 2a6d4fcb96
- 115f04e989
- 42020c3ae2
- 34d92fae89
- a98463a486
- 765a481864
- a1400caea0
- 2fc2e12cb2
- 3fa4bb4c12
- 67aa4bb332
- 1b8d666c93
- 71942cb53c
- 15ace5e63f
- fdca73679d
- da46a387c9
- b7e377ec4b
.github/workflows/images.yml (vendored), 2 changes

@@ -146,7 +146,7 @@ jobs:
   create-ghcr-manifests:
     name: Create GHCR Manifests
-    runs-on: blacksmith-8vcpu-ubuntu-2404
+    runs-on: blacksmith-2vcpu-ubuntu-2404
     needs: [build-amd64, build-ghcr-arm64]
     if: github.ref == 'refs/heads/main'
     strategy:
.github/workflows/test-build.yml (vendored), 2 changes

@@ -110,7 +110,7 @@ jobs:
          RESEND_API_KEY: 'dummy_key_for_ci_only'
          AWS_REGION: 'us-west-2'
          ENCRYPTION_KEY: '7cf672e460e430c1fba707575c2b0e2ad5a99dddf9b7b7e3b5646e630861db1c' # dummy key for CI only
-        run: bun run build
+        run: bunx turbo run build --filter=sim

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
@@ -5819,3 +5819,15 @@ export function RedisIcon(props: SVGProps<SVGSVGElement>) {
     </svg>
   )
 }
+
+export function HexIcon(props: SVGProps<SVGSVGElement>) {
+  return (
+    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1450.3 600'>
+      <path
+        fill='#5F509D'
+        fillRule='evenodd'
+        d='m250.11,0v199.49h-50V0H0v600h200.11v-300.69h50v300.69h200.18V0h-200.18Zm249.9,0v600h450.29v-250.23h-200.2v149h-50v-199.46h250.2V0h-450.29Zm200.09,199.49v-99.49h50v99.49h-50Zm550.02,0V0h200.18v150l-100,100.09,100,100.09v249.82h-200.18v-300.69h-50v300.69h-200.11v-249.82l100.11-100.09-100.11-100.09V0h200.11v199.49h50Z'
+      />
+    </svg>
+  )
+}
@@ -54,6 +54,7 @@ import {
   GrafanaIcon,
   GrainIcon,
   GreptileIcon,
+  HexIcon,
   HubspotIcon,
   HuggingFaceIcon,
   HunterIOIcon,
@@ -196,6 +197,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   grafana: GrafanaIcon,
   grain: GrainIcon,
   greptile: GreptileIcon,
+  hex: HexIcon,
   hubspot: HubspotIcon,
   huggingface: HuggingFaceIcon,
   hunter: HunterIOIcon,
apps/docs/content/docs/en/tools/hex.mdx (new file, 459 lines)

@@ -0,0 +1,459 @@
---
title: Hex
description: Run and manage Hex projects
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="hex"
  color="#F5E6FF"
/>

{/* MANUAL-CONTENT-START:intro */}
[Hex](https://hex.tech/) is a collaborative platform for analytics and data science that allows you to build, run, and share interactive data projects and notebooks. Hex lets teams work together on data exploration, transformation, and visualization, making it easy to turn analysis into shareable insights.

With Hex, you can:

- **Create and run powerful notebooks**: Blend SQL, Python, and visualizations in a single, interactive workspace.
- **Collaborate and share**: Work together with teammates in real time and publish interactive data apps for broader audiences.
- **Automate and orchestrate workflows**: Schedule notebook runs, parameterize runs with inputs, and automate data tasks.
- **Visualize and communicate results**: Turn analysis results into dashboards or interactive apps that anyone can use.
- **Integrate with your data stack**: Connect easily to data warehouses, APIs, and other sources.

The Sim Hex integration allows your AI agents or workflows to:

- List, get, and manage Hex projects directly from Sim.
- Trigger and monitor notebook runs, check their statuses, or cancel them as part of larger automation flows.
- Retrieve run results and use them within Sim-powered processes and decision-making.
- Leverage Hex's interactive analytics capabilities right inside your automated Sim workflows.

Whether you're empowering analysts, automating reporting, or embedding actionable data into your processes, Hex and Sim provide a seamless way to operationalize analytics and bring data-driven insights to your team.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate Hex into your workflow. Run projects, check run status, manage collections and groups, list users, and view data connections. Requires a Hex API token.

## Tools
### `hex_cancel_run`
|
||||
|
||||
Cancel an active Hex project run.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `runId` | string | Yes | The UUID of the run to cancel |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the run was successfully cancelled |
|
||||
| `projectId` | string | Project UUID |
|
||||
| `runId` | string | Run UUID that was cancelled |
|
||||
|
||||
### `hex_create_collection`
|
||||
|
||||
Create a new collection in the Hex workspace to organize projects.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `name` | string | Yes | Name for the new collection |
|
||||
| `description` | string | No | Optional description for the collection |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Newly created collection UUID |
|
||||
| `name` | string | Collection name |
|
||||
| `description` | string | Collection description |
|
||||
| `creator` | object | Collection creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `id` | string | Creator UUID |
|
||||
|
||||
### `hex_get_collection`
|
||||
|
||||
Retrieve details for a specific Hex collection by its ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `collectionId` | string | Yes | The UUID of the collection |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Collection UUID |
|
||||
| `name` | string | Collection name |
|
||||
| `description` | string | Collection description |
|
||||
| `creator` | object | Collection creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `id` | string | Creator UUID |
|
||||
|
||||
### `hex_get_data_connection`
|
||||
|
||||
Retrieve details for a specific data connection including type, description, and configuration flags.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `dataConnectionId` | string | Yes | The UUID of the data connection |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Connection UUID |
|
||||
| `name` | string | Connection name |
|
||||
| `type` | string | Connection type \(e.g., snowflake, postgres, bigquery\) |
|
||||
| `description` | string | Connection description |
|
||||
| `connectViaSsh` | boolean | Whether SSH tunneling is enabled |
|
||||
| `includeMagic` | boolean | Whether Magic AI features are enabled |
|
||||
| `allowWritebackCells` | boolean | Whether writeback cells are allowed |
|
||||
|
||||
### `hex_get_group`
|
||||
|
||||
Retrieve details for a specific Hex group.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `groupId` | string | Yes | The UUID of the group |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Group UUID |
|
||||
| `name` | string | Group name |
|
||||
| `createdAt` | string | Creation timestamp |
|
||||
|
||||
### `hex_get_project`
|
||||
|
||||
Get metadata and details for a specific Hex project by its ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Project UUID |
|
||||
| `title` | string | Project title |
|
||||
| `description` | string | Project description |
|
||||
| `status` | object | Project status |
|
||||
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
|
||||
| `type` | string | Project type \(PROJECT or COMPONENT\) |
|
||||
| `creator` | object | Project creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| `owner` | object | Project owner |
|
||||
| ↳ `email` | string | Owner email |
|
||||
| `categories` | array | Project categories |
|
||||
| ↳ `name` | string | Category name |
|
||||
| ↳ `description` | string | Category description |
|
||||
| `lastEditedAt` | string | ISO 8601 last edited timestamp |
|
||||
| `lastPublishedAt` | string | ISO 8601 last published timestamp |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `archivedAt` | string | ISO 8601 archived timestamp |
|
||||
| `trashedAt` | string | ISO 8601 trashed timestamp |
|
||||
|
||||
### `hex_get_project_runs`
|
||||
|
||||
Retrieve API-triggered runs for a Hex project with optional filtering by status and pagination.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `limit` | number | No | Maximum number of runs to return \(1-100, default: 25\) |
|
||||
| `offset` | number | No | Offset for paginated results \(default: 0\) |
|
||||
| `statusFilter` | string | No | Filter by run status: PENDING, RUNNING, ERRORED, COMPLETED, KILLED, UNABLE_TO_ALLOCATE_KERNEL |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `runs` | array | List of project runs |
|
||||
| ↳ `projectId` | string | Project UUID |
|
||||
| ↳ `runId` | string | Run UUID |
|
||||
| ↳ `runUrl` | string | URL to view the run |
|
||||
| ↳ `status` | string | Run status \(PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL\) |
|
||||
| ↳ `startTime` | string | Run start time |
|
||||
| ↳ `endTime` | string | Run end time |
|
||||
| ↳ `elapsedTime` | number | Elapsed time in seconds |
|
||||
| ↳ `traceId` | string | Trace ID |
|
||||
| ↳ `projectVersion` | number | Project version number |
|
||||
| `total` | number | Total number of runs returned |
|
||||
| `traceId` | string | Top-level trace ID |
|
||||
|
||||
### `hex_get_queried_tables`
|
||||
|
||||
Return the warehouse tables queried by a Hex project, including data connection and table names.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `limit` | number | No | Maximum number of tables to return \(1-100\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `tables` | array | List of warehouse tables queried by the project |
|
||||
| ↳ `dataConnectionId` | string | Data connection UUID |
|
||||
| ↳ `dataConnectionName` | string | Data connection name |
|
||||
| ↳ `tableName` | string | Table name |
|
||||
| `total` | number | Total number of tables returned |
|
||||
|
||||
### `hex_get_run_status`
|
||||
|
||||
Check the status of a Hex project run by its run ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `runId` | string | Yes | The UUID of the run to check |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `projectId` | string | Project UUID |
|
||||
| `runId` | string | Run UUID |
|
||||
| `runUrl` | string | URL to view the run |
|
||||
| `status` | string | Run status \(PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL\) |
|
||||
| `startTime` | string | ISO 8601 run start time |
|
||||
| `endTime` | string | ISO 8601 run end time |
|
||||
| `elapsedTime` | number | Elapsed time in seconds |
|
||||
| `traceId` | string | Trace ID for debugging |
|
||||
| `projectVersion` | number | Project version number |
|
||||
|
||||
### `hex_list_collections`
|
||||
|
||||
List all collections in the Hex workspace.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of collections to return \(1-500, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: NAME |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `collections` | array | List of collections |
|
||||
| ↳ `id` | string | Collection UUID |
|
||||
| ↳ `name` | string | Collection name |
|
||||
| ↳ `description` | string | Collection description |
|
||||
| ↳ `creator` | object | Collection creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `id` | string | Creator UUID |
|
||||
| `total` | number | Total number of collections returned |
|
||||
|
||||
### `hex_list_data_connections`
|
||||
|
||||
List all data connections in the Hex workspace (e.g., Snowflake, PostgreSQL, BigQuery).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of connections to return \(1-500, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: CREATED_AT or NAME |
|
||||
| `sortDirection` | string | No | Sort direction: ASC or DESC |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `connections` | array | List of data connections |
|
||||
| ↳ `id` | string | Connection UUID |
|
||||
| ↳ `name` | string | Connection name |
|
||||
| ↳ `type` | string | Connection type \(e.g., athena, bigquery, databricks, postgres, redshift, snowflake\) |
|
||||
| ↳ `description` | string | Connection description |
|
||||
| ↳ `connectViaSsh` | boolean | Whether SSH tunneling is enabled |
|
||||
| ↳ `includeMagic` | boolean | Whether Magic AI features are enabled |
|
||||
| ↳ `allowWritebackCells` | boolean | Whether writeback cells are allowed |
|
||||
| `total` | number | Total number of connections returned |
|
||||
|
||||
### `hex_list_groups`
|
||||
|
||||
List all groups in the Hex workspace with optional sorting.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of groups to return \(1-500, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: CREATED_AT or NAME |
|
||||
| `sortDirection` | string | No | Sort direction: ASC or DESC |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `groups` | array | List of workspace groups |
|
||||
| ↳ `id` | string | Group UUID |
|
||||
| ↳ `name` | string | Group name |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| `total` | number | Total number of groups returned |
|
||||
|
||||
### `hex_list_projects`
|
||||
|
||||
List all projects in your Hex workspace with optional filtering by status.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of projects to return \(1-100\) |
|
||||
| `includeArchived` | boolean | No | Include archived projects in results |
|
||||
| `statusFilter` | string | No | Filter by status: PUBLISHED, DRAFT, or ALL |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `projects` | array | List of Hex projects |
|
||||
| ↳ `id` | string | Project UUID |
|
||||
| ↳ `title` | string | Project title |
|
||||
| ↳ `description` | string | Project description |
|
||||
| ↳ `status` | object | Project status |
|
||||
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
|
||||
| ↳ `type` | string | Project type \(PROJECT or COMPONENT\) |
|
||||
| ↳ `creator` | object | Project creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `owner` | object | Project owner |
|
||||
| ↳ `email` | string | Owner email |
|
||||
| ↳ `lastEditedAt` | string | Last edited timestamp |
|
||||
| ↳ `lastPublishedAt` | string | Last published timestamp |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| ↳ `archivedAt` | string | Archived timestamp |
|
||||
| `total` | number | Total number of projects returned |
|
||||
|
||||
### `hex_list_users`
|
||||
|
||||
List all users in the Hex workspace with optional filtering and sorting.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of users to return \(1-100, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: NAME or EMAIL |
|
||||
| `sortDirection` | string | No | Sort direction: ASC or DESC |
|
||||
| `groupId` | string | No | Filter users by group UUID |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `users` | array | List of workspace users |
|
||||
| ↳ `id` | string | User UUID |
|
||||
| ↳ `name` | string | User name |
|
||||
| ↳ `email` | string | User email |
|
||||
| ↳ `role` | string | User role \(ADMIN, MANAGER, EDITOR, EXPLORER, MEMBER, GUEST, EMBEDDED_USER, ANONYMOUS\) |
|
||||
| `total` | number | Total number of users returned |
|
||||
|
||||
### `hex_run_project`

Execute a published Hex project. Optionally pass input parameters and control caching behavior.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
| `projectId` | string | Yes | The UUID of the Hex project to run |
| `inputParams` | json | No | JSON object of input parameters for the project \(e.g., \{"date": "2024-01-01"\}\) |
| `dryRun` | boolean | No | If true, perform a dry run without executing the project |
| `updateCache` | boolean | No | \(Deprecated\) If true, update the cached results after execution |
| `updatePublishedResults` | boolean | No | If true, update the published app results after execution |
| `useCachedSqlResults` | boolean | No | If true, use cached SQL results instead of re-running queries |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `projectId` | string | Project UUID |
| `runId` | string | Run UUID |
| `runUrl` | string | URL to view the run |
| `runStatusUrl` | string | URL to check run status |
| `traceId` | string | Trace ID for debugging |
| `projectVersion` | number | Project version number |
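For orientation, here is a minimal sketch of how these inputs and outputs map onto direct Hex API calls when used outside of Sim, combined with polling the run status described in `hex_get_run_status`. The base URL, endpoint paths, and response field names are illustrative assumptions, not taken from this diff; consult the Hex API reference for the exact routes.

```typescript
// Hypothetical endpoint paths and base URL; the auth header and terminal
// statuses mirror the tool documentation above.
const HEX_BASE = 'https://app.hex.tech/api/v1'

interface HexRunResponse {
  runId: string
  runUrl: string
  runStatusUrl: string
  traceId: string
  projectVersion: number
}

async function runHexProject(apiKey: string, projectId: string, inputParams?: Record<string, unknown>) {
  // Kick off a run, mirroring the hex_run_project inputs documented above.
  const res = await fetch(`${HEX_BASE}/project/${projectId}/run`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({ inputParams, useCachedSqlResults: true }),
  })
  if (!res.ok) throw new Error(`Hex run failed: ${res.status}`)
  return (await res.json()) as HexRunResponse
}

async function waitForRun(apiKey: string, projectId: string, runId: string) {
  // Poll until the run reaches a terminal state (see hex_get_run_status).
  const terminal = new Set(['COMPLETED', 'ERRORED', 'KILLED', 'UNABLE_TO_ALLOCATE_KERNEL'])
  while (true) {
    const res = await fetch(`${HEX_BASE}/project/${projectId}/run/${runId}`, {
      headers: { Authorization: `Bearer ${apiKey}` },
    })
    const status = (await res.json()) as { status: string; elapsedTime?: number }
    if (terminal.has(status.status)) return status
    await new Promise((r) => setTimeout(r, 5_000)) // wait 5 seconds between polls
  }
}
```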
### `hex_update_project`
|
||||
|
||||
Update a Hex project status label (e.g., endorsement or custom workspace statuses).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project to update |
|
||||
| `status` | string | Yes | New project status name \(custom workspace status label\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Project UUID |
|
||||
| `title` | string | Project title |
|
||||
| `description` | string | Project description |
|
||||
| `status` | object | Updated project status |
|
||||
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
|
||||
| `type` | string | Project type \(PROJECT or COMPONENT\) |
|
||||
| `creator` | object | Project creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| `owner` | object | Project owner |
|
||||
| ↳ `email` | string | Owner email |
|
||||
| `categories` | array | Project categories |
|
||||
| ↳ `name` | string | Category name |
|
||||
| ↳ `description` | string | Category description |
|
||||
| `lastEditedAt` | string | Last edited timestamp |
|
||||
| `lastPublishedAt` | string | Last published timestamp |
|
||||
| `createdAt` | string | Creation timestamp |
|
||||
| `archivedAt` | string | Archived timestamp |
|
||||
| `trashedAt` | string | Trashed timestamp |
|
||||
|
||||
@@ -116,7 +116,7 @@ Create a new service request in Jira Service Management
 | `summary` | string | Yes | Summary/title for the service request |
 | `description` | string | No | Description for the service request |
 | `raiseOnBehalfOf` | string | No | Account ID of customer to raise request on behalf of |
-| `requestFieldValues` | json | No | Custom field values as key-value pairs \(overrides summary/description if provided\) |
+| `requestFieldValues` | json | No | Request field values as key-value pairs \(overrides summary/description if provided\) |
 | `requestParticipants` | string | No | Comma-separated account IDs to add as request participants |
 | `channel` | string | No | Channel the request originates from \(e.g., portal, email\) |
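For example, a `requestFieldValues` object might look like the following; the custom field ID is hypothetical, since field IDs differ per Jira Service Management instance, and any summary/description given here overrides the top-level parameters.

```typescript
// Illustrative payload only; customfield_10042 is a made-up field ID.
const requestFieldValues = {
  summary: 'Printer on floor 3 is offline',
  description: 'Reported by facilities; started after the firmware update.',
  customfield_10042: 'floor-3', // hypothetical custom field
}
```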
@@ -49,6 +49,7 @@
   "grafana",
   "grain",
   "greptile",
+  "hex",
   "hubspot",
   "huggingface",
   "hunter",
@@ -1,6 +1,6 @@
 ---
 title: Slack
-description: Send, update, delete messages, add reactions in Slack or trigger workflows from Slack events
+description: Send, update, delete messages, send ephemeral messages, add reactions in Slack or trigger workflows from Slack events
 ---

 import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -59,7 +59,7 @@ If you encounter issues with the Slack integration, contact us at [help@sim.ai](

 ## Usage Instructions

-Integrate Slack into the workflow. Can send, update, and delete messages, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.
+Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.
@@ -80,6 +80,7 @@ Send messages to Slack channels or direct messages. Supports Slack mrkdwn format
 | `dmUserId` | string | No | Slack user ID for direct messages \(e.g., U1234567890\) |
 | `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) |
 | `threadTs` | string | No | Thread timestamp to reply to \(creates thread reply\) |
+| `blocks` | json | No | Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text. |
 | `files` | file[] | No | Files to attach to the message |

 #### Output
@@ -146,6 +147,29 @@ Send messages to Slack channels or direct messages. Supports Slack mrkdwn format
 | `fileCount` | number | Number of files uploaded \(when files are attached\) |
 | `files` | file[] | Files attached to the message |

+### `slack_ephemeral_message`
+
+Send an ephemeral message visible only to a specific user in a channel. Optionally reply in a thread. The message does not persist across sessions.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `authMethod` | string | No | Authentication method: oauth or bot_token |
+| `botToken` | string | No | Bot token for Custom Bot |
+| `channel` | string | Yes | Slack channel ID \(e.g., C1234567890\) |
+| `user` | string | Yes | User ID who will see the ephemeral message \(e.g., U1234567890\). Must be a member of the channel. |
+| `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) |
+| `threadTs` | string | No | Thread timestamp to reply in. When provided, the ephemeral message appears as a thread reply. |
+| `blocks` | json | No | Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text. |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `messageTs` | string | Timestamp of the ephemeral message \(cannot be used with chat.update\) |
+| `channel` | string | Channel ID where the ephemeral message was sent |
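As a reference for what this tool wraps, here is a minimal sketch of the underlying `chat.postEphemeral` call made with a bot token; the token, channel, and user IDs are placeholders, and error handling is reduced to the essentials.

```typescript
// Minimal sketch of the Slack Web API call that backs slack_ephemeral_message.
async function sendEphemeral(botToken: string, channel: string, user: string, text: string) {
  const res = await fetch('https://slack.com/api/chat.postEphemeral', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${botToken}`,
    },
    body: JSON.stringify({
      channel, // e.g. 'C1234567890'
      user, // e.g. 'U1234567890'; must be a member of the channel
      text, // fallback notification text when blocks are provided
      blocks: [{ type: 'section', text: { type: 'mrkdwn', text } }],
    }),
  })
  const data = (await res.json()) as { ok: boolean; message_ts?: string; error?: string }
  if (!data.ok) throw new Error(data.error ?? 'Failed to send ephemeral message')
  return { messageTs: data.message_ts, channel }
}
```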
### `slack_canvas`

Create and share Slack canvases in channels. Canvases are collaborative documents within Slack.

@@ -682,6 +706,7 @@ Update a message previously sent by the bot in Slack
 | `channel` | string | Yes | Channel ID where the message was posted \(e.g., C1234567890\) |
 | `timestamp` | string | Yes | Timestamp of the message to update \(e.g., 1405894322.002768\) |
 | `text` | string | Yes | New message text \(supports Slack mrkdwn formatting\) |
+| `blocks` | json | No | Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text. |

 #### Output
apps/sim/app/(auth)/oauth/consent/page.tsx (new file, 274 lines)

@@ -0,0 +1,274 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { ArrowLeftRight } from 'lucide-react'
|
||||
import Image from 'next/image'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { signOut, useSession } from '@/lib/auth/auth-client'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
|
||||
const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
openid: 'Verify your identity',
|
||||
profile: 'Access your basic profile information',
|
||||
email: 'View your email address',
|
||||
offline_access: 'Maintain access when you are not actively using the app',
|
||||
'mcp:tools': 'Use Sim workflows and tools on your behalf',
|
||||
} as const
|
||||
|
||||
interface ClientInfo {
|
||||
clientId: string
|
||||
name: string
|
||||
icon: string
|
||||
}
|
||||
|
||||
export default function OAuthConsentPage() {
|
||||
const router = useRouter()
|
||||
const searchParams = useSearchParams()
|
||||
const { data: session } = useSession()
|
||||
const consentCode = searchParams.get('consent_code')
|
||||
const clientId = searchParams.get('client_id')
|
||||
const scope = searchParams.get('scope')
|
||||
|
||||
const [clientInfo, setClientInfo] = useState<ClientInfo | null>(null)
|
||||
const [loading, setLoading] = useState(true)
|
||||
const [submitting, setSubmitting] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
|
||||
const scopes = scope?.split(' ').filter(Boolean) ?? []
|
||||
|
||||
useEffect(() => {
|
||||
if (!clientId) {
|
||||
setLoading(false)
|
||||
setError('The authorization request is missing a required client identifier.')
|
||||
return
|
||||
}
|
||||
|
||||
fetch(`/api/auth/oauth2/client/${encodeURIComponent(clientId)}`, { credentials: 'include' })
|
||||
.then(async (res) => {
|
||||
if (!res.ok) return
|
||||
const data = await res.json()
|
||||
setClientInfo(data)
|
||||
})
|
||||
.catch(() => {})
|
||||
.finally(() => {
|
||||
setLoading(false)
|
||||
})
|
||||
}, [clientId])
|
||||
|
||||
const handleConsent = useCallback(
|
||||
async (accept: boolean) => {
|
||||
if (!consentCode) {
|
||||
setError('The authorization request is missing a required consent code.')
|
||||
return
|
||||
}
|
||||
|
||||
setSubmitting(true)
|
||||
try {
|
||||
const res = await fetch('/api/auth/oauth2/consent', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
credentials: 'include',
|
||||
body: JSON.stringify({ accept, consent_code: consentCode }),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const body = await res.json().catch(() => null)
|
||||
setError(
|
||||
(body as Record<string, string> | null)?.message ??
|
||||
'The consent request could not be processed. Please try again.'
|
||||
)
|
||||
setSubmitting(false)
|
||||
return
|
||||
}
|
||||
|
||||
const data = (await res.json()) as { redirectURI?: string }
|
||||
if (data.redirectURI) {
|
||||
window.location.href = data.redirectURI
|
||||
} else {
|
||||
setError('The server did not return a redirect. Please try again.')
|
||||
setSubmitting(false)
|
||||
}
|
||||
} catch {
|
||||
setError('Something went wrong. Please try again.')
|
||||
setSubmitting(false)
|
||||
}
|
||||
},
|
||||
[consentCode]
|
||||
)
|
||||
|
||||
const handleSwitchAccount = useCallback(async () => {
|
||||
if (!consentCode) return
|
||||
|
||||
const res = await fetch(`/api/auth/oauth2/authorize-params?consent_code=${consentCode}`, {
|
||||
credentials: 'include',
|
||||
})
|
||||
if (!res.ok) {
|
||||
setError('Unable to switch accounts. Please re-initiate the connection.')
|
||||
return
|
||||
}
|
||||
|
||||
const params = (await res.json()) as Record<string, string | null>
|
||||
const authorizeUrl = new URL('/api/auth/oauth2/authorize', window.location.origin)
|
||||
for (const [key, value] of Object.entries(params)) {
|
||||
if (value) authorizeUrl.searchParams.set(key, value)
|
||||
}
|
||||
|
||||
await signOut({
|
||||
fetchOptions: {
|
||||
onSuccess: () => {
|
||||
window.location.href = authorizeUrl.toString()
|
||||
},
|
||||
},
|
||||
})
|
||||
}, [consentCode])
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div className='flex flex-col items-center justify-center'>
|
||||
<div className='space-y-1 text-center'>
|
||||
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
|
||||
Authorize Application
|
||||
</h1>
|
||||
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
|
||||
Loading application details...
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className='flex flex-col items-center justify-center'>
|
||||
<div className='space-y-1 text-center'>
|
||||
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
|
||||
Authorization Error
|
||||
</h1>
|
||||
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
|
||||
{error}
|
||||
</p>
|
||||
</div>
|
||||
<div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
|
||||
<BrandedButton onClick={() => router.push('/')}>Return to Home</BrandedButton>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const clientName = clientInfo?.name ?? clientId
|
||||
|
||||
return (
|
||||
<div className='flex flex-col items-center justify-center'>
|
||||
<div className='mb-6 flex items-center gap-4'>
|
||||
{clientInfo?.icon ? (
|
||||
<img
|
||||
src={clientInfo.icon}
|
||||
alt={clientName ?? 'Application'}
|
||||
width={48}
|
||||
height={48}
|
||||
className='rounded-[10px]'
|
||||
/>
|
||||
) : (
|
||||
<div className='flex h-12 w-12 items-center justify-center rounded-[10px] bg-muted font-medium text-[18px] text-muted-foreground'>
|
||||
{(clientName ?? '?').charAt(0).toUpperCase()}
|
||||
</div>
|
||||
)}
|
||||
<ArrowLeftRight className='h-5 w-5 text-muted-foreground' />
|
||||
<Image
|
||||
src='/new/logo/colorized-bg.svg'
|
||||
alt='Sim'
|
||||
width={48}
|
||||
height={48}
|
||||
className='rounded-[10px]'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='space-y-1 text-center'>
|
||||
<h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
|
||||
Authorize Application
|
||||
</h1>
|
||||
<p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
|
||||
<span className='font-medium text-foreground'>{clientName}</span> is requesting access to
|
||||
your account
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{session?.user && (
|
||||
<div
|
||||
className={`${inter.className} mt-5 flex items-center gap-3 rounded-lg border px-4 py-3`}
|
||||
>
|
||||
{session.user.image ? (
|
||||
<Image
|
||||
src={session.user.image}
|
||||
alt={session.user.name ?? 'User'}
|
||||
width={32}
|
||||
height={32}
|
||||
className='rounded-full'
|
||||
unoptimized
|
||||
/>
|
||||
) : (
|
||||
<div className='flex h-8 w-8 items-center justify-center rounded-full bg-muted font-medium text-[13px] text-muted-foreground'>
|
||||
{(session.user.name ?? session.user.email ?? '?').charAt(0).toUpperCase()}
|
||||
</div>
|
||||
)}
|
||||
<div className='min-w-0'>
|
||||
{session.user.name && (
|
||||
<p className='truncate font-medium text-[14px]'>{session.user.name}</p>
|
||||
)}
|
||||
<p className='truncate text-[13px] text-muted-foreground'>{session.user.email}</p>
|
||||
</div>
|
||||
<button
|
||||
type='button'
|
||||
onClick={handleSwitchAccount}
|
||||
className='ml-auto text-[13px] text-muted-foreground underline-offset-2 transition-colors hover:text-foreground hover:underline'
|
||||
>
|
||||
Switch
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{scopes.length > 0 && (
|
||||
<div className={`${inter.className} mt-5 w-full max-w-[410px]`}>
|
||||
<div className='rounded-lg border p-4'>
|
||||
<p className='mb-3 font-medium text-[14px]'>This will allow the application to:</p>
|
||||
<ul className='space-y-2'>
|
||||
{scopes.map((s) => (
|
||||
<li
|
||||
key={s}
|
||||
className='flex items-start gap-2 font-normal text-[13px] text-muted-foreground'
|
||||
>
|
||||
<span className='mt-0.5 text-green-500'>✓</span>
|
||||
<span>{SCOPE_DESCRIPTIONS[s] ?? s}</span>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className={`${inter.className} mt-6 flex w-full max-w-[410px] gap-3`}>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='md'
|
||||
className='px-6 py-2'
|
||||
disabled={submitting}
|
||||
onClick={() => handleConsent(false)}
|
||||
>
|
||||
Deny
|
||||
</Button>
|
||||
<BrandedButton
|
||||
fullWidth
|
||||
showArrow={false}
|
||||
loading={submitting}
|
||||
loadingText='Authorizing'
|
||||
onClick={() => handleConsent(true)}
|
||||
>
|
||||
Allow
|
||||
</BrandedButton>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,6 +1,6 @@
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextResponse } from 'next/server'
 import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'

-export async function GET(request: NextRequest): Promise<NextResponse> {
-  return createMcpAuthorizationServerMetadataResponse(request)
+export async function GET(): Promise<NextResponse> {
+  return createMcpAuthorizationServerMetadataResponse()
 }
@@ -1,6 +1,6 @@
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextResponse } from 'next/server'
 import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'

-export async function GET(request: NextRequest): Promise<NextResponse> {
-  return createMcpAuthorizationServerMetadataResponse(request)
+export async function GET(): Promise<NextResponse> {
+  return createMcpAuthorizationServerMetadataResponse()
 }
@@ -1,6 +1,6 @@
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextResponse } from 'next/server'
 import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'

-export async function GET(request: NextRequest): Promise<NextResponse> {
-  return createMcpAuthorizationServerMetadataResponse(request)
+export async function GET(): Promise<NextResponse> {
+  return createMcpAuthorizationServerMetadataResponse()
 }
@@ -1,6 +1,6 @@
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextResponse } from 'next/server'
 import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'

-export async function GET(request: NextRequest): Promise<NextResponse> {
-  return createMcpProtectedResourceMetadataResponse(request)
+export async function GET(): Promise<NextResponse> {
+  return createMcpProtectedResourceMetadataResponse()
 }
@@ -1,6 +1,6 @@
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextResponse } from 'next/server'
 import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'

-export async function GET(request: NextRequest): Promise<NextResponse> {
-  return createMcpProtectedResourceMetadataResponse(request)
+export async function GET(): Promise<NextResponse> {
+  return createMcpProtectedResourceMetadataResponse()
 }
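These discovery routes now build their metadata from the server's configured base URL rather than from the incoming request. For context, here is a hedged sketch of the kind of RFC 8414 authorization-server metadata such a helper typically returns; the exact fields and endpoint paths produced by `createMcpAuthorizationServerMetadataResponse` are assumptions, not shown in this diff.

```typescript
import { NextResponse } from 'next/server'
// getBaseUrl is imported elsewhere in this changeset; the metadata shape below is illustrative.
import { getBaseUrl } from '@/lib/core/utils/urls'

export function exampleAuthorizationServerMetadata(): NextResponse {
  const issuer = getBaseUrl().replace(/\/$/, '')
  // Standard RFC 8414 fields; the real helper may expose different endpoints or extras.
  return NextResponse.json({
    issuer,
    authorization_endpoint: `${issuer}/api/auth/oauth2/authorize`,
    token_endpoint: `${issuer}/api/auth/oauth2/token`,
    scopes_supported: ['openid', 'profile', 'email', 'offline_access', 'mcp:tools'],
    response_types_supported: ['code'],
    grant_types_supported: ['authorization_code', 'refresh_token'],
    code_challenge_methods_supported: ['S256'],
  })
}
```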
@@ -23,7 +23,8 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
     pathname.startsWith('/chat') ||
     pathname.startsWith('/studio') ||
     pathname.startsWith('/resume') ||
-    pathname.startsWith('/form')
+    pathname.startsWith('/form') ||
+    pathname.startsWith('/oauth')

   return (
     <NextThemesProvider
apps/sim/app/api/auth/oauth2/authorize-params/route.ts (new file, 59 lines)

@@ -0,0 +1,59 @@
|
||||
import { db } from '@sim/db'
|
||||
import { verification } from '@sim/db/schema'
|
||||
import { and, eq, gt } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
|
||||
/**
|
||||
* Returns the original OAuth authorize parameters stored in the verification record
|
||||
* for a given consent code. Used by the consent page to reconstruct the authorize URL
|
||||
* when switching accounts.
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
const session = await getSession()
|
||||
if (!session?.user) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const consentCode = request.nextUrl.searchParams.get('consent_code')
|
||||
if (!consentCode) {
|
||||
return NextResponse.json({ error: 'consent_code is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [record] = await db
|
||||
.select({ value: verification.value })
|
||||
.from(verification)
|
||||
.where(and(eq(verification.identifier, consentCode), gt(verification.expiresAt, new Date())))
|
||||
.limit(1)
|
||||
|
||||
if (!record) {
|
||||
return NextResponse.json({ error: 'Invalid or expired consent code' }, { status: 404 })
|
||||
}
|
||||
|
||||
const data = JSON.parse(record.value) as {
|
||||
clientId: string
|
||||
redirectURI: string
|
||||
scope: string[]
|
||||
userId: string
|
||||
codeChallenge: string
|
||||
codeChallengeMethod: string
|
||||
state: string | null
|
||||
nonce: string | null
|
||||
}
|
||||
|
||||
if (data.userId !== session.user.id) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
client_id: data.clientId,
|
||||
redirect_uri: data.redirectURI,
|
||||
scope: data.scope.join(' '),
|
||||
code_challenge: data.codeChallenge,
|
||||
code_challenge_method: data.codeChallengeMethod,
|
||||
state: data.state,
|
||||
nonce: data.nonce,
|
||||
response_type: 'code',
|
||||
})
|
||||
}
|
||||
@@ -1,6 +1,6 @@
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextResponse } from 'next/server'
 import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'

-export async function GET(request: NextRequest): Promise<NextResponse> {
-  return createMcpAuthorizationServerMetadataResponse(request)
+export async function GET(): Promise<NextResponse> {
+  return createMcpAuthorizationServerMetadataResponse()
 }
@@ -1,6 +1,6 @@
-import type { NextRequest, NextResponse } from 'next/server'
+import type { NextResponse } from 'next/server'
 import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'

-export async function GET(request: NextRequest): Promise<NextResponse> {
-  return createMcpProtectedResourceMetadataResponse(request)
+export async function GET(): Promise<NextResponse> {
+  return createMcpProtectedResourceMetadataResponse()
 }
@@ -16,6 +16,7 @@ import { userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { validateOAuthAccessToken } from '@/lib/auth/oauth-token'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import {
|
||||
ORCHESTRATION_TIMEOUT_MS,
|
||||
@@ -31,6 +32,7 @@ import {
|
||||
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import {
|
||||
authorizeWorkflowByWorkspacePermission,
|
||||
resolveWorkflowIdForUser,
|
||||
@@ -384,12 +386,14 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
inputSchema: tool.inputSchema,
|
||||
...(tool.annotations && { annotations: tool.annotations }),
|
||||
}))
|
||||
|
||||
const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
inputSchema: tool.inputSchema,
|
||||
...(tool.annotations && { annotations: tool.annotations }),
|
||||
}))
|
||||
|
||||
const result: ListToolsResult = {
|
||||
@@ -402,27 +406,51 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request, extra) => {
|
||||
const headers = (extra.requestInfo?.headers || {}) as HeaderMap
|
||||
const apiKeyHeader = readHeader(headers, 'x-api-key')
|
||||
const authorizationHeader = readHeader(headers, 'authorization')
|
||||
|
||||
if (!apiKeyHeader) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: 'AUTHENTICATION ERROR: No Copilot API key provided. The user must set their Copilot API key in the x-api-key header. They can generate one in the Sim app under Settings → Copilot. Do NOT retry — this will fail until the key is configured.',
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
let authResult: CopilotKeyAuthResult = { success: false }
|
||||
|
||||
if (authorizationHeader?.startsWith('Bearer ')) {
|
||||
const token = authorizationHeader.slice(7)
|
||||
const oauthResult = await validateOAuthAccessToken(token)
|
||||
if (oauthResult.success && oauthResult.userId) {
|
||||
if (!oauthResult.scopes?.includes('mcp:tools')) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: 'AUTHENTICATION ERROR: OAuth token is missing the required "mcp:tools" scope. Re-authorize with the correct scopes.',
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
authResult = { success: true, userId: oauthResult.userId }
|
||||
} else {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: `AUTHENTICATION ERROR: ${oauthResult.error ?? 'Invalid OAuth access token'} Do NOT retry — re-authorize via OAuth.`,
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
} else if (apiKeyHeader) {
|
||||
authResult = await authenticateCopilotApiKey(apiKeyHeader)
|
||||
}
|
||||
|
||||
const authResult = await authenticateCopilotApiKey(apiKeyHeader)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn('MCP copilot key auth failed', { method: request.method })
|
||||
const errorMsg = apiKeyHeader
|
||||
? `AUTHENTICATION ERROR: ${authResult.error} Do NOT retry — this will fail until the user fixes their Copilot API key.`
|
||||
: 'AUTHENTICATION ERROR: No authentication provided. Provide a Bearer token (OAuth 2.1) or an x-api-key header. Generate a Copilot API key in Settings → Copilot.'
|
||||
logger.warn('MCP copilot auth failed', { method: request.method })
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: `AUTHENTICATION ERROR: ${authResult.error} Do NOT retry — this will fail until the user fixes their Copilot API key.`,
|
||||
text: errorMsg,
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
@@ -512,6 +540,20 @@ export async function GET() {
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const hasAuth = request.headers.has('authorization') || request.headers.has('x-api-key')
|
||||
|
||||
if (!hasAuth) {
|
||||
const origin = getBaseUrl().replace(/\/$/, '')
|
||||
const resourceMetadataUrl = `${origin}/.well-known/oauth-protected-resource/api/mcp/copilot`
|
||||
return new NextResponse(JSON.stringify({ error: 'unauthorized' }), {
|
||||
status: 401,
|
||||
headers: {
|
||||
'WWW-Authenticate': `Bearer resource_metadata="${resourceMetadataUrl}", scope="mcp:tools"`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
let parsedBody: unknown
|
||||
|
||||
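For callers of this endpoint, here is a hedged sketch of how a client could authenticate against `/api/mcp/copilot` with either credential and react to the 401 challenge added above; the request payload is left opaque because it depends on the MCP transport, and this is not a documented client API.

```typescript
// Illustrative client-side sketch only; the endpoint path and header names are
// taken from this diff, the rest is an assumption.
async function callCopilotMcp(baseUrl: string, auth: { bearer?: string; apiKey?: string }, body: unknown) {
  const headers: Record<string, string> = { 'Content-Type': 'application/json' }
  if (auth.bearer) headers.Authorization = `Bearer ${auth.bearer}` // OAuth token needs the mcp:tools scope
  else if (auth.apiKey) headers['x-api-key'] = auth.apiKey // Copilot API key from Settings → Copilot

  const res = await fetch(`${baseUrl}/api/mcp/copilot`, {
    method: 'POST',
    headers,
    body: JSON.stringify(body),
  })

  if (res.status === 401) {
    // The server advertises where to find OAuth protected-resource metadata.
    const challenge = res.headers.get('WWW-Authenticate')
    throw new Error(`Unauthorized; discovery hint: ${challenge ?? 'none'}`)
  }
  return res.json()
}
```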
@@ -532,6 +574,19 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
export async function OPTIONS() {
|
||||
return new NextResponse(null, {
|
||||
status: 204,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET, POST, OPTIONS, DELETE',
|
||||
'Access-Control-Allow-Headers':
|
||||
'Content-Type, Authorization, X-API-Key, X-Requested-With, Accept',
|
||||
'Access-Control-Max-Age': '86400',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export async function DELETE(request: NextRequest) {
|
||||
void request
|
||||
return NextResponse.json(createError(0, -32000, 'Method not allowed.'), { status: 405 })
|
||||
|
||||
apps/sim/app/api/tools/slack/send-ephemeral/route.ts (new file, 96 lines)

@@ -0,0 +1,96 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SlackSendEphemeralAPI')
|
||||
|
||||
const SlackSendEphemeralSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
channel: z.string().min(1, 'Channel ID is required'),
|
||||
user: z.string().min(1, 'User ID is required'),
|
||||
text: z.string().min(1, 'Message text is required'),
|
||||
thread_ts: z.string().optional().nullable(),
|
||||
blocks: z.array(z.record(z.unknown())).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Slack ephemeral send attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated Slack ephemeral send request via ${authResult.authType}`,
|
||||
{ userId: authResult.userId }
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = SlackSendEphemeralSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Sending ephemeral message`, {
|
||||
channel: validatedData.channel,
|
||||
user: validatedData.user,
|
||||
threadTs: validatedData.thread_ts ?? undefined,
|
||||
})
|
||||
|
||||
const response = await fetch('https://slack.com/api/chat.postEphemeral', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
channel: validatedData.channel,
|
||||
user: validatedData.user,
|
||||
text: validatedData.text,
|
||||
...(validatedData.thread_ts && { thread_ts: validatedData.thread_ts }),
|
||||
...(validatedData.blocks &&
|
||||
validatedData.blocks.length > 0 && { blocks: validatedData.blocks }),
|
||||
}),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
logger.error(`[${requestId}] Slack API error:`, data.error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: data.error || 'Failed to send ephemeral message' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Ephemeral message sent successfully`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
messageTs: data.message_ts,
|
||||
channel: validatedData.channel,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error sending ephemeral message:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
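A sketch of the request/response contract of this route as defined by `SlackSendEphemeralSchema`; the token and IDs are placeholders, and the headers needed to satisfy `checkInternalAuth` are deployment-specific and omitted here.

```typescript
// Illustrative caller of the internal send-ephemeral route.
async function callSendEphemeralRoute(baseUrl: string) {
  const payload = {
    accessToken: 'xoxb-placeholder-bot-token',
    channel: 'C1234567890',
    user: 'U1234567890', // only this user sees the message
    text: 'Heads up: your export finished.',
    thread_ts: null,
    blocks: null,
  }

  const res = await fetch(`${baseUrl}/api/tools/slack/send-ephemeral`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  })

  return (await res.json()) as {
    success: boolean
    output?: { messageTs: string; channel: string }
    error?: string
  }
}
```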
@@ -17,6 +17,7 @@ const SlackSendMessageSchema = z
|
||||
userId: z.string().optional().nullable(),
|
||||
text: z.string().min(1, 'Message text is required'),
|
||||
thread_ts: z.string().optional().nullable(),
|
||||
blocks: z.array(z.record(z.unknown())).optional().nullable(),
|
||||
files: RawFileInputArraySchema.optional().nullable(),
|
||||
})
|
||||
.refine((data) => data.channel || data.userId, {
|
||||
@@ -63,6 +64,7 @@ export async function POST(request: NextRequest) {
|
||||
userId: validatedData.userId ?? undefined,
|
||||
text: validatedData.text,
|
||||
threadTs: validatedData.thread_ts ?? undefined,
|
||||
blocks: validatedData.blocks ?? undefined,
|
||||
files: validatedData.files ?? undefined,
|
||||
},
|
||||
requestId,
|
||||
|
||||
@@ -13,6 +13,7 @@ const SlackUpdateMessageSchema = z.object({
|
||||
channel: z.string().min(1, 'Channel is required'),
|
||||
timestamp: z.string().min(1, 'Message timestamp is required'),
|
||||
text: z.string().min(1, 'Message text is required'),
|
||||
blocks: z.array(z.record(z.unknown())).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
@@ -57,6 +58,8 @@ export async function POST(request: NextRequest) {
|
||||
channel: validatedData.channel,
|
||||
ts: validatedData.timestamp,
|
||||
text: validatedData.text,
|
||||
...(validatedData.blocks &&
|
||||
validatedData.blocks.length > 0 && { blocks: validatedData.blocks }),
|
||||
}),
|
||||
})
|
||||
|
||||
|
||||
@@ -11,7 +11,8 @@ export async function postSlackMessage(
|
||||
accessToken: string,
|
||||
channel: string,
|
||||
text: string,
|
||||
threadTs?: string | null
|
||||
threadTs?: string | null,
|
||||
blocks?: unknown[] | null
|
||||
): Promise<{ ok: boolean; ts?: string; channel?: string; message?: any; error?: string }> {
|
||||
const response = await fetch('https://slack.com/api/chat.postMessage', {
|
||||
method: 'POST',
|
||||
@@ -23,6 +24,7 @@ export async function postSlackMessage(
|
||||
channel,
|
||||
text,
|
||||
...(threadTs && { thread_ts: threadTs }),
|
||||
...(blocks && blocks.length > 0 && { blocks }),
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -220,6 +222,7 @@ export interface SlackMessageParams {
|
||||
userId?: string
|
||||
text: string
|
||||
threadTs?: string | null
|
||||
blocks?: unknown[] | null
|
||||
files?: any[] | null
|
||||
}
|
||||
|
||||
@@ -242,7 +245,7 @@ export async function sendSlackMessage(
|
||||
}
|
||||
error?: string
|
||||
}> {
|
||||
const { accessToken, text, threadTs, files } = params
|
||||
const { accessToken, text, threadTs, blocks, files } = params
|
||||
let { channel } = params
|
||||
|
||||
if (!channel && params.userId) {
|
||||
@@ -258,7 +261,7 @@ export async function sendSlackMessage(
|
||||
if (!files || files.length === 0) {
|
||||
logger.info(`[${requestId}] No files, using chat.postMessage`)
|
||||
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs, blocks)
|
||||
|
||||
if (!data.ok) {
|
||||
logger.error(`[${requestId}] Slack API error:`, data.error)
|
||||
@@ -282,7 +285,7 @@ export async function sendSlackMessage(
|
||||
if (fileIds.length === 0) {
|
||||
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
|
||||
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs, blocks)
|
||||
|
||||
if (!data.ok) {
|
||||
return { success: false, error: data.error || 'Failed to send message' }
|
||||
|
||||
@@ -165,7 +165,7 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
|
||||
const modelName =
|
||||
provider === 'anthropic' ? 'anthropic/claude-3-7-sonnet-latest' : 'openai/gpt-4.1'
|
||||
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
|
||||
|
||||
try {
|
||||
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })
|
||||
|
||||
@@ -101,7 +101,7 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
try {
|
||||
const modelName =
|
||||
provider === 'anthropic' ? 'anthropic/claude-3-7-sonnet-latest' : 'openai/gpt-4.1'
|
||||
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
|
||||
|
||||
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })
|
||||
|
||||
|
||||
@@ -208,9 +208,10 @@ export default function Logs() {
|
||||
|
||||
const selectedLog = useMemo(() => {
|
||||
if (!selectedLogFromList) return null
|
||||
if (!activeLogQuery.data || isPreviewOpen) return selectedLogFromList
|
||||
if (!activeLogQuery.data || isPreviewOpen || activeLogQuery.isPlaceholderData)
|
||||
return selectedLogFromList
|
||||
return { ...selectedLogFromList, ...activeLogQuery.data }
|
||||
}, [selectedLogFromList, activeLogQuery.data, isPreviewOpen])
|
||||
}, [selectedLogFromList, activeLogQuery.data, activeLogQuery.isPlaceholderData, isPreviewOpen])
|
||||
|
||||
const handleLogHover = useCallback(
|
||||
(log: WorkflowLog) => {
|
||||
@@ -650,7 +651,7 @@ export default function Logs() {
|
||||
hasActiveFilters={filtersActive}
|
||||
/>
|
||||
|
||||
{isPreviewOpen && activeLogQuery.data?.executionId && (
|
||||
{isPreviewOpen && !activeLogQuery.isPlaceholderData && activeLogQuery.data?.executionId && (
|
||||
<ExecutionSnapshot
|
||||
executionId={activeLogQuery.data.executionId}
|
||||
traceSpans={activeLogQuery.data.executionData?.traceSpans}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useMemo } from 'react'
|
||||
import { hasWorkflowChanged } from '@/lib/workflows/comparison'
|
||||
import { mergeSubblockStateWithValues } from '@/lib/workflows/subblocks'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
@@ -42,44 +43,10 @@ export function useChangeDetection({
|
||||
const currentState = useMemo((): WorkflowState | null => {
|
||||
if (!workflowId) return null
|
||||
|
||||
const blocksWithSubBlocks: WorkflowState['blocks'] = {}
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
const blockSubValues = subBlockValues?.[blockId] || {}
|
||||
const subBlocks: Record<string, any> = {}
|
||||
|
||||
if (block.subBlocks) {
|
||||
for (const [subId, subBlock] of Object.entries(block.subBlocks)) {
|
||||
const storedValue = blockSubValues[subId]
|
||||
subBlocks[subId] = {
|
||||
...subBlock,
|
||||
value: storedValue !== undefined ? storedValue : subBlock.value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (block.triggerMode) {
|
||||
const triggerConfigValue = blockSubValues?.triggerConfig
|
||||
if (
|
||||
triggerConfigValue &&
|
||||
typeof triggerConfigValue === 'object' &&
|
||||
!subBlocks.triggerConfig
|
||||
) {
|
||||
subBlocks.triggerConfig = {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: triggerConfigValue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
blocksWithSubBlocks[blockId] = {
|
||||
...block,
|
||||
subBlocks,
|
||||
}
|
||||
}
|
||||
const mergedBlocks = mergeSubblockStateWithValues(blocks, subBlockValues ?? {})
|
||||
|
||||
return {
|
||||
blocks: blocksWithSubBlocks,
|
||||
blocks: mergedBlocks,
|
||||
edges,
|
||||
loops,
|
||||
parallels,
|
||||
|
||||
@@ -33,6 +33,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Browser Use LLM', id: 'browser-use-llm' },
|
||||
{ label: 'Browser Use 2.0', id: 'browser-use-2.0' },
|
||||
{ label: 'GPT-4o', id: 'gpt-4o' },
|
||||
{ label: 'GPT-4o Mini', id: 'gpt-4o-mini' },
|
||||
{ label: 'GPT-4.1', id: 'gpt-4.1' },
|
||||
@@ -42,6 +43,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
|
||||
{ label: 'Gemini 2.5 Flash', id: 'gemini-2.5-flash' },
|
||||
{ label: 'Gemini 2.5 Pro', id: 'gemini-2.5-pro' },
|
||||
{ label: 'Gemini 3 Pro Preview', id: 'gemini-3-pro-preview' },
|
||||
{ label: 'Gemini 3 Flash Preview', id: 'gemini-3-flash-preview' },
|
||||
{ label: 'Gemini Flash Latest', id: 'gemini-flash-latest' },
|
||||
{ label: 'Gemini Flash Lite Latest', id: 'gemini-flash-lite-latest' },
|
||||
{ label: 'Claude 3.7 Sonnet', id: 'claude-3-7-sonnet-20250219' },
|
||||
|
||||
446
apps/sim/blocks/blocks/hex.ts
Normal file
@@ -0,0 +1,446 @@
|
||||
import { HexIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { HexResponse } from '@/tools/hex/types'
|
||||
|
||||
export const HexBlock: BlockConfig<HexResponse> = {
|
||||
type: 'hex',
|
||||
name: 'Hex',
|
||||
description: 'Run and manage Hex projects',
|
||||
longDescription:
|
||||
'Integrate Hex into your workflow. Run projects, check run status, manage collections and groups, list users, and view data connections. Requires a Hex API token.',
|
||||
docsLink: 'https://docs.sim.ai/tools/hex',
|
||||
category: 'tools',
|
||||
bgColor: '#F5E6FF',
|
||||
icon: HexIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Run Project', id: 'run_project' },
|
||||
{ label: 'Get Run Status', id: 'get_run_status' },
|
||||
{ label: 'Get Project Runs', id: 'get_project_runs' },
|
||||
{ label: 'Cancel Run', id: 'cancel_run' },
|
||||
{ label: 'List Projects', id: 'list_projects' },
|
||||
{ label: 'Get Project', id: 'get_project' },
|
||||
{ label: 'Update Project', id: 'update_project' },
|
||||
{ label: 'Get Queried Tables', id: 'get_queried_tables' },
|
||||
{ label: 'List Users', id: 'list_users' },
|
||||
{ label: 'List Groups', id: 'list_groups' },
|
||||
{ label: 'Get Group', id: 'get_group' },
|
||||
{ label: 'List Collections', id: 'list_collections' },
|
||||
{ label: 'Get Collection', id: 'get_collection' },
|
||||
{ label: 'Create Collection', id: 'create_collection' },
|
||||
{ label: 'List Data Connections', id: 'list_data_connections' },
|
||||
{ label: 'Get Data Connection', id: 'get_data_connection' },
|
||||
],
|
||||
value: () => 'run_project',
|
||||
},
|
||||
{
|
||||
id: 'projectId',
|
||||
title: 'Project ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter project UUID',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'run_project',
|
||||
'get_run_status',
|
||||
'get_project_runs',
|
||||
'cancel_run',
|
||||
'get_project',
|
||||
'update_project',
|
||||
'get_queried_tables',
|
||||
],
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'run_project',
|
||||
'get_run_status',
|
||||
'get_project_runs',
|
||||
'cancel_run',
|
||||
'get_project',
|
||||
'update_project',
|
||||
'get_queried_tables',
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'runId',
|
||||
title: 'Run ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter run UUID',
|
||||
condition: { field: 'operation', value: ['get_run_status', 'cancel_run'] },
|
||||
required: { field: 'operation', value: ['get_run_status', 'cancel_run'] },
|
||||
},
|
||||
{
|
||||
id: 'inputParams',
|
||||
title: 'Input Parameters',
|
||||
type: 'code',
|
||||
placeholder: '{"param_name": "value"}',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `You are an expert at creating Hex project input parameters.
|
||||
Generate ONLY the raw JSON object based on the user's request.
|
||||
The output MUST be a single, valid JSON object, starting with { and ending with }.
|
||||
|
||||
Current parameters: {context}
|
||||
|
||||
Do not include any explanations, markdown formatting, or other text outside the JSON object.
|
||||
The keys should match the input parameter names defined in the Hex project.
|
||||
|
||||
Example:
|
||||
{
|
||||
"date_range": "2024-01-01",
|
||||
"department": "engineering",
|
||||
"include_inactive": false
|
||||
}`,
|
||||
placeholder: 'Describe the input parameters you need...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'projectStatus',
|
||||
title: 'Status',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter status name (e.g., custom workspace status label)',
|
||||
condition: { field: 'operation', value: 'update_project' },
|
||||
required: { field: 'operation', value: 'update_project' },
|
||||
},
|
||||
{
|
||||
id: 'runStatusFilter',
|
||||
title: 'Status Filter',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Pending', id: 'PENDING' },
|
||||
{ label: 'Running', id: 'RUNNING' },
|
||||
{ label: 'Completed', id: 'COMPLETED' },
|
||||
{ label: 'Errored', id: 'ERRORED' },
|
||||
{ label: 'Killed', id: 'KILLED' },
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: 'get_project_runs' },
|
||||
},
|
||||
{
|
||||
id: 'groupIdInput',
|
||||
title: 'Group ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter group UUID',
|
||||
condition: { field: 'operation', value: 'get_group' },
|
||||
required: { field: 'operation', value: 'get_group' },
|
||||
},
|
||||
{
|
||||
id: 'collectionId',
|
||||
title: 'Collection ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter collection UUID',
|
||||
condition: { field: 'operation', value: 'get_collection' },
|
||||
required: { field: 'operation', value: 'get_collection' },
|
||||
},
|
||||
{
|
||||
id: 'collectionName',
|
||||
title: 'Collection Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter collection name',
|
||||
condition: { field: 'operation', value: 'create_collection' },
|
||||
required: { field: 'operation', value: 'create_collection' },
|
||||
},
|
||||
{
|
||||
id: 'collectionDescription',
|
||||
title: 'Description',
|
||||
type: 'long-input',
|
||||
placeholder: 'Optional description for the collection',
|
||||
condition: { field: 'operation', value: 'create_collection' },
|
||||
},
|
||||
{
|
||||
id: 'dataConnectionId',
|
||||
title: 'Data Connection ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter data connection UUID',
|
||||
condition: { field: 'operation', value: 'get_data_connection' },
|
||||
required: { field: 'operation', value: 'get_data_connection' },
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your Hex API token',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
// Advanced fields
|
||||
{
|
||||
id: 'dryRun',
|
||||
title: 'Dry Run',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'updateCache',
|
||||
title: 'Update Cache',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'updatePublishedResults',
|
||||
title: 'Update Published Results',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'useCachedSqlResults',
|
||||
title: 'Use Cached SQL Results',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'limit',
|
||||
title: 'Limit',
|
||||
type: 'short-input',
|
||||
placeholder: '25',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'list_projects',
|
||||
'get_project_runs',
|
||||
'get_queried_tables',
|
||||
'list_users',
|
||||
'list_groups',
|
||||
'list_collections',
|
||||
'list_data_connections',
|
||||
],
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'offset',
|
||||
title: 'Offset',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'get_project_runs' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'includeArchived',
|
||||
title: 'Include Archived',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'list_projects' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'statusFilter',
|
||||
title: 'Status Filter',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Published', id: 'PUBLISHED' },
|
||||
{ label: 'Draft', id: 'DRAFT' },
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: 'list_projects' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'groupId',
|
||||
title: 'Filter by Group',
|
||||
type: 'short-input',
|
||||
placeholder: 'Group UUID (optional)',
|
||||
condition: { field: 'operation', value: 'list_users' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: [
|
||||
'hex_cancel_run',
|
||||
'hex_create_collection',
|
||||
'hex_get_collection',
|
||||
'hex_get_data_connection',
|
||||
'hex_get_group',
|
||||
'hex_get_project',
|
||||
'hex_get_project_runs',
|
||||
'hex_get_queried_tables',
|
||||
'hex_get_run_status',
|
||||
'hex_list_collections',
|
||||
'hex_list_data_connections',
|
||||
'hex_list_groups',
|
||||
'hex_list_projects',
|
||||
'hex_list_users',
|
||||
'hex_run_project',
|
||||
'hex_update_project',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'run_project':
|
||||
return 'hex_run_project'
|
||||
case 'get_run_status':
|
||||
return 'hex_get_run_status'
|
||||
case 'get_project_runs':
|
||||
return 'hex_get_project_runs'
|
||||
case 'cancel_run':
|
||||
return 'hex_cancel_run'
|
||||
case 'list_projects':
|
||||
return 'hex_list_projects'
|
||||
case 'get_project':
|
||||
return 'hex_get_project'
|
||||
case 'update_project':
|
||||
return 'hex_update_project'
|
||||
case 'get_queried_tables':
|
||||
return 'hex_get_queried_tables'
|
||||
case 'list_users':
|
||||
return 'hex_list_users'
|
||||
case 'list_groups':
|
||||
return 'hex_list_groups'
|
||||
case 'get_group':
|
||||
return 'hex_get_group'
|
||||
case 'list_collections':
|
||||
return 'hex_list_collections'
|
||||
case 'get_collection':
|
||||
return 'hex_get_collection'
|
||||
case 'create_collection':
|
||||
return 'hex_create_collection'
|
||||
case 'list_data_connections':
|
||||
return 'hex_list_data_connections'
|
||||
case 'get_data_connection':
|
||||
return 'hex_get_data_connection'
|
||||
default:
|
||||
return 'hex_run_project'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
const op = params.operation
|
||||
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
if (op === 'get_project_runs' && params.offset) result.offset = Number(params.offset)
|
||||
if (op === 'update_project' && params.projectStatus) result.status = params.projectStatus
|
||||
if (op === 'get_project_runs' && params.runStatusFilter)
|
||||
result.statusFilter = params.runStatusFilter
|
||||
if (op === 'get_group' && params.groupIdInput) result.groupId = params.groupIdInput
|
||||
if (op === 'list_users' && params.groupId) result.groupId = params.groupId
|
||||
if (op === 'create_collection' && params.collectionName) result.name = params.collectionName
|
||||
if (op === 'create_collection' && params.collectionDescription)
|
||||
result.description = params.collectionDescription
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
apiKey: { type: 'string', description: 'Hex API token' },
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID' },
|
||||
inputParams: { type: 'json', description: 'Input parameters for project run' },
|
||||
dryRun: { type: 'boolean', description: 'Perform a dry run without executing the project' },
|
||||
updateCache: {
|
||||
type: 'boolean',
|
||||
description: '(Deprecated) Update cached results after execution',
|
||||
},
|
||||
updatePublishedResults: {
|
||||
type: 'boolean',
|
||||
description: 'Update published app results after execution',
|
||||
},
|
||||
useCachedSqlResults: {
|
||||
type: 'boolean',
|
||||
description: 'Use cached SQL results instead of re-running queries',
|
||||
},
|
||||
projectStatus: {
|
||||
type: 'string',
|
||||
description: 'New project status name (custom workspace status label)',
|
||||
},
|
||||
limit: { type: 'number', description: 'Max number of results to return' },
|
||||
offset: { type: 'number', description: 'Offset for paginated results' },
|
||||
includeArchived: { type: 'boolean', description: 'Include archived projects' },
|
||||
statusFilter: { type: 'string', description: 'Filter projects by status' },
|
||||
runStatusFilter: { type: 'string', description: 'Filter runs by status' },
|
||||
groupId: { type: 'string', description: 'Filter users by group UUID' },
|
||||
groupIdInput: { type: 'string', description: 'Group UUID for get group' },
|
||||
collectionId: { type: 'string', description: 'Collection UUID' },
|
||||
collectionName: { type: 'string', description: 'Collection name' },
|
||||
collectionDescription: { type: 'string', description: 'Collection description' },
|
||||
dataConnectionId: { type: 'string', description: 'Data connection UUID' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
// Run creation outputs
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID' },
|
||||
runUrl: { type: 'string', description: 'URL to view the run' },
|
||||
runStatusUrl: { type: 'string', description: 'URL to check run status' },
|
||||
projectVersion: { type: 'number', description: 'Project version number' },
|
||||
// Run status outputs
|
||||
status: {
|
||||
type: 'json',
|
||||
description: 'Project status object ({ name }) or run status string',
|
||||
},
|
||||
startTime: { type: 'string', description: 'Run start time' },
|
||||
endTime: { type: 'string', description: 'Run end time' },
|
||||
elapsedTime: { type: 'number', description: 'Elapsed time in seconds' },
|
||||
traceId: { type: 'string', description: 'Trace ID for debugging' },
|
||||
// Project outputs
|
||||
id: { type: 'string', description: 'Resource ID' },
|
||||
title: { type: 'string', description: 'Project title' },
|
||||
name: { type: 'string', description: 'Resource name' },
|
||||
description: { type: 'string', description: 'Resource description' },
|
||||
type: { type: 'string', description: 'Project type (PROJECT or COMPONENT)' },
|
||||
createdAt: { type: 'string', description: 'Creation timestamp' },
|
||||
updatedAt: { type: 'string', description: 'Last update timestamp' },
|
||||
lastEditedAt: { type: 'string', description: 'Last edited timestamp' },
|
||||
lastPublishedAt: { type: 'string', description: 'Last published timestamp' },
|
||||
archivedAt: { type: 'string', description: 'Archived timestamp' },
|
||||
trashedAt: { type: 'string', description: 'Trashed timestamp' },
|
||||
// List outputs
|
||||
projects: {
|
||||
type: 'json',
|
||||
description: 'List of projects with id, title, status, type, creator, owner, createdAt',
|
||||
},
|
||||
runs: {
|
||||
type: 'json',
|
||||
description:
|
||||
'List of runs with runId, status, runUrl, startTime, endTime, elapsedTime, projectVersion',
|
||||
},
|
||||
users: { type: 'json', description: 'List of users with id, name, email, role' },
|
||||
groups: { type: 'json', description: 'List of groups with id, name, createdAt' },
|
||||
collections: {
|
||||
type: 'json',
|
||||
description: 'List of collections with id, name, description, creator',
|
||||
},
|
||||
connections: {
|
||||
type: 'json',
|
||||
description:
|
||||
'List of data connections with id, name, type, description, connectViaSsh, includeMagic, allowWritebackCells',
|
||||
},
|
||||
tables: {
|
||||
type: 'json',
|
||||
description: 'List of queried tables with dataConnectionId, dataConnectionName, tableName',
|
||||
},
|
||||
categories: {
|
||||
type: 'json',
|
||||
description: 'Project categories with name and description',
|
||||
},
|
||||
creator: { type: 'json', description: 'Creator details ({ email, id })' },
|
||||
owner: { type: 'json', description: 'Owner details ({ email })' },
|
||||
total: { type: 'number', description: 'Total results returned' },
|
||||
// Cancel output
|
||||
success: { type: 'boolean', description: 'Whether the operation succeeded' },
|
||||
// Data connection flags
|
||||
connectViaSsh: { type: 'boolean', description: 'SSH tunneling enabled' },
|
||||
includeMagic: { type: 'boolean', description: 'Magic AI features enabled' },
|
||||
allowWritebackCells: { type: 'boolean', description: 'Writeback cells allowed' },
|
||||
},
|
||||
}
|
||||
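Because the block's `tools.config.tool` and `tools.config.params` are plain functions over the sub-block values, the operation-to-tool mapping can be sanity-checked directly. A small sketch (the UUID and filter values are illustrative, and the optional-property access may need narrowing depending on how `BlockConfig` types `tools.config`):

// Illustrative only: resolving a UI selection into a Hex tool call.
const selection = {
  operation: 'get_project_runs',
  projectId: '00000000-0000-0000-0000-000000000000', // placeholder UUID
  runStatusFilter: 'COMPLETED',
  limit: '10',
}
const toolId = HexBlock.tools.config?.tool?.(selection)       // 'hex_get_project_runs'
const toolParams = HexBlock.tools.config?.params?.(selection) // { limit: 10, statusFilter: 'COMPLETED' }
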
@@ -9,10 +9,10 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
type: 'slack',
|
||||
name: 'Slack',
|
||||
description:
|
||||
'Send, update, delete messages, add reactions in Slack or trigger workflows from Slack events',
|
||||
'Send, update, delete messages, send ephemeral messages, add reactions in Slack or trigger workflows from Slack events',
|
||||
authMode: AuthMode.OAuth,
|
||||
longDescription:
|
||||
'Integrate Slack into the workflow. Can send, update, and delete messages, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.',
|
||||
'Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.',
|
||||
docsLink: 'https://docs.sim.ai/tools/slack',
|
||||
category: 'tools',
|
||||
bgColor: '#611f69',
|
||||
@@ -25,6 +25,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Send Message', id: 'send' },
|
||||
{ label: 'Send Ephemeral Message', id: 'ephemeral' },
|
||||
{ label: 'Create Canvas', id: 'canvas' },
|
||||
{ label: 'Read Messages', id: 'read' },
|
||||
{ label: 'Get Message', id: 'get_message' },
|
||||
@@ -116,15 +117,21 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
placeholder: 'Select Slack channel',
|
||||
mode: 'basic',
|
||||
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
condition: (values?: Record<string, unknown>) => {
|
||||
const op = values?.operation as string
|
||||
if (op === 'ephemeral') {
|
||||
return { field: 'operation', value: 'ephemeral' }
|
||||
}
|
||||
return {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
},
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
not: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
@@ -135,15 +142,21 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
canonicalParamId: 'channel',
|
||||
placeholder: 'Enter Slack channel ID (e.g., C1234567890)',
|
||||
mode: 'advanced',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
condition: (values?: Record<string, unknown>) => {
|
||||
const op = values?.operation as string
|
||||
if (op === 'ephemeral') {
|
||||
return { field: 'operation', value: 'ephemeral' }
|
||||
}
|
||||
return {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
},
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
not: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
@@ -175,6 +188,31 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'ephemeralUser',
|
||||
title: 'Target User',
|
||||
type: 'short-input',
|
||||
placeholder: 'User ID who will see the message (e.g., U1234567890)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'ephemeral',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'messageFormat',
|
||||
title: 'Message Format',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Plain Text', id: 'text' },
|
||||
{ label: 'Block Kit', id: 'blocks' },
|
||||
],
|
||||
value: () => 'text',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral', 'update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'text',
|
||||
title: 'Message',
|
||||
@@ -182,9 +220,77 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
placeholder: 'Enter your message (supports Slack mrkdwn)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'send',
|
||||
value: ['send', 'ephemeral'],
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral'],
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'blocks',
|
||||
title: 'Block Kit Blocks',
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
placeholder: 'JSON array of Block Kit blocks',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral', 'update'],
|
||||
and: { field: 'messageFormat', value: 'blocks' },
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral', 'update'],
|
||||
and: { field: 'messageFormat', value: 'blocks' },
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `You are an expert at Slack Block Kit.
|
||||
Generate ONLY a valid JSON array of Block Kit blocks based on the user's request.
|
||||
The output MUST be a JSON array starting with [ and ending with ].
|
||||
|
||||
Current blocks: {context}
|
||||
|
||||
Available block types for messages:
|
||||
- "section": Displays text with an optional accessory element. Text uses { "type": "mrkdwn", "text": "..." } or { "type": "plain_text", "text": "..." }.
|
||||
- "header": Large text header. Text must be plain_text.
|
||||
- "divider": A horizontal rule separator. No fields needed besides type.
|
||||
- "image": Displays an image. Requires "image_url" and "alt_text".
|
||||
- "context": Contextual info with an "elements" array of image and text objects.
|
||||
- "actions": Interactive elements like buttons. Each button needs "type": "button", a "text" object, and an "action_id".
|
||||
- "rich_text": Structured rich text with "elements" array of rich_text_section objects.
|
||||
|
||||
Example output:
|
||||
[
|
||||
{
|
||||
"type": "header",
|
||||
"text": { "type": "plain_text", "text": "Order Confirmation" }
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"text": { "type": "mrkdwn", "text": "Your order *#1234* has been confirmed." }
|
||||
},
|
||||
{ "type": "divider" },
|
||||
{
|
||||
"type": "actions",
|
||||
"elements": [
|
||||
{
|
||||
"type": "button",
|
||||
"text": { "type": "plain_text", "text": "View Order" },
|
||||
"action_id": "view_order",
|
||||
"url": "https://example.com/orders/1234"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
You can reference workflow variables using angle brackets, e.g., <blockName.output>.
|
||||
Do not include any explanations, markdown formatting, or other text outside the JSON array.`,
|
||||
placeholder: 'Describe the Block Kit layout you want to create...',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'threadTs',
|
||||
@@ -193,7 +299,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
placeholder: 'Reply to thread (e.g., 1405894322.002768)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'send',
|
||||
value: ['send', 'ephemeral'],
|
||||
},
|
||||
required: false,
|
||||
},
|
||||
@@ -456,8 +562,13 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Delete Message specific fields
|
||||
{
|
||||
@@ -499,6 +610,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
tools: {
|
||||
access: [
|
||||
'slack_message',
|
||||
'slack_ephemeral_message',
|
||||
'slack_canvas',
|
||||
'slack_message_reader',
|
||||
'slack_get_message',
|
||||
@@ -517,6 +629,8 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
switch (params.operation) {
|
||||
case 'send':
|
||||
return 'slack_message'
|
||||
case 'ephemeral':
|
||||
return 'slack_ephemeral_message'
|
||||
case 'canvas':
|
||||
return 'slack_canvas'
|
||||
case 'read':
|
||||
@@ -554,13 +668,16 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
destinationType,
|
||||
channel,
|
||||
dmUserId,
|
||||
messageFormat,
|
||||
text,
|
||||
title,
|
||||
content,
|
||||
limit,
|
||||
oldest,
|
||||
files,
|
||||
blocks,
|
||||
threadTs,
|
||||
ephemeralUser,
|
||||
updateTimestamp,
|
||||
updateText,
|
||||
deleteTimestamp,
|
||||
@@ -602,10 +719,13 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
|
||||
switch (operation) {
|
||||
case 'send': {
|
||||
baseParams.text = text
|
||||
baseParams.text = messageFormat === 'blocks' && !text ? ' ' : text
|
||||
if (threadTs) {
|
||||
baseParams.threadTs = threadTs
|
||||
}
|
||||
if (blocks) {
|
||||
baseParams.blocks = blocks
|
||||
}
|
||||
// files is the canonical param from attachmentFiles (basic) or files (advanced)
|
||||
const normalizedFiles = normalizeFileInput(files)
|
||||
if (normalizedFiles) {
|
||||
@@ -614,6 +734,18 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
break
|
||||
}
|
||||
|
||||
case 'ephemeral': {
|
||||
baseParams.text = messageFormat === 'blocks' && !text ? ' ' : text
|
||||
baseParams.user = ephemeralUser ? String(ephemeralUser).trim() : ''
|
||||
if (threadTs) {
|
||||
baseParams.threadTs = threadTs
|
||||
}
|
||||
if (blocks) {
|
||||
baseParams.blocks = blocks
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'canvas':
|
||||
baseParams.title = title
|
||||
baseParams.content = content
|
||||
@@ -680,7 +812,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
|
||||
case 'update':
|
||||
baseParams.timestamp = updateTimestamp
|
||||
baseParams.text = updateText
|
||||
baseParams.text = messageFormat === 'blocks' && !updateText ? ' ' : updateText
|
||||
if (blocks) {
|
||||
baseParams.blocks = blocks
|
||||
}
|
||||
break
|
||||
|
||||
case 'delete':
|
||||
@@ -699,6 +834,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
messageFormat: { type: 'string', description: 'Message format: text or blocks' },
|
||||
authMethod: { type: 'string', description: 'Authentication method' },
|
||||
destinationType: { type: 'string', description: 'Destination type (channel or dm)' },
|
||||
credential: { type: 'string', description: 'Slack access token' },
|
||||
@@ -731,6 +867,9 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
// List Users inputs
|
||||
includeDeleted: { type: 'string', description: 'Include deactivated users (true/false)' },
|
||||
userLimit: { type: 'string', description: 'Maximum number of users to return' },
|
||||
// Ephemeral message inputs
|
||||
ephemeralUser: { type: 'string', description: 'User ID who will see the ephemeral message' },
|
||||
blocks: { type: 'json', description: 'Block Kit layout blocks as a JSON array' },
|
||||
// Get User inputs
|
||||
userId: { type: 'string', description: 'User ID to look up' },
|
||||
// Get Message inputs
|
||||
@@ -758,6 +897,12 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
},
|
||||
files: { type: 'file[]', description: 'Files attached to the message' },
|
||||
|
||||
// slack_ephemeral_message outputs (ephemeral operation)
|
||||
messageTs: {
|
||||
type: 'string',
|
||||
description: 'Timestamp of the ephemeral message (cannot be used to update or delete)',
|
||||
},
|
||||
|
||||
// slack_canvas outputs
|
||||
canvas_id: { type: 'string', description: 'Canvas identifier for created canvases' },
|
||||
title: { type: 'string', description: 'Canvas title' },
|
||||
|
||||
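The new 'ephemeral' operation maps to a `slack_ephemeral_message` tool and builds `channel`, `user`, and optional `text`/`blocks`/`threadTs` params, which lines up with Slack's chat.postEphemeral API (whose `message_ts` cannot be updated or deleted later, matching the `messageTs` output noted above). A rough sketch of such a call, assuming the tool wraps chat.postEphemeral the same way `postSlackMessage` wraps chat.postMessage; the helper below is hypothetical, not the tool's actual implementation:

// Hypothetical helper mirroring postSlackMessage, but targeting chat.postEphemeral.
async function postSlackEphemeral(
  accessToken: string,
  channel: string,
  user: string,
  text: string,
  threadTs?: string | null,
  blocks?: unknown[] | null
): Promise<{ ok: boolean; message_ts?: string; error?: string }> {
  const response = await fetch('https://slack.com/api/chat.postEphemeral', {
    method: 'POST',
    headers: { Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({
      channel, // channel the message appears in
      user,    // only this user sees it
      text,
      ...(threadTs && { thread_ts: threadTs }),
      ...(blocks && blocks.length > 0 && { blocks }),
    }),
  })
  return response.json()
}
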
@@ -55,6 +55,7 @@ import { GrafanaBlock } from '@/blocks/blocks/grafana'
|
||||
import { GrainBlock } from '@/blocks/blocks/grain'
|
||||
import { GreptileBlock } from '@/blocks/blocks/greptile'
|
||||
import { GuardrailsBlock } from '@/blocks/blocks/guardrails'
|
||||
import { HexBlock } from '@/blocks/blocks/hex'
|
||||
import { HubSpotBlock } from '@/blocks/blocks/hubspot'
|
||||
import { HuggingFaceBlock } from '@/blocks/blocks/huggingface'
|
||||
import { HumanInTheLoopBlock } from '@/blocks/blocks/human_in_the_loop'
|
||||
@@ -240,6 +241,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
grain: GrainBlock,
|
||||
greptile: GreptileBlock,
|
||||
guardrails: GuardrailsBlock,
|
||||
hex: HexBlock,
|
||||
hubspot: HubSpotBlock,
|
||||
huggingface: HuggingFaceBlock,
|
||||
human_in_the_loop: HumanInTheLoopBlock,
|
||||
|
||||
@@ -5819,3 +5819,15 @@ export function RedisIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function HexIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1450.3 600'>
|
||||
<path
|
||||
fill='#5F509D'
|
||||
fillRule='evenodd'
|
||||
d='m250.11,0v199.49h-50V0H0v600h200.11v-300.69h50v300.69h200.18V0h-200.18Zm249.9,0v600h450.29v-250.23h-200.2v149h-50v-199.46h250.2V0h-450.29Zm200.09,199.49v-99.49h50v99.49h-50Zm550.02,0V0h200.18v150l-100,100.09,100,100.09v249.82h-200.18v-300.69h-50v300.69h-200.11v-249.82l100.11-100.09-100.11-100.09V0h200.11v199.49h50Z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ import {
|
||||
type QueryClient,
|
||||
useInfiniteQuery,
|
||||
useQuery,
|
||||
useQueryClient,
|
||||
} from '@tanstack/react-query'
|
||||
import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters'
|
||||
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
|
||||
@@ -159,27 +158,13 @@ interface UseLogDetailOptions {
|
||||
}
|
||||
|
||||
export function useLogDetail(logId: string | undefined, options?: UseLogDetailOptions) {
|
||||
const queryClient = useQueryClient()
|
||||
return useQuery({
|
||||
queryKey: logKeys.detail(logId),
|
||||
queryFn: () => fetchLogDetail(logId as string),
|
||||
enabled: Boolean(logId) && (options?.enabled ?? true),
|
||||
refetchInterval: options?.refetchInterval ?? false,
|
||||
staleTime: 30 * 1000,
|
||||
initialData: () => {
|
||||
if (!logId) return undefined
|
||||
const listQueries = queryClient.getQueriesData<{
|
||||
pages: { logs: WorkflowLog[] }[]
|
||||
}>({
|
||||
queryKey: logKeys.lists(),
|
||||
})
|
||||
for (const [, data] of listQueries) {
|
||||
const match = data?.pages?.flatMap((p) => p.logs).find((l) => l.id === logId)
|
||||
if (match) return match
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
initialDataUpdatedAt: 0,
|
||||
placeholderData: keepPreviousData,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,8 @@ import {
|
||||
customSession,
|
||||
emailOTP,
|
||||
genericOAuth,
|
||||
jwt,
|
||||
oidcProvider,
|
||||
oneTimeToken,
|
||||
organization,
|
||||
} from 'better-auth/plugins'
|
||||
@@ -23,6 +25,12 @@ import {
|
||||
renderPasswordResetEmail,
|
||||
renderWelcomeEmail,
|
||||
} from '@/components/emails'
|
||||
import {
|
||||
evictCachedMetadata,
|
||||
isMetadataUrl,
|
||||
resolveClientMetadata,
|
||||
upsertCimdClient,
|
||||
} from '@/lib/auth/cimd'
|
||||
import { sendPlanWelcomeEmail } from '@/lib/billing'
|
||||
import { authorizeSubscriptionReference } from '@/lib/billing/authorization'
|
||||
import { handleNewUser } from '@/lib/billing/core/usage'
|
||||
@@ -80,6 +88,8 @@ export const auth = betterAuth({
|
||||
trustedOrigins: [
|
||||
getBaseUrl(),
|
||||
...(env.NEXT_PUBLIC_SOCKET_URL ? [env.NEXT_PUBLIC_SOCKET_URL] : []),
|
||||
'https://claude.ai',
|
||||
'https://claude.com',
|
||||
].filter(Boolean),
|
||||
database: drizzleAdapter(db, {
|
||||
provider: 'pg',
|
||||
@@ -537,11 +547,51 @@ export const auth = betterAuth({
|
||||
}
|
||||
}
|
||||
|
||||
if (ctx.path === '/oauth2/authorize' || ctx.path === '/oauth2/token') {
|
||||
const clientId = (ctx.query?.client_id ?? ctx.body?.client_id) as string | undefined
|
||||
if (clientId && isMetadataUrl(clientId)) {
|
||||
try {
|
||||
const { metadata, fromCache } = await resolveClientMetadata(clientId)
|
||||
if (!fromCache) {
|
||||
try {
|
||||
await upsertCimdClient(metadata)
|
||||
} catch (upsertErr) {
|
||||
evictCachedMetadata(clientId)
|
||||
throw upsertErr
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('CIMD resolution failed', {
|
||||
clientId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}),
|
||||
},
|
||||
plugins: [
|
||||
nextCookies(),
|
||||
jwt({
|
||||
jwks: {
|
||||
keyPairConfig: { alg: 'RS256' },
|
||||
},
|
||||
disableSettingJwtHeader: true,
|
||||
}),
|
||||
oidcProvider({
|
||||
loginPage: '/login',
|
||||
consentPage: '/oauth/consent',
|
||||
requirePKCE: true,
|
||||
allowPlainCodeChallengeMethod: false,
|
||||
allowDynamicClientRegistration: true,
|
||||
useJWTPlugin: true,
|
||||
scopes: ['openid', 'profile', 'email', 'offline_access', 'mcp:tools'],
|
||||
metadata: {
|
||||
client_id_metadata_document_supported: true,
|
||||
} as Record<string, unknown>,
|
||||
}),
|
||||
oneTimeToken({
|
||||
expiresIn: 24 * 60 * 60, // 24 hours - Socket.IO handles connection persistence with heartbeats
|
||||
}),
|
||||
|
||||
168
apps/sim/lib/auth/cimd.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import { randomUUID } from 'node:crypto'
|
||||
import { db } from '@sim/db'
|
||||
import { oauthApplication } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||
|
||||
const logger = createLogger('cimd')
|
||||
|
||||
interface ClientMetadataDocument {
|
||||
client_id: string
|
||||
client_name: string
|
||||
logo_uri?: string
|
||||
redirect_uris: string[]
|
||||
client_uri?: string
|
||||
policy_uri?: string
|
||||
tos_uri?: string
|
||||
contacts?: string[]
|
||||
scope?: string
|
||||
}
|
||||
|
||||
export function isMetadataUrl(clientId: string): boolean {
|
||||
return clientId.startsWith('https://')
|
||||
}
|
||||
|
||||
async function fetchClientMetadata(url: string): Promise<ClientMetadataDocument> {
|
||||
const parsed = new URL(url)
|
||||
if (parsed.protocol !== 'https:') {
|
||||
throw new Error('CIMD URL must use HTTPS')
|
||||
}
|
||||
|
||||
const res = await secureFetchWithValidation(url, {
|
||||
headers: { Accept: 'application/json' },
|
||||
timeout: 5000,
|
||||
maxResponseBytes: 256 * 1024,
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`CIMD fetch failed: ${res.status} ${res.statusText}`)
|
||||
}
|
||||
|
||||
const doc = (await res.json()) as ClientMetadataDocument
|
||||
|
||||
if (doc.client_id !== url) {
|
||||
throw new Error(`CIMD client_id mismatch: document has "${doc.client_id}", expected "${url}"`)
|
||||
}
|
||||
|
||||
if (!Array.isArray(doc.redirect_uris) || doc.redirect_uris.length === 0) {
|
||||
throw new Error('CIMD document must contain at least one redirect_uri')
|
||||
}
|
||||
|
||||
for (const uri of doc.redirect_uris) {
|
||||
let parsed: URL
|
||||
try {
|
||||
parsed = new URL(uri)
|
||||
} catch {
|
||||
throw new Error(`Invalid redirect_uri: ${uri}`)
|
||||
}
|
||||
if (parsed.protocol !== 'https:' && parsed.protocol !== 'http:') {
|
||||
throw new Error(`Invalid redirect_uri scheme: ${parsed.protocol}`)
|
||||
}
|
||||
if (uri.includes(',')) {
|
||||
throw new Error(`redirect_uri must not contain commas: ${uri}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (doc.logo_uri) {
|
||||
try {
|
||||
const logoParsed = new URL(doc.logo_uri)
|
||||
if (logoParsed.protocol !== 'https:') {
|
||||
doc.logo_uri = undefined
|
||||
}
|
||||
} catch {
|
||||
doc.logo_uri = undefined
|
||||
}
|
||||
}
|
||||
|
||||
if (!doc.client_name || typeof doc.client_name !== 'string') {
|
||||
throw new Error('CIMD document must contain a client_name')
|
||||
}
|
||||
|
||||
return doc
|
||||
}
|
||||
|
||||
const CACHE_TTL_MS = 5 * 60 * 1000
|
||||
const NEGATIVE_CACHE_TTL_MS = 60 * 1000
|
||||
const cache = new Map<string, { doc: ClientMetadataDocument; expiresAt: number }>()
|
||||
const failureCache = new Map<string, { error: string; expiresAt: number }>()
|
||||
const inflight = new Map<string, Promise<ClientMetadataDocument>>()
|
||||
|
||||
interface ResolveResult {
|
||||
metadata: ClientMetadataDocument
|
||||
fromCache: boolean
|
||||
}
|
||||
|
||||
export async function resolveClientMetadata(url: string): Promise<ResolveResult> {
|
||||
const cached = cache.get(url)
|
||||
if (cached && Date.now() < cached.expiresAt) {
|
||||
return { metadata: cached.doc, fromCache: true }
|
||||
}
|
||||
|
||||
const failed = failureCache.get(url)
|
||||
if (failed && Date.now() < failed.expiresAt) {
|
||||
throw new Error(failed.error)
|
||||
}
|
||||
|
||||
const pending = inflight.get(url)
|
||||
if (pending) {
|
||||
return pending.then((doc) => ({ metadata: doc, fromCache: false }))
|
||||
}
|
||||
|
||||
const promise = fetchClientMetadata(url)
|
||||
.then((doc) => {
|
||||
cache.set(url, { doc, expiresAt: Date.now() + CACHE_TTL_MS })
|
||||
failureCache.delete(url)
|
||||
return doc
|
||||
})
|
||||
.catch((err) => {
|
||||
const message = err instanceof Error ? err.message : String(err)
|
||||
failureCache.set(url, { error: message, expiresAt: Date.now() + NEGATIVE_CACHE_TTL_MS })
|
||||
throw err
|
||||
})
|
||||
.finally(() => {
|
||||
inflight.delete(url)
|
||||
})
|
||||
|
||||
inflight.set(url, promise)
|
||||
return promise.then((doc) => ({ metadata: doc, fromCache: false }))
|
||||
}
|
||||
|
||||
export function evictCachedMetadata(url: string): void {
|
||||
cache.delete(url)
|
||||
}
|
||||
|
||||
export async function upsertCimdClient(metadata: ClientMetadataDocument): Promise<void> {
|
||||
const now = new Date()
|
||||
const redirectURLs = metadata.redirect_uris.join(',')
|
||||
|
||||
await db
|
||||
.insert(oauthApplication)
|
||||
.values({
|
||||
id: randomUUID(),
|
||||
clientId: metadata.client_id,
|
||||
name: metadata.client_name,
|
||||
icon: metadata.logo_uri ?? null,
|
||||
redirectURLs,
|
||||
type: 'public',
|
||||
clientSecret: null,
|
||||
userId: null,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: oauthApplication.clientId,
|
||||
set: {
|
||||
name: metadata.client_name,
|
||||
icon: metadata.logo_uri ?? null,
|
||||
redirectURLs,
|
||||
type: 'public',
|
||||
clientSecret: null,
|
||||
updatedAt: now,
|
||||
},
|
||||
})
|
||||
|
||||
logger.info('Upserted CIMD client', {
|
||||
clientId: metadata.client_id,
|
||||
name: metadata.client_name,
|
||||
})
|
||||
}
|
||||
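The better-auth hook earlier in this diff consumes this module in a resolve-then-upsert pattern: only freshly fetched documents are written to `oauthApplication`, and the cache entry is evicted if the write fails so the next authorize/token request retries. Condensed from that hook (error logging trimmed):

// Condensed from the /oauth2/authorize and /oauth2/token hook shown above.
if (clientId && isMetadataUrl(clientId)) {
  const { metadata, fromCache } = await resolveClientMetadata(clientId)
  if (!fromCache) {
    try {
      await upsertCimdClient(metadata) // register or refresh the public OAuth client
    } catch (upsertErr) {
      evictCachedMetadata(clientId)    // force a re-fetch on the next attempt
      throw upsertErr
    }
  }
}
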
51
apps/sim/lib/auth/oauth-token.ts
Normal file
@@ -0,0 +1,51 @@
import { db } from '@sim/db'
import { oauthAccessToken } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, gt } from 'drizzle-orm'

const logger = createLogger('OAuthToken')

interface OAuthTokenValidationResult {
  success: boolean
  userId?: string
  scopes?: string[]
  error?: string
}

/**
 * Validates an OAuth 2.1 access token by looking it up in the oauthAccessToken table.
 * Returns the associated userId and scopes if the token is valid and not expired.
 */
export async function validateOAuthAccessToken(token: string): Promise<OAuthTokenValidationResult> {
  try {
    const [record] = await db
      .select({
        userId: oauthAccessToken.userId,
        scopes: oauthAccessToken.scopes,
        accessTokenExpiresAt: oauthAccessToken.accessTokenExpiresAt,
      })
      .from(oauthAccessToken)
      .where(
        and(
          eq(oauthAccessToken.accessToken, token),
          gt(oauthAccessToken.accessTokenExpiresAt, new Date())
        )
      )
      .limit(1)

    if (!record) {
      return { success: false, error: 'Invalid or expired OAuth access token' }
    }

    if (!record.userId) {
      return { success: false, error: 'OAuth token has no associated user' }
    }

    const scopes = record.scopes.split(' ').filter(Boolean)

    return { success: true, userId: record.userId, scopes }
  } catch (error) {
    logger.error('OAuth access token validation failed', { error })
    return { success: false, error: 'Token validation error' }
  }
}

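A typical consumer pulls the bearer token off an incoming request and gates on the returned scopes; a minimal sketch, assuming a Fetch-style `request` and the `mcp:tools` scope configured in the oidcProvider above (the guard itself is illustrative, not part of this diff):

// Illustrative guard using the validator; 'mcp:tools' comes from the oidcProvider scopes above.
async function requireMcpToolsScope(request: Request): Promise<string | Response> {
  const authHeader = request.headers.get('authorization') ?? ''
  const token = authHeader.startsWith('Bearer ') ? authHeader.slice('Bearer '.length) : ''
  const auth = await validateOAuthAccessToken(token)
  if (!auth.success || !auth.scopes?.includes('mcp:tools')) {
    return new Response('Unauthorized', { status: 401 })
  }
  return auth.userId as string // safe: success implies a userId is present
}
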
@@ -1,8 +1,16 @@
export type ToolAnnotations = {
  readOnlyHint?: boolean
  destructiveHint?: boolean
  idempotentHint?: boolean
  openWorldHint?: boolean
}

export type DirectToolDef = {
  name: string
  description: string
  inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
  toolId: string
  annotations?: ToolAnnotations
}

export type SubagentToolDef = {
@@ -10,6 +18,7 @@ export type SubagentToolDef = {
  description: string
  inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
  agentId: string
  annotations?: ToolAnnotations
}

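These optional flags mirror MCP-style tool annotations (read-only, destructive, idempotent, open-world) and simply travel alongside a tool definition; a hypothetical entry, condensed from the `get_workflow` definition below:

// Hypothetical, condensed example of a definition carrying annotations.
const exampleTool: DirectToolDef = {
  name: 'get_workflow',
  description: 'Fetch a workflow by id',
  inputSchema: {
    type: 'object',
    properties: { workflowId: { type: 'string' } },
    required: ['workflowId'],
  },
  toolId: 'get_workflow',
  annotations: { readOnlyHint: true }, // hints that the call has no side effects
}
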
/**
|
||||
@@ -26,6 +35,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
type: 'object',
|
||||
properties: {},
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'list_workflows',
|
||||
@@ -45,6 +55,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
},
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'list_folders',
|
||||
@@ -61,6 +72,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workspaceId'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'get_workflow',
|
||||
@@ -77,6 +89,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'create_workflow',
|
||||
@@ -105,6 +118,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['name'],
|
||||
},
|
||||
annotations: { destructiveHint: false },
|
||||
},
|
||||
{
|
||||
name: 'create_folder',
|
||||
@@ -129,6 +143,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['name'],
|
||||
},
|
||||
annotations: { destructiveHint: false },
|
||||
},
|
||||
{
|
||||
name: 'rename_workflow',
|
||||
@@ -148,6 +163,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId', 'name'],
|
||||
},
|
||||
annotations: { destructiveHint: false, idempotentHint: true },
|
||||
},
|
||||
{
|
||||
name: 'move_workflow',
|
||||
@@ -168,6 +184,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, idempotentHint: true },
|
||||
},
|
||||
{
|
||||
name: 'move_folder',
|
||||
@@ -189,6 +206,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['folderId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, idempotentHint: true },
|
||||
},
|
||||
{
|
||||
name: 'run_workflow',
|
||||
@@ -214,6 +232,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'run_workflow_until_block',
|
||||
@@ -243,6 +262,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId', 'stopAfterBlockId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'run_from_block',
|
||||
@@ -276,6 +296,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId', 'startBlockId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'run_block',
|
||||
@@ -309,6 +330,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId', 'blockId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'get_deployed_workflow_state',
|
||||
@@ -325,6 +347,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['workflowId'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'generate_api_key',
|
||||
@@ -346,6 +369,7 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
},
|
||||
required: ['name'],
|
||||
},
|
||||
annotations: { destructiveHint: false },
|
||||
},
|
||||
]
|
||||
|
||||
@@ -397,6 +421,7 @@ WORKFLOW:
|
||||
},
|
||||
required: ['request', 'workflowId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_discovery',
|
||||
@@ -422,6 +447,7 @@ DO NOT USE (use direct tools instead):
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_plan',
|
||||
@@ -456,6 +482,7 @@ IMPORTANT: Pass the returned plan EXACTLY to sim_edit - do not modify or summari
|
||||
},
|
||||
required: ['request', 'workflowId'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_edit',
|
||||
@@ -491,6 +518,7 @@ After sim_edit completes, you can test immediately with sim_test, or deploy with
|
||||
},
|
||||
required: ['workflowId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_deploy',
|
||||
@@ -524,6 +552,7 @@ ALSO CAN:
|
||||
},
|
||||
required: ['request', 'workflowId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_test',
|
||||
@@ -547,6 +576,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request', 'workflowId'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_debug',
|
||||
@@ -562,6 +592,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['error', 'workflowId'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_auth',
|
||||
@@ -576,6 +607,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { destructiveHint: false, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_knowledge',
|
||||
@@ -590,6 +622,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { destructiveHint: false },
|
||||
},
|
||||
{
|
||||
name: 'sim_custom_tool',
|
||||
@@ -604,6 +637,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { destructiveHint: false },
|
||||
},
|
||||
{
|
||||
name: 'sim_info',
|
||||
@@ -619,6 +653,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_workflow',
|
||||
@@ -634,6 +669,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { destructiveHint: false },
|
||||
},
|
||||
{
|
||||
name: 'sim_research',
|
||||
@@ -648,6 +684,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { readOnlyHint: true, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_superagent',
|
||||
@@ -662,6 +699,7 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { destructiveHint: true, openWorldHint: true },
|
||||
},
|
||||
{
|
||||
name: 'sim_platform',
|
||||
@@ -676,5 +714,6 @@ Supports full and partial execution:
|
||||
},
|
||||
required: ['request'],
|
||||
},
|
||||
annotations: { readOnlyHint: true },
|
||||
},
|
||||
]
|
||||
|
||||
@@ -135,12 +135,13 @@ interface OutputFieldSchema {
|
||||
function matchesOperation(condition: any, operation: string): boolean {
|
||||
if (!condition) return false
|
||||
|
||||
const cond = typeof condition === 'function' ? condition() : condition
|
||||
const cond = typeof condition === 'function' ? condition({ operation }) : condition
|
||||
if (!cond) return false
|
||||
|
||||
if (cond.field === 'operation' && !cond.not) {
|
||||
if (cond.field === 'operation') {
|
||||
const values = Array.isArray(cond.value) ? cond.value : [cond.value]
|
||||
return values.includes(operation)
|
||||
const included = values.includes(operation)
|
||||
return cond.not ? !included : included
|
||||
}
|
||||
|
||||
return false
|
||||
@@ -173,18 +174,10 @@ function extractInputsFromSubBlocks(
|
||||
// 1. Have no condition (common parameters)
|
||||
// 2. Have a condition matching the operation
|
||||
if (operation) {
|
||||
const condition = typeof sb.condition === 'function' ? sb.condition() : sb.condition
|
||||
if (condition) {
|
||||
if (condition.field === 'operation' && !condition.not) {
|
||||
// This is an operation-specific field
|
||||
const values = Array.isArray(condition.value) ? condition.value : [condition.value]
|
||||
if (!values.includes(operation)) {
|
||||
continue // Skip if doesn't match our operation
|
||||
}
|
||||
} else if (!matchesOperation(condition, operation)) {
|
||||
// Other condition that doesn't match
|
||||
continue
|
||||
}
|
||||
const condition =
|
||||
typeof sb.condition === 'function' ? sb.condition({ operation }) : sb.condition
|
||||
if (condition && !matchesOperation(condition, operation)) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
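With this rework, negated operation conditions and function-valued conditions (now invoked with `{ operation }`) are handled in one place instead of being special-cased in `extractInputsFromSubBlocks`; for instance:

// Illustrative checks against the updated matcher.
matchesOperation({ field: 'operation', value: ['list_channels', 'list_users'], not: true }, 'send')
// -> true: 'send' is outside the excluded set, so the sub-block applies
matchesOperation({ field: 'operation', value: ['list_channels', 'list_users'], not: true }, 'list_users')
// -> false: the negation excludes the listed operations
matchesOperation((ctx?: { operation?: string }) => ({ field: 'operation', value: 'send' }), 'send')
// -> true: function conditions are evaluated with { operation } before matching
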
176
apps/sim/lib/core/config/redis.test.ts
Normal file
@@ -0,0 +1,176 @@
|
||||
import { createEnvMock, createMockRedis, loggerMock } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const mockRedisInstance = createMockRedis()
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
vi.mock('@/lib/core/config/env', () => createEnvMock({ REDIS_URL: 'redis://localhost:6379' }))
|
||||
vi.mock('ioredis', () => ({
|
||||
default: vi.fn(() => mockRedisInstance),
|
||||
}))
|
||||
|
||||
describe('redis config', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
vi.useFakeTimers()
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers()
|
||||
vi.resetModules()
|
||||
})
|
||||
|
||||
describe('onRedisReconnect', () => {
|
||||
it('should register and invoke reconnect listeners', async () => {
|
||||
const { onRedisReconnect, getRedisClient } = await import('./redis')
|
||||
const listener = vi.fn()
|
||||
onRedisReconnect(listener)
|
||||
|
||||
getRedisClient()
|
||||
|
||||
mockRedisInstance.ping.mockRejectedValue(new Error('ETIMEDOUT'))
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
|
||||
expect(listener).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should not invoke listeners when PINGs succeed', async () => {
|
||||
const { onRedisReconnect, getRedisClient } = await import('./redis')
|
||||
const listener = vi.fn()
|
||||
onRedisReconnect(listener)
|
||||
|
||||
getRedisClient()
|
||||
mockRedisInstance.ping.mockResolvedValue('PONG')
|
||||
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
|
||||
expect(listener).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should reset failure count on successful PING', async () => {
|
||||
const { onRedisReconnect, getRedisClient } = await import('./redis')
|
||||
const listener = vi.fn()
|
||||
onRedisReconnect(listener)
|
||||
|
||||
getRedisClient()
|
||||
|
||||
// 2 failures then a success — should reset counter
|
||||
mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
mockRedisInstance.ping.mockResolvedValueOnce('PONG')
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
|
||||
// 2 more failures — should NOT trigger reconnect (counter was reset)
|
||||
mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
mockRedisInstance.ping.mockRejectedValueOnce(new Error('timeout'))
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
|
||||
expect(listener).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should call disconnect(true) after 3 consecutive PING failures', async () => {
|
||||
const { getRedisClient } = await import('./redis')
|
||||
getRedisClient()
|
||||
|
||||
mockRedisInstance.ping.mockRejectedValue(new Error('ETIMEDOUT'))
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
|
||||
expect(mockRedisInstance.disconnect).not.toHaveBeenCalled()
|
||||
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
expect(mockRedisInstance.disconnect).toHaveBeenCalledWith(true)
|
||||
})
|
||||
|
||||
it('should handle listener errors gracefully without breaking health check', async () => {
|
||||
const { onRedisReconnect, getRedisClient } = await import('./redis')
|
||||
const badListener = vi.fn(() => {
|
||||
throw new Error('listener crashed')
|
||||
})
|
||||
const goodListener = vi.fn()
|
||||
onRedisReconnect(badListener)
|
||||
onRedisReconnect(goodListener)
|
||||
|
||||
getRedisClient()
|
||||
mockRedisInstance.ping.mockRejectedValue(new Error('timeout'))
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
await vi.advanceTimersByTimeAsync(30_000)
|
||||
|
||||
expect(badListener).toHaveBeenCalledTimes(1)
|
||||
expect(goodListener).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('closeRedisConnection', () => {
|
||||
it('should clear the PING interval', async () => {
|
||||
const { getRedisClient, closeRedisConnection } = await import('./redis')
|
||||
getRedisClient()
|
||||
|
||||
mockRedisInstance.quit.mockResolvedValue('OK')
|
||||
await closeRedisConnection()
|
||||
|
||||
// After closing, PING failures should not trigger disconnect
|
||||
mockRedisInstance.ping.mockRejectedValue(new Error('timeout'))
|
||||
await vi.advanceTimersByTimeAsync(30_000 * 5)
|
||||
expect(mockRedisInstance.disconnect).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('retryStrategy', () => {
|
||||
async function captureRetryStrategy(): Promise<(times: number) => number> {
|
||||
vi.resetModules()
|
||||
|
||||
vi.doMock('@sim/logger', () => loggerMock)
|
||||
vi.doMock('@/lib/core/config/env', () =>
|
||||
createEnvMock({ REDIS_URL: 'redis://localhost:6379' })
|
||||
)
|
||||
|
||||
let capturedConfig: Record<string, unknown> = {}
|
||||
vi.doMock('ioredis', () => ({
|
||||
default: vi.fn((_url: string, config: Record<string, unknown>) => {
|
||||
capturedConfig = config
|
||||
return { ping: vi.fn(), on: vi.fn() }
|
||||
}),
|
||||
}))
|
||||
|
||||
const { getRedisClient } = await import('./redis')
|
||||
getRedisClient()
|
||||
|
||||
return capturedConfig.retryStrategy as (times: number) => number
|
||||
}
|
||||
|
||||
it('should use exponential backoff with jitter', async () => {
|
||||
const retryStrategy = await captureRetryStrategy()
|
||||
expect(retryStrategy).toBeDefined()
|
||||
|
||||
// Base for attempt 1: min(1000 * 2^0, 10000) = 1000, jitter up to 300
|
||||
const delay1 = retryStrategy(1)
|
||||
expect(delay1).toBeGreaterThanOrEqual(1000)
|
||||
expect(delay1).toBeLessThanOrEqual(1300)
|
||||
|
||||
// Base for attempt 3: min(1000 * 2^2, 10000) = 4000, jitter up to 1200
|
||||
const delay3 = retryStrategy(3)
|
||||
expect(delay3).toBeGreaterThanOrEqual(4000)
|
||||
expect(delay3).toBeLessThanOrEqual(5200)
|
||||
|
||||
// Base for attempt 5: min(1000 * 2^4, 10000) = 10000, jitter up to 3000
|
||||
const delay5 = retryStrategy(5)
|
||||
expect(delay5).toBeGreaterThanOrEqual(10000)
|
||||
expect(delay5).toBeLessThanOrEqual(13000)
|
||||
})
|
||||
|
||||
it('should cap at 30s for attempts beyond 10', async () => {
|
||||
const retryStrategy = await captureRetryStrategy()
|
||||
expect(retryStrategy(11)).toBe(30000)
|
||||
expect(retryStrategy(100)).toBe(30000)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -7,6 +7,63 @@ const logger = createLogger('Redis')
|
||||
const redisUrl = env.REDIS_URL
|
||||
|
||||
let globalRedisClient: Redis | null = null
|
||||
let pingFailures = 0
|
||||
let pingInterval: NodeJS.Timeout | null = null
|
||||
let pingInFlight = false
|
||||
|
||||
const PING_INTERVAL_MS = 30_000
|
||||
const MAX_PING_FAILURES = 3
|
||||
|
||||
/** Callbacks invoked when the PING health check forces a reconnect. */
|
||||
const reconnectListeners: Array<() => void> = []
|
||||
|
||||
/**
|
||||
* Register a callback that fires when the PING health check forces a reconnect.
|
||||
* Useful for resetting cached adapters that hold a stale Redis reference.
|
||||
*/
|
||||
export function onRedisReconnect(cb: () => void): void {
|
||||
reconnectListeners.push(cb)
|
||||
}
|
||||
|
||||
function startPingHealthCheck(redis: Redis): void {
|
||||
if (pingInterval) return
|
||||
|
||||
pingInterval = setInterval(async () => {
|
||||
if (pingInFlight) return
|
||||
pingInFlight = true
|
||||
try {
|
||||
await redis.ping()
|
||||
pingFailures = 0
|
||||
} catch (error) {
|
||||
pingFailures++
|
||||
logger.warn('Redis PING failed', {
|
||||
consecutiveFailures: pingFailures,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
|
||||
if (pingFailures >= MAX_PING_FAILURES) {
|
||||
logger.error('Redis PING failed 3 consecutive times — forcing reconnect', {
|
||||
consecutiveFailures: pingFailures,
|
||||
})
|
||||
pingFailures = 0
|
||||
for (const cb of reconnectListeners) {
|
||||
try {
|
||||
cb()
|
||||
} catch (cbError) {
|
||||
logger.error('Redis reconnect listener error', { error: cbError })
|
||||
}
|
||||
}
|
||||
try {
|
||||
redis.disconnect(true)
|
||||
} catch (disconnectError) {
|
||||
logger.error('Error during forced Redis disconnect', { error: disconnectError })
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
pingInFlight = false
|
||||
}
|
||||
}, PING_INTERVAL_MS)
|
||||
}
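A hedged usage sketch of the new hook: any module that caches an object built from the shared Redis client can register a listener so its cache is dropped when the PING health check forces a reconnect (this is how the rate-limit storage factory uses it later in this diff). The queue wrapper below is illustrative, not code from the repository:

```ts
import { getRedisClient, onRedisReconnect } from '@/lib/core/config/redis'

// Illustrative cached wrapper that holds a reference to the Redis client.
let cachedQueue: { push: (job: string) => Promise<void> } | null = null

// Drop the cached wrapper when a forced reconnect happens, so the next call
// rebuilds it against a fresh connection instead of a stale one.
onRedisReconnect(() => {
  cachedQueue = null
})

export function getQueue() {
  if (cachedQueue) return cachedQueue
  const redis = getRedisClient()
  if (!redis) return null
  cachedQueue = {
    push: async (job: string) => {
      await redis.lpush('jobs', job) // ioredis list push
    },
  }
  return cachedQueue
}
```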
|
||||
|
||||
/**
|
||||
* Get a Redis client instance.
|
||||
@@ -35,8 +92,10 @@ export function getRedisClient(): Redis | null {
logger.error(`Redis reconnection attempt ${times}`, { nextRetryMs: 30000 })
return 30000
}
const delay = Math.min(times * 500, 5000)
logger.warn(`Redis reconnecting`, { attempt: times, nextRetryMs: delay })
const base = Math.min(1000 * 2 ** (times - 1), 10000)
const jitter = Math.random() * base * 0.3
const delay = Math.round(base + jitter)
logger.warn('Redis reconnecting', { attempt: times, nextRetryMs: delay })
return delay
},
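Condensed, the new retry curve is exponential with a 10s base cap, up to 30% jitter, and a flat 30s once attempts pass 10. A standalone restatement (not the file's exact code) that matches the unit tests earlier in this diff:

```ts
// Reconnect delay used by the ioredis retryStrategy (sketch).
function reconnectDelay(times: number): number {
  if (times > 10) return 30_000 // flat ceiling after 10 attempts
  const base = Math.min(1000 * 2 ** (times - 1), 10_000)
  const jitter = Math.random() * base * 0.3
  return Math.round(base + jitter)
}

// Expected ranges: attempt 1 -> 1000-1300 ms, attempt 3 -> 4000-5200 ms,
// attempt 5 -> 10000-13000 ms, attempt 11+ -> 30000 ms.
```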
|
||||
|
||||
@@ -54,6 +113,8 @@ export function getRedisClient(): Redis | null {
|
||||
globalRedisClient.on('close', () => logger.warn('Redis connection closed'))
|
||||
globalRedisClient.on('end', () => logger.error('Redis connection ended'))
|
||||
|
||||
startPingHealthCheck(globalRedisClient)
|
||||
|
||||
return globalRedisClient
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize Redis client', { error })
|
||||
@@ -118,6 +179,11 @@ export async function releaseLock(lockKey: string, value: string): Promise<boole
|
||||
* Use for graceful shutdown.
|
||||
*/
|
||||
export async function closeRedisConnection(): Promise<void> {
|
||||
if (pingInterval) {
|
||||
clearInterval(pingInterval)
|
||||
pingInterval = null
|
||||
}
|
||||
|
||||
if (globalRedisClient) {
|
||||
try {
|
||||
await globalRedisClient.quit()
|
||||
|
||||
@@ -172,7 +172,7 @@ describe('RateLimiter', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should deny on storage error (fail closed)', async () => {
|
||||
it('should allow on storage error (fail open)', async () => {
|
||||
mockAdapter.consumeTokens.mockRejectedValue(new Error('Storage error'))
|
||||
|
||||
const result = await rateLimiter.checkRateLimitWithSubscription(
|
||||
@@ -182,8 +182,8 @@ describe('RateLimiter', () => {
|
||||
false
|
||||
)
|
||||
|
||||
expect(result.allowed).toBe(false)
|
||||
expect(result.remaining).toBe(0)
|
||||
expect(result.allowed).toBe(true)
|
||||
expect(result.remaining).toBe(1)
|
||||
})
|
||||
|
||||
it('should work for all non-manual trigger types', async () => {
|
||||
|
||||
@@ -100,17 +100,16 @@ export class RateLimiter {
|
||||
retryAfterMs: result.retryAfterMs,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Rate limit storage error - failing closed (denying request)', {
|
||||
logger.error('Rate limit storage error - failing open (allowing request)', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
userId,
|
||||
triggerType,
|
||||
isAsync,
|
||||
})
|
||||
return {
|
||||
allowed: false,
|
||||
remaining: 0,
|
||||
allowed: true,
|
||||
remaining: 1,
|
||||
resetAt: new Date(Date.now() + RATE_LIMIT_WINDOW_MS),
|
||||
retryAfterMs: RATE_LIMIT_WINDOW_MS,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
129
apps/sim/lib/core/rate-limiter/storage/factory.test.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
const reconnectCallbacks: Array<() => void> = []
|
||||
|
||||
vi.mock('@/lib/core/config/redis', () => ({
|
||||
getRedisClient: vi.fn(() => null),
|
||||
onRedisReconnect: vi.fn((cb: () => void) => {
|
||||
reconnectCallbacks.push(cb)
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/storage', () => ({
|
||||
getStorageMethod: vi.fn(() => 'db'),
|
||||
}))
|
||||
|
||||
vi.mock('./db-token-bucket', () => ({
|
||||
DbTokenBucket: vi.fn(() => ({ type: 'db' })),
|
||||
}))
|
||||
|
||||
vi.mock('./redis-token-bucket', () => ({
|
||||
RedisTokenBucket: vi.fn(() => ({ type: 'redis' })),
|
||||
}))
|
||||
|
||||
describe('rate limit storage factory', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
reconnectCallbacks.length = 0
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.resetModules()
|
||||
})
|
||||
|
||||
it('should fall back to DbTokenBucket when Redis is configured but client unavailable', async () => {
|
||||
const { getStorageMethod } = await import('@/lib/core/storage')
|
||||
vi.mocked(getStorageMethod).mockReturnValue('redis')
|
||||
|
||||
const { getRedisClient } = await import('@/lib/core/config/redis')
|
||||
vi.mocked(getRedisClient).mockReturnValue(null)
|
||||
|
||||
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
|
||||
resetStorageAdapter()
|
||||
|
||||
const adapter = createStorageAdapter()
|
||||
expect(adapter).toEqual({ type: 'db' })
|
||||
})
|
||||
|
||||
it('should use RedisTokenBucket when Redis client is available', async () => {
|
||||
const { getStorageMethod } = await import('@/lib/core/storage')
|
||||
vi.mocked(getStorageMethod).mockReturnValue('redis')
|
||||
|
||||
const { getRedisClient } = await import('@/lib/core/config/redis')
|
||||
vi.mocked(getRedisClient).mockReturnValue({ ping: vi.fn() } as never)
|
||||
|
||||
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
|
||||
resetStorageAdapter()
|
||||
|
||||
const adapter = createStorageAdapter()
|
||||
expect(adapter).toEqual({ type: 'redis' })
|
||||
})
|
||||
|
||||
it('should use DbTokenBucket when storage method is db', async () => {
|
||||
const { getStorageMethod } = await import('@/lib/core/storage')
|
||||
vi.mocked(getStorageMethod).mockReturnValue('db')
|
||||
|
||||
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
|
||||
resetStorageAdapter()
|
||||
|
||||
const adapter = createStorageAdapter()
|
||||
expect(adapter).toEqual({ type: 'db' })
|
||||
})
|
||||
|
||||
it('should cache the adapter and return same instance', async () => {
|
||||
const { getStorageMethod } = await import('@/lib/core/storage')
|
||||
vi.mocked(getStorageMethod).mockReturnValue('db')
|
||||
|
||||
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
|
||||
resetStorageAdapter()
|
||||
|
||||
const adapter1 = createStorageAdapter()
|
||||
const adapter2 = createStorageAdapter()
|
||||
expect(adapter1).toBe(adapter2)
|
||||
})
|
||||
|
||||
it('should register a reconnect listener that resets cached adapter', async () => {
|
||||
const { getStorageMethod } = await import('@/lib/core/storage')
|
||||
vi.mocked(getStorageMethod).mockReturnValue('db')
|
||||
|
||||
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
|
||||
resetStorageAdapter()
|
||||
|
||||
const adapter1 = createStorageAdapter()
|
||||
|
||||
// Simulate Redis reconnect — should reset cached adapter
|
||||
expect(reconnectCallbacks.length).toBeGreaterThan(0)
|
||||
reconnectCallbacks[0]()
|
||||
|
||||
// Next call should create a fresh adapter
|
||||
const adapter2 = createStorageAdapter()
|
||||
expect(adapter2).not.toBe(adapter1)
|
||||
})
|
||||
|
||||
it('should re-evaluate storage on next call after reconnect resets cache', async () => {
|
||||
const { getStorageMethod } = await import('@/lib/core/storage')
|
||||
const { getRedisClient } = await import('@/lib/core/config/redis')
|
||||
|
||||
// Start with Redis unavailable — falls back to DB
|
||||
vi.mocked(getStorageMethod).mockReturnValue('redis')
|
||||
vi.mocked(getRedisClient).mockReturnValue(null)
|
||||
|
||||
const { createStorageAdapter, resetStorageAdapter } = await import('./factory')
|
||||
resetStorageAdapter()
|
||||
|
||||
const adapter1 = createStorageAdapter()
|
||||
expect(adapter1).toEqual({ type: 'db' })
|
||||
|
||||
// Simulate reconnect
|
||||
reconnectCallbacks[0]()
|
||||
|
||||
// Now Redis is available
|
||||
vi.mocked(getRedisClient).mockReturnValue({ ping: vi.fn() } as never)
|
||||
|
||||
const adapter2 = createStorageAdapter()
|
||||
expect(adapter2).toEqual({ type: 'redis' })
|
||||
})
|
||||
})
|
||||
@@ -1,5 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
import { getRedisClient, onRedisReconnect } from '@/lib/core/config/redis'
|
||||
import { getStorageMethod, type StorageMethod } from '@/lib/core/storage'
|
||||
import type { RateLimitStorageAdapter } from './adapter'
|
||||
import { DbTokenBucket } from './db-token-bucket'
|
||||
@@ -8,21 +8,33 @@ import { RedisTokenBucket } from './redis-token-bucket'
|
||||
const logger = createLogger('RateLimitStorage')
|
||||
|
||||
let cachedAdapter: RateLimitStorageAdapter | null = null
|
||||
let reconnectListenerRegistered = false
|
||||
|
||||
export function createStorageAdapter(): RateLimitStorageAdapter {
|
||||
if (cachedAdapter) {
|
||||
return cachedAdapter
|
||||
}
|
||||
|
||||
if (!reconnectListenerRegistered) {
|
||||
onRedisReconnect(() => {
|
||||
cachedAdapter = null
|
||||
})
|
||||
reconnectListenerRegistered = true
|
||||
}
|
||||
|
||||
const storageMethod = getStorageMethod()
|
||||
|
||||
if (storageMethod === 'redis') {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
throw new Error('Redis configured but client unavailable')
|
||||
logger.warn(
|
||||
'Redis configured but client unavailable - falling back to PostgreSQL for rate limiting'
|
||||
)
|
||||
cachedAdapter = new DbTokenBucket()
|
||||
} else {
|
||||
logger.info('Rate limiting: Using Redis')
|
||||
cachedAdapter = new RedisTokenBucket(redis)
|
||||
}
|
||||
logger.info('Rate limiting: Using Redis')
|
||||
cachedAdapter = new RedisTokenBucket(redis)
|
||||
} else {
|
||||
logger.info('Rate limiting: Using PostgreSQL')
|
||||
cachedAdapter = new DbTokenBucket()
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { EventEmitter } from 'node:events'
|
||||
import { createEnvMock, loggerMock } from '@sim/testing'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
type MockProc = EventEmitter & {
|
||||
@@ -130,13 +131,7 @@ async function loadExecutionModule(options: {
|
||||
return next() as any
|
||||
})
|
||||
|
||||
vi.doMock('@sim/logger', () => ({
|
||||
createLogger: () => ({
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
vi.doMock('@sim/logger', () => loggerMock)
|
||||
|
||||
const secureFetchMock = vi.fn(
|
||||
options.secureFetchImpl ??
|
||||
@@ -154,8 +149,12 @@ async function loadExecutionModule(options: {
|
||||
secureFetchWithValidation: secureFetchMock,
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/core/config/env', () => ({
|
||||
env: {
|
||||
vi.doMock('@/lib/core/utils/logging', () => ({
|
||||
sanitizeUrlForLog: vi.fn((url: string) => url),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/core/config/env', () =>
|
||||
createEnvMock({
|
||||
IVM_POOL_SIZE: '1',
|
||||
IVM_MAX_CONCURRENT: '100',
|
||||
IVM_MAX_PER_WORKER: '100',
|
||||
@@ -168,8 +167,8 @@ async function loadExecutionModule(options: {
|
||||
IVM_DISTRIBUTED_LEASE_MIN_TTL_MS: '1000',
|
||||
IVM_QUEUE_TIMEOUT_MS: '1000',
|
||||
...(options.envOverrides ?? {}),
|
||||
},
|
||||
}))
|
||||
})
|
||||
)
|
||||
|
||||
const redisEval = options.redisEvalImpl ? vi.fn(options.redisEvalImpl) : undefined
|
||||
vi.doMock('@/lib/core/config/redis', () => ({
|
||||
@@ -319,7 +318,7 @@ describe('isolated-vm scheduler', () => {
|
||||
expect(result.error?.message).toContain('Too many concurrent')
|
||||
})
|
||||
|
||||
it('fails closed when Redis is configured but unavailable', async () => {
|
||||
it('falls back to local execution when Redis is configured but unavailable', async () => {
|
||||
const { executeInIsolatedVM } = await loadExecutionModule({
|
||||
envOverrides: {
|
||||
REDIS_URL: 'redis://localhost:6379',
|
||||
@@ -328,7 +327,7 @@ describe('isolated-vm scheduler', () => {
|
||||
})
|
||||
|
||||
const result = await executeInIsolatedVM({
|
||||
code: 'return "blocked"',
|
||||
code: 'return "ok"',
|
||||
params: {},
|
||||
envVars: {},
|
||||
contextVariables: {},
|
||||
@@ -337,10 +336,11 @@ describe('isolated-vm scheduler', () => {
|
||||
ownerKey: 'user:redis-down',
|
||||
})
|
||||
|
||||
expect(result.error?.message).toContain('temporarily unavailable')
|
||||
expect(result.error).toBeUndefined()
|
||||
expect(result.result).toBe('ok')
|
||||
})
|
||||
|
||||
it('fails closed when Redis lease evaluation errors', async () => {
|
||||
it('falls back to local execution when Redis lease evaluation errors', async () => {
|
||||
const { executeInIsolatedVM } = await loadExecutionModule({
|
||||
envOverrides: {
|
||||
REDIS_URL: 'redis://localhost:6379',
|
||||
@@ -356,7 +356,7 @@ describe('isolated-vm scheduler', () => {
|
||||
})
|
||||
|
||||
const result = await executeInIsolatedVM({
|
||||
code: 'return "blocked"',
|
||||
code: 'return "ok"',
|
||||
params: {},
|
||||
envVars: {},
|
||||
contextVariables: {},
|
||||
@@ -365,7 +365,8 @@ describe('isolated-vm scheduler', () => {
|
||||
ownerKey: 'user:redis-error',
|
||||
})
|
||||
|
||||
expect(result.error?.message).toContain('temporarily unavailable')
|
||||
expect(result.error).toBeUndefined()
|
||||
expect(result.result).toBe('ok')
|
||||
})
|
||||
|
||||
it('applies weighted owner scheduling when draining queued executions', async () => {
|
||||
|
||||
@@ -987,15 +987,8 @@ export async function executeInIsolatedVM(
|
||||
}
|
||||
}
|
||||
if (leaseAcquireResult === 'unavailable') {
|
||||
maybeCleanupOwner(ownerKey)
|
||||
return {
|
||||
result: null,
|
||||
stdout: '',
|
||||
error: {
|
||||
message: 'Code execution is temporarily unavailable. Please try again in a moment.',
|
||||
name: 'Error',
|
||||
},
|
||||
}
|
||||
logger.warn('Distributed lease unavailable, falling back to local execution', { ownerKey })
|
||||
// Continue execution — local pool still enforces per-process concurrency limits
|
||||
}
|
||||
|
||||
let settled = false
|
||||
|
||||
@@ -1,24 +1,27 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
|
||||
function getOrigin(request: NextRequest): string {
|
||||
return request.nextUrl.origin
|
||||
function getOrigin(): string {
|
||||
return getBaseUrl().replace(/\/$/, '')
|
||||
}
|
||||
|
||||
export function createMcpAuthorizationServerMetadataResponse(request: NextRequest): NextResponse {
|
||||
const origin = getOrigin(request)
|
||||
export function createMcpAuthorizationServerMetadataResponse(): NextResponse {
|
||||
const origin = getOrigin()
|
||||
const resource = `${origin}/api/mcp/copilot`
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
issuer: resource,
|
||||
token_endpoint: `${origin}/api/auth/oauth/token`,
|
||||
token_endpoint_auth_methods_supported: ['none'],
|
||||
issuer: origin,
|
||||
authorization_endpoint: `${origin}/api/auth/oauth2/authorize`,
|
||||
token_endpoint: `${origin}/api/auth/oauth2/token`,
|
||||
registration_endpoint: `${origin}/api/auth/oauth2/register`,
|
||||
jwks_uri: `${origin}/api/auth/jwks`,
|
||||
token_endpoint_auth_methods_supported: ['client_secret_basic', 'client_secret_post', 'none'],
|
||||
grant_types_supported: ['authorization_code', 'refresh_token'],
|
||||
response_types_supported: ['code'],
|
||||
code_challenge_methods_supported: ['S256'],
|
||||
scopes_supported: ['mcp:tools'],
|
||||
scopes_supported: ['openid', 'profile', 'email', 'offline_access', 'mcp:tools'],
|
||||
resource,
|
||||
// Non-standard extension for API-key-only clients.
|
||||
x_sim_auth: {
|
||||
type: 'api_key',
|
||||
header: 'x-api-key',
|
||||
@@ -32,10 +35,10 @@ export function createMcpAuthorizationServerMetadataResponse(request: NextReques
|
||||
)
|
||||
}
|
||||
|
||||
export function createMcpProtectedResourceMetadataResponse(request: NextRequest): NextResponse {
|
||||
const origin = getOrigin(request)
|
||||
export function createMcpProtectedResourceMetadataResponse(): NextResponse {
|
||||
const origin = getOrigin()
|
||||
const resource = `${origin}/api/mcp/copilot`
|
||||
const authorizationServerIssuer = `${origin}/api/mcp/copilot`
|
||||
const authorizationServerIssuer = origin
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
|
||||
@@ -679,6 +679,55 @@ async function downloadSlackFiles(
|
||||
return downloaded
|
||||
}
|
||||
|
||||
const SLACK_REACTION_EVENTS = new Set(['reaction_added', 'reaction_removed'])
|
||||
|
||||
/**
|
||||
* Fetches the text of a reacted-to message from Slack using the reactions.get API.
|
||||
* Unlike conversations.history, reactions.get works for both top-level messages and
|
||||
* thread replies, since it looks up the item directly by channel + timestamp.
|
||||
* Requires the bot token to have the reactions:read scope.
|
||||
*/
|
||||
async function fetchSlackMessageText(
|
||||
channel: string,
|
||||
messageTs: string,
|
||||
botToken: string
|
||||
): Promise<string> {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
channel,
|
||||
timestamp: messageTs,
|
||||
})
|
||||
const response = await fetch(`https://slack.com/api/reactions.get?${params}`, {
|
||||
headers: { Authorization: `Bearer ${botToken}` },
|
||||
})
|
||||
|
||||
const data = (await response.json()) as {
|
||||
ok: boolean
|
||||
error?: string
|
||||
type?: string
|
||||
message?: { text?: string }
|
||||
}
|
||||
|
||||
if (!data.ok) {
|
||||
logger.warn('Slack reactions.get failed — message text unavailable', {
|
||||
channel,
|
||||
messageTs,
|
||||
error: data.error,
|
||||
})
|
||||
return ''
|
||||
}
|
||||
|
||||
return data.message?.text ?? ''
|
||||
} catch (error) {
|
||||
logger.warn('Error fetching Slack message text', {
|
||||
channel,
|
||||
messageTs,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return ''
|
||||
}
|
||||
}
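For context, a reaction_added payload nests the message coordinates under event.item, which is why the handler below reads channel and ts from there before calling this helper. A hedged sketch with made-up payload values:

```ts
// Illustrative reaction_added event body (values are invented).
const body = {
  type: 'event_callback',
  team_id: 'T0000001',
  event_id: 'Ev0000001',
  event: {
    type: 'reaction_added',
    user: 'U0000001',
    reaction: 'eyes',
    item_user: 'U0000002',
    item: { type: 'message', channel: 'C0000001', ts: '1700000000.000100' },
    event_ts: '1700000001.000200',
  },
}

// Reaction events carry no text of their own, so the handler resolves it via
// reactions.get using the nested channel + ts (requires the reactions:read scope).
const { channel, ts } = body.event.item
const text = await fetchSlackMessageText(channel, ts, process.env.SLACK_BOT_TOKEN ?? '')
```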
|
||||
|
||||
/**
|
||||
* Format webhook input based on provider
|
||||
*/
|
||||
@@ -953,6 +1002,23 @@ export async function formatWebhookInput(
|
||||
})
|
||||
}
|
||||
|
||||
const eventType: string = rawEvent?.type || body?.type || 'unknown'
|
||||
const isReactionEvent = SLACK_REACTION_EVENTS.has(eventType)
|
||||
|
||||
// Reaction events nest channel/ts inside event.item
|
||||
const channel: string = isReactionEvent
|
||||
? rawEvent?.item?.channel || ''
|
||||
: rawEvent?.channel || ''
|
||||
const messageTs: string = isReactionEvent
|
||||
? rawEvent?.item?.ts || ''
|
||||
: rawEvent?.ts || rawEvent?.event_ts || ''
|
||||
|
||||
// For reaction events, attempt to fetch the original message text
|
||||
let text: string = rawEvent?.text || ''
|
||||
if (isReactionEvent && channel && messageTs && botToken) {
|
||||
text = await fetchSlackMessageText(channel, messageTs, botToken)
|
||||
}
|
||||
|
||||
const rawFiles: any[] = rawEvent?.files ?? []
|
||||
const hasFiles = rawFiles.length > 0
|
||||
|
||||
@@ -965,16 +1031,18 @@ export async function formatWebhookInput(
|
||||
|
||||
return {
|
||||
event: {
|
||||
event_type: rawEvent?.type || body?.type || 'unknown',
|
||||
channel: rawEvent?.channel || '',
|
||||
event_type: eventType,
|
||||
channel,
|
||||
channel_name: '',
|
||||
user: rawEvent?.user || '',
|
||||
user_name: '',
|
||||
text: rawEvent?.text || '',
|
||||
timestamp: rawEvent?.ts || rawEvent?.event_ts || '',
|
||||
text,
|
||||
timestamp: messageTs,
|
||||
thread_ts: rawEvent?.thread_ts || '',
|
||||
team_id: body?.team_id || rawEvent?.team || '',
|
||||
event_id: body?.event_id || '',
|
||||
reaction: rawEvent?.reaction || '',
|
||||
item_user: rawEvent?.item_user || '',
|
||||
hasFiles,
|
||||
files,
|
||||
},
|
||||
|
||||
@@ -121,6 +121,14 @@ const nextConfig: NextConfig = {
|
||||
],
|
||||
async headers() {
|
||||
return [
|
||||
{
|
||||
source: '/.well-known/:path*',
|
||||
headers: [
|
||||
{ key: 'Access-Control-Allow-Origin', value: '*' },
|
||||
{ key: 'Access-Control-Allow-Methods', value: 'GET, OPTIONS' },
|
||||
{ key: 'Access-Control-Allow-Headers', value: 'Content-Type, Accept' },
|
||||
],
|
||||
},
|
||||
{
|
||||
// API routes CORS headers
|
||||
source: '/api/:path*',
|
||||
@@ -137,7 +145,52 @@ const nextConfig: NextConfig = {
|
||||
{
|
||||
key: 'Access-Control-Allow-Headers',
|
||||
value:
|
||||
'X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version, X-API-Key',
|
||||
'X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version, X-API-Key, Authorization',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
source: '/api/auth/oauth2/:path*',
|
||||
headers: [
|
||||
{ key: 'Access-Control-Allow-Credentials', value: 'false' },
|
||||
{ key: 'Access-Control-Allow-Origin', value: '*' },
|
||||
{ key: 'Access-Control-Allow-Methods', value: 'GET, POST, OPTIONS' },
|
||||
{
|
||||
key: 'Access-Control-Allow-Headers',
|
||||
value: 'Content-Type, Authorization, Accept',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
source: '/api/auth/jwks',
|
||||
headers: [
|
||||
{ key: 'Access-Control-Allow-Credentials', value: 'false' },
|
||||
{ key: 'Access-Control-Allow-Origin', value: '*' },
|
||||
{ key: 'Access-Control-Allow-Methods', value: 'GET, OPTIONS' },
|
||||
{ key: 'Access-Control-Allow-Headers', value: 'Content-Type, Accept' },
|
||||
],
|
||||
},
|
||||
{
|
||||
source: '/api/auth/.well-known/:path*',
|
||||
headers: [
|
||||
{ key: 'Access-Control-Allow-Credentials', value: 'false' },
|
||||
{ key: 'Access-Control-Allow-Origin', value: '*' },
|
||||
{ key: 'Access-Control-Allow-Methods', value: 'GET, OPTIONS' },
|
||||
{ key: 'Access-Control-Allow-Headers', value: 'Content-Type, Accept' },
|
||||
],
|
||||
},
|
||||
{
|
||||
source: '/api/mcp/copilot',
|
||||
headers: [
|
||||
{ key: 'Access-Control-Allow-Credentials', value: 'false' },
|
||||
{ key: 'Access-Control-Allow-Origin', value: '*' },
|
||||
{
|
||||
key: 'Access-Control-Allow-Methods',
|
||||
value: 'GET, POST, OPTIONS, DELETE',
|
||||
},
|
||||
{
|
||||
key: 'Access-Control-Allow-Headers',
|
||||
value: 'Content-Type, Authorization, X-API-Key, X-Requested-With, Accept',
|
||||
},
|
||||
],
|
||||
},
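These added header blocks exist so browser-based OAuth/MCP clients on other origins can reach the discovery, token, and JWKS endpoints without credentials. A hypothetical cross-origin client call (the deployment URL and exact well-known filename are assumptions, but any path under /.well-known/ is covered by the new rule):

```ts
// Hypothetical browser client fetching OAuth discovery metadata cross-origin.
const base = 'https://sim.example.com' // illustrative deployment URL
const res = await fetch(`${base}/.well-known/oauth-authorization-server`, {
  headers: { Accept: 'application/json' },
})
const metadata = await res.json()
console.log(metadata.authorization_endpoint, metadata.token_endpoint)
```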
|
||||
|
||||
@@ -467,25 +467,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
||||
},
|
||||
contextWindow: 200000,
|
||||
},
|
||||
{
|
||||
id: 'claude-3-7-sonnet-latest',
|
||||
pricing: {
|
||||
input: 3.0,
|
||||
cachedInput: 0.3,
|
||||
output: 15.0,
|
||||
updatedAt: '2026-02-05',
|
||||
},
|
||||
capabilities: {
|
||||
temperature: { min: 0, max: 1 },
|
||||
computerUse: true,
|
||||
maxOutputTokens: 64000,
|
||||
thinking: {
|
||||
levels: ['low', 'medium', 'high'],
|
||||
default: 'high',
|
||||
},
|
||||
},
|
||||
contextWindow: 200000,
|
||||
},
|
||||
],
|
||||
},
|
||||
'azure-openai': {
|
||||
|
||||
@@ -183,7 +183,6 @@ describe('Model Capabilities', () => {
|
||||
'gemini-2.5-flash',
|
||||
'claude-sonnet-4-0',
|
||||
'claude-opus-4-0',
|
||||
'claude-3-7-sonnet-latest',
|
||||
'grok-3-latest',
|
||||
'grok-3-fast-latest',
|
||||
'deepseek-v3',
|
||||
@@ -260,7 +259,6 @@ describe('Model Capabilities', () => {
|
||||
const modelsRange01 = [
|
||||
'claude-sonnet-4-0',
|
||||
'claude-opus-4-0',
|
||||
'claude-3-7-sonnet-latest',
|
||||
'grok-3-latest',
|
||||
'grok-3-fast-latest',
|
||||
]
|
||||
|
||||
12
apps/sim/public/new/logo/colorized-bg.svg
Normal file
@@ -0,0 +1,12 @@
|
||||
<svg width="400" height="400" viewBox="0 0 400 400" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="400" height="400" fill="#0B0B0B"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M196.822 182.761C196.822 186.348 195.403 189.792 192.884 192.328L192.523 192.692C190.006 195.236 186.586 196.658 183.024 196.658H102.445C95.0246 196.658 89 202.718 89 210.191V297.332C89 304.806 95.0246 310.866 102.445 310.866H188.962C196.383 310.866 202.4 304.806 202.4 297.332V215.745C202.4 212.419 203.71 209.228 206.047 206.874C208.377 204.527 211.546 203.207 214.849 203.207H296.777C304.198 203.207 310.214 197.148 310.214 189.674V102.533C310.214 95.0596 304.198 89 296.777 89H210.26C202.839 89 196.822 95.0596 196.822 102.533V182.761ZM223.078 107.55H283.952C288.289 107.55 291.796 111.089 291.796 115.45V176.757C291.796 181.118 288.289 184.658 283.952 184.658H223.078C218.748 184.658 215.233 181.118 215.233 176.757V115.45C215.233 111.089 218.748 107.55 223.078 107.55Z" fill="#33C482"/>
|
||||
<path d="M296.878 218.57H232.554C224.756 218.57 218.434 224.937 218.434 232.791V296.784C218.434 304.638 224.756 311.005 232.554 311.005H296.878C304.677 311.005 310.999 304.638 310.999 296.784V232.791C310.999 224.937 304.677 218.57 296.878 218.57Z" fill="#33C482"/>
|
||||
<path d="M296.878 218.27H232.554C224.756 218.27 218.434 224.636 218.434 232.491V296.483C218.434 304.337 224.756 310.703 232.554 310.703H296.878C304.677 310.703 310.999 304.337 310.999 296.483V232.491C310.999 224.636 304.677 218.27 296.878 218.27Z" fill="url(#paint0_linear_2686_11143)" fill-opacity="0.2"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_2686_11143" x1="218.434" y1="218.27" x2="274.629" y2="274.334" gradientUnits="userSpaceOnUse">
|
||||
<stop/>
|
||||
<stop offset="1" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
11
apps/sim/public/new/logo/colorized.svg
Normal file
@@ -0,0 +1,11 @@
|
||||
<svg width="222" height="222" viewBox="0 0 222 222" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M107.822 93.7612C107.822 97.3481 106.403 100.792 103.884 103.328L103.523 103.692C101.006 106.236 97.5855 107.658 94.0236 107.658H13.4455C6.02456 107.658 0 113.718 0 121.191V208.332C0 215.806 6.02456 221.866 13.4455 221.866H99.9622C107.383 221.866 113.4 215.806 113.4 208.332V126.745C113.4 123.419 114.71 120.228 117.047 117.874C119.377 115.527 122.546 114.207 125.849 114.207H207.777C215.198 114.207 221.214 108.148 221.214 100.674V13.5333C221.214 6.05956 215.198 0 207.777 0H121.26C113.839 0 107.822 6.05956 107.822 13.5333V93.7612ZM134.078 18.55H194.952C199.289 18.55 202.796 22.0893 202.796 26.4503V87.7574C202.796 92.1178 199.289 95.6577 194.952 95.6577H134.078C129.748 95.6577 126.233 92.1178 126.233 87.7574V26.4503C126.233 22.0893 129.748 18.55 134.078 18.55Z" fill="#33C482"/>
|
||||
<path d="M207.878 129.57H143.554C135.756 129.57 129.434 135.937 129.434 143.791V207.784C129.434 215.638 135.756 222.005 143.554 222.005H207.878C215.677 222.005 221.999 215.638 221.999 207.784V143.791C221.999 135.937 215.677 129.57 207.878 129.57Z" fill="#33C482"/>
|
||||
<path d="M207.878 129.27H143.554C135.756 129.27 129.434 135.636 129.434 143.491V207.483C129.434 215.337 135.756 221.703 143.554 221.703H207.878C215.677 221.703 221.999 215.337 221.999 207.483V143.491C221.999 135.636 215.677 129.27 207.878 129.27Z" fill="url(#paint0_linear_2888_11298)" fill-opacity="0.2"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_2888_11298" x1="129.434" y1="129.27" x2="185.629" y2="185.334" gradientUnits="userSpaceOnUse">
|
||||
<stop/>
|
||||
<stop offset="1" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
11
apps/sim/public/new/logo/plain.svg
Normal file
@@ -0,0 +1,11 @@
|
||||
<svg width="222" height="222" viewBox="0 0 222 222" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M107.822 93.7612C107.822 97.3481 106.403 100.792 103.884 103.328L103.523 103.692C101.006 106.236 97.5855 107.658 94.0236 107.658H13.4455C6.02456 107.658 0 113.718 0 121.191V208.332C0 215.806 6.02456 221.866 13.4455 221.866H99.9622C107.383 221.866 113.4 215.806 113.4 208.332V126.745C113.4 123.419 114.71 120.228 117.047 117.874C119.377 115.527 122.546 114.207 125.849 114.207H207.777C215.198 114.207 221.214 108.148 221.214 100.674V13.5333C221.214 6.05956 215.198 0 207.777 0H121.26C113.839 0 107.822 6.05956 107.822 13.5333V93.7612ZM134.078 18.55H194.952C199.289 18.55 202.796 22.0893 202.796 26.4503V87.7574C202.796 92.1178 199.289 95.6577 194.952 95.6577H134.078C129.748 95.6577 126.233 92.1178 126.233 87.7574V26.4503C126.233 22.0893 129.748 18.55 134.078 18.55Z" fill="white"/>
|
||||
<path d="M207.882 129.57H143.558C135.76 129.57 129.438 135.937 129.438 143.791V207.784C129.438 215.638 135.76 222.005 143.558 222.005H207.882C215.681 222.005 222.003 215.638 222.003 207.784V143.791C222.003 135.937 215.681 129.57 207.882 129.57Z" fill="white"/>
|
||||
<path d="M207.882 129.27H143.557C135.759 129.27 129.438 135.636 129.438 143.491V207.483C129.438 215.337 135.759 221.703 143.557 221.703H207.882C215.681 221.703 222.003 215.337 222.003 207.483V143.491C222.003 135.636 215.681 129.27 207.882 129.27Z" fill="url(#paint0_linear_2888_11298)" fill-opacity="0.2"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_2888_11298" x1="129.438" y1="129.27" x2="185.633" y2="185.334" gradientUnits="userSpaceOnUse">
|
||||
<stop/>
|
||||
<stop offset="1" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
70
apps/sim/tools/hex/cancel_run.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import type { HexCancelRunParams, HexCancelRunResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const cancelRunTool: ToolConfig<HexCancelRunParams, HexCancelRunResponse> = {
|
||||
id: 'hex_cancel_run',
|
||||
name: 'Hex Cancel Run',
|
||||
description: 'Cancel an active Hex project run.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
projectId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the Hex project',
|
||||
},
|
||||
runId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the run to cancel',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) =>
|
||||
`https://app.hex.tech/api/v1/projects/${params.projectId}/runs/${params.runId}`,
|
||||
method: 'DELETE',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response, params) => {
|
||||
if (response.status === 204 || response.ok) {
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
projectId: params?.projectId ?? '',
|
||||
runId: params?.runId ?? '',
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const data = await response.json().catch(() => ({}))
|
||||
return {
|
||||
success: false,
|
||||
output: {
|
||||
success: false,
|
||||
projectId: params?.projectId ?? '',
|
||||
runId: params?.runId ?? '',
|
||||
},
|
||||
error: (data as Record<string, string>).message ?? 'Failed to cancel run',
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
success: { type: 'boolean', description: 'Whether the run was successfully cancelled' },
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID that was cancelled' },
|
||||
},
|
||||
}
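All of the Hex tools added in this change share one request shape: Bearer-token auth against https://app.hex.tech/api/v1 with JSON headers. A standalone sketch of that shared pattern (the helper name and error handling are illustrative, not part of the change):

```ts
// Minimal helper mirroring the request blocks used by the Hex tools.
async function hexRequest<T>(
  apiKey: string,
  path: string,
  method: 'GET' | 'POST' | 'DELETE' = 'GET',
  body?: Record<string, unknown>
): Promise<T> {
  const response = await fetch(`https://app.hex.tech/api/v1${path}`, {
    method,
    headers: {
      Authorization: `Bearer ${apiKey}`,
      'Content-Type': 'application/json',
    },
    body: body ? JSON.stringify(body) : undefined,
  })
  if (!response.ok && response.status !== 204) {
    throw new Error(`Hex API error: ${response.status}`)
  }
  return response.status === 204 ? (undefined as T) : ((await response.json()) as T)
}

// e.g. the equivalent of hex_cancel_run:
// await hexRequest(apiKey, `/projects/${projectId}/runs/${runId}`, 'DELETE')
```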
|
||||
78
apps/sim/tools/hex/create_collection.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import type { HexCreateCollectionParams, HexCreateCollectionResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const createCollectionTool: ToolConfig<
|
||||
HexCreateCollectionParams,
|
||||
HexCreateCollectionResponse
|
||||
> = {
|
||||
id: 'hex_create_collection',
|
||||
name: 'Hex Create Collection',
|
||||
description: 'Create a new collection in the Hex workspace to organize projects.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Name for the new collection',
|
||||
},
|
||||
description: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Optional description for the collection',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: 'https://app.hex.tech/api/v1/collections',
|
||||
method: 'POST',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => {
|
||||
const body: Record<string, unknown> = { name: params.name }
|
||||
if (params.description) body.description = params.description
|
||||
return body
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: data.id ?? null,
|
||||
name: data.name ?? null,
|
||||
description: data.description ?? null,
|
||||
creator: data.creator
|
||||
? { email: data.creator.email ?? null, id: data.creator.id ?? null }
|
||||
: null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Newly created collection UUID' },
|
||||
name: { type: 'string', description: 'Collection name' },
|
||||
description: { type: 'string', description: 'Collection description', optional: true },
|
||||
creator: {
|
||||
type: 'object',
|
||||
description: 'Collection creator',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Creator email' },
|
||||
id: { type: 'string', description: 'Creator UUID' },
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
64
apps/sim/tools/hex/get_collection.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import type { HexGetCollectionParams, HexGetCollectionResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getCollectionTool: ToolConfig<HexGetCollectionParams, HexGetCollectionResponse> = {
|
||||
id: 'hex_get_collection',
|
||||
name: 'Hex Get Collection',
|
||||
description: 'Retrieve details for a specific Hex collection by its ID.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
collectionId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the collection',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => `https://app.hex.tech/api/v1/collections/${params.collectionId}`,
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: data.id ?? null,
|
||||
name: data.name ?? null,
|
||||
description: data.description ?? null,
|
||||
creator: data.creator
|
||||
? { email: data.creator.email ?? null, id: data.creator.id ?? null }
|
||||
: null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Collection UUID' },
|
||||
name: { type: 'string', description: 'Collection name' },
|
||||
description: { type: 'string', description: 'Collection description', optional: true },
|
||||
creator: {
|
||||
type: 'object',
|
||||
description: 'Collection creator',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Creator email' },
|
||||
id: { type: 'string', description: 'Creator UUID' },
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
76
apps/sim/tools/hex/get_data_connection.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import type { HexGetDataConnectionParams, HexGetDataConnectionResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getDataConnectionTool: ToolConfig<
|
||||
HexGetDataConnectionParams,
|
||||
HexGetDataConnectionResponse
|
||||
> = {
|
||||
id: 'hex_get_data_connection',
|
||||
name: 'Hex Get Data Connection',
|
||||
description:
|
||||
'Retrieve details for a specific data connection including type, description, and configuration flags.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
dataConnectionId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the data connection',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => `https://app.hex.tech/api/v1/data-connections/${params.dataConnectionId}`,
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: data.id ?? null,
|
||||
name: data.name ?? null,
|
||||
type: data.type ?? null,
|
||||
description: data.description ?? null,
|
||||
connectViaSsh: data.connectViaSsh ?? null,
|
||||
includeMagic: data.includeMagic ?? null,
|
||||
allowWritebackCells: data.allowWritebackCells ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Connection UUID' },
|
||||
name: { type: 'string', description: 'Connection name' },
|
||||
type: { type: 'string', description: 'Connection type (e.g., snowflake, postgres, bigquery)' },
|
||||
description: { type: 'string', description: 'Connection description', optional: true },
|
||||
connectViaSsh: {
|
||||
type: 'boolean',
|
||||
description: 'Whether SSH tunneling is enabled',
|
||||
optional: true,
|
||||
},
|
||||
includeMagic: {
|
||||
type: 'boolean',
|
||||
description: 'Whether Magic AI features are enabled',
|
||||
optional: true,
|
||||
},
|
||||
allowWritebackCells: {
|
||||
type: 'boolean',
|
||||
description: 'Whether writeback cells are allowed',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
52
apps/sim/tools/hex/get_group.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import type { HexGetGroupParams, HexGetGroupResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getGroupTool: ToolConfig<HexGetGroupParams, HexGetGroupResponse> = {
|
||||
id: 'hex_get_group',
|
||||
name: 'Hex Get Group',
|
||||
description: 'Retrieve details for a specific Hex group.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
groupId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the group',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => `https://app.hex.tech/api/v1/groups/${params.groupId}`,
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: data.id ?? null,
|
||||
name: data.name ?? null,
|
||||
createdAt: data.createdAt ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Group UUID' },
|
||||
name: { type: 'string', description: 'Group name' },
|
||||
createdAt: { type: 'string', description: 'Creation timestamp' },
|
||||
},
|
||||
}
|
||||
78
apps/sim/tools/hex/get_project.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import type { HexGetProjectParams, HexGetProjectResponse } from '@/tools/hex/types'
|
||||
import { HEX_PROJECT_OUTPUT_PROPERTIES } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getProjectTool: ToolConfig<HexGetProjectParams, HexGetProjectResponse> = {
|
||||
id: 'hex_get_project',
|
||||
name: 'Hex Get Project',
|
||||
description: 'Get metadata and details for a specific Hex project by its ID.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
projectId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the Hex project',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => `https://app.hex.tech/api/v1/projects/${params.projectId}`,
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: data.id ?? null,
|
||||
title: data.title ?? null,
|
||||
description: data.description ?? null,
|
||||
status: data.status ? { name: data.status.name ?? null } : null,
|
||||
type: data.type ?? null,
|
||||
creator: data.creator ? { email: data.creator.email ?? null } : null,
|
||||
owner: data.owner ? { email: data.owner.email ?? null } : null,
|
||||
categories: Array.isArray(data.categories)
|
||||
? data.categories.map((c: Record<string, string>) => ({
|
||||
name: c.name ?? null,
|
||||
description: c.description ?? null,
|
||||
}))
|
||||
: [],
|
||||
lastEditedAt: data.lastEditedAt ?? null,
|
||||
lastPublishedAt: data.lastPublishedAt ?? null,
|
||||
createdAt: data.createdAt ?? null,
|
||||
archivedAt: data.archivedAt ?? null,
|
||||
trashedAt: data.trashedAt ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: HEX_PROJECT_OUTPUT_PROPERTIES.id,
|
||||
title: HEX_PROJECT_OUTPUT_PROPERTIES.title,
|
||||
description: HEX_PROJECT_OUTPUT_PROPERTIES.description,
|
||||
status: HEX_PROJECT_OUTPUT_PROPERTIES.status,
|
||||
type: HEX_PROJECT_OUTPUT_PROPERTIES.type,
|
||||
creator: HEX_PROJECT_OUTPUT_PROPERTIES.creator,
|
||||
owner: HEX_PROJECT_OUTPUT_PROPERTIES.owner,
|
||||
categories: HEX_PROJECT_OUTPUT_PROPERTIES.categories,
|
||||
lastEditedAt: HEX_PROJECT_OUTPUT_PROPERTIES.lastEditedAt,
|
||||
lastPublishedAt: HEX_PROJECT_OUTPUT_PROPERTIES.lastPublishedAt,
|
||||
createdAt: HEX_PROJECT_OUTPUT_PROPERTIES.createdAt,
|
||||
archivedAt: HEX_PROJECT_OUTPUT_PROPERTIES.archivedAt,
|
||||
trashedAt: HEX_PROJECT_OUTPUT_PROPERTIES.trashedAt,
|
||||
},
|
||||
}
|
||||
115
apps/sim/tools/hex/get_project_runs.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import type { HexGetProjectRunsParams, HexGetProjectRunsResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getProjectRunsTool: ToolConfig<HexGetProjectRunsParams, HexGetProjectRunsResponse> = {
|
||||
id: 'hex_get_project_runs',
|
||||
name: 'Hex Get Project Runs',
|
||||
description:
|
||||
'Retrieve API-triggered runs for a Hex project with optional filtering by status and pagination.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
projectId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the Hex project',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of runs to return (1-100, default: 25)',
|
||||
},
|
||||
offset: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Offset for paginated results (default: 0)',
|
||||
},
|
||||
statusFilter: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Filter by run status: PENDING, RUNNING, ERRORED, COMPLETED, KILLED, UNABLE_TO_ALLOCATE_KERNEL',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const searchParams = new URLSearchParams()
|
||||
if (params.limit) searchParams.set('limit', String(params.limit))
|
||||
if (params.offset) searchParams.set('offset', String(params.offset))
|
||||
if (params.statusFilter) searchParams.set('statusFilter', params.statusFilter)
|
||||
const qs = searchParams.toString()
|
||||
return `https://app.hex.tech/api/v1/projects/${params.projectId}/runs${qs ? `?${qs}` : ''}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
const runs = Array.isArray(data) ? data : (data.runs ?? [])
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
runs: runs.map((r: Record<string, unknown>) => ({
|
||||
projectId: (r.projectId as string) ?? null,
|
||||
runId: (r.runId as string) ?? null,
|
||||
runUrl: (r.runUrl as string) ?? null,
|
||||
status: (r.status as string) ?? null,
|
||||
startTime: (r.startTime as string) ?? null,
|
||||
endTime: (r.endTime as string) ?? null,
|
||||
elapsedTime: (r.elapsedTime as number) ?? null,
|
||||
traceId: (r.traceId as string) ?? null,
|
||||
projectVersion: (r.projectVersion as number) ?? null,
|
||||
})),
|
||||
total: runs.length,
|
||||
traceId: data.traceId ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
runs: {
|
||||
type: 'array',
|
||||
description: 'List of project runs',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID' },
|
||||
runUrl: { type: 'string', description: 'URL to view the run', optional: true },
|
||||
status: {
|
||||
type: 'string',
|
||||
description:
|
||||
'Run status (PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL)',
|
||||
},
|
||||
startTime: { type: 'string', description: 'Run start time', optional: true },
|
||||
endTime: { type: 'string', description: 'Run end time', optional: true },
|
||||
elapsedTime: { type: 'number', description: 'Elapsed time in seconds', optional: true },
|
||||
traceId: { type: 'string', description: 'Trace ID', optional: true },
|
||||
projectVersion: {
|
||||
type: 'number',
|
||||
description: 'Project version number',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of runs returned' },
|
||||
traceId: { type: 'string', description: 'Top-level trace ID', optional: true },
|
||||
},
|
||||
}
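Because Hex runs are asynchronous, a caller usually polls the run endpoint until it reaches a terminal status. A hedged sketch using the same endpoint and status values these tools expose (the helper and the 5-second interval are illustrative):

```ts
// Poll GET /projects/{projectId}/runs/{runId} until the run finishes (sketch).
const TERMINAL_STATUSES = new Set(['COMPLETED', 'ERRORED', 'KILLED', 'UNABLE_TO_ALLOCATE_KERNEL'])

async function waitForHexRun(apiKey: string, projectId: string, runId: string) {
  for (;;) {
    const res = await fetch(
      `https://app.hex.tech/api/v1/projects/${projectId}/runs/${runId}`,
      { headers: { Authorization: `Bearer ${apiKey}` } }
    )
    const run = (await res.json()) as { status?: string }
    if (run.status && TERMINAL_STATUSES.has(run.status)) return run
    await new Promise((resolve) => setTimeout(resolve, 5_000)) // arbitrary poll interval
  }
}
```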
|
||||
81
apps/sim/tools/hex/get_queried_tables.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import type { HexGetQueriedTablesParams, HexGetQueriedTablesResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const getQueriedTablesTool: ToolConfig<
|
||||
HexGetQueriedTablesParams,
|
||||
HexGetQueriedTablesResponse
|
||||
> = {
|
||||
id: 'hex_get_queried_tables',
|
||||
name: 'Hex Get Queried Tables',
|
||||
description:
|
||||
'Return the warehouse tables queried by a Hex project, including data connection and table names.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
projectId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the Hex project',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of tables to return (1-100)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const searchParams = new URLSearchParams()
|
||||
if (params.limit) searchParams.set('limit', String(params.limit))
|
||||
const qs = searchParams.toString()
|
||||
return `https://app.hex.tech/api/v1/projects/${params.projectId}/queriedTables${qs ? `?${qs}` : ''}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
const tables = Array.isArray(data) ? data : (data.values ?? [])
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
tables: tables.map((t: Record<string, unknown>) => ({
|
||||
dataConnectionId: (t.dataConnectionId as string) ?? null,
|
||||
dataConnectionName: (t.dataConnectionName as string) ?? null,
|
||||
tableName: (t.tableName as string) ?? null,
|
||||
})),
|
||||
total: tables.length,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
tables: {
|
||||
type: 'array',
|
||||
description: 'List of warehouse tables queried by the project',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
dataConnectionId: { type: 'string', description: 'Data connection UUID' },
|
||||
dataConnectionName: { type: 'string', description: 'Data connection name' },
|
||||
tableName: { type: 'string', description: 'Table name' },
|
||||
},
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of tables returned' },
|
||||
},
|
||||
}
|
||||
72
apps/sim/tools/hex/get_run_status.ts
Normal file
@@ -0,0 +1,72 @@
import type { HexGetRunStatusParams, HexGetRunStatusResponse } from '@/tools/hex/types'
import { HEX_RUN_STATUS_OUTPUT_PROPERTIES } from '@/tools/hex/types'
import type { ToolConfig } from '@/tools/types'

export const getRunStatusTool: ToolConfig<HexGetRunStatusParams, HexGetRunStatusResponse> = {
  id: 'hex_get_run_status',
  name: 'Hex Get Run Status',
  description: 'Check the status of a Hex project run by its run ID.',
  version: '1.0.0',

  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Hex API token (Personal or Workspace)',
    },
    projectId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the Hex project',
    },
    runId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The UUID of the run to check',
    },
  },

  request: {
    url: (params) =>
      `https://app.hex.tech/api/v1/projects/${params.projectId}/runs/${params.runId}`,
    method: 'GET',
    headers: (params) => ({
      Authorization: `Bearer ${params.apiKey}`,
      'Content-Type': 'application/json',
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    return {
      success: true,
      output: {
        projectId: data.projectId ?? null,
        runId: data.runId ?? null,
        runUrl: data.runUrl ?? null,
        status: data.status ?? null,
        startTime: data.startTime ?? null,
        endTime: data.endTime ?? null,
        elapsedTime: data.elapsedTime ?? null,
        traceId: data.traceId ?? null,
        projectVersion: data.projectVersion ?? null,
      },
    }
  },

  outputs: {
    projectId: HEX_RUN_STATUS_OUTPUT_PROPERTIES.projectId,
    runId: HEX_RUN_STATUS_OUTPUT_PROPERTIES.runId,
    runUrl: HEX_RUN_STATUS_OUTPUT_PROPERTIES.runUrl,
    status: HEX_RUN_STATUS_OUTPUT_PROPERTIES.status,
    startTime: HEX_RUN_STATUS_OUTPUT_PROPERTIES.startTime,
    endTime: HEX_RUN_STATUS_OUTPUT_PROPERTIES.endTime,
    elapsedTime: HEX_RUN_STATUS_OUTPUT_PROPERTIES.elapsedTime,
    traceId: HEX_RUN_STATUS_OUTPUT_PROPERTIES.traceId,
    projectVersion: HEX_RUN_STATUS_OUTPUT_PROPERTIES.projectVersion,
  },
}
33
apps/sim/tools/hex/index.ts
Normal file
@@ -0,0 +1,33 @@
import { cancelRunTool } from '@/tools/hex/cancel_run'
import { createCollectionTool } from '@/tools/hex/create_collection'
import { getCollectionTool } from '@/tools/hex/get_collection'
import { getDataConnectionTool } from '@/tools/hex/get_data_connection'
import { getGroupTool } from '@/tools/hex/get_group'
import { getProjectTool } from '@/tools/hex/get_project'
import { getProjectRunsTool } from '@/tools/hex/get_project_runs'
import { getQueriedTablesTool } from '@/tools/hex/get_queried_tables'
import { getRunStatusTool } from '@/tools/hex/get_run_status'
import { listCollectionsTool } from '@/tools/hex/list_collections'
import { listDataConnectionsTool } from '@/tools/hex/list_data_connections'
import { listGroupsTool } from '@/tools/hex/list_groups'
import { listProjectsTool } from '@/tools/hex/list_projects'
import { listUsersTool } from '@/tools/hex/list_users'
import { runProjectTool } from '@/tools/hex/run_project'
import { updateProjectTool } from '@/tools/hex/update_project'

export const hexCancelRunTool = cancelRunTool
export const hexCreateCollectionTool = createCollectionTool
export const hexGetCollectionTool = getCollectionTool
export const hexGetDataConnectionTool = getDataConnectionTool
export const hexGetGroupTool = getGroupTool
export const hexGetProjectTool = getProjectTool
export const hexGetProjectRunsTool = getProjectRunsTool
export const hexGetQueriedTablesTool = getQueriedTablesTool
export const hexGetRunStatusTool = getRunStatusTool
export const hexListCollectionsTool = listCollectionsTool
export const hexListDataConnectionsTool = listDataConnectionsTool
export const hexListGroupsTool = listGroupsTool
export const hexListProjectsTool = listProjectsTool
export const hexListUsersTool = listUsersTool
export const hexRunProjectTool = runProjectTool
export const hexUpdateProjectTool = updateProjectTool
94
apps/sim/tools/hex/list_collections.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import type { HexListCollectionsParams, HexListCollectionsResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const listCollectionsTool: ToolConfig<HexListCollectionsParams, HexListCollectionsResponse> =
|
||||
{
|
||||
id: 'hex_list_collections',
|
||||
name: 'Hex List Collections',
|
||||
description: 'List all collections in the Hex workspace.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of collections to return (1-500, default: 25)',
|
||||
},
|
||||
sortBy: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Sort by field: NAME',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const searchParams = new URLSearchParams()
|
||||
if (params.limit) searchParams.set('limit', String(params.limit))
|
||||
if (params.sortBy) searchParams.set('sortBy', params.sortBy)
|
||||
const qs = searchParams.toString()
|
||||
return `https://app.hex.tech/api/v1/collections${qs ? `?${qs}` : ''}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
const collections = Array.isArray(data) ? data : (data.values ?? [])
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
collections: collections.map((c: Record<string, unknown>) => ({
|
||||
id: (c.id as string) ?? null,
|
||||
name: (c.name as string) ?? null,
|
||||
description: (c.description as string) ?? null,
|
||||
creator: c.creator
|
||||
? {
|
||||
email: (c.creator as Record<string, string>).email ?? null,
|
||||
id: (c.creator as Record<string, string>).id ?? null,
|
||||
}
|
||||
: null,
|
||||
})),
|
||||
total: collections.length,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
collections: {
|
||||
type: 'array',
|
||||
description: 'List of collections',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Collection UUID' },
|
||||
name: { type: 'string', description: 'Collection name' },
|
||||
description: { type: 'string', description: 'Collection description', optional: true },
|
||||
creator: {
|
||||
type: 'object',
|
||||
description: 'Collection creator',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Creator email' },
|
||||
id: { type: 'string', description: 'Creator UUID' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of collections returned' },
|
||||
},
|
||||
}
|
||||
116
apps/sim/tools/hex/list_data_connections.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import type {
|
||||
HexListDataConnectionsParams,
|
||||
HexListDataConnectionsResponse,
|
||||
} from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const listDataConnectionsTool: ToolConfig<
|
||||
HexListDataConnectionsParams,
|
||||
HexListDataConnectionsResponse
|
||||
> = {
|
||||
id: 'hex_list_data_connections',
|
||||
name: 'Hex List Data Connections',
|
||||
description:
|
||||
'List all data connections in the Hex workspace (e.g., Snowflake, PostgreSQL, BigQuery).',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of connections to return (1-500, default: 25)',
|
||||
},
|
||||
sortBy: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Sort by field: CREATED_AT or NAME',
|
||||
},
|
||||
sortDirection: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Sort direction: ASC or DESC',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const searchParams = new URLSearchParams()
|
||||
if (params.limit) searchParams.set('limit', String(params.limit))
|
||||
if (params.sortBy) searchParams.set('sortBy', params.sortBy)
|
||||
if (params.sortDirection) searchParams.set('sortDirection', params.sortDirection)
|
||||
const qs = searchParams.toString()
|
||||
return `https://app.hex.tech/api/v1/data-connections${qs ? `?${qs}` : ''}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
const connections = Array.isArray(data) ? data : (data.values ?? [])
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
connections: connections.map((c: Record<string, unknown>) => ({
|
||||
id: (c.id as string) ?? null,
|
||||
name: (c.name as string) ?? null,
|
||||
type: (c.type as string) ?? null,
|
||||
description: (c.description as string) ?? null,
|
||||
connectViaSsh: (c.connectViaSsh as boolean) ?? null,
|
||||
includeMagic: (c.includeMagic as boolean) ?? null,
|
||||
allowWritebackCells: (c.allowWritebackCells as boolean) ?? null,
|
||||
})),
|
||||
total: connections.length,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
connections: {
|
||||
type: 'array',
|
||||
description: 'List of data connections',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Connection UUID' },
|
||||
name: { type: 'string', description: 'Connection name' },
|
||||
type: {
|
||||
type: 'string',
|
||||
description:
|
||||
'Connection type (e.g., athena, bigquery, databricks, postgres, redshift, snowflake)',
|
||||
},
|
||||
description: { type: 'string', description: 'Connection description', optional: true },
|
||||
connectViaSsh: {
|
||||
type: 'boolean',
|
||||
description: 'Whether SSH tunneling is enabled',
|
||||
optional: true,
|
||||
},
|
||||
includeMagic: {
|
||||
type: 'boolean',
|
||||
description: 'Whether Magic AI features are enabled',
|
||||
optional: true,
|
||||
},
|
||||
allowWritebackCells: {
|
||||
type: 'boolean',
|
||||
description: 'Whether writeback cells are allowed',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of connections returned' },
|
||||
},
|
||||
}
|
||||
85
apps/sim/tools/hex/list_groups.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import type { HexListGroupsParams, HexListGroupsResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const listGroupsTool: ToolConfig<HexListGroupsParams, HexListGroupsResponse> = {
|
||||
id: 'hex_list_groups',
|
||||
name: 'Hex List Groups',
|
||||
description: 'List all groups in the Hex workspace with optional sorting.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of groups to return (1-500, default: 25)',
|
||||
},
|
||||
sortBy: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Sort by field: CREATED_AT or NAME',
|
||||
},
|
||||
sortDirection: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Sort direction: ASC or DESC',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const searchParams = new URLSearchParams()
|
||||
if (params.limit) searchParams.set('limit', String(params.limit))
|
||||
if (params.sortBy) searchParams.set('sortBy', params.sortBy)
|
||||
if (params.sortDirection) searchParams.set('sortDirection', params.sortDirection)
|
||||
const qs = searchParams.toString()
|
||||
return `https://app.hex.tech/api/v1/groups${qs ? `?${qs}` : ''}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
const groups = Array.isArray(data) ? data : (data.values ?? [])
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
groups: groups.map((g: Record<string, unknown>) => ({
|
||||
id: (g.id as string) ?? null,
|
||||
name: (g.name as string) ?? null,
|
||||
createdAt: (g.createdAt as string) ?? null,
|
||||
})),
|
||||
total: groups.length,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
groups: {
|
||||
type: 'array',
|
||||
description: 'List of workspace groups',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Group UUID' },
|
||||
name: { type: 'string', description: 'Group name' },
|
||||
createdAt: { type: 'string', description: 'Creation timestamp' },
|
||||
},
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of groups returned' },
|
||||
},
|
||||
}
|
||||
138
apps/sim/tools/hex/list_projects.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
import type { HexListProjectsParams, HexListProjectsResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const listProjectsTool: ToolConfig<HexListProjectsParams, HexListProjectsResponse> = {
|
||||
id: 'hex_list_projects',
|
||||
name: 'Hex List Projects',
|
||||
description: 'List all projects in your Hex workspace with optional filtering by status.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of projects to return (1-100)',
|
||||
},
|
||||
includeArchived: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Include archived projects in results',
|
||||
},
|
||||
statusFilter: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Filter by status: PUBLISHED, DRAFT, or ALL',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const searchParams = new URLSearchParams()
|
||||
if (params.limit) searchParams.set('limit', String(params.limit))
|
||||
if (params.includeArchived) searchParams.set('includeArchived', 'true')
|
||||
if (params.statusFilter) searchParams.append('statuses[]', params.statusFilter)
|
||||
const qs = searchParams.toString()
|
||||
return `https://app.hex.tech/api/v1/projects${qs ? `?${qs}` : ''}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
const projects = Array.isArray(data) ? data : (data.values ?? [])
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
projects: projects.map((p: Record<string, unknown>) => ({
|
||||
id: (p.id as string) ?? null,
|
||||
title: (p.title as string) ?? null,
|
||||
description: (p.description as string) ?? null,
|
||||
status: p.status ? { name: (p.status as Record<string, string>).name ?? null } : null,
|
||||
type: (p.type as string) ?? null,
|
||||
creator: p.creator
|
||||
? { email: (p.creator as Record<string, string>).email ?? null }
|
||||
: null,
|
||||
owner: p.owner ? { email: (p.owner as Record<string, string>).email ?? null } : null,
|
||||
categories: Array.isArray(p.categories)
|
||||
? (p.categories as Array<Record<string, string>>).map((c) => ({
|
||||
name: c.name ?? null,
|
||||
description: c.description ?? null,
|
||||
}))
|
||||
: [],
|
||||
lastEditedAt: (p.lastEditedAt as string) ?? null,
|
||||
lastPublishedAt: (p.lastPublishedAt as string) ?? null,
|
||||
createdAt: (p.createdAt as string) ?? null,
|
||||
archivedAt: (p.archivedAt as string) ?? null,
|
||||
trashedAt: (p.trashedAt as string) ?? null,
|
||||
})),
|
||||
total: projects.length,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
projects: {
|
||||
type: 'array',
|
||||
description: 'List of Hex projects',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Project UUID' },
|
||||
title: { type: 'string', description: 'Project title' },
|
||||
description: { type: 'string', description: 'Project description', optional: true },
|
||||
status: {
|
||||
type: 'object',
|
||||
description: 'Project status',
|
||||
properties: {
|
||||
name: { type: 'string', description: 'Status name (e.g., PUBLISHED, DRAFT)' },
|
||||
},
|
||||
},
|
||||
type: { type: 'string', description: 'Project type (PROJECT or COMPONENT)' },
|
||||
creator: {
|
||||
type: 'object',
|
||||
description: 'Project creator',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Creator email' },
|
||||
},
|
||||
},
|
||||
owner: {
|
||||
type: 'object',
|
||||
description: 'Project owner',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Owner email' },
|
||||
},
|
||||
},
|
||||
lastEditedAt: {
|
||||
type: 'string',
|
||||
description: 'Last edited timestamp',
|
||||
optional: true,
|
||||
},
|
||||
lastPublishedAt: {
|
||||
type: 'string',
|
||||
description: 'Last published timestamp',
|
||||
optional: true,
|
||||
},
|
||||
createdAt: { type: 'string', description: 'Creation timestamp' },
|
||||
archivedAt: { type: 'string', description: 'Archived timestamp', optional: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of projects returned' },
|
||||
},
|
||||
}
|
||||
98
apps/sim/tools/hex/list_users.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import type { HexListUsersParams, HexListUsersResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const listUsersTool: ToolConfig<HexListUsersParams, HexListUsersResponse> = {
|
||||
id: 'hex_list_users',
|
||||
name: 'Hex List Users',
|
||||
description: 'List all users in the Hex workspace with optional filtering and sorting.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
limit: {
|
||||
type: 'number',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Maximum number of users to return (1-100, default: 25)',
|
||||
},
|
||||
sortBy: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Sort by field: NAME or EMAIL',
|
||||
},
|
||||
sortDirection: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Sort direction: ASC or DESC',
|
||||
},
|
||||
groupId: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Filter users by group UUID',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const searchParams = new URLSearchParams()
|
||||
if (params.limit) searchParams.set('limit', String(params.limit))
|
||||
if (params.sortBy) searchParams.set('sortBy', params.sortBy)
|
||||
if (params.sortDirection) searchParams.set('sortDirection', params.sortDirection)
|
||||
if (params.groupId) searchParams.set('groupId', params.groupId)
|
||||
const qs = searchParams.toString()
|
||||
return `https://app.hex.tech/api/v1/users${qs ? `?${qs}` : ''}`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
const users = Array.isArray(data) ? data : (data.values ?? [])
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
users: users.map((u: Record<string, unknown>) => ({
|
||||
id: (u.id as string) ?? null,
|
||||
name: (u.name as string) ?? null,
|
||||
email: (u.email as string) ?? null,
|
||||
role: (u.role as string) ?? null,
|
||||
})),
|
||||
total: users.length,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
users: {
|
||||
type: 'array',
|
||||
description: 'List of workspace users',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'User UUID' },
|
||||
name: { type: 'string', description: 'User name' },
|
||||
email: { type: 'string', description: 'User email' },
|
||||
role: {
|
||||
type: 'string',
|
||||
description:
|
||||
'User role (ADMIN, MANAGER, EDITOR, EXPLORER, MEMBER, GUEST, EMBEDDED_USER, ANONYMOUS)',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
total: { type: 'number', description: 'Total number of users returned' },
|
||||
},
|
||||
}
|
||||
108
apps/sim/tools/hex/run_project.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import type { HexRunProjectParams, HexRunProjectResponse } from '@/tools/hex/types'
|
||||
import { HEX_RUN_OUTPUT_PROPERTIES } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const runProjectTool: ToolConfig<HexRunProjectParams, HexRunProjectResponse> = {
|
||||
id: 'hex_run_project',
|
||||
name: 'Hex Run Project',
|
||||
description:
|
||||
'Execute a published Hex project. Optionally pass input parameters and control caching behavior.',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
projectId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the Hex project to run',
|
||||
},
|
||||
inputParams: {
|
||||
type: 'json',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'JSON object of input parameters for the project (e.g., {"date": "2024-01-01"})',
|
||||
},
|
||||
dryRun: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'If true, perform a dry run without executing the project',
|
||||
},
|
||||
updateCache: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: '(Deprecated) If true, update the cached results after execution',
|
||||
},
|
||||
updatePublishedResults: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'If true, update the published app results after execution',
|
||||
},
|
||||
useCachedSqlResults: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'If true, use cached SQL results instead of re-running queries',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => `https://app.hex.tech/api/v1/projects/${params.projectId}/runs`,
|
||||
method: 'POST',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => {
|
||||
const body: Record<string, unknown> = {}
|
||||
|
||||
if (params.inputParams) {
|
||||
body.inputParams =
|
||||
typeof params.inputParams === 'string'
|
||||
? JSON.parse(params.inputParams)
|
||||
: params.inputParams
|
||||
}
|
||||
if (params.dryRun !== undefined) body.dryRun = params.dryRun
|
||||
if (params.updateCache !== undefined) body.updateCache = params.updateCache
|
||||
if (params.updatePublishedResults !== undefined)
|
||||
body.updatePublishedResults = params.updatePublishedResults
|
||||
if (params.useCachedSqlResults !== undefined)
|
||||
body.useCachedSqlResults = params.useCachedSqlResults
|
||||
|
||||
return body
|
||||
},
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
projectId: data.projectId ?? null,
|
||||
runId: data.runId ?? null,
|
||||
runUrl: data.runUrl ?? null,
|
||||
runStatusUrl: data.runStatusUrl ?? null,
|
||||
traceId: data.traceId ?? null,
|
||||
projectVersion: data.projectVersion ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
projectId: HEX_RUN_OUTPUT_PROPERTIES.projectId,
|
||||
runId: HEX_RUN_OUTPUT_PROPERTIES.runId,
|
||||
runUrl: HEX_RUN_OUTPUT_PROPERTIES.runUrl,
|
||||
runStatusUrl: HEX_RUN_OUTPUT_PROPERTIES.runStatusUrl,
|
||||
traceId: HEX_RUN_OUTPUT_PROPERTIES.traceId,
|
||||
projectVersion: HEX_RUN_OUTPUT_PROPERTIES.projectVersion,
|
||||
},
|
||||
}
|
||||
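For context, a minimal sketch of how the two endpoints defined above chain together outside the tool framework: hex_run_project's POST creates a run, hex_get_run_status's GET polls it. This snippet is illustrative only and is not part of the diff; the helper name pollHexRun, the 5-second interval, and the choice of terminal statuses are assumptions based on the status values documented in these files.

// Illustrative sketch (not from the repository): trigger a Hex project run and
// poll it until it reaches a terminal state, using the same API endpoints as
// run_project.ts and get_run_status.ts above. Requires Node 18+ for fetch.
async function pollHexRun(apiKey: string, projectId: string): Promise<string> {
  const headers = { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' }

  // POST /api/v1/projects/{projectId}/runs — the endpoint hex_run_project calls
  const kickoff = await fetch(`https://app.hex.tech/api/v1/projects/${projectId}/runs`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ useCachedSqlResults: true }),
  })
  const { runId } = await kickoff.json()

  // GET /api/v1/projects/{projectId}/runs/{runId} — the endpoint hex_get_run_status calls
  while (true) {
    const res = await fetch(
      `https://app.hex.tech/api/v1/projects/${projectId}/runs/${runId}`,
      { headers }
    )
    const { status } = await res.json()
    // Terminal statuses taken from the descriptions above
    if (['COMPLETED', 'ERRORED', 'KILLED', 'UNABLE_TO_ALLOCATE_KERNEL'].includes(status)) {
      return status
    }
    await new Promise((resolve) => setTimeout(resolve, 5000))
  }
}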
429
apps/sim/tools/hex/types.ts
Normal file
@@ -0,0 +1,429 @@
|
||||
import type { OutputProperty, ToolResponse } from '@/tools/types'
|
||||
|
||||
/**
|
||||
* Shared output property definitions for Hex API responses.
|
||||
* Based on Hex API documentation: https://learn.hex.tech/docs/api/api-reference
|
||||
*/
|
||||
|
||||
/**
|
||||
* Output definition for project items returned by the Hex API.
|
||||
* The status field is an object with a name property (e.g., { name: "PUBLISHED" }).
|
||||
* The type field is a ProjectTypeApiEnum (PROJECT or COMPONENT).
|
||||
*/
|
||||
export const HEX_PROJECT_OUTPUT_PROPERTIES = {
|
||||
id: { type: 'string', description: 'Project UUID' },
|
||||
title: { type: 'string', description: 'Project title' },
|
||||
description: { type: 'string', description: 'Project description', optional: true },
|
||||
status: {
|
||||
type: 'object',
|
||||
description: 'Project status',
|
||||
properties: {
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'Status name (e.g., PUBLISHED, DRAFT)',
|
||||
},
|
||||
},
|
||||
},
|
||||
type: {
|
||||
type: 'string',
|
||||
description: 'Project type (PROJECT or COMPONENT)',
|
||||
},
|
||||
creator: {
|
||||
type: 'object',
|
||||
description: 'Project creator',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Creator email' },
|
||||
},
|
||||
},
|
||||
owner: {
|
||||
type: 'object',
|
||||
description: 'Project owner',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Owner email' },
|
||||
},
|
||||
},
|
||||
categories: {
|
||||
type: 'array',
|
||||
description: 'Project categories',
|
||||
optional: true,
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string', description: 'Category name' },
|
||||
description: { type: 'string', description: 'Category description' },
|
||||
},
|
||||
},
|
||||
},
|
||||
lastEditedAt: { type: 'string', description: 'ISO 8601 last edited timestamp', optional: true },
|
||||
lastPublishedAt: {
|
||||
type: 'string',
|
||||
description: 'ISO 8601 last published timestamp',
|
||||
optional: true,
|
||||
},
|
||||
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
|
||||
archivedAt: { type: 'string', description: 'ISO 8601 archived timestamp', optional: true },
|
||||
trashedAt: { type: 'string', description: 'ISO 8601 trashed timestamp', optional: true },
|
||||
} as const satisfies Record<string, OutputProperty>
|
||||
|
||||
/**
|
||||
* Output definition for run creation responses.
|
||||
* POST /v1/projects/{projectId}/runs returns projectVersion but no status.
|
||||
*/
|
||||
export const HEX_RUN_OUTPUT_PROPERTIES = {
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID' },
|
||||
runUrl: { type: 'string', description: 'URL to view the run' },
|
||||
runStatusUrl: { type: 'string', description: 'URL to check run status' },
|
||||
traceId: { type: 'string', description: 'Trace ID for debugging', optional: true },
|
||||
projectVersion: { type: 'number', description: 'Project version number', optional: true },
|
||||
} as const satisfies Record<string, OutputProperty>
|
||||
|
||||
/**
|
||||
* Output definition for run status responses.
|
||||
* GET /v1/projects/{projectId}/runs/{runId} returns full run details.
|
||||
*/
|
||||
export const HEX_RUN_STATUS_OUTPUT_PROPERTIES = {
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID' },
|
||||
runUrl: { type: 'string', description: 'URL to view the run' },
|
||||
status: {
|
||||
type: 'string',
|
||||
description:
|
||||
'Run status (PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL)',
|
||||
},
|
||||
startTime: { type: 'string', description: 'ISO 8601 run start time', optional: true },
|
||||
endTime: { type: 'string', description: 'ISO 8601 run end time', optional: true },
|
||||
elapsedTime: { type: 'number', description: 'Elapsed time in seconds', optional: true },
|
||||
traceId: { type: 'string', description: 'Trace ID for debugging', optional: true },
|
||||
projectVersion: { type: 'number', description: 'Project version number', optional: true },
|
||||
} as const satisfies Record<string, OutputProperty>
|
||||
|
||||
export interface HexListProjectsParams {
|
||||
apiKey: string
|
||||
limit?: number
|
||||
includeArchived?: boolean
|
||||
statusFilter?: string
|
||||
}
|
||||
|
||||
export interface HexListProjectsResponse extends ToolResponse {
|
||||
output: {
|
||||
projects: Array<{
|
||||
id: string
|
||||
title: string
|
||||
description: string | null
|
||||
status: { name: string } | null
|
||||
type: string
|
||||
creator: { email: string } | null
|
||||
owner: { email: string } | null
|
||||
categories: Array<{ name: string; description: string }>
|
||||
lastEditedAt: string | null
|
||||
lastPublishedAt: string | null
|
||||
createdAt: string
|
||||
archivedAt: string | null
|
||||
trashedAt: string | null
|
||||
}>
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexGetProjectParams {
|
||||
apiKey: string
|
||||
projectId: string
|
||||
}
|
||||
|
||||
export interface HexGetProjectResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
title: string
|
||||
description: string | null
|
||||
status: { name: string } | null
|
||||
type: string
|
||||
creator: { email: string } | null
|
||||
owner: { email: string } | null
|
||||
categories: Array<{ name: string; description: string }>
|
||||
lastEditedAt: string | null
|
||||
lastPublishedAt: string | null
|
||||
createdAt: string
|
||||
archivedAt: string | null
|
||||
trashedAt: string | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexRunProjectParams {
|
||||
apiKey: string
|
||||
projectId: string
|
||||
inputParams?: string
|
||||
dryRun?: boolean
|
||||
updateCache?: boolean
|
||||
updatePublishedResults?: boolean
|
||||
useCachedSqlResults?: boolean
|
||||
}
|
||||
|
||||
export interface HexRunProjectResponse extends ToolResponse {
|
||||
output: {
|
||||
projectId: string
|
||||
runId: string
|
||||
runUrl: string
|
||||
runStatusUrl: string
|
||||
traceId: string | null
|
||||
projectVersion: number | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexGetRunStatusParams {
|
||||
apiKey: string
|
||||
projectId: string
|
||||
runId: string
|
||||
}
|
||||
|
||||
export interface HexGetRunStatusResponse extends ToolResponse {
|
||||
output: {
|
||||
projectId: string
|
||||
runId: string
|
||||
runUrl: string | null
|
||||
status: string
|
||||
startTime: string | null
|
||||
endTime: string | null
|
||||
elapsedTime: number | null
|
||||
traceId: string | null
|
||||
projectVersion: number | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexCancelRunParams {
|
||||
apiKey: string
|
||||
projectId: string
|
||||
runId: string
|
||||
}
|
||||
|
||||
export interface HexCancelRunResponse extends ToolResponse {
|
||||
output: {
|
||||
success: boolean
|
||||
projectId: string
|
||||
runId: string
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexGetProjectRunsParams {
|
||||
apiKey: string
|
||||
projectId: string
|
||||
limit?: number
|
||||
offset?: number
|
||||
statusFilter?: string
|
||||
}
|
||||
|
||||
export interface HexGetProjectRunsResponse extends ToolResponse {
|
||||
output: {
|
||||
runs: Array<{
|
||||
projectId: string
|
||||
runId: string
|
||||
runUrl: string | null
|
||||
status: string
|
||||
startTime: string | null
|
||||
endTime: string | null
|
||||
elapsedTime: number | null
|
||||
traceId: string | null
|
||||
projectVersion: number | null
|
||||
}>
|
||||
total: number
|
||||
traceId: string | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexUpdateProjectParams {
|
||||
apiKey: string
|
||||
projectId: string
|
||||
status: string
|
||||
}
|
||||
|
||||
export interface HexUpdateProjectResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
title: string
|
||||
description: string | null
|
||||
status: { name: string } | null
|
||||
type: string
|
||||
creator: { email: string } | null
|
||||
owner: { email: string } | null
|
||||
categories: Array<{ name: string; description: string }>
|
||||
lastEditedAt: string | null
|
||||
lastPublishedAt: string | null
|
||||
createdAt: string
|
||||
archivedAt: string | null
|
||||
trashedAt: string | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexListUsersParams {
|
||||
apiKey: string
|
||||
limit?: number
|
||||
sortBy?: string
|
||||
sortDirection?: string
|
||||
groupId?: string
|
||||
}
|
||||
|
||||
export interface HexListUsersResponse extends ToolResponse {
|
||||
output: {
|
||||
users: Array<{
|
||||
id: string
|
||||
name: string
|
||||
email: string
|
||||
role: string
|
||||
}>
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexListCollectionsParams {
|
||||
apiKey: string
|
||||
limit?: number
|
||||
sortBy?: string
|
||||
}
|
||||
|
||||
export interface HexListCollectionsResponse extends ToolResponse {
|
||||
output: {
|
||||
collections: Array<{
|
||||
id: string
|
||||
name: string
|
||||
description: string | null
|
||||
creator: { email: string; id: string } | null
|
||||
}>
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexListDataConnectionsParams {
|
||||
apiKey: string
|
||||
limit?: number
|
||||
sortBy?: string
|
||||
sortDirection?: string
|
||||
}
|
||||
|
||||
export interface HexListDataConnectionsResponse extends ToolResponse {
|
||||
output: {
|
||||
connections: Array<{
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
description: string | null
|
||||
connectViaSsh: boolean | null
|
||||
includeMagic: boolean | null
|
||||
allowWritebackCells: boolean | null
|
||||
}>
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexGetQueriedTablesParams {
|
||||
apiKey: string
|
||||
projectId: string
|
||||
limit?: number
|
||||
}
|
||||
|
||||
export interface HexGetQueriedTablesResponse extends ToolResponse {
|
||||
output: {
|
||||
tables: Array<{
|
||||
dataConnectionId: string | null
|
||||
dataConnectionName: string | null
|
||||
tableName: string | null
|
||||
}>
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexListGroupsParams {
|
||||
apiKey: string
|
||||
limit?: number
|
||||
sortBy?: string
|
||||
sortDirection?: string
|
||||
}
|
||||
|
||||
export interface HexListGroupsResponse extends ToolResponse {
|
||||
output: {
|
||||
groups: Array<{
|
||||
id: string
|
||||
name: string
|
||||
createdAt: string | null
|
||||
}>
|
||||
total: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexGetGroupParams {
|
||||
apiKey: string
|
||||
groupId: string
|
||||
}
|
||||
|
||||
export interface HexGetGroupResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
name: string
|
||||
createdAt: string | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexGetDataConnectionParams {
|
||||
apiKey: string
|
||||
dataConnectionId: string
|
||||
}
|
||||
|
||||
export interface HexGetDataConnectionResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
description: string | null
|
||||
connectViaSsh: boolean | null
|
||||
includeMagic: boolean | null
|
||||
allowWritebackCells: boolean | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexGetCollectionParams {
|
||||
apiKey: string
|
||||
collectionId: string
|
||||
}
|
||||
|
||||
export interface HexGetCollectionResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
name: string
|
||||
description: string | null
|
||||
creator: { email: string; id: string } | null
|
||||
}
|
||||
}
|
||||
|
||||
export interface HexCreateCollectionParams {
|
||||
apiKey: string
|
||||
name: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface HexCreateCollectionResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
name: string
|
||||
description: string | null
|
||||
creator: { email: string; id: string } | null
|
||||
}
|
||||
}
|
||||
|
||||
export type HexResponse =
|
||||
| HexListProjectsResponse
|
||||
| HexGetProjectResponse
|
||||
| HexRunProjectResponse
|
||||
| HexGetRunStatusResponse
|
||||
| HexCancelRunResponse
|
||||
| HexGetProjectRunsResponse
|
||||
| HexUpdateProjectResponse
|
||||
| HexListUsersResponse
|
||||
| HexListCollectionsResponse
|
||||
| HexListDataConnectionsResponse
|
||||
| HexGetQueriedTablesResponse
|
||||
| HexListGroupsResponse
|
||||
| HexGetGroupResponse
|
||||
| HexGetDataConnectionResponse
|
||||
| HexGetCollectionResponse
|
||||
| HexCreateCollectionResponse
|
||||
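A brief aside on the `as const satisfies Record<string, OutputProperty>` pattern used for the shared property maps above: it keeps the literal keys (so HEX_RUN_OUTPUT_PROPERTIES.runId resolves at compile time) while still checking each entry against the OutputProperty shape. A minimal, self-contained sketch; the local OutputProperty type here is a simplified stand-in for the one actually imported from '@/tools/types'.

// Illustrative sketch (not from the repository) of the `as const satisfies` pattern.
type OutputProperty = { type: string; description: string; optional?: boolean }

const EXAMPLE_OUTPUT_PROPERTIES = {
  id: { type: 'string', description: 'Example UUID' },
} as const satisfies Record<string, OutputProperty>

// Key access stays type-safe: a typo such as EXAMPLE_OUTPUT_PROPERTIES.idd fails to compile,
// and each value is guaranteed to match the OutputProperty shape.
const idProp = EXAMPLE_OUTPUT_PROPERTIES.id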
118
apps/sim/tools/hex/update_project.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import type { HexUpdateProjectParams, HexUpdateProjectResponse } from '@/tools/hex/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const updateProjectTool: ToolConfig<HexUpdateProjectParams, HexUpdateProjectResponse> = {
|
||||
id: 'hex_update_project',
|
||||
name: 'Hex Update Project',
|
||||
description:
|
||||
'Update a Hex project status label (e.g., endorsement or custom workspace statuses).',
|
||||
version: '1.0.0',
|
||||
|
||||
params: {
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Hex API token (Personal or Workspace)',
|
||||
},
|
||||
projectId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'The UUID of the Hex project to update',
|
||||
},
|
||||
status: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'New project status name (custom workspace status label)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => `https://app.hex.tech/api/v1/projects/${params.projectId}`,
|
||||
method: 'PATCH',
|
||||
headers: (params) => ({
|
||||
Authorization: `Bearer ${params.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => ({
|
||||
status: params.status,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: data.id ?? null,
|
||||
title: data.title ?? null,
|
||||
description: data.description ?? null,
|
||||
status: data.status ? { name: data.status.name ?? null } : null,
|
||||
type: data.type ?? null,
|
||||
creator: data.creator ? { email: data.creator.email ?? null } : null,
|
||||
owner: data.owner ? { email: data.owner.email ?? null } : null,
|
||||
categories: Array.isArray(data.categories)
|
||||
? data.categories.map((c: Record<string, string>) => ({
|
||||
name: c.name ?? null,
|
||||
description: c.description ?? null,
|
||||
}))
|
||||
: [],
|
||||
lastEditedAt: data.lastEditedAt ?? null,
|
||||
lastPublishedAt: data.lastPublishedAt ?? null,
|
||||
createdAt: data.createdAt ?? null,
|
||||
archivedAt: data.archivedAt ?? null,
|
||||
trashedAt: data.trashedAt ?? null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Project UUID' },
|
||||
title: { type: 'string', description: 'Project title' },
|
||||
description: { type: 'string', description: 'Project description', optional: true },
|
||||
status: {
|
||||
type: 'object',
|
||||
description: 'Updated project status',
|
||||
properties: {
|
||||
name: { type: 'string', description: 'Status name (e.g., PUBLISHED, DRAFT)' },
|
||||
},
|
||||
},
|
||||
type: { type: 'string', description: 'Project type (PROJECT or COMPONENT)' },
|
||||
creator: {
|
||||
type: 'object',
|
||||
description: 'Project creator',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Creator email' },
|
||||
},
|
||||
},
|
||||
owner: {
|
||||
type: 'object',
|
||||
description: 'Project owner',
|
||||
optional: true,
|
||||
properties: {
|
||||
email: { type: 'string', description: 'Owner email' },
|
||||
},
|
||||
},
|
||||
categories: {
|
||||
type: 'array',
|
||||
description: 'Project categories',
|
||||
optional: true,
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: { type: 'string', description: 'Category name' },
|
||||
description: { type: 'string', description: 'Category description' },
|
||||
},
|
||||
},
|
||||
},
|
||||
lastEditedAt: { type: 'string', description: 'Last edited timestamp', optional: true },
|
||||
lastPublishedAt: { type: 'string', description: 'Last published timestamp', optional: true },
|
||||
createdAt: { type: 'string', description: 'Creation timestamp' },
|
||||
archivedAt: { type: 'string', description: 'Archived timestamp', optional: true },
|
||||
trashedAt: { type: 'string', description: 'Trashed timestamp', optional: true },
|
||||
},
|
||||
}
|
||||
@@ -723,6 +723,24 @@ import {
  greptileStatusTool,
} from '@/tools/greptile'
import { guardrailsValidateTool } from '@/tools/guardrails'
import {
  hexCancelRunTool,
  hexCreateCollectionTool,
  hexGetCollectionTool,
  hexGetDataConnectionTool,
  hexGetGroupTool,
  hexGetProjectRunsTool,
  hexGetProjectTool,
  hexGetQueriedTablesTool,
  hexGetRunStatusTool,
  hexListCollectionsTool,
  hexListDataConnectionsTool,
  hexListGroupsTool,
  hexListProjectsTool,
  hexListUsersTool,
  hexRunProjectTool,
  hexUpdateProjectTool,
} from '@/tools/hex'
import { httpRequestTool, webhookRequestTool } from '@/tools/http'
import {
  hubspotCreateCompanyTool,
@@ -1541,6 +1559,7 @@ import {
  slackCanvasTool,
  slackDeleteMessageTool,
  slackDownloadTool,
  slackEphemeralMessageTool,
  slackGetMessageTool,
  slackGetThreadTool,
  slackGetUserTool,
@@ -2057,6 +2076,22 @@ export const tools: Record<string, ToolConfig> = {
  grafana_create_folder: grafanaCreateFolderTool,
  google_search: googleSearchTool,
  guardrails_validate: guardrailsValidateTool,
  hex_cancel_run: hexCancelRunTool,
  hex_create_collection: hexCreateCollectionTool,
  hex_get_collection: hexGetCollectionTool,
  hex_get_data_connection: hexGetDataConnectionTool,
  hex_get_group: hexGetGroupTool,
  hex_get_project: hexGetProjectTool,
  hex_get_project_runs: hexGetProjectRunsTool,
  hex_get_queried_tables: hexGetQueriedTablesTool,
  hex_get_run_status: hexGetRunStatusTool,
  hex_list_collections: hexListCollectionsTool,
  hex_list_data_connections: hexListDataConnectionsTool,
  hex_list_groups: hexListGroupsTool,
  hex_list_projects: hexListProjectsTool,
  hex_list_users: hexListUsersTool,
  hex_run_project: hexRunProjectTool,
  hex_update_project: hexUpdateProjectTool,
  jina_read_url: jinaReadUrlTool,
  jina_search: jinaSearchTool,
  linkup_search: linkupSearchTool,
@@ -2216,6 +2251,7 @@ export const tools: Record<string, ToolConfig> = {
  slack_get_thread: slackGetThreadTool,
  slack_canvas: slackCanvasTool,
  slack_download: slackDownloadTool,
  slack_ephemeral_message: slackEphemeralMessageTool,
  slack_update_message: slackUpdateMessageTool,
  slack_delete_message: slackDeleteMessageTool,
  slack_add_reaction: slackAddReactionTool,

114
apps/sim/tools/slack/ephemeral_message.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import type {
|
||||
SlackEphemeralMessageParams,
|
||||
SlackEphemeralMessageResponse,
|
||||
} from '@/tools/slack/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export const slackEphemeralMessageTool: ToolConfig<
|
||||
SlackEphemeralMessageParams,
|
||||
SlackEphemeralMessageResponse
|
||||
> = {
|
||||
id: 'slack_ephemeral_message',
|
||||
name: 'Slack Ephemeral Message',
|
||||
description:
|
||||
'Send an ephemeral message visible only to a specific user in a channel. Optionally reply in a thread. The message does not persist across sessions.',
|
||||
version: '1.0.0',
|
||||
|
||||
oauth: {
|
||||
required: true,
|
||||
provider: 'slack',
|
||||
},
|
||||
|
||||
params: {
|
||||
authMethod: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Authentication method: oauth or bot_token',
|
||||
},
|
||||
botToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Bot token for Custom Bot',
|
||||
},
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token or bot token for Slack API',
|
||||
},
|
||||
channel: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Slack channel ID (e.g., C1234567890)',
|
||||
},
|
||||
user: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'User ID who will see the ephemeral message (e.g., U1234567890). Must be a member of the channel.',
|
||||
},
|
||||
text: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Message text to send (supports Slack mrkdwn formatting)',
|
||||
},
|
||||
threadTs: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Thread timestamp to reply in. When provided, the ephemeral message appears as a thread reply.',
|
||||
},
|
||||
blocks: {
|
||||
type: 'json',
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description:
|
||||
'Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text.',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: '/api/tools/slack/send-ephemeral',
|
||||
method: 'POST',
|
||||
headers: () => ({
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params: SlackEphemeralMessageParams) => ({
|
||||
accessToken: params.accessToken || params.botToken,
|
||||
channel: params.channel,
|
||||
user: params.user?.trim(),
|
||||
text: params.text,
|
||||
thread_ts: params.threadTs || undefined,
|
||||
blocks:
|
||||
typeof params.blocks === 'string' ? JSON.parse(params.blocks) : params.blocks || undefined,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
if (!data.success) {
|
||||
throw new Error(data.error || 'Failed to send ephemeral message')
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
output: data.output,
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
messageTs: {
|
||||
type: 'string',
|
||||
description: 'Timestamp of the ephemeral message (cannot be used with chat.update)',
|
||||
},
|
||||
channel: {
|
||||
type: 'string',
|
||||
description: 'Channel ID where the ephemeral message was sent',
|
||||
},
|
||||
},
|
||||
}
|
||||
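The tool above posts to the internal route /api/tools/slack/send-ephemeral, whose server-side handler is not part of this diff. Presumably that handler forwards to Slack's chat.postEphemeral Web API method, whose parameters (channel, user, text, thread_ts, blocks) line up with the tool's request body. A rough sketch of what that upstream call looks like; the sendEphemeral helper is illustrative, not code from the repository.

// Illustrative sketch (not from the repository): the Slack Web API call the
// proxy route is assumed to make. chat.postEphemeral returns message_ts on success.
async function sendEphemeral(token: string, channel: string, user: string, text: string) {
  const res = await fetch('https://slack.com/api/chat.postEphemeral', {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({ channel, user, text }),
  })
  const data = await res.json()
  if (!data.ok) throw new Error(data.error)
  return { messageTs: data.message_ts, channel }
}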
@@ -2,6 +2,7 @@ import { slackAddReactionTool } from '@/tools/slack/add_reaction'
import { slackCanvasTool } from '@/tools/slack/canvas'
import { slackDeleteMessageTool } from '@/tools/slack/delete_message'
import { slackDownloadTool } from '@/tools/slack/download'
import { slackEphemeralMessageTool } from '@/tools/slack/ephemeral_message'
import { slackGetMessageTool } from '@/tools/slack/get_message'
import { slackGetThreadTool } from '@/tools/slack/get_thread'
import { slackGetUserTool } from '@/tools/slack/get_user'
@@ -17,6 +18,7 @@ export {
  slackCanvasTool,
  slackMessageReaderTool,
  slackDownloadTool,
  slackEphemeralMessageTool,
  slackUpdateMessageTool,
  slackDeleteMessageTool,
  slackAddReactionTool,

@@ -63,6 +63,13 @@ export const slackMessageTool: ToolConfig<SlackMessageParams, SlackMessageRespon
      visibility: 'user-or-llm',
      description: 'Thread timestamp to reply to (creates thread reply)',
    },
    blocks: {
      type: 'json',
      required: false,
      visibility: 'user-or-llm',
      description:
        'Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text.',
    },
    files: {
      type: 'file[]',
      required: false,
@@ -85,6 +92,10 @@ export const slackMessageTool: ToolConfig<SlackMessageParams, SlackMessageRespon
        userId: isDM ? params.dmUserId : params.userId,
        text: params.text,
        thread_ts: params.threadTs || undefined,
        blocks:
          typeof params.blocks === 'string'
            ? JSON.parse(params.blocks)
            : params.blocks || undefined,
        files: params.files || null,
      }
    },

@@ -517,6 +517,7 @@ export interface SlackMessageParams extends SlackBaseParams {
  userId?: string
  text: string
  threadTs?: string
  blocks?: string
  files?: UserFile[]
}

@@ -546,6 +547,7 @@ export interface SlackUpdateMessageParams extends SlackBaseParams {
  channel: string
  timestamp: string
  text: string
  blocks?: string
}

export interface SlackDeleteMessageParams extends SlackBaseParams {
@@ -584,6 +586,14 @@ export interface SlackGetMessageParams extends SlackBaseParams {
  timestamp: string
}

export interface SlackEphemeralMessageParams extends SlackBaseParams {
  channel: string
  user: string
  text: string
  threadTs?: string
  blocks?: string
}

export interface SlackGetThreadParams extends SlackBaseParams {
  channel: string
  threadTs: string
@@ -831,6 +841,13 @@ export interface SlackGetMessageResponse extends ToolResponse {
  }
}

export interface SlackEphemeralMessageResponse extends ToolResponse {
  output: {
    messageTs: string
    channel: string
  }
}

export interface SlackGetThreadResponse extends ToolResponse {
  output: {
    parentMessage: SlackMessage
@@ -853,5 +870,6 @@ export type SlackResponse =
  | SlackListMembersResponse
  | SlackListUsersResponse
  | SlackGetUserResponse
  | SlackEphemeralMessageResponse
  | SlackGetMessageResponse
  | SlackGetThreadResponse

@@ -53,6 +53,13 @@ export const slackUpdateMessageTool: ToolConfig<
      visibility: 'user-or-llm',
      description: 'New message text (supports Slack mrkdwn formatting)',
    },
    blocks: {
      type: 'json',
      required: false,
      visibility: 'user-or-llm',
      description:
        'Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text.',
    },
  },

  request: {
@@ -66,6 +73,8 @@ export const slackUpdateMessageTool: ToolConfig<
      channel: params.channel,
      timestamp: params.timestamp,
      text: params.text,
      blocks:
        typeof params.blocks === 'string' ? JSON.parse(params.blocks) : params.blocks || undefined,
    }),
  },

@@ -4,7 +4,7 @@ import { env } from './lib/core/config/env'

export default defineConfig({
  project: env.TRIGGER_PROJECT_ID!,
  runtime: 'node',
  runtime: 'node-22',
  logLevel: 'log',
  maxDuration: 5400,
  retries: {

@@ -67,8 +67,8 @@ export const slackWebhookTrigger: TriggerConfig = {
    'Go to <a href="https://api.slack.com/apps" target="_blank" rel="noopener noreferrer" class="text-muted-foreground underline transition-colors hover:text-muted-foreground/80">Slack Apps page</a>',
    'If you don\'t have an app:<br><ul class="mt-1 ml-5 list-disc"><li>Create an app from scratch</li><li>Give it a name and select your workspace</li></ul>',
    'Go to "Basic Information", find the "Signing Secret", and paste it in the field above.',
    'Go to "OAuth & Permissions" and add bot token scopes:<br><ul class="mt-1 ml-5 list-disc"><li><code>app_mentions:read</code> - For viewing messages that tag your bot with an @</li><li><code>chat:write</code> - To send messages to channels your bot is a part of</li><li><code>files:read</code> - To access files and images shared in messages</li></ul>',
    'Go to "Event Subscriptions":<br><ul class="mt-1 ml-5 list-disc"><li>Enable events</li><li>Under "Subscribe to Bot Events", add <code>app_mention</code> to listen to messages that mention your bot</li><li>Paste the Webhook URL above into the "Request URL" field</li></ul>',
    'Go to "OAuth & Permissions" and add bot token scopes:<br><ul class="mt-1 ml-5 list-disc"><li><code>app_mentions:read</code> - For viewing messages that tag your bot with an @</li><li><code>chat:write</code> - To send messages to channels your bot is a part of</li><li><code>files:read</code> - To access files and images shared in messages</li><li><code>reactions:read</code> - For listening to emoji reactions and fetching reacted-to message text</li></ul>',
    'Go to "Event Subscriptions":<br><ul class="mt-1 ml-5 list-disc"><li>Enable events</li><li>Under "Subscribe to Bot Events", add <code>app_mention</code> to listen to messages that mention your bot</li><li>For reaction events, also add <code>reaction_added</code> and/or <code>reaction_removed</code></li><li>Paste the Webhook URL above into the "Request URL" field</li></ul>',
    'Go to "Install App" in the left sidebar and install the app into your desired Slack workspace and channel.',
    'Copy the "Bot User OAuth Token" (starts with <code>xoxb-</code>) and paste it in the Bot Token field above to enable file downloads.',
    'Save changes in both Slack and here.',
@@ -128,6 +128,16 @@ export const slackWebhookTrigger: TriggerConfig = {
      type: 'string',
      description: 'Unique event identifier',
    },
    reaction: {
      type: 'string',
      description:
        'Emoji reaction name (e.g., thumbsup). Present for reaction_added/reaction_removed events',
    },
    item_user: {
      type: 'string',
      description:
        'User ID of the original message author. Present for reaction_added/reaction_removed events',
    },
    hasFiles: {
      type: 'boolean',
      description: 'Whether the message has file attachments',

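Illustration only, not from the repo: a reaction_added event delivered by Slack's Events API carries roughly the fields below, which is where the new reaction and item_user outputs come from. IDs and timestamps are placeholders and the surrounding event envelope is omitted.

// Hypothetical reaction_added event body; all IDs and timestamps are placeholders.
const exampleReactionEvent = {
  type: 'reaction_added',
  user: 'U0AAAAAAA',      // member who added the reaction
  reaction: 'thumbsup',   // surfaced by the new `reaction` output
  item_user: 'U0BBBBBBB', // author of the reacted-to message, surfaced as `item_user`
  item: { type: 'message', channel: 'C0123456789', ts: '1712345678.000100' },
  event_ts: '1712345679.000200',
}
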
@@ -124,7 +124,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \

# Create .next/cache directory with correct ownership
RUN mkdir -p apps/sim/.next/cache && \
    chown -R nextjs:nodejs /app
    chown -R nextjs:nodejs apps/sim/.next/cache

# Switch to non-root user
USER nextjs

57
packages/db/migrations/0157_exotic_dormammu.sql
Normal file
@@ -0,0 +1,57 @@
CREATE TABLE "jwks" (
  "id" text PRIMARY KEY NOT NULL,
  "public_key" text NOT NULL,
  "private_key" text NOT NULL,
  "created_at" timestamp NOT NULL
);
--> statement-breakpoint
CREATE TABLE "oauth_access_token" (
  "id" text PRIMARY KEY NOT NULL,
  "access_token" text NOT NULL,
  "refresh_token" text NOT NULL,
  "access_token_expires_at" timestamp NOT NULL,
  "refresh_token_expires_at" timestamp NOT NULL,
  "client_id" text NOT NULL,
  "user_id" text,
  "scopes" text NOT NULL,
  "created_at" timestamp NOT NULL,
  "updated_at" timestamp NOT NULL,
  CONSTRAINT "oauth_access_token_access_token_unique" UNIQUE("access_token"),
  CONSTRAINT "oauth_access_token_refresh_token_unique" UNIQUE("refresh_token")
);
--> statement-breakpoint
CREATE TABLE "oauth_application" (
  "id" text PRIMARY KEY NOT NULL,
  "name" text NOT NULL,
  "icon" text,
  "metadata" text,
  "client_id" text NOT NULL,
  "client_secret" text,
  "redirect_urls" text NOT NULL,
  "type" text NOT NULL,
  "disabled" boolean DEFAULT false,
  "user_id" text,
  "created_at" timestamp NOT NULL,
  "updated_at" timestamp NOT NULL,
  CONSTRAINT "oauth_application_client_id_unique" UNIQUE("client_id")
);
--> statement-breakpoint
CREATE TABLE "oauth_consent" (
  "id" text PRIMARY KEY NOT NULL,
  "client_id" text NOT NULL,
  "user_id" text NOT NULL,
  "scopes" text NOT NULL,
  "created_at" timestamp NOT NULL,
  "updated_at" timestamp NOT NULL,
  "consent_given" boolean NOT NULL
);
--> statement-breakpoint
ALTER TABLE "oauth_access_token" ADD CONSTRAINT "oauth_access_token_client_id_oauth_application_client_id_fk" FOREIGN KEY ("client_id") REFERENCES "public"."oauth_application"("client_id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "oauth_access_token" ADD CONSTRAINT "oauth_access_token_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "oauth_application" ADD CONSTRAINT "oauth_application_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "oauth_consent" ADD CONSTRAINT "oauth_consent_client_id_oauth_application_client_id_fk" FOREIGN KEY ("client_id") REFERENCES "public"."oauth_application"("client_id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "oauth_consent" ADD CONSTRAINT "oauth_consent_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "oauth_access_token_access_token_idx" ON "oauth_access_token" USING btree ("access_token");--> statement-breakpoint
CREATE INDEX "oauth_access_token_refresh_token_idx" ON "oauth_access_token" USING btree ("refresh_token");--> statement-breakpoint
CREATE INDEX "oauth_application_client_id_idx" ON "oauth_application" USING btree ("client_id");--> statement-breakpoint
CREATE INDEX "oauth_consent_user_client_idx" ON "oauth_consent" USING btree ("user_id","client_id");

11824
packages/db/migrations/meta/0157_snapshot.json
Normal file
File diff suppressed because it is too large
@@ -1093,6 +1093,13 @@
      "when": 1771528429740,
      "tag": "0156_easy_odin",
      "breakpoints": true
    },
    {
      "idx": 157,
      "version": "7",
      "when": 1771621587420,
      "tag": "0157_exotic_dormammu",
      "breakpoints": true
    }
  ]
}

@@ -2334,3 +2334,73 @@ export const userTableRows = pgTable(
    ),
  })
)

export const oauthApplication = pgTable(
  'oauth_application',
  {
    id: text('id').primaryKey(),
    name: text('name').notNull(),
    icon: text('icon'),
    metadata: text('metadata'),
    clientId: text('client_id').notNull().unique(),
    clientSecret: text('client_secret'),
    redirectURLs: text('redirect_urls').notNull(),
    type: text('type').notNull(),
    disabled: boolean('disabled').default(false),
    userId: text('user_id').references(() => user.id, { onDelete: 'cascade' }),
    createdAt: timestamp('created_at').notNull(),
    updatedAt: timestamp('updated_at').notNull(),
  },
  (table) => ({
    clientIdIdx: index('oauth_application_client_id_idx').on(table.clientId),
  })
)

export const oauthAccessToken = pgTable(
  'oauth_access_token',
  {
    id: text('id').primaryKey(),
    accessToken: text('access_token').notNull().unique(),
    refreshToken: text('refresh_token').notNull().unique(),
    accessTokenExpiresAt: timestamp('access_token_expires_at').notNull(),
    refreshTokenExpiresAt: timestamp('refresh_token_expires_at').notNull(),
    clientId: text('client_id')
      .notNull()
      .references(() => oauthApplication.clientId, { onDelete: 'cascade' }),
    userId: text('user_id').references(() => user.id, { onDelete: 'cascade' }),
    scopes: text('scopes').notNull(),
    createdAt: timestamp('created_at').notNull(),
    updatedAt: timestamp('updated_at').notNull(),
  },
  (table) => ({
    accessTokenIdx: index('oauth_access_token_access_token_idx').on(table.accessToken),
    refreshTokenIdx: index('oauth_access_token_refresh_token_idx').on(table.refreshToken),
  })
)

export const oauthConsent = pgTable(
  'oauth_consent',
  {
    id: text('id').primaryKey(),
    clientId: text('client_id')
      .notNull()
      .references(() => oauthApplication.clientId, { onDelete: 'cascade' }),
    userId: text('user_id')
      .notNull()
      .references(() => user.id, { onDelete: 'cascade' }),
    scopes: text('scopes').notNull(),
    createdAt: timestamp('created_at').notNull(),
    updatedAt: timestamp('updated_at').notNull(),
    consentGiven: boolean('consent_given').notNull(),
  },
  (table) => ({
    userClientIdx: index('oauth_consent_user_client_idx').on(table.userId, table.clientId),
  })
)

export const jwks = pgTable('jwks', {
  id: text('id').primaryKey(),
  publicKey: text('public_key').notNull(),
  privateKey: text('private_key').notNull(),
  createdAt: timestamp('created_at').notNull(),
})

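Illustration only, not code from the repo: a token lookup against the new tables might look roughly like this with Drizzle. The db client and schema import paths are assumptions, and the token value is a placeholder.

// Hypothetical usage of the new OAuth tables; import paths are assumed, not taken from the repo.
import { eq } from 'drizzle-orm'
import { db } from './db' // assumed client export
import { oauthAccessToken, oauthApplication } from './schema' // assumed schema export

// Resolve an access token together with the application that issued it.
const rows = await db
  .select({ token: oauthAccessToken, app: oauthApplication })
  .from(oauthAccessToken)
  .innerJoin(oauthApplication, eq(oauthAccessToken.clientId, oauthApplication.clientId))
  .where(eq(oauthAccessToken.accessToken, 'placeholder-token'))
  .limit(1)
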
@@ -1,5 +1,5 @@
{
  "$schema": "https://v2-8-0.turborepo.dev/schema.json",
  "$schema": "https://v2-8-10.turborepo.dev/schema.json",
  "envMode": "loose",
  "tasks": {
    "build": {