Compare commits

..

7 Commits

Author SHA1 Message Date
Siddharth Ganesan
3338b25c30 Checkpoint 2026-02-18 18:55:10 -08:00
Siddharth Ganesan
4c3002f97d Checkpoint 2026-02-18 18:38:37 -08:00
Siddharth Ganesan
632e0e0762 Checkpoint 2026-02-18 15:29:58 -08:00
Siddharth Ganesan
7599774974 Checkpoint 2026-02-17 18:54:15 -08:00
Siddharth Ganesan
471e58a2d0 Checkpoint 2026-02-17 17:04:34 -08:00
Siddharth Ganesan
231ddc59a0 V0 2026-02-17 16:07:55 -08:00
Siddharth Ganesan
b197f68828 v0 2026-02-17 15:28:23 -08:00
116 changed files with 14416 additions and 2415 deletions

View File

@@ -59,6 +59,12 @@ body {
--content-gap: 1.75rem;
}
/* Remove custom layout variable overrides to fallback to fumadocs defaults */
/* ============================================
Navbar Light Mode Styling
============================================ */
/* Light mode navbar and search styling */
:root:not(.dark) nav {
background-color: hsla(0, 0%, 96%, 0.85) !important;
@@ -82,6 +88,10 @@ body {
-webkit-backdrop-filter: blur(25px) saturate(180%) brightness(0.6) !important;
}
/* ============================================
Custom Sidebar Styling (Turborepo-inspired)
============================================ */
/* Floating sidebar appearance - remove background */
[data-sidebar-container],
#nd-sidebar {
@@ -458,6 +468,10 @@ aside[data-sidebar],
writing-mode: horizontal-tb !important;
}
/* ============================================
Code Block Styling (Improved)
============================================ */
/* Apply Geist Mono to code elements */
code,
pre,
@@ -518,6 +532,10 @@ pre code .line {
color: var(--color-fd-primary);
}
/* ============================================
TOC (Table of Contents) Styling
============================================ */
/* Remove the thin border-left on nested TOC items (keeps main indicator only) */
#nd-toc a[style*="padding-inline-start"] {
border-left: none !important;
@@ -536,6 +554,10 @@ main article,
padding-bottom: 4rem;
}
/* ============================================
Center and Constrain Main Content Width
============================================ */
/* Main content area - center and constrain like turborepo/raindrop */
/* Note: --sidebar-offset and --toc-offset are now applied at #nd-docs-layout level */
main[data-main] {

View File

@@ -234,6 +234,7 @@ List actions from incident.io. Optionally filter by incident ID.
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter actions by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of actions to return per page \(e.g., 10, 25, 50\) |
#### Output
@@ -308,6 +309,7 @@ List follow-ups from incident.io. Optionally filter by incident ID.
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `incident_id` | string | No | Filter follow-ups by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
| `page_size` | number | No | Number of follow-ups to return per page \(e.g., 10, 25, 50\) |
#### Output
@@ -394,7 +396,6 @@ List all users in your Incident.io workspace. Returns user details including id,
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Incident.io API Key |
| `page_size` | number | No | Number of results to return per page \(e.g., 10, 25, 50\). Default: 25 |
| `after` | string | No | Pagination cursor to fetch the next page of results |
#### Output
@@ -405,10 +406,6 @@ List all users in your Incident.io workspace. Returns user details including id,
| ↳ `name` | string | Full name of the user |
| ↳ `email` | string | Email address of the user |
| ↳ `role` | string | Role of the user in the workspace |
| `pagination_meta` | object | Pagination metadata |
| ↳ `after` | string | Cursor for next page |
| ↳ `page_size` | number | Number of items per page |
| ↳ `total_record_count` | number | Total number of records |
### `incidentio_users_show`
@@ -647,6 +644,7 @@ List all escalation policies in incident.io
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | incident.io API Key |
| `page_size` | number | No | Number of results per page \(e.g., 10, 25, 50\). Default: 25 |
#### Output

View File

@@ -49,7 +49,6 @@ Retrieve all deals from Pipedrive with optional filters
| `pipeline_id` | string | No | If supplied, only deals in the specified pipeline are returned \(e.g., "1"\) |
| `updated_since` | string | No | If set, only deals updated after this time are returned. Format: 2025-01-01T10:20:00Z |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
#### Output
@@ -75,8 +74,6 @@ Retrieve all deals from Pipedrive with optional filters
| `metadata` | object | Pagination metadata for the response |
| ↳ `total_items` | number | Total number of items |
| ↳ `has_more` | boolean | Whether more items are available |
| ↳ `next_cursor` | string | Cursor for fetching the next page \(v2 endpoints\) |
| ↳ `next_start` | number | Offset for fetching the next page \(v1 endpoints\) |
| `success` | boolean | Operation success status |
### `pipedrive_get_deal`
@@ -151,9 +148,10 @@ Retrieve files from Pipedrive with optional filters
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `sort` | string | No | Sort files by field \(supported: "id", "update_time"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 100\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
| `deal_id` | string | No | Filter files by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter files by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter files by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `downloadFiles` | boolean | No | Download file contents into file outputs |
#### Output
@@ -173,8 +171,6 @@ Retrieve files from Pipedrive with optional filters
| ↳ `url` | string | File download URL |
| `downloadedFiles` | file[] | Downloaded files from Pipedrive |
| `total_items` | number | Total number of files returned |
| `has_more` | boolean | Whether more files are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_get_mail_messages`
@@ -187,7 +183,6 @@ Retrieve mail threads from Pipedrive mailbox
| --------- | ---- | -------- | ----------- |
| `folder` | string | No | Filter by folder: inbox, drafts, sent, archive \(default: inbox\) |
| `limit` | string | No | Number of results to return \(e.g., "25", default: 50\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -195,8 +190,6 @@ Retrieve mail threads from Pipedrive mailbox
| --------- | ---- | ----------- |
| `messages` | array | Array of mail thread objects from Pipedrive mailbox |
| `total_items` | number | Total number of mail threads returned |
| `has_more` | boolean | Whether more messages are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_get_mail_thread`
@@ -228,7 +221,7 @@ Retrieve all pipelines from Pipedrive
| `sort_by` | string | No | Field to sort by: id, update_time, add_time \(default: id\) |
| `sort_direction` | string | No | Sorting direction: asc, desc \(default: asc\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
#### Output
@@ -244,8 +237,6 @@ Retrieve all pipelines from Pipedrive
| ↳ `add_time` | string | When the pipeline was created |
| ↳ `update_time` | string | When the pipeline was last updated |
| `total_items` | number | Total number of pipelines returned |
| `has_more` | boolean | Whether more pipelines are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_get_pipeline_deals`
@@ -258,8 +249,8 @@ Retrieve all deals in a specific pipeline
| --------- | ---- | -------- | ----------- |
| `pipeline_id` | string | Yes | The ID of the pipeline \(e.g., "1"\) |
| `stage_id` | string | No | Filter by specific stage within the pipeline \(e.g., "2"\) |
| `status` | string | No | Filter by deal status: open, won, lost |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -280,7 +271,6 @@ Retrieve all projects or a specific project from Pipedrive
| `project_id` | string | No | Optional: ID of a specific project to retrieve \(e.g., "123"\) |
| `status` | string | No | Filter by project status: open, completed, deleted \(only for listing all\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500, only for listing all\) |
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
#### Output
@@ -289,8 +279,6 @@ Retrieve all projects or a specific project from Pipedrive
| `projects` | array | Array of project objects \(when listing all\) |
| `project` | object | Single project object \(when project_id is provided\) |
| `total_items` | number | Total number of projects returned |
| `has_more` | boolean | Whether more projects are available |
| `next_cursor` | string | Cursor for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_create_project`
@@ -321,11 +309,12 @@ Retrieve activities (tasks) from Pipedrive with optional filters
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `user_id` | string | No | Filter activities by user ID \(e.g., "123"\) |
| `deal_id` | string | No | Filter activities by deal ID \(e.g., "123"\) |
| `person_id` | string | No | Filter activities by person ID \(e.g., "456"\) |
| `org_id` | string | No | Filter activities by organization ID \(e.g., "789"\) |
| `type` | string | No | Filter by activity type \(call, meeting, task, deadline, email, lunch\) |
| `done` | string | No | Filter by completion status: 0 for not done, 1 for done |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -346,8 +335,6 @@ Retrieve activities (tasks) from Pipedrive with optional filters
| ↳ `add_time` | string | When the activity was created |
| ↳ `update_time` | string | When the activity was last updated |
| `total_items` | number | Total number of activities returned |
| `has_more` | boolean | Whether more activities are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_create_activity`
@@ -412,7 +399,6 @@ Retrieve all leads or a specific lead from Pipedrive
| `person_id` | string | No | Filter by person ID \(e.g., "456"\) |
| `organization_id` | string | No | Filter by organization ID \(e.g., "789"\) |
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
#### Output
@@ -447,8 +433,6 @@ Retrieve all leads or a specific lead from Pipedrive
| ↳ `add_time` | string | When the lead was created \(ISO 8601\) |
| ↳ `update_time` | string | When the lead was last updated \(ISO 8601\) |
| `total_items` | number | Total number of leads returned |
| `has_more` | boolean | Whether more leads are available |
| `next_start` | number | Offset for fetching the next page |
| `success` | boolean | Operation success status |
### `pipedrive_create_lead`

View File

@@ -57,7 +57,6 @@ Query data from a Supabase table
| `filter` | string | No | PostgREST filter \(e.g., "id=eq.123"\) |
| `orderBy` | string | No | Column to order by \(add DESC for descending\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |
#### Output
@@ -212,7 +211,6 @@ Perform full-text search on a Supabase table
| `searchType` | string | No | Search type: plain, phrase, or websearch \(default: websearch\) |
| `language` | string | No | Language for text search configuration \(default: english\) |
| `limit` | number | No | Maximum number of rows to return |
| `offset` | number | No | Number of rows to skip \(for pagination\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |
#### Output

View File

@@ -43,8 +43,6 @@ Retrieve form responses from Typeform
| `formId` | string | Yes | Typeform form ID \(e.g., "abc123XYZ"\) |
| `apiKey` | string | Yes | Typeform Personal Access Token |
| `pageSize` | number | No | Number of responses to retrieve \(e.g., 10, 25, 50\) |
| `before` | string | No | Cursor token for fetching the next page of older responses |
| `after` | string | No | Cursor token for fetching the next page of newer responses |
| `since` | string | No | Retrieve responses submitted after this date \(e.g., "2024-01-01T00:00:00Z"\) |
| `until` | string | No | Retrieve responses submitted before this date \(e.g., "2024-12-31T23:59:59Z"\) |
| `completed` | string | No | Filter by completion status \(e.g., "true", "false", "all"\) |

View File

@@ -67,9 +67,10 @@ Retrieve a list of tickets from Zendesk with optional filtering
| `type` | string | No | Filter by type: "problem", "incident", "question", or "task" |
| `assigneeId` | string | No | Filter by assignee user ID as a numeric string \(e.g., "12345"\) |
| `organizationId` | string | No | Filter by organization ID as a numeric string \(e.g., "67890"\) |
| `sort` | string | No | Sort field for ticket listing \(only applies without filters\): "updated_at", "id", or "status". Prefix with "-" for descending \(e.g., "-updated_at"\) |
| `sortBy` | string | No | Sort field: "created_at", "updated_at", "priority", or "status" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
#### Output
@@ -128,10 +129,10 @@ Retrieve a list of tickets from Zendesk with optional filtering
| ↳ `from_messaging_channel` | boolean | Whether the ticket originated from a messaging channel |
| ↳ `ticket_form_id` | number | Ticket form ID |
| ↳ `generated_timestamp` | number | Unix timestamp of the ticket generation |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| `paging` | object | Pagination information |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -514,7 +515,7 @@ Retrieve a list of users from Zendesk with optional filtering
| `role` | string | No | Filter by role: "end-user", "agent", or "admin" |
| `permissionSet` | string | No | Filter by permission set ID as a numeric string \(e.g., "12345"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
#### Output
@@ -562,10 +563,10 @@ Retrieve a list of users from Zendesk with optional filtering
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| `paging` | object | Pagination information |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -705,7 +706,7 @@ Search for users in Zendesk using a query string
| `query` | string | No | Search query string \(e.g., user name or email\) |
| `externalId` | string | No | External ID to search by \(your system identifier\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number for pagination \(1-based\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
#### Output
@@ -753,10 +754,10 @@ Search for users in Zendesk using a query string
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
| ↳ `remote_photo_url` | string | URL to a remote photo |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| `paging` | object | Pagination information |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -998,7 +999,7 @@ Retrieve a list of organizations from Zendesk
| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain \(e.g., "mycompany" for mycompany.zendesk.com\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
#### Output
@@ -1019,10 +1020,10 @@ Retrieve a list of organizations from Zendesk
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| `paging` | object | Pagination information |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -1074,7 +1075,7 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
| `subdomain` | string | Yes | Your Zendesk subdomain |
| `name` | string | Yes | Organization name prefix to search for \(e.g., "Acme"\) |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `page` | string | No | Page number for pagination \(1-based\) |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
#### Output
@@ -1095,10 +1096,10 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
| ↳ `external_id` | string | External ID for linking to external records |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| `paging` | object | Pagination information |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |
@@ -1248,18 +1249,19 @@ Unified search across tickets, users, and organizations in Zendesk
| `apiToken` | string | Yes | Zendesk API token |
| `subdomain` | string | Yes | Your Zendesk subdomain |
| `query` | string | Yes | Search query string using Zendesk search syntax \(e.g., "type:ticket status:open"\) |
| `filterType` | string | Yes | Resource type to search for: "ticket", "user", "organization", or "group" |
| `sortBy` | string | No | Sort field: "relevance", "created_at", "updated_at", "priority", "status", or "ticket_type" |
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `paging` | object | Cursor-based pagination information |
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
| ↳ `has_more` | boolean | Whether more results are available |
| `paging` | object | Pagination information |
| ↳ `next_page` | string | URL for next page of results |
| ↳ `previous_page` | string | URL for previous page of results |
| ↳ `count` | number | Total count of items |
| `metadata` | object | Response metadata |
| ↳ `total_returned` | number | Number of items returned in this response |
| ↳ `has_more` | boolean | Whether more items are available |

View File

@@ -1,3 +1,5 @@
'use server'
import { env } from '@/lib/core/config/env'
import { isProd } from '@/lib/core/config/feature-flags'

View File

@@ -85,7 +85,7 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
transform: isAnimated ? 'translateY(0) scale(1)' : 'translateY(8px) scale(0.98)',
transition:
'opacity 0.6s cubic-bezier(0.22, 1, 0.36, 1), transform 0.6s cubic-bezier(0.22, 1, 0.36, 1)',
willChange: isAnimated ? 'auto' : 'transform, opacity',
willChange: 'transform, opacity',
}}
>
<LandingBlock icon={data.icon} color={data.color} name={data.name} tags={data.tags} />

View File

@@ -67,6 +67,7 @@ export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
strokeLinejoin: 'round',
pointerEvents: 'none',
animation: `landing-edge-dash-${id} 1s linear infinite`,
willChange: 'stroke-dashoffset',
...style,
}}
/>

View File

@@ -754,100 +754,3 @@ input[type="search"]::-ms-clear {
text-decoration: none !important;
color: inherit !important;
}
/**
* Respect user's prefers-reduced-motion setting (WCAG 2.3.3)
* Disables animations and transitions for users who prefer reduced motion.
*/
@media (prefers-reduced-motion: reduce) {
*,
*::before,
*::after {
animation-duration: 0.01ms !important;
animation-iteration-count: 1 !important;
transition-duration: 0.01ms !important;
scroll-behavior: auto !important;
}
}
/* WandPromptBar status indicator */
@keyframes smoke-pulse {
0%,
100% {
transform: scale(0.8);
opacity: 0.4;
}
50% {
transform: scale(1.1);
opacity: 0.8;
}
}
.status-indicator {
position: relative;
width: 12px;
height: 12px;
border-radius: 50%;
overflow: hidden;
background-color: hsl(var(--muted-foreground) / 0.5);
transition: background-color 0.3s ease;
}
.status-indicator.streaming {
background-color: transparent;
}
.status-indicator.streaming::before {
content: "";
position: absolute;
inset: 0;
border-radius: 50%;
background: radial-gradient(
circle,
hsl(var(--primary) / 0.9) 0%,
hsl(var(--primary) / 0.4) 60%,
transparent 80%
);
animation: smoke-pulse 1.8s ease-in-out infinite;
opacity: 0.9;
}
.dark .status-indicator.streaming::before {
background: #6b7280;
opacity: 0.9;
animation: smoke-pulse 1.8s ease-in-out infinite;
}
/* MessageContainer loading dot */
@keyframes growShrink {
0%,
100% {
transform: scale(0.9);
}
50% {
transform: scale(1.1);
}
}
.loading-dot {
animation: growShrink 1.5s infinite ease-in-out;
}
/* Subflow node z-index and drag-over styles */
.workflow-container .react-flow__node-subflowNode {
z-index: -1 !important;
}
.workflow-container .react-flow__node-subflowNode:has([data-subflow-selected="true"]) {
z-index: 10 !important;
}
.loop-node-drag-over,
.parallel-node-drag-over {
box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
border-radius: 8px !important;
}
.react-flow__node[data-parent-node-id] .react-flow__handle {
z-index: 30;
}

View File

@@ -1,12 +1,22 @@
import { db } from '@sim/db'
import { settings } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { env } from '@/lib/core/config/env'
const logger = createLogger('CopilotAutoAllowedToolsAPI')
/** Builds the header set for server-to-server requests to the Go copilot backend. */
function copilotHeaders(): Record<string, string> {
  // Always send JSON; attach the shared API key only when one is configured.
  const result: Record<string, string> = { 'Content-Type': 'application/json' }
  const apiKey = env.COPILOT_API_KEY
  if (apiKey) {
    result['x-api-key'] = apiKey
  }
  return result
}
/**
* GET - Fetch user's auto-allowed integration tools
*/
@@ -20,24 +30,18 @@ export async function GET() {
const userId = session.user.id
const [userSettings] = await db
.select()
.from(settings)
.where(eq(settings.userId, userId))
.limit(1)
const res = await fetch(
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}`,
{ method: 'GET', headers: copilotHeaders() }
)
if (userSettings) {
const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
return NextResponse.json({ autoAllowedTools })
if (!res.ok) {
logger.warn('Go backend returned error for list auto-allowed', { status: res.status })
return NextResponse.json({ autoAllowedTools: [] })
}
await db.insert(settings).values({
id: userId,
userId,
copilotAutoAllowedTools: [],
})
return NextResponse.json({ autoAllowedTools: [] })
const payload = await res.json()
return NextResponse.json({ autoAllowedTools: payload?.autoAllowedTools || [] })
} catch (error) {
logger.error('Failed to fetch auto-allowed tools', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
@@ -62,38 +66,22 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
}
const toolId = body.toolId
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
if (existing) {
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
if (!currentTools.includes(toolId)) {
const updatedTools = [...currentTools, toolId]
await db
.update(settings)
.set({
copilotAutoAllowedTools: updatedTools,
updatedAt: new Date(),
})
.where(eq(settings.userId, userId))
logger.info('Added tool to auto-allowed list', { userId, toolId })
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
}
return NextResponse.json({ success: true, autoAllowedTools: currentTools })
}
await db.insert(settings).values({
id: userId,
userId,
copilotAutoAllowedTools: [toolId],
const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
method: 'POST',
headers: copilotHeaders(),
body: JSON.stringify({ userId, toolId: body.toolId }),
})
logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
if (!res.ok) {
logger.warn('Go backend returned error for add auto-allowed', { status: res.status })
return NextResponse.json({ error: 'Failed to add tool' }, { status: 500 })
}
const payload = await res.json()
return NextResponse.json({
success: true,
autoAllowedTools: payload?.autoAllowedTools || [],
})
} catch (error) {
logger.error('Failed to add auto-allowed tool', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
@@ -119,25 +107,21 @@ export async function DELETE(request: NextRequest) {
return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
}
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
const res = await fetch(
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}&toolId=${encodeURIComponent(toolId)}`,
{ method: 'DELETE', headers: copilotHeaders() }
)
if (existing) {
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
const updatedTools = currentTools.filter((t) => t !== toolId)
await db
.update(settings)
.set({
copilotAutoAllowedTools: updatedTools,
updatedAt: new Date(),
})
.where(eq(settings.userId, userId))
logger.info('Removed tool from auto-allowed list', { userId, toolId })
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
if (!res.ok) {
logger.warn('Go backend returned error for remove auto-allowed', { status: res.status })
return NextResponse.json({ error: 'Failed to remove tool' }, { status: 500 })
}
return NextResponse.json({ success: true, autoAllowedTools: [] })
const payload = await res.json()
return NextResponse.json({
success: true,
autoAllowedTools: payload?.autoAllowedTools || [],
})
} catch (error) {
logger.error('Failed to remove auto-allowed tool', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })

View File

@@ -0,0 +1,130 @@
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
// Workspace prompt is now generated by the Go copilot backend (detected via source: 'workspace-chat')
const logger = createLogger('WorkspaceChatAPI')
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
export const maxDuration = 300
const WorkspaceChatSchema = z.object({
message: z.string().min(1, 'Message is required'),
workspaceId: z.string().min(1, 'workspaceId is required'),
chatId: z.string().optional(),
model: z.string().optional().default('claude-opus-4-5'),
})
/**
 * POST /api/copilot/workspace-chat
 *
 * Streams a workspace-scoped copilot conversation back to the client as
 * server-sent events. Resolves (or creates) the chat row, forwards the
 * message to the copilot orchestrator with tool auto-execution enabled,
 * relays orchestrator events to the client, and persists the backend
 * conversation id once the run completes.
 *
 * Responses:
 * - 200: `text/event-stream` of JSON events (`chat_id`, orchestrator events, then `done` or `error`)
 * - 400: request body failed schema validation (zod details included)
 * - 401: no authenticated session
 * - 500: unexpected failure before the stream starts
 */
export async function POST(req: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }
    const body = await req.json()
    const { message, workspaceId, chatId, model } = WorkspaceChatSchema.parse(body)
    const chatResult = await resolveOrCreateChat({
      chatId,
      userId: session.user.id,
      workspaceId,
      model,
    })
    // Payload forwarded to the orchestrator. `source: 'workspace-chat'` tells the
    // Go copilot backend to generate the workspace prompt (see file-top comment).
    const requestPayload: Record<string, unknown> = {
      message,
      userId: session.user.id,
      model,
      mode: 'agent',
      headless: true,
      messageId: crypto.randomUUID(),
      version: SIM_AGENT_VERSION,
      source: 'workspace-chat',
      stream: true,
      ...(chatResult.chatId ? { chatId: chatResult.chatId } : {}),
    }
    const encoder = new TextEncoder()
    const stream = new ReadableStream({
      async start(controller) {
        /** Serializes one event as an SSE `data:` frame; enqueue errors after a client disconnect are swallowed. */
        const pushEvent = (event: Record<string, unknown>) => {
          try {
            controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`))
          } catch {
            // Client disconnected
          }
        }
        // Emit the chat id first so the client can attach follow-up messages to it.
        if (chatResult.chatId) {
          pushEvent({ type: 'chat_id', chatId: chatResult.chatId })
        }
        try {
          const result = await orchestrateCopilotStream(requestPayload, {
            userId: session.user.id,
            workspaceId,
            chatId: chatResult.chatId || undefined,
            autoExecuteTools: true,
            interactive: false,
            onEvent: async (event: SSEEvent) => {
              pushEvent(event as unknown as Record<string, unknown>)
            },
          })
          // Persist the backend conversation id so this chat can be resumed later.
          if (chatResult.chatId && result.conversationId) {
            await db
              .update(copilotChats)
              .set({
                updatedAt: new Date(),
                conversationId: result.conversationId,
              })
              .where(eq(copilotChats.id, chatResult.chatId))
          }
          pushEvent({
            type: 'done',
            success: result.success,
            content: result.content,
          })
        } catch (error) {
          logger.error('Workspace chat orchestration failed', { error })
          pushEvent({
            type: 'error',
            error: error instanceof Error ? error.message : 'Chat failed',
          })
        } finally {
          // close() throws if the controller is already closed/errored (e.g. the
          // client disconnected mid-stream); guard it so we never surface an
          // unhandled rejection from the stream's start() promise.
          try {
            controller.close()
          } catch {
            // Already closed — nothing to do.
          }
        }
      },
    })
    return new Response(stream, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        Connection: 'keep-alive',
        'X-Accel-Buffering': 'no',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Invalid request', details: error.errors },
        { status: 400 }
      )
    }
    logger.error('Workspace chat error', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

View File

@@ -22,20 +22,15 @@ interface PipedriveFile {
interface PipedriveApiResponse {
success: boolean
data?: PipedriveFile[]
additional_data?: {
pagination?: {
more_items_in_collection: boolean
next_start: number
}
}
error?: string
}
const PipedriveGetFilesSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
sort: z.enum(['id', 'update_time']).optional().nullable(),
deal_id: z.string().optional().nullable(),
person_id: z.string().optional().nullable(),
org_id: z.string().optional().nullable(),
limit: z.string().optional().nullable(),
start: z.string().optional().nullable(),
downloadFiles: z.boolean().optional().default(false),
})
@@ -59,19 +54,20 @@ export async function POST(request: NextRequest) {
const body = await request.json()
const validatedData = PipedriveGetFilesSchema.parse(body)
const { accessToken, sort, limit, start, downloadFiles } = validatedData
const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
const baseUrl = 'https://api.pipedrive.com/v1/files'
const queryParams = new URLSearchParams()
if (sort) queryParams.append('sort', sort)
if (deal_id) queryParams.append('deal_id', deal_id)
if (person_id) queryParams.append('person_id', person_id)
if (org_id) queryParams.append('org_id', org_id)
if (limit) queryParams.append('limit', limit)
if (start) queryParams.append('start', start)
const queryString = queryParams.toString()
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
logger.info(`[${requestId}] Fetching files from Pipedrive`)
logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
if (!urlValidation.isValid) {
@@ -97,8 +93,6 @@ export async function POST(request: NextRequest) {
}
const files = data.data || []
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
const nextStart = data.additional_data?.pagination?.next_start ?? null
const downloadedFiles: Array<{
name: string
mimeType: string
@@ -155,8 +149,6 @@ export async function POST(request: NextRequest) {
files,
downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
total_items: files.length,
has_more: hasMore,
next_start: nextStart,
success: true,
},
})

View File

@@ -30,6 +30,21 @@ export const ChatMessageContainer = memo(function ChatMessageContainer({
}: ChatMessageContainerProps) {
return (
<div className='relative flex flex-1 flex-col overflow-hidden bg-white'>
<style jsx>{`
@keyframes growShrink {
0%,
100% {
transform: scale(0.9);
}
50% {
transform: scale(1.1);
}
}
.loading-dot {
animation: growShrink 1.5s infinite ease-in-out;
}
`}</style>
{/* Scrollable Messages Area */}
<div
ref={messagesContainerRef}

View File

@@ -71,7 +71,7 @@ export function VoiceInterface({
const [state, setState] = useState<'idle' | 'listening' | 'agent_speaking'>('idle')
const [isInitialized, setIsInitialized] = useState(false)
const [isMuted, setIsMuted] = useState(false)
const [audioLevels, setAudioLevels] = useState<number[]>(() => new Array(200).fill(0))
const [audioLevels, setAudioLevels] = useState<number[]>(new Array(200).fill(0))
const [permissionStatus, setPermissionStatus] = useState<'prompt' | 'granted' | 'denied'>(
'prompt'
)

View File

@@ -0,0 +1,259 @@
'use client'
import { useCallback, useRef, useState } from 'react'
import { Check, CircleAlert, Loader2, Send, Square, Zap } from 'lucide-react'
import { useParams } from 'next/navigation'
import ReactMarkdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
import { Button } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { ContentBlock, ToolCallInfo, ToolCallStatus } from './hooks/use-workspace-chat'
import { useWorkspaceChat } from './hooks/use-workspace-chat'
// Module-level plugin list shared by every ReactMarkdown instance in this file.
const REMARK_PLUGINS = [remarkGfm]
/** Status icon for a tool call: spinner while executing, check on success, alert on error. */
function ToolStatusIcon({ status }: { status: ToolCallStatus }) {
  if (status === 'executing') {
    return <Loader2 className='h-3 w-3 animate-spin text-[var(--text-tertiary)]' />
  }
  if (status === 'success') {
    return <Check className='h-3 w-3 text-emerald-500' />
  }
  // Remaining member of the ToolCallStatus union: 'error'.
  return <CircleAlert className='h-3 w-3 text-red-400' />
}
/**
 * Formats a tool name for display: "edit_workflow" → "Edit Workflow".
 * A trailing version suffix such as "_v2" is stripped first.
 */
function formatToolName(name: string): string {
  const base = name.replace(/_v\d+$/, '')
  const words = base.split('_')
  const capitalized = words.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
  return capitalized.join(' ')
}
/** Compact inline rendering of a single tool call. */
function ToolCallItem({ toolCall }: { toolCall: ToolCallInfo }) {
  // Prefer the backend-supplied title; fall back to prettifying the raw tool name.
  const label = toolCall.displayTitle || formatToolName(toolCall.name)
  return (
    <div className='flex items-center gap-2 rounded-md border border-[var(--border)] bg-[var(--surface-2)] px-3 py-1.5'>
      <Zap className='h-3 w-3 flex-shrink-0 text-[var(--text-tertiary)]' />
      <span className='min-w-0 flex-1 truncate text-xs text-[var(--text-secondary)]'>{label}</span>
      <ToolStatusIcon status={toolCall.status} />
    </div>
  )
}
/** Renders a subagent activity label: a spinner next to muted text (e.g. "Building"). */
function SubagentLabel({ label }: { label: string }) {
  return (
    <div className='flex items-center gap-2 py-0.5'>
      <Loader2 className='h-3 w-3 animate-spin text-[var(--text-tertiary)]' />
      <span className='text-xs text-[var(--text-tertiary)]'>{label}</span>
    </div>
  )
}
/**
 * Renders structured content blocks for an assistant message.
 *
 * Text blocks render as markdown, tool calls as compact rows, and subagent
 * activity as a spinner label. Only the latest subagent block is shown, and
 * only while the message is still streaming.
 */
function AssistantContent({ blocks, isStreaming }: { blocks: ContentBlock[]; isStreaming: boolean }) {
  // Hoisted out of the map below: the previous per-block
  // `blocks.slice(i + 1).every(...)` scan made rendering O(n^2) in block count.
  let lastSubagentIndex = -1
  for (let i = blocks.length - 1; i >= 0; i--) {
    if (blocks[i].type === 'subagent') {
      lastSubagentIndex = i
      break
    }
  }
  return (
    <div className='space-y-2'>
      {blocks.map((block, i) => {
        switch (block.type) {
          case 'text': {
            if (!block.content?.trim()) return null
            return (
              <div key={`text-${i}`} className='prose-sm prose-invert max-w-none'>
                <ReactMarkdown remarkPlugins={REMARK_PLUGINS}>{block.content}</ReactMarkdown>
              </div>
            )
          }
          case 'tool_call': {
            if (!block.toolCall) return null
            return <ToolCallItem key={block.toolCall.id} toolCall={block.toolCall} />
          }
          case 'subagent': {
            if (!block.content) return null
            // Only show the subagent label if it's the last subagent block and we're streaming
            if (!isStreaming || i !== lastSubagentIndex) return null
            return <SubagentLabel key={`sub-${i}`} label={block.content} />
          }
          default:
            return null
        }
      })}
    </div>
  )
}
/**
 * Workspace-level copilot chat page ("Mothership").
 *
 * Renders the message history (user bubbles, assistant markdown / content
 * blocks, a "Thinking..." placeholder while the reply streams), an error
 * line, and an auto-resizing textarea with send/stop controls. Streaming
 * state and message data come from `useWorkspaceChat`.
 */
export function Chat() {
  const { workspaceId } = useParams<{ workspaceId: string }>()
  const [inputValue, setInputValue] = useState('')
  const inputRef = useRef<HTMLTextAreaElement>(null)
  // Sentinel element at the end of the list; scrolled into view after sending.
  const messagesEndRef = useRef<HTMLDivElement>(null)
  const { messages, isSending, error, sendMessage, abortMessage } = useWorkspaceChat({
    workspaceId,
  })
  const scrollToBottom = useCallback(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
  }, [])
  // Clears the input immediately, then awaits the full streamed response.
  const handleSubmit = useCallback(async () => {
    const trimmed = inputValue.trim()
    if (!trimmed || !workspaceId) return
    setInputValue('')
    await sendMessage(trimmed)
    scrollToBottom()
  }, [inputValue, workspaceId, sendMessage, scrollToBottom])
  // Enter sends; Shift+Enter inserts a newline.
  const handleKeyDown = useCallback(
    (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
      if (e.key === 'Enter' && !e.shiftKey) {
        e.preventDefault()
        handleSubmit()
      }
    },
    [handleSubmit]
  )
  return (
    <div className='flex h-full flex-col'>
      {/* Header */}
      <div className='flex flex-shrink-0 items-center border-b border-[var(--border)] px-6 py-3'>
        <h1 className='font-medium text-[16px] text-[var(--text-primary)]'>Mothership</h1>
      </div>
      {/* Messages area */}
      <div className='flex-1 overflow-y-auto px-6 py-4'>
        {messages.length === 0 && !isSending ? (
          <div className='flex h-full items-center justify-center'>
            <div className='flex flex-col items-center gap-3 text-center'>
              {/* NOTE(review): copy reads oddly — possibly missing a dash after
                  "workspace"; confirm intended wording before changing it. */}
              <p className='text-[var(--text-secondary)] text-sm'>
                Ask anything about your workspace build workflows, manage resources, get help.
              </p>
            </div>
          </div>
        ) : (
          <div className='mx-auto max-w-3xl space-y-4'>
            {messages.map((msg) => {
              // Assistant placeholder with no content yet → "Thinking..." bubble.
              const isStreamingEmpty =
                isSending &&
                msg.role === 'assistant' &&
                !msg.content &&
                (!msg.contentBlocks || msg.contentBlocks.length === 0)
              if (isStreamingEmpty) {
                return (
                  <div key={msg.id} className='flex justify-start'>
                    <div className='flex items-center gap-2 rounded-lg bg-[var(--surface-3)] px-4 py-2 text-sm text-[var(--text-secondary)]'>
                      <Loader2 className='h-3 w-3 animate-spin' />
                      Thinking...
                    </div>
                  </div>
                )
              }
              // Skip empty assistant messages
              if (
                msg.role === 'assistant' &&
                !msg.content &&
                (!msg.contentBlocks || msg.contentBlocks.length === 0)
              )
                return null
              // User messages
              if (msg.role === 'user') {
                return (
                  <div key={msg.id} className='flex justify-end'>
                    <div className='max-w-[85%] rounded-lg bg-[var(--accent)] px-4 py-2 text-sm text-[var(--accent-foreground)]'>
                      <p className='whitespace-pre-wrap'>{msg.content}</p>
                    </div>
                  </div>
                )
              }
              // Assistant messages with content blocks
              const hasBlocks = msg.contentBlocks && msg.contentBlocks.length > 0
              // Identity check: only the final message can still be streaming.
              const isThisMessageStreaming = isSending && msg === messages[messages.length - 1]
              return (
                <div key={msg.id} className='flex justify-start'>
                  <div className='max-w-[85%] rounded-lg bg-[var(--surface-3)] px-4 py-2 text-sm text-[var(--text-primary)]'>
                    {hasBlocks ? (
                      <AssistantContent
                        blocks={msg.contentBlocks!}
                        isStreaming={isThisMessageStreaming}
                      />
                    ) : (
                      <div className='prose-sm prose-invert max-w-none'>
                        <ReactMarkdown remarkPlugins={REMARK_PLUGINS}>
                          {msg.content}
                        </ReactMarkdown>
                      </div>
                    )}
                  </div>
                </div>
              )
            })}
            <div ref={messagesEndRef} />
          </div>
        )}
      </div>
      {/* Error display */}
      {error && (
        <div className='px-6 pb-2'>
          <p className='text-xs text-red-500'>{error}</p>
        </div>
      )}
      {/* Input area */}
      <div className='flex-shrink-0 border-t border-[var(--border)] px-6 py-4'>
        <div className='mx-auto flex max-w-3xl items-end gap-2'>
          <textarea
            ref={inputRef}
            value={inputValue}
            onChange={(e) => setInputValue(e.target.value)}
            onKeyDown={handleKeyDown}
            placeholder='Send a message...'
            rows={1}
            className='flex-1 resize-none rounded-lg border border-[var(--border)] bg-[var(--surface-2)] px-4 py-2.5 text-sm text-[var(--text-primary)] placeholder:text-[var(--text-tertiary)] focus:border-[var(--accent)] focus:outline-none'
            style={{ maxHeight: '120px' }}
            onInput={(e) => {
              {/* Auto-grow the textarea up to the 120px cap. */}
              const target = e.target as HTMLTextAreaElement
              target.style.height = 'auto'
              target.style.height = `${Math.min(target.scrollHeight, 120)}px`
            }}
          />
          {isSending ? (
            <Button
              variant='ghost'
              size='sm'
              onClick={abortMessage}
              className='h-[38px] w-[38px] flex-shrink-0 p-0'
            >
              <Square className='h-4 w-4' />
            </Button>
          ) : (
            <Button
              variant='ghost'
              size='sm'
              onClick={handleSubmit}
              disabled={!inputValue.trim()}
              className='h-[38px] w-[38px] flex-shrink-0 p-0'
            >
              <Send className='h-4 w-4' />
            </Button>
          )}
        </div>
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,346 @@
'use client'
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
// Scoped logger for this hook.
const logger = createLogger('useWorkspaceChat')
/** Status of a tool call as it progresses through execution. */
export type ToolCallStatus = 'executing' | 'success' | 'error'
/** Lightweight info about a single tool call rendered in the chat. */
export interface ToolCallInfo {
  // Unique id, used to correlate tool_result/tool_error events with this call.
  id: string
  // Raw tool name, e.g. "edit_workflow".
  name: string
  status: ToolCallStatus
  /** Human-readable title from the backend ToolUI metadata. */
  displayTitle?: string
}
/** A content block inside an assistant message. */
export type ContentBlockType = 'text' | 'tool_call' | 'subagent'
export interface ContentBlock {
  type: ContentBlockType
  /** Text content (for 'text' and 'subagent' blocks). */
  content?: string
  /** Tool call info (for 'tool_call' blocks). */
  toolCall?: ToolCallInfo
}
/** One chat message (user or assistant) as held in hook state. */
export interface ChatMessage {
  id: string
  role: 'user' | 'assistant'
  // Flattened text content; for assistant messages this mirrors the text blocks.
  content: string
  // ISO-8601 creation time.
  timestamp: string
  /** Structured content blocks for rich rendering. When present, prefer over `content`. */
  contentBlocks?: ContentBlock[]
  /** Name of the currently active subagent (shown as a label while streaming). */
  activeSubagent?: string | null
}
/** Input to {@link useWorkspaceChat}. */
interface UseWorkspaceChatProps {
  workspaceId: string
}
/** Public surface returned by {@link useWorkspaceChat}. */
interface UseWorkspaceChatReturn {
  messages: ChatMessage[]
  isSending: boolean
  error: string | null
  sendMessage: (message: string) => Promise<void>
  abortMessage: () => void
  clearMessages: () => void
}
/** Maps subagent IDs to human-readable labels. */
const SUBAGENT_LABELS: Record<string, string> = {
  build: 'Building',
  deploy: 'Deploying',
  auth: 'Connecting credentials',
  research: 'Researching',
  knowledge: 'Managing knowledge base',
  custom_tool: 'Creating tool',
  superagent: 'Executing action',
  plan: 'Planning',
  debug: 'Debugging',
  edit: 'Editing workflow',
}
/**
 * React hook backing the workspace chat UI.
 *
 * `sendMessage` POSTs to `/api/copilot/workspace-chat`, consumes the SSE
 * response stream, and incrementally builds structured content blocks
 * (text, tool calls, subagent activity) on a placeholder assistant message
 * as events arrive. Also exposes abort and clear controls.
 */
export function useWorkspaceChat({ workspaceId }: UseWorkspaceChatProps): UseWorkspaceChatReturn {
  const [messages, setMessages] = useState<ChatMessage[]>([])
  const [isSending, setIsSending] = useState(false)
  const [error, setError] = useState<string | null>(null)
  // In-flight request controller; non-null only while a message is streaming.
  const abortControllerRef = useRef<AbortController | null>(null)
  // Server-assigned chat id, reused across messages to continue one conversation.
  const chatIdRef = useRef<string | undefined>(undefined)
  const sendMessage = useCallback(
    async (message: string) => {
      if (!message.trim() || !workspaceId) return
      setError(null)
      setIsSending(true)
      const userMessage: ChatMessage = {
        id: crypto.randomUUID(),
        role: 'user',
        content: message,
        timestamp: new Date().toISOString(),
      }
      // Placeholder assistant message; filled in as SSE events stream back.
      const assistantMessage: ChatMessage = {
        id: crypto.randomUUID(),
        role: 'assistant',
        content: '',
        timestamp: new Date().toISOString(),
        contentBlocks: [],
        activeSubagent: null,
      }
      setMessages((prev) => [...prev, userMessage, assistantMessage])
      const abortController = new AbortController()
      abortControllerRef.current = abortController
      // Mutable refs for the streaming context so we can build content blocks
      // without relying on stale React state closures.
      const blocksRef: ContentBlock[] = []
      const toolCallMapRef = new Map<string, number>() // toolCallId → index in blocksRef
      /** Ensure the last block is a text block and return it. */
      const ensureTextBlock = (): ContentBlock => {
        const last = blocksRef[blocksRef.length - 1]
        if (last && last.type === 'text') return last
        const newBlock: ContentBlock = { type: 'text', content: '' }
        blocksRef.push(newBlock)
        return newBlock
      }
      /** Push updated blocks + content into the assistant message. */
      const flushBlocks = (extra?: Partial<ChatMessage>) => {
        // Flatten the text blocks into `content` for plain-text consumers.
        const fullText = blocksRef
          .filter((b) => b.type === 'text')
          .map((b) => b.content ?? '')
          .join('')
        setMessages((prev) =>
          prev.map((msg) =>
            msg.id === assistantMessage.id
              ? {
                  ...msg,
                  content: fullText,
                  contentBlocks: [...blocksRef],
                  ...extra,
                }
              : msg
          )
        )
      }
      try {
        const response = await fetch('/api/copilot/workspace-chat', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            message,
            workspaceId,
            ...(chatIdRef.current ? { chatId: chatIdRef.current } : {}),
          }),
          signal: abortController.signal,
        })
        if (!response.ok) {
          const errorData = await response.json().catch(() => ({}))
          throw new Error(errorData.error || `Request failed: ${response.status}`)
        }
        if (!response.body) {
          throw new Error('No response body')
        }
        // Manual SSE framing: split on newlines, keep the trailing partial line
        // in `buffer` until the next chunk completes it.
        const reader = response.body.getReader()
        const decoder = new TextDecoder()
        let buffer = ''
        while (true) {
          const { done, value } = await reader.read()
          if (done) break
          buffer += decoder.decode(value, { stream: true })
          const lines = buffer.split('\n')
          buffer = lines.pop() || ''
          for (const line of lines) {
            if (!line.startsWith('data: ')) continue
            try {
              const event = JSON.parse(line.slice(6))
              switch (event.type) {
                case 'chat_id': {
                  // Remember the server-assigned chat id for follow-up messages.
                  if (event.chatId) {
                    chatIdRef.current = event.chatId
                  }
                  break
                }
                case 'content': {
                  // Text delta: append to the current trailing text block.
                  if (event.content || event.data) {
                    const chunk =
                      typeof event.data === 'string' ? event.data : event.content || ''
                    if (chunk) {
                      const textBlock = ensureTextBlock()
                      textBlock.content = (textBlock.content ?? '') + chunk
                      flushBlocks()
                    }
                  }
                  break
                }
                case 'tool_generating':
                case 'tool_call': {
                  const toolCallId = event.toolCallId
                  const toolName = event.toolName || event.data?.name || 'unknown'
                  if (!toolCallId) break
                  const ui = event.ui || event.data?.ui
                  const displayTitle = ui?.title || ui?.phaseLabel
                  if (!toolCallMapRef.has(toolCallId)) {
                    // First sighting of this call: append a new tool-call block.
                    const toolBlock: ContentBlock = {
                      type: 'tool_call',
                      toolCall: {
                        id: toolCallId,
                        name: toolName,
                        status: 'executing',
                        displayTitle,
                      },
                    }
                    toolCallMapRef.set(toolCallId, blocksRef.length)
                    blocksRef.push(toolBlock)
                  } else {
                    // Already known: refresh its name/title in place.
                    const idx = toolCallMapRef.get(toolCallId)!
                    const existing = blocksRef[idx]
                    if (existing.toolCall) {
                      existing.toolCall.name = toolName
                      if (displayTitle) existing.toolCall.displayTitle = displayTitle
                    }
                  }
                  flushBlocks()
                  break
                }
                case 'tool_result': {
                  const toolCallId = event.toolCallId || event.data?.id
                  if (!toolCallId) break
                  const idx = toolCallMapRef.get(toolCallId)
                  if (idx !== undefined) {
                    const block = blocksRef[idx]
                    if (block.toolCall) {
                      // NOTE(review): a result without a truthy `success` renders as
                      // an error — confirm the backend always sets this flag.
                      block.toolCall.status = event.success ? 'success' : 'error'
                    }
                    flushBlocks()
                  }
                  break
                }
                case 'tool_error': {
                  const toolCallId = event.toolCallId || event.data?.id
                  if (!toolCallId) break
                  const idx = toolCallMapRef.get(toolCallId)
                  if (idx !== undefined) {
                    const block = blocksRef[idx]
                    if (block.toolCall) {
                      block.toolCall.status = 'error'
                    }
                    flushBlocks()
                  }
                  break
                }
                case 'subagent_start': {
                  const subagentName = event.subagent || event.data?.agent
                  if (subagentName) {
                    // Unknown subagent ids fall back to the raw id as the label.
                    const label = SUBAGENT_LABELS[subagentName] || subagentName
                    const subBlock: ContentBlock = {
                      type: 'subagent',
                      content: label,
                    }
                    blocksRef.push(subBlock)
                    flushBlocks({ activeSubagent: label })
                  }
                  break
                }
                case 'subagent_end': {
                  flushBlocks({ activeSubagent: null })
                  break
                }
                case 'error': {
                  setError(event.error || 'An error occurred')
                  break
                }
                case 'done': {
                  // Fallback: if no text streamed in, adopt the final content.
                  if (event.content && typeof event.content === 'string') {
                    setMessages((prev) =>
                      prev.map((msg) =>
                        msg.id === assistantMessage.id && !msg.content
                          ? { ...msg, content: event.content }
                          : msg
                      )
                    )
                  }
                  break
                }
              }
            } catch {
              // Skip malformed SSE lines
            }
          }
        }
      } catch (err) {
        // A user-initiated abort is not an error; `finally` still resets state.
        if (err instanceof Error && err.name === 'AbortError') {
          logger.info('Message aborted by user')
          return
        }
        const errorMessage = err instanceof Error ? err.message : 'Failed to send message'
        logger.error('Failed to send workspace chat message', { error: errorMessage })
        setError(errorMessage)
        // Only replace the placeholder if nothing was streamed into it.
        setMessages((prev) =>
          prev.map((msg) =>
            msg.id === assistantMessage.id && !msg.content
              ? { ...msg, content: 'Sorry, something went wrong. Please try again.' }
              : msg
          )
        )
      } finally {
        setIsSending(false)
        abortControllerRef.current = null
      }
    },
    [workspaceId]
  )
  /** Cancels the in-flight request, if any. */
  const abortMessage = useCallback(() => {
    abortControllerRef.current?.abort()
    setIsSending(false)
  }, [])
  /** Resets the conversation: messages, error state, and the chat id. */
  const clearMessages = useCallback(() => {
    setMessages([])
    setError(null)
    chatIdRef.current = undefined
  }, [])
  return {
    messages,
    isSending,
    error,
    sendMessage,
    abortMessage,
    clearMessages,
  }
}

View File

@@ -0,0 +1,7 @@
/** Layout wrapper for the workspace chat route; offsets content past the fixed sidebar. */
export default function ChatLayout({ children }: { children: React.ReactNode }) {
  const containerClasses = 'flex h-full flex-1 flex-col overflow-hidden pl-[var(--sidebar-width)]'
  return <div className={containerClasses}>{children}</div>
}

View File

@@ -0,0 +1,26 @@
import { redirect } from 'next/navigation'
import { getSession } from '@/lib/auth'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import { Chat } from './chat'
interface ChatPageProps {
  params: Promise<{
    workspaceId: string
  }>
}

/**
 * Server component guarding the workspace chat page: requires an
 * authenticated session and workspace membership, otherwise redirects home.
 */
export default async function ChatPage({ params }: ChatPageProps) {
  const { workspaceId } = await params
  const session = await getSession()
  const userId = session?.user?.id
  if (!userId) {
    redirect('/')
  }
  const isMember = await verifyWorkspaceMembership(userId, workspaceId)
  if (!isMember) {
    redirect('/')
  }
  return <Chat />
}

View File

@@ -1,4 +1,4 @@
import { redirect, unstable_rethrow } from 'next/navigation'
import { redirect } from 'next/navigation'
import { getSession } from '@/lib/auth'
import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
@@ -14,27 +14,24 @@ interface FileViewerPageProps {
export default async function FileViewerPage({ params }: FileViewerPageProps) {
const { workspaceId, fileId } = await params
const session = await getSession()
if (!session?.user?.id) {
redirect('/')
}
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
if (!hasPermission) {
redirect(`/workspace/${workspaceId}`)
}
let fileRecord: Awaited<ReturnType<typeof getWorkspaceFile>>
try {
fileRecord = await getWorkspaceFile(workspaceId, fileId)
const session = await getSession()
if (!session?.user?.id) {
redirect('/')
}
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
if (!hasPermission) {
redirect(`/workspace/${workspaceId}`)
}
const fileRecord = await getWorkspaceFile(workspaceId, fileId)
if (!fileRecord) {
redirect(`/workspace/${workspaceId}`)
}
return <FileViewer file={fileRecord} />
} catch (error) {
unstable_rethrow(error)
redirect(`/workspace/${workspaceId}`)
}
if (!fileRecord) {
redirect(`/workspace/${workspaceId}`)
}
return <FileViewer file={fileRecord} />
}

View File

@@ -131,8 +131,10 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
resumeActiveStream,
})
// Handle scroll management
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage)
// Handle scroll management (40px stickiness for copilot)
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage, {
stickinessThreshold: 40,
})
// Handle chat history grouping
const { groupedChats, handleHistoryDropdownOpen: handleHistoryDropdownOpenHook } = useChatHistory(

View File

@@ -1,5 +1,5 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import isEqual from 'lodash/isEqual'
import { isEqual } from 'lodash'
import { useReactFlow } from 'reactflow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'

View File

@@ -1,5 +1,5 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import isEqual from 'lodash/isEqual'
import { isEqual } from 'lodash'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Badge } from '@/components/emcn'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'

View File

@@ -7,7 +7,7 @@ import {
useRef,
useState,
} from 'react'
import isEqual from 'lodash/isEqual'
import { isEqual } from 'lodash'
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
import { Button, Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'

View File

@@ -1,5 +1,5 @@
import { type JSX, type MouseEvent, memo, useCallback, useRef, useState } from 'react'
import isEqual from 'lodash/isEqual'
import { isEqual } from 'lodash'
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'

View File

@@ -1,7 +1,7 @@
'use client'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import isEqual from 'lodash/isEqual'
import { isEqual } from 'lodash'
import {
BookOpen,
Check,

View File

@@ -10,6 +10,40 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/componen
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { usePanelEditorStore } from '@/stores/panel'
/**
 * Global styles for subflow nodes (loop and parallel containers).
 * Includes animations for drag-over states and hover effects.
 *
 * Rendered through styled-jsx with the `global` flag, so these rules apply
 * document-wide to React Flow's subflow node elements rather than being
 * scoped to this component.
 *
 * @returns Style component with global CSS
 */
const SubflowNodeStyles: React.FC = () => {
  return (
    <style jsx global>{`
      /* Z-index management for subflow nodes - default behind blocks */
      .workflow-container .react-flow__node-subflowNode {
        z-index: -1 !important;
      }
      /* Selected subflows appear above other subflows but below blocks (z-21) */
      .workflow-container .react-flow__node-subflowNode:has([data-subflow-selected='true']) {
        z-index: 10 !important;
      }
      /* Drag-over states */
      .loop-node-drag-over,
      .parallel-node-drag-over {
        box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
        border-radius: 8px !important;
      }
      /* Handle z-index for nested nodes */
      .react-flow__node[data-parent-node-id] .react-flow__handle {
        z-index: 30;
      }
    `}</style>
  )
}
/**
* Data structure for subflow nodes (loop and parallel containers)
*/
@@ -117,130 +151,133 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
)
return (
<div className='group relative'>
<div
ref={blockRef}
onClick={() => setCurrentBlockId(id)}
className={cn(
'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
'transition-block-bg transition-ring',
'z-[20]'
)}
style={{
width: data.width || 500,
height: data.height || 300,
position: 'relative',
overflow: 'visible',
pointerEvents: isPreview ? 'none' : 'all',
}}
data-node-id={id}
data-type='subflowNode'
data-nesting-level={nestingLevel}
data-subflow-selected={isFocused || isSelected || isPreviewSelected}
>
{!isPreview && (
<ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
)}
{/* Header Section */}
<>
<SubflowNodeStyles />
<div className='group relative'>
<div
ref={blockRef}
onClick={() => setCurrentBlockId(id)}
className={cn(
'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
'transition-block-bg transition-ring',
'z-[20]'
)}
>
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
<div
className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
style={{ backgroundColor: isEnabled ? blockIconBg : 'gray' }}
>
<BlockIcon className='h-[16px] w-[16px] text-white' />
</div>
<span
className={cn(
'truncate font-medium text-[16px]',
!isEnabled && 'text-[var(--text-muted)]'
)}
title={blockName}
>
{blockName}
</span>
</div>
<div className='flex items-center gap-1'>
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
</div>
</div>
{!isPreview && (
<div
className='absolute right-[8px] bottom-[8px] z-20 flex h-[32px] w-[32px] cursor-se-resize items-center justify-center text-muted-foreground'
style={{ pointerEvents: 'auto' }}
/>
)}
<div
className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
data-dragarea='true'
style={{
width: data.width || 500,
height: data.height || 300,
position: 'relative',
pointerEvents: isPreview ? 'none' : 'auto',
overflow: 'visible',
pointerEvents: isPreview ? 'none' : 'all',
}}
data-node-id={id}
data-type='subflowNode'
data-nesting-level={nestingLevel}
data-subflow-selected={isFocused || isSelected || isPreviewSelected}
>
{/* Subflow Start */}
{!isPreview && (
<ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
)}
{/* Header Section */}
<div
className='absolute top-[16px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
data-parent-id={id}
data-node-role={`${data.kind}-start`}
data-extent='parent'
className={cn(
'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
)}
>
<span className='font-medium text-[14px] text-[var(--text-primary)]'>Start</span>
<Handle
type='source'
position={Position.Right}
id={startHandleId}
className={getHandleClasses('right')}
style={{
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
data-parent-id={id}
/>
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
<div
className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
style={{ backgroundColor: isEnabled ? blockIconBg : 'gray' }}
>
<BlockIcon className='h-[16px] w-[16px] text-white' />
</div>
<span
className={cn(
'truncate font-medium text-[16px]',
!isEnabled && 'text-[var(--text-muted)]'
)}
title={blockName}
>
{blockName}
</span>
</div>
<div className='flex items-center gap-1'>
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
</div>
</div>
</div>
{/* Input handle on left middle */}
<Handle
type='target'
position={Position.Left}
className={getHandleClasses('left')}
style={{
...getHandleStyle(),
pointerEvents: 'auto',
}}
/>
{!isPreview && (
<div
className='absolute right-[8px] bottom-[8px] z-20 flex h-[32px] w-[32px] cursor-se-resize items-center justify-center text-muted-foreground'
style={{ pointerEvents: 'auto' }}
/>
)}
{/* Output handle on right middle */}
<Handle
type='source'
position={Position.Right}
className={getHandleClasses('right')}
style={{
...getHandleStyle(),
pointerEvents: 'auto',
}}
id={endHandleId}
/>
{hasRing && (
<div
className={cn('pointer-events-none absolute inset-0 z-40 rounded-[8px]', ringStyles)}
className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
data-dragarea='true'
style={{
position: 'relative',
pointerEvents: isPreview ? 'none' : 'auto',
}}
>
{/* Subflow Start */}
<div
className='absolute top-[16px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
data-parent-id={id}
data-node-role={`${data.kind}-start`}
data-extent='parent'
>
<span className='font-medium text-[14px] text-[var(--text-primary)]'>Start</span>
<Handle
type='source'
position={Position.Right}
id={startHandleId}
className={getHandleClasses('right')}
style={{
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
data-parent-id={id}
/>
</div>
</div>
{/* Input handle on left middle */}
<Handle
type='target'
position={Position.Left}
className={getHandleClasses('left')}
style={{
...getHandleStyle(),
pointerEvents: 'auto',
}}
/>
)}
{/* Output handle on right middle */}
<Handle
type='source'
position={Position.Right}
className={getHandleClasses('right')}
style={{
...getHandleStyle(),
pointerEvents: 'auto',
}}
id={endHandleId}
/>
{hasRing && (
<div
className={cn('pointer-events-none absolute inset-0 z-40 rounded-[8px]', ringStyles)}
/>
)}
</div>
</div>
</div>
</>
)
})

View File

@@ -134,6 +134,57 @@ export function WandPromptBar({
</Button>
)}
</div>
<style jsx global>{`
@keyframes smoke-pulse {
0%,
100% {
transform: scale(0.8);
opacity: 0.4;
}
50% {
transform: scale(1.1);
opacity: 0.8;
}
}
.status-indicator {
position: relative;
width: 12px;
height: 12px;
border-radius: 50%;
overflow: hidden;
background-color: hsl(var(--muted-foreground) / 0.5);
transition: background-color 0.3s ease;
}
.status-indicator.streaming {
background-color: transparent;
}
.status-indicator.streaming::before {
content: '';
position: absolute;
inset: 0;
border-radius: 50%;
background: radial-gradient(
circle,
hsl(var(--primary) / 0.9) 0%,
hsl(var(--primary) / 0.4) 60%,
transparent 80%
);
animation: smoke-pulse 1.8s ease-in-out infinite;
opacity: 0.9;
}
.dark .status-indicator.streaming::before {
background: #6b7280;
opacity: 0.9;
animation: smoke-pulse 1.8s ease-in-out infinite;
}
`}</style>
</div>
)
}

View File

@@ -1,6 +1,6 @@
import { memo, useCallback, useEffect, useMemo, useRef } from 'react'
import { createLogger } from '@sim/logger'
import isEqual from 'lodash/isEqual'
import { isEqual } from 'lodash'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { useStoreWithEqualityFn } from 'zustand/traditional'

View File

@@ -16,7 +16,7 @@ interface UseScrollManagementOptions {
/**
* Distance from bottom (in pixels) within which auto-scroll stays active
* @remarks Lower values = less sticky (user can scroll away easier)
* @defaultValue 30
* @defaultValue 100
*/
stickinessThreshold?: number
}
@@ -41,7 +41,7 @@ export function useScrollManagement(
const lastScrollTopRef = useRef(0)
const scrollBehavior = options?.behavior ?? 'smooth'
const stickinessThreshold = options?.stickinessThreshold ?? 30
const stickinessThreshold = options?.stickinessThreshold ?? 100
/** Scrolls the container to the bottom */
const scrollToBottom = useCallback(() => {

View File

@@ -514,7 +514,6 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
alt={`Preview ${index + 1}`}
fill
unoptimized
sizes='(max-width: 768px) 100vw, 50vw'
className='object-contain'
/>
<button

View File

@@ -165,16 +165,12 @@ export function CancelSubscription({ subscription, subscriptionData }: CancelSub
logger.info('Subscription restored successfully', result)
}
await Promise.all([
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all }),
...(activeOrgId
? [
queryClient.invalidateQueries({ queryKey: organizationKeys.detail(activeOrgId) }),
queryClient.invalidateQueries({ queryKey: organizationKeys.billing(activeOrgId) }),
queryClient.invalidateQueries({ queryKey: organizationKeys.lists() }),
]
: []),
])
await queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
if (activeOrgId) {
await queryClient.invalidateQueries({ queryKey: organizationKeys.detail(activeOrgId) })
await queryClient.invalidateQueries({ queryKey: organizationKeys.billing(activeOrgId) })
await queryClient.invalidateQueries({ queryKey: organizationKeys.lists() })
}
setIsDialogOpen(false)
} catch (err) {

View File

@@ -37,7 +37,7 @@ export const UsageLimit = forwardRef<UsageLimitRef, UsageLimitProps>(
},
ref
) => {
const [inputValue, setInputValue] = useState(() => currentLimit.toString())
const [inputValue, setInputValue] = useState(currentLimit.toString())
const [hasError, setHasError] = useState(false)
const [errorType, setErrorType] = useState<'general' | 'belowUsage' | null>(null)
const [isEditing, setIsEditing] = useState(false)

View File

@@ -2,7 +2,7 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
import { Database, HelpCircle, Layout, MessageSquare, Plus, Search, Settings } from 'lucide-react'
import Link from 'next/link'
import { useParams, usePathname, useRouter } from 'next/navigation'
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
@@ -248,6 +248,12 @@ export const Sidebar = memo(function Sidebar() {
const footerNavigationItems = useMemo(
() =>
[
{
id: 'chat',
label: 'Mothership',
icon: MessageSquare,
href: `/workspace/${workspaceId}/chat`,
},
{
id: 'logs',
label: 'Logs',

View File

@@ -92,9 +92,12 @@ export const IncidentioBlock: BlockConfig<IncidentioResponse> = {
field: 'operation',
value: [
'incidentio_incidents_list',
'incidentio_actions_list',
'incidentio_follow_ups_list',
'incidentio_users_list',
'incidentio_workflows_list',
'incidentio_schedules_list',
'incidentio_escalations_list',
'incidentio_incident_updates_list',
'incidentio_schedule_entries_list',
],
@@ -110,7 +113,6 @@ export const IncidentioBlock: BlockConfig<IncidentioResponse> = {
field: 'operation',
value: [
'incidentio_incidents_list',
'incidentio_users_list',
'incidentio_workflows_list',
'incidentio_schedules_list',
'incidentio_incident_updates_list',

View File

@@ -216,21 +216,31 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
condition: { field: 'operation', value: ['update_deal'] },
},
{
id: 'sort',
title: 'Sort By',
type: 'dropdown',
options: [
{ label: 'ID', id: 'id' },
{ label: 'Update Time', id: 'update_time' },
],
value: () => 'id',
id: 'deal_id',
title: 'Deal ID',
type: 'short-input',
placeholder: 'Filter by deal ID ',
condition: { field: 'operation', value: ['get_files'] },
},
{
id: 'person_id',
title: 'Person ID',
type: 'short-input',
placeholder: 'Filter by person ID ',
condition: { field: 'operation', value: ['get_files'] },
},
{
id: 'org_id',
title: 'Organization ID',
type: 'short-input',
placeholder: 'Filter by organization ID ',
condition: { field: 'operation', value: ['get_files'] },
},
{
id: 'limit',
title: 'Limit',
type: 'short-input',
placeholder: 'Number of results (default 100, max 100)',
placeholder: 'Number of results (default 100, max 500)',
condition: { field: 'operation', value: ['get_files'] },
},
{
@@ -295,28 +305,8 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
id: 'cursor',
title: 'Cursor',
type: 'short-input',
placeholder: 'Pagination cursor from previous response',
condition: {
field: 'operation',
value: ['get_all_deals', 'get_projects'],
},
},
{
id: 'start',
title: 'Start (Offset)',
type: 'short-input',
placeholder: 'Pagination offset (e.g., 0, 100, 200)',
condition: {
field: 'operation',
value: [
'get_activities',
'get_leads',
'get_files',
'get_pipeline_deals',
'get_mail_messages',
'get_pipelines',
],
},
placeholder: 'Pagination cursor (optional)',
condition: { field: 'operation', value: ['get_pipelines'] },
},
{
id: 'pipeline_id',
@@ -333,6 +323,19 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
placeholder: 'Filter by stage ID ',
condition: { field: 'operation', value: ['get_pipeline_deals'] },
},
{
id: 'status',
title: 'Status',
type: 'dropdown',
options: [
{ label: 'All', id: '' },
{ label: 'Open', id: 'open' },
{ label: 'Won', id: 'won' },
{ label: 'Lost', id: 'lost' },
],
value: () => '',
condition: { field: 'operation', value: ['get_pipeline_deals'] },
},
{
id: 'limit',
title: 'Limit',
@@ -423,29 +426,22 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
id: 'deal_id',
title: 'Deal ID',
type: 'short-input',
placeholder: 'Associated deal ID ',
condition: { field: 'operation', value: ['create_activity'] },
placeholder: 'Filter by deal ID ',
condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
},
{
id: 'person_id',
title: 'Person ID',
type: 'short-input',
placeholder: 'Associated person ID ',
condition: { field: 'operation', value: ['create_activity'] },
placeholder: 'Filter by person ID ',
condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
},
{
id: 'org_id',
title: 'Organization ID',
type: 'short-input',
placeholder: 'Associated organization ID ',
condition: { field: 'operation', value: ['create_activity'] },
},
{
id: 'user_id',
title: 'User ID',
type: 'short-input',
placeholder: 'Filter by user ID',
condition: { field: 'operation', value: ['get_activities'] },
placeholder: 'Filter by organization ID ',
condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
},
{
id: 'type',
@@ -785,8 +781,7 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
thread_id: { type: 'string', description: 'Mail thread ID' },
sort_by: { type: 'string', description: 'Field to sort by' },
sort_direction: { type: 'string', description: 'Sorting direction' },
cursor: { type: 'string', description: 'Pagination cursor (v2 endpoints)' },
start: { type: 'string', description: 'Pagination start offset (v1 endpoints)' },
cursor: { type: 'string', description: 'Pagination cursor' },
project_id: { type: 'string', description: 'Project ID' },
description: { type: 'string', description: 'Description' },
start_date: { type: 'string', description: 'Start date' },
@@ -798,15 +793,12 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
due_time: { type: 'string', description: 'Due time' },
duration: { type: 'string', description: 'Duration' },
done: { type: 'string', description: 'Completion status' },
user_id: { type: 'string', description: 'User ID' },
note: { type: 'string', description: 'Notes' },
lead_id: { type: 'string', description: 'Lead ID' },
archived: { type: 'string', description: 'Archived status' },
value_amount: { type: 'string', description: 'Value amount' },
value_currency: { type: 'string', description: 'Value currency' },
is_archived: { type: 'string', description: 'Archive status' },
organization_id: { type: 'string', description: 'Organization ID' },
owner_id: { type: 'string', description: 'Owner user ID' },
},
outputs: {
deals: { type: 'json', description: 'Array of deal objects' },

View File

@@ -445,13 +445,6 @@ Return ONLY the order by expression - no explanations, no extra text.`,
placeholder: '100',
condition: { field: 'operation', value: 'query' },
},
{
id: 'offset',
title: 'Offset',
type: 'short-input',
placeholder: '0',
condition: { field: 'operation', value: 'query' },
},
// Vector search operation fields
{
id: 'functionName',
@@ -550,13 +543,6 @@ Return ONLY the order by expression - no explanations, no extra text.`,
placeholder: '100',
condition: { field: 'operation', value: 'text_search' },
},
{
id: 'offset',
title: 'Offset',
type: 'short-input',
placeholder: '0',
condition: { field: 'operation', value: 'text_search' },
},
// Count operation fields
{
id: 'filter',

View File

@@ -66,20 +66,6 @@ export const TypeformBlock: BlockConfig<TypeformResponse> = {
placeholder: 'Number of responses per page (default: 25)',
condition: { field: 'operation', value: 'typeform_responses' },
},
{
id: 'before',
title: 'Before (Cursor)',
type: 'short-input',
placeholder: 'Cursor token from previous response for pagination',
condition: { field: 'operation', value: 'typeform_responses' },
},
{
id: 'after',
title: 'After (Cursor)',
type: 'short-input',
placeholder: 'Cursor token from previous response for newer results',
condition: { field: 'operation', value: 'typeform_responses' },
},
{
id: 'since',
title: 'Since',
@@ -394,8 +380,6 @@ Do not include any explanations, markdown formatting, or other text outside the
apiKey: { type: 'string', description: 'Personal access token' },
// Response operation params
pageSize: { type: 'number', description: 'Responses per page' },
before: { type: 'string', description: 'Cursor token for fetching the next page' },
after: { type: 'string', description: 'Cursor token for fetching newer results' },
since: { type: 'string', description: 'Start date filter' },
until: { type: 'string', description: 'End date filter' },
completed: { type: 'string', description: 'Completion status filter' },

View File

@@ -444,36 +444,33 @@ Return ONLY the search query - no explanations.`,
},
},
{
id: 'filterType',
title: 'Resource Type',
id: 'sortBy',
title: 'Sort By',
type: 'dropdown',
options: [
{ label: 'Ticket', id: 'ticket' },
{ label: 'User', id: 'user' },
{ label: 'Organization', id: 'organization' },
{ label: 'Group', id: 'group' },
{ label: 'Relevance', id: 'relevance' },
{ label: 'Created At', id: 'created_at' },
{ label: 'Updated At', id: 'updated_at' },
{ label: 'Priority', id: 'priority' },
{ label: 'Status', id: 'status' },
{ label: 'Ticket Type', id: 'ticket_type' },
],
required: true,
condition: {
field: 'operation',
value: ['search'],
},
},
{
id: 'sort',
title: 'Sort',
id: 'sortOrder',
title: 'Sort Order',
type: 'dropdown',
options: [
{ label: 'Updated At (Asc)', id: 'updated_at' },
{ label: 'Updated At (Desc)', id: '-updated_at' },
{ label: 'ID (Asc)', id: 'id' },
{ label: 'ID (Desc)', id: '-id' },
{ label: 'Status (Asc)', id: 'status' },
{ label: 'Status (Desc)', id: '-status' },
{ label: 'Ascending', id: 'asc' },
{ label: 'Descending', id: 'desc' },
],
condition: {
field: 'operation',
value: ['get_tickets'],
value: ['search'],
},
},
// Pagination fields
@@ -494,26 +491,21 @@ Return ONLY the search query - no explanations.`,
],
},
},
{
id: 'pageAfter',
title: 'Page After (Cursor)',
type: 'short-input',
placeholder: 'Cursor from previous response (after_cursor)',
description: 'Cursor value from a previous response to fetch the next page of results',
condition: {
field: 'operation',
value: ['get_tickets', 'get_users', 'get_organizations', 'search'],
},
},
{
id: 'page',
title: 'Page Number',
title: 'Page',
type: 'short-input',
placeholder: 'Page number (default: 1)',
description: 'Page number for offset-based pagination',
placeholder: 'Page number',
condition: {
field: 'operation',
value: ['search_users', 'autocomplete_organizations'],
value: [
'get_tickets',
'get_users',
'get_organizations',
'search_users',
'autocomplete_organizations',
'search',
],
},
},
],
@@ -632,7 +624,6 @@ Return ONLY the search query - no explanations.`,
email: { type: 'string', description: 'Zendesk email address' },
apiToken: { type: 'string', description: 'Zendesk API token' },
subdomain: { type: 'string', description: 'Zendesk subdomain' },
sort: { type: 'string', description: 'Sort field for ticket listing' },
},
outputs: {
// Ticket operations - list
@@ -674,11 +665,8 @@ Return ONLY the search query - no explanations.`,
type: 'boolean',
description: 'Deletion confirmation (delete_ticket, delete_user, delete_organization)',
},
// Cursor-based pagination (shared across list operations)
paging: {
type: 'json',
description: 'Cursor-based pagination information (after_cursor, has_more)',
},
// Pagination (shared across list operations)
paging: { type: 'json', description: 'Pagination information for list operations' },
// Metadata (shared across all operations)
metadata: { type: 'json', description: 'Operation metadata including operation type' },
},

View File

@@ -9,7 +9,6 @@ import {
type ReactNode,
useCallback,
useEffect,
useId,
useMemo,
useRef,
useState,
@@ -171,7 +170,6 @@ const Combobox = memo(
},
ref
) => {
const listboxId = useId()
const [open, setOpen] = useState(false)
const [highlightedIndex, setHighlightedIndex] = useState(-1)
const [searchQuery, setSearchQuery] = useState('')
@@ -515,7 +513,6 @@ const Combobox = memo(
role='combobox'
aria-expanded={open}
aria-haspopup='listbox'
aria-controls={listboxId}
aria-disabled={disabled}
tabIndex={disabled ? -1 : 0}
className={cn(
@@ -619,7 +616,7 @@ const Combobox = memo(
}
}}
>
<div ref={dropdownRef} role='listbox' id={listboxId}>
<div ref={dropdownRef} role='listbox'>
{isLoading ? (
<div className='flex items-center justify-center py-[14px]'>
<Loader2 className='h-[16px] w-[16px] animate-spin text-[var(--text-muted)]' />

View File

@@ -27,14 +27,12 @@ const Alert = React.forwardRef<
Alert.displayName = 'Alert'
const AlertTitle = React.forwardRef<HTMLParagraphElement, React.HTMLAttributes<HTMLHeadingElement>>(
({ className, children, ...props }, ref) => (
({ className, ...props }, ref) => (
<h5
ref={ref}
className={cn('mb-1 font-medium leading-none tracking-tight', className)}
{...props}
>
{children}
</h5>
/>
)
)
AlertTitle.displayName = 'AlertTitle'

View File

@@ -16,32 +16,26 @@ export const mdxComponents: MDXRemoteProps['components'] = {
unoptimized
/>
),
h2: ({ children, className, ...props }: any) => (
h2: (props: any) => (
<h2
{...props}
style={{ fontSize: '30px', marginTop: '3rem', marginBottom: '1.5rem' }}
className={clsx('font-medium text-black leading-tight', className)}
>
{children}
</h2>
className={clsx('font-medium text-black leading-tight', props.className)}
/>
),
h3: ({ children, className, ...props }: any) => (
h3: (props: any) => (
<h3
{...props}
style={{ fontSize: '24px', marginTop: '1.5rem', marginBottom: '0.75rem' }}
className={clsx('font-medium leading-tight', className)}
>
{children}
</h3>
className={clsx('font-medium leading-tight', props.className)}
/>
),
h4: ({ children, className, ...props }: any) => (
h4: (props: any) => (
<h4
{...props}
style={{ fontSize: '19px', marginTop: '1.5rem', marginBottom: '0.75rem' }}
className={clsx('font-medium leading-tight', className)}
>
{children}
</h4>
className={clsx('font-medium leading-tight', props.className)}
/>
),
p: (props: any) => (
<p

View File

@@ -15,14 +15,16 @@ export interface ChatLoadResult {
/**
* Resolve or create a copilot chat session.
* If chatId is provided, loads the existing chat. Otherwise creates a new one.
* Supports both workflow-scoped and workspace-scoped chats.
*/
export async function resolveOrCreateChat(params: {
chatId?: string
userId: string
workflowId: string
workflowId?: string
workspaceId?: string
model: string
}): Promise<ChatLoadResult> {
const { chatId, userId, workflowId, model } = params
const { chatId, userId, workflowId, workspaceId, model } = params
if (chatId) {
const [chat] = await db
@@ -43,7 +45,8 @@ export async function resolveOrCreateChat(params: {
.insert(copilotChats)
.values({
userId,
workflowId,
...(workflowId ? { workflowId } : {}),
...(workspaceId ? { workspaceId } : {}),
title: null,
model,
messages: [],
@@ -51,7 +54,7 @@ export async function resolveOrCreateChat(params: {
.returning()
if (!newChat) {
logger.warn('Failed to create new copilot chat row', { userId, workflowId })
logger.warn('Failed to create new copilot chat row', { userId, workflowId, workspaceId })
return {
chatId: '',
chat: null,

View File

@@ -269,7 +269,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
updatedMap[toolCallId] = {
...current,
state: targetState,
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
display: resolveToolDisplay(current.name, targetState, current.id, current.params, current.serverUI),
}
set({ toolCallsById: updatedMap })
@@ -469,7 +469,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
b.toolCall?.name,
targetState,
toolCallId,
b.toolCall?.params
b.toolCall?.params,
b.toolCall?.serverUI
),
},
}
@@ -507,7 +508,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
updatedMap[toolCallId] = {
...current,
state: targetState,
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
display: resolveToolDisplay(current.name, targetState, current.id, current.params, current.serverUI),
}
set({ toolCallsById: updatedMap })
}
@@ -532,7 +533,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
b.toolCall?.name,
targetState,
toolCallId,
b.toolCall?.params
b.toolCall?.params,
b.toolCall?.serverUI
),
},
}
@@ -579,6 +581,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
const isPartial = toolData.partial === true
const { toolCallsById } = get()
// Extract copilot-provided UI metadata for fallback display
const rawUI = (toolData.ui || data?.ui) as Record<string, unknown> | undefined
const serverUI = rawUI
? {
title: rawUI.title as string | undefined,
phaseLabel: rawUI.phaseLabel as string | undefined,
icon: rawUI.icon as string | undefined,
}
: undefined
const existing = toolCallsById[id]
const toolName = name || existing?.name || 'unknown_tool'
const isAutoAllowed = get().isToolAutoAllowed(toolName)
@@ -592,20 +604,24 @@ export const sseHandlers: Record<string, SSEHandler> = {
initialState = ClientToolCallState.executing
}
const effectiveServerUI = serverUI || existing?.serverUI
const next: CopilotToolCall = existing
? {
...existing,
name: toolName,
state: initialState,
...(args ? { params: args } : {}),
display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
...(effectiveServerUI ? { serverUI: effectiveServerUI } : {}),
display: resolveToolDisplay(toolName, initialState, id, args || existing.params, effectiveServerUI),
}
: {
id,
name: toolName,
state: initialState,
...(args ? { params: args } : {}),
display: resolveToolDisplay(toolName, initialState, id, args),
...(serverUI ? { serverUI } : {}),
display: resolveToolDisplay(toolName, initialState, id, args, serverUI),
}
const updated = { ...toolCallsById, [id]: next }
set({ toolCallsById: updated })

View File

@@ -178,7 +178,7 @@ function setToolState(toolCallId: string, state: ClientToolCallState): void {
[toolCallId]: {
...current,
state,
display: resolveToolDisplay(current.name, state, toolCallId, current.params),
display: resolveToolDisplay(current.name, state, toolCallId, current.params, current.serverUI),
},
}
useCopilotStore.setState({ toolCallsById: updated })

View File

@@ -285,7 +285,7 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
const updatedSubAgentToolCall = {
...existing,
state: targetState,
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params, existing.serverUI),
}
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall

View File

@@ -34,7 +34,7 @@ export function clearStreamingFlags(toolCall: CopilotToolCall): void {
? ClientToolCallState.success
: ClientToolCallState.aborted
toolCall.state = normalized
toolCall.display = resolveToolDisplay(toolCall.name, normalized, toolCall.id, toolCall.params)
toolCall.display = resolveToolDisplay(toolCall.name, normalized, toolCall.id, toolCall.params, toolCall.serverUI)
}
if (Array.isArray(toolCall.subAgentBlocks)) {

View File

@@ -1,15 +1,21 @@
import { createLogger } from '@sim/logger'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { prepareExecutionContext } from '@/lib/copilot/orchestrator/tool-executor'
import type { OrchestratorOptions, OrchestratorResult } from '@/lib/copilot/orchestrator/types'
import type {
ExecutionContext,
OrchestratorOptions,
OrchestratorResult,
} from '@/lib/copilot/orchestrator/types'
import { env } from '@/lib/core/config/env'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { buildToolCallSummaries, createStreamingContext, runStreamLoop } from './stream-core'
const logger = createLogger('CopilotOrchestrator')
export interface OrchestrateStreamOptions extends OrchestratorOptions {
userId: string
workflowId: string
workflowId?: string
workspaceId?: string
chatId?: string
}
@@ -17,8 +23,20 @@ export async function orchestrateCopilotStream(
requestPayload: Record<string, unknown>,
options: OrchestrateStreamOptions
): Promise<OrchestratorResult> {
const { userId, workflowId, chatId } = options
const execContext = await prepareExecutionContext(userId, workflowId)
const { userId, workflowId, workspaceId, chatId } = options
let execContext: ExecutionContext
if (workflowId) {
execContext = await prepareExecutionContext(userId, workflowId)
} else {
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
execContext = {
userId,
workflowId: '',
workspaceId,
decryptedEnvVars,
}
}
const payloadMsgId = requestPayload?.messageId
const context = createStreamingContext({

View File

@@ -62,7 +62,7 @@ describe('sse-handlers tool lifecycle', () => {
await sseHandlers.tool_call(
{
type: 'tool_call',
data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
data: { id: 'tool-1', name: 'read', arguments: { workflowId: 'workflow-1' } },
} as any,
context,
execContext,
@@ -90,7 +90,7 @@ describe('sse-handlers tool lifecycle', () => {
const event = {
type: 'tool_call',
data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
data: { id: 'tool-dup', name: 'read', arguments: { workflowId: 'workflow-1' } },
}
await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })

View File

@@ -24,6 +24,12 @@ import {
executeRedeploy,
} from './deployment-tools'
import { executeIntegrationToolDirect } from './integration-tools'
import {
executeVfsGlob,
executeVfsGrep,
executeVfsList,
executeVfsRead,
} from './vfs-tools'
import type {
CheckDeploymentStatusParams,
CreateFolderParams,
@@ -36,11 +42,8 @@ import type {
GetBlockOutputsParams,
GetBlockUpstreamReferencesParams,
GetDeployedWorkflowStateParams,
GetUserWorkflowParams,
GetWorkflowDataParams,
GetWorkflowFromNameParams,
ListFoldersParams,
ListUserWorkflowsParams,
ListWorkspaceMcpServersParams,
MoveFolderParams,
MoveWorkflowParams,
@@ -59,11 +62,8 @@ import {
executeGetBlockOutputs,
executeGetBlockUpstreamReferences,
executeGetDeployedWorkflowState,
executeGetUserWorkflow,
executeGetWorkflowData,
executeGetWorkflowFromName,
executeListFolders,
executeListUserWorkflows,
executeListUserWorkspaces,
executeMoveFolder,
executeMoveWorkflow,
@@ -319,17 +319,13 @@ async function executeManageCustomTool(
}
const SERVER_TOOLS = new Set<string>([
'get_blocks_and_tools',
'get_blocks_metadata',
'get_block_options',
'get_block_config',
'get_trigger_blocks',
'edit_workflow',
'get_workflow_console',
'search_documentation',
'search_online',
'set_environment_variables',
'get_credentials',
'make_api_request',
'knowledge_base',
])
@@ -338,9 +334,6 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
string,
(params: Record<string, unknown>, context: ExecutionContext) => Promise<ToolCallResult>
> = {
get_user_workflow: (p, c) => executeGetUserWorkflow(p as GetUserWorkflowParams, c),
get_workflow_from_name: (p, c) => executeGetWorkflowFromName(p as GetWorkflowFromNameParams, c),
list_user_workflows: (p, c) => executeListUserWorkflows(p as ListUserWorkflowsParams, c),
list_user_workspaces: (_p, c) => executeListUserWorkspaces(c),
list_folders: (p, c) => executeListFolders(p as ListFoldersParams, c),
create_workflow: (p, c) => executeCreateWorkflow(p as CreateWorkflowParams, c),
@@ -416,6 +409,11 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
}
},
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
// VFS tools
grep: (p, c) => executeVfsGrep(p, c),
glob: (p, c) => executeVfsGlob(p, c),
read: (p, c) => executeVfsRead(p, c),
list: (p, c) => executeVfsList(p, c),
}
/**

View File

@@ -5,19 +5,6 @@
// === Workflow Query Params ===
export interface GetUserWorkflowParams {
workflowId?: string
}
export interface GetWorkflowFromNameParams {
workflow_name?: string
}
export interface ListUserWorkflowsParams {
workspaceId?: string
folderId?: string
}
export interface GetWorkflowDataParams {
workflowId?: string
data_type?: string

View File

@@ -0,0 +1,128 @@
import { createLogger } from '@sim/logger'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { getOrMaterializeVFS } from '@/lib/copilot/vfs'
const logger = createLogger('VfsTools')
/**
 * Runs a grep-style search over the workspace's virtual filesystem.
 *
 * @param params - Raw tool-call arguments. Requires `pattern`; optionally
 *   `path`, `maxResults`, `output_mode` ('content' | 'files_with_matches' | 'count'),
 *   `ignoreCase`, `lineNumbers`, `context`.
 * @param context - Execution context; `workspaceId` must be present.
 * @returns Success with the results keyed by output mode (`matches`, `files`,
 *   or `counts`), or a failure result with an error message.
 */
export async function executeVfsGrep(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const pattern = params.pattern as string | undefined
  if (!pattern) {
    return { success: false, error: "Missing required parameter 'pattern'" }
  }

  const workspaceId = context.workspaceId
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  // Resolve the output mode once so the grep options and the output key
  // below cannot drift apart (previously defaulted in two places).
  const outputMode =
    (params.output_mode as 'content' | 'files_with_matches' | 'count') ?? 'content'

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
    const result = vfs.grep(pattern, params.path as string | undefined, {
      maxResults: (params.maxResults as number) ?? 50,
      outputMode,
      ignoreCase: (params.ignoreCase as boolean) ?? false,
      lineNumbers: (params.lineNumbers as boolean) ?? true,
      context: (params.context as number) ?? 0,
    })

    // Key the payload by what the caller asked for.
    const key =
      outputMode === 'files_with_matches' ? 'files' : outputMode === 'count' ? 'counts' : 'matches'
    return { success: true, output: { [key]: result } }
  } catch (err) {
    logger.error('vfs_grep failed', {
      error: err instanceof Error ? err.message : String(err),
    })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_grep failed' }
  }
}
/**
 * Resolves a glob pattern against the workspace's virtual filesystem.
 *
 * @param params - Raw tool-call arguments; requires `pattern`.
 * @param context - Execution context; `workspaceId` must be present.
 * @returns Success with `{ files }` (matching paths), or a failure result.
 */
export async function executeVfsGlob(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const globPattern = params.pattern as string | undefined
  if (!globPattern) {
    return { success: false, error: "Missing required parameter 'pattern'" }
  }

  const { workspaceId, userId } = context
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, userId)
    return { success: true, output: { files: vfs.glob(globPattern) } }
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err)
    logger.error('vfs_glob failed', { error: detail })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_glob failed' }
  }
}
/**
 * Reads a file from the workspace's virtual filesystem.
 *
 * @param params - Raw tool-call arguments; requires `path`, optionally
 *   `offset` and `limit` for partial reads.
 * @param context - Execution context; `workspaceId` must be present.
 * @returns Success with the read result, or a failure result (including a
 *   "File not found" error when the path does not resolve).
 */
export async function executeVfsRead(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const filePath = params.path as string | undefined
  if (!filePath) {
    return { success: false, error: "Missing required parameter 'path'" }
  }

  const { workspaceId } = context
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
    const offset = params.offset as number | undefined
    const limit = params.limit as number | undefined
    const contents = vfs.read(filePath, offset, limit)
    if (!contents) {
      return { success: false, error: `File not found: ${filePath}` }
    }
    return { success: true, output: contents }
  } catch (err) {
    logger.error('vfs_read failed', {
      error: err instanceof Error ? err.message : String(err),
    })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_read failed' }
  }
}
/**
 * Lists directory entries at a path in the workspace's virtual filesystem.
 *
 * @param params - Raw tool-call arguments; requires `path`.
 * @param context - Execution context; `workspaceId` must be present.
 * @returns Success with `{ entries }`, or a failure result.
 */
export async function executeVfsList(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const dirPath = params.path as string | undefined
  if (!dirPath) {
    return { success: false, error: "Missing required parameter 'path'" }
  }

  const { workspaceId, userId } = context
  if (!workspaceId) {
    return { success: false, error: 'No workspace context available' }
  }

  try {
    const vfs = await getOrMaterializeVFS(workspaceId, userId)
    return { success: true, output: { entries: vfs.list(dirPath) } }
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err)
    logger.error('vfs_list failed', { error: detail })
    return { success: false, error: err instanceof Error ? err.message : 'vfs_list failed' }
  }
}

View File

@@ -2,10 +2,7 @@ import { db } from '@sim/db'
import { customTools, permissions, workflow, workflowFolder, workspace } from '@sim/db/schema'
import { and, asc, desc, eq, isNull, or } from 'drizzle-orm'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import {
formatNormalizedWorkflowForCopilot,
normalizeWorkflowName,
} from '@/lib/copilot/tools/shared/workflow-utils'
import { formatNormalizedWorkflowForCopilot } from '@/lib/copilot/tools/shared/workflow-utils'
import { mcpService } from '@/lib/mcp/service'
import { listWorkspaceFiles } from '@/lib/uploads/contexts/workspace'
import { getEffectiveBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
@@ -22,116 +19,16 @@ import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
ensureWorkflowAccess,
ensureWorkspaceAccess,
getAccessibleWorkflowsForUser,
getDefaultWorkspaceId,
} from '../access'
import type {
GetBlockOutputsParams,
GetBlockUpstreamReferencesParams,
GetDeployedWorkflowStateParams,
GetUserWorkflowParams,
GetWorkflowDataParams,
GetWorkflowFromNameParams,
ListFoldersParams,
ListUserWorkflowsParams,
} from '../param-types'
/**
 * Reads a workflow (explicit `workflowId` param, else the one from context)
 * and returns its normalized, copilot-facing representation.
 */
export async function executeGetUserWorkflow(
  params: GetUserWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const targetId = params.workflowId || context.workflowId
    if (!targetId) {
      return { success: false, error: 'workflowId is required' }
    }
    // Throws when the user cannot access the workflow; handled below.
    const access = await ensureWorkflowAccess(targetId, context.userId)
    const normalized = await loadWorkflowFromNormalizedTables(targetId)
    const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
    if (!userWorkflow) {
      return { success: false, error: 'Workflow has no normalized data' }
    }
    const output = {
      workflowId: targetId,
      workflowName: access.workflow.name || '',
      workspaceId: access.workspaceId,
      userWorkflow,
    }
    return { success: true, output }
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error)
    return { success: false, error: message }
  }
}
/**
 * Looks up a workflow by name (compared via the shared name normalizer)
 * among the workflows the user can access, then returns its normalized,
 * copilot-facing representation.
 */
export async function executeGetWorkflowFromName(
  params: GetWorkflowFromNameParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const rawName = params.workflow_name
    const requestedName = typeof rawName === 'string' ? rawName.trim() : ''
    if (!requestedName) {
      return { success: false, error: 'workflow_name is required' }
    }
    const candidates = await getAccessibleWorkflowsForUser(context.userId)
    // Normalize both sides so the match ignores formatting differences.
    const wanted = normalizeWorkflowName(requestedName)
    const found = candidates.find((candidate) => normalizeWorkflowName(candidate.name) === wanted)
    if (!found) {
      return { success: false, error: `Workflow not found: ${requestedName}` }
    }
    const normalized = await loadWorkflowFromNormalizedTables(found.id)
    const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
    if (!userWorkflow) {
      return { success: false, error: 'Workflow has no normalized data' }
    }
    return {
      success: true,
      output: {
        workflowId: found.id,
        workflowName: found.name || '',
        workspaceId: found.workspaceId,
        userWorkflow,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
/**
 * Lists every workflow the user can access, optionally scoped by workspace
 * and/or folder, as lightweight id/name/location records.
 */
export async function executeListUserWorkflows(
  params: ListUserWorkflowsParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workspaceId = params?.workspaceId as string | undefined
    const folderId = params?.folderId as string | undefined
    const accessible = await getAccessibleWorkflowsForUser(context.userId, { workspaceId, folderId })
    const workflows = accessible.map((item) => ({
      workflowId: item.id,
      workflowName: item.name || '',
      workspaceId: item.workspaceId,
      folderId: item.folderId,
    }))
    return { success: true, output: { workflows } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
export async function executeListUserWorkspaces(
context: ExecutionContext
): Promise<ToolCallResult> {

View File

@@ -35,6 +35,8 @@ export interface SSEEvent {
phase?: string
/** Set on tool_result events */
failedDependency?: boolean
/** UI metadata from copilot (title, icon, phaseLabel) */
ui?: Record<string, unknown>
}
export type ToolCallStatus = 'pending' | 'executing' | 'success' | 'error' | 'skipped' | 'rejected'

View File

@@ -1,5 +1,28 @@
import { createLogger } from '@sim/logger'
import { Loader2 } from 'lucide-react'
import type { LucideIcon } from 'lucide-react'
import {
BookOpen,
Bug,
Cloud,
Code,
FileText,
Folder,
Globe,
HelpCircle,
Key,
Loader2,
Lock,
Pencil,
Play,
Plus,
Rocket,
Search,
Server,
Settings,
Terminal,
Wrench,
Zap,
} from 'lucide-react'
import {
ClientToolCallState,
type ClientToolDisplay,
@@ -16,16 +39,62 @@ type StoreSet = (
/** Respond tools are internal to copilot subagents and should never be shown in the UI */
const HIDDEN_TOOL_SUFFIX = '_respond'
/** UI metadata sent by the copilot on SSE tool_call events. */
export interface ServerToolUI {
  /** Human-readable tool title; used as a display fallback when the tool has no registry entry. */
  title?: string
  /** Phase label — NOTE(review): no consumer visible in this file; confirm usage. */
  phaseLabel?: string
  /** Icon name string; resolved to a Lucide component via ICON_MAP. */
  icon?: string
}
/**
 * Maps copilot icon name strings (sent over SSE) to Lucide icon components.
 * Names missing from this map fall back to a spinner in resolveIcon.
 */
const ICON_MAP: Record<string, LucideIcon> = {
  search: Search,
  globe: Globe,
  hammer: Wrench, // no hammer glyph imported; Wrench stands in
  rocket: Rocket,
  lock: Lock,
  book: BookOpen,
  wrench: Wrench,
  zap: Zap,
  play: Play,
  cloud: Cloud,
  key: Key,
  pencil: Pencil,
  terminal: Terminal,
  workflow: Settings, // approximated with Settings
  settings: Settings,
  server: Server,
  bug: Bug,
  brain: BookOpen, // approximated with BookOpen
  code: Code,
  help: HelpCircle,
  plus: Plus,
  file: FileText,
  folder: Folder,
}
/** Looks up the Lucide component for a copilot icon name; spinner as fallback. */
function resolveIcon(iconName: string | undefined): LucideIcon {
  const match = iconName ? ICON_MAP[iconName] : undefined
  return match || Loader2
}
export function resolveToolDisplay(
toolName: string | undefined,
state: ClientToolCallState,
_toolCallId?: string,
params?: Record<string, any>
params?: Record<string, unknown>,
serverUI?: ServerToolUI
): ClientToolDisplay | undefined {
if (!toolName) return undefined
if (toolName.endsWith(HIDDEN_TOOL_SUFFIX)) return undefined
const entry = TOOL_DISPLAY_REGISTRY[toolName]
if (!entry) return humanizedFallback(toolName, state)
if (!entry) {
// Use copilot-provided UI as a better fallback than humanized name
if (serverUI?.title) {
return serverUIFallback(serverUI, state)
}
return humanizedFallback(toolName, state)
}
if (entry.uiConfig?.dynamicText && params) {
const dynamicText = entry.uiConfig.dynamicText(params, state)
@@ -51,6 +120,28 @@ export function resolveToolDisplay(
return humanizedFallback(toolName, state)
}
/**
 * Generates display text/icon from copilot-provided UI metadata, for tools
 * that have no local registry entry. Callers guarantee `title` is present.
 */
function serverUIFallback(
  serverUI: ServerToolUI,
  state: ClientToolCallState
): ClientToolDisplay {
  const icon = resolveIcon(serverUI.icon)
  const title = serverUI.title!
  const subject = title.toLowerCase()
  if (state === ClientToolCallState.success) {
    return { text: `Completed ${subject}`, icon }
  }
  if (state === ClientToolCallState.error) {
    return { text: `Failed ${subject}`, icon }
  }
  if (state === ClientToolCallState.rejected) {
    return { text: `Skipped ${subject}`, icon }
  }
  if (state === ClientToolCallState.aborted) {
    return { text: `Aborted ${subject}`, icon }
  }
  // In-flight states keep the raw title and show a spinner instead of the
  // server-provided icon.
  return { text: title, icon: Loader2 }
}
export function humanizedFallback(
toolName: string,
state: ClientToolCallState
@@ -121,7 +212,7 @@ export function abortAllInProgressTools(set: StoreSet, get: () => CopilotStore)
...tc,
state: resolved,
subAgentStreaming: false,
display: resolveToolDisplay(tc.name, resolved, id, tc.params),
display: resolveToolDisplay(tc.name, resolved, id, tc.params, tc.serverUI),
}
hasUpdates = true
} else if (tc.subAgentStreaming) {
@@ -150,7 +241,7 @@ export function abortAllInProgressTools(set: StoreSet, get: () => CopilotStore)
toolCall: {
...prev,
state: resolved,
display: resolveToolDisplay(prev?.name, resolved, prev?.id, prev?.params),
display: resolveToolDisplay(prev?.name, resolved, prev?.id, prev?.params, prev?.serverUI),
},
}
}

View File

@@ -1,35 +1,24 @@
import type { LucideIcon } from 'lucide-react'
import {
Blocks,
BookOpen,
Bug,
Check,
CheckCircle,
CheckCircle2,
ClipboardCheck,
Compass,
Database,
FileCode,
FileText,
FlaskConical,
GitBranch,
Globe,
Globe2,
Grid2x2,
Grid2x2Check,
Grid2x2X,
Info,
Key,
KeyRound,
ListChecks,
ListFilter,
ListTodo,
Loader2,
MessageSquare,
MinusCircle,
Moon,
Navigation,
Pencil,
PencilLine,
Play,
PlugZap,
@@ -41,13 +30,11 @@ import {
Sparkles,
Tag,
TerminalSquare,
WorkflowIcon,
Wrench,
X,
XCircle,
Zap,
} from 'lucide-react'
import { getLatestBlock } from '@/blocks/registry'
import { getCustomTool } from '@/hooks/queries/custom-tools'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -340,46 +327,6 @@ const META_build: ToolMetadata = {
},
}
/** Display metadata for the `debug` subagent tool. */
const META_debug: ToolMetadata = {
  // Per-state labels: spinner while in flight, Bug icon on success,
  // XCircle for all failure/skip/abort outcomes.
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Debugging', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Debugging', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Debugging', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Debugged', icon: Bug },
    [ClientToolCallState.error]: { text: 'Failed to debug', icon: XCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped debug', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted debug', icon: XCircle },
  },
  uiConfig: {
    // Rendered as a subagent block that collapses once the run completes.
    subagent: {
      streamingLabel: 'Debugging',
      completedLabel: 'Debugged',
      shouldCollapse: true,
      outputArtifacts: [],
    },
  },
}
const META_discovery: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Discovering', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Discovering', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Discovering', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Discovered', icon: Search },
[ClientToolCallState.error]: { text: 'Failed to discover', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped discovery', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted discovery', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Discovering',
completedLabel: 'Discovered',
shouldCollapse: true,
outputArtifacts: [],
},
},
}
const META_deploy: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Deploying', icon: Loader2 },
@@ -570,28 +517,6 @@ const META_deploy_mcp: ToolMetadata = {
},
}
const META_edit: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Editing', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Editing', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Editing', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Edited', icon: Pencil },
[ClientToolCallState.error]: { text: 'Failed to apply edit', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped edit', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted edit', icon: XCircle },
},
uiConfig: {
isSpecial: true,
subagent: {
streamingLabel: 'Editing',
completedLabel: 'Edited',
shouldCollapse: false, // Edit subagent stays expanded
outputArtifacts: ['edit_summary'],
hideThinkingText: true, // We show WorkflowEditSummary instead
},
},
}
const META_edit_workflow: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
@@ -609,106 +534,6 @@ const META_edit_workflow: ToolMetadata = {
},
}
const META_evaluate: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Evaluating', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Evaluating', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Evaluated', icon: ClipboardCheck },
[ClientToolCallState.error]: { text: 'Failed to evaluate', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped evaluation', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted evaluation', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Evaluating',
completedLabel: 'Evaluated',
shouldCollapse: true,
outputArtifacts: [],
},
},
}
const META_get_block_config: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Getting block config', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Getting block config', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Getting block config', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Retrieved block config', icon: FileCode },
[ClientToolCallState.error]: { text: 'Failed to get block config', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted getting block config', icon: XCircle },
[ClientToolCallState.rejected]: {
text: 'Skipped getting block config',
icon: MinusCircle,
},
},
getDynamicText: (params, state) => {
if (params?.blockType && typeof params.blockType === 'string') {
const blockConfig = getLatestBlock(params.blockType)
const blockName = (blockConfig?.name ?? params.blockType.replace(/_/g, ' ')).toLowerCase()
const opSuffix = params.operation ? ` (${params.operation})` : ''
switch (state) {
case ClientToolCallState.success:
return `Retrieved ${blockName}${opSuffix} config`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Retrieving ${blockName}${opSuffix} config`
case ClientToolCallState.error:
return `Failed to retrieve ${blockName}${opSuffix} config`
case ClientToolCallState.aborted:
return `Aborted retrieving ${blockName}${opSuffix} config`
case ClientToolCallState.rejected:
return `Skipped retrieving ${blockName}${opSuffix} config`
}
}
return undefined
},
}
const META_get_block_options: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Getting block operations', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Getting block operations', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Getting block operations', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Retrieved block operations', icon: ListFilter },
[ClientToolCallState.error]: { text: 'Failed to get block operations', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted getting block operations', icon: XCircle },
[ClientToolCallState.rejected]: {
text: 'Skipped getting block operations',
icon: MinusCircle,
},
},
getDynamicText: (params, state) => {
const blockId =
(params as any)?.blockId ||
(params as any)?.blockType ||
(params as any)?.block_id ||
(params as any)?.block_type
if (typeof blockId === 'string') {
const blockConfig = getLatestBlock(blockId)
const blockName = (blockConfig?.name ?? blockId.replace(/_/g, ' ')).toLowerCase()
switch (state) {
case ClientToolCallState.success:
return `Retrieved ${blockName} operations`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Retrieving ${blockName} operations`
case ClientToolCallState.error:
return `Failed to retrieve ${blockName} operations`
case ClientToolCallState.aborted:
return `Aborted retrieving ${blockName} operations`
case ClientToolCallState.rejected:
return `Skipped retrieving ${blockName} operations`
}
}
return undefined
},
}
const META_get_block_outputs: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Getting block outputs', icon: Loader2 },
@@ -767,81 +592,6 @@ const META_get_block_upstream_references: ToolMetadata = {
},
}
const META_get_blocks_and_tools: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Exploring available options', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Exploring available options', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Exploring available options', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Explored available options', icon: Blocks },
[ClientToolCallState.error]: { text: 'Failed to explore options', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted exploring options', icon: MinusCircle },
[ClientToolCallState.rejected]: { text: 'Skipped exploring options', icon: MinusCircle },
},
interrupt: undefined,
}
const META_get_blocks_metadata: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Searching block choices', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Searching block choices', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Searching block choices', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Searched block choices', icon: ListFilter },
[ClientToolCallState.error]: { text: 'Failed to search block choices', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted searching block choices', icon: XCircle },
[ClientToolCallState.rejected]: {
text: 'Skipped searching block choices',
icon: MinusCircle,
},
},
getDynamicText: (params, state) => {
if (params?.blockIds && Array.isArray(params.blockIds) && params.blockIds.length > 0) {
const blockList = params.blockIds
.slice(0, 3)
.map((blockId) => blockId.replace(/_/g, ' '))
.join(', ')
const more = params.blockIds.length > 3 ? '...' : ''
const blocks = `${blockList}${more}`
switch (state) {
case ClientToolCallState.success:
return `Searched ${blocks}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Searching ${blocks}`
case ClientToolCallState.error:
return `Failed to search ${blocks}`
case ClientToolCallState.aborted:
return `Aborted searching ${blocks}`
case ClientToolCallState.rejected:
return `Skipped searching ${blocks}`
}
}
return undefined
},
}
const META_get_credentials: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Fetching connected integrations', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Fetching connected integrations', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Fetching connected integrations', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Fetched connected integrations', icon: Key },
[ClientToolCallState.error]: {
text: 'Failed to fetch connected integrations',
icon: XCircle,
},
[ClientToolCallState.aborted]: {
text: 'Aborted fetching connected integrations',
icon: MinusCircle,
},
[ClientToolCallState.rejected]: {
text: 'Skipped fetching connected integrations',
icon: MinusCircle,
},
},
}
const META_get_examples_rag: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Fetching examples', icon: Loader2 },
@@ -963,19 +713,6 @@ const META_get_page_contents: ToolMetadata = {
},
}
const META_get_trigger_blocks: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Finding trigger blocks', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Finding trigger blocks', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Finding trigger blocks', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Found trigger blocks', icon: ListFilter },
[ClientToolCallState.error]: { text: 'Failed to find trigger blocks', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted finding trigger blocks', icon: MinusCircle },
[ClientToolCallState.rejected]: { text: 'Skipped finding trigger blocks', icon: MinusCircle },
},
interrupt: undefined,
}
const META_get_trigger_examples: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Selecting a trigger', icon: Loader2 },
@@ -989,41 +726,6 @@ const META_get_trigger_examples: ToolMetadata = {
interrupt: undefined,
}
/** Display metadata for the `get_user_workflow` tool (reads a workflow). */
const META_get_user_workflow: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Reading your workflow', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Reading your workflow', icon: WorkflowIcon },
    [ClientToolCallState.executing]: { text: 'Reading your workflow', icon: Loader2 },
    [ClientToolCallState.aborted]: { text: 'Aborted reading your workflow', icon: XCircle },
    [ClientToolCallState.success]: { text: 'Read your workflow', icon: WorkflowIcon },
    [ClientToolCallState.error]: { text: 'Failed to read your workflow', icon: X },
    [ClientToolCallState.rejected]: { text: 'Skipped reading your workflow', icon: XCircle },
  },
  // Personalizes the label with the workflow's name when it can be resolved
  // from the registry store; returning undefined falls back to displayNames.
  getDynamicText: (params, state) => {
    // Prefer the explicit param, else the currently active workflow.
    const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
    if (workflowId) {
      const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name
      if (workflowName) {
        switch (state) {
          case ClientToolCallState.success:
            return `Read ${workflowName}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Reading ${workflowName}`
          case ClientToolCallState.error:
            return `Failed to read ${workflowName}`
          case ClientToolCallState.aborted:
            return `Aborted reading ${workflowName}`
          case ClientToolCallState.rejected:
            return `Skipped reading ${workflowName}`
        }
      }
    }
    return undefined
  },
}
const META_get_workflow_console: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Fetching execution logs', icon: Loader2 },
@@ -1106,59 +808,6 @@ const META_get_workflow_data: ToolMetadata = {
},
}
const META_get_workflow_from_name: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Reading workflow', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Reading workflow', icon: FileText },
[ClientToolCallState.executing]: { text: 'Reading workflow', icon: Loader2 },
[ClientToolCallState.aborted]: { text: 'Aborted reading workflow', icon: XCircle },
[ClientToolCallState.success]: { text: 'Read workflow', icon: FileText },
[ClientToolCallState.error]: { text: 'Failed to read workflow', icon: X },
[ClientToolCallState.rejected]: { text: 'Skipped reading workflow', icon: XCircle },
},
getDynamicText: (params, state) => {
if (params?.workflow_name && typeof params.workflow_name === 'string') {
const workflowName = params.workflow_name
switch (state) {
case ClientToolCallState.success:
return `Read ${workflowName}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Reading ${workflowName}`
case ClientToolCallState.error:
return `Failed to read ${workflowName}`
case ClientToolCallState.aborted:
return `Aborted reading ${workflowName}`
case ClientToolCallState.rejected:
return `Skipped reading ${workflowName}`
}
}
return undefined
},
}
const META_info: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Getting info', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Getting info', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Getting info', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Retrieved info', icon: Info },
[ClientToolCallState.error]: { text: 'Failed to get info', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped info', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted info', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Getting info',
completedLabel: 'Info retrieved',
shouldCollapse: true,
outputArtifacts: [],
},
},
}
const META_knowledge: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Managing knowledge', icon: Loader2 },
@@ -1230,18 +879,6 @@ const META_knowledge_base: ToolMetadata = {
},
}
/** Display metadata for the `list_user_workflows` tool. */
const META_list_user_workflows: ToolMetadata = {
  // Static per-state labels only; this tool has no dynamic text.
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Listing your workflows', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Listing your workflows', icon: ListChecks },
    [ClientToolCallState.executing]: { text: 'Listing your workflows', icon: Loader2 },
    [ClientToolCallState.aborted]: { text: 'Aborted listing workflows', icon: XCircle },
    [ClientToolCallState.success]: { text: 'Listed your workflows', icon: ListChecks },
    [ClientToolCallState.error]: { text: 'Failed to list workflows', icon: X },
    [ClientToolCallState.rejected]: { text: 'Skipped listing workflows', icon: XCircle },
  },
}
const META_list_workspace_mcp_servers: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: {
@@ -1604,26 +1241,6 @@ const META_oauth_request_access: ToolMetadata = {
},
}
const META_plan: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Planning', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Planning', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Planning', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Planned', icon: ListTodo },
[ClientToolCallState.error]: { text: 'Failed to plan', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped plan', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted plan', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Planning',
completedLabel: 'Planned',
shouldCollapse: true,
outputArtifacts: ['plan'],
},
},
}
const META_redeploy: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Redeploying workflow', icon: Loader2 },
@@ -2466,66 +2083,6 @@ const META_superagent: ToolMetadata = {
},
}
const META_test: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Testing', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Testing', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Testing', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Tested', icon: FlaskConical },
[ClientToolCallState.error]: { text: 'Failed to test', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped test', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted test', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Testing',
completedLabel: 'Tested',
shouldCollapse: true,
outputArtifacts: [],
},
},
}
const META_tour: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Touring', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Touring', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Touring', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Completed tour', icon: Compass },
[ClientToolCallState.error]: { text: 'Failed tour', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped tour', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted tour', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Touring',
completedLabel: 'Tour complete',
shouldCollapse: true,
outputArtifacts: [],
},
},
}
const META_workflow: ToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Managing workflow', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Managing workflow', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Managing workflow', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Managed workflow', icon: GitBranch },
[ClientToolCallState.error]: { text: 'Failed to manage workflow', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped workflow', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted workflow', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Managing workflow',
completedLabel: 'Workflow managed',
shouldCollapse: true,
outputArtifacts: [],
},
},
}
const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
auth: META_auth,
check_deployment_status: META_check_deployment_status,
@@ -2534,37 +2091,23 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
create_workspace_mcp_server: META_create_workspace_mcp_server,
build: META_build,
custom_tool: META_custom_tool,
debug: META_debug,
deploy: META_deploy,
discovery: META_discovery,
deploy_api: META_deploy_api,
deploy_chat: META_deploy_chat,
deploy_mcp: META_deploy_mcp,
edit: META_edit,
edit_workflow: META_edit_workflow,
evaluate: META_evaluate,
get_block_config: META_get_block_config,
get_block_options: META_get_block_options,
get_block_outputs: META_get_block_outputs,
get_block_upstream_references: META_get_block_upstream_references,
get_blocks_and_tools: META_get_blocks_and_tools,
get_blocks_metadata: META_get_blocks_metadata,
get_credentials: META_get_credentials,
generate_api_key: META_generate_api_key,
get_examples_rag: META_get_examples_rag,
get_operations_examples: META_get_operations_examples,
get_page_contents: META_get_page_contents,
get_platform_actions: META_get_platform_actions,
get_trigger_blocks: META_get_trigger_blocks,
get_trigger_examples: META_get_trigger_examples,
get_user_workflow: META_get_user_workflow,
get_workflow_console: META_get_workflow_console,
get_workflow_data: META_get_workflow_data,
get_workflow_from_name: META_get_workflow_from_name,
info: META_info,
knowledge: META_knowledge,
knowledge_base: META_knowledge_base,
list_user_workflows: META_list_user_workflows,
list_workspace_mcp_servers: META_list_workspace_mcp_servers,
make_api_request: META_make_api_request,
manage_custom_tool: META_manage_custom_tool,
@@ -2572,7 +2115,6 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
mark_todo_in_progress: META_mark_todo_in_progress,
navigate_ui: META_navigate_ui,
oauth_request_access: META_oauth_request_access,
plan: META_plan,
redeploy: META_redeploy,
remember_debug: META_remember_debug,
research: META_research,
@@ -2591,9 +2133,6 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
sleep: META_sleep,
summarize_conversation: META_summarize_conversation,
superagent: META_superagent,
test: META_test,
tour: META_tour,
workflow: META_workflow,
}
export const TOOL_DISPLAY_REGISTRY: Record<string, ToolDisplayEntry> = Object.fromEntries(

View File

@@ -27,25 +27,6 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
properties: {},
},
},
{
name: 'list_workflows',
toolId: 'list_user_workflows',
description:
'List all workflows the user has access to. Returns workflow IDs, names, workspace, and folder info. Use workspaceId/folderId to scope results.',
inputSchema: {
type: 'object',
properties: {
workspaceId: {
type: 'string',
description: 'Optional workspace ID to filter workflows.',
},
folderId: {
type: 'string',
description: 'Optional folder ID to filter workflows.',
},
},
},
},
{
name: 'list_folders',
toolId: 'list_folders',
@@ -62,22 +43,6 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
required: ['workspaceId'],
},
},
{
name: 'get_workflow',
toolId: 'get_user_workflow',
description:
'Get a workflow by ID. Returns the full workflow definition including all blocks, connections, and configuration.',
inputSchema: {
type: 'object',
properties: {
workflowId: {
type: 'string',
description: 'Workflow ID to retrieve.',
},
},
required: ['workflowId'],
},
},
{
name: 'create_workflow',
toolId: 'create_workflow',

View File

@@ -1,493 +0,0 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
GetBlockConfigInput,
type GetBlockConfigInputType,
GetBlockConfigResult,
type GetBlockConfigResultType,
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
import { isHiddenFromDisplay, type SubBlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { PROVIDER_DEFINITIONS } from '@/providers/models'
import { tools as toolsRegistry } from '@/tools/registry'
import { getTrigger, isTriggerValid } from '@/triggers'
/** JSON-schema-like description of a single block input field. */
interface InputFieldSchema {
  type: string
  description?: string
  placeholder?: string
  required?: boolean
  /** Enumerated allowed values, populated from the subBlock's options. */
  options?: string[]
  /** Default value; shape depends on `type`. */
  default?: any
  // Numeric bounds for the field.
  min?: number
  max?: number
}
/**
 * Collects the ids of all statically-declared models from
 * PROVIDER_DEFINITIONS. Serves as fallback data when client store state is
 * not available server-side.
 */
function getStaticModelOptions(): string[] {
  // Providers whose model lists are fetched dynamically have no static data.
  const dynamicProviders = new Set(['ollama', 'vllm', 'openrouter'])
  return Object.values(PROVIDER_DEFINITIONS).flatMap((provider) => {
    if (dynamicProviders.has(provider.id)) {
      return []
    }
    return provider?.models ? provider.models.map((model) => model.id) : []
  })
}
/**
 * Attempts to call a dynamic options function with fallback data injected.
 *
 * Some subBlock `options` functions read model lists from the client-side
 * providers store, which is not populated server-side. This temporarily
 * swaps `useProvidersStore.getState` for a mock backed by the static model
 * list, invokes the options function, then restores the original getState.
 *
 * NOTE(review): the patch is process-global for the duration of the call —
 * presumably safe because optionsFn is synchronous; confirm no concurrent
 * reader depends on the real store state during this window.
 */
function callOptionsWithFallback(optionsFn: () => any[]): any[] | undefined {
  const staticModels = getStaticModelOptions()
  // Mirrors the real store shape: static models under `base`, dynamic
  // providers left empty.
  const mockProvidersState = {
    providers: {
      base: { models: staticModels },
      ollama: { models: [] },
      vllm: { models: [] },
      openrouter: { models: [] },
    },
  }
  let originalGetState: (() => any) | undefined
  let store: any
  try {
    // eslint-disable-next-line @typescript-eslint/no-require-imports
    store = require('@/stores/providers')
    if (store?.useProvidersStore?.getState) {
      originalGetState = store.useProvidersStore.getState
      store.useProvidersStore.getState = () => mockProvidersState
    }
  } catch {
    // Store module not available
  }
  try {
    return optionsFn()
  } finally {
    // Always restore the real getState, even if optionsFn throws.
    if (store?.useProvidersStore && originalGetState) {
      store.useProvidersStore.getState = originalGetState
    }
  }
}
/**
 * Resolves a subBlock's options to a list of option values, handling both
 * static arrays and synchronous option functions. Returns undefined when
 * options cannot be resolved (async fetchOptions, errors, empty lists).
 */
function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
  // Async, network-backed options cannot be resolved synchronously here.
  if (sb.fetchOptions) {
    return undefined
  }
  let resolved: any[] | undefined
  try {
    resolved =
      typeof sb.options === 'function' ? callOptionsWithFallback(sb.options) : sb.options
  } catch {
    return undefined
  }
  if (!Array.isArray(resolved) || resolved.length === 0) {
    return undefined
  }
  const values: string[] = []
  for (const opt of resolved) {
    if (!opt) continue
    // Prefer the option id (the actual value edit_workflow expects) over
    // the display label.
    const value = typeof opt === 'object' ? opt.id || opt.label : String(opt)
    if (value !== undefined) {
      values.push(value)
    }
  }
  return values
}
/**
 * Simplified schema describing a single block output field, possibly with
 * nested object properties and an array item type.
 */
interface OutputFieldSchema {
  type: string
  description?: string
  // Nested field schemas when the output is an object.
  properties?: Record<string, OutputFieldSchema>
  // Element type when the output is an array.
  items?: { type: string }
}
/**
 * Returns true when a subBlock condition gates on the given operation.
 * Accepts either a condition object or a zero-arg function producing one.
 * Only positive (non-`not`) conditions on the 'operation' field can match.
 */
function matchesOperation(condition: any, operation: string): boolean {
  if (!condition) return false
  const resolved = typeof condition === 'function' ? condition() : condition
  if (!resolved || resolved.field !== 'operation' || resolved.not) return false
  return Array.isArray(resolved.value)
    ? resolved.value.includes(operation)
    : resolved.value === operation
}
/**
 * Extracts the input schema for a block from its subBlock configs.
 *
 * Filtering rules:
 * - trigger mode keeps only subBlocks declared with mode 'trigger';
 *   normal mode drops trigger-only subBlocks
 * - hidden subBlocks are always excluded
 * - when an operation is given, unconditional subBlocks (common parameters)
 *   are kept, plus those whose condition matches that operation; any other
 *   condition is treated as not applicable (see matchesOperation)
 *
 * @param subBlocks - subBlock configs from the block registry
 * @param operation - optional operation id to filter operation-specific fields
 * @param triggerMode - when true, extract trigger-mode inputs instead
 * @returns map of subBlock id to its simplified input field schema
 */
function extractInputsFromSubBlocks(
  subBlocks: SubBlockConfig[],
  operation?: string,
  triggerMode?: boolean
): Record<string, InputFieldSchema> {
  const inputs: Record<string, InputFieldSchema> = {}
  for (const sb of subBlocks) {
    // Trigger-mode filtering (see rules above).
    if (triggerMode ? sb.mode !== 'trigger' : sb.mode === 'trigger') continue
    // Skip hidden subBlocks
    if (sb.hidden) continue
    // Operation filtering: delegate entirely to matchesOperation instead of
    // duplicating its field/not/value logic inline.
    if (operation) {
      const condition = typeof sb.condition === 'function' ? sb.condition() : sb.condition
      if (condition && !matchesOperation(condition, operation)) continue
    }
    const field: InputFieldSchema = {
      type: mapSubBlockTypeToSchemaType(sb.type),
    }
    if (sb.description) field.description = sb.description
    else if (sb.title) field.description = sb.title
    if (sb.placeholder) field.placeholder = sb.placeholder
    // `required` may be a boolean or a conditional-requirement object; the
    // latter is surfaced simply as required=true.
    if (typeof sb.required === 'boolean') {
      field.required = sb.required
    } else if (typeof sb.required === 'object') {
      field.required = true
    }
    // Options resolver handles both static arrays and dynamic model lists.
    const resolvedOptions = resolveSubBlockOptions(sb)
    if (resolvedOptions && resolvedOptions.length > 0) {
      field.options = resolvedOptions
    }
    if (sb.defaultValue !== undefined) {
      field.default = sb.defaultValue
    }
    // Numeric constraints (sliders etc.).
    if (sb.min !== undefined) field.min = sb.min
    if (sb.max !== undefined) field.max = sb.max
    inputs[sb.id] = field
  }
  return inputs
}
/**
 * Lookup table from subBlock UI type to simplified schema type.
 * Hoisted to module level so it is built once, not on every call.
 */
const SUB_BLOCK_SCHEMA_TYPES: Record<string, string> = {
  'short-input': 'string',
  'long-input': 'string',
  code: 'string',
  dropdown: 'string',
  combobox: 'string',
  slider: 'number',
  switch: 'boolean',
  'tool-input': 'json',
  'checkbox-list': 'array',
  'grouped-checkbox-list': 'array',
  'condition-input': 'json',
  'eval-input': 'json',
  'time-input': 'string',
  'oauth-input': 'credential',
  'file-selector': 'string',
  'project-selector': 'string',
  'channel-selector': 'string',
  'user-selector': 'string',
  'folder-selector': 'string',
  'knowledge-base-selector': 'string',
  'document-selector': 'string',
  'mcp-server-selector': 'string',
  'mcp-tool-selector': 'string',
  table: 'json',
  'file-upload': 'file',
  'messages-input': 'array',
}

/**
 * Maps a subBlock UI type to a simplified schema type.
 * Unknown types default to 'string'.
 */
function mapSubBlockTypeToSchemaType(type: string): string {
  return SUB_BLOCK_SCHEMA_TYPES[type] ?? 'string'
}
/**
 * Builds an OutputFieldSchema from a raw output definition, recursing into
 * nested object properties and capturing array item types.
 */
function extractOutputField(def: any): OutputFieldSchema {
  // Shorthand form: a bare string is the type itself.
  if (typeof def === 'string') return { type: def }
  if (def === null || typeof def !== 'object') return { type: 'any' }

  const schema: OutputFieldSchema = { type: def.type || 'any' }
  if (def.description) schema.description = def.description

  // Recurse into nested object properties when present.
  if (def.properties && typeof def.properties === 'object') {
    const nested: Record<string, OutputFieldSchema> = {}
    for (const [name, child] of Object.entries(def.properties)) {
      nested[name] = extractOutputField(child)
    }
    schema.properties = nested
  }
  // Capture the element type for array outputs.
  if (def.items && typeof def.items === 'object') {
    schema.items = { type: def.items.type || 'any' }
  }
  return schema
}
/**
 * Derives output schema from a block's first available trigger.
 * Returns an empty map when the block has no enabled triggers.
 */
function extractTriggerOutputs(blockConfig: any): Record<string, OutputFieldSchema> {
  const outputs: Record<string, OutputFieldSchema> = {}
  if (!blockConfig.triggers?.enabled) return outputs
  const available = blockConfig.triggers?.available
  if (!available?.length) return outputs
  // The first available trigger's outputs serve as the baseline schema.
  const triggerId = available[0]
  if (!triggerId || !isTriggerValid(triggerId)) return outputs
  const trigger = getTrigger(triggerId)
  if (!trigger.outputs) return outputs
  for (const [key, def] of Object.entries(trigger.outputs)) {
    if (isHiddenFromDisplay(def)) continue
    outputs[key] = extractOutputField(def)
  }
  return outputs
}
/**
 * Resolves the output schema for a block, preferring (in order):
 * trigger outputs (trigger mode), operation-specific tool outputs,
 * then block-level outputs.
 */
function extractOutputs(
  blockConfig: any,
  operation?: string,
  triggerMode?: boolean
): Record<string, OutputFieldSchema> {
  // Trigger mode: the trigger's outputs are authoritative.
  if (triggerMode && blockConfig.triggers?.enabled) {
    return extractTriggerOutputs(blockConfig)
  }
  const outputs: Record<string, OutputFieldSchema> = {}
  // Operation-specific outputs come from the tool the operation resolves to.
  if (operation) {
    try {
      const selectTool = blockConfig.tools?.config?.tool
      if (typeof selectTool === 'function') {
        const tool = toolsRegistry[selectTool({ operation })]
        if (tool?.outputs) {
          for (const [key, def] of Object.entries(tool.outputs)) {
            if (!isHiddenFromDisplay(def)) outputs[key] = extractOutputField(def)
          }
          return outputs
        }
      }
    } catch {
      // Tool resolution failed — fall back to block-level outputs.
    }
  }
  for (const [key, def] of Object.entries(blockConfig.outputs ?? {})) {
    if (!isHiddenFromDisplay(def)) outputs[key] = extractOutputField(def)
  }
  return outputs
}
/**
 * Server tool returning the input/output schema for a single block type,
 * optionally narrowed to one operation or to trigger mode.
 *
 * Resolution order: hard-coded loop/parallel control-flow schemas, then
 * per-user integration allow-list check, then registry lookup with trigger
 * and operation validation. Throws on unknown blocks, disallowed
 * integrations, unsupported trigger mode, and invalid operations — the
 * exact error strings are surfaced to the copilot.
 */
export const getBlockConfigServerTool: BaseServerTool<
  GetBlockConfigInputType,
  GetBlockConfigResultType
> = {
  name: 'get_block_config',
  inputSchema: GetBlockConfigInput,
  outputSchema: GetBlockConfigResult,
  async execute(
    { blockType, operation, trigger }: GetBlockConfigInputType,
    context?: { userId: string }
  ): Promise<GetBlockConfigResultType> {
    const logger = createLogger('GetBlockConfigServerTool')
    logger.debug('Executing get_block_config', { blockType, operation, trigger })
    // Loop is a control-flow construct, not a registry block: return a
    // hand-written schema instead of consulting the registry.
    if (blockType === 'loop') {
      const result = {
        blockType,
        blockName: 'Loop',
        operation,
        trigger,
        inputs: {
          loopType: {
            type: 'string',
            description: 'Loop type',
            options: ['for', 'forEach', 'while', 'doWhile'],
            default: 'for',
          },
          iterations: {
            type: 'number',
            description: 'Number of iterations (for loop type "for")',
          },
          collection: {
            type: 'string',
            description: 'Collection to iterate (for loop type "forEach")',
          },
          condition: {
            type: 'string',
            description: 'Loop condition (for loop types "while" and "doWhile")',
          },
        },
        outputs: {},
      }
      return GetBlockConfigResult.parse(result)
    }
    // Parallel is likewise a control-flow construct with a static schema.
    if (blockType === 'parallel') {
      const result = {
        blockType,
        blockName: 'Parallel',
        operation,
        trigger,
        inputs: {
          parallelType: {
            type: 'string',
            description: 'Parallel type',
            options: ['count', 'collection'],
            default: 'count',
          },
          count: {
            type: 'number',
            description: 'Number of parallel branches (for parallel type "count")',
          },
          collection: {
            type: 'string',
            description: 'Collection to branch over (for parallel type "collection")',
          },
        },
        outputs: {},
      }
      return GetBlockConfigResult.parse(result)
    }
    // Per-user allow-list: when configured, only listed integrations are
    // visible to this user.
    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
    const allowedIntegrations = permissionConfig?.allowedIntegrations
    if (allowedIntegrations != null && !allowedIntegrations.includes(blockType)) {
      throw new Error(`Block "${blockType}" is not available`)
    }
    const blockConfig = blockRegistry[blockType]
    if (!blockConfig) {
      throw new Error(`Block not found: ${blockType}`)
    }
    // Validate trigger mode is supported for this block
    if (trigger && !blockConfig.triggers?.enabled && !blockConfig.triggerAllowed) {
      throw new Error(
        `Block "${blockType}" does not support trigger mode. Only blocks with triggers.enabled or triggerAllowed can be used in trigger mode.`
      )
    }
    // If operation is specified, validate it exists
    if (operation) {
      const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
      // Only statically-declared operation options can be validated here;
      // dynamic option functions are not evaluated for validation.
      if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
        const validOperations = operationSubBlock.options.map((o) =>
          typeof o === 'object' ? o.id : o
        )
        if (!validOperations.includes(operation)) {
          throw new Error(
            `Invalid operation "${operation}" for block "${blockType}". Valid operations: ${validOperations.join(', ')}`
          )
        }
      }
    }
    const subBlocks = Array.isArray(blockConfig.subBlocks) ? blockConfig.subBlocks : []
    const inputs = extractInputsFromSubBlocks(subBlocks, operation, trigger)
    const outputs = extractOutputs(blockConfig, operation, trigger)
    // Prefer the display name of the latest block version when one exists.
    const latestBlock = getLatestBlock(blockType)
    const displayName = latestBlock?.name ?? blockConfig.name
    const result = {
      blockType,
      blockName: displayName,
      operation,
      trigger,
      inputs,
      outputs,
    }
    return GetBlockConfigResult.parse(result)
  },
}

View File

@@ -1,130 +0,0 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
GetBlockOptionsInput,
type GetBlockOptionsInputType,
GetBlockOptionsResult,
type GetBlockOptionsResultType,
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { tools as toolsRegistry } from '@/tools/registry'
/**
 * Server tool listing the operations available on a block.
 *
 * For loop/parallel, returns hand-written control-flow operations. For
 * registry blocks, operations come from the block's 'operation' dropdown
 * when present (with descriptions resolved from the backing tools), or
 * from the block's accessible tools otherwise. Throws on unknown blocks
 * and on integrations excluded by the user's allow-list.
 */
export const getBlockOptionsServerTool: BaseServerTool<
  GetBlockOptionsInputType,
  GetBlockOptionsResultType
> = {
  name: 'get_block_options',
  inputSchema: GetBlockOptionsInput,
  outputSchema: GetBlockOptionsResult,
  async execute(
    { blockId }: GetBlockOptionsInputType,
    context?: { userId: string }
  ): Promise<GetBlockOptionsResultType> {
    const logger = createLogger('GetBlockOptionsServerTool')
    logger.debug('Executing get_block_options', { blockId })
    // Loop/parallel are control-flow constructs with static operation lists.
    if (blockId === 'loop') {
      const result = {
        blockId,
        blockName: 'Loop',
        operations: [
          { id: 'for', name: 'For', description: 'Run a fixed number of iterations.' },
          { id: 'forEach', name: 'For each', description: 'Iterate over a collection.' },
          { id: 'while', name: 'While', description: 'Repeat while a condition is true.' },
          {
            id: 'doWhile',
            name: 'Do while',
            description: 'Run once, then repeat while a condition is true.',
          },
        ],
      }
      return GetBlockOptionsResult.parse(result)
    }
    if (blockId === 'parallel') {
      const result = {
        blockId,
        blockName: 'Parallel',
        operations: [
          { id: 'count', name: 'Count', description: 'Run a fixed number of parallel branches.' },
          {
            id: 'collection',
            name: 'Collection',
            description: 'Run one branch per collection item.',
          },
        ],
      }
      return GetBlockOptionsResult.parse(result)
    }
    // Per-user allow-list: when configured, only listed integrations are
    // visible to this user.
    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
    const allowedIntegrations = permissionConfig?.allowedIntegrations
    if (allowedIntegrations != null && !allowedIntegrations.includes(blockId)) {
      throw new Error(`Block "${blockId}" is not available`)
    }
    const blockConfig = blockRegistry[blockId]
    if (!blockConfig) {
      throw new Error(`Block not found: ${blockId}`)
    }
    const operations: { id: string; name: string; description?: string }[] = []
    // Check if block has an operation dropdown to determine available operations
    const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
    if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
      // Block has operations - get tool info for each operation
      for (const option of operationSubBlock.options) {
        const opId = typeof option === 'object' ? option.id : option
        const opLabel = typeof option === 'object' ? option.label : option
        // Try to resolve the tool for this operation
        let toolDescription: string | undefined
        try {
          const toolSelector = blockConfig.tools?.config?.tool
          if (typeof toolSelector === 'function') {
            const toolId = toolSelector({ operation: opId })
            const tool = toolsRegistry[toolId]
            if (tool) {
              toolDescription = tool.description
            }
          }
        } catch {
          // Tool resolution failed, continue without description
        }
        operations.push({
          id: opId,
          name: opLabel || opId,
          description: toolDescription,
        })
      }
    } else {
      // No operation dropdown - list all accessible tools
      const accessibleTools = blockConfig.tools?.access || []
      for (const toolId of accessibleTools) {
        const tool = toolsRegistry[toolId]
        if (tool) {
          operations.push({
            id: toolId,
            name: tool.name || toolId,
            description: tool.description,
          })
        }
      }
    }
    // Prefer the display name of the latest block version when one exists.
    const latestBlock = getLatestBlock(blockId)
    const displayName = latestBlock?.name ?? blockConfig.name
    const result = {
      blockId,
      blockName: displayName,
      operations,
    }
    return GetBlockOptionsResult.parse(result)
  },
}

View File

@@ -1,68 +0,0 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { GetBlocksAndToolsInput, GetBlocksAndToolsResult } from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
/**
 * Server tool listing every block available to the requesting user, plus
 * the loop/parallel control-flow pseudo-blocks.
 */
export const getBlocksAndToolsServerTool: BaseServerTool<
  ReturnType<typeof GetBlocksAndToolsInput.parse>,
  ReturnType<typeof GetBlocksAndToolsResult.parse>
> = {
  name: 'get_blocks_and_tools',
  inputSchema: GetBlocksAndToolsInput,
  outputSchema: GetBlocksAndToolsResult,
  async execute(_args: unknown, context?: { userId: string }) {
    const logger = createLogger('GetBlocksAndToolsServerTool')
    logger.debug('Executing get_blocks_and_tools')
    // Per-user allow-list: when present, only listed integrations are shown.
    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
    const allowedIntegrations = permissionConfig?.allowedIntegrations
    type BlockListItem = {
      type: string
      name: string
      description?: string
      triggerAllowed?: boolean
    }
    const blocks: BlockListItem[] = []
    for (const [blockType, entry] of Object.entries(blockRegistry)) {
      const blockConfig = entry as BlockConfig
      if (blockConfig.hideFromToolbar) continue
      if (allowedIntegrations != null && !allowedIntegrations.includes(blockType)) continue
      blocks.push({
        type: blockType,
        name: blockConfig.name,
        description: blockConfig.longDescription,
        triggerAllowed: 'triggerAllowed' in blockConfig ? !!blockConfig.triggerAllowed : false,
      })
    }
    // Loop/parallel are control-flow constructs rather than registry blocks;
    // append them unless the registry already provided an entry.
    const specialBlocks: Record<string, { name: string; description: string }> = {
      loop: {
        name: 'Loop',
        description:
          'Control flow block for iterating over collections or repeating actions in a loop',
      },
      parallel: {
        name: 'Parallel',
        description: 'Control flow block for executing multiple branches simultaneously',
      },
    }
    for (const [blockType, info] of Object.entries(specialBlocks)) {
      if (blocks.some((b) => b.type === blockType)) continue
      blocks.push({ type: blockType, name: info.name, description: info.description })
    }
    return GetBlocksAndToolsResult.parse({ blocks })
  },
}

View File

@@ -1,8 +1,5 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool, ServerToolContext } from '@/lib/copilot/tools/server/base-tool'
import { getBlockConfigServerTool } from '@/lib/copilot/tools/server/blocks/get-block-config'
import { getBlockOptionsServerTool } from '@/lib/copilot/tools/server/blocks/get-block-options'
import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-and-tools'
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
@@ -22,10 +19,7 @@ const logger = createLogger('ServerToolRouter')
/** Registry of all server tools. Tools self-declare their validation schemas. */
const serverToolRegistry: Record<string, BaseServerTool> = {
[getBlocksAndToolsServerTool.name]: getBlocksAndToolsServerTool,
[getBlocksMetadataServerTool.name]: getBlocksMetadataServerTool,
[getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
[getBlockConfigServerTool.name]: getBlockConfigServerTool,
[getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
[editWorkflowServerTool.name]: editWorkflowServerTool,
[getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,

View File

@@ -7,22 +7,6 @@ export const ExecuteResponseSuccessSchema = z.object({
})
export type ExecuteResponseSuccess = z.infer<typeof ExecuteResponseSuccessSchema>
// get_blocks_and_tools
export const GetBlocksAndToolsInput = z.object({})
export const GetBlocksAndToolsResult = z.object({
blocks: z.array(
z
.object({
type: z.string(),
name: z.string(),
triggerAllowed: z.boolean().optional(),
longDescription: z.string().optional(),
})
.passthrough()
),
})
export type GetBlocksAndToolsResultType = z.infer<typeof GetBlocksAndToolsResult>
// get_blocks_metadata
export const GetBlocksMetadataInput = z.object({ blockIds: z.array(z.string()).min(1) })
export const GetBlocksMetadataResult = z.object({ metadata: z.record(z.any()) })
@@ -35,41 +19,6 @@ export const GetTriggerBlocksResult = z.object({
})
export type GetTriggerBlocksResultType = z.infer<typeof GetTriggerBlocksResult>
// get_block_options
export const GetBlockOptionsInput = z.object({
blockId: z.string(),
})
export const GetBlockOptionsResult = z.object({
blockId: z.string(),
blockName: z.string(),
operations: z.array(
z.object({
id: z.string(),
name: z.string(),
description: z.string().optional(),
})
),
})
export type GetBlockOptionsInputType = z.infer<typeof GetBlockOptionsInput>
export type GetBlockOptionsResultType = z.infer<typeof GetBlockOptionsResult>
// get_block_config
export const GetBlockConfigInput = z.object({
blockType: z.string(),
operation: z.string().optional(),
trigger: z.boolean().optional(),
})
export const GetBlockConfigResult = z.object({
blockType: z.string(),
blockName: z.string(),
operation: z.string().optional(),
trigger: z.boolean().optional(),
inputs: z.record(z.any()),
outputs: z.record(z.any()),
})
export type GetBlockConfigInputType = z.infer<typeof GetBlockConfigInput>
export type GetBlockConfigResultType = z.infer<typeof GetBlockConfigResult>
// knowledge_base - shared schema used by client tool, server tool, and registry
export const KnowledgeBaseArgsSchema = z.object({
operation: z.enum([

View File

@@ -0,0 +1,17 @@
/**
 * Public entry point for the copilot workspace VFS: re-exports the VFS
 * class and materializer, the read/search operation types, and the JSON
 * serializers used to populate VFS files.
 */
export { WorkspaceVFS, getOrMaterializeVFS } from '@/lib/copilot/vfs/workspace-vfs'
export type {
  GrepMatch,
  GrepOptions,
  GrepOutputMode,
  GrepCountEntry,
  ReadResult,
  DirEntry,
} from '@/lib/copilot/vfs/operations'
export {
  serializeBlockSchema,
  serializeDocuments,
  serializeIntegrationSchema,
  serializeKBMeta,
  serializeRecentExecutions,
  serializeWorkflowMeta,
} from '@/lib/copilot/vfs/serializers'

View File

@@ -0,0 +1,237 @@
/** A single grep hit: file path, 1-based line number, and the line text. */
export interface GrepMatch {
  path: string
  // 1-based line number; 0 when lineNumbers option is disabled.
  line: number
  content: string
}
/** How grep reports results: matching lines, file paths, or per-file counts. */
export type GrepOutputMode = 'content' | 'files_with_matches' | 'count'
/** Options controlling grep behavior. */
export interface GrepOptions {
  // Cap on the number of results returned (default 100).
  maxResults?: number
  outputMode?: GrepOutputMode
  ignoreCase?: boolean
  // When false, GrepMatch.line is reported as 0.
  lineNumbers?: boolean
  // Number of surrounding lines to include around each match (content mode).
  context?: number
}
/** Per-file count of matching lines for grep's 'count' mode. */
export interface GrepCountEntry {
  path: string
  count: number
}
/** Result of reading a VFS file: the (possibly windowed) content and the file's total line count. */
export interface ReadResult {
  content: string
  totalLines: number
}
/** A directory listing entry: a file or subdirectory name. */
export interface DirEntry {
  name: string
  type: 'file' | 'dir'
}
/**
 * Regex search over VFS file contents.
 *
 * Output modes:
 * - 'content' (default): individual line matches, with optional context lines
 * - 'files_with_matches': paths of files containing at least one match
 * - 'count': per-file counts of matching lines (not total occurrences)
 *
 * @param files - VFS path → content map
 * @param pattern - JS regex source; an invalid pattern yields an empty result
 * @param path - optional path-prefix filter
 * @param opts - search options (maxResults, outputMode, ignoreCase, lineNumbers, context)
 */
export function grep(
  files: Map<string, string>,
  pattern: string,
  path?: string,
  opts?: GrepOptions
): GrepMatch[] | string[] | GrepCountEntry[] {
  const maxResults = opts?.maxResults ?? 100
  const outputMode = opts?.outputMode ?? 'content'
  const ignoreCase = opts?.ignoreCase ?? false
  const showLineNumbers = opts?.lineNumbers ?? true
  const contextLines = opts?.context ?? 0
  // No 'g' flag: .test() on a non-global regex is stateless, so no
  // lastIndex bookkeeping is needed between calls.
  let regex: RegExp
  try {
    regex = new RegExp(pattern, ignoreCase ? 'i' : '')
  } catch {
    return []
  }
  if (outputMode === 'files_with_matches') {
    const matchingFiles: string[] = []
    for (const [filePath, content] of files) {
      if (path && !filePath.startsWith(path)) continue
      if (regex.test(content)) {
        matchingFiles.push(filePath)
        if (matchingFiles.length >= maxResults) break
      }
    }
    return matchingFiles
  }
  if (outputMode === 'count') {
    const counts: GrepCountEntry[] = []
    for (const [filePath, content] of files) {
      if (path && !filePath.startsWith(path)) continue
      const lines = content.split('\n')
      let count = 0
      for (const line of lines) {
        if (regex.test(line)) count++
      }
      if (count > 0) {
        counts.push({ path: filePath, count })
        if (counts.length >= maxResults) break
      }
    }
    return counts
  }
  // Default: 'content' mode
  const matches: GrepMatch[] = []
  for (const [filePath, content] of files) {
    if (path && !filePath.startsWith(path)) continue
    const lines = content.split('\n')
    for (let i = 0; i < lines.length; i++) {
      if (regex.test(lines[i])) {
        if (contextLines > 0) {
          // Emit the match together with its surrounding context lines.
          const start = Math.max(0, i - contextLines)
          const end = Math.min(lines.length - 1, i + contextLines)
          for (let j = start; j <= end; j++) {
            matches.push({
              path: filePath,
              line: showLineNumbers ? j + 1 : 0,
              content: lines[j],
            })
          }
        } else {
          matches.push({
            path: filePath,
            line: showLineNumbers ? i + 1 : 0,
            content: lines[i],
          })
        }
        if (matches.length >= maxResults) return matches
      }
    }
  }
  return matches
}
/**
 * Translate a glob pattern into an anchored RegExp.
 * `*` matches within a path segment, `**` spans segments, `?` matches a
 * single non-separator character.
 */
function globToRegExp(pattern: string): RegExp {
  const specials = '.+^${}()|[]\\'
  const parts: string[] = ['^']
  let pos = 0
  while (pos < pattern.length) {
    const ch = pattern[pos]
    if (ch === '*' && pattern[pos + 1] === '*') {
      if (pattern[pos + 2] === '/') {
        // '**/': zero or more whole path segments
        parts.push('(?:.+/)?')
        pos += 3
      } else {
        // bare '**': anything, including separators
        parts.push('.*')
        pos += 2
      }
    } else if (ch === '*') {
      // '*': anything within one path segment
      parts.push('[^/]*')
      pos += 1
    } else if (ch === '?') {
      parts.push('[^/]')
      pos += 1
    } else {
      // Escape regex metacharacters; pass other characters through.
      parts.push(specials.includes(ch) ? `\\${ch}` : ch)
      pos += 1
    }
  }
  parts.push('$')
  return new RegExp(parts.join(''))
}
/**
 * Glob pattern matching against VFS file paths.
 * @returns sorted list of matching file paths
 */
export function glob(files: Map<string, string>, pattern: string): string[] {
  const matcher = globToRegExp(pattern)
  const matched = [...files.keys()].filter((filePath) => matcher.test(filePath))
  return matched.sort()
}
/**
 * Read a VFS file's content, optionally windowed by 0-based line offset
 * and line limit.
 * @returns the (possibly sliced) content plus the file's total line count,
 * or null when the path does not exist
 */
export function read(
  files: Map<string, string>,
  path: string,
  offset?: number,
  limit?: number
): ReadResult | null {
  const content = files.get(path)
  if (content === undefined) return null
  const lines = content.split('\n')
  const totalLines = lines.length
  // Fast path: whole-file read when no windowing was requested.
  if (offset === undefined && limit === undefined) {
    return { content, totalLines }
  }
  const start = offset ?? 0
  const end = limit === undefined ? lines.length : start + limit
  return { content: lines.slice(start, end).join('\n'), totalLines }
}
/**
 * List the immediate entries (files and subdirectories) under a VFS path.
 * Directories sort before files; names sort lexicographically.
 * An empty path (or '/') lists the VFS root — previously these always
 * produced an empty result because '' normalized to the prefix '/'.
 */
export function list(files: Map<string, string>, path: string): DirEntry[] {
  // Normalize to a trailing-slash prefix; '' means the root (no prefix).
  const prefix =
    path === '' || path === '/' ? '' : path.endsWith('/') ? path : `${path}/`
  const seen = new Set<string>()
  const entries: DirEntry[] = []
  for (const filePath of files.keys()) {
    if (prefix && !filePath.startsWith(prefix)) continue
    const remainder = filePath.slice(prefix.length)
    if (!remainder) continue
    // The first path component of the remainder is the entry at this level;
    // a '/' in the remainder means it is a subdirectory.
    const slashIndex = remainder.indexOf('/')
    const name = slashIndex === -1 ? remainder : remainder.slice(0, slashIndex)
    if (seen.has(name)) continue
    seen.add(name)
    entries.push({ name, type: slashIndex === -1 ? 'file' : 'dir' })
  }
  return entries.sort((a, b) => {
    if (a.type !== b.type) return a.type === 'dir' ? -1 : 1
    return a.name.localeCompare(b.name)
  })
}

View File

@@ -0,0 +1,282 @@
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Serialize workflow metadata for the VFS meta.json file.
 * Dates become ISO-8601 strings; empty descriptions and absent timestamps
 * are omitted from the JSON output.
 */
export function serializeWorkflowMeta(wf: {
  id: string
  name: string
  description?: string | null
  isDeployed: boolean
  deployedAt?: Date | null
  runCount: number
  lastRunAt?: Date | null
  createdAt: Date
  updatedAt: Date
}): string {
  const meta = {
    id: wf.id,
    name: wf.name,
    description: wf.description || undefined,
    isDeployed: wf.isDeployed,
    deployedAt: wf.deployedAt?.toISOString(),
    runCount: wf.runCount,
    lastRunAt: wf.lastRunAt?.toISOString(),
    createdAt: wf.createdAt.toISOString(),
    updatedAt: wf.updatedAt.toISOString(),
  }
  return JSON.stringify(meta, null, 2)
}
/**
 * Serialize execution logs for VFS executions.json.
 * Produces a compact per-execution summary from recent execution log rows.
 */
export function serializeRecentExecutions(
  executions: Array<{
    id: string
    executionId: string
    status: string
    trigger: string
    startedAt: Date
    endedAt?: Date | null
    totalDurationMs?: number | null
  }>
): string {
  const summaries = executions.map((execution) => ({
    executionId: execution.executionId,
    status: execution.status,
    trigger: execution.trigger,
    startedAt: execution.startedAt.toISOString(),
    endedAt: execution.endedAt?.toISOString(),
    durationMs: execution.totalDurationMs,
  }))
  return JSON.stringify(summaries, null, 2)
}
/**
 * Serialize knowledge base metadata for the VFS meta.json file.
 * Empty descriptions are omitted; dates become ISO-8601 strings.
 */
export function serializeKBMeta(kb: {
  id: string
  name: string
  description?: string | null
  embeddingModel: string
  embeddingDimension: number
  tokenCount: number
  createdAt: Date
  updatedAt: Date
  documentCount: number
}): string {
  const meta = {
    id: kb.id,
    name: kb.name,
    description: kb.description || undefined,
    embeddingModel: kb.embeddingModel,
    embeddingDimension: kb.embeddingDimension,
    tokenCount: kb.tokenCount,
    documentCount: kb.documentCount,
    createdAt: kb.createdAt.toISOString(),
    updatedAt: kb.updatedAt.toISOString(),
  }
  return JSON.stringify(meta, null, 2)
}
/**
 * Serialize the documents list for VFS documents.json.
 * Emits metadata only — document content is intentionally excluded.
 */
export function serializeDocuments(
  docs: Array<{
    id: string
    filename: string
    fileSize: number
    mimeType: string
    chunkCount: number
    tokenCount: number
    processingStatus: string
    enabled: boolean
    uploadedAt: Date
  }>
): string {
  const items = docs.map((doc) => ({
    id: doc.id,
    filename: doc.filename,
    fileSize: doc.fileSize,
    mimeType: doc.mimeType,
    chunkCount: doc.chunkCount,
    tokenCount: doc.tokenCount,
    processingStatus: doc.processingStatus,
    enabled: doc.enabled,
    uploadedAt: doc.uploadedAt.toISOString(),
  }))
  return JSON.stringify(items, null, 2)
}
/**
 * Serialize a SubBlockConfig for the VFS component schema.
 * Strips functions and UI-only fields; only an unconditional
 * `required: true` is surfaced (conditional requirement objects are dropped).
 */
function serializeSubBlock(sb: SubBlockConfig): Record<string, unknown> {
  return {
    id: sb.id,
    type: sb.type,
    ...(sb.title ? { title: sb.title } : {}),
    ...(sb.required === true ? { required: true } : {}),
    ...(sb.defaultValue !== undefined ? { defaultValue: sb.defaultValue } : {}),
    ...(sb.mode ? { mode: sb.mode } : {}),
    ...(sb.canonicalParamId ? { canonicalParamId: sb.canonicalParamId } : {}),
  }
}
/**
 * Serialize a block schema for VFS components/blocks/{type}.json.
 * The UI-only 'visualization' output is excluded, and falsy optional flags
 * are omitted from the JSON.
 */
export function serializeBlockSchema(block: BlockConfig): string {
  // Flatten outputs to { type, description? }, dropping 'visualization'.
  const outputs: Record<string, unknown> = {}
  for (const [key, val] of Object.entries(block.outputs)) {
    if (key === 'visualization') continue
    outputs[key] =
      typeof val === 'string'
        ? { type: val }
        : { type: val.type, description: (val as { description?: string }).description }
  }
  const schema = {
    type: block.type,
    name: block.name,
    description: block.description,
    category: block.category,
    longDescription: block.longDescription || undefined,
    bestPractices: block.bestPractices || undefined,
    triggerAllowed: block.triggerAllowed || undefined,
    singleInstance: block.singleInstance || undefined,
    tools: block.tools.access,
    subBlocks: block.subBlocks.map(serializeSubBlock),
    inputs: block.inputs,
    outputs,
  }
  return JSON.stringify(schema, null, 2)
}
/**
 * Serialize OAuth credentials for VFS environment/credentials.json.
 * Exposes which integrations are connected — provider ids, scopes, and
 * connection times only. Tokens are never included.
 */
export function serializeCredentials(
  accounts: Array<{
    providerId: string
    scope: string | null
    createdAt: Date
  }>
): string {
  const connections = accounts.map((account) => ({
    provider: account.providerId,
    scope: account.scope || undefined,
    connectedAt: account.createdAt.toISOString(),
  }))
  return JSON.stringify(connections, null, 2)
}
/**
 * Serialize API keys for VFS environment/api-keys.json.
 * Exposes key names, types, and lifecycle timestamps — never key values.
 */
export function serializeApiKeys(
  keys: Array<{
    id: string
    name: string
    type: string
    lastUsed: Date | null
    createdAt: Date
    expiresAt: Date | null
  }>
): string {
  const summaries = keys.map((key) => ({
    id: key.id,
    name: key.name,
    type: key.type,
    lastUsed: key.lastUsed?.toISOString(),
    createdAt: key.createdAt.toISOString(),
    expiresAt: key.expiresAt?.toISOString(),
  }))
  return JSON.stringify(summaries, null, 2)
}
/**
 * Serialize environment variables for VFS environment/variables.json.
 * Exposes variable NAMES only — values are never written to the VFS.
 */
export function serializeEnvironmentVariables(
  personalVarNames: string[],
  workspaceVarNames: string[]
): string {
  const payload = {
    personal: personalVarNames,
    workspace: workspaceVarNames,
  }
  return JSON.stringify(payload, null, 2)
}
/**
 * Serialize an integration/tool schema for
 * VFS components/integrations/{service}/{operation}.json.
 * Only the OAuth requirement flag and provider are exposed — no secrets.
 */
export function serializeIntegrationSchema(tool: ToolConfig): string {
  const params: Record<string, unknown> = {}
  for (const [key, val] of Object.entries(tool.params)) {
    params[key] = {
      type: val.type,
      required: val.required,
      description: val.description,
      default: val.default,
    }
  }
  let outputs: Record<string, unknown> | undefined
  if (tool.outputs) {
    outputs = {}
    for (const [key, val] of Object.entries(tool.outputs)) {
      outputs[key] = { type: val.type, description: val.description }
    }
  }
  const schema = {
    id: tool.id,
    name: tool.name,
    description: tool.description,
    version: tool.version,
    oauth: tool.oauth
      ? { required: tool.oauth.required, provider: tool.oauth.provider }
      : undefined,
    params,
    outputs,
  }
  return JSON.stringify(schema, null, 2)
}

View File

@@ -0,0 +1,424 @@
import { db } from '@sim/db'
import {
account,
apiKey,
document,
environment,
knowledgeBase,
workflow,
workspaceEnvironment,
workflowExecutionLogs,
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, count, desc, eq, isNull } from 'drizzle-orm'
import { getAllBlocks } from '@/blocks/registry'
import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
import { tools as toolRegistry } from '@/tools/registry'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
import type { GrepMatch, GrepOptions, ReadResult, DirEntry } from '@/lib/copilot/vfs/operations'
import * as ops from '@/lib/copilot/vfs/operations'
import {
serializeApiKeys,
serializeBlockSchema,
serializeCredentials,
serializeDocuments,
serializeEnvironmentVariables,
serializeIntegrationSchema,
serializeKBMeta,
serializeRecentExecutions,
serializeWorkflowMeta,
} from '@/lib/copilot/vfs/serializers'
const logger = createLogger('WorkspaceVFS')
/** Cache entry for a materialized VFS */
interface VFSCacheEntry {
  vfs: WorkspaceVFS
  // Epoch milliseconds after which this entry is considered stale.
  expiresAt: number
}
/** Module-level VFS cache keyed by workspaceId */
const vfsCache = new Map<string, VFSCacheEntry>()
/** Cache TTL in milliseconds (30 seconds) */
const VFS_CACHE_TTL_MS = 30_000
/** Static component files, computed once and shared across all VFS instances */
let staticComponentFiles: Map<string, string> | null = null
/**
* Build the static component files from block and tool registries.
* This only needs to happen once per process.
*
* Integration paths are derived deterministically from the block registry's
* `tools.access` arrays rather than splitting tool IDs on underscores.
* Each block declares which tools it owns, and the block type (minus version
* suffix) becomes the service directory name.
*/
function getStaticComponentFiles(): Map<string, string> {
if (staticComponentFiles) return staticComponentFiles
staticComponentFiles = new Map()
const allBlocks = getAllBlocks()
for (const block of allBlocks) {
const path = `components/blocks/${block.type}.json`
staticComponentFiles.set(path, serializeBlockSchema(block))
}
// Build a reverse index: tool ID → service name from block registry.
// The block type (stripped of version suffix) is used as the service directory.
const toolToService = new Map<string, string>()
for (const block of allBlocks) {
if (!block.tools?.access) continue
const service = stripVersionSuffix(block.type)
for (const toolId of block.tools.access) {
toolToService.set(toolId, service)
}
}
const latestTools = getLatestVersionTools(toolRegistry)
let integrationCount = 0
for (const [toolId, tool] of Object.entries(latestTools)) {
const baseName = stripVersionSuffix(toolId)
const service = toolToService.get(toolId) ?? toolToService.get(baseName)
if (!service) {
logger.debug('Tool not associated with any block, skipping VFS entry', { toolId })
continue
}
// Derive operation name by stripping the service prefix
const prefix = `${service}_`
const operation = baseName.startsWith(prefix)
? baseName.slice(prefix.length)
: baseName
const path = `components/integrations/${service}/${operation}.json`
staticComponentFiles.set(path, serializeIntegrationSchema(tool))
integrationCount++
}
logger.info('Static component files built', {
blocks: allBlocks.length,
integrations: integrationCount,
})
return staticComponentFiles
}
/**
 * Virtual Filesystem that materializes workspace data into an in-memory Map.
 *
 * Structure:
 * workflows/{name}/meta.json
 * workflows/{name}/blocks.json
 * workflows/{name}/edges.json
 * workflows/{name}/executions.json
 * knowledgebases/{name}/meta.json
 * knowledgebases/{name}/documents.json
 * environment/credentials.json
 * environment/api-keys.json
 * environment/variables.json
 * components/blocks/{type}.json
 * components/integrations/{service}/{operation}.json
 *
 * All content is pre-serialized JSON; grep/glob/read/list operate purely on
 * the in-memory map and never hit the database after materialization.
 */
export class WorkspaceVFS {
  // Flat map of VFS path -> serialized file content; replaced wholesale by materialize().
  private files: Map<string, string> = new Map()

  /**
   * Materialize workspace data from DB into the VFS.
   * Queries workflows, knowledge bases, and environment data concurrently,
   * then merges the process-wide static component schemas on top.
   *
   * @param workspaceId - Workspace whose workflows, KBs, and env are materialized.
   * @param userId - User whose OAuth accounts and personal env vars are included.
   */
  async materialize(workspaceId: string, userId: string): Promise<void> {
    const start = Date.now()
    // Start from a fresh map so files from a previous materialization cannot leak.
    this.files = new Map()
    await Promise.all([
      this.materializeWorkflows(workspaceId, userId),
      this.materializeKnowledgeBases(workspaceId),
      this.materializeEnvironment(workspaceId, userId),
    ])
    // Merge static component files (block/integration schemas shared across workspaces)
    for (const [path, content] of getStaticComponentFiles()) {
      this.files.set(path, content)
    }
    logger.info('VFS materialized', {
      workspaceId,
      fileCount: this.files.size,
      durationMs: Date.now() - start,
    })
  }

  /** Search materialized file contents; pattern/option semantics are delegated to ops.grep. */
  grep(
    pattern: string,
    path?: string,
    options?: GrepOptions
  ): GrepMatch[] | string[] | ops.GrepCountEntry[] {
    return ops.grep(this.files, pattern, path, options)
  }

  /** Match VFS paths against a glob pattern (delegates to ops.glob). */
  glob(pattern: string): string[] {
    return ops.glob(this.files, pattern)
  }

  /** Read a materialized file, optionally windowed by offset/limit (delegates to ops.read). */
  read(path: string, offset?: number, limit?: number): ReadResult | null {
    return ops.read(this.files, path, offset, limit)
  }

  /** List entries under a VFS directory path (delegates to ops.list). */
  list(path: string): DirEntry[] {
    return ops.list(this.files, path)
  }

  /**
   * Materialize all workflows in the workspace.
   * Always writes meta.json per workflow; blocks.json/edges.json and
   * executions.json are best-effort — per-workflow failures are logged and
   * skipped so one broken workflow does not abort the whole materialization.
   *
   * NOTE(review): userId is currently unused in this method — confirm whether
   * per-user workflow filtering was intended here.
   */
  private async materializeWorkflows(workspaceId: string, userId: string): Promise<void> {
    const workflowRows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        isDeployed: workflow.isDeployed,
        deployedAt: workflow.deployedAt,
        runCount: workflow.runCount,
        lastRunAt: workflow.lastRunAt,
        createdAt: workflow.createdAt,
        updatedAt: workflow.updatedAt,
      })
      .from(workflow)
      .where(eq(workflow.workspaceId, workspaceId))
    // Load normalized data + executions in parallel for all workflows
    await Promise.all(
      workflowRows.map(async (wf) => {
        // NOTE(review): sanitizeName can map distinct workflow names to the same
        // path segment, in which case later entries overwrite earlier ones.
        const safeName = sanitizeName(wf.name)
        const prefix = `workflows/${safeName}/`
        // Meta
        this.files.set(`${prefix}meta.json`, serializeWorkflowMeta(wf))
        // Blocks + edges from normalized tables
        try {
          const normalized = await loadWorkflowFromNormalizedTables(wf.id)
          if (normalized) {
            const sanitized = sanitizeForCopilot({
              blocks: normalized.blocks,
              edges: normalized.edges,
              loops: normalized.loops,
              parallels: normalized.parallels,
            } as any)
            this.files.set(`${prefix}blocks.json`, JSON.stringify(sanitized, null, 2))
            // Edges as simple source->target list
            const edges = normalized.edges.map((e) => ({
              source: e.source,
              target: e.target,
              // Empty-string handles are normalized to undefined so
              // JSON.stringify drops them from the output.
              sourceHandle: e.sourceHandle || undefined,
              targetHandle: e.targetHandle || undefined,
            }))
            this.files.set(`${prefix}edges.json`, JSON.stringify(edges, null, 2))
          }
        } catch (err) {
          logger.warn('Failed to load workflow blocks', {
            workflowId: wf.id,
            error: err instanceof Error ? err.message : String(err),
          })
        }
        // Recent executions (last 5)
        try {
          const execRows = await db
            .select({
              id: workflowExecutionLogs.id,
              executionId: workflowExecutionLogs.executionId,
              status: workflowExecutionLogs.status,
              trigger: workflowExecutionLogs.trigger,
              startedAt: workflowExecutionLogs.startedAt,
              endedAt: workflowExecutionLogs.endedAt,
              totalDurationMs: workflowExecutionLogs.totalDurationMs,
            })
            .from(workflowExecutionLogs)
            .where(eq(workflowExecutionLogs.workflowId, wf.id))
            .orderBy(desc(workflowExecutionLogs.startedAt))
            .limit(5)
          // Only create executions.json when at least one run is on record.
          if (execRows.length > 0) {
            this.files.set(`${prefix}executions.json`, serializeRecentExecutions(execRows))
          }
        } catch (err) {
          logger.warn('Failed to load execution logs', {
            workflowId: wf.id,
            error: err instanceof Error ? err.message : String(err),
          })
        }
      })
    )
  }

  /**
   * Materialize all knowledge bases in the workspace (soft-deleted rows excluded
   * via deletedAt). Writes meta.json with a live document count and, when any
   * non-deleted documents exist, documents.json with per-document metadata.
   */
  private async materializeKnowledgeBases(workspaceId: string): Promise<void> {
    const kbRows = await db
      .select({
        id: knowledgeBase.id,
        name: knowledgeBase.name,
        description: knowledgeBase.description,
        embeddingModel: knowledgeBase.embeddingModel,
        embeddingDimension: knowledgeBase.embeddingDimension,
        tokenCount: knowledgeBase.tokenCount,
        createdAt: knowledgeBase.createdAt,
        updatedAt: knowledgeBase.updatedAt,
      })
      .from(knowledgeBase)
      .where(and(eq(knowledgeBase.workspaceId, workspaceId), isNull(knowledgeBase.deletedAt)))
    await Promise.all(
      kbRows.map(async (kb) => {
        const safeName = sanitizeName(kb.name)
        const prefix = `knowledgebases/${safeName}/`
        // Get document count
        const [docCountRow] = await db
          .select({ count: count() })
          .from(document)
          .where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))
        this.files.set(
          `${prefix}meta.json`,
          serializeKBMeta({
            ...kb,
            documentCount: docCountRow?.count ?? 0,
          })
        )
        // Documents metadata
        const docRows = await db
          .select({
            id: document.id,
            filename: document.filename,
            fileSize: document.fileSize,
            mimeType: document.mimeType,
            chunkCount: document.chunkCount,
            tokenCount: document.tokenCount,
            processingStatus: document.processingStatus,
            enabled: document.enabled,
            uploadedAt: document.uploadedAt,
          })
          .from(document)
          .where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))
        if (docRows.length > 0) {
          this.files.set(`${prefix}documents.json`, serializeDocuments(docRows))
        }
      })
    )
  }

  /**
   * Materialize environment data: credentials, API keys, env variable names.
   * Only non-secret metadata is selected: OAuth provider IDs/scopes (no tokens),
   * API key names/types (no key values), and environment variable NAMES only.
   * Any failure is logged and swallowed so the rest of the VFS still builds.
   */
  private async materializeEnvironment(workspaceId: string, userId: string): Promise<void> {
    try {
      // OAuth credentials — which integrations are connected (no tokens)
      const oauthRows = await db
        .select({
          providerId: account.providerId,
          scope: account.scope,
          createdAt: account.createdAt,
        })
        .from(account)
        .where(eq(account.userId, userId))
      this.files.set('environment/credentials.json', serializeCredentials(oauthRows))
      // API keys — names and types (no key values)
      const apiKeyRows = await db
        .select({
          id: apiKey.id,
          name: apiKey.name,
          type: apiKey.type,
          lastUsed: apiKey.lastUsed,
          createdAt: apiKey.createdAt,
          expiresAt: apiKey.expiresAt,
        })
        .from(apiKey)
        .where(eq(apiKey.workspaceId, workspaceId))
      this.files.set('environment/api-keys.json', serializeApiKeys(apiKeyRows))
      // Environment variables — names only (no values)
      let personalVarNames: string[] = []
      let workspaceVarNames: string[] = []
      const [personalEnv] = await db
        .select({ variables: environment.variables })
        .from(environment)
        .where(eq(environment.userId, userId))
      // Guard the object check: the stored JSON column may be null/non-object.
      if (personalEnv?.variables && typeof personalEnv.variables === 'object') {
        personalVarNames = Object.keys(personalEnv.variables as Record<string, unknown>)
      }
      const [workspaceEnv] = await db
        .select({ variables: workspaceEnvironment.variables })
        .from(workspaceEnvironment)
        .where(eq(workspaceEnvironment.workspaceId, workspaceId))
      if (workspaceEnv?.variables && typeof workspaceEnv.variables === 'object') {
        workspaceVarNames = Object.keys(workspaceEnv.variables as Record<string, unknown>)
      }
      this.files.set(
        'environment/variables.json',
        serializeEnvironmentVariables(personalVarNames, workspaceVarNames)
      )
    } catch (err) {
      logger.warn('Failed to materialize environment data', {
        workspaceId,
        error: err instanceof Error ? err.message : String(err),
      })
    }
  }
}
/**
 * Get the cached VFS for a workspace, materializing a fresh one when no cache
 * entry exists or the existing entry has passed its TTL.
 */
export async function getOrMaterializeVFS(
  workspaceId: string,
  userId: string
): Promise<WorkspaceVFS> {
  const now = Date.now()
  const entry = vfsCache.get(workspaceId)
  // Serve from cache while the entry is still within its TTL window.
  if (entry !== undefined && entry.expiresAt > now) {
    return entry.vfs
  }
  const fresh = new WorkspaceVFS()
  await fresh.materialize(workspaceId, userId)
  vfsCache.set(workspaceId, { vfs: fresh, expiresAt: now + VFS_CACHE_TTL_MS })
  return fresh
}
/**
 * Sanitize a name for use as a VFS path segment: lowercase, collapse every
 * run of non-alphanumeric characters into a single hyphen, strip edge
 * hyphens, and cap the result at 64 characters.
 *
 * NOTE(review): distinct display names can collapse to the same segment
 * (e.g. "My Flow" and "my-flow"), in which case later VFS entries overwrite
 * earlier ones — confirm this is acceptable.
 */
function sanitizeName(name: string): string {
  const hyphenated = name
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
  // Runs were collapsed above, so at most one hyphen can sit on each edge.
  return hyphenated.replace(/^-/, '').replace(/-$/, '').slice(0, 64)
}

View File

@@ -0,0 +1,79 @@
/**
 * Build the system prompt for workspace-level chat.
 *
 * This string is sent as `systemPrompt` in the Go request payload, where it
 * overrides the default agent prompt (see copilot/internal/chat/service.go:300-303).
 *
 * Only subagents available in agent mode are referenced (build and discovery
 * are excluded from agent mode tools in the Go backend).
 */
export function getWorkspaceChatSystemPrompt(): string {
  // Stamp the prompt with today's UTC date (YYYY-MM-DD prefix of the ISO string).
  return `# Sim Workspace Assistant
Current Date: ${new Date().toISOString().slice(0, 10)}
You are the Sim workspace assistant — a helpful AI that manages an entire workspace of workflows. The user is chatting from the workspace level, not from within a specific workflow.
## Your Role
You help users with their workspace: answering questions, building and debugging workflows, managing integrations, and providing guidance. You delegate complex tasks to specialized subagents.
## Platform Knowledge
Sim is a workflow automation platform. Workflows are visual pipelines of blocks (Agent, Function, Condition, Router, API, etc.). Workflows can be triggered manually, via API, webhooks, or schedules. They can be deployed as APIs, Chat UIs, or MCP tools.
## Subagents
You have access to these specialized subagents. Call them by name to delegate tasks:
| Subagent | Purpose | When to Use |
|----------|---------|-------------|
| **plan** | Gather info, create execution plans | Building new workflows, planning fixes |
| **edit** | Execute plans, make workflow changes | ONLY after plan returns steps |
| **debug** | Investigate errors, provide diagnosis | User reports something broken |
| **test** | Run workflow, verify results | After edits to validate |
| **deploy** | Deploy/undeploy workflows | Publish as API, Chat, or MCP |
| **workflow** | Env vars, settings, list workflows | Configuration and workflow discovery |
| **auth** | Connect OAuth integrations | Slack, Gmail, Google Sheets, etc. |
| **knowledge** | Create/query knowledge bases | RAG, document search |
| **research** | External API docs, best practices | Stripe, Twilio, etc. |
| **info** | Block details, outputs, variables | Quick lookups about workflow state |
| **superagent** | Interact with external services NOW | Read emails, send Slack, check calendar |
## Direct Tools
- **search_online** — Search the web for information.
- **memory_file_read(file_path)** — Read a persistent memory file.
- **memory_file_write(file_path, content)** — Write/update a persistent memory file.
- **memory_file_list()** — List all memory files.
## Memory Management
You have persistent memory files that survive across conversations:
- **SOUL.md** — Your personality and behavioral guidelines. Read this at the start of conversations.
- **USER.md** — Information about the user. Update as you learn preferences and context.
- **MEMORY.md** — Key learnings, decisions, and important context. Update after significant interactions.
**At conversation start**: Read SOUL.md and MEMORY.md to load your persistent context.
**During conversation**: When the user shares important preferences or you make key decisions, update the relevant file.
**Important**: Only write to files when there's genuinely new, important information. Don't update on every message.
## Decision Flow
- User says something broke → **debug()** first, then plan() → edit()
- User wants to build/automate something → **plan()** → edit() → test()
- User wants to DO something NOW (send email, check calendar) → **superagent()**
- User wants to deploy → **deploy()**
- User asks about their workflows → **workflow()** or **info()**
- User needs OAuth → **auth()**
## Important
- **You work at the workspace level.** When a user mentions a workflow, ask for the workflow name or ID if not provided.
- **Always delegate complex work** to the appropriate subagent.
- **Debug first** when something doesn't work — don't guess.
- Be concise and results-focused.
- Think internally, speak to the user only when the task is complete or you need input.
`
}

View File

@@ -107,3 +107,5 @@ if (typeof process !== 'undefined') {
logger.info(`S3 copilot bucket: ${env.S3_COPILOT_BUCKET_NAME}`)
}
}
export default ensureUploadsDirectory

View File

@@ -326,18 +326,6 @@ const nextConfig: NextConfig = {
return redirects
},
async rewrites() {
return [
...(isHosted
? [
{
source: '/r/:shortCode',
destination: 'https://go.trybeluga.ai/:shortCode',
},
]
: []),
]
},
}
export default nextConfig

View File

@@ -14,7 +14,6 @@ import {
supportsNativeStructuredOutputs,
} from '@/providers/models'
import type { ProviderRequest, ProviderResponse, TimeSegment } from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -843,11 +842,15 @@ export async function executeAnthropicProviderRequest(
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
}
@@ -1296,10 +1299,14 @@ export async function executeAnthropicProviderRequest(
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
}

View File

@@ -30,7 +30,6 @@ import type {
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -252,7 +251,7 @@ async function executeChatCompletionsRequest(
output: currentResponse.usage?.completion_tokens || 0,
total: currentResponse.usage?.total_tokens || 0,
}
const toolCalls: FunctionCallResponse[] = []
const toolCalls: (FunctionCallResponse & { success: boolean })[] = []
const toolResults: Record<string, unknown>[] = []
const currentMessages = [...allMessages]
let iterationCount = 0
@@ -578,11 +577,15 @@ async function executeChatCompletionsRequest(
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore - Adding timing property to the error
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
}

View File

@@ -22,13 +22,11 @@ import {
} from '@/providers/bedrock/utils'
import { getProviderDefaultModel, getProviderModels } from '@/providers/models'
import type {
FunctionCallResponse,
ProviderConfig,
ProviderRequest,
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -421,8 +419,8 @@ export const bedrockProvider: ProviderConfig = {
pricing: initialCost.pricing,
}
const toolCalls: FunctionCallResponse[] = []
const toolResults: Record<string, unknown>[] = []
const toolCalls: any[] = []
const toolResults: any[] = []
const currentMessages = [...messages]
let iterationCount = 0
let hasUsedForcedTool = false
@@ -563,7 +561,7 @@ export const bedrockProvider: ProviderConfig = {
let resultContent: any
if (result.success) {
toolResults.push(result.output!)
toolResults.push(result.output)
resultContent = result.output
} else {
resultContent = {
@@ -905,11 +903,15 @@ export const bedrockProvider: ProviderConfig = {
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -11,7 +11,6 @@ import type {
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -540,11 +539,15 @@ export const cerebrasProvider: ProviderConfig = {
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore - Adding timing property to error for debugging
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -10,7 +10,6 @@ import type {
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -539,11 +538,15 @@ export const deepseekProvider: ProviderConfig = {
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -10,7 +10,6 @@ import type {
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -497,11 +496,15 @@ export const groqProvider: ProviderConfig = {
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -11,7 +11,6 @@ import type {
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -552,11 +551,15 @@ export const mistralProvider: ProviderConfig = {
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore - Adding timing property to error for debugging
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -12,7 +12,6 @@ import type {
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import { calculateCost, prepareToolExecution } from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers'
import { executeTool } from '@/tools'
@@ -555,11 +554,15 @@ export const ollamaProvider: ProviderConfig = {
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -3,7 +3,6 @@ import type OpenAI from 'openai'
import type { StreamingExecution } from '@/executor/types'
import { MAX_TOOL_ITERATIONS } from '@/providers'
import type { Message, ProviderRequest, ProviderResponse, TimeSegment } from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -807,10 +806,14 @@ export async function executeResponsesProviderRequest(
duration: totalDuration,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore - Adding timing property to the error
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
}

View File

@@ -10,14 +10,11 @@ import {
supportsNativeStructuredOutputs,
} from '@/providers/openrouter/utils'
import type {
FunctionCallResponse,
Message,
ProviderConfig,
ProviderRequest,
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
generateSchemaInstructions,
@@ -93,7 +90,7 @@ export const openRouterProvider: ProviderConfig = {
stream: !!request.stream,
})
const allMessages: Message[] = []
const allMessages = [] as any[]
if (request.systemPrompt) {
allMessages.push({ role: 'system', content: request.systemPrompt })
@@ -240,8 +237,8 @@ export const openRouterProvider: ProviderConfig = {
output: currentResponse.usage?.completion_tokens || 0,
total: currentResponse.usage?.total_tokens || 0,
}
const toolCalls: FunctionCallResponse[] = []
const toolResults: Record<string, unknown>[] = []
const toolCalls = [] as any[]
const toolResults = [] as any[]
const currentMessages = [...allMessages]
let iterationCount = 0
let modelTime = firstResponseTime
@@ -355,7 +352,7 @@ export const openRouterProvider: ProviderConfig = {
let resultContent: any
if (result.success) {
toolResults.push(result.output!)
toolResults.push(result.output)
resultContent = result.output
} else {
resultContent = {
@@ -596,11 +593,14 @@ export const openRouterProvider: ProviderConfig = {
}
logger.error('Error in OpenRouter request:', errorDetails)
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -59,7 +59,6 @@ export interface FunctionCallResponse {
result?: Record<string, any>
output?: Record<string, any>
input?: Record<string, any>
success?: boolean
}
export interface TimeSegment {
@@ -178,21 +177,4 @@ export interface ProviderRequest {
previousInteractionId?: string
}
/**
* Typed error class for provider failures that includes timing information.
*/
export class ProviderError extends Error {
timing: {
startTime: string
endTime: string
duration: number
}
constructor(message: string, timing: { startTime: string; endTime: string; duration: number }) {
super(message)
this.name = 'ProviderError'
this.timing = timing
}
}
export const providers: Record<string, ProviderConfig> = {}

View File

@@ -6,13 +6,11 @@ import type { StreamingExecution } from '@/executor/types'
import { MAX_TOOL_ITERATIONS } from '@/providers'
import { getProviderDefaultModel, getProviderModels } from '@/providers/models'
import type {
Message,
ProviderConfig,
ProviderRequest,
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -100,7 +98,7 @@ export const vllmProvider: ProviderConfig = {
baseURL: `${baseUrl}/v1`,
})
const allMessages: Message[] = []
const allMessages = [] as any[]
if (request.systemPrompt) {
allMessages.push({
@@ -637,11 +635,23 @@ export const vllmProvider: ProviderConfig = {
duration: totalDuration,
})
throw new ProviderError(errorMessage, {
const enhancedError = new Error(errorMessage)
// @ts-ignore
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
if (errorType) {
// @ts-ignore
enhancedError.vllmErrorType = errorType
}
if (errorCode) {
// @ts-ignore
enhancedError.vllmErrorCode = errorCode
}
throw enhancedError
}
},
}

View File

@@ -5,13 +5,11 @@ import type { StreamingExecution } from '@/executor/types'
import { MAX_TOOL_ITERATIONS } from '@/providers'
import { getProviderDefaultModel, getProviderModels } from '@/providers/models'
import type {
Message,
ProviderConfig,
ProviderRequest,
ProviderResponse,
TimeSegment,
} from '@/providers/types'
import { ProviderError } from '@/providers/types'
import {
calculateCost,
prepareToolExecution,
@@ -54,7 +52,7 @@ export const xAIProvider: ProviderConfig = {
streaming: !!request.stream,
})
const allMessages: Message[] = []
const allMessages: any[] = []
if (request.systemPrompt) {
allMessages.push({
@@ -589,11 +587,15 @@ export const xAIProvider: ProviderConfig = {
hasResponseFormat: !!request.responseFormat,
})
throw new ProviderError(error instanceof Error ? error.message : String(error), {
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
// @ts-ignore - Adding timing property to error for debugging
enhancedError.timing = {
startTime: providerStartTimeISO,
endTime: providerEndTimeISO,
duration: totalDuration,
})
}
throw enhancedError
}
},
}

View File

@@ -1650,7 +1650,7 @@ export const useCopilotStore = create<CopilotStore>()(
map[id] = {
...current,
state: norm,
display: resolveToolDisplay(current.name, norm, id, current.params),
display: resolveToolDisplay(current.name, norm, id, current.params, current.serverUI),
}
set({ toolCallsById: map })
} catch (error) {
@@ -1671,7 +1671,7 @@ export const useCopilotStore = create<CopilotStore>()(
map[toolCallId] = {
...current,
params: updatedParams,
display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams),
display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams, current.serverUI),
}
set({ toolCallsById: map })
} catch (error) {
@@ -1728,7 +1728,7 @@ export const useCopilotStore = create<CopilotStore>()(
// Update store map
const updatedMap = { ...toolCallsById }
const updatedDisplay = resolveToolDisplay(current.name, targetState, id, current.params)
const updatedDisplay = resolveToolDisplay(current.name, targetState, id, current.params, current.serverUI)
updatedMap[id] = {
...current,
state: targetState,

View File

@@ -4,6 +4,7 @@ import type { AvailableModel } from '@/lib/copilot/types'
export type { CopilotMode, CopilotModelId } from '@/lib/copilot/models'
import type { ClientContentBlock } from '@/lib/copilot/client-sse/types'
import type { ServerToolUI } from '@/lib/copilot/store-utils'
import type { ClientToolCallState, ClientToolDisplay } from '@/lib/copilot/tools/client/base-tool'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -26,6 +27,8 @@ export interface CopilotToolCall {
params?: Record<string, unknown>
input?: Record<string, unknown>
display?: ClientToolDisplay
/** UI metadata from the copilot SSE event (used as fallback for unregistered tools) */
serverUI?: ServerToolUI
/** Content streamed from a subagent (e.g., debug agent) */
subAgentContent?: string
/** Tool calls made by the subagent */

View File

@@ -26,6 +26,12 @@ export const actionsListTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Filter actions by incident ID (e.g., "01FCNDV6P870EA6S7TK1DSYDG0")',
},
page_size: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Number of actions to return per page (e.g., 10, 25, 50)',
},
},
request: {
@@ -36,6 +42,10 @@ export const actionsListTool: ToolConfig<
url.searchParams.append('incident_id', params.incident_id)
}
if (params.page_size) {
url.searchParams.append('page_size', params.page_size.toString())
}
return url.toString()
},
method: 'GET',

View File

@@ -20,10 +20,22 @@ export const escalationsListTool: ToolConfig<
visibility: 'user-only',
description: 'incident.io API Key',
},
page_size: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Number of results per page (e.g., 10, 25, 50). Default: 25',
},
},
request: {
url: () => 'https://api.incident.io/v2/escalations',
url: (params) => {
const url = new URL('https://api.incident.io/v2/escalations')
if (params.page_size) {
url.searchParams.append('page_size', params.page_size.toString())
}
return url.toString()
},
method: 'GET',
headers: (params) => ({
'Content-Type': 'application/json',

View File

@@ -26,6 +26,12 @@ export const followUpsListTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Filter follow-ups by incident ID (e.g., "01FCNDV6P870EA6S7TK1DSYDG0")',
},
page_size: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Number of follow-ups to return per page (e.g., 10, 25, 50)',
},
},
request: {
@@ -36,6 +42,10 @@ export const followUpsListTool: ToolConfig<
url.searchParams.append('incident_id', params.incident_id)
}
if (params.page_size) {
url.searchParams.append('page_size', params.page_size.toString())
}
return url.toString()
},
method: 'GET',

View File

@@ -396,6 +396,7 @@ export interface IncidentioIncidentsUpdateResponse extends ToolResponse {
// Action types
export interface IncidentioActionsListParams extends IncidentioBaseParams {
incident_id?: string
page_size?: number
}
export interface IncidentioAction {
@@ -445,6 +446,7 @@ export interface IncidentioActionsShowResponse extends ToolResponse {
// Follow-up types
export interface IncidentioFollowUpsListParams extends IncidentioBaseParams {
incident_id?: string
page_size?: number
}
export interface IncidentioFollowUp {
@@ -662,7 +664,6 @@ export interface CustomFieldsDeleteResponse extends ToolResponse {
// Users list tool types
export interface IncidentioUsersListParams extends IncidentioBaseParams {
page_size?: number
after?: string
}
export interface IncidentioUser {
@@ -675,11 +676,6 @@ export interface IncidentioUser {
export interface IncidentioUsersListResponse extends ToolResponse {
output: {
users: IncidentioUser[]
pagination_meta?: {
after: string
page_size: number
total_record_count?: number
}
}
}
@@ -790,7 +786,9 @@ export type IncidentioResponse =
| IncidentioEscalationPathsDeleteResponse
// Escalations types
export interface IncidentioEscalationsListParams extends IncidentioBaseParams {}
export interface IncidentioEscalationsListParams extends IncidentioBaseParams {
page_size?: number
}
export interface IncidentioEscalation {
id: string

View File

@@ -1,7 +1,6 @@
import {
INCIDENTIO_PAGINATION_OUTPUT_PROPERTIES,
type IncidentioUsersListParams,
type IncidentioUsersListResponse,
import type {
IncidentioUsersListParams,
IncidentioUsersListResponse,
} from '@/tools/incidentio/types'
import type { ToolConfig } from '@/tools/types'
@@ -25,27 +24,15 @@ export const usersListTool: ToolConfig<IncidentioUsersListParams, IncidentioUser
visibility: 'user-or-llm',
description: 'Number of results to return per page (e.g., 10, 25, 50). Default: 25',
},
after: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor to fetch the next page of results',
},
},
request: {
url: (params) => {
const url = new URL('https://api.incident.io/v2/users')
const baseUrl = 'https://api.incident.io/v2/users'
if (params.page_size) {
url.searchParams.append('page_size', params.page_size.toString())
return `${baseUrl}?page_size=${params.page_size}`
}
if (params.after) {
url.searchParams.append('after', params.after)
}
return url.toString()
return baseUrl
},
method: 'GET',
headers: (params) => ({
@@ -66,13 +53,6 @@ export const usersListTool: ToolConfig<IncidentioUsersListParams, IncidentioUser
email: user.email,
role: user.role,
})),
pagination_meta: data.pagination_meta
? {
after: data.pagination_meta.after,
page_size: data.pagination_meta.page_size,
total_record_count: data.pagination_meta.total_record_count,
}
: undefined,
},
}
},
@@ -91,11 +71,5 @@ export const usersListTool: ToolConfig<IncidentioUsersListParams, IncidentioUser
},
},
},
pagination_meta: {
type: 'object',
description: 'Pagination metadata',
optional: true,
properties: INCIDENTIO_PAGINATION_OUTPUT_PROPERTIES,
},
},
}

View File

@@ -24,11 +24,23 @@ export const pipedriveGetActivitiesTool: ToolConfig<
visibility: 'hidden',
description: 'The access token for the Pipedrive API',
},
user_id: {
deal_id: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Filter activities by user ID (e.g., "123")',
description: 'Filter activities by deal ID (e.g., "123")',
},
person_id: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Filter activities by person ID (e.g., "456")',
},
org_id: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Filter activities by organization ID (e.g., "789")',
},
type: {
type: 'string',
@@ -48,12 +60,6 @@ export const pipedriveGetActivitiesTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "50", default: 100, max: 500)',
},
start: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination start offset (0-based index of the first item to return)',
},
},
request: {
@@ -61,11 +67,12 @@ export const pipedriveGetActivitiesTool: ToolConfig<
const baseUrl = 'https://api.pipedrive.com/v1/activities'
const queryParams = new URLSearchParams()
if (params.user_id) queryParams.append('user_id', params.user_id)
if (params.deal_id) queryParams.append('deal_id', params.deal_id)
if (params.person_id) queryParams.append('person_id', params.person_id)
if (params.org_id) queryParams.append('org_id', params.org_id)
if (params.type) queryParams.append('type', params.type)
if (params.done) queryParams.append('done', params.done)
if (params.limit) queryParams.append('limit', params.limit)
if (params.start) queryParams.append('start', params.start)
const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
@@ -92,16 +99,12 @@ export const pipedriveGetActivitiesTool: ToolConfig<
}
const activities = data.data || []
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
const nextStart = data.additional_data?.pagination?.next_start ?? null
return {
success: true,
output: {
activities,
total_items: activities.length,
has_more: hasMore,
next_start: nextStart,
success: true,
},
}
@@ -117,16 +120,6 @@ export const pipedriveGetActivitiesTool: ToolConfig<
},
},
total_items: { type: 'number', description: 'Total number of activities returned' },
has_more: {
type: 'boolean',
description: 'Whether more activities are available',
optional: true,
},
next_start: {
type: 'number',
description: 'Offset for fetching the next page',
optional: true,
},
success: { type: 'boolean', description: 'Operation success status' },
},
}

View File

@@ -67,12 +67,6 @@ export const pipedriveGetAllDealsTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "50", default: 100, max: 500)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'For pagination, the marker representing the first item on the next page',
},
},
request: {
@@ -87,7 +81,6 @@ export const pipedriveGetAllDealsTool: ToolConfig<
if (params.pipeline_id) queryParams.append('pipeline_id', params.pipeline_id)
if (params.updated_since) queryParams.append('updated_since', params.updated_since)
if (params.limit) queryParams.append('limit', params.limit)
if (params.cursor) queryParams.append('cursor', params.cursor)
const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
@@ -114,8 +107,7 @@ export const pipedriveGetAllDealsTool: ToolConfig<
}
const deals = data.data || []
const nextCursor = data.additional_data?.next_cursor ?? null
const hasMore = nextCursor !== null
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
return {
success: true,
@@ -124,7 +116,6 @@ export const pipedriveGetAllDealsTool: ToolConfig<
metadata: {
total_items: deals.length,
has_more: hasMore,
next_cursor: nextCursor,
},
success: true,
},

View File

@@ -16,23 +16,29 @@ export const pipedriveGetFilesTool: ToolConfig<PipedriveGetFilesParams, Pipedriv
visibility: 'hidden',
description: 'The access token for the Pipedrive API',
},
sort: {
deal_id: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Sort files by field (supported: "id", "update_time")',
description: 'Filter files by deal ID (e.g., "123")',
},
person_id: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Filter files by person ID (e.g., "456")',
},
org_id: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Filter files by organization ID (e.g., "789")',
},
limit: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "50", default: 100, max: 100)',
},
start: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination start offset (0-based index of the first item to return)',
description: 'Number of results to return (e.g., "50", default: 100, max: 500)',
},
downloadFiles: {
type: 'boolean',
@@ -50,9 +56,10 @@ export const pipedriveGetFilesTool: ToolConfig<PipedriveGetFilesParams, Pipedriv
}),
body: (params) => ({
accessToken: params.accessToken,
sort: params.sort,
deal_id: params.deal_id,
person_id: params.person_id,
org_id: params.org_id,
limit: params.limit,
start: params.start,
downloadFiles: params.downloadFiles,
}),
},
@@ -72,16 +79,6 @@ export const pipedriveGetFilesTool: ToolConfig<PipedriveGetFilesParams, Pipedriv
optional: true,
},
total_items: { type: 'number', description: 'Total number of files returned' },
has_more: {
type: 'boolean',
description: 'Whether more files are available',
optional: true,
},
next_start: {
type: 'number',
description: 'Offset for fetching the next page',
optional: true,
},
success: { type: 'boolean', description: 'Operation success status' },
},
}

View File

@@ -60,12 +60,6 @@ export const pipedriveGetLeadsTool: ToolConfig<PipedriveGetLeadsParams, Pipedriv
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "50", default: 100, max: 500)',
},
start: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination start offset (0-based index of the first item to return)',
},
},
request: {
@@ -87,7 +81,6 @@ export const pipedriveGetLeadsTool: ToolConfig<PipedriveGetLeadsParams, Pipedriv
if (params.person_id) queryParams.append('person_id', params.person_id)
if (params.organization_id) queryParams.append('organization_id', params.organization_id)
if (params.limit) queryParams.append('limit', params.limit)
if (params.start) queryParams.append('start', params.start)
const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
@@ -126,19 +119,12 @@ export const pipedriveGetLeadsTool: ToolConfig<PipedriveGetLeadsParams, Pipedriv
// Otherwise, return list of leads
const leads = data.data || []
// Leads endpoint puts pagination fields directly on additional_data (no .pagination wrapper)
const hasMore = data.additional_data?.more_items_in_collection || false
const currentStart = data.additional_data?.start ?? 0
const currentLimit = data.additional_data?.limit ?? leads.length
const nextStart = hasMore ? currentStart + currentLimit : null
return {
success: true,
output: {
leads,
total_items: leads.length,
has_more: hasMore,
next_start: nextStart,
success: true,
},
}
@@ -165,16 +151,6 @@ export const pipedriveGetLeadsTool: ToolConfig<PipedriveGetLeadsParams, Pipedriv
description: 'Total number of leads returned',
optional: true,
},
has_more: {
type: 'boolean',
description: 'Whether more leads are available',
optional: true,
},
next_start: {
type: 'number',
description: 'Offset for fetching the next page',
optional: true,
},
success: { type: 'boolean', description: 'Operation success status' },
},
}

View File

@@ -40,12 +40,6 @@ export const pipedriveGetMailMessagesTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "25", default: 50)',
},
start: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination start offset (0-based index of the first item to return)',
},
},
request: {
@@ -55,7 +49,6 @@ export const pipedriveGetMailMessagesTool: ToolConfig<
if (params.folder) queryParams.append('folder', params.folder)
if (params.limit) queryParams.append('limit', params.limit)
if (params.start) queryParams.append('start', params.start)
const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
@@ -82,16 +75,12 @@ export const pipedriveGetMailMessagesTool: ToolConfig<
}
const threads = data.data || []
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
const nextStart = data.additional_data?.pagination?.next_start ?? null
return {
success: true,
output: {
messages: threads,
total_items: threads.length,
has_more: hasMore,
next_start: nextStart,
success: true,
},
}
@@ -100,16 +89,6 @@ export const pipedriveGetMailMessagesTool: ToolConfig<
outputs: {
messages: { type: 'array', description: 'Array of mail thread objects from Pipedrive mailbox' },
total_items: { type: 'number', description: 'Total number of mail threads returned' },
has_more: {
type: 'boolean',
description: 'Whether more messages are available',
optional: true,
},
next_start: {
type: 'number',
description: 'Offset for fetching the next page',
optional: true,
},
success: { type: 'boolean', description: 'Operation success status' },
},
}

View File

@@ -35,18 +35,18 @@ export const pipedriveGetPipelineDealsTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Filter by specific stage within the pipeline (e.g., "2")',
},
status: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Filter by deal status: open, won, lost',
},
limit: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "50", default: 100, max: 500)',
},
start: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination start offset (0-based index of the first item to return)',
},
},
request: {
@@ -55,8 +55,8 @@ export const pipedriveGetPipelineDealsTool: ToolConfig<
const queryParams = new URLSearchParams()
if (params.stage_id) queryParams.append('stage_id', params.stage_id)
if (params.status) queryParams.append('status', params.status)
if (params.limit) queryParams.append('limit', params.limit)
if (params.start) queryParams.append('start', params.start)
const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
@@ -83,8 +83,6 @@ export const pipedriveGetPipelineDealsTool: ToolConfig<
}
const deals = data.data || []
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
const nextStart = data.additional_data?.pagination?.next_start ?? null
return {
success: true,
@@ -93,8 +91,6 @@ export const pipedriveGetPipelineDealsTool: ToolConfig<
metadata: {
pipeline_id: params?.pipeline_id || '',
total_items: deals.length,
has_more: hasMore,
next_start: nextStart,
},
success: true,
},

View File

@@ -42,11 +42,11 @@ export const pipedriveGetPipelinesTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "50", default: 100, max: 500)',
},
start: {
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination start offset (0-based index of the first item to return)',
description: 'For pagination, the marker representing the first item on the next page',
},
},
@@ -58,7 +58,7 @@ export const pipedriveGetPipelinesTool: ToolConfig<
if (params.sort_by) queryParams.append('sort_by', params.sort_by)
if (params.sort_direction) queryParams.append('sort_direction', params.sort_direction)
if (params.limit) queryParams.append('limit', params.limit)
if (params.start) queryParams.append('start', params.start)
if (params.cursor) queryParams.append('cursor', params.cursor)
const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
@@ -85,16 +85,12 @@ export const pipedriveGetPipelinesTool: ToolConfig<
}
const pipelines = data.data || []
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
const nextStart = data.additional_data?.pagination?.next_start ?? null
return {
success: true,
output: {
pipelines,
total_items: pipelines.length,
has_more: hasMore,
next_start: nextStart,
success: true,
},
}
@@ -110,16 +106,6 @@ export const pipedriveGetPipelinesTool: ToolConfig<
},
},
total_items: { type: 'number', description: 'Total number of pipelines returned' },
has_more: {
type: 'boolean',
description: 'Whether more pipelines are available',
optional: true,
},
next_start: {
type: 'number',
description: 'Offset for fetching the next page',
optional: true,
},
success: { type: 'boolean', description: 'Operation success status' },
},
}

View File

@@ -42,12 +42,6 @@ export const pipedriveGetProjectsTool: ToolConfig<
description:
'Number of results to return (e.g., "50", default: 100, max: 500, only for listing all)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'For pagination, the marker representing the first item on the next page',
},
},
request: {
@@ -63,7 +57,6 @@ export const pipedriveGetProjectsTool: ToolConfig<
if (params.status) queryParams.append('status', params.status)
if (params.limit) queryParams.append('limit', params.limit)
if (params.cursor) queryParams.append('cursor', params.cursor)
const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
@@ -102,16 +95,12 @@ export const pipedriveGetProjectsTool: ToolConfig<
// Otherwise, return list of projects
const projects = data.data || []
const nextCursor = data.additional_data?.next_cursor ?? null
const hasMore = nextCursor !== null
return {
success: true,
output: {
projects,
total_items: projects.length,
has_more: hasMore,
next_cursor: nextCursor,
success: true,
},
}
@@ -133,16 +122,6 @@ export const pipedriveGetProjectsTool: ToolConfig<
description: 'Total number of projects returned',
optional: true,
},
has_more: {
type: 'boolean',
description: 'Whether more projects are available',
optional: true,
},
next_cursor: {
type: 'string',
description: 'Cursor for fetching the next page',
optional: true,
},
success: { type: 'boolean', description: 'Operation success status' },
},
}

View File

@@ -239,16 +239,6 @@ export const PIPEDRIVE_MAIL_MESSAGE_OUTPUT: OutputProperty = {
export const PIPEDRIVE_METADATA_OUTPUT_PROPERTIES = {
total_items: { type: 'number', description: 'Total number of items' },
has_more: { type: 'boolean', description: 'Whether more items are available', optional: true },
next_cursor: {
type: 'string',
description: 'Cursor for fetching the next page (v2 endpoints)',
optional: true,
},
next_start: {
type: 'number',
description: 'Offset for fetching the next page (v1 endpoints)',
optional: true,
},
} as const satisfies Record<string, OutputProperty>
// Common Pipedrive types
@@ -365,7 +355,6 @@ export interface PipedriveGetAllDealsParams {
pipeline_id?: string
updated_since?: string
limit?: string
cursor?: string
}
export interface PipedriveGetAllDealsOutput {
@@ -373,7 +362,6 @@ export interface PipedriveGetAllDealsOutput {
metadata: {
total_items: number
has_more: boolean
next_cursor?: string
}
success: boolean
}
@@ -443,9 +431,10 @@ export interface PipedriveUpdateDealResponse extends ToolResponse {
// GET Files
export interface PipedriveGetFilesParams {
accessToken: string
sort?: string
deal_id?: string
person_id?: string
org_id?: string
limit?: string
start?: string
downloadFiles?: boolean
}
@@ -453,8 +442,6 @@ export interface PipedriveGetFilesOutput {
files: PipedriveFile[]
downloadedFiles?: ToolFileData[]
total_items: number
has_more?: boolean
next_start?: number
success: boolean
}
@@ -466,14 +453,11 @@ export interface PipedriveGetMailMessagesParams {
accessToken: string
folder?: string
limit?: string
start?: string
}
export interface PipedriveGetMailMessagesOutput {
messages: PipedriveMailMessage[]
total_items: number
has_more?: boolean
next_start?: number
success: boolean
}
@@ -506,14 +490,12 @@ export interface PipedriveGetPipelinesParams {
sort_by?: string
sort_direction?: string
limit?: string
start?: string
cursor?: string
}
export interface PipedriveGetPipelinesOutput {
pipelines: PipedrivePipeline[]
total_items: number
has_more?: boolean
next_start?: number
success: boolean
}
@@ -526,8 +508,8 @@ export interface PipedriveGetPipelineDealsParams {
accessToken: string
pipeline_id: string
stage_id?: string
status?: string
limit?: string
start?: string
}
export interface PipedriveGetPipelineDealsOutput {
@@ -535,8 +517,6 @@ export interface PipedriveGetPipelineDealsOutput {
metadata: {
pipeline_id: string
total_items: number
has_more?: boolean
next_start?: number
}
success: boolean
}
@@ -551,15 +531,12 @@ export interface PipedriveGetProjectsParams {
project_id?: string
status?: string
limit?: string
cursor?: string
}
export interface PipedriveGetProjectsOutput {
projects?: PipedriveProject[]
project?: PipedriveProject
total_items?: number
has_more?: boolean
next_cursor?: string
success: boolean
}
@@ -588,18 +565,17 @@ export interface PipedriveCreateProjectResponse extends ToolResponse {
// GET All Activities
export interface PipedriveGetActivitiesParams {
accessToken: string
user_id?: string
deal_id?: string
person_id?: string
org_id?: string
type?: string
done?: string
limit?: string
start?: string
}
export interface PipedriveGetActivitiesOutput {
activities: PipedriveActivity[]
total_items: number
has_more?: boolean
next_start?: number
success: boolean
}
@@ -660,15 +636,12 @@ export interface PipedriveGetLeadsParams {
person_id?: string
organization_id?: string
limit?: string
start?: string
}
export interface PipedriveGetLeadsOutput {
leads?: PipedriveLead[]
lead?: PipedriveLead
total_items?: number
has_more?: boolean
next_start?: number
success: boolean
}

View File

@@ -51,12 +51,6 @@ export const queryTool: ToolConfig<SupabaseQueryParams, SupabaseQueryResponse> =
visibility: 'user-or-llm',
description: 'Maximum number of rows to return',
},
offset: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Number of rows to skip (for pagination)',
},
apiKey: {
type: 'string',
required: true,
@@ -97,15 +91,10 @@ export const queryTool: ToolConfig<SupabaseQueryParams, SupabaseQueryResponse> =
}
// Add limit if provided
if (params.limit !== undefined && params.limit !== null) {
if (params.limit) {
url += `&limit=${Number(params.limit)}`
}
// Add offset if provided
if (params.offset !== undefined && params.offset !== null) {
url += `&offset=${Number(params.offset)}`
}
return url
},
method: 'GET',

View File

@@ -57,12 +57,6 @@ export const textSearchTool: ToolConfig<SupabaseTextSearchParams, SupabaseTextSe
visibility: 'user-or-llm',
description: 'Maximum number of rows to return',
},
offset: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Number of rows to skip (for pagination)',
},
apiKey: {
type: 'string',
required: true,
@@ -80,9 +74,8 @@ export const textSearchTool: ToolConfig<SupabaseTextSearchParams, SupabaseTextSe
let url = `https://${params.projectId}.supabase.co/rest/v1/${params.table}?select=*`
// Map search types to PostgREST operators
// plfts = plainto_tsquery (natural language), phfts = phraseto_tsquery, wfts = websearch_to_tsquery
const operatorMap: Record<string, string> = {
plain: 'plfts',
plain: 'fts',
phrase: 'phfts',
websearch: 'wfts',
}
@@ -93,15 +86,10 @@ export const textSearchTool: ToolConfig<SupabaseTextSearchParams, SupabaseTextSe
url += `&${params.column}=${operator}(${language}).${encodeURIComponent(params.query)}`
// Add limit if provided
if (params.limit !== undefined && params.limit !== null) {
if (params.limit) {
url += `&limit=${Number(params.limit)}`
}
// Add offset if provided
if (params.offset !== undefined && params.offset !== null) {
url += `&offset=${Number(params.offset)}`
}
return url
},
method: 'GET',

Some files were not shown because too many files have changed in this diff Show More