mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-18 10:22:00 -05:00
Compare commits
13 Commits
feat/mothe
...
cursor/ent
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6fbf5741d4 | ||
|
|
eab01e0272 | ||
|
|
bbcef7ce5c | ||
|
|
0ee52df5a7 | ||
|
|
6421b1a0ca | ||
|
|
61a5c98717 | ||
|
|
a0afb5d03e | ||
|
|
cdacb796a8 | ||
|
|
3ce54147e6 | ||
|
|
08690b2906 | ||
|
|
299cc26694 | ||
|
|
48715ff013 | ||
|
|
ad0d0ed1f1 |
@@ -59,12 +59,6 @@ body {
|
||||
--content-gap: 1.75rem;
|
||||
}
|
||||
|
||||
/* Remove custom layout variable overrides to fallback to fumadocs defaults */
|
||||
|
||||
/* ============================================
|
||||
Navbar Light Mode Styling
|
||||
============================================ */
|
||||
|
||||
/* Light mode navbar and search styling */
|
||||
:root:not(.dark) nav {
|
||||
background-color: hsla(0, 0%, 96%, 0.85) !important;
|
||||
@@ -88,10 +82,6 @@ body {
|
||||
-webkit-backdrop-filter: blur(25px) saturate(180%) brightness(0.6) !important;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
Custom Sidebar Styling (Turborepo-inspired)
|
||||
============================================ */
|
||||
|
||||
/* Floating sidebar appearance - remove background */
|
||||
[data-sidebar-container],
|
||||
#nd-sidebar {
|
||||
@@ -468,10 +458,6 @@ aside[data-sidebar],
|
||||
writing-mode: horizontal-tb !important;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
Code Block Styling (Improved)
|
||||
============================================ */
|
||||
|
||||
/* Apply Geist Mono to code elements */
|
||||
code,
|
||||
pre,
|
||||
@@ -532,10 +518,6 @@ pre code .line {
|
||||
color: var(--color-fd-primary);
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
TOC (Table of Contents) Styling
|
||||
============================================ */
|
||||
|
||||
/* Remove the thin border-left on nested TOC items (keeps main indicator only) */
|
||||
#nd-toc a[style*="padding-inline-start"] {
|
||||
border-left: none !important;
|
||||
@@ -554,10 +536,6 @@ main article,
|
||||
padding-bottom: 4rem;
|
||||
}
|
||||
|
||||
/* ============================================
|
||||
Center and Constrain Main Content Width
|
||||
============================================ */
|
||||
|
||||
/* Main content area - center and constrain like turborepo/raindrop */
|
||||
/* Note: --sidebar-offset and --toc-offset are now applied at #nd-docs-layout level */
|
||||
main[data-main] {
|
||||
|
||||
@@ -234,7 +234,6 @@ List actions from incident.io. Optionally filter by incident ID.
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | incident.io API Key |
|
||||
| `incident_id` | string | No | Filter actions by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
|
||||
| `page_size` | number | No | Number of actions to return per page \(e.g., 10, 25, 50\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -309,7 +308,6 @@ List follow-ups from incident.io. Optionally filter by incident ID.
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | incident.io API Key |
|
||||
| `incident_id` | string | No | Filter follow-ups by incident ID \(e.g., "01FCNDV6P870EA6S7TK1DSYDG0"\) |
|
||||
| `page_size` | number | No | Number of follow-ups to return per page \(e.g., 10, 25, 50\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -396,6 +394,7 @@ List all users in your Incident.io workspace. Returns user details including id,
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Incident.io API Key |
|
||||
| `page_size` | number | No | Number of results to return per page \(e.g., 10, 25, 50\). Default: 25 |
|
||||
| `after` | string | No | Pagination cursor to fetch the next page of results |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -406,6 +405,10 @@ List all users in your Incident.io workspace. Returns user details including id,
|
||||
| ↳ `name` | string | Full name of the user |
|
||||
| ↳ `email` | string | Email address of the user |
|
||||
| ↳ `role` | string | Role of the user in the workspace |
|
||||
| `pagination_meta` | object | Pagination metadata |
|
||||
| ↳ `after` | string | Cursor for next page |
|
||||
| ↳ `page_size` | number | Number of items per page |
|
||||
| ↳ `total_record_count` | number | Total number of records |
|
||||
|
||||
### `incidentio_users_show`
|
||||
|
||||
@@ -644,7 +647,6 @@ List all escalation policies in incident.io
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | incident.io API Key |
|
||||
| `page_size` | number | No | Number of results per page \(e.g., 10, 25, 50\). Default: 25 |
|
||||
|
||||
#### Output
|
||||
|
||||
|
||||
@@ -49,6 +49,7 @@ Retrieve all deals from Pipedrive with optional filters
|
||||
| `pipeline_id` | string | No | If supplied, only deals in the specified pipeline are returned \(e.g., "1"\) |
|
||||
| `updated_since` | string | No | If set, only deals updated after this time are returned. Format: 2025-01-01T10:20:00Z |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
|
||||
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -74,6 +75,8 @@ Retrieve all deals from Pipedrive with optional filters
|
||||
| `metadata` | object | Pagination metadata for the response |
|
||||
| ↳ `total_items` | number | Total number of items |
|
||||
| ↳ `has_more` | boolean | Whether more items are available |
|
||||
| ↳ `next_cursor` | string | Cursor for fetching the next page \(v2 endpoints\) |
|
||||
| ↳ `next_start` | number | Offset for fetching the next page \(v1 endpoints\) |
|
||||
| `success` | boolean | Operation success status |
|
||||
|
||||
### `pipedrive_get_deal`
|
||||
@@ -148,10 +151,9 @@ Retrieve files from Pipedrive with optional filters
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `deal_id` | string | No | Filter files by deal ID \(e.g., "123"\) |
|
||||
| `person_id` | string | No | Filter files by person ID \(e.g., "456"\) |
|
||||
| `org_id` | string | No | Filter files by organization ID \(e.g., "789"\) |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
|
||||
| `sort` | string | No | Sort files by field \(supported: "id", "update_time"\) |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 100\) |
|
||||
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
|
||||
| `downloadFiles` | boolean | No | Download file contents into file outputs |
|
||||
|
||||
#### Output
|
||||
@@ -171,6 +173,8 @@ Retrieve files from Pipedrive with optional filters
|
||||
| ↳ `url` | string | File download URL |
|
||||
| `downloadedFiles` | file[] | Downloaded files from Pipedrive |
|
||||
| `total_items` | number | Total number of files returned |
|
||||
| `has_more` | boolean | Whether more files are available |
|
||||
| `next_start` | number | Offset for fetching the next page |
|
||||
| `success` | boolean | Operation success status |
|
||||
|
||||
### `pipedrive_get_mail_messages`
|
||||
@@ -183,6 +187,7 @@ Retrieve mail threads from Pipedrive mailbox
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `folder` | string | No | Filter by folder: inbox, drafts, sent, archive \(default: inbox\) |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "25", default: 50\) |
|
||||
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -190,6 +195,8 @@ Retrieve mail threads from Pipedrive mailbox
|
||||
| --------- | ---- | ----------- |
|
||||
| `messages` | array | Array of mail thread objects from Pipedrive mailbox |
|
||||
| `total_items` | number | Total number of mail threads returned |
|
||||
| `has_more` | boolean | Whether more messages are available |
|
||||
| `next_start` | number | Offset for fetching the next page |
|
||||
| `success` | boolean | Operation success status |
|
||||
|
||||
### `pipedrive_get_mail_thread`
|
||||
@@ -221,7 +228,7 @@ Retrieve all pipelines from Pipedrive
|
||||
| `sort_by` | string | No | Field to sort by: id, update_time, add_time \(default: id\) |
|
||||
| `sort_direction` | string | No | Sorting direction: asc, desc \(default: asc\) |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
|
||||
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
|
||||
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -237,6 +244,8 @@ Retrieve all pipelines from Pipedrive
|
||||
| ↳ `add_time` | string | When the pipeline was created |
|
||||
| ↳ `update_time` | string | When the pipeline was last updated |
|
||||
| `total_items` | number | Total number of pipelines returned |
|
||||
| `has_more` | boolean | Whether more pipelines are available |
|
||||
| `next_start` | number | Offset for fetching the next page |
|
||||
| `success` | boolean | Operation success status |
|
||||
|
||||
### `pipedrive_get_pipeline_deals`
|
||||
@@ -249,8 +258,8 @@ Retrieve all deals in a specific pipeline
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `pipeline_id` | string | Yes | The ID of the pipeline \(e.g., "1"\) |
|
||||
| `stage_id` | string | No | Filter by specific stage within the pipeline \(e.g., "2"\) |
|
||||
| `status` | string | No | Filter by deal status: open, won, lost |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
|
||||
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -271,6 +280,7 @@ Retrieve all projects or a specific project from Pipedrive
|
||||
| `project_id` | string | No | Optional: ID of a specific project to retrieve \(e.g., "123"\) |
|
||||
| `status` | string | No | Filter by project status: open, completed, deleted \(only for listing all\) |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500, only for listing all\) |
|
||||
| `cursor` | string | No | For pagination, the marker representing the first item on the next page |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -279,6 +289,8 @@ Retrieve all projects or a specific project from Pipedrive
|
||||
| `projects` | array | Array of project objects \(when listing all\) |
|
||||
| `project` | object | Single project object \(when project_id is provided\) |
|
||||
| `total_items` | number | Total number of projects returned |
|
||||
| `has_more` | boolean | Whether more projects are available |
|
||||
| `next_cursor` | string | Cursor for fetching the next page |
|
||||
| `success` | boolean | Operation success status |
|
||||
|
||||
### `pipedrive_create_project`
|
||||
@@ -309,12 +321,11 @@ Retrieve activities (tasks) from Pipedrive with optional filters
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `deal_id` | string | No | Filter activities by deal ID \(e.g., "123"\) |
|
||||
| `person_id` | string | No | Filter activities by person ID \(e.g., "456"\) |
|
||||
| `org_id` | string | No | Filter activities by organization ID \(e.g., "789"\) |
|
||||
| `user_id` | string | No | Filter activities by user ID \(e.g., "123"\) |
|
||||
| `type` | string | No | Filter by activity type \(call, meeting, task, deadline, email, lunch\) |
|
||||
| `done` | string | No | Filter by completion status: 0 for not done, 1 for done |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
|
||||
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -335,6 +346,8 @@ Retrieve activities (tasks) from Pipedrive with optional filters
|
||||
| ↳ `add_time` | string | When the activity was created |
|
||||
| ↳ `update_time` | string | When the activity was last updated |
|
||||
| `total_items` | number | Total number of activities returned |
|
||||
| `has_more` | boolean | Whether more activities are available |
|
||||
| `next_start` | number | Offset for fetching the next page |
|
||||
| `success` | boolean | Operation success status |
|
||||
|
||||
### `pipedrive_create_activity`
|
||||
@@ -399,6 +412,7 @@ Retrieve all leads or a specific lead from Pipedrive
|
||||
| `person_id` | string | No | Filter by person ID \(e.g., "456"\) |
|
||||
| `organization_id` | string | No | Filter by organization ID \(e.g., "789"\) |
|
||||
| `limit` | string | No | Number of results to return \(e.g., "50", default: 100, max: 500\) |
|
||||
| `start` | string | No | Pagination start offset \(0-based index of the first item to return\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -433,6 +447,8 @@ Retrieve all leads or a specific lead from Pipedrive
|
||||
| ↳ `add_time` | string | When the lead was created \(ISO 8601\) |
|
||||
| ↳ `update_time` | string | When the lead was last updated \(ISO 8601\) |
|
||||
| `total_items` | number | Total number of leads returned |
|
||||
| `has_more` | boolean | Whether more leads are available |
|
||||
| `next_start` | number | Offset for fetching the next page |
|
||||
| `success` | boolean | Operation success status |
|
||||
|
||||
### `pipedrive_create_lead`
|
||||
|
||||
@@ -57,6 +57,7 @@ Query data from a Supabase table
|
||||
| `filter` | string | No | PostgREST filter \(e.g., "id=eq.123"\) |
|
||||
| `orderBy` | string | No | Column to order by \(add DESC for descending\) |
|
||||
| `limit` | number | No | Maximum number of rows to return |
|
||||
| `offset` | number | No | Number of rows to skip \(for pagination\) |
|
||||
| `apiKey` | string | Yes | Your Supabase service role secret key |
|
||||
|
||||
#### Output
|
||||
@@ -211,6 +212,7 @@ Perform full-text search on a Supabase table
|
||||
| `searchType` | string | No | Search type: plain, phrase, or websearch \(default: websearch\) |
|
||||
| `language` | string | No | Language for text search configuration \(default: english\) |
|
||||
| `limit` | number | No | Maximum number of rows to return |
|
||||
| `offset` | number | No | Number of rows to skip \(for pagination\) |
|
||||
| `apiKey` | string | Yes | Your Supabase service role secret key |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -43,6 +43,8 @@ Retrieve form responses from Typeform
|
||||
| `formId` | string | Yes | Typeform form ID \(e.g., "abc123XYZ"\) |
|
||||
| `apiKey` | string | Yes | Typeform Personal Access Token |
|
||||
| `pageSize` | number | No | Number of responses to retrieve \(e.g., 10, 25, 50\) |
|
||||
| `before` | string | No | Cursor token for fetching the next page of older responses |
|
||||
| `after` | string | No | Cursor token for fetching the next page of newer responses |
|
||||
| `since` | string | No | Retrieve responses submitted after this date \(e.g., "2024-01-01T00:00:00Z"\) |
|
||||
| `until` | string | No | Retrieve responses submitted before this date \(e.g., "2024-12-31T23:59:59Z"\) |
|
||||
| `completed` | string | No | Filter by completion status \(e.g., "true", "false", "all"\) |
|
||||
|
||||
@@ -67,10 +67,9 @@ Retrieve a list of tickets from Zendesk with optional filtering
|
||||
| `type` | string | No | Filter by type: "problem", "incident", "question", or "task" |
|
||||
| `assigneeId` | string | No | Filter by assignee user ID as a numeric string \(e.g., "12345"\) |
|
||||
| `organizationId` | string | No | Filter by organization ID as a numeric string \(e.g., "67890"\) |
|
||||
| `sortBy` | string | No | Sort field: "created_at", "updated_at", "priority", or "status" |
|
||||
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
|
||||
| `sort` | string | No | Sort field for ticket listing \(only applies without filters\): "updated_at", "id", or "status". Prefix with "-" for descending \(e.g., "-updated_at"\) |
|
||||
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
|
||||
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
|
||||
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -129,10 +128,10 @@ Retrieve a list of tickets from Zendesk with optional filtering
|
||||
| ↳ `from_messaging_channel` | boolean | Whether the ticket originated from a messaging channel |
|
||||
| ↳ `ticket_form_id` | number | Ticket form ID |
|
||||
| ↳ `generated_timestamp` | number | Unix timestamp of the ticket generation |
|
||||
| `paging` | object | Pagination information |
|
||||
| `paging` | object | Cursor-based pagination information |
|
||||
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
|
||||
| ↳ `has_more` | boolean | Whether more results are available |
|
||||
| ↳ `next_page` | string | URL for next page of results |
|
||||
| ↳ `previous_page` | string | URL for previous page of results |
|
||||
| ↳ `count` | number | Total count of items |
|
||||
| `metadata` | object | Response metadata |
|
||||
| ↳ `total_returned` | number | Number of items returned in this response |
|
||||
| ↳ `has_more` | boolean | Whether more items are available |
|
||||
@@ -515,7 +514,7 @@ Retrieve a list of users from Zendesk with optional filtering
|
||||
| `role` | string | No | Filter by role: "end-user", "agent", or "admin" |
|
||||
| `permissionSet` | string | No | Filter by permission set ID as a numeric string \(e.g., "12345"\) |
|
||||
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
|
||||
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
|
||||
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -563,10 +562,10 @@ Retrieve a list of users from Zendesk with optional filtering
|
||||
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
|
||||
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
|
||||
| ↳ `remote_photo_url` | string | URL to a remote photo |
|
||||
| `paging` | object | Pagination information |
|
||||
| `paging` | object | Cursor-based pagination information |
|
||||
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
|
||||
| ↳ `has_more` | boolean | Whether more results are available |
|
||||
| ↳ `next_page` | string | URL for next page of results |
|
||||
| ↳ `previous_page` | string | URL for previous page of results |
|
||||
| ↳ `count` | number | Total count of items |
|
||||
| `metadata` | object | Response metadata |
|
||||
| ↳ `total_returned` | number | Number of items returned in this response |
|
||||
| ↳ `has_more` | boolean | Whether more items are available |
|
||||
@@ -706,7 +705,7 @@ Search for users in Zendesk using a query string
|
||||
| `query` | string | No | Search query string \(e.g., user name or email\) |
|
||||
| `externalId` | string | No | External ID to search by \(your system identifier\) |
|
||||
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
|
||||
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
|
||||
| `page` | string | No | Page number for pagination \(1-based\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -754,10 +753,10 @@ Search for users in Zendesk using a query string
|
||||
| ↳ `shared` | boolean | Whether the user is shared from a different Zendesk |
|
||||
| ↳ `shared_agent` | boolean | Whether the agent is shared from a different Zendesk |
|
||||
| ↳ `remote_photo_url` | string | URL to a remote photo |
|
||||
| `paging` | object | Pagination information |
|
||||
| `paging` | object | Cursor-based pagination information |
|
||||
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
|
||||
| ↳ `has_more` | boolean | Whether more results are available |
|
||||
| ↳ `next_page` | string | URL for next page of results |
|
||||
| ↳ `previous_page` | string | URL for previous page of results |
|
||||
| ↳ `count` | number | Total count of items |
|
||||
| `metadata` | object | Response metadata |
|
||||
| ↳ `total_returned` | number | Number of items returned in this response |
|
||||
| ↳ `has_more` | boolean | Whether more items are available |
|
||||
@@ -999,7 +998,7 @@ Retrieve a list of organizations from Zendesk
|
||||
| `apiToken` | string | Yes | Zendesk API token |
|
||||
| `subdomain` | string | Yes | Your Zendesk subdomain \(e.g., "mycompany" for mycompany.zendesk.com\) |
|
||||
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
|
||||
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
|
||||
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -1020,10 +1019,10 @@ Retrieve a list of organizations from Zendesk
|
||||
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
|
||||
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
|
||||
| ↳ `external_id` | string | External ID for linking to external records |
|
||||
| `paging` | object | Pagination information |
|
||||
| `paging` | object | Cursor-based pagination information |
|
||||
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
|
||||
| ↳ `has_more` | boolean | Whether more results are available |
|
||||
| ↳ `next_page` | string | URL for next page of results |
|
||||
| ↳ `previous_page` | string | URL for previous page of results |
|
||||
| ↳ `count` | number | Total count of items |
|
||||
| `metadata` | object | Response metadata |
|
||||
| ↳ `total_returned` | number | Number of items returned in this response |
|
||||
| ↳ `has_more` | boolean | Whether more items are available |
|
||||
@@ -1075,7 +1074,7 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
|
||||
| `subdomain` | string | Yes | Your Zendesk subdomain |
|
||||
| `name` | string | Yes | Organization name prefix to search for \(e.g., "Acme"\) |
|
||||
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
|
||||
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
|
||||
| `page` | string | No | Page number for pagination \(1-based\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -1096,10 +1095,10 @@ Autocomplete organizations in Zendesk by name prefix (for name matching/autocomp
|
||||
| ↳ `created_at` | string | When the organization was created \(ISO 8601 format\) |
|
||||
| ↳ `updated_at` | string | When the organization was last updated \(ISO 8601 format\) |
|
||||
| ↳ `external_id` | string | External ID for linking to external records |
|
||||
| `paging` | object | Pagination information |
|
||||
| `paging` | object | Cursor-based pagination information |
|
||||
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
|
||||
| ↳ `has_more` | boolean | Whether more results are available |
|
||||
| ↳ `next_page` | string | URL for next page of results |
|
||||
| ↳ `previous_page` | string | URL for previous page of results |
|
||||
| ↳ `count` | number | Total count of items |
|
||||
| `metadata` | object | Response metadata |
|
||||
| ↳ `total_returned` | number | Number of items returned in this response |
|
||||
| ↳ `has_more` | boolean | Whether more items are available |
|
||||
@@ -1249,19 +1248,18 @@ Unified search across tickets, users, and organizations in Zendesk
|
||||
| `apiToken` | string | Yes | Zendesk API token |
|
||||
| `subdomain` | string | Yes | Your Zendesk subdomain |
|
||||
| `query` | string | Yes | Search query string using Zendesk search syntax \(e.g., "type:ticket status:open"\) |
|
||||
| `sortBy` | string | No | Sort field: "relevance", "created_at", "updated_at", "priority", "status", or "ticket_type" |
|
||||
| `sortOrder` | string | No | Sort order: "asc" or "desc" |
|
||||
| `filterType` | string | Yes | Resource type to search for: "ticket", "user", "organization", or "group" |
|
||||
| `perPage` | string | No | Results per page as a number string \(default: "100", max: "100"\) |
|
||||
| `page` | string | No | Page number as a string \(e.g., "1", "2"\) |
|
||||
| `pageAfter` | string | No | Cursor from a previous response to fetch the next page of results |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `paging` | object | Pagination information |
|
||||
| `paging` | object | Cursor-based pagination information |
|
||||
| ↳ `after_cursor` | string | Cursor for fetching the next page of results |
|
||||
| ↳ `has_more` | boolean | Whether more results are available |
|
||||
| ↳ `next_page` | string | URL for next page of results |
|
||||
| ↳ `previous_page` | string | URL for previous page of results |
|
||||
| ↳ `count` | number | Total count of items |
|
||||
| `metadata` | object | Response metadata |
|
||||
| ↳ `total_returned` | number | Number of items returned in this response |
|
||||
| ↳ `has_more` | boolean | Whether more items are available |
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
'use server'
|
||||
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { isProd } from '@/lib/core/config/feature-flags'
|
||||
|
||||
|
||||
@@ -85,7 +85,7 @@ export const LandingNode = React.memo(function LandingNode({ data }: { data: Lan
|
||||
transform: isAnimated ? 'translateY(0) scale(1)' : 'translateY(8px) scale(0.98)',
|
||||
transition:
|
||||
'opacity 0.6s cubic-bezier(0.22, 1, 0.36, 1), transform 0.6s cubic-bezier(0.22, 1, 0.36, 1)',
|
||||
willChange: 'transform, opacity',
|
||||
willChange: isAnimated ? 'auto' : 'transform, opacity',
|
||||
}}
|
||||
>
|
||||
<LandingBlock icon={data.icon} color={data.color} name={data.name} tags={data.tags} />
|
||||
|
||||
@@ -67,7 +67,6 @@ export const LandingEdge = React.memo(function LandingEdge(props: EdgeProps) {
|
||||
strokeLinejoin: 'round',
|
||||
pointerEvents: 'none',
|
||||
animation: `landing-edge-dash-${id} 1s linear infinite`,
|
||||
willChange: 'stroke-dashoffset',
|
||||
...style,
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -754,3 +754,100 @@ input[type="search"]::-ms-clear {
|
||||
text-decoration: none !important;
|
||||
color: inherit !important;
|
||||
}
|
||||
|
||||
/**
|
||||
* Respect user's prefers-reduced-motion setting (WCAG 2.3.3)
|
||||
* Disables animations and transitions for users who prefer reduced motion.
|
||||
*/
|
||||
@media (prefers-reduced-motion: reduce) {
|
||||
*,
|
||||
*::before,
|
||||
*::after {
|
||||
animation-duration: 0.01ms !important;
|
||||
animation-iteration-count: 1 !important;
|
||||
transition-duration: 0.01ms !important;
|
||||
scroll-behavior: auto !important;
|
||||
}
|
||||
}
|
||||
|
||||
/* WandPromptBar status indicator */
|
||||
@keyframes smoke-pulse {
|
||||
0%,
|
||||
100% {
|
||||
transform: scale(0.8);
|
||||
opacity: 0.4;
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.1);
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
.status-indicator {
|
||||
position: relative;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border-radius: 50%;
|
||||
overflow: hidden;
|
||||
background-color: hsl(var(--muted-foreground) / 0.5);
|
||||
transition: background-color 0.3s ease;
|
||||
}
|
||||
|
||||
.status-indicator.streaming {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.status-indicator.streaming::before {
|
||||
content: "";
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
border-radius: 50%;
|
||||
background: radial-gradient(
|
||||
circle,
|
||||
hsl(var(--primary) / 0.9) 0%,
|
||||
hsl(var(--primary) / 0.4) 60%,
|
||||
transparent 80%
|
||||
);
|
||||
animation: smoke-pulse 1.8s ease-in-out infinite;
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.dark .status-indicator.streaming::before {
|
||||
background: #6b7280;
|
||||
opacity: 0.9;
|
||||
animation: smoke-pulse 1.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
/* MessageContainer loading dot */
|
||||
@keyframes growShrink {
|
||||
0%,
|
||||
100% {
|
||||
transform: scale(0.9);
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.1);
|
||||
}
|
||||
}
|
||||
|
||||
.loading-dot {
|
||||
animation: growShrink 1.5s infinite ease-in-out;
|
||||
}
|
||||
|
||||
/* Subflow node z-index and drag-over styles */
|
||||
.workflow-container .react-flow__node-subflowNode {
|
||||
z-index: -1 !important;
|
||||
}
|
||||
|
||||
.workflow-container .react-flow__node-subflowNode:has([data-subflow-selected="true"]) {
|
||||
z-index: 10 !important;
|
||||
}
|
||||
|
||||
.loop-node-drag-over,
|
||||
.parallel-node-drag-over {
|
||||
box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
|
||||
border-radius: 8px !important;
|
||||
}
|
||||
|
||||
.react-flow__node[data-parent-node-id] .react-flow__handle {
|
||||
z-index: 30;
|
||||
}
|
||||
|
||||
@@ -1,22 +1,12 @@
|
||||
import { db } from '@sim/db'
|
||||
import { settings } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const logger = createLogger('CopilotAutoAllowedToolsAPI')
|
||||
|
||||
/** Headers for server-to-server calls to the Go copilot backend. */
|
||||
function copilotHeaders(): Record<string, string> {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
if (env.COPILOT_API_KEY) {
|
||||
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||
}
|
||||
return headers
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - Fetch user's auto-allowed integration tools
|
||||
*/
|
||||
@@ -30,18 +20,24 @@ export async function GET() {
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
const res = await fetch(
|
||||
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}`,
|
||||
{ method: 'GET', headers: copilotHeaders() }
|
||||
)
|
||||
const [userSettings] = await db
|
||||
.select()
|
||||
.from(settings)
|
||||
.where(eq(settings.userId, userId))
|
||||
.limit(1)
|
||||
|
||||
if (!res.ok) {
|
||||
logger.warn('Go backend returned error for list auto-allowed', { status: res.status })
|
||||
return NextResponse.json({ autoAllowedTools: [] })
|
||||
if (userSettings) {
|
||||
const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
|
||||
return NextResponse.json({ autoAllowedTools })
|
||||
}
|
||||
|
||||
const payload = await res.json()
|
||||
return NextResponse.json({ autoAllowedTools: payload?.autoAllowedTools || [] })
|
||||
await db.insert(settings).values({
|
||||
id: userId,
|
||||
userId,
|
||||
copilotAutoAllowedTools: [],
|
||||
})
|
||||
|
||||
return NextResponse.json({ autoAllowedTools: [] })
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch auto-allowed tools', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
@@ -66,22 +62,38 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
|
||||
}
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||
method: 'POST',
|
||||
headers: copilotHeaders(),
|
||||
body: JSON.stringify({ userId, toolId: body.toolId }),
|
||||
})
|
||||
const toolId = body.toolId
|
||||
|
||||
if (!res.ok) {
|
||||
logger.warn('Go backend returned error for add auto-allowed', { status: res.status })
|
||||
return NextResponse.json({ error: 'Failed to add tool' }, { status: 500 })
|
||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
||||
|
||||
if (existing) {
|
||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
||||
|
||||
if (!currentTools.includes(toolId)) {
|
||||
const updatedTools = [...currentTools, toolId]
|
||||
await db
|
||||
.update(settings)
|
||||
.set({
|
||||
copilotAutoAllowedTools: updatedTools,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(settings.userId, userId))
|
||||
|
||||
logger.info('Added tool to auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
||||
}
|
||||
|
||||
return NextResponse.json({ success: true, autoAllowedTools: currentTools })
|
||||
}
|
||||
|
||||
const payload = await res.json()
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
autoAllowedTools: payload?.autoAllowedTools || [],
|
||||
await db.insert(settings).values({
|
||||
id: userId,
|
||||
userId,
|
||||
copilotAutoAllowedTools: [toolId],
|
||||
})
|
||||
|
||||
logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
|
||||
} catch (error) {
|
||||
logger.error('Failed to add auto-allowed tool', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
@@ -107,21 +119,25 @@ export async function DELETE(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const res = await fetch(
|
||||
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}&toolId=${encodeURIComponent(toolId)}`,
|
||||
{ method: 'DELETE', headers: copilotHeaders() }
|
||||
)
|
||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
||||
|
||||
if (!res.ok) {
|
||||
logger.warn('Go backend returned error for remove auto-allowed', { status: res.status })
|
||||
return NextResponse.json({ error: 'Failed to remove tool' }, { status: 500 })
|
||||
if (existing) {
|
||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
||||
const updatedTools = currentTools.filter((t) => t !== toolId)
|
||||
|
||||
await db
|
||||
.update(settings)
|
||||
.set({
|
||||
copilotAutoAllowedTools: updatedTools,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(settings.userId, userId))
|
||||
|
||||
logger.info('Removed tool from auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
||||
}
|
||||
|
||||
const payload = await res.json()
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
autoAllowedTools: payload?.autoAllowedTools || [],
|
||||
})
|
||||
return NextResponse.json({ success: true, autoAllowedTools: [] })
|
||||
} catch (error) {
|
||||
logger.error('Failed to remove auto-allowed tool', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
|
||||
@@ -1,130 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { copilotChats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
|
||||
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||
// Workspace prompt is now generated by the Go copilot backend (detected via source: 'workspace-chat')
|
||||
|
||||
const logger = createLogger('WorkspaceChatAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
export const maxDuration = 300
|
||||
|
||||
const WorkspaceChatSchema = z.object({
|
||||
message: z.string().min(1, 'Message is required'),
|
||||
workspaceId: z.string().min(1, 'workspaceId is required'),
|
||||
chatId: z.string().optional(),
|
||||
model: z.string().optional().default('claude-opus-4-5'),
|
||||
})
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { message, workspaceId, chatId, model } = WorkspaceChatSchema.parse(body)
|
||||
|
||||
const chatResult = await resolveOrCreateChat({
|
||||
chatId,
|
||||
userId: session.user.id,
|
||||
workspaceId,
|
||||
model,
|
||||
})
|
||||
|
||||
const requestPayload: Record<string, unknown> = {
|
||||
message,
|
||||
userId: session.user.id,
|
||||
model,
|
||||
mode: 'agent',
|
||||
headless: true,
|
||||
messageId: crypto.randomUUID(),
|
||||
version: SIM_AGENT_VERSION,
|
||||
source: 'workspace-chat',
|
||||
stream: true,
|
||||
...(chatResult.chatId ? { chatId: chatResult.chatId } : {}),
|
||||
}
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
const stream = new ReadableStream({
|
||||
async start(controller) {
|
||||
const pushEvent = (event: Record<string, unknown>) => {
|
||||
try {
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`))
|
||||
} catch {
|
||||
// Client disconnected
|
||||
}
|
||||
}
|
||||
|
||||
if (chatResult.chatId) {
|
||||
pushEvent({ type: 'chat_id', chatId: chatResult.chatId })
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await orchestrateCopilotStream(requestPayload, {
|
||||
userId: session.user.id,
|
||||
workspaceId,
|
||||
chatId: chatResult.chatId || undefined,
|
||||
autoExecuteTools: true,
|
||||
interactive: false,
|
||||
onEvent: async (event: SSEEvent) => {
|
||||
pushEvent(event as unknown as Record<string, unknown>)
|
||||
},
|
||||
})
|
||||
|
||||
if (chatResult.chatId && result.conversationId) {
|
||||
await db
|
||||
.update(copilotChats)
|
||||
.set({
|
||||
updatedAt: new Date(),
|
||||
conversationId: result.conversationId,
|
||||
})
|
||||
.where(eq(copilotChats.id, chatResult.chatId))
|
||||
}
|
||||
|
||||
pushEvent({
|
||||
type: 'done',
|
||||
success: result.success,
|
||||
content: result.content,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Workspace chat orchestration failed', { error })
|
||||
pushEvent({
|
||||
type: 'error',
|
||||
error: error instanceof Error ? error.message : 'Chat failed',
|
||||
})
|
||||
} finally {
|
||||
controller.close()
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
return new Response(stream, {
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
'X-Accel-Buffering': 'no',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error('Workspace chat error', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import { mcpServers } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpService } from '@/lib/mcp/service'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
@@ -29,6 +30,17 @@ export const PATCH = withMcpAuth<{ id: string }>('write')(
|
||||
// Remove workspaceId from body to prevent it from being updated
|
||||
const { workspaceId: _, ...updateData } = body
|
||||
|
||||
if (updateData.url) {
|
||||
try {
|
||||
validateMcpDomain(updateData.url)
|
||||
} catch (e) {
|
||||
if (e instanceof McpDomainNotAllowedError) {
|
||||
return createMcpErrorResponse(e, e.message, 403)
|
||||
}
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Get the current server to check if URL is changing
|
||||
const [currentServer] = await db
|
||||
.select({ url: mcpServers.url })
|
||||
|
||||
@@ -3,6 +3,7 @@ import { mcpServers } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpService } from '@/lib/mcp/service'
|
||||
import {
|
||||
@@ -72,6 +73,15 @@ export const POST = withMcpAuth('write')(
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
validateMcpDomain(body.url)
|
||||
} catch (e) {
|
||||
if (e instanceof McpDomainNotAllowedError) {
|
||||
return createMcpErrorResponse(e, e.message, 403)
|
||||
}
|
||||
throw e
|
||||
}
|
||||
|
||||
const serverId = body.url ? generateMcpServerId(workspaceId, body.url) : crypto.randomUUID()
|
||||
|
||||
const [existingServer] = await db
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { McpClient } from '@/lib/mcp/client'
|
||||
import { McpDomainNotAllowedError, validateMcpDomain } from '@/lib/mcp/domain-check'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { resolveMcpConfigEnvVars } from '@/lib/mcp/resolve-config'
|
||||
import type { McpTransport } from '@/lib/mcp/types'
|
||||
@@ -71,6 +72,15 @@ export const POST = withMcpAuth('write')(
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
validateMcpDomain(body.url)
|
||||
} catch (e) {
|
||||
if (e instanceof McpDomainNotAllowedError) {
|
||||
return createMcpErrorResponse(e, e.message, 403)
|
||||
}
|
||||
throw e
|
||||
}
|
||||
|
||||
// Build initial config for resolution
|
||||
const initialConfig = {
|
||||
id: `test-${requestId}`,
|
||||
|
||||
14
apps/sim/app/api/settings/allowed-integrations/route.ts
Normal file
14
apps/sim/app/api/settings/allowed-integrations/route.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
|
||||
export async function GET() {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
allowedIntegrations: getAllowedIntegrationsFromEnv(),
|
||||
})
|
||||
}
|
||||
27
apps/sim/app/api/settings/allowed-mcp-domains/route.ts
Normal file
27
apps/sim/app/api/settings/allowed-mcp-domains/route.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getAllowedMcpDomainsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
|
||||
export async function GET() {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const configuredDomains = getAllowedMcpDomainsFromEnv()
|
||||
if (configuredDomains === null) {
|
||||
return NextResponse.json({ allowedMcpDomains: null })
|
||||
}
|
||||
|
||||
try {
|
||||
const platformHostname = new URL(getBaseUrl()).hostname.toLowerCase()
|
||||
if (!configuredDomains.includes(platformHostname)) {
|
||||
return NextResponse.json({
|
||||
allowedMcpDomains: [...configuredDomains, platformHostname],
|
||||
})
|
||||
}
|
||||
} catch {}
|
||||
|
||||
return NextResponse.json({ allowedMcpDomains: configuredDomains })
|
||||
}
|
||||
@@ -22,15 +22,20 @@ interface PipedriveFile {
|
||||
interface PipedriveApiResponse {
|
||||
success: boolean
|
||||
data?: PipedriveFile[]
|
||||
additional_data?: {
|
||||
pagination?: {
|
||||
more_items_in_collection: boolean
|
||||
next_start: number
|
||||
}
|
||||
}
|
||||
error?: string
|
||||
}
|
||||
|
||||
const PipedriveGetFilesSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
deal_id: z.string().optional().nullable(),
|
||||
person_id: z.string().optional().nullable(),
|
||||
org_id: z.string().optional().nullable(),
|
||||
sort: z.enum(['id', 'update_time']).optional().nullable(),
|
||||
limit: z.string().optional().nullable(),
|
||||
start: z.string().optional().nullable(),
|
||||
downloadFiles: z.boolean().optional().default(false),
|
||||
})
|
||||
|
||||
@@ -54,20 +59,19 @@ export async function POST(request: NextRequest) {
|
||||
const body = await request.json()
|
||||
const validatedData = PipedriveGetFilesSchema.parse(body)
|
||||
|
||||
const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
|
||||
const { accessToken, sort, limit, start, downloadFiles } = validatedData
|
||||
|
||||
const baseUrl = 'https://api.pipedrive.com/v1/files'
|
||||
const queryParams = new URLSearchParams()
|
||||
|
||||
if (deal_id) queryParams.append('deal_id', deal_id)
|
||||
if (person_id) queryParams.append('person_id', person_id)
|
||||
if (org_id) queryParams.append('org_id', org_id)
|
||||
if (sort) queryParams.append('sort', sort)
|
||||
if (limit) queryParams.append('limit', limit)
|
||||
if (start) queryParams.append('start', start)
|
||||
|
||||
const queryString = queryParams.toString()
|
||||
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
|
||||
|
||||
logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
|
||||
logger.info(`[${requestId}] Fetching files from Pipedrive`)
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
|
||||
if (!urlValidation.isValid) {
|
||||
@@ -93,6 +97,8 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
|
||||
const files = data.data || []
|
||||
const hasMore = data.additional_data?.pagination?.more_items_in_collection || false
|
||||
const nextStart = data.additional_data?.pagination?.next_start ?? null
|
||||
const downloadedFiles: Array<{
|
||||
name: string
|
||||
mimeType: string
|
||||
@@ -149,6 +155,8 @@ export async function POST(request: NextRequest) {
|
||||
files,
|
||||
downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
|
||||
total_items: files.length,
|
||||
has_more: hasMore,
|
||||
next_start: nextStart,
|
||||
success: true,
|
||||
},
|
||||
})
|
||||
|
||||
@@ -30,21 +30,6 @@ export const ChatMessageContainer = memo(function ChatMessageContainer({
|
||||
}: ChatMessageContainerProps) {
|
||||
return (
|
||||
<div className='relative flex flex-1 flex-col overflow-hidden bg-white'>
|
||||
<style jsx>{`
|
||||
@keyframes growShrink {
|
||||
0%,
|
||||
100% {
|
||||
transform: scale(0.9);
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.1);
|
||||
}
|
||||
}
|
||||
.loading-dot {
|
||||
animation: growShrink 1.5s infinite ease-in-out;
|
||||
}
|
||||
`}</style>
|
||||
|
||||
{/* Scrollable Messages Area */}
|
||||
<div
|
||||
ref={messagesContainerRef}
|
||||
|
||||
@@ -71,7 +71,7 @@ export function VoiceInterface({
|
||||
const [state, setState] = useState<'idle' | 'listening' | 'agent_speaking'>('idle')
|
||||
const [isInitialized, setIsInitialized] = useState(false)
|
||||
const [isMuted, setIsMuted] = useState(false)
|
||||
const [audioLevels, setAudioLevels] = useState<number[]>(new Array(200).fill(0))
|
||||
const [audioLevels, setAudioLevels] = useState<number[]>(() => new Array(200).fill(0))
|
||||
const [permissionStatus, setPermissionStatus] = useState<'prompt' | 'granted' | 'denied'>(
|
||||
'prompt'
|
||||
)
|
||||
|
||||
@@ -1,150 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { Send, Square } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { useWorkspaceChat } from './hooks/use-workspace-chat'
|
||||
|
||||
export function Chat() {
|
||||
const { workspaceId } = useParams<{ workspaceId: string }>()
|
||||
const [inputValue, setInputValue] = useState('')
|
||||
const inputRef = useRef<HTMLTextAreaElement>(null)
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
const { messages, isSending, error, sendMessage, abortMessage } = useWorkspaceChat({
|
||||
workspaceId,
|
||||
})
|
||||
|
||||
const scrollToBottom = useCallback(() => {
|
||||
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
|
||||
}, [])
|
||||
|
||||
const handleSubmit = useCallback(async () => {
|
||||
const trimmed = inputValue.trim()
|
||||
if (!trimmed || !workspaceId) return
|
||||
|
||||
setInputValue('')
|
||||
await sendMessage(trimmed)
|
||||
scrollToBottom()
|
||||
}, [inputValue, workspaceId, sendMessage, scrollToBottom])
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
e.preventDefault()
|
||||
handleSubmit()
|
||||
}
|
||||
},
|
||||
[handleSubmit]
|
||||
)
|
||||
|
||||
return (
|
||||
<div className='flex h-full flex-col'>
|
||||
{/* Header */}
|
||||
<div className='flex flex-shrink-0 items-center border-b border-[var(--border)] px-6 py-3'>
|
||||
<h1 className='font-medium text-[16px] text-[var(--text-primary)]'>Chat</h1>
|
||||
</div>
|
||||
|
||||
{/* Messages area */}
|
||||
<div className='flex-1 overflow-y-auto px-6 py-4'>
|
||||
{messages.length === 0 && !isSending ? (
|
||||
<div className='flex h-full items-center justify-center'>
|
||||
<div className='flex flex-col items-center gap-3 text-center'>
|
||||
<p className='text-[var(--text-secondary)] text-sm'>
|
||||
Ask anything about your workspace — build workflows, manage resources, get help.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className='mx-auto max-w-3xl space-y-4'>
|
||||
{messages.map((msg) => {
|
||||
const isStreamingEmpty =
|
||||
isSending && msg.role === 'assistant' && !msg.content
|
||||
if (isStreamingEmpty) {
|
||||
return (
|
||||
<div key={msg.id} className='flex justify-start'>
|
||||
<div className='rounded-lg bg-[var(--surface-3)] px-4 py-2 text-sm text-[var(--text-secondary)]'>
|
||||
Thinking...
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
if (msg.role === 'assistant' && !msg.content) return null
|
||||
return (
|
||||
<div
|
||||
key={msg.id}
|
||||
className={cn(
|
||||
'flex',
|
||||
msg.role === 'user' ? 'justify-end' : 'justify-start'
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'max-w-[85%] rounded-lg px-4 py-2 text-sm',
|
||||
msg.role === 'user'
|
||||
? 'bg-[var(--accent)] text-[var(--accent-foreground)]'
|
||||
: 'bg-[var(--surface-3)] text-[var(--text-primary)]'
|
||||
)}
|
||||
>
|
||||
<p className='whitespace-pre-wrap'>{msg.content}</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Error display */}
|
||||
{error && (
|
||||
<div className='px-6 pb-2'>
|
||||
<p className='text-xs text-red-500'>{error}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Input area */}
|
||||
<div className='flex-shrink-0 border-t border-[var(--border)] px-6 py-4'>
|
||||
<div className='mx-auto flex max-w-3xl items-end gap-2'>
|
||||
<textarea
|
||||
ref={inputRef}
|
||||
value={inputValue}
|
||||
onChange={(e) => setInputValue(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder='Send a message...'
|
||||
rows={1}
|
||||
className='flex-1 resize-none rounded-lg border border-[var(--border)] bg-[var(--surface-2)] px-4 py-2.5 text-sm text-[var(--text-primary)] placeholder:text-[var(--text-tertiary)] focus:border-[var(--accent)] focus:outline-none'
|
||||
style={{ maxHeight: '120px' }}
|
||||
onInput={(e) => {
|
||||
const target = e.target as HTMLTextAreaElement
|
||||
target.style.height = 'auto'
|
||||
target.style.height = `${Math.min(target.scrollHeight, 120)}px`
|
||||
}}
|
||||
/>
|
||||
{isSending ? (
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={abortMessage}
|
||||
className='h-[38px] w-[38px] flex-shrink-0 p-0'
|
||||
>
|
||||
<Square className='h-4 w-4' />
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={handleSubmit}
|
||||
disabled={!inputValue.trim()}
|
||||
className='h-[38px] w-[38px] flex-shrink-0 p-0'
|
||||
>
|
||||
<Send className='h-4 w-4' />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,173 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
|
||||
const logger = createLogger('useWorkspaceChat')
|
||||
|
||||
interface ChatMessage {
|
||||
id: string
|
||||
role: 'user' | 'assistant'
|
||||
content: string
|
||||
timestamp: string
|
||||
}
|
||||
|
||||
interface UseWorkspaceChatProps {
|
||||
workspaceId: string
|
||||
}
|
||||
|
||||
interface UseWorkspaceChatReturn {
|
||||
messages: ChatMessage[]
|
||||
isSending: boolean
|
||||
error: string | null
|
||||
sendMessage: (message: string) => Promise<void>
|
||||
abortMessage: () => void
|
||||
clearMessages: () => void
|
||||
}
|
||||
|
||||
export function useWorkspaceChat({ workspaceId }: UseWorkspaceChatProps): UseWorkspaceChatReturn {
|
||||
const [messages, setMessages] = useState<ChatMessage[]>([])
|
||||
const [isSending, setIsSending] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const abortControllerRef = useRef<AbortController | null>(null)
|
||||
const chatIdRef = useRef<string | undefined>(undefined)
|
||||
|
||||
const sendMessage = useCallback(
|
||||
async (message: string) => {
|
||||
if (!message.trim() || !workspaceId) return
|
||||
|
||||
setError(null)
|
||||
setIsSending(true)
|
||||
|
||||
const userMessage: ChatMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
content: message,
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const assistantMessage: ChatMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'assistant',
|
||||
content: '',
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
setMessages((prev) => [...prev, userMessage, assistantMessage])
|
||||
|
||||
const abortController = new AbortController()
|
||||
abortControllerRef.current = abortController
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/copilot/workspace-chat', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
message,
|
||||
workspaceId,
|
||||
...(chatIdRef.current ? { chatId: chatIdRef.current } : {}),
|
||||
}),
|
||||
signal: abortController.signal,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({}))
|
||||
throw new Error(errorData.error || `Request failed: ${response.status}`)
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
throw new Error('No response body')
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
|
||||
const lines = buffer.split('\n')
|
||||
buffer = lines.pop() || ''
|
||||
|
||||
for (const line of lines) {
|
||||
if (!line.startsWith('data: ')) continue
|
||||
|
||||
try {
|
||||
const event = JSON.parse(line.slice(6))
|
||||
|
||||
if (event.type === 'chat_id' && event.chatId) {
|
||||
chatIdRef.current = event.chatId
|
||||
} else if (event.type === 'content' && event.content) {
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) =>
|
||||
msg.id === assistantMessage.id
|
||||
? { ...msg, content: msg.content + event.content }
|
||||
: msg
|
||||
)
|
||||
)
|
||||
} else if (event.type === 'error') {
|
||||
setError(event.error || 'An error occurred')
|
||||
} else if (event.type === 'done') {
|
||||
if (event.content && typeof event.content === 'string') {
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) =>
|
||||
msg.id === assistantMessage.id && !msg.content
|
||||
? { ...msg, content: event.content }
|
||||
: msg
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Skip malformed SSE lines
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.name === 'AbortError') {
|
||||
logger.info('Message aborted by user')
|
||||
return
|
||||
}
|
||||
|
||||
const errorMessage = err instanceof Error ? err.message : 'Failed to send message'
|
||||
logger.error('Failed to send workspace chat message', { error: errorMessage })
|
||||
setError(errorMessage)
|
||||
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) =>
|
||||
msg.id === assistantMessage.id && !msg.content
|
||||
? { ...msg, content: 'Sorry, something went wrong. Please try again.' }
|
||||
: msg
|
||||
)
|
||||
)
|
||||
} finally {
|
||||
setIsSending(false)
|
||||
abortControllerRef.current = null
|
||||
}
|
||||
},
|
||||
[workspaceId]
|
||||
)
|
||||
|
||||
const abortMessage = useCallback(() => {
|
||||
abortControllerRef.current?.abort()
|
||||
setIsSending(false)
|
||||
}, [])
|
||||
|
||||
const clearMessages = useCallback(() => {
|
||||
setMessages([])
|
||||
setError(null)
|
||||
chatIdRef.current = undefined
|
||||
}, [])
|
||||
|
||||
return {
|
||||
messages,
|
||||
isSending,
|
||||
error,
|
||||
sendMessage,
|
||||
abortMessage,
|
||||
clearMessages,
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
export default function ChatLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<div className='flex h-full flex-1 flex-col overflow-hidden pl-[var(--sidebar-width)]'>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
import { redirect } from 'next/navigation'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||
import { Chat } from './chat'
|
||||
|
||||
interface ChatPageProps {
|
||||
params: Promise<{
|
||||
workspaceId: string
|
||||
}>
|
||||
}
|
||||
|
||||
export default async function ChatPage({ params }: ChatPageProps) {
|
||||
const { workspaceId } = await params
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
|
||||
if (!hasPermission) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
return <Chat />
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { redirect } from 'next/navigation'
|
||||
import { redirect, unstable_rethrow } from 'next/navigation'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace'
|
||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||
@@ -14,24 +14,27 @@ interface FileViewerPageProps {
|
||||
export default async function FileViewerPage({ params }: FileViewerPageProps) {
|
||||
const { workspaceId, fileId } = await params
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
redirect('/')
|
||||
}
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
|
||||
if (!hasPermission) {
|
||||
redirect(`/workspace/${workspaceId}`)
|
||||
}
|
||||
|
||||
const fileRecord = await getWorkspaceFile(workspaceId, fileId)
|
||||
if (!fileRecord) {
|
||||
redirect(`/workspace/${workspaceId}`)
|
||||
}
|
||||
|
||||
return <FileViewer file={fileRecord} />
|
||||
} catch (error) {
|
||||
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
|
||||
if (!hasPermission) {
|
||||
redirect(`/workspace/${workspaceId}`)
|
||||
}
|
||||
|
||||
let fileRecord: Awaited<ReturnType<typeof getWorkspaceFile>>
|
||||
try {
|
||||
fileRecord = await getWorkspaceFile(workspaceId, fileId)
|
||||
} catch (error) {
|
||||
unstable_rethrow(error)
|
||||
redirect(`/workspace/${workspaceId}`)
|
||||
}
|
||||
|
||||
if (!fileRecord) {
|
||||
redirect(`/workspace/${workspaceId}`)
|
||||
}
|
||||
|
||||
return <FileViewer file={fileRecord} />
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/compo
|
||||
import { getDisplayValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { CopilotToolCall } from '@/stores/panel'
|
||||
import { useCopilotStore } from '@/stores/panel'
|
||||
import { useCopilotStore, usePanelStore } from '@/stores/panel'
|
||||
import type { SubAgentContentBlock } from '@/stores/panel/copilot/types'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
@@ -341,16 +341,20 @@ export function OptionsSelector({
|
||||
const [hoveredIndex, setHoveredIndex] = useState(-1)
|
||||
const [chosenKey, setChosenKey] = useState<string | null>(selectedOptionKey)
|
||||
const containerRef = useRef<HTMLDivElement>(null)
|
||||
const activeTab = usePanelStore((s) => s.activeTab)
|
||||
|
||||
const isLocked = chosenKey !== null
|
||||
|
||||
// Handle keyboard navigation - only for the active options selector
|
||||
// Handle keyboard navigation - only for the active options selector when copilot is active
|
||||
useEffect(() => {
|
||||
if (isInteractionDisabled || !enableKeyboardNav || isLocked) return
|
||||
|
||||
const handleKeyDown = (e: KeyboardEvent) => {
|
||||
if (e.defaultPrevented) return
|
||||
|
||||
// Only handle keyboard shortcuts when the copilot panel is active
|
||||
if (activeTab !== 'copilot') return
|
||||
|
||||
const activeElement = document.activeElement
|
||||
const isInputFocused =
|
||||
activeElement?.tagName === 'INPUT' ||
|
||||
@@ -387,7 +391,15 @@ export function OptionsSelector({
|
||||
|
||||
document.addEventListener('keydown', handleKeyDown)
|
||||
return () => document.removeEventListener('keydown', handleKeyDown)
|
||||
}, [isInteractionDisabled, enableKeyboardNav, isLocked, sortedOptions, hoveredIndex, onSelect])
|
||||
}, [
|
||||
isInteractionDisabled,
|
||||
enableKeyboardNav,
|
||||
isLocked,
|
||||
sortedOptions,
|
||||
hoveredIndex,
|
||||
onSelect,
|
||||
activeTab,
|
||||
])
|
||||
|
||||
if (sortedOptions.length === 0) return null
|
||||
|
||||
|
||||
@@ -131,10 +131,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
resumeActiveStream,
|
||||
})
|
||||
|
||||
// Handle scroll management (80px stickiness for copilot)
|
||||
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage, {
|
||||
stickinessThreshold: 40,
|
||||
})
|
||||
// Handle scroll management
|
||||
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage)
|
||||
|
||||
// Handle chat history grouping
|
||||
const { groupedChats, handleHistoryDropdownOpen: handleHistoryDropdownOpenHook } = useChatHistory(
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import { useReactFlow } from 'reactflow'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
import { Badge } from '@/components/emcn'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
|
||||
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
|
||||
import { Button, Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
|
||||
import { Trash } from '@/components/emcn/icons/trash'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { type JSX, type MouseEvent, memo, useCallback, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
|
||||
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import {
|
||||
BookOpen,
|
||||
Check,
|
||||
|
||||
@@ -10,40 +10,6 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/componen
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { usePanelEditorStore } from '@/stores/panel'
|
||||
|
||||
/**
|
||||
* Global styles for subflow nodes (loop and parallel containers).
|
||||
* Includes animations for drag-over states and hover effects.
|
||||
*
|
||||
* @returns Style component with global CSS
|
||||
*/
|
||||
const SubflowNodeStyles: React.FC = () => {
|
||||
return (
|
||||
<style jsx global>{`
|
||||
/* Z-index management for subflow nodes - default behind blocks */
|
||||
.workflow-container .react-flow__node-subflowNode {
|
||||
z-index: -1 !important;
|
||||
}
|
||||
|
||||
/* Selected subflows appear above other subflows but below blocks (z-21) */
|
||||
.workflow-container .react-flow__node-subflowNode:has([data-subflow-selected='true']) {
|
||||
z-index: 10 !important;
|
||||
}
|
||||
|
||||
/* Drag-over states */
|
||||
.loop-node-drag-over,
|
||||
.parallel-node-drag-over {
|
||||
box-shadow: 0 0 0 1.75px var(--brand-secondary) !important;
|
||||
border-radius: 8px !important;
|
||||
}
|
||||
|
||||
/* Handle z-index for nested nodes */
|
||||
.react-flow__node[data-parent-node-id] .react-flow__handle {
|
||||
z-index: 30;
|
||||
}
|
||||
`}</style>
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Data structure for subflow nodes (loop and parallel containers)
|
||||
*/
|
||||
@@ -151,133 +117,130 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
||||
)
|
||||
|
||||
return (
|
||||
<>
|
||||
<SubflowNodeStyles />
|
||||
<div className='group relative'>
|
||||
<div className='group relative'>
|
||||
<div
|
||||
ref={blockRef}
|
||||
onClick={() => setCurrentBlockId(id)}
|
||||
className={cn(
|
||||
'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
|
||||
'transition-block-bg transition-ring',
|
||||
'z-[20]'
|
||||
)}
|
||||
style={{
|
||||
width: data.width || 500,
|
||||
height: data.height || 300,
|
||||
position: 'relative',
|
||||
overflow: 'visible',
|
||||
pointerEvents: isPreview ? 'none' : 'all',
|
||||
}}
|
||||
data-node-id={id}
|
||||
data-type='subflowNode'
|
||||
data-nesting-level={nestingLevel}
|
||||
data-subflow-selected={isFocused || isSelected || isPreviewSelected}
|
||||
>
|
||||
{!isPreview && (
|
||||
<ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
|
||||
)}
|
||||
|
||||
{/* Header Section */}
|
||||
<div
|
||||
ref={blockRef}
|
||||
onClick={() => setCurrentBlockId(id)}
|
||||
className={cn(
|
||||
'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
|
||||
'transition-block-bg transition-ring',
|
||||
'z-[20]'
|
||||
'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
|
||||
)}
|
||||
style={{
|
||||
width: data.width || 500,
|
||||
height: data.height || 300,
|
||||
position: 'relative',
|
||||
overflow: 'visible',
|
||||
pointerEvents: isPreview ? 'none' : 'all',
|
||||
}}
|
||||
data-node-id={id}
|
||||
data-type='subflowNode'
|
||||
data-nesting-level={nestingLevel}
|
||||
data-subflow-selected={isFocused || isSelected || isPreviewSelected}
|
||||
>
|
||||
{!isPreview && (
|
||||
<ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
|
||||
)}
|
||||
|
||||
{/* Header Section */}
|
||||
<div
|
||||
className={cn(
|
||||
'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
|
||||
)}
|
||||
>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
|
||||
<div
|
||||
className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
|
||||
style={{ backgroundColor: isEnabled ? blockIconBg : 'gray' }}
|
||||
>
|
||||
<BlockIcon className='h-[16px] w-[16px] text-white' />
|
||||
</div>
|
||||
<span
|
||||
className={cn(
|
||||
'truncate font-medium text-[16px]',
|
||||
!isEnabled && 'text-[var(--text-muted)]'
|
||||
)}
|
||||
title={blockName}
|
||||
>
|
||||
{blockName}
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex items-center gap-1'>
|
||||
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
|
||||
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{!isPreview && (
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
|
||||
<div
|
||||
className='absolute right-[8px] bottom-[8px] z-20 flex h-[32px] w-[32px] cursor-se-resize items-center justify-center text-muted-foreground'
|
||||
style={{ pointerEvents: 'auto' }}
|
||||
/>
|
||||
)}
|
||||
|
||||
<div
|
||||
className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
|
||||
data-dragarea='true'
|
||||
style={{
|
||||
position: 'relative',
|
||||
pointerEvents: isPreview ? 'none' : 'auto',
|
||||
}}
|
||||
>
|
||||
{/* Subflow Start */}
|
||||
<div
|
||||
className='absolute top-[16px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
data-parent-id={id}
|
||||
data-node-role={`${data.kind}-start`}
|
||||
data-extent='parent'
|
||||
className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
|
||||
style={{ backgroundColor: isEnabled ? blockIconBg : 'gray' }}
|
||||
>
|
||||
<span className='font-medium text-[14px] text-[var(--text-primary)]'>Start</span>
|
||||
|
||||
<Handle
|
||||
type='source'
|
||||
position={Position.Right}
|
||||
id={startHandleId}
|
||||
className={getHandleClasses('right')}
|
||||
style={{
|
||||
top: '50%',
|
||||
transform: 'translateY(-50%)',
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
data-parent-id={id}
|
||||
/>
|
||||
<BlockIcon className='h-[16px] w-[16px] text-white' />
|
||||
</div>
|
||||
<span
|
||||
className={cn(
|
||||
'truncate font-medium text-[16px]',
|
||||
!isEnabled && 'text-[var(--text-muted)]'
|
||||
)}
|
||||
title={blockName}
|
||||
>
|
||||
{blockName}
|
||||
</span>
|
||||
</div>
|
||||
<div className='flex items-center gap-1'>
|
||||
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
|
||||
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
|
||||
</div>
|
||||
|
||||
{/* Input handle on left middle */}
|
||||
<Handle
|
||||
type='target'
|
||||
position={Position.Left}
|
||||
className={getHandleClasses('left')}
|
||||
style={{
|
||||
...getHandleStyle(),
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Output handle on right middle */}
|
||||
<Handle
|
||||
type='source'
|
||||
position={Position.Right}
|
||||
className={getHandleClasses('right')}
|
||||
style={{
|
||||
...getHandleStyle(),
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
id={endHandleId}
|
||||
/>
|
||||
|
||||
{hasRing && (
|
||||
<div
|
||||
className={cn('pointer-events-none absolute inset-0 z-40 rounded-[8px]', ringStyles)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{!isPreview && (
|
||||
<div
|
||||
className='absolute right-[8px] bottom-[8px] z-20 flex h-[32px] w-[32px] cursor-se-resize items-center justify-center text-muted-foreground'
|
||||
style={{ pointerEvents: 'auto' }}
|
||||
/>
|
||||
)}
|
||||
|
||||
<div
|
||||
className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
|
||||
data-dragarea='true'
|
||||
style={{
|
||||
position: 'relative',
|
||||
pointerEvents: isPreview ? 'none' : 'auto',
|
||||
}}
|
||||
>
|
||||
{/* Subflow Start */}
|
||||
<div
|
||||
className='absolute top-[16px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'
|
||||
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
|
||||
data-parent-id={id}
|
||||
data-node-role={`${data.kind}-start`}
|
||||
data-extent='parent'
|
||||
>
|
||||
<span className='font-medium text-[14px] text-[var(--text-primary)]'>Start</span>
|
||||
|
||||
<Handle
|
||||
type='source'
|
||||
position={Position.Right}
|
||||
id={startHandleId}
|
||||
className={getHandleClasses('right')}
|
||||
style={{
|
||||
top: '50%',
|
||||
transform: 'translateY(-50%)',
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
data-parent-id={id}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Input handle on left middle */}
|
||||
<Handle
|
||||
type='target'
|
||||
position={Position.Left}
|
||||
className={getHandleClasses('left')}
|
||||
style={{
|
||||
...getHandleStyle(),
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Output handle on right middle */}
|
||||
<Handle
|
||||
type='source'
|
||||
position={Position.Right}
|
||||
className={getHandleClasses('right')}
|
||||
style={{
|
||||
...getHandleStyle(),
|
||||
pointerEvents: 'auto',
|
||||
}}
|
||||
id={endHandleId}
|
||||
/>
|
||||
|
||||
{hasRing && (
|
||||
<div
|
||||
className={cn('pointer-events-none absolute inset-0 z-40 rounded-[8px]', ringStyles)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
</div>
|
||||
)
|
||||
})
|
||||
|
||||
|
||||
@@ -134,57 +134,6 @@ export function WandPromptBar({
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<style jsx global>{`
|
||||
|
||||
@keyframes smoke-pulse {
|
||||
0%,
|
||||
100% {
|
||||
transform: scale(0.8);
|
||||
opacity: 0.4;
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.1);
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
.status-indicator {
|
||||
position: relative;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
border-radius: 50%;
|
||||
overflow: hidden;
|
||||
background-color: hsl(var(--muted-foreground) / 0.5);
|
||||
transition: background-color 0.3s ease;
|
||||
}
|
||||
|
||||
.status-indicator.streaming {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.status-indicator.streaming::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
border-radius: 50%;
|
||||
background: radial-gradient(
|
||||
circle,
|
||||
hsl(var(--primary) / 0.9) 0%,
|
||||
hsl(var(--primary) / 0.4) 60%,
|
||||
transparent 80%
|
||||
);
|
||||
animation: smoke-pulse 1.8s ease-in-out infinite;
|
||||
opacity: 0.9;
|
||||
}
|
||||
|
||||
.dark .status-indicator.streaming::before {
|
||||
background: #6b7280;
|
||||
opacity: 0.9;
|
||||
animation: smoke-pulse 1.8s ease-in-out infinite;
|
||||
}
|
||||
|
||||
`}</style>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { memo, useCallback, useEffect, useMemo, useRef } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { isEqual } from 'lodash'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
|
||||
@@ -16,7 +16,7 @@ interface UseScrollManagementOptions {
|
||||
/**
|
||||
* Distance from bottom (in pixels) within which auto-scroll stays active
|
||||
* @remarks Lower values = less sticky (user can scroll away easier)
|
||||
* @defaultValue 100
|
||||
* @defaultValue 30
|
||||
*/
|
||||
stickinessThreshold?: number
|
||||
}
|
||||
@@ -41,7 +41,7 @@ export function useScrollManagement(
|
||||
const lastScrollTopRef = useRef(0)
|
||||
|
||||
const scrollBehavior = options?.behavior ?? 'smooth'
|
||||
const stickinessThreshold = options?.stickinessThreshold ?? 100
|
||||
const stickinessThreshold = options?.stickinessThreshold ?? 30
|
||||
|
||||
/** Scrolls the container to the bottom */
|
||||
const scrollToBottom = useCallback(() => {
|
||||
|
||||
@@ -36,17 +36,18 @@ export function isBlockProtected(blockId: string, blocks: Record<string, BlockSt
|
||||
|
||||
/**
|
||||
* Checks if an edge is protected from modification.
|
||||
* An edge is protected if either its source or target block is protected.
|
||||
* An edge is protected only if its target block is protected.
|
||||
* Outbound connections from locked blocks are allowed to be modified.
|
||||
*
|
||||
* @param edge - The edge to check (must have source and target)
|
||||
* @param blocks - Record of all blocks in the workflow
|
||||
* @returns True if the edge is protected
|
||||
* @returns True if the edge is protected (target is locked)
|
||||
*/
|
||||
export function isEdgeProtected(
|
||||
edge: { source: string; target: string },
|
||||
blocks: Record<string, BlockState>
|
||||
): boolean {
|
||||
return isBlockProtected(edge.source, blocks) || isBlockProtected(edge.target, blocks)
|
||||
return isBlockProtected(edge.target, blocks)
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -2523,7 +2523,7 @@ const WorkflowContent = React.memo(() => {
|
||||
.filter((change: any) => change.type === 'remove')
|
||||
.map((change: any) => change.id)
|
||||
.filter((edgeId: string) => {
|
||||
// Prevent removing edges connected to protected blocks
|
||||
// Prevent removing edges targeting protected blocks
|
||||
const edge = edges.find((e) => e.id === edgeId)
|
||||
if (!edge) return true
|
||||
return !isEdgeProtected(edge, blocks)
|
||||
@@ -2595,7 +2595,7 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
if (!sourceNode || !targetNode) return
|
||||
|
||||
// Prevent connections to/from protected blocks
|
||||
// Prevent connections to protected blocks (outbound from locked blocks is allowed)
|
||||
if (isEdgeProtected(connection, blocks)) {
|
||||
addNotification({
|
||||
level: 'info',
|
||||
@@ -3357,12 +3357,12 @@ const WorkflowContent = React.memo(() => {
|
||||
/** Stable delete handler to avoid creating new function references per edge. */
|
||||
const handleEdgeDelete = useCallback(
|
||||
(edgeId: string) => {
|
||||
// Prevent removing edges connected to protected blocks
|
||||
// Prevent removing edges targeting protected blocks
|
||||
const edge = edges.find((e) => e.id === edgeId)
|
||||
if (edge && isEdgeProtected(edge, blocks)) {
|
||||
addNotification({
|
||||
level: 'info',
|
||||
message: 'Cannot remove connections from locked blocks',
|
||||
message: 'Cannot remove connections to locked blocks',
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
return
|
||||
@@ -3420,7 +3420,7 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
// Handle edge deletion first (edges take priority if selected)
|
||||
if (selectedEdges.size > 0) {
|
||||
// Get all selected edge IDs and filter out edges connected to protected blocks
|
||||
// Get all selected edge IDs and filter out edges targeting protected blocks
|
||||
const edgeIds = Array.from(selectedEdges.values()).filter((edgeId) => {
|
||||
const edge = edges.find((e) => e.id === edgeId)
|
||||
if (!edge) return true
|
||||
|
||||
@@ -514,6 +514,7 @@ export function HelpModal({ open, onOpenChange, workflowId, workspaceId }: HelpM
|
||||
alt={`Preview ${index + 1}`}
|
||||
fill
|
||||
unoptimized
|
||||
sizes='(max-width: 768px) 100vw, 50vw'
|
||||
className='object-contain'
|
||||
/>
|
||||
<button
|
||||
|
||||
@@ -223,13 +223,11 @@ export function Integrations({ onOpenChange, registerCloseHandler }: Integration
|
||||
}
|
||||
}
|
||||
|
||||
// Group services by provider, filtering by permission config
|
||||
const groupedServices = services.reduce(
|
||||
(acc, service) => {
|
||||
// Filter based on allowedIntegrations
|
||||
if (
|
||||
permissionConfig.allowedIntegrations !== null &&
|
||||
!permissionConfig.allowedIntegrations.includes(service.id)
|
||||
!permissionConfig.allowedIntegrations.includes(service.id.replace(/-/g, '_'))
|
||||
) {
|
||||
return acc
|
||||
}
|
||||
|
||||
@@ -106,6 +106,21 @@ interface McpServer {
|
||||
|
||||
const logger = createLogger('McpSettings')
|
||||
|
||||
/**
|
||||
* Checks if a URL's hostname is in the allowed domains list.
|
||||
* Returns true if no allowlist is configured (null) or the domain matches.
|
||||
*/
|
||||
function isDomainAllowed(url: string | undefined, allowedDomains: string[] | null): boolean {
|
||||
if (allowedDomains === null) return true
|
||||
if (!url) return true
|
||||
try {
|
||||
const hostname = new URL(url).hostname.toLowerCase()
|
||||
return allowedDomains.includes(hostname)
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_FORM_DATA: McpServerFormData = {
|
||||
name: '',
|
||||
transport: 'streamable-http',
|
||||
@@ -390,6 +405,15 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
} = useMcpServerTest()
|
||||
const availableEnvVars = useAvailableEnvVarKeys(workspaceId)
|
||||
|
||||
const [allowedMcpDomains, setAllowedMcpDomains] = useState<string[] | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
fetch('/api/settings/allowed-mcp-domains')
|
||||
.then((res) => res.json())
|
||||
.then((data) => setAllowedMcpDomains(data.allowedMcpDomains ?? null))
|
||||
.catch(() => setAllowedMcpDomains(null))
|
||||
}, [])
|
||||
|
||||
const urlInputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
const [showAddForm, setShowAddForm] = useState(false)
|
||||
@@ -1006,10 +1030,12 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
const showNoResults = searchTerm.trim() && filteredServers.length === 0 && servers.length > 0
|
||||
|
||||
const isFormValid = formData.name.trim() && formData.url?.trim()
|
||||
const isSubmitDisabled = serversLoading || isAddingServer || !isFormValid
|
||||
const isAddDomainBlocked = !isDomainAllowed(formData.url, allowedMcpDomains)
|
||||
const isSubmitDisabled = serversLoading || isAddingServer || !isFormValid || isAddDomainBlocked
|
||||
const testButtonLabel = getTestButtonLabel(testResult, isTestingConnection)
|
||||
|
||||
const isEditFormValid = editFormData.name.trim() && editFormData.url?.trim()
|
||||
const isEditDomainBlocked = !isDomainAllowed(editFormData.url, allowedMcpDomains)
|
||||
const editTestButtonLabel = getTestButtonLabel(editTestResult, isEditTestingConnection)
|
||||
const hasEditChanges = useMemo(() => {
|
||||
if (editFormData.name !== editOriginalData.name) return true
|
||||
@@ -1299,6 +1325,11 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
onChange={(e) => handleEditInputChange('url', e.target.value)}
|
||||
onScroll={setEditUrlScrollLeft}
|
||||
/>
|
||||
{isEditDomainBlocked && (
|
||||
<p className='mt-[4px] text-[12px] text-[var(--text-error)]'>
|
||||
Domain not permitted by server policy
|
||||
</p>
|
||||
)}
|
||||
</FormField>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
@@ -1351,7 +1382,7 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={handleEditTestConnection}
|
||||
disabled={isEditTestingConnection || !isEditFormValid}
|
||||
disabled={isEditTestingConnection || !isEditFormValid || isEditDomainBlocked}
|
||||
>
|
||||
{editTestButtonLabel}
|
||||
</Button>
|
||||
@@ -1361,7 +1392,9 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleSaveEdit}
|
||||
disabled={!hasEditChanges || isUpdatingServer || !isEditFormValid}
|
||||
disabled={
|
||||
!hasEditChanges || isUpdatingServer || !isEditFormValid || isEditDomainBlocked
|
||||
}
|
||||
variant='tertiary'
|
||||
>
|
||||
{isUpdatingServer ? 'Saving...' : 'Save'}
|
||||
@@ -1434,6 +1467,11 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
onChange={(e) => handleInputChange('url', e.target.value)}
|
||||
onScroll={(scrollLeft) => handleUrlScroll(scrollLeft)}
|
||||
/>
|
||||
{isAddDomainBlocked && (
|
||||
<p className='mt-[4px] text-[12px] text-[var(--text-error)]'>
|
||||
Domain not permitted by server policy
|
||||
</p>
|
||||
)}
|
||||
</FormField>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
@@ -1479,7 +1517,7 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={handleTestConnection}
|
||||
disabled={isTestingConnection || !isFormValid}
|
||||
disabled={isTestingConnection || !isFormValid || isAddDomainBlocked}
|
||||
>
|
||||
{testButtonLabel}
|
||||
</Button>
|
||||
@@ -1489,7 +1527,9 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
Cancel
|
||||
</Button>
|
||||
<Button onClick={handleAddServer} disabled={isSubmitDisabled} variant='tertiary'>
|
||||
{isSubmitDisabled && isFormValid ? 'Adding...' : 'Add Server'}
|
||||
{isSubmitDisabled && isFormValid && !isAddDomainBlocked
|
||||
? 'Adding...'
|
||||
: 'Add Server'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -165,12 +165,16 @@ export function CancelSubscription({ subscription, subscriptionData }: CancelSub
|
||||
logger.info('Subscription restored successfully', result)
|
||||
}
|
||||
|
||||
await queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
||||
if (activeOrgId) {
|
||||
await queryClient.invalidateQueries({ queryKey: organizationKeys.detail(activeOrgId) })
|
||||
await queryClient.invalidateQueries({ queryKey: organizationKeys.billing(activeOrgId) })
|
||||
await queryClient.invalidateQueries({ queryKey: organizationKeys.lists() })
|
||||
}
|
||||
await Promise.all([
|
||||
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all }),
|
||||
...(activeOrgId
|
||||
? [
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.detail(activeOrgId) }),
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.billing(activeOrgId) }),
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.lists() }),
|
||||
]
|
||||
: []),
|
||||
])
|
||||
|
||||
setIsDialogOpen(false)
|
||||
} catch (err) {
|
||||
|
||||
@@ -7,6 +7,8 @@ export interface SubscriptionPermissions {
|
||||
canCancelSubscription: boolean
|
||||
showTeamMemberView: boolean
|
||||
showUpgradePlans: boolean
|
||||
isEnterpriseMember: boolean
|
||||
canViewUsageInfo: boolean
|
||||
}
|
||||
|
||||
export interface SubscriptionState {
|
||||
@@ -31,6 +33,9 @@ export function getSubscriptionPermissions(
|
||||
const { isFree, isPro, isTeam, isEnterprise, isPaid } = subscription
|
||||
const { isTeamAdmin } = userRole
|
||||
|
||||
const isEnterpriseMember = isEnterprise && !isTeamAdmin
|
||||
const canViewUsageInfo = !isEnterpriseMember
|
||||
|
||||
return {
|
||||
canUpgradeToPro: isFree,
|
||||
canUpgradeToTeam: isFree || (isPro && !isTeam),
|
||||
@@ -40,6 +45,8 @@ export function getSubscriptionPermissions(
|
||||
canCancelSubscription: isPaid && !isEnterprise && !(isTeam && !isTeamAdmin), // Team members can't cancel
|
||||
showTeamMemberView: isTeam && !isTeamAdmin,
|
||||
showUpgradePlans: isFree || (isPro && !isTeam) || (isTeam && isTeamAdmin), // Free users, Pro users, Team owners see plans
|
||||
isEnterpriseMember,
|
||||
canViewUsageInfo,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -300,12 +300,16 @@ export function Subscription() {
|
||||
)
|
||||
|
||||
const showBadge =
|
||||
(permissions.canEditUsageLimit && !permissions.showTeamMemberView) ||
|
||||
permissions.showTeamMemberView ||
|
||||
subscription.isEnterprise ||
|
||||
isBlocked
|
||||
!permissions.isEnterpriseMember &&
|
||||
((permissions.canEditUsageLimit && !permissions.showTeamMemberView) ||
|
||||
permissions.showTeamMemberView ||
|
||||
subscription.isEnterprise ||
|
||||
isBlocked)
|
||||
|
||||
const getBadgeConfig = (): { text: string; variant: 'blue-secondary' | 'red' } => {
|
||||
if (permissions.isEnterpriseMember) {
|
||||
return { text: '', variant: 'blue-secondary' }
|
||||
}
|
||||
if (permissions.showTeamMemberView || subscription.isEnterprise) {
|
||||
return { text: `${subscription.seats} seats`, variant: 'blue-secondary' }
|
||||
}
|
||||
@@ -443,67 +447,75 @@ export function Subscription() {
|
||||
|
||||
return (
|
||||
<div className='flex h-full flex-col gap-[20px]'>
|
||||
{/* Current Plan & Usage Overview */}
|
||||
<UsageHeader
|
||||
title={formatPlanName(subscription.plan)}
|
||||
showBadge={showBadge}
|
||||
badgeText={badgeConfig.text}
|
||||
badgeVariant={badgeConfig.variant}
|
||||
onBadgeClick={permissions.showTeamMemberView ? undefined : handleBadgeClick}
|
||||
seatsText={
|
||||
permissions.canManageTeam || subscription.isEnterprise
|
||||
? `${subscription.seats} seats`
|
||||
: undefined
|
||||
}
|
||||
current={usage.current}
|
||||
limit={
|
||||
subscription.isEnterprise || subscription.isTeam
|
||||
? organizationBillingData?.data?.totalUsageLimit
|
||||
: !subscription.isFree &&
|
||||
(permissions.canEditUsageLimit || permissions.showTeamMemberView)
|
||||
? usage.current // placeholder; rightContent will render UsageLimit
|
||||
: usage.limit
|
||||
}
|
||||
isBlocked={isBlocked}
|
||||
progressValue={Math.min(usage.percentUsed, 100)}
|
||||
rightContent={
|
||||
!subscription.isFree &&
|
||||
(permissions.canEditUsageLimit || permissions.showTeamMemberView) ? (
|
||||
<UsageLimit
|
||||
ref={usageLimitRef}
|
||||
currentLimit={
|
||||
(subscription.isTeam || subscription.isEnterprise) &&
|
||||
isTeamAdmin &&
|
||||
organizationBillingData?.data
|
||||
? organizationBillingData.data.totalUsageLimit
|
||||
: usageLimitData.currentLimit || usage.limit
|
||||
}
|
||||
currentUsage={usage.current}
|
||||
canEdit={permissions.canEditUsageLimit}
|
||||
minimumLimit={
|
||||
(subscription.isTeam || subscription.isEnterprise) &&
|
||||
isTeamAdmin &&
|
||||
organizationBillingData?.data
|
||||
? organizationBillingData.data.minimumBillingAmount
|
||||
: usageLimitData.minimumLimit || (subscription.isPro ? 20 : 40)
|
||||
}
|
||||
context={
|
||||
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
|
||||
? 'organization'
|
||||
: 'user'
|
||||
}
|
||||
organizationId={
|
||||
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
|
||||
? activeOrgId
|
||||
: undefined
|
||||
}
|
||||
onLimitUpdated={() => {
|
||||
logger.info('Usage limit updated')
|
||||
}}
|
||||
/>
|
||||
) : undefined
|
||||
}
|
||||
/>
|
||||
{/* Current Plan & Usage Overview - hidden from enterprise members (non-admin) */}
|
||||
{permissions.canViewUsageInfo ? (
|
||||
<UsageHeader
|
||||
title={formatPlanName(subscription.plan)}
|
||||
showBadge={showBadge}
|
||||
badgeText={badgeConfig.text}
|
||||
badgeVariant={badgeConfig.variant}
|
||||
onBadgeClick={permissions.showTeamMemberView ? undefined : handleBadgeClick}
|
||||
seatsText={
|
||||
permissions.canManageTeam || subscription.isEnterprise
|
||||
? `${subscription.seats} seats`
|
||||
: undefined
|
||||
}
|
||||
current={usage.current}
|
||||
limit={
|
||||
subscription.isEnterprise || subscription.isTeam
|
||||
? organizationBillingData?.data?.totalUsageLimit
|
||||
: !subscription.isFree &&
|
||||
(permissions.canEditUsageLimit || permissions.showTeamMemberView)
|
||||
? usage.current // placeholder; rightContent will render UsageLimit
|
||||
: usage.limit
|
||||
}
|
||||
isBlocked={isBlocked}
|
||||
progressValue={Math.min(usage.percentUsed, 100)}
|
||||
rightContent={
|
||||
!subscription.isFree &&
|
||||
(permissions.canEditUsageLimit || permissions.showTeamMemberView) ? (
|
||||
<UsageLimit
|
||||
ref={usageLimitRef}
|
||||
currentLimit={
|
||||
(subscription.isTeam || subscription.isEnterprise) &&
|
||||
isTeamAdmin &&
|
||||
organizationBillingData?.data
|
||||
? organizationBillingData.data.totalUsageLimit
|
||||
: usageLimitData.currentLimit || usage.limit
|
||||
}
|
||||
currentUsage={usage.current}
|
||||
canEdit={permissions.canEditUsageLimit}
|
||||
minimumLimit={
|
||||
(subscription.isTeam || subscription.isEnterprise) &&
|
||||
isTeamAdmin &&
|
||||
organizationBillingData?.data
|
||||
? organizationBillingData.data.minimumBillingAmount
|
||||
: usageLimitData.minimumLimit || (subscription.isPro ? 20 : 40)
|
||||
}
|
||||
context={
|
||||
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
|
||||
? 'organization'
|
||||
: 'user'
|
||||
}
|
||||
organizationId={
|
||||
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
|
||||
? activeOrgId
|
||||
: undefined
|
||||
}
|
||||
onLimitUpdated={() => {
|
||||
logger.info('Usage limit updated')
|
||||
}}
|
||||
/>
|
||||
) : undefined
|
||||
}
|
||||
/>
|
||||
) : (
|
||||
<div className='flex items-center'>
|
||||
<span className='font-medium text-[14px] text-[var(--text-primary)]'>
|
||||
{formatPlanName(subscription.plan)}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Upgrade Plans */}
|
||||
{permissions.showUpgradePlans && (
|
||||
@@ -539,8 +551,8 @@ export function Subscription() {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Credit Balance */}
|
||||
{subscription.isPaid && (
|
||||
{/* Credit Balance - hidden from enterprise members (non-admin) */}
|
||||
{subscription.isPaid && permissions.canViewUsageInfo && (
|
||||
<CreditBalance
|
||||
balance={subscriptionData?.data?.creditBalance ?? 0}
|
||||
canPurchase={permissions.canEditUsageLimit}
|
||||
@@ -554,10 +566,11 @@ export function Subscription() {
|
||||
<ReferralCode onRedeemComplete={() => refetchSubscription()} />
|
||||
)}
|
||||
|
||||
{/* Next Billing Date - hidden from team members */}
|
||||
{/* Next Billing Date - hidden from team members and enterprise members (non-admin) */}
|
||||
{subscription.isPaid &&
|
||||
subscriptionData?.data?.periodEnd &&
|
||||
!permissions.showTeamMemberView && (
|
||||
!permissions.showTeamMemberView &&
|
||||
!permissions.isEnterpriseMember && (
|
||||
<div className='flex items-center justify-between'>
|
||||
<Label>Next Billing Date</Label>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>
|
||||
@@ -566,8 +579,8 @@ export function Subscription() {
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Usage notifications */}
|
||||
{subscription.isPaid && <BillingUsageNotificationsToggle />}
|
||||
{/* Usage notifications - hidden from enterprise members (non-admin) */}
|
||||
{subscription.isPaid && permissions.canViewUsageInfo && <BillingUsageNotificationsToggle />}
|
||||
|
||||
{/* Cancel Subscription */}
|
||||
{permissions.canCancelSubscription && (
|
||||
|
||||
@@ -37,7 +37,7 @@ export const UsageLimit = forwardRef<UsageLimitRef, UsageLimitProps>(
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
const [inputValue, setInputValue] = useState(currentLimit.toString())
|
||||
const [inputValue, setInputValue] = useState(() => currentLimit.toString())
|
||||
const [hasError, setHasError] = useState(false)
|
||||
const [errorType, setErrorType] = useState<'general' | 'belowUsage' | null>(null)
|
||||
const [isEditing, setIsEditing] = useState(false)
|
||||
|
||||
@@ -285,6 +285,7 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
const isPro = planType === 'pro'
|
||||
const isTeam = planType === 'team'
|
||||
const isEnterprise = planType === 'enterprise'
|
||||
const isEnterpriseMember = isEnterprise && !userCanManageBilling
|
||||
|
||||
const handleUpgradeToPro = useCallback(async () => {
|
||||
try {
|
||||
@@ -463,6 +464,18 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
|
||||
}
|
||||
}
|
||||
|
||||
if (isEnterpriseMember) {
|
||||
return (
|
||||
<div className='flex flex-shrink-0 flex-col border-t px-[13.5px] pt-[8px] pb-[10px]'>
|
||||
<div className='flex h-[18px] items-center'>
|
||||
<span className='font-medium text-[12px] text-[var(--text-primary)]'>
|
||||
{PLAN_NAMES[planType]}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Database, HelpCircle, Layout, MessageSquare, Plus, Search, Settings } from 'lucide-react'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
|
||||
@@ -248,12 +248,6 @@ export const Sidebar = memo(function Sidebar() {
|
||||
const footerNavigationItems = useMemo(
|
||||
() =>
|
||||
[
|
||||
{
|
||||
id: 'chat',
|
||||
label: 'Chat',
|
||||
icon: MessageSquare,
|
||||
href: `/workspace/${workspaceId}/chat`,
|
||||
},
|
||||
{
|
||||
id: 'logs',
|
||||
label: 'Logs',
|
||||
|
||||
@@ -92,12 +92,9 @@ export const IncidentioBlock: BlockConfig<IncidentioResponse> = {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'incidentio_incidents_list',
|
||||
'incidentio_actions_list',
|
||||
'incidentio_follow_ups_list',
|
||||
'incidentio_users_list',
|
||||
'incidentio_workflows_list',
|
||||
'incidentio_schedules_list',
|
||||
'incidentio_escalations_list',
|
||||
'incidentio_incident_updates_list',
|
||||
'incidentio_schedule_entries_list',
|
||||
],
|
||||
@@ -113,6 +110,7 @@ export const IncidentioBlock: BlockConfig<IncidentioResponse> = {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'incidentio_incidents_list',
|
||||
'incidentio_users_list',
|
||||
'incidentio_workflows_list',
|
||||
'incidentio_schedules_list',
|
||||
'incidentio_incident_updates_list',
|
||||
|
||||
@@ -216,31 +216,21 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
condition: { field: 'operation', value: ['update_deal'] },
|
||||
},
|
||||
{
|
||||
id: 'deal_id',
|
||||
title: 'Deal ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by deal ID ',
|
||||
condition: { field: 'operation', value: ['get_files'] },
|
||||
},
|
||||
{
|
||||
id: 'person_id',
|
||||
title: 'Person ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by person ID ',
|
||||
condition: { field: 'operation', value: ['get_files'] },
|
||||
},
|
||||
{
|
||||
id: 'org_id',
|
||||
title: 'Organization ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by organization ID ',
|
||||
id: 'sort',
|
||||
title: 'Sort By',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'ID', id: 'id' },
|
||||
{ label: 'Update Time', id: 'update_time' },
|
||||
],
|
||||
value: () => 'id',
|
||||
condition: { field: 'operation', value: ['get_files'] },
|
||||
},
|
||||
{
|
||||
id: 'limit',
|
||||
title: 'Limit',
|
||||
type: 'short-input',
|
||||
placeholder: 'Number of results (default 100, max 500)',
|
||||
placeholder: 'Number of results (default 100, max 100)',
|
||||
condition: { field: 'operation', value: ['get_files'] },
|
||||
},
|
||||
{
|
||||
@@ -305,8 +295,28 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
id: 'cursor',
|
||||
title: 'Cursor',
|
||||
type: 'short-input',
|
||||
placeholder: 'Pagination cursor (optional)',
|
||||
condition: { field: 'operation', value: ['get_pipelines'] },
|
||||
placeholder: 'Pagination cursor from previous response',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['get_all_deals', 'get_projects'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'start',
|
||||
title: 'Start (Offset)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Pagination offset (e.g., 0, 100, 200)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get_activities',
|
||||
'get_leads',
|
||||
'get_files',
|
||||
'get_pipeline_deals',
|
||||
'get_mail_messages',
|
||||
'get_pipelines',
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'pipeline_id',
|
||||
@@ -323,19 +333,6 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
placeholder: 'Filter by stage ID ',
|
||||
condition: { field: 'operation', value: ['get_pipeline_deals'] },
|
||||
},
|
||||
{
|
||||
id: 'status',
|
||||
title: 'Status',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Open', id: 'open' },
|
||||
{ label: 'Won', id: 'won' },
|
||||
{ label: 'Lost', id: 'lost' },
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: ['get_pipeline_deals'] },
|
||||
},
|
||||
{
|
||||
id: 'limit',
|
||||
title: 'Limit',
|
||||
@@ -426,22 +423,29 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
id: 'deal_id',
|
||||
title: 'Deal ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by deal ID ',
|
||||
condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
|
||||
placeholder: 'Associated deal ID ',
|
||||
condition: { field: 'operation', value: ['create_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'person_id',
|
||||
title: 'Person ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by person ID ',
|
||||
condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
|
||||
placeholder: 'Associated person ID ',
|
||||
condition: { field: 'operation', value: ['create_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'org_id',
|
||||
title: 'Organization ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by organization ID ',
|
||||
condition: { field: 'operation', value: ['get_activities', 'create_activity'] },
|
||||
placeholder: 'Associated organization ID ',
|
||||
condition: { field: 'operation', value: ['create_activity'] },
|
||||
},
|
||||
{
|
||||
id: 'user_id',
|
||||
title: 'User ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by user ID',
|
||||
condition: { field: 'operation', value: ['get_activities'] },
|
||||
},
|
||||
{
|
||||
id: 'type',
|
||||
@@ -781,7 +785,8 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
thread_id: { type: 'string', description: 'Mail thread ID' },
|
||||
sort_by: { type: 'string', description: 'Field to sort by' },
|
||||
sort_direction: { type: 'string', description: 'Sorting direction' },
|
||||
cursor: { type: 'string', description: 'Pagination cursor' },
|
||||
cursor: { type: 'string', description: 'Pagination cursor (v2 endpoints)' },
|
||||
start: { type: 'string', description: 'Pagination start offset (v1 endpoints)' },
|
||||
project_id: { type: 'string', description: 'Project ID' },
|
||||
description: { type: 'string', description: 'Description' },
|
||||
start_date: { type: 'string', description: 'Start date' },
|
||||
@@ -793,12 +798,15 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
due_time: { type: 'string', description: 'Due time' },
|
||||
duration: { type: 'string', description: 'Duration' },
|
||||
done: { type: 'string', description: 'Completion status' },
|
||||
user_id: { type: 'string', description: 'User ID' },
|
||||
note: { type: 'string', description: 'Notes' },
|
||||
lead_id: { type: 'string', description: 'Lead ID' },
|
||||
archived: { type: 'string', description: 'Archived status' },
|
||||
value_amount: { type: 'string', description: 'Value amount' },
|
||||
value_currency: { type: 'string', description: 'Value currency' },
|
||||
is_archived: { type: 'string', description: 'Archive status' },
|
||||
organization_id: { type: 'string', description: 'Organization ID' },
|
||||
owner_id: { type: 'string', description: 'Owner user ID' },
|
||||
},
|
||||
outputs: {
|
||||
deals: { type: 'json', description: 'Array of deal objects' },
|
||||
|
||||
@@ -445,6 +445,13 @@ Return ONLY the order by expression - no explanations, no extra text.`,
|
||||
placeholder: '100',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
},
|
||||
{
|
||||
id: 'offset',
|
||||
title: 'Offset',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
},
|
||||
// Vector search operation fields
|
||||
{
|
||||
id: 'functionName',
|
||||
@@ -543,6 +550,13 @@ Return ONLY the order by expression - no explanations, no extra text.`,
|
||||
placeholder: '100',
|
||||
condition: { field: 'operation', value: 'text_search' },
|
||||
},
|
||||
{
|
||||
id: 'offset',
|
||||
title: 'Offset',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'text_search' },
|
||||
},
|
||||
// Count operation fields
|
||||
{
|
||||
id: 'filter',
|
||||
|
||||
@@ -66,6 +66,20 @@ export const TypeformBlock: BlockConfig<TypeformResponse> = {
|
||||
placeholder: 'Number of responses per page (default: 25)',
|
||||
condition: { field: 'operation', value: 'typeform_responses' },
|
||||
},
|
||||
{
|
||||
id: 'before',
|
||||
title: 'Before (Cursor)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Cursor token from previous response for pagination',
|
||||
condition: { field: 'operation', value: 'typeform_responses' },
|
||||
},
|
||||
{
|
||||
id: 'after',
|
||||
title: 'After (Cursor)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Cursor token from previous response for newer results',
|
||||
condition: { field: 'operation', value: 'typeform_responses' },
|
||||
},
|
||||
{
|
||||
id: 'since',
|
||||
title: 'Since',
|
||||
@@ -380,6 +394,8 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
apiKey: { type: 'string', description: 'Personal access token' },
|
||||
// Response operation params
|
||||
pageSize: { type: 'number', description: 'Responses per page' },
|
||||
before: { type: 'string', description: 'Cursor token for fetching the next page' },
|
||||
after: { type: 'string', description: 'Cursor token for fetching newer results' },
|
||||
since: { type: 'string', description: 'Start date filter' },
|
||||
until: { type: 'string', description: 'End date filter' },
|
||||
completed: { type: 'string', description: 'Completion status filter' },
|
||||
|
||||
@@ -444,33 +444,36 @@ Return ONLY the search query - no explanations.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'sortBy',
|
||||
title: 'Sort By',
|
||||
id: 'filterType',
|
||||
title: 'Resource Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Relevance', id: 'relevance' },
|
||||
{ label: 'Created At', id: 'created_at' },
|
||||
{ label: 'Updated At', id: 'updated_at' },
|
||||
{ label: 'Priority', id: 'priority' },
|
||||
{ label: 'Status', id: 'status' },
|
||||
{ label: 'Ticket Type', id: 'ticket_type' },
|
||||
{ label: 'Ticket', id: 'ticket' },
|
||||
{ label: 'User', id: 'user' },
|
||||
{ label: 'Organization', id: 'organization' },
|
||||
{ label: 'Group', id: 'group' },
|
||||
],
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['search'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'sortOrder',
|
||||
title: 'Sort Order',
|
||||
id: 'sort',
|
||||
title: 'Sort',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Ascending', id: 'asc' },
|
||||
{ label: 'Descending', id: 'desc' },
|
||||
{ label: 'Updated At (Asc)', id: 'updated_at' },
|
||||
{ label: 'Updated At (Desc)', id: '-updated_at' },
|
||||
{ label: 'ID (Asc)', id: 'id' },
|
||||
{ label: 'ID (Desc)', id: '-id' },
|
||||
{ label: 'Status (Asc)', id: 'status' },
|
||||
{ label: 'Status (Desc)', id: '-status' },
|
||||
],
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['search'],
|
||||
value: ['get_tickets'],
|
||||
},
|
||||
},
|
||||
// Pagination fields
|
||||
@@ -492,20 +495,25 @@ Return ONLY the search query - no explanations.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'page',
|
||||
title: 'Page',
|
||||
id: 'pageAfter',
|
||||
title: 'Page After (Cursor)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Page number',
|
||||
placeholder: 'Cursor from previous response (after_cursor)',
|
||||
description: 'Cursor value from a previous response to fetch the next page of results',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get_tickets',
|
||||
'get_users',
|
||||
'get_organizations',
|
||||
'search_users',
|
||||
'autocomplete_organizations',
|
||||
'search',
|
||||
],
|
||||
value: ['get_tickets', 'get_users', 'get_organizations', 'search'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'page',
|
||||
title: 'Page Number',
|
||||
type: 'short-input',
|
||||
placeholder: 'Page number (default: 1)',
|
||||
description: 'Page number for offset-based pagination',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['search_users', 'autocomplete_organizations'],
|
||||
},
|
||||
},
|
||||
],
|
||||
@@ -624,6 +632,7 @@ Return ONLY the search query - no explanations.`,
|
||||
email: { type: 'string', description: 'Zendesk email address' },
|
||||
apiToken: { type: 'string', description: 'Zendesk API token' },
|
||||
subdomain: { type: 'string', description: 'Zendesk subdomain' },
|
||||
sort: { type: 'string', description: 'Sort field for ticket listing' },
|
||||
},
|
||||
outputs: {
|
||||
// Ticket operations - list
|
||||
@@ -665,8 +674,11 @@ Return ONLY the search query - no explanations.`,
|
||||
type: 'boolean',
|
||||
description: 'Deletion confirmation (delete_ticket, delete_user, delete_organization)',
|
||||
},
|
||||
// Pagination (shared across list operations)
|
||||
paging: { type: 'json', description: 'Pagination information for list operations' },
|
||||
// Cursor-based pagination (shared across list operations)
|
||||
paging: {
|
||||
type: 'json',
|
||||
description: 'Cursor-based pagination information (after_cursor, has_more)',
|
||||
},
|
||||
// Metadata (shared across all operations)
|
||||
metadata: { type: 'json', description: 'Operation metadata including operation type' },
|
||||
},
|
||||
|
||||
@@ -9,6 +9,7 @@ import {
|
||||
type ReactNode,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useId,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
@@ -170,6 +171,7 @@ const Combobox = memo(
|
||||
},
|
||||
ref
|
||||
) => {
|
||||
const listboxId = useId()
|
||||
const [open, setOpen] = useState(false)
|
||||
const [highlightedIndex, setHighlightedIndex] = useState(-1)
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
@@ -513,6 +515,7 @@ const Combobox = memo(
|
||||
role='combobox'
|
||||
aria-expanded={open}
|
||||
aria-haspopup='listbox'
|
||||
aria-controls={listboxId}
|
||||
aria-disabled={disabled}
|
||||
tabIndex={disabled ? -1 : 0}
|
||||
className={cn(
|
||||
@@ -616,7 +619,7 @@ const Combobox = memo(
|
||||
}
|
||||
}}
|
||||
>
|
||||
<div ref={dropdownRef} role='listbox'>
|
||||
<div ref={dropdownRef} role='listbox' id={listboxId}>
|
||||
{isLoading ? (
|
||||
<div className='flex items-center justify-center py-[14px]'>
|
||||
<Loader2 className='h-[16px] w-[16px] animate-spin text-[var(--text-muted)]' />
|
||||
|
||||
@@ -27,12 +27,14 @@ const Alert = React.forwardRef<
|
||||
Alert.displayName = 'Alert'
|
||||
|
||||
const AlertTitle = React.forwardRef<HTMLParagraphElement, React.HTMLAttributes<HTMLHeadingElement>>(
|
||||
({ className, ...props }, ref) => (
|
||||
({ className, children, ...props }, ref) => (
|
||||
<h5
|
||||
ref={ref}
|
||||
className={cn('mb-1 font-medium leading-none tracking-tight', className)}
|
||||
{...props}
|
||||
/>
|
||||
>
|
||||
{children}
|
||||
</h5>
|
||||
)
|
||||
)
|
||||
AlertTitle.displayName = 'AlertTitle'
|
||||
|
||||
265
apps/sim/ee/access-control/utils/permission-check.test.ts
Normal file
265
apps/sim/ee/access-control/utils/permission-check.test.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { databaseMock, drizzleOrmMock, loggerMock } from '@sim/testing'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const {
|
||||
DEFAULT_PERMISSION_GROUP_CONFIG,
|
||||
mockGetAllowedIntegrationsFromEnv,
|
||||
mockIsOrganizationOnEnterprisePlan,
|
||||
mockGetProviderFromModel,
|
||||
} = vi.hoisted(() => ({
|
||||
DEFAULT_PERMISSION_GROUP_CONFIG: {
|
||||
allowedIntegrations: null,
|
||||
allowedModelProviders: null,
|
||||
hideTraceSpans: false,
|
||||
hideKnowledgeBaseTab: false,
|
||||
hideCopilot: false,
|
||||
hideApiKeysTab: false,
|
||||
hideEnvironmentTab: false,
|
||||
hideFilesTab: false,
|
||||
disableMcpTools: false,
|
||||
disableCustomTools: false,
|
||||
disableSkills: false,
|
||||
hideTemplates: false,
|
||||
disableInvitations: false,
|
||||
hideDeployApi: false,
|
||||
hideDeployMcp: false,
|
||||
hideDeployA2a: false,
|
||||
hideDeployChatbot: false,
|
||||
hideDeployTemplate: false,
|
||||
},
|
||||
mockGetAllowedIntegrationsFromEnv: vi.fn<() => string[] | null>(),
|
||||
mockIsOrganizationOnEnterprisePlan: vi.fn<() => Promise<boolean>>(),
|
||||
mockGetProviderFromModel: vi.fn<(model: string) => string>(),
|
||||
}))
|
||||
|
||||
vi.mock('@sim/db', () => databaseMock)
|
||||
vi.mock('@sim/db/schema', () => ({}))
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
vi.mock('drizzle-orm', () => drizzleOrmMock)
|
||||
vi.mock('@/lib/billing', () => ({
|
||||
isOrganizationOnEnterprisePlan: mockIsOrganizationOnEnterprisePlan,
|
||||
}))
|
||||
vi.mock('@/lib/core/config/feature-flags', () => ({
|
||||
getAllowedIntegrationsFromEnv: mockGetAllowedIntegrationsFromEnv,
|
||||
isAccessControlEnabled: false,
|
||||
isHosted: false,
|
||||
}))
|
||||
vi.mock('@/lib/permission-groups/types', () => ({
|
||||
DEFAULT_PERMISSION_GROUP_CONFIG,
|
||||
parsePermissionGroupConfig: (config: unknown) => {
|
||||
if (!config || typeof config !== 'object') return DEFAULT_PERMISSION_GROUP_CONFIG
|
||||
return { ...DEFAULT_PERMISSION_GROUP_CONFIG, ...config }
|
||||
},
|
||||
}))
|
||||
vi.mock('@/providers/utils', () => ({
|
||||
getProviderFromModel: mockGetProviderFromModel,
|
||||
}))
|
||||
|
||||
import {
|
||||
getUserPermissionConfig,
|
||||
IntegrationNotAllowedError,
|
||||
validateBlockType,
|
||||
} from './permission-check'
|
||||
|
||||
describe('IntegrationNotAllowedError', () => {
|
||||
it.concurrent('creates error with correct name and message', () => {
|
||||
const error = new IntegrationNotAllowedError('discord')
|
||||
|
||||
expect(error).toBeInstanceOf(Error)
|
||||
expect(error.name).toBe('IntegrationNotAllowedError')
|
||||
expect(error.message).toContain('discord')
|
||||
})
|
||||
|
||||
it.concurrent('includes custom reason when provided', () => {
|
||||
const error = new IntegrationNotAllowedError('discord', 'blocked by server policy')
|
||||
|
||||
expect(error.message).toContain('blocked by server policy')
|
||||
})
|
||||
})
|
||||
|
||||
describe('getUserPermissionConfig', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('returns null when no env allowlist is configured', async () => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(null)
|
||||
|
||||
const config = await getUserPermissionConfig('user-123')
|
||||
|
||||
expect(config).toBeNull()
|
||||
})
|
||||
|
||||
it('returns config with env allowlist when configured', async () => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(['slack', 'gmail'])
|
||||
|
||||
const config = await getUserPermissionConfig('user-123')
|
||||
|
||||
expect(config).not.toBeNull()
|
||||
expect(config!.allowedIntegrations).toEqual(['slack', 'gmail'])
|
||||
})
|
||||
|
||||
it('preserves default values for non-allowlist fields', async () => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(['slack'])
|
||||
|
||||
const config = await getUserPermissionConfig('user-123')
|
||||
|
||||
expect(config!.disableMcpTools).toBe(false)
|
||||
expect(config!.allowedModelProviders).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe('env allowlist fallback when userId is absent', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('returns null allowlist when no userId and no env allowlist', async () => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(null)
|
||||
|
||||
const userId: string | undefined = undefined
|
||||
const permissionConfig = userId ? await getUserPermissionConfig(userId) : null
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? mockGetAllowedIntegrationsFromEnv()
|
||||
|
||||
expect(allowedIntegrations).toBeNull()
|
||||
})
|
||||
|
||||
it('falls back to env allowlist when no userId is provided', async () => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(['slack', 'gmail'])
|
||||
|
||||
const userId: string | undefined = undefined
|
||||
const permissionConfig = userId ? await getUserPermissionConfig(userId) : null
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? mockGetAllowedIntegrationsFromEnv()
|
||||
|
||||
expect(allowedIntegrations).toEqual(['slack', 'gmail'])
|
||||
})
|
||||
|
||||
it('env allowlist filters block types when userId is absent', async () => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(['slack', 'gmail'])
|
||||
|
||||
const userId: string | undefined = undefined
|
||||
const permissionConfig = userId ? await getUserPermissionConfig(userId) : null
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? mockGetAllowedIntegrationsFromEnv()
|
||||
|
||||
expect(allowedIntegrations).not.toBeNull()
|
||||
expect(allowedIntegrations!.includes('slack')).toBe(true)
|
||||
expect(allowedIntegrations!.includes('discord')).toBe(false)
|
||||
})
|
||||
|
||||
it('uses permission config when userId is present, ignoring env fallback', async () => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(['slack', 'gmail'])
|
||||
|
||||
const config = await getUserPermissionConfig('user-123')
|
||||
|
||||
expect(config).not.toBeNull()
|
||||
expect(config!.allowedIntegrations).toEqual(['slack', 'gmail'])
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateBlockType', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('when no env allowlist is configured', () => {
|
||||
beforeEach(() => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue(null)
|
||||
})
|
||||
|
||||
it('allows any block type', async () => {
|
||||
await validateBlockType(undefined, 'google_drive')
|
||||
})
|
||||
|
||||
it('allows multi-word block types', async () => {
|
||||
await validateBlockType(undefined, 'microsoft_excel')
|
||||
})
|
||||
|
||||
it('always allows start_trigger', async () => {
|
||||
await validateBlockType(undefined, 'start_trigger')
|
||||
})
|
||||
})
|
||||
|
||||
describe('when env allowlist is configured', () => {
|
||||
beforeEach(() => {
|
||||
mockGetAllowedIntegrationsFromEnv.mockReturnValue([
|
||||
'slack',
|
||||
'google_drive',
|
||||
'microsoft_excel',
|
||||
])
|
||||
})
|
||||
|
||||
it('allows block types on the allowlist', async () => {
|
||||
await validateBlockType(undefined, 'slack')
|
||||
await validateBlockType(undefined, 'google_drive')
|
||||
await validateBlockType(undefined, 'microsoft_excel')
|
||||
})
|
||||
|
||||
it('rejects block types not on the allowlist', async () => {
|
||||
await expect(validateBlockType(undefined, 'discord')).rejects.toThrow(
|
||||
IntegrationNotAllowedError
|
||||
)
|
||||
})
|
||||
|
||||
it('always allows start_trigger regardless of allowlist', async () => {
|
||||
await validateBlockType(undefined, 'start_trigger')
|
||||
})
|
||||
|
||||
it('matches case-insensitively', async () => {
|
||||
await validateBlockType(undefined, 'Slack')
|
||||
await validateBlockType(undefined, 'GOOGLE_DRIVE')
|
||||
})
|
||||
|
||||
it('includes env reason in error when env allowlist is the source', async () => {
|
||||
await expect(validateBlockType(undefined, 'discord')).rejects.toThrow(/ALLOWED_INTEGRATIONS/)
|
||||
})
|
||||
|
||||
it('includes env reason even when userId is present if env is the source', async () => {
|
||||
await expect(validateBlockType('user-123', 'discord')).rejects.toThrow(/ALLOWED_INTEGRATIONS/)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('service ID to block type normalization', () => {
|
||||
it.concurrent('hyphenated service IDs match underscore block types after normalization', () => {
|
||||
const allowedBlockTypes = [
|
||||
'google_drive',
|
||||
'microsoft_excel',
|
||||
'microsoft_teams',
|
||||
'google_sheets',
|
||||
'google_docs',
|
||||
'google_calendar',
|
||||
'google_forms',
|
||||
'microsoft_planner',
|
||||
]
|
||||
const serviceIds = [
|
||||
'google-drive',
|
||||
'microsoft-excel',
|
||||
'microsoft-teams',
|
||||
'google-sheets',
|
||||
'google-docs',
|
||||
'google-calendar',
|
||||
'google-forms',
|
||||
'microsoft-planner',
|
||||
]
|
||||
|
||||
for (const serviceId of serviceIds) {
|
||||
const normalized = serviceId.replace(/-/g, '_')
|
||||
expect(allowedBlockTypes).toContain(normalized)
|
||||
}
|
||||
})
|
||||
|
||||
it.concurrent('single-word service IDs are unaffected by normalization', () => {
|
||||
const serviceIds = ['slack', 'gmail', 'notion', 'discord', 'jira', 'trello']
|
||||
|
||||
for (const serviceId of serviceIds) {
|
||||
const normalized = serviceId.replace(/-/g, '_')
|
||||
expect(normalized).toBe(serviceId)
|
||||
}
|
||||
})
|
||||
})
|
||||
@@ -3,8 +3,13 @@ import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { isOrganizationOnEnterprisePlan } from '@/lib/billing'
|
||||
import { isAccessControlEnabled, isHosted } from '@/lib/core/config/feature-flags'
|
||||
import {
|
||||
getAllowedIntegrationsFromEnv,
|
||||
isAccessControlEnabled,
|
||||
isHosted,
|
||||
} from '@/lib/core/config/feature-flags'
|
||||
import {
|
||||
DEFAULT_PERMISSION_GROUP_CONFIG,
|
||||
type PermissionGroupConfig,
|
||||
parsePermissionGroupConfig,
|
||||
} from '@/lib/permission-groups/types'
|
||||
@@ -23,8 +28,12 @@ export class ProviderNotAllowedError extends Error {
|
||||
}
|
||||
|
||||
export class IntegrationNotAllowedError extends Error {
|
||||
constructor(blockType: string) {
|
||||
super(`Integration "${blockType}" is not allowed based on your permission group settings`)
|
||||
constructor(blockType: string, reason?: string) {
|
||||
super(
|
||||
reason
|
||||
? `Integration "${blockType}" is not allowed: ${reason}`
|
||||
: `Integration "${blockType}" is not allowed based on your permission group settings`
|
||||
)
|
||||
this.name = 'IntegrationNotAllowedError'
|
||||
}
|
||||
}
|
||||
@@ -57,11 +66,38 @@ export class InvitationsNotAllowedError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges the env allowlist into a permission config.
|
||||
* If `config` is null and no env allowlist is set, returns null.
|
||||
* If `config` is null but env allowlist is set, returns a default config with only allowedIntegrations set.
|
||||
* If both are set, intersects the two allowlists.
|
||||
*/
|
||||
function mergeEnvAllowlist(config: PermissionGroupConfig | null): PermissionGroupConfig | null {
|
||||
const envAllowlist = getAllowedIntegrationsFromEnv()
|
||||
|
||||
if (envAllowlist === null) {
|
||||
return config
|
||||
}
|
||||
|
||||
if (config === null) {
|
||||
return { ...DEFAULT_PERMISSION_GROUP_CONFIG, allowedIntegrations: envAllowlist }
|
||||
}
|
||||
|
||||
const merged =
|
||||
config.allowedIntegrations === null
|
||||
? envAllowlist
|
||||
: config.allowedIntegrations
|
||||
.map((i) => i.toLowerCase())
|
||||
.filter((i) => envAllowlist.includes(i))
|
||||
|
||||
return { ...config, allowedIntegrations: merged }
|
||||
}
|
||||
|
||||
export async function getUserPermissionConfig(
|
||||
userId: string
|
||||
): Promise<PermissionGroupConfig | null> {
|
||||
if (!isHosted && !isAccessControlEnabled) {
|
||||
return null
|
||||
return mergeEnvAllowlist(null)
|
||||
}
|
||||
|
||||
const [membership] = await db
|
||||
@@ -71,12 +107,12 @@ export async function getUserPermissionConfig(
|
||||
.limit(1)
|
||||
|
||||
if (!membership) {
|
||||
return null
|
||||
return mergeEnvAllowlist(null)
|
||||
}
|
||||
|
||||
const isEnterprise = await isOrganizationOnEnterprisePlan(membership.organizationId)
|
||||
if (!isEnterprise) {
|
||||
return null
|
||||
return mergeEnvAllowlist(null)
|
||||
}
|
||||
|
||||
const [groupMembership] = await db
|
||||
@@ -92,10 +128,10 @@ export async function getUserPermissionConfig(
|
||||
.limit(1)
|
||||
|
||||
if (!groupMembership) {
|
||||
return null
|
||||
return mergeEnvAllowlist(null)
|
||||
}
|
||||
|
||||
return parsePermissionGroupConfig(groupMembership.config)
|
||||
return mergeEnvAllowlist(parsePermissionGroupConfig(groupMembership.config))
|
||||
}
|
||||
|
||||
export async function getPermissionConfig(
|
||||
@@ -152,19 +188,25 @@ export async function validateBlockType(
|
||||
return
|
||||
}
|
||||
|
||||
if (!userId) {
|
||||
return
|
||||
}
|
||||
|
||||
const config = await getPermissionConfig(userId, ctx)
|
||||
const config = userId ? await getPermissionConfig(userId, ctx) : mergeEnvAllowlist(null)
|
||||
|
||||
if (!config || config.allowedIntegrations === null) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!config.allowedIntegrations.includes(blockType)) {
|
||||
logger.warn('Integration blocked by permission group', { userId, blockType })
|
||||
throw new IntegrationNotAllowedError(blockType)
|
||||
if (!config.allowedIntegrations.includes(blockType.toLowerCase())) {
|
||||
const envAllowlist = getAllowedIntegrationsFromEnv()
|
||||
const blockedByEnv = envAllowlist !== null && !envAllowlist.includes(blockType.toLowerCase())
|
||||
logger.warn(
|
||||
blockedByEnv
|
||||
? 'Integration blocked by env allowlist'
|
||||
: 'Integration blocked by permission group',
|
||||
{ userId, blockType }
|
||||
)
|
||||
throw new IntegrationNotAllowedError(
|
||||
blockType,
|
||||
blockedByEnv ? 'blocked by server ALLOWED_INTEGRATIONS policy' : undefined
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Check, ChevronDown, Copy, Eye, EyeOff } from 'lucide-react'
|
||||
import { Check, ChevronDown, Clipboard, Eye, EyeOff } from 'lucide-react'
|
||||
import { Button, Combobox, Input, Switch, Textarea } from '@/components/emcn'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
@@ -418,29 +418,29 @@ export function SSO() {
|
||||
|
||||
{/* Callback URL */}
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Callback URL
|
||||
</span>
|
||||
<div className='relative'>
|
||||
<div className='flex h-9 items-center rounded-[6px] border bg-[var(--surface-1)] px-[10px] pr-[40px]'>
|
||||
<code className='flex-1 truncate font-mono text-[13px] text-[var(--text-primary)]'>
|
||||
{providerCallbackUrl}
|
||||
</code>
|
||||
</div>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Callback URL
|
||||
</span>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={() => copyToClipboard(providerCallbackUrl)}
|
||||
className='-translate-y-1/2 absolute top-1/2 right-[4px] h-[28px] w-[28px] rounded-[4px] text-[var(--text-muted)] hover:text-[var(--text-primary)]'
|
||||
className='h-[22px] w-[22px] rounded-[4px] p-0 text-[var(--text-muted)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
{copied ? (
|
||||
<Check className='h-[14px] w-[14px]' />
|
||||
<Check className='h-[13px] w-[13px]' />
|
||||
) : (
|
||||
<Copy className='h-[14px] w-[14px]' />
|
||||
<Clipboard className='h-[13px] w-[13px]' />
|
||||
)}
|
||||
<span className='sr-only'>Copy callback URL</span>
|
||||
</Button>
|
||||
</div>
|
||||
<div className='flex h-9 items-center rounded-[6px] border bg-[var(--surface-1)] px-[10px]'>
|
||||
<code className='flex-1 truncate font-mono text-[13px] text-[var(--text-primary)]'>
|
||||
{providerCallbackUrl}
|
||||
</code>
|
||||
</div>
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>
|
||||
Configure this in your identity provider
|
||||
</p>
|
||||
@@ -852,29 +852,29 @@ export function SSO() {
|
||||
|
||||
{/* Callback URL display */}
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Callback URL
|
||||
</span>
|
||||
<div className='relative'>
|
||||
<div className='flex h-9 items-center rounded-[6px] border bg-[var(--surface-1)] px-[10px] pr-[40px]'>
|
||||
<code className='flex-1 truncate font-mono text-[13px] text-[var(--text-primary)]'>
|
||||
{callbackUrl}
|
||||
</code>
|
||||
</div>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Callback URL
|
||||
</span>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={() => copyToClipboard(callbackUrl)}
|
||||
className='-translate-y-1/2 absolute top-1/2 right-[4px] h-[28px] w-[28px] rounded-[4px] text-[var(--text-muted)] hover:text-[var(--text-primary)]'
|
||||
className='h-[22px] w-[22px] rounded-[4px] p-0 text-[var(--text-muted)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
{copied ? (
|
||||
<Check className='h-[14px] w-[14px]' />
|
||||
<Check className='h-[13px] w-[13px]' />
|
||||
) : (
|
||||
<Copy className='h-[14px] w-[14px]' />
|
||||
<Clipboard className='h-[13px] w-[13px]' />
|
||||
)}
|
||||
<span className='sr-only'>Copy callback URL</span>
|
||||
</Button>
|
||||
</div>
|
||||
<div className='flex h-9 items-center rounded-[6px] border bg-[var(--surface-1)] px-[10px]'>
|
||||
<code className='flex-1 truncate font-mono text-[13px] text-[var(--text-primary)]'>
|
||||
{callbackUrl}
|
||||
</code>
|
||||
</div>
|
||||
<p className='text-[13px] text-[var(--text-muted)]'>
|
||||
Configure this in your identity provider
|
||||
</p>
|
||||
|
||||
@@ -17,6 +17,7 @@ vi.mock('@/lib/core/config/feature-flags', () => ({
|
||||
isDev: true,
|
||||
isTest: false,
|
||||
getCostMultiplier: vi.fn().mockReturnValue(1),
|
||||
getAllowedIntegrationsFromEnv: vi.fn().mockReturnValue(null),
|
||||
isEmailVerificationEnabled: false,
|
||||
isBillingEnabled: false,
|
||||
isOrganizationsEnabled: false,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { useQuery } from '@tanstack/react-query'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { isAccessControlEnabled, isHosted } from '@/lib/core/config/feature-flags'
|
||||
import {
|
||||
@@ -21,12 +22,44 @@ export interface PermissionConfigResult {
|
||||
isInvitationsDisabled: boolean
|
||||
}
|
||||
|
||||
interface AllowedIntegrationsResponse {
|
||||
allowedIntegrations: string[] | null
|
||||
}
|
||||
|
||||
function useAllowedIntegrationsFromEnv() {
|
||||
return useQuery<AllowedIntegrationsResponse>({
|
||||
queryKey: ['allowedIntegrations', 'env'],
|
||||
queryFn: async () => {
|
||||
const response = await fetch('/api/settings/allowed-integrations')
|
||||
if (!response.ok) return { allowedIntegrations: null }
|
||||
return response.json()
|
||||
},
|
||||
staleTime: 5 * 60 * 1000,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Intersects two allowlists. If either is null (unrestricted), returns the other.
|
||||
* If both are set, returns only items present in both.
|
||||
*/
|
||||
function intersectAllowlists(a: string[] | null, b: string[] | null): string[] | null {
|
||||
if (a === null) return b
|
||||
if (b === null) return a
|
||||
return a.map((i) => i.toLowerCase()).filter((i) => b.includes(i))
|
||||
}
|
||||
|
||||
export function usePermissionConfig(): PermissionConfigResult {
|
||||
const accessControlDisabled = !isHosted && !isAccessControlEnabled
|
||||
const { data: organizationsData } = useOrganizations()
|
||||
const activeOrganization = organizationsData?.activeOrganization
|
||||
|
||||
const { data: permissionData, isLoading } = useUserPermissionConfig(activeOrganization?.id)
|
||||
const { data: permissionData, isLoading: isPermissionLoading } = useUserPermissionConfig(
|
||||
activeOrganization?.id
|
||||
)
|
||||
const { data: envAllowlistData, isLoading: isEnvAllowlistLoading } =
|
||||
useAllowedIntegrationsFromEnv()
|
||||
|
||||
const isLoading = isPermissionLoading || isEnvAllowlistLoading
|
||||
|
||||
const config = useMemo(() => {
|
||||
if (accessControlDisabled) {
|
||||
@@ -40,13 +73,18 @@ export function usePermissionConfig(): PermissionConfigResult {
|
||||
|
||||
const isInPermissionGroup = !accessControlDisabled && !!permissionData?.permissionGroupId
|
||||
|
||||
const mergedAllowedIntegrations = useMemo(() => {
|
||||
const envAllowlist = envAllowlistData?.allowedIntegrations ?? null
|
||||
return intersectAllowlists(config.allowedIntegrations, envAllowlist)
|
||||
}, [config.allowedIntegrations, envAllowlistData])
|
||||
|
||||
const isBlockAllowed = useMemo(() => {
|
||||
return (blockType: string) => {
|
||||
if (blockType === 'start_trigger') return true
|
||||
if (config.allowedIntegrations === null) return true
|
||||
return config.allowedIntegrations.includes(blockType)
|
||||
if (mergedAllowedIntegrations === null) return true
|
||||
return mergedAllowedIntegrations.includes(blockType.toLowerCase())
|
||||
}
|
||||
}, [config.allowedIntegrations])
|
||||
}, [mergedAllowedIntegrations])
|
||||
|
||||
const isProviderAllowed = useMemo(() => {
|
||||
return (providerId: string) => {
|
||||
@@ -57,13 +95,14 @@ export function usePermissionConfig(): PermissionConfigResult {
|
||||
|
||||
const filterBlocks = useMemo(() => {
|
||||
return <T extends { type: string }>(blocks: T[]): T[] => {
|
||||
if (config.allowedIntegrations === null) return blocks
|
||||
if (mergedAllowedIntegrations === null) return blocks
|
||||
return blocks.filter(
|
||||
(block) =>
|
||||
block.type === 'start_trigger' || config.allowedIntegrations!.includes(block.type)
|
||||
block.type === 'start_trigger' ||
|
||||
mergedAllowedIntegrations.includes(block.type.toLowerCase())
|
||||
)
|
||||
}
|
||||
}, [config.allowedIntegrations])
|
||||
}, [mergedAllowedIntegrations])
|
||||
|
||||
const filterProviders = useMemo(() => {
|
||||
return (providerIds: string[]): string[] => {
|
||||
@@ -77,9 +116,14 @@ export function usePermissionConfig(): PermissionConfigResult {
|
||||
return featureFlagDisabled || config.disableInvitations
|
||||
}, [config.disableInvitations])
|
||||
|
||||
const mergedConfig = useMemo(
|
||||
() => ({ ...config, allowedIntegrations: mergedAllowedIntegrations }),
|
||||
[config, mergedAllowedIntegrations]
|
||||
)
|
||||
|
||||
return useMemo(
|
||||
() => ({
|
||||
config,
|
||||
config: mergedConfig,
|
||||
isLoading,
|
||||
isInPermissionGroup,
|
||||
filterBlocks,
|
||||
@@ -89,7 +133,7 @@ export function usePermissionConfig(): PermissionConfigResult {
|
||||
isInvitationsDisabled,
|
||||
}),
|
||||
[
|
||||
config,
|
||||
mergedConfig,
|
||||
isLoading,
|
||||
isInPermissionGroup,
|
||||
filterBlocks,
|
||||
|
||||
@@ -16,26 +16,32 @@ export const mdxComponents: MDXRemoteProps['components'] = {
|
||||
unoptimized
|
||||
/>
|
||||
),
|
||||
h2: (props: any) => (
|
||||
h2: ({ children, className, ...props }: any) => (
|
||||
<h2
|
||||
{...props}
|
||||
style={{ fontSize: '30px', marginTop: '3rem', marginBottom: '1.5rem' }}
|
||||
className={clsx('font-medium text-black leading-tight', props.className)}
|
||||
/>
|
||||
className={clsx('font-medium text-black leading-tight', className)}
|
||||
>
|
||||
{children}
|
||||
</h2>
|
||||
),
|
||||
h3: (props: any) => (
|
||||
h3: ({ children, className, ...props }: any) => (
|
||||
<h3
|
||||
{...props}
|
||||
style={{ fontSize: '24px', marginTop: '1.5rem', marginBottom: '0.75rem' }}
|
||||
className={clsx('font-medium leading-tight', props.className)}
|
||||
/>
|
||||
className={clsx('font-medium leading-tight', className)}
|
||||
>
|
||||
{children}
|
||||
</h3>
|
||||
),
|
||||
h4: (props: any) => (
|
||||
h4: ({ children, className, ...props }: any) => (
|
||||
<h4
|
||||
{...props}
|
||||
style={{ fontSize: '19px', marginTop: '1.5rem', marginBottom: '0.75rem' }}
|
||||
className={clsx('font-medium leading-tight', props.className)}
|
||||
/>
|
||||
className={clsx('font-medium leading-tight', className)}
|
||||
>
|
||||
{children}
|
||||
</h4>
|
||||
),
|
||||
p: (props: any) => (
|
||||
<p
|
||||
|
||||
@@ -15,16 +15,14 @@ export interface ChatLoadResult {
|
||||
/**
|
||||
* Resolve or create a copilot chat session.
|
||||
* If chatId is provided, loads the existing chat. Otherwise creates a new one.
|
||||
* Supports both workflow-scoped and workspace-scoped chats.
|
||||
*/
|
||||
export async function resolveOrCreateChat(params: {
|
||||
chatId?: string
|
||||
userId: string
|
||||
workflowId?: string
|
||||
workspaceId?: string
|
||||
workflowId: string
|
||||
model: string
|
||||
}): Promise<ChatLoadResult> {
|
||||
const { chatId, userId, workflowId, workspaceId, model } = params
|
||||
const { chatId, userId, workflowId, model } = params
|
||||
|
||||
if (chatId) {
|
||||
const [chat] = await db
|
||||
@@ -45,8 +43,7 @@ export async function resolveOrCreateChat(params: {
|
||||
.insert(copilotChats)
|
||||
.values({
|
||||
userId,
|
||||
...(workflowId ? { workflowId } : {}),
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
workflowId,
|
||||
title: null,
|
||||
model,
|
||||
messages: [],
|
||||
@@ -54,7 +51,7 @@ export async function resolveOrCreateChat(params: {
|
||||
.returning()
|
||||
|
||||
if (!newChat) {
|
||||
logger.warn('Failed to create new copilot chat row', { userId, workflowId, workspaceId })
|
||||
logger.warn('Failed to create new copilot chat row', { userId, workflowId })
|
||||
return {
|
||||
chatId: '',
|
||||
chat: null,
|
||||
|
||||
@@ -1,21 +1,15 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { prepareExecutionContext } from '@/lib/copilot/orchestrator/tool-executor'
|
||||
import type {
|
||||
ExecutionContext,
|
||||
OrchestratorOptions,
|
||||
OrchestratorResult,
|
||||
} from '@/lib/copilot/orchestrator/types'
|
||||
import type { OrchestratorOptions, OrchestratorResult } from '@/lib/copilot/orchestrator/types'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||
import { buildToolCallSummaries, createStreamingContext, runStreamLoop } from './stream-core'
|
||||
|
||||
const logger = createLogger('CopilotOrchestrator')
|
||||
|
||||
export interface OrchestrateStreamOptions extends OrchestratorOptions {
|
||||
userId: string
|
||||
workflowId?: string
|
||||
workspaceId?: string
|
||||
workflowId: string
|
||||
chatId?: string
|
||||
}
|
||||
|
||||
@@ -23,20 +17,8 @@ export async function orchestrateCopilotStream(
|
||||
requestPayload: Record<string, unknown>,
|
||||
options: OrchestrateStreamOptions
|
||||
): Promise<OrchestratorResult> {
|
||||
const { userId, workflowId, workspaceId, chatId } = options
|
||||
|
||||
let execContext: ExecutionContext
|
||||
if (workflowId) {
|
||||
execContext = await prepareExecutionContext(userId, workflowId)
|
||||
} else {
|
||||
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
||||
execContext = {
|
||||
userId,
|
||||
workflowId: '',
|
||||
workspaceId,
|
||||
decryptedEnvVars,
|
||||
}
|
||||
}
|
||||
const { userId, workflowId, chatId } = options
|
||||
const execContext = await prepareExecutionContext(userId, workflowId)
|
||||
|
||||
const payloadMsgId = requestPayload?.messageId
|
||||
const context = createStreamingContext({
|
||||
|
||||
@@ -62,7 +62,7 @@ describe('sse-handlers tool lifecycle', () => {
|
||||
await sseHandlers.tool_call(
|
||||
{
|
||||
type: 'tool_call',
|
||||
data: { id: 'tool-1', name: 'read', arguments: { workflowId: 'workflow-1' } },
|
||||
data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
||||
} as any,
|
||||
context,
|
||||
execContext,
|
||||
@@ -90,7 +90,7 @@ describe('sse-handlers tool lifecycle', () => {
|
||||
|
||||
const event = {
|
||||
type: 'tool_call',
|
||||
data: { id: 'tool-dup', name: 'read', arguments: { workflowId: 'workflow-1' } },
|
||||
data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
||||
}
|
||||
|
||||
await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
|
||||
|
||||
@@ -24,12 +24,6 @@ import {
|
||||
executeRedeploy,
|
||||
} from './deployment-tools'
|
||||
import { executeIntegrationToolDirect } from './integration-tools'
|
||||
import {
|
||||
executeVfsGlob,
|
||||
executeVfsGrep,
|
||||
executeVfsList,
|
||||
executeVfsRead,
|
||||
} from './vfs-tools'
|
||||
import type {
|
||||
CheckDeploymentStatusParams,
|
||||
CreateFolderParams,
|
||||
@@ -42,8 +36,11 @@ import type {
|
||||
GetBlockOutputsParams,
|
||||
GetBlockUpstreamReferencesParams,
|
||||
GetDeployedWorkflowStateParams,
|
||||
GetUserWorkflowParams,
|
||||
GetWorkflowDataParams,
|
||||
GetWorkflowFromNameParams,
|
||||
ListFoldersParams,
|
||||
ListUserWorkflowsParams,
|
||||
ListWorkspaceMcpServersParams,
|
||||
MoveFolderParams,
|
||||
MoveWorkflowParams,
|
||||
@@ -62,8 +59,11 @@ import {
|
||||
executeGetBlockOutputs,
|
||||
executeGetBlockUpstreamReferences,
|
||||
executeGetDeployedWorkflowState,
|
||||
executeGetUserWorkflow,
|
||||
executeGetWorkflowData,
|
||||
executeGetWorkflowFromName,
|
||||
executeListFolders,
|
||||
executeListUserWorkflows,
|
||||
executeListUserWorkspaces,
|
||||
executeMoveFolder,
|
||||
executeMoveWorkflow,
|
||||
@@ -319,13 +319,17 @@ async function executeManageCustomTool(
|
||||
}
|
||||
|
||||
const SERVER_TOOLS = new Set<string>([
|
||||
'get_blocks_and_tools',
|
||||
'get_blocks_metadata',
|
||||
'get_block_options',
|
||||
'get_block_config',
|
||||
'get_trigger_blocks',
|
||||
'edit_workflow',
|
||||
'get_workflow_console',
|
||||
'search_documentation',
|
||||
'search_online',
|
||||
'set_environment_variables',
|
||||
'get_credentials',
|
||||
'make_api_request',
|
||||
'knowledge_base',
|
||||
])
|
||||
@@ -334,6 +338,9 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
||||
string,
|
||||
(params: Record<string, unknown>, context: ExecutionContext) => Promise<ToolCallResult>
|
||||
> = {
|
||||
get_user_workflow: (p, c) => executeGetUserWorkflow(p as GetUserWorkflowParams, c),
|
||||
get_workflow_from_name: (p, c) => executeGetWorkflowFromName(p as GetWorkflowFromNameParams, c),
|
||||
list_user_workflows: (p, c) => executeListUserWorkflows(p as ListUserWorkflowsParams, c),
|
||||
list_user_workspaces: (_p, c) => executeListUserWorkspaces(c),
|
||||
list_folders: (p, c) => executeListFolders(p as ListFoldersParams, c),
|
||||
create_workflow: (p, c) => executeCreateWorkflow(p as CreateWorkflowParams, c),
|
||||
@@ -409,11 +416,6 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
||||
}
|
||||
},
|
||||
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
|
||||
// VFS tools
|
||||
grep: (p, c) => executeVfsGrep(p, c),
|
||||
glob: (p, c) => executeVfsGlob(p, c),
|
||||
read: (p, c) => executeVfsRead(p, c),
|
||||
list: (p, c) => executeVfsList(p, c),
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -5,6 +5,19 @@
|
||||
|
||||
// === Workflow Query Params ===
|
||||
|
||||
export interface GetUserWorkflowParams {
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export interface GetWorkflowFromNameParams {
|
||||
workflow_name?: string
|
||||
}
|
||||
|
||||
export interface ListUserWorkflowsParams {
|
||||
workspaceId?: string
|
||||
folderId?: string
|
||||
}
|
||||
|
||||
export interface GetWorkflowDataParams {
|
||||
workflowId?: string
|
||||
data_type?: string
|
||||
|
||||
@@ -1,128 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||
import { getOrMaterializeVFS } from '@/lib/copilot/vfs'
|
||||
|
||||
const logger = createLogger('VfsTools')
|
||||
|
||||
export async function executeVfsGrep(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const pattern = params.pattern as string | undefined
|
||||
if (!pattern) {
|
||||
return { success: false, error: "Missing required parameter 'pattern'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const result = vfs.grep(
|
||||
pattern,
|
||||
params.path as string | undefined,
|
||||
{
|
||||
maxResults: (params.maxResults as number) ?? 50,
|
||||
outputMode: (params.output_mode as 'content' | 'files_with_matches' | 'count') ?? 'content',
|
||||
ignoreCase: (params.ignoreCase as boolean) ?? false,
|
||||
lineNumbers: (params.lineNumbers as boolean) ?? true,
|
||||
context: (params.context as number) ?? 0,
|
||||
}
|
||||
)
|
||||
const outputMode = (params.output_mode as string) ?? 'content'
|
||||
const key = outputMode === 'files_with_matches' ? 'files' : outputMode === 'count' ? 'counts' : 'matches'
|
||||
return { success: true, output: { [key]: result } }
|
||||
} catch (err) {
|
||||
logger.error('vfs_grep failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_grep failed' }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeVfsGlob(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const pattern = params.pattern as string | undefined
|
||||
if (!pattern) {
|
||||
return { success: false, error: "Missing required parameter 'pattern'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const files = vfs.glob(pattern)
|
||||
return { success: true, output: { files } }
|
||||
} catch (err) {
|
||||
logger.error('vfs_glob failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_glob failed' }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeVfsRead(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const path = params.path as string | undefined
|
||||
if (!path) {
|
||||
return { success: false, error: "Missing required parameter 'path'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const result = vfs.read(
|
||||
path,
|
||||
params.offset as number | undefined,
|
||||
params.limit as number | undefined
|
||||
)
|
||||
if (!result) {
|
||||
return { success: false, error: `File not found: ${path}` }
|
||||
}
|
||||
return { success: true, output: result }
|
||||
} catch (err) {
|
||||
logger.error('vfs_read failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_read failed' }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeVfsList(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const path = params.path as string | undefined
|
||||
if (!path) {
|
||||
return { success: false, error: "Missing required parameter 'path'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const entries = vfs.list(path)
|
||||
return { success: true, output: { entries } }
|
||||
} catch (err) {
|
||||
logger.error('vfs_list failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_list failed' }
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,10 @@ import { db } from '@sim/db'
|
||||
import { customTools, permissions, workflow, workflowFolder, workspace } from '@sim/db/schema'
|
||||
import { and, asc, desc, eq, isNull, or } from 'drizzle-orm'
|
||||
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||
import { formatNormalizedWorkflowForCopilot } from '@/lib/copilot/tools/shared/workflow-utils'
|
||||
import {
|
||||
formatNormalizedWorkflowForCopilot,
|
||||
normalizeWorkflowName,
|
||||
} from '@/lib/copilot/tools/shared/workflow-utils'
|
||||
import { mcpService } from '@/lib/mcp/service'
|
||||
import { listWorkspaceFiles } from '@/lib/uploads/contexts/workspace'
|
||||
import { getEffectiveBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
|
||||
@@ -19,16 +22,116 @@ import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||
import {
|
||||
ensureWorkflowAccess,
|
||||
ensureWorkspaceAccess,
|
||||
getAccessibleWorkflowsForUser,
|
||||
getDefaultWorkspaceId,
|
||||
} from '../access'
|
||||
import type {
|
||||
GetBlockOutputsParams,
|
||||
GetBlockUpstreamReferencesParams,
|
||||
GetDeployedWorkflowStateParams,
|
||||
GetUserWorkflowParams,
|
||||
GetWorkflowDataParams,
|
||||
GetWorkflowFromNameParams,
|
||||
ListFoldersParams,
|
||||
ListUserWorkflowsParams,
|
||||
} from '../param-types'
|
||||
|
||||
export async function executeGetUserWorkflow(
|
||||
params: GetUserWorkflowParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workflowId = params.workflowId || context.workflowId
|
||||
if (!workflowId) {
|
||||
return { success: false, error: 'workflowId is required' }
|
||||
}
|
||||
|
||||
const { workflow: workflowRecord, workspaceId } = await ensureWorkflowAccess(
|
||||
workflowId,
|
||||
context.userId
|
||||
)
|
||||
|
||||
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
|
||||
if (!userWorkflow) {
|
||||
return { success: false, error: 'Workflow has no normalized data' }
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
workflowId,
|
||||
workflowName: workflowRecord.name || '',
|
||||
workspaceId,
|
||||
userWorkflow,
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeGetWorkflowFromName(
|
||||
params: GetWorkflowFromNameParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workflowName = typeof params.workflow_name === 'string' ? params.workflow_name.trim() : ''
|
||||
if (!workflowName) {
|
||||
return { success: false, error: 'workflow_name is required' }
|
||||
}
|
||||
|
||||
const workflows = await getAccessibleWorkflowsForUser(context.userId)
|
||||
|
||||
const targetName = normalizeWorkflowName(workflowName)
|
||||
const match = workflows.find((w) => normalizeWorkflowName(w.name) === targetName)
|
||||
if (!match) {
|
||||
return { success: false, error: `Workflow not found: ${workflowName}` }
|
||||
}
|
||||
|
||||
const normalized = await loadWorkflowFromNormalizedTables(match.id)
|
||||
const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
|
||||
if (!userWorkflow) {
|
||||
return { success: false, error: 'Workflow has no normalized data' }
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
workflowId: match.id,
|
||||
workflowName: match.name || '',
|
||||
workspaceId: match.workspaceId,
|
||||
userWorkflow,
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeListUserWorkflows(
|
||||
params: ListUserWorkflowsParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workspaceId = params?.workspaceId as string | undefined
|
||||
const folderId = params?.folderId as string | undefined
|
||||
|
||||
const workflows = await getAccessibleWorkflowsForUser(context.userId, { workspaceId, folderId })
|
||||
|
||||
const workflowList = workflows.map((w) => ({
|
||||
workflowId: w.id,
|
||||
workflowName: w.name || '',
|
||||
workspaceId: w.workspaceId,
|
||||
folderId: w.folderId,
|
||||
}))
|
||||
|
||||
return { success: true, output: { workflows: workflowList } }
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeListUserWorkspaces(
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
|
||||
@@ -2,6 +2,7 @@ import { db } from '@sim/db'
|
||||
import { copilotChats, document, knowledgeBase, templates } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
|
||||
import { isHiddenFromDisplay } from '@/blocks/types'
|
||||
@@ -349,16 +350,14 @@ async function processBlockMetadata(
|
||||
userId?: string
|
||||
): Promise<AgentContext | null> {
|
||||
try {
|
||||
if (userId) {
|
||||
const permissionConfig = await getUserPermissionConfig(userId)
|
||||
const allowedIntegrations = permissionConfig?.allowedIntegrations
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockId)) {
|
||||
logger.debug('Block not allowed by permission group', { blockId, userId })
|
||||
return null
|
||||
}
|
||||
const permissionConfig = userId ? await getUserPermissionConfig(userId) : null
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? getAllowedIntegrationsFromEnv()
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockId.toLowerCase())) {
|
||||
logger.debug('Block not allowed by integration allowlist', { blockId, userId })
|
||||
return null
|
||||
}
|
||||
|
||||
// Reuse registry to match get_blocks_metadata tool result
|
||||
const { registry: blockRegistry } = await import('@/blocks/registry')
|
||||
const { tools: toolsRegistry } = await import('@/tools/registry')
|
||||
const SPECIAL_BLOCKS_METADATA: Record<string, any> = {}
|
||||
@@ -466,7 +465,6 @@ async function processWorkflowBlockFromDb(
|
||||
if (!block) return null
|
||||
const tag = label ? `@${label} in Workflow` : `@${block.name || blockId} in Workflow`
|
||||
|
||||
// Build content: isolate the block and include its subBlocks fully
|
||||
const contentObj = {
|
||||
workflowId,
|
||||
block: block,
|
||||
@@ -518,7 +516,6 @@ async function processExecutionLogFromDb(
|
||||
endedAt: log.endedAt?.toISOString?.() || (log.endedAt ? String(log.endedAt) : null),
|
||||
totalDurationMs: log.totalDurationMs ?? null,
|
||||
workflowName: log.workflowName || '',
|
||||
// Include trace spans and any available details without being huge
|
||||
executionData: log.executionData
|
||||
? {
|
||||
traceSpans: (log.executionData as any).traceSpans || undefined,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { LucideIcon } from 'lucide-react'
|
||||
import {
|
||||
Blocks,
|
||||
BookOpen,
|
||||
Bug,
|
||||
Check,
|
||||
@@ -8,6 +9,7 @@ import {
|
||||
ClipboardCheck,
|
||||
Compass,
|
||||
Database,
|
||||
FileCode,
|
||||
FileText,
|
||||
FlaskConical,
|
||||
GitBranch,
|
||||
@@ -17,7 +19,9 @@ import {
|
||||
Grid2x2Check,
|
||||
Grid2x2X,
|
||||
Info,
|
||||
Key,
|
||||
KeyRound,
|
||||
ListChecks,
|
||||
ListFilter,
|
||||
ListTodo,
|
||||
Loader2,
|
||||
@@ -37,11 +41,13 @@ import {
|
||||
Sparkles,
|
||||
Tag,
|
||||
TerminalSquare,
|
||||
WorkflowIcon,
|
||||
Wrench,
|
||||
X,
|
||||
XCircle,
|
||||
Zap,
|
||||
} from 'lucide-react'
|
||||
import { getLatestBlock } from '@/blocks/registry'
|
||||
import { getCustomTool } from '@/hooks/queries/custom-tools'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
@@ -623,6 +629,86 @@ const META_evaluate: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_block_config: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting block config', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Getting block config', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Getting block config', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved block config', icon: FileCode },
|
||||
[ClientToolCallState.error]: { text: 'Failed to get block config', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block config', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped getting block config',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.blockType && typeof params.blockType === 'string') {
|
||||
const blockConfig = getLatestBlock(params.blockType)
|
||||
const blockName = (blockConfig?.name ?? params.blockType.replace(/_/g, ' ')).toLowerCase()
|
||||
const opSuffix = params.operation ? ` (${params.operation})` : ''
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Retrieved ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Retrieving ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to retrieve ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted retrieving ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped retrieving ${blockName}${opSuffix} config`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_block_options: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved block operations', icon: ListFilter },
|
||||
[ClientToolCallState.error]: { text: 'Failed to get block operations', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block operations', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped getting block operations',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const blockId =
|
||||
(params as any)?.blockId ||
|
||||
(params as any)?.blockType ||
|
||||
(params as any)?.block_id ||
|
||||
(params as any)?.block_type
|
||||
if (typeof blockId === 'string') {
|
||||
const blockConfig = getLatestBlock(blockId)
|
||||
const blockName = (blockConfig?.name ?? blockId.replace(/_/g, ' ')).toLowerCase()
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Retrieved ${blockName} operations`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Retrieving ${blockName} operations`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to retrieve ${blockName} operations`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted retrieving ${blockName} operations`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped retrieving ${blockName} operations`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_block_outputs: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting block outputs', icon: Loader2 },
|
||||
@@ -681,6 +767,19 @@ const META_get_block_upstream_references: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_blocks_and_tools: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Exploring available options', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Exploring available options', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Exploring available options', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Explored available options', icon: Blocks },
|
||||
[ClientToolCallState.error]: { text: 'Failed to explore options', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted exploring options', icon: MinusCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped exploring options', icon: MinusCircle },
|
||||
},
|
||||
interrupt: undefined,
|
||||
}
|
||||
|
||||
const META_get_blocks_metadata: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Searching block choices', icon: Loader2 },
|
||||
@@ -722,6 +821,27 @@ const META_get_blocks_metadata: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_credentials: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Fetching connected integrations', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Fetching connected integrations', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Fetching connected integrations', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Fetched connected integrations', icon: Key },
|
||||
[ClientToolCallState.error]: {
|
||||
text: 'Failed to fetch connected integrations',
|
||||
icon: XCircle,
|
||||
},
|
||||
[ClientToolCallState.aborted]: {
|
||||
text: 'Aborted fetching connected integrations',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped fetching connected integrations',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_examples_rag: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Fetching examples', icon: Loader2 },
|
||||
@@ -869,6 +989,41 @@ const META_get_trigger_examples: ToolMetadata = {
|
||||
interrupt: undefined,
|
||||
}
|
||||
|
||||
const META_get_user_workflow: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Reading your workflow', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Reading your workflow', icon: WorkflowIcon },
|
||||
[ClientToolCallState.executing]: { text: 'Reading your workflow', icon: Loader2 },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted reading your workflow', icon: XCircle },
|
||||
[ClientToolCallState.success]: { text: 'Read your workflow', icon: WorkflowIcon },
|
||||
[ClientToolCallState.error]: { text: 'Failed to read your workflow', icon: X },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped reading your workflow', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
|
||||
if (workflowId) {
|
||||
const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name
|
||||
if (workflowName) {
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Read ${workflowName}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Reading ${workflowName}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to read ${workflowName}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted reading ${workflowName}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped reading ${workflowName}`
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_workflow_console: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Fetching execution logs', icon: Loader2 },
|
||||
@@ -951,6 +1106,39 @@ const META_get_workflow_data: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_workflow_from_name: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Reading workflow', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Reading workflow', icon: FileText },
|
||||
[ClientToolCallState.executing]: { text: 'Reading workflow', icon: Loader2 },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted reading workflow', icon: XCircle },
|
||||
[ClientToolCallState.success]: { text: 'Read workflow', icon: FileText },
|
||||
[ClientToolCallState.error]: { text: 'Failed to read workflow', icon: X },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped reading workflow', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.workflow_name && typeof params.workflow_name === 'string') {
|
||||
const workflowName = params.workflow_name
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Read ${workflowName}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Reading ${workflowName}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to read ${workflowName}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted reading ${workflowName}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped reading ${workflowName}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
const META_info: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting info', icon: Loader2 },
|
||||
@@ -1042,6 +1230,18 @@ const META_knowledge_base: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_list_user_workflows: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Listing your workflows', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Listing your workflows', icon: ListChecks },
|
||||
[ClientToolCallState.executing]: { text: 'Listing your workflows', icon: Loader2 },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted listing workflows', icon: XCircle },
|
||||
[ClientToolCallState.success]: { text: 'Listed your workflows', icon: ListChecks },
|
||||
[ClientToolCallState.error]: { text: 'Failed to list workflows', icon: X },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped listing workflows', icon: XCircle },
|
||||
},
|
||||
}
|
||||
|
||||
const META_list_workspace_mcp_servers: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
@@ -2343,9 +2543,13 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
||||
edit: META_edit,
|
||||
edit_workflow: META_edit_workflow,
|
||||
evaluate: META_evaluate,
|
||||
get_block_config: META_get_block_config,
|
||||
get_block_options: META_get_block_options,
|
||||
get_block_outputs: META_get_block_outputs,
|
||||
get_block_upstream_references: META_get_block_upstream_references,
|
||||
get_blocks_and_tools: META_get_blocks_and_tools,
|
||||
get_blocks_metadata: META_get_blocks_metadata,
|
||||
get_credentials: META_get_credentials,
|
||||
generate_api_key: META_generate_api_key,
|
||||
get_examples_rag: META_get_examples_rag,
|
||||
get_operations_examples: META_get_operations_examples,
|
||||
@@ -2353,11 +2557,14 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
||||
get_platform_actions: META_get_platform_actions,
|
||||
get_trigger_blocks: META_get_trigger_blocks,
|
||||
get_trigger_examples: META_get_trigger_examples,
|
||||
get_user_workflow: META_get_user_workflow,
|
||||
get_workflow_console: META_get_workflow_console,
|
||||
get_workflow_data: META_get_workflow_data,
|
||||
get_workflow_from_name: META_get_workflow_from_name,
|
||||
info: META_info,
|
||||
knowledge: META_knowledge,
|
||||
knowledge_base: META_knowledge_base,
|
||||
list_user_workflows: META_list_user_workflows,
|
||||
list_workspace_mcp_servers: META_list_workspace_mcp_servers,
|
||||
make_api_request: META_make_api_request,
|
||||
manage_custom_tool: META_manage_custom_tool,
|
||||
|
||||
@@ -27,6 +27,25 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
properties: {},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'list_workflows',
|
||||
toolId: 'list_user_workflows',
|
||||
description:
|
||||
'List all workflows the user has access to. Returns workflow IDs, names, workspace, and folder info. Use workspaceId/folderId to scope results.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
workspaceId: {
|
||||
type: 'string',
|
||||
description: 'Optional workspace ID to filter workflows.',
|
||||
},
|
||||
folderId: {
|
||||
type: 'string',
|
||||
description: 'Optional folder ID to filter workflows.',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'list_folders',
|
||||
toolId: 'list_folders',
|
||||
@@ -43,6 +62,22 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
required: ['workspaceId'],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'get_workflow',
|
||||
toolId: 'get_user_workflow',
|
||||
description:
|
||||
'Get a workflow by ID. Returns the full workflow definition including all blocks, connections, and configuration.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
workflowId: {
|
||||
type: 'string',
|
||||
description: 'Workflow ID to retrieve.',
|
||||
},
|
||||
},
|
||||
required: ['workflowId'],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'create_workflow',
|
||||
toolId: 'create_workflow',
|
||||
|
||||
495
apps/sim/lib/copilot/tools/server/blocks/get-block-config.ts
Normal file
495
apps/sim/lib/copilot/tools/server/blocks/get-block-config.ts
Normal file
@@ -0,0 +1,495 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import {
|
||||
GetBlockConfigInput,
|
||||
type GetBlockConfigInputType,
|
||||
GetBlockConfigResult,
|
||||
type GetBlockConfigResultType,
|
||||
} from '@/lib/copilot/tools/shared/schemas'
|
||||
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
|
||||
import { isHiddenFromDisplay, type SubBlockConfig } from '@/blocks/types'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
import { PROVIDER_DEFINITIONS } from '@/providers/models'
|
||||
import { tools as toolsRegistry } from '@/tools/registry'
|
||||
import { getTrigger, isTriggerValid } from '@/triggers'
|
||||
|
||||
interface InputFieldSchema {
|
||||
type: string
|
||||
description?: string
|
||||
placeholder?: string
|
||||
required?: boolean
|
||||
options?: string[]
|
||||
default?: any
|
||||
min?: number
|
||||
max?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all available models from PROVIDER_DEFINITIONS as static options.
|
||||
* This provides fallback data when store state is not available server-side.
|
||||
*/
|
||||
function getStaticModelOptions(): string[] {
|
||||
const models: string[] = []
|
||||
|
||||
for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
|
||||
// Skip providers with dynamic/fetched models
|
||||
if (provider.id === 'ollama' || provider.id === 'vllm' || provider.id === 'openrouter') {
|
||||
continue
|
||||
}
|
||||
if (provider?.models) {
|
||||
for (const model of provider.models) {
|
||||
models.push(model.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return models
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to call a dynamic options function with fallback data injected.
|
||||
*/
|
||||
function callOptionsWithFallback(optionsFn: () => any[]): any[] | undefined {
|
||||
const staticModels = getStaticModelOptions()
|
||||
|
||||
const mockProvidersState = {
|
||||
providers: {
|
||||
base: { models: staticModels },
|
||||
ollama: { models: [] },
|
||||
vllm: { models: [] },
|
||||
openrouter: { models: [] },
|
||||
},
|
||||
}
|
||||
|
||||
let originalGetState: (() => any) | undefined
|
||||
let store: any
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
store = require('@/stores/providers')
|
||||
if (store?.useProvidersStore?.getState) {
|
||||
originalGetState = store.useProvidersStore.getState
|
||||
store.useProvidersStore.getState = () => mockProvidersState
|
||||
}
|
||||
} catch {
|
||||
// Store module not available
|
||||
}
|
||||
|
||||
try {
|
||||
return optionsFn()
|
||||
} finally {
|
||||
if (store?.useProvidersStore && originalGetState) {
|
||||
store.useProvidersStore.getState = originalGetState
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves options from a subBlock, handling both static arrays and dynamic functions
|
||||
*/
|
||||
function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
|
||||
// Skip if subblock uses fetchOptions (async network calls)
|
||||
if (sb.fetchOptions) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
let rawOptions: any[] | undefined
|
||||
|
||||
try {
|
||||
if (typeof sb.options === 'function') {
|
||||
rawOptions = callOptionsWithFallback(sb.options)
|
||||
} else {
|
||||
rawOptions = sb.options
|
||||
}
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
|
||||
if (!Array.isArray(rawOptions) || rawOptions.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// Return the actual option ID/value that edit_workflow expects, not the display label
|
||||
return rawOptions
|
||||
.map((opt: any) => {
|
||||
if (!opt) return undefined
|
||||
if (typeof opt === 'object') {
|
||||
return opt.id || opt.label // Prefer id (actual value) over label (display name)
|
||||
}
|
||||
return String(opt)
|
||||
})
|
||||
.filter((o): o is string => o !== undefined)
|
||||
}
|
||||
|
||||
interface OutputFieldSchema {
|
||||
type: string
|
||||
description?: string
|
||||
properties?: Record<string, OutputFieldSchema>
|
||||
items?: { type: string }
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the condition to check if it matches the given operation
|
||||
*/
|
||||
function matchesOperation(condition: any, operation: string): boolean {
|
||||
if (!condition) return false
|
||||
|
||||
const cond = typeof condition === 'function' ? condition() : condition
|
||||
if (!cond) return false
|
||||
|
||||
if (cond.field === 'operation' && !cond.not) {
|
||||
const values = Array.isArray(cond.value) ? cond.value : [cond.value]
|
||||
return values.includes(operation)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts input schema from subBlocks
|
||||
*/
|
||||
function extractInputsFromSubBlocks(
|
||||
subBlocks: SubBlockConfig[],
|
||||
operation?: string,
|
||||
triggerMode?: boolean
|
||||
): Record<string, InputFieldSchema> {
|
||||
const inputs: Record<string, InputFieldSchema> = {}
|
||||
|
||||
for (const sb of subBlocks) {
|
||||
// Handle trigger vs non-trigger mode filtering
|
||||
if (triggerMode) {
|
||||
// In trigger mode, only include subBlocks with mode: 'trigger'
|
||||
if (sb.mode !== 'trigger') continue
|
||||
} else {
|
||||
// In non-trigger mode, skip trigger-mode subBlocks
|
||||
if (sb.mode === 'trigger') continue
|
||||
}
|
||||
|
||||
// Skip hidden subBlocks
|
||||
if (sb.hidden) continue
|
||||
|
||||
// If operation is specified, only include subBlocks that:
|
||||
// 1. Have no condition (common parameters)
|
||||
// 2. Have a condition matching the operation
|
||||
if (operation) {
|
||||
const condition = typeof sb.condition === 'function' ? sb.condition() : sb.condition
|
||||
if (condition) {
|
||||
if (condition.field === 'operation' && !condition.not) {
|
||||
// This is an operation-specific field
|
||||
const values = Array.isArray(condition.value) ? condition.value : [condition.value]
|
||||
if (!values.includes(operation)) {
|
||||
continue // Skip if doesn't match our operation
|
||||
}
|
||||
} else if (!matchesOperation(condition, operation)) {
|
||||
// Other condition that doesn't match
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const field: InputFieldSchema = {
|
||||
type: mapSubBlockTypeToSchemaType(sb.type),
|
||||
}
|
||||
|
||||
if (sb.description) field.description = sb.description
|
||||
if (sb.title && !sb.description) field.description = sb.title
|
||||
if (sb.placeholder) field.placeholder = sb.placeholder
|
||||
|
||||
// Handle required
|
||||
if (typeof sb.required === 'boolean') {
|
||||
field.required = sb.required
|
||||
} else if (typeof sb.required === 'object') {
|
||||
field.required = true // Has conditional requirement
|
||||
}
|
||||
|
||||
// Handle options using the resolver that handles dynamic model lists
|
||||
const resolvedOptions = resolveSubBlockOptions(sb)
|
||||
if (resolvedOptions && resolvedOptions.length > 0) {
|
||||
field.options = resolvedOptions
|
||||
}
|
||||
|
||||
// Handle default value
|
||||
if (sb.defaultValue !== undefined) {
|
||||
field.default = sb.defaultValue
|
||||
}
|
||||
|
||||
// Handle numeric constraints
|
||||
if (sb.min !== undefined) field.min = sb.min
|
||||
if (sb.max !== undefined) field.max = sb.max
|
||||
|
||||
inputs[sb.id] = field
|
||||
}
|
||||
|
||||
return inputs
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps subBlock type to a simplified schema type
|
||||
*/
|
||||
function mapSubBlockTypeToSchemaType(type: string): string {
|
||||
const typeMap: Record<string, string> = {
|
||||
'short-input': 'string',
|
||||
'long-input': 'string',
|
||||
code: 'string',
|
||||
dropdown: 'string',
|
||||
combobox: 'string',
|
||||
slider: 'number',
|
||||
switch: 'boolean',
|
||||
'tool-input': 'json',
|
||||
'checkbox-list': 'array',
|
||||
'grouped-checkbox-list': 'array',
|
||||
'condition-input': 'json',
|
||||
'eval-input': 'json',
|
||||
'time-input': 'string',
|
||||
'oauth-input': 'credential',
|
||||
'file-selector': 'string',
|
||||
'project-selector': 'string',
|
||||
'channel-selector': 'string',
|
||||
'user-selector': 'string',
|
||||
'folder-selector': 'string',
|
||||
'knowledge-base-selector': 'string',
|
||||
'document-selector': 'string',
|
||||
'mcp-server-selector': 'string',
|
||||
'mcp-tool-selector': 'string',
|
||||
table: 'json',
|
||||
'file-upload': 'file',
|
||||
'messages-input': 'array',
|
||||
}
|
||||
|
||||
return typeMap[type] || 'string'
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts a single output field schema, including nested properties
|
||||
*/
|
||||
function extractOutputField(def: any): OutputFieldSchema {
|
||||
if (typeof def === 'string') {
|
||||
return { type: def }
|
||||
}
|
||||
|
||||
if (typeof def !== 'object' || def === null) {
|
||||
return { type: 'any' }
|
||||
}
|
||||
|
||||
const field: OutputFieldSchema = {
|
||||
type: def.type || 'any',
|
||||
}
|
||||
|
||||
if (def.description) {
|
||||
field.description = def.description
|
||||
}
|
||||
|
||||
// Include nested properties if present
|
||||
if (def.properties && typeof def.properties === 'object') {
|
||||
field.properties = {}
|
||||
for (const [propKey, propDef] of Object.entries(def.properties)) {
|
||||
field.properties[propKey] = extractOutputField(propDef)
|
||||
}
|
||||
}
|
||||
|
||||
// Include items schema for arrays
|
||||
if (def.items && typeof def.items === 'object') {
|
||||
field.items = { type: def.items.type || 'any' }
|
||||
}
|
||||
|
||||
return field
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts trigger outputs from the first available trigger
|
||||
*/
|
||||
function extractTriggerOutputs(blockConfig: any): Record<string, OutputFieldSchema> {
|
||||
const outputs: Record<string, OutputFieldSchema> = {}
|
||||
|
||||
if (!blockConfig.triggers?.enabled || !blockConfig.triggers?.available?.length) {
|
||||
return outputs
|
||||
}
|
||||
|
||||
// Get the first available trigger's outputs as a baseline
|
||||
const triggerId = blockConfig.triggers.available[0]
|
||||
if (triggerId && isTriggerValid(triggerId)) {
|
||||
const trigger = getTrigger(triggerId)
|
||||
if (trigger.outputs) {
|
||||
for (const [key, def] of Object.entries(trigger.outputs)) {
|
||||
if (isHiddenFromDisplay(def)) continue
|
||||
outputs[key] = extractOutputField(def)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return outputs
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts output schema from block config or tool
|
||||
*/
|
||||
function extractOutputs(
|
||||
blockConfig: any,
|
||||
operation?: string,
|
||||
triggerMode?: boolean
|
||||
): Record<string, OutputFieldSchema> {
|
||||
const outputs: Record<string, OutputFieldSchema> = {}
|
||||
|
||||
// In trigger mode, return trigger outputs
|
||||
if (triggerMode && blockConfig.triggers?.enabled) {
|
||||
return extractTriggerOutputs(blockConfig)
|
||||
}
|
||||
|
||||
// If operation is specified, try to get outputs from the specific tool
|
||||
if (operation) {
|
||||
try {
|
||||
const toolSelector = blockConfig.tools?.config?.tool
|
||||
if (typeof toolSelector === 'function') {
|
||||
const toolId = toolSelector({ operation })
|
||||
const tool = toolsRegistry[toolId]
|
||||
if (tool?.outputs) {
|
||||
for (const [key, def] of Object.entries(tool.outputs)) {
|
||||
if (isHiddenFromDisplay(def)) continue
|
||||
outputs[key] = extractOutputField(def)
|
||||
}
|
||||
return outputs
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Fall through to block-level outputs
|
||||
}
|
||||
}
|
||||
|
||||
// Use block-level outputs
|
||||
if (blockConfig.outputs) {
|
||||
for (const [key, def] of Object.entries(blockConfig.outputs)) {
|
||||
if (isHiddenFromDisplay(def)) continue
|
||||
outputs[key] = extractOutputField(def)
|
||||
}
|
||||
}
|
||||
|
||||
return outputs
|
||||
}
|
||||
|
||||
export const getBlockConfigServerTool: BaseServerTool<
|
||||
GetBlockConfigInputType,
|
||||
GetBlockConfigResultType
|
||||
> = {
|
||||
name: 'get_block_config',
|
||||
inputSchema: GetBlockConfigInput,
|
||||
outputSchema: GetBlockConfigResult,
|
||||
async execute(
|
||||
{ blockType, operation, trigger }: GetBlockConfigInputType,
|
||||
context?: { userId: string }
|
||||
): Promise<GetBlockConfigResultType> {
|
||||
const logger = createLogger('GetBlockConfigServerTool')
|
||||
logger.debug('Executing get_block_config', { blockType, operation, trigger })
|
||||
|
||||
if (blockType === 'loop') {
|
||||
const result = {
|
||||
blockType,
|
||||
blockName: 'Loop',
|
||||
operation,
|
||||
trigger,
|
||||
inputs: {
|
||||
loopType: {
|
||||
type: 'string',
|
||||
description: 'Loop type',
|
||||
options: ['for', 'forEach', 'while', 'doWhile'],
|
||||
default: 'for',
|
||||
},
|
||||
iterations: {
|
||||
type: 'number',
|
||||
description: 'Number of iterations (for loop type "for")',
|
||||
},
|
||||
collection: {
|
||||
type: 'string',
|
||||
description: 'Collection to iterate (for loop type "forEach")',
|
||||
},
|
||||
condition: {
|
||||
type: 'string',
|
||||
description: 'Loop condition (for loop types "while" and "doWhile")',
|
||||
},
|
||||
},
|
||||
outputs: {},
|
||||
}
|
||||
return GetBlockConfigResult.parse(result)
|
||||
}
|
||||
|
||||
if (blockType === 'parallel') {
|
||||
const result = {
|
||||
blockType,
|
||||
blockName: 'Parallel',
|
||||
operation,
|
||||
trigger,
|
||||
inputs: {
|
||||
parallelType: {
|
||||
type: 'string',
|
||||
description: 'Parallel type',
|
||||
options: ['count', 'collection'],
|
||||
default: 'count',
|
||||
},
|
||||
count: {
|
||||
type: 'number',
|
||||
description: 'Number of parallel branches (for parallel type "count")',
|
||||
},
|
||||
collection: {
|
||||
type: 'string',
|
||||
description: 'Collection to branch over (for parallel type "collection")',
|
||||
},
|
||||
},
|
||||
outputs: {},
|
||||
}
|
||||
return GetBlockConfigResult.parse(result)
|
||||
}
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? getAllowedIntegrationsFromEnv()
|
||||
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockType.toLowerCase())) {
|
||||
throw new Error(`Block "${blockType}" is not available`)
|
||||
}
|
||||
|
||||
const blockConfig = blockRegistry[blockType]
|
||||
if (!blockConfig) {
|
||||
throw new Error(`Block not found: ${blockType}`)
|
||||
}
|
||||
|
||||
// Validate trigger mode is supported for this block
|
||||
if (trigger && !blockConfig.triggers?.enabled && !blockConfig.triggerAllowed) {
|
||||
throw new Error(
|
||||
`Block "${blockType}" does not support trigger mode. Only blocks with triggers.enabled or triggerAllowed can be used in trigger mode.`
|
||||
)
|
||||
}
|
||||
|
||||
// If operation is specified, validate it exists
|
||||
if (operation) {
|
||||
const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
|
||||
if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
|
||||
const validOperations = operationSubBlock.options.map((o) =>
|
||||
typeof o === 'object' ? o.id : o
|
||||
)
|
||||
if (!validOperations.includes(operation)) {
|
||||
throw new Error(
|
||||
`Invalid operation "${operation}" for block "${blockType}". Valid operations: ${validOperations.join(', ')}`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const subBlocks = Array.isArray(blockConfig.subBlocks) ? blockConfig.subBlocks : []
|
||||
const inputs = extractInputsFromSubBlocks(subBlocks, operation, trigger)
|
||||
const outputs = extractOutputs(blockConfig, operation, trigger)
|
||||
|
||||
const latestBlock = getLatestBlock(blockType)
|
||||
const displayName = latestBlock?.name ?? blockConfig.name
|
||||
|
||||
const result = {
|
||||
blockType,
|
||||
blockName: displayName,
|
||||
operation,
|
||||
trigger,
|
||||
inputs,
|
||||
outputs,
|
||||
}
|
||||
|
||||
return GetBlockConfigResult.parse(result)
|
||||
},
|
||||
}
|
||||
132
apps/sim/lib/copilot/tools/server/blocks/get-block-options.ts
Normal file
132
apps/sim/lib/copilot/tools/server/blocks/get-block-options.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import {
|
||||
GetBlockOptionsInput,
|
||||
type GetBlockOptionsInputType,
|
||||
GetBlockOptionsResult,
|
||||
type GetBlockOptionsResultType,
|
||||
} from '@/lib/copilot/tools/shared/schemas'
|
||||
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
import { tools as toolsRegistry } from '@/tools/registry'
|
||||
|
||||
export const getBlockOptionsServerTool: BaseServerTool<
|
||||
GetBlockOptionsInputType,
|
||||
GetBlockOptionsResultType
|
||||
> = {
|
||||
name: 'get_block_options',
|
||||
inputSchema: GetBlockOptionsInput,
|
||||
outputSchema: GetBlockOptionsResult,
|
||||
async execute(
|
||||
{ blockId }: GetBlockOptionsInputType,
|
||||
context?: { userId: string }
|
||||
): Promise<GetBlockOptionsResultType> {
|
||||
const logger = createLogger('GetBlockOptionsServerTool')
|
||||
logger.debug('Executing get_block_options', { blockId })
|
||||
|
||||
if (blockId === 'loop') {
|
||||
const result = {
|
||||
blockId,
|
||||
blockName: 'Loop',
|
||||
operations: [
|
||||
{ id: 'for', name: 'For', description: 'Run a fixed number of iterations.' },
|
||||
{ id: 'forEach', name: 'For each', description: 'Iterate over a collection.' },
|
||||
{ id: 'while', name: 'While', description: 'Repeat while a condition is true.' },
|
||||
{
|
||||
id: 'doWhile',
|
||||
name: 'Do while',
|
||||
description: 'Run once, then repeat while a condition is true.',
|
||||
},
|
||||
],
|
||||
}
|
||||
return GetBlockOptionsResult.parse(result)
|
||||
}
|
||||
|
||||
if (blockId === 'parallel') {
|
||||
const result = {
|
||||
blockId,
|
||||
blockName: 'Parallel',
|
||||
operations: [
|
||||
{ id: 'count', name: 'Count', description: 'Run a fixed number of parallel branches.' },
|
||||
{
|
||||
id: 'collection',
|
||||
name: 'Collection',
|
||||
description: 'Run one branch per collection item.',
|
||||
},
|
||||
],
|
||||
}
|
||||
return GetBlockOptionsResult.parse(result)
|
||||
}
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? getAllowedIntegrationsFromEnv()
|
||||
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockId.toLowerCase())) {
|
||||
throw new Error(`Block "${blockId}" is not available`)
|
||||
}
|
||||
|
||||
const blockConfig = blockRegistry[blockId]
|
||||
if (!blockConfig) {
|
||||
throw new Error(`Block not found: ${blockId}`)
|
||||
}
|
||||
|
||||
const operations: { id: string; name: string; description?: string }[] = []
|
||||
|
||||
// Check if block has an operation dropdown to determine available operations
|
||||
const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
|
||||
if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
|
||||
// Block has operations - get tool info for each operation
|
||||
for (const option of operationSubBlock.options) {
|
||||
const opId = typeof option === 'object' ? option.id : option
|
||||
const opLabel = typeof option === 'object' ? option.label : option
|
||||
|
||||
// Try to resolve the tool for this operation
|
||||
let toolDescription: string | undefined
|
||||
try {
|
||||
const toolSelector = blockConfig.tools?.config?.tool
|
||||
if (typeof toolSelector === 'function') {
|
||||
const toolId = toolSelector({ operation: opId })
|
||||
const tool = toolsRegistry[toolId]
|
||||
if (tool) {
|
||||
toolDescription = tool.description
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Tool resolution failed, continue without description
|
||||
}
|
||||
|
||||
operations.push({
|
||||
id: opId,
|
||||
name: opLabel || opId,
|
||||
description: toolDescription,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// No operation dropdown - list all accessible tools
|
||||
const accessibleTools = blockConfig.tools?.access || []
|
||||
for (const toolId of accessibleTools) {
|
||||
const tool = toolsRegistry[toolId]
|
||||
if (tool) {
|
||||
operations.push({
|
||||
id: toolId,
|
||||
name: tool.name || toolId,
|
||||
description: tool.description,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const latestBlock = getLatestBlock(blockId)
|
||||
const displayName = latestBlock?.name ?? blockConfig.name
|
||||
|
||||
const result = {
|
||||
blockId,
|
||||
blockName: displayName,
|
||||
operations,
|
||||
}
|
||||
|
||||
return GetBlockOptionsResult.parse(result)
|
||||
},
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { GetBlocksAndToolsInput, GetBlocksAndToolsResult } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { registry as blockRegistry } from '@/blocks/registry'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
|
||||
export const getBlocksAndToolsServerTool: BaseServerTool<
|
||||
ReturnType<typeof GetBlocksAndToolsInput.parse>,
|
||||
ReturnType<typeof GetBlocksAndToolsResult.parse>
|
||||
> = {
|
||||
name: 'get_blocks_and_tools',
|
||||
inputSchema: GetBlocksAndToolsInput,
|
||||
outputSchema: GetBlocksAndToolsResult,
|
||||
async execute(_args: unknown, context?: { userId: string }) {
|
||||
const logger = createLogger('GetBlocksAndToolsServerTool')
|
||||
logger.debug('Executing get_blocks_and_tools')
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? getAllowedIntegrationsFromEnv()
|
||||
|
||||
type BlockListItem = {
|
||||
type: string
|
||||
name: string
|
||||
description?: string
|
||||
triggerAllowed?: boolean
|
||||
}
|
||||
const blocks: BlockListItem[] = []
|
||||
|
||||
Object.entries(blockRegistry)
|
||||
.filter(([blockType, blockConfig]: [string, BlockConfig]) => {
|
||||
if (blockConfig.hideFromToolbar) return false
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockType.toLowerCase()))
|
||||
return false
|
||||
return true
|
||||
})
|
||||
.forEach(([blockType, blockConfig]: [string, BlockConfig]) => {
|
||||
blocks.push({
|
||||
type: blockType,
|
||||
name: blockConfig.name,
|
||||
description: blockConfig.longDescription,
|
||||
triggerAllowed: 'triggerAllowed' in blockConfig ? !!blockConfig.triggerAllowed : false,
|
||||
})
|
||||
})
|
||||
|
||||
const specialBlocks: Record<string, { name: string; description: string }> = {
|
||||
loop: {
|
||||
name: 'Loop',
|
||||
description:
|
||||
'Control flow block for iterating over collections or repeating actions in a loop',
|
||||
},
|
||||
parallel: {
|
||||
name: 'Parallel',
|
||||
description: 'Control flow block for executing multiple branches simultaneously',
|
||||
},
|
||||
}
|
||||
Object.entries(specialBlocks).forEach(([blockType, info]) => {
|
||||
if (!blocks.some((b) => b.type === blockType)) {
|
||||
blocks.push({
|
||||
type: blockType,
|
||||
name: info.name,
|
||||
description: info.description,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
return GetBlocksAndToolsResult.parse({ blocks })
|
||||
},
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import { join } from 'path'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { GetBlocksMetadataInput, GetBlocksMetadataResult } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { registry as blockRegistry } from '@/blocks/registry'
|
||||
import { AuthMode, type BlockConfig, isHiddenFromDisplay } from '@/blocks/types'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
@@ -112,11 +113,12 @@ export const getBlocksMetadataServerTool: BaseServerTool<
|
||||
logger.debug('Executing get_blocks_metadata', { count: blockIds?.length })
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations = permissionConfig?.allowedIntegrations
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? getAllowedIntegrationsFromEnv()
|
||||
|
||||
const result: Record<string, CopilotBlockMetadata> = {}
|
||||
for (const blockId of blockIds || []) {
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockId)) {
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockId.toLowerCase())) {
|
||||
logger.debug('Block not allowed by permission group', { blockId })
|
||||
continue
|
||||
}
|
||||
@@ -420,7 +422,6 @@ function extractInputs(metadata: CopilotBlockMetadata): {
|
||||
}
|
||||
|
||||
if (schema.options && schema.options.length > 0) {
|
||||
// Always return the id (actual value to use), not the display label
|
||||
input.options = schema.options.map((opt) => opt.id || opt.label)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { z } from 'zod'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { getAllowedIntegrationsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { registry as blockRegistry } from '@/blocks/registry'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
@@ -22,13 +23,15 @@ export const getTriggerBlocksServerTool: BaseServerTool<
|
||||
logger.debug('Executing get_trigger_blocks')
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations = permissionConfig?.allowedIntegrations
|
||||
const allowedIntegrations =
|
||||
permissionConfig?.allowedIntegrations ?? getAllowedIntegrationsFromEnv()
|
||||
|
||||
const triggerBlockIds: string[] = []
|
||||
|
||||
Object.entries(blockRegistry).forEach(([blockType, blockConfig]: [string, BlockConfig]) => {
|
||||
if (blockConfig.hideFromToolbar) return
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockType)) return
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockType.toLowerCase()))
|
||||
return
|
||||
|
||||
if (blockConfig.category === 'triggers') {
|
||||
triggerBlockIds.push(blockType)
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool, ServerToolContext } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { getBlockConfigServerTool } from '@/lib/copilot/tools/server/blocks/get-block-config'
|
||||
import { getBlockOptionsServerTool } from '@/lib/copilot/tools/server/blocks/get-block-options'
|
||||
import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-and-tools'
|
||||
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
|
||||
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
|
||||
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
|
||||
@@ -19,7 +22,10 @@ const logger = createLogger('ServerToolRouter')
|
||||
|
||||
/** Registry of all server tools. Tools self-declare their validation schemas. */
|
||||
const serverToolRegistry: Record<string, BaseServerTool> = {
|
||||
[getBlocksAndToolsServerTool.name]: getBlocksAndToolsServerTool,
|
||||
[getBlocksMetadataServerTool.name]: getBlocksMetadataServerTool,
|
||||
[getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
|
||||
[getBlockConfigServerTool.name]: getBlockConfigServerTool,
|
||||
[getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
|
||||
[editWorkflowServerTool.name]: editWorkflowServerTool,
|
||||
[getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
|
||||
|
||||
@@ -657,7 +657,7 @@ export function isBlockTypeAllowed(
|
||||
if (!permissionConfig || permissionConfig.allowedIntegrations === null) {
|
||||
return true
|
||||
}
|
||||
return permissionConfig.allowedIntegrations.includes(blockType)
|
||||
return permissionConfig.allowedIntegrations.includes(blockType.toLowerCase())
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -7,6 +7,22 @@ export const ExecuteResponseSuccessSchema = z.object({
|
||||
})
|
||||
export type ExecuteResponseSuccess = z.infer<typeof ExecuteResponseSuccessSchema>
|
||||
|
||||
// get_blocks_and_tools
|
||||
export const GetBlocksAndToolsInput = z.object({})
|
||||
export const GetBlocksAndToolsResult = z.object({
|
||||
blocks: z.array(
|
||||
z
|
||||
.object({
|
||||
type: z.string(),
|
||||
name: z.string(),
|
||||
triggerAllowed: z.boolean().optional(),
|
||||
longDescription: z.string().optional(),
|
||||
})
|
||||
.passthrough()
|
||||
),
|
||||
})
|
||||
export type GetBlocksAndToolsResultType = z.infer<typeof GetBlocksAndToolsResult>
|
||||
|
||||
// get_blocks_metadata
|
||||
export const GetBlocksMetadataInput = z.object({ blockIds: z.array(z.string()).min(1) })
|
||||
export const GetBlocksMetadataResult = z.object({ metadata: z.record(z.any()) })
|
||||
@@ -19,6 +35,41 @@ export const GetTriggerBlocksResult = z.object({
|
||||
})
|
||||
export type GetTriggerBlocksResultType = z.infer<typeof GetTriggerBlocksResult>
|
||||
|
||||
// get_block_options
|
||||
export const GetBlockOptionsInput = z.object({
|
||||
blockId: z.string(),
|
||||
})
|
||||
export const GetBlockOptionsResult = z.object({
|
||||
blockId: z.string(),
|
||||
blockName: z.string(),
|
||||
operations: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
),
|
||||
})
|
||||
export type GetBlockOptionsInputType = z.infer<typeof GetBlockOptionsInput>
|
||||
export type GetBlockOptionsResultType = z.infer<typeof GetBlockOptionsResult>
|
||||
|
||||
// get_block_config
|
||||
export const GetBlockConfigInput = z.object({
|
||||
blockType: z.string(),
|
||||
operation: z.string().optional(),
|
||||
trigger: z.boolean().optional(),
|
||||
})
|
||||
export const GetBlockConfigResult = z.object({
|
||||
blockType: z.string(),
|
||||
blockName: z.string(),
|
||||
operation: z.string().optional(),
|
||||
trigger: z.boolean().optional(),
|
||||
inputs: z.record(z.any()),
|
||||
outputs: z.record(z.any()),
|
||||
})
|
||||
export type GetBlockConfigInputType = z.infer<typeof GetBlockConfigInput>
|
||||
export type GetBlockConfigResultType = z.infer<typeof GetBlockConfigResult>
|
||||
|
||||
// knowledge_base - shared schema used by client tool, server tool, and registry
|
||||
export const KnowledgeBaseArgsSchema = z.object({
|
||||
operation: z.enum([
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
export { WorkspaceVFS, getOrMaterializeVFS } from '@/lib/copilot/vfs/workspace-vfs'
|
||||
export type {
|
||||
GrepMatch,
|
||||
GrepOptions,
|
||||
GrepOutputMode,
|
||||
GrepCountEntry,
|
||||
ReadResult,
|
||||
DirEntry,
|
||||
} from '@/lib/copilot/vfs/operations'
|
||||
export {
|
||||
serializeBlockSchema,
|
||||
serializeDocuments,
|
||||
serializeIntegrationSchema,
|
||||
serializeKBMeta,
|
||||
serializeRecentExecutions,
|
||||
serializeWorkflowMeta,
|
||||
} from '@/lib/copilot/vfs/serializers'
|
||||
@@ -1,237 +0,0 @@
|
||||
export interface GrepMatch {
|
||||
path: string
|
||||
line: number
|
||||
content: string
|
||||
}
|
||||
|
||||
export type GrepOutputMode = 'content' | 'files_with_matches' | 'count'
|
||||
|
||||
export interface GrepOptions {
|
||||
maxResults?: number
|
||||
outputMode?: GrepOutputMode
|
||||
ignoreCase?: boolean
|
||||
lineNumbers?: boolean
|
||||
context?: number
|
||||
}
|
||||
|
||||
export interface GrepCountEntry {
|
||||
path: string
|
||||
count: number
|
||||
}
|
||||
|
||||
export interface ReadResult {
|
||||
content: string
|
||||
totalLines: number
|
||||
}
|
||||
|
||||
export interface DirEntry {
|
||||
name: string
|
||||
type: 'file' | 'dir'
|
||||
}
|
||||
|
||||
/**
|
||||
* Regex search over VFS file contents.
|
||||
* Supports multiple output modes: content (default), files_with_matches, count.
|
||||
*/
|
||||
export function grep(
|
||||
files: Map<string, string>,
|
||||
pattern: string,
|
||||
path?: string,
|
||||
opts?: GrepOptions
|
||||
): GrepMatch[] | string[] | GrepCountEntry[] {
|
||||
const maxResults = opts?.maxResults ?? 100
|
||||
const outputMode = opts?.outputMode ?? 'content'
|
||||
const ignoreCase = opts?.ignoreCase ?? false
|
||||
const showLineNumbers = opts?.lineNumbers ?? true
|
||||
const contextLines = opts?.context ?? 0
|
||||
|
||||
const flags = ignoreCase ? 'gi' : 'g'
|
||||
let regex: RegExp
|
||||
try {
|
||||
regex = new RegExp(pattern, flags)
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
|
||||
if (outputMode === 'files_with_matches') {
|
||||
const matchingFiles: string[] = []
|
||||
for (const [filePath, content] of files) {
|
||||
if (path && !filePath.startsWith(path)) continue
|
||||
regex.lastIndex = 0
|
||||
if (regex.test(content)) {
|
||||
matchingFiles.push(filePath)
|
||||
if (matchingFiles.length >= maxResults) break
|
||||
}
|
||||
}
|
||||
return matchingFiles
|
||||
}
|
||||
|
||||
if (outputMode === 'count') {
|
||||
const counts: GrepCountEntry[] = []
|
||||
for (const [filePath, content] of files) {
|
||||
if (path && !filePath.startsWith(path)) continue
|
||||
const lines = content.split('\n')
|
||||
let count = 0
|
||||
for (const line of lines) {
|
||||
regex.lastIndex = 0
|
||||
if (regex.test(line)) count++
|
||||
}
|
||||
if (count > 0) {
|
||||
counts.push({ path: filePath, count })
|
||||
if (counts.length >= maxResults) break
|
||||
}
|
||||
}
|
||||
return counts
|
||||
}
|
||||
|
||||
// Default: 'content' mode
|
||||
const matches: GrepMatch[] = []
|
||||
for (const [filePath, content] of files) {
|
||||
if (path && !filePath.startsWith(path)) continue
|
||||
|
||||
const lines = content.split('\n')
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
regex.lastIndex = 0
|
||||
if (regex.test(lines[i])) {
|
||||
if (contextLines > 0) {
|
||||
const start = Math.max(0, i - contextLines)
|
||||
const end = Math.min(lines.length - 1, i + contextLines)
|
||||
for (let j = start; j <= end; j++) {
|
||||
matches.push({
|
||||
path: filePath,
|
||||
line: showLineNumbers ? j + 1 : 0,
|
||||
content: lines[j],
|
||||
})
|
||||
}
|
||||
} else {
|
||||
matches.push({
|
||||
path: filePath,
|
||||
line: showLineNumbers ? i + 1 : 0,
|
||||
content: lines[i],
|
||||
})
|
||||
}
|
||||
if (matches.length >= maxResults) return matches
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return matches
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a glob pattern to a RegExp.
|
||||
* Supports *, **, and ? wildcards.
|
||||
*/
|
||||
function globToRegExp(pattern: string): RegExp {
|
||||
let regexStr = '^'
|
||||
let i = 0
|
||||
while (i < pattern.length) {
|
||||
const ch = pattern[i]
|
||||
if (ch === '*') {
|
||||
if (pattern[i + 1] === '*') {
|
||||
// ** matches any number of path segments
|
||||
if (pattern[i + 2] === '/') {
|
||||
regexStr += '(?:.+/)?'
|
||||
i += 3
|
||||
} else {
|
||||
regexStr += '.*'
|
||||
i += 2
|
||||
}
|
||||
} else {
|
||||
// * matches anything except /
|
||||
regexStr += '[^/]*'
|
||||
i++
|
||||
}
|
||||
} else if (ch === '?') {
|
||||
regexStr += '[^/]'
|
||||
i++
|
||||
} else if ('.+^${}()|[]\\'.includes(ch)) {
|
||||
regexStr += '\\' + ch
|
||||
i++
|
||||
} else {
|
||||
regexStr += ch
|
||||
i++
|
||||
}
|
||||
}
|
||||
regexStr += '$'
|
||||
return new RegExp(regexStr)
|
||||
}
|
||||
|
||||
/**
|
||||
* Glob pattern matching against VFS file paths.
|
||||
* Returns matching file paths.
|
||||
*/
|
||||
export function glob(files: Map<string, string>, pattern: string): string[] {
|
||||
const regex = globToRegExp(pattern)
|
||||
const result: string[] = []
|
||||
for (const filePath of files.keys()) {
|
||||
if (regex.test(filePath)) {
|
||||
result.push(filePath)
|
||||
}
|
||||
}
|
||||
return result.sort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a VFS file's content, optionally with offset and limit.
|
||||
* Returns null if the file does not exist.
|
||||
*/
|
||||
export function read(
|
||||
files: Map<string, string>,
|
||||
path: string,
|
||||
offset?: number,
|
||||
limit?: number
|
||||
): ReadResult | null {
|
||||
const content = files.get(path)
|
||||
if (content === undefined) return null
|
||||
|
||||
const lines = content.split('\n')
|
||||
const totalLines = lines.length
|
||||
|
||||
if (offset !== undefined || limit !== undefined) {
|
||||
const start = offset ?? 0
|
||||
const end = limit !== undefined ? start + limit : lines.length
|
||||
return {
|
||||
content: lines.slice(start, end).join('\n'),
|
||||
totalLines,
|
||||
}
|
||||
}
|
||||
|
||||
return { content, totalLines }
|
||||
}
|
||||
|
||||
/**
|
||||
* List entries in a VFS directory path.
|
||||
* Returns files and subdirectories at the given path level.
|
||||
*/
|
||||
export function list(files: Map<string, string>, path: string): DirEntry[] {
|
||||
const normalizedPath = path.endsWith('/') ? path : path + '/'
|
||||
const seen = new Set<string>()
|
||||
const entries: DirEntry[] = []
|
||||
|
||||
for (const filePath of files.keys()) {
|
||||
if (!filePath.startsWith(normalizedPath)) continue
|
||||
|
||||
const remainder = filePath.slice(normalizedPath.length)
|
||||
if (!remainder) continue
|
||||
|
||||
const slashIndex = remainder.indexOf('/')
|
||||
if (slashIndex === -1) {
|
||||
if (!seen.has(remainder)) {
|
||||
seen.add(remainder)
|
||||
entries.push({ name: remainder, type: 'file' })
|
||||
}
|
||||
} else {
|
||||
const dirName = remainder.slice(0, slashIndex)
|
||||
if (!seen.has(dirName)) {
|
||||
seen.add(dirName)
|
||||
entries.push({ name: dirName, type: 'dir' })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return entries.sort((a, b) => {
|
||||
if (a.type !== b.type) return a.type === 'dir' ? -1 : 1
|
||||
return a.name.localeCompare(b.name)
|
||||
})
|
||||
}
|
||||
@@ -1,282 +0,0 @@
|
||||
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
/**
|
||||
* Serialize workflow metadata for VFS meta.json
|
||||
*/
|
||||
export function serializeWorkflowMeta(wf: {
|
||||
id: string
|
||||
name: string
|
||||
description?: string | null
|
||||
isDeployed: boolean
|
||||
deployedAt?: Date | null
|
||||
runCount: number
|
||||
lastRunAt?: Date | null
|
||||
createdAt: Date
|
||||
updatedAt: Date
|
||||
}): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
id: wf.id,
|
||||
name: wf.name,
|
||||
description: wf.description || undefined,
|
||||
isDeployed: wf.isDeployed,
|
||||
deployedAt: wf.deployedAt?.toISOString(),
|
||||
runCount: wf.runCount,
|
||||
lastRunAt: wf.lastRunAt?.toISOString(),
|
||||
createdAt: wf.createdAt.toISOString(),
|
||||
updatedAt: wf.updatedAt.toISOString(),
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize execution logs for VFS executions.json.
|
||||
* Takes recent execution log rows and produces a summary.
|
||||
*/
|
||||
export function serializeRecentExecutions(
|
||||
executions: Array<{
|
||||
id: string
|
||||
executionId: string
|
||||
status: string
|
||||
trigger: string
|
||||
startedAt: Date
|
||||
endedAt?: Date | null
|
||||
totalDurationMs?: number | null
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
executions.map((e) => ({
|
||||
executionId: e.executionId,
|
||||
status: e.status,
|
||||
trigger: e.trigger,
|
||||
startedAt: e.startedAt.toISOString(),
|
||||
endedAt: e.endedAt?.toISOString(),
|
||||
durationMs: e.totalDurationMs,
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize knowledge base metadata for VFS meta.json
|
||||
*/
|
||||
export function serializeKBMeta(kb: {
|
||||
id: string
|
||||
name: string
|
||||
description?: string | null
|
||||
embeddingModel: string
|
||||
embeddingDimension: number
|
||||
tokenCount: number
|
||||
createdAt: Date
|
||||
updatedAt: Date
|
||||
documentCount: number
|
||||
}): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
id: kb.id,
|
||||
name: kb.name,
|
||||
description: kb.description || undefined,
|
||||
embeddingModel: kb.embeddingModel,
|
||||
embeddingDimension: kb.embeddingDimension,
|
||||
tokenCount: kb.tokenCount,
|
||||
documentCount: kb.documentCount,
|
||||
createdAt: kb.createdAt.toISOString(),
|
||||
updatedAt: kb.updatedAt.toISOString(),
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize documents list for VFS documents.json (metadata only, no content)
|
||||
*/
|
||||
export function serializeDocuments(
|
||||
docs: Array<{
|
||||
id: string
|
||||
filename: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
chunkCount: number
|
||||
tokenCount: number
|
||||
processingStatus: string
|
||||
enabled: boolean
|
||||
uploadedAt: Date
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
docs.map((d) => ({
|
||||
id: d.id,
|
||||
filename: d.filename,
|
||||
fileSize: d.fileSize,
|
||||
mimeType: d.mimeType,
|
||||
chunkCount: d.chunkCount,
|
||||
tokenCount: d.tokenCount,
|
||||
processingStatus: d.processingStatus,
|
||||
enabled: d.enabled,
|
||||
uploadedAt: d.uploadedAt.toISOString(),
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a SubBlockConfig for the VFS component schema.
|
||||
* Strips functions and UI-only fields.
|
||||
*/
|
||||
function serializeSubBlock(sb: SubBlockConfig): Record<string, unknown> {
|
||||
const result: Record<string, unknown> = {
|
||||
id: sb.id,
|
||||
type: sb.type,
|
||||
}
|
||||
if (sb.title) result.title = sb.title
|
||||
if (sb.required === true) result.required = true
|
||||
if (sb.defaultValue !== undefined) result.defaultValue = sb.defaultValue
|
||||
if (sb.mode) result.mode = sb.mode
|
||||
if (sb.canonicalParamId) result.canonicalParamId = sb.canonicalParamId
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a block schema for VFS components/blocks/{type}.json
|
||||
*/
|
||||
export function serializeBlockSchema(block: BlockConfig): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
type: block.type,
|
||||
name: block.name,
|
||||
description: block.description,
|
||||
category: block.category,
|
||||
longDescription: block.longDescription || undefined,
|
||||
bestPractices: block.bestPractices || undefined,
|
||||
triggerAllowed: block.triggerAllowed || undefined,
|
||||
singleInstance: block.singleInstance || undefined,
|
||||
tools: block.tools.access,
|
||||
subBlocks: block.subBlocks.map(serializeSubBlock),
|
||||
inputs: block.inputs,
|
||||
outputs: Object.fromEntries(
|
||||
Object.entries(block.outputs)
|
||||
.filter(([key]) => key !== 'visualization')
|
||||
.map(([key, val]) => [
|
||||
key,
|
||||
typeof val === 'string'
|
||||
? { type: val }
|
||||
: { type: val.type, description: (val as { description?: string }).description },
|
||||
])
|
||||
),
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize OAuth credentials for VFS environment/credentials.json.
|
||||
* Shows which integrations are connected — IDs and scopes, NOT tokens.
|
||||
*/
|
||||
export function serializeCredentials(
|
||||
accounts: Array<{
|
||||
providerId: string
|
||||
scope: string | null
|
||||
createdAt: Date
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
accounts.map((a) => ({
|
||||
provider: a.providerId,
|
||||
scope: a.scope || undefined,
|
||||
connectedAt: a.createdAt.toISOString(),
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize API keys for VFS environment/api-keys.json.
|
||||
* Shows key names and types — NOT the actual key values.
|
||||
*/
|
||||
export function serializeApiKeys(
|
||||
keys: Array<{
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
lastUsed: Date | null
|
||||
createdAt: Date
|
||||
expiresAt: Date | null
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
keys.map((k) => ({
|
||||
id: k.id,
|
||||
name: k.name,
|
||||
type: k.type,
|
||||
lastUsed: k.lastUsed?.toISOString(),
|
||||
createdAt: k.createdAt.toISOString(),
|
||||
expiresAt: k.expiresAt?.toISOString(),
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize environment variables for VFS environment/variables.json.
|
||||
* Shows variable NAMES only — NOT values.
|
||||
*/
|
||||
export function serializeEnvironmentVariables(
|
||||
personalVarNames: string[],
|
||||
workspaceVarNames: string[]
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
personal: personalVarNames,
|
||||
workspace: workspaceVarNames,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize an integration/tool schema for VFS components/integrations/{service}/{operation}.json
|
||||
*/
|
||||
export function serializeIntegrationSchema(tool: ToolConfig): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
id: tool.id,
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
version: tool.version,
|
||||
oauth: tool.oauth
|
||||
? { required: tool.oauth.required, provider: tool.oauth.provider }
|
||||
: undefined,
|
||||
params: Object.fromEntries(
|
||||
Object.entries(tool.params).map(([key, val]) => [
|
||||
key,
|
||||
{
|
||||
type: val.type,
|
||||
required: val.required,
|
||||
description: val.description,
|
||||
default: val.default,
|
||||
},
|
||||
])
|
||||
),
|
||||
outputs: tool.outputs
|
||||
? Object.fromEntries(
|
||||
Object.entries(tool.outputs).map(([key, val]) => [
|
||||
key,
|
||||
{ type: val.type, description: val.description },
|
||||
])
|
||||
)
|
||||
: undefined,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
@@ -1,396 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
account,
|
||||
apiKey,
|
||||
document,
|
||||
environment,
|
||||
knowledgeBase,
|
||||
workflow,
|
||||
workspaceEnvironment,
|
||||
workflowExecutionLogs,
|
||||
} from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, count, desc, eq, isNull } from 'drizzle-orm'
|
||||
import { getAllBlocks } from '@/blocks/registry'
|
||||
import { getLatestVersionTools } from '@/tools/utils'
|
||||
import { tools as toolRegistry } from '@/tools/registry'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
|
||||
import type { GrepMatch, GrepOptions, ReadResult, DirEntry } from '@/lib/copilot/vfs/operations'
|
||||
import * as ops from '@/lib/copilot/vfs/operations'
|
||||
import {
|
||||
serializeApiKeys,
|
||||
serializeBlockSchema,
|
||||
serializeCredentials,
|
||||
serializeDocuments,
|
||||
serializeEnvironmentVariables,
|
||||
serializeIntegrationSchema,
|
||||
serializeKBMeta,
|
||||
serializeRecentExecutions,
|
||||
serializeWorkflowMeta,
|
||||
} from '@/lib/copilot/vfs/serializers'
|
||||
|
||||
const logger = createLogger('WorkspaceVFS')
|
||||
|
||||
/** Cache entry for a materialized VFS */
|
||||
interface VFSCacheEntry {
|
||||
vfs: WorkspaceVFS
|
||||
expiresAt: number
|
||||
}
|
||||
|
||||
/** Module-level VFS cache keyed by workspaceId */
|
||||
const vfsCache = new Map<string, VFSCacheEntry>()
|
||||
|
||||
/** Cache TTL in milliseconds (30 seconds) */
|
||||
const VFS_CACHE_TTL_MS = 30_000
|
||||
|
||||
/** Static component files, computed once and shared across all VFS instances */
|
||||
let staticComponentFiles: Map<string, string> | null = null
|
||||
|
||||
/**
|
||||
* Build the static component files from block and tool registries.
|
||||
* This only needs to happen once per process.
|
||||
*/
|
||||
function getStaticComponentFiles(): Map<string, string> {
|
||||
if (staticComponentFiles) return staticComponentFiles
|
||||
staticComponentFiles = new Map()
|
||||
|
||||
const allBlocks = getAllBlocks()
|
||||
for (const block of allBlocks) {
|
||||
const path = `components/blocks/${block.type}.json`
|
||||
staticComponentFiles.set(path, serializeBlockSchema(block))
|
||||
}
|
||||
|
||||
const latestTools = getLatestVersionTools(toolRegistry)
|
||||
for (const [toolId, tool] of Object.entries(latestTools)) {
|
||||
const parts = toolId.split('_')
|
||||
const service = parts[0]
|
||||
const operation = parts.slice(1).join('_') || 'default'
|
||||
const path = `components/integrations/${service}/${operation}.json`
|
||||
staticComponentFiles.set(path, serializeIntegrationSchema(tool))
|
||||
}
|
||||
|
||||
logger.info('Static component files built', {
|
||||
blocks: allBlocks.length,
|
||||
integrations: Object.keys(latestTools).length,
|
||||
})
|
||||
|
||||
return staticComponentFiles
|
||||
}
|
||||
|
||||
/**
|
||||
* Virtual Filesystem that materializes workspace data into an in-memory Map.
|
||||
*
|
||||
* Structure:
|
||||
* workflows/{name}/meta.json
|
||||
* workflows/{name}/blocks.json
|
||||
* workflows/{name}/edges.json
|
||||
* workflows/{name}/executions.json
|
||||
* knowledgebases/{name}/meta.json
|
||||
* knowledgebases/{name}/documents.json
|
||||
* environment/credentials.json
|
||||
* environment/api-keys.json
|
||||
* environment/variables.json
|
||||
* components/blocks/{type}.json
|
||||
* components/integrations/{service}/{operation}.json
|
||||
*/
|
||||
export class WorkspaceVFS {
  // Path -> JSON/text content. Rebuilt wholesale on each materialize() call.
  private files: Map<string, string> = new Map()

  /**
   * Materialize workspace data from DB into the VFS.
   * Queries workflows, knowledge bases, and merges static component schemas.
   *
   * @param workspaceId - workspace whose workflows/KBs/env are loaded
   * @param userId - user whose OAuth accounts and personal env vars are loaded
   */
  async materialize(workspaceId: string, userId: string): Promise<void> {
    const start = Date.now()
    // Drop any previously materialized state before repopulating.
    this.files = new Map()

    // The three sections touch disjoint path prefixes, so they can load concurrently.
    await Promise.all([
      this.materializeWorkflows(workspaceId, userId),
      this.materializeKnowledgeBases(workspaceId),
      this.materializeEnvironment(workspaceId, userId),
    ])

    // Merge static component files (block/integration schemas shared by all workspaces)
    for (const [path, content] of getStaticComponentFiles()) {
      this.files.set(path, content)
    }

    logger.info('VFS materialized', {
      workspaceId,
      fileCount: this.files.size,
      durationMs: Date.now() - start,
    })
  }

  /** Search file contents; delegates to ops.grep over the in-memory map. */
  grep(
    pattern: string,
    path?: string,
    options?: GrepOptions
  ): GrepMatch[] | string[] | ops.GrepCountEntry[] {
    return ops.grep(this.files, pattern, path, options)
  }

  /** Match file paths against a glob pattern; delegates to ops.glob. */
  glob(pattern: string): string[] {
    return ops.glob(this.files, pattern)
  }

  /** Read a file (optionally a line window via offset/limit); null if absent. */
  read(path: string, offset?: number, limit?: number): ReadResult | null {
    return ops.read(this.files, path, offset, limit)
  }

  /** List directory entries under a path; delegates to ops.list. */
  list(path: string): DirEntry[] {
    return ops.list(this.files, path)
  }

  /**
   * Materialize all workflows in the workspace.
   *
   * Writes workflows/{name}/meta.json, blocks.json, edges.json and
   * executions.json. Per-workflow load failures are logged and skipped so one
   * bad workflow does not abort the whole materialization.
   *
   * NOTE(review): two workflows whose names sanitize to the same slug share a
   * prefix, so the later one silently overwrites the earlier one's files —
   * consider suffixing with the workflow id to disambiguate.
   */
  private async materializeWorkflows(workspaceId: string, userId: string): Promise<void> {
    const workflowRows = await db
      .select({
        id: workflow.id,
        name: workflow.name,
        description: workflow.description,
        isDeployed: workflow.isDeployed,
        deployedAt: workflow.deployedAt,
        runCount: workflow.runCount,
        lastRunAt: workflow.lastRunAt,
        createdAt: workflow.createdAt,
        updatedAt: workflow.updatedAt,
      })
      .from(workflow)
      .where(eq(workflow.workspaceId, workspaceId))

    // Load normalized data + executions in parallel for all workflows
    await Promise.all(
      workflowRows.map(async (wf) => {
        const safeName = sanitizeName(wf.name)
        const prefix = `workflows/${safeName}/`

        // Meta
        this.files.set(`${prefix}meta.json`, serializeWorkflowMeta(wf))

        // Blocks + edges from normalized tables
        try {
          const normalized = await loadWorkflowFromNormalizedTables(wf.id)
          if (normalized) {
            // Strip fields unsuitable for the copilot before serializing.
            // NOTE(review): the `as any` cast hides a shape mismatch with
            // sanitizeForCopilot's parameter type — TODO confirm and type properly.
            const sanitized = sanitizeForCopilot({
              blocks: normalized.blocks,
              edges: normalized.edges,
              loops: normalized.loops,
              parallels: normalized.parallels,
            } as any)
            this.files.set(`${prefix}blocks.json`, JSON.stringify(sanitized, null, 2))

            // Edges as simple source->target list
            const edges = normalized.edges.map((e) => ({
              source: e.source,
              target: e.target,
              // Coerce null/empty handles to undefined so JSON.stringify omits them.
              sourceHandle: e.sourceHandle || undefined,
              targetHandle: e.targetHandle || undefined,
            }))
            this.files.set(`${prefix}edges.json`, JSON.stringify(edges, null, 2))
          }
        } catch (err) {
          // Best-effort: keep meta.json even when block data fails to load.
          logger.warn('Failed to load workflow blocks', {
            workflowId: wf.id,
            error: err instanceof Error ? err.message : String(err),
          })
        }

        // Recent executions (last 5)
        try {
          const execRows = await db
            .select({
              id: workflowExecutionLogs.id,
              executionId: workflowExecutionLogs.executionId,
              status: workflowExecutionLogs.status,
              trigger: workflowExecutionLogs.trigger,
              startedAt: workflowExecutionLogs.startedAt,
              endedAt: workflowExecutionLogs.endedAt,
              totalDurationMs: workflowExecutionLogs.totalDurationMs,
            })
            .from(workflowExecutionLogs)
            .where(eq(workflowExecutionLogs.workflowId, wf.id))
            .orderBy(desc(workflowExecutionLogs.startedAt))
            .limit(5)

          // Only emit the file when there is at least one execution to show.
          if (execRows.length > 0) {
            this.files.set(`${prefix}executions.json`, serializeRecentExecutions(execRows))
          }
        } catch (err) {
          logger.warn('Failed to load execution logs', {
            workflowId: wf.id,
            error: err instanceof Error ? err.message : String(err),
          })
        }
      })
    )
  }

  /**
   * Materialize all knowledge bases in the workspace.
   *
   * Writes knowledgebases/{name}/meta.json (with a document count) and
   * documents.json. Soft-deleted KBs and documents (deletedAt set) are excluded.
   */
  private async materializeKnowledgeBases(workspaceId: string): Promise<void> {
    const kbRows = await db
      .select({
        id: knowledgeBase.id,
        name: knowledgeBase.name,
        description: knowledgeBase.description,
        embeddingModel: knowledgeBase.embeddingModel,
        embeddingDimension: knowledgeBase.embeddingDimension,
        tokenCount: knowledgeBase.tokenCount,
        createdAt: knowledgeBase.createdAt,
        updatedAt: knowledgeBase.updatedAt,
      })
      .from(knowledgeBase)
      .where(and(eq(knowledgeBase.workspaceId, workspaceId), isNull(knowledgeBase.deletedAt)))

    await Promise.all(
      kbRows.map(async (kb) => {
        const safeName = sanitizeName(kb.name)
        const prefix = `knowledgebases/${safeName}/`

        // Get document count
        const [docCountRow] = await db
          .select({ count: count() })
          .from(document)
          .where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))

        this.files.set(
          `${prefix}meta.json`,
          serializeKBMeta({
            ...kb,
            documentCount: docCountRow?.count ?? 0,
          })
        )

        // Documents metadata
        const docRows = await db
          .select({
            id: document.id,
            filename: document.filename,
            fileSize: document.fileSize,
            mimeType: document.mimeType,
            chunkCount: document.chunkCount,
            tokenCount: document.tokenCount,
            processingStatus: document.processingStatus,
            enabled: document.enabled,
            uploadedAt: document.uploadedAt,
          })
          .from(document)
          .where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))

        // Skip the file entirely for empty knowledge bases.
        if (docRows.length > 0) {
          this.files.set(`${prefix}documents.json`, serializeDocuments(docRows))
        }
      })
    )
  }

  /**
   * Materialize environment data: credentials, API keys, env variable names.
   *
   * Deliberately metadata-only: no OAuth tokens, API key values, or env var
   * values are ever written into the VFS. The whole section is one try block,
   * so a failure partway through skips the remaining environment files.
   */
  private async materializeEnvironment(workspaceId: string, userId: string): Promise<void> {
    try {
      // OAuth credentials — which integrations are connected (no tokens)
      const oauthRows = await db
        .select({
          providerId: account.providerId,
          scope: account.scope,
          createdAt: account.createdAt,
        })
        .from(account)
        .where(eq(account.userId, userId))

      this.files.set('environment/credentials.json', serializeCredentials(oauthRows))

      // API keys — names and types (no key values)
      const apiKeyRows = await db
        .select({
          id: apiKey.id,
          name: apiKey.name,
          type: apiKey.type,
          lastUsed: apiKey.lastUsed,
          createdAt: apiKey.createdAt,
          expiresAt: apiKey.expiresAt,
        })
        .from(apiKey)
        .where(eq(apiKey.workspaceId, workspaceId))

      this.files.set('environment/api-keys.json', serializeApiKeys(apiKeyRows))

      // Environment variables — names only (no values)
      let personalVarNames: string[] = []
      let workspaceVarNames: string[] = []

      const [personalEnv] = await db
        .select({ variables: environment.variables })
        .from(environment)
        .where(eq(environment.userId, userId))

      // Guard against null/non-object JSON columns before reading keys.
      if (personalEnv?.variables && typeof personalEnv.variables === 'object') {
        personalVarNames = Object.keys(personalEnv.variables as Record<string, unknown>)
      }

      const [workspaceEnv] = await db
        .select({ variables: workspaceEnvironment.variables })
        .from(workspaceEnvironment)
        .where(eq(workspaceEnvironment.workspaceId, workspaceId))

      if (workspaceEnv?.variables && typeof workspaceEnv.variables === 'object') {
        workspaceVarNames = Object.keys(workspaceEnv.variables as Record<string, unknown>)
      }

      this.files.set(
        'environment/variables.json',
        serializeEnvironmentVariables(personalVarNames, workspaceVarNames)
      )
    } catch (err) {
      logger.warn('Failed to materialize environment data', {
        workspaceId,
        error: err instanceof Error ? err.message : String(err),
      })
    }
  }
}
|
||||
|
||||
/**
|
||||
* Get or create a cached VFS for a workspace.
|
||||
* Re-materializes if the cache is expired.
|
||||
*/
|
||||
export async function getOrMaterializeVFS(
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
): Promise<WorkspaceVFS> {
|
||||
const now = Date.now()
|
||||
const cached = vfsCache.get(workspaceId)
|
||||
|
||||
if (cached && cached.expiresAt > now) {
|
||||
return cached.vfs
|
||||
}
|
||||
|
||||
const vfs = new WorkspaceVFS()
|
||||
await vfs.materialize(workspaceId, userId)
|
||||
|
||||
vfsCache.set(workspaceId, {
|
||||
vfs,
|
||||
expiresAt: now + VFS_CACHE_TTL_MS,
|
||||
})
|
||||
|
||||
return vfs
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize a name for use as a VFS path segment.
|
||||
* Converts to lowercase, replaces spaces/special chars with hyphens.
|
||||
*/
|
||||
function sanitizeName(name: string): string {
|
||||
return name
|
||||
.trim()
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9]+/g, '-')
|
||||
.replace(/^-|-$/g, '')
|
||||
.slice(0, 64)
|
||||
}
|
||||
@@ -1,79 +0,0 @@
|
||||
/**
|
||||
* System prompt for workspace-level chat.
|
||||
*
|
||||
* Sent as `systemPrompt` in the Go request payload, which overrides the
|
||||
* default agent prompt (see copilot/internal/chat/service.go:300-303).
|
||||
*
|
||||
* Only references subagents available in agent mode (build and discovery
|
||||
* are excluded from agent mode tools in the Go backend).
|
||||
*/
|
||||
export function getWorkspaceChatSystemPrompt(): string {
  // YYYY-MM-DD portion of the ISO-8601 timestamp (UTC date).
  const currentDate = new Date().toISOString().split('T')[0]
  // The template below is the literal prompt sent to the model — runtime
  // string, must not be reworded casually.
  return `# Sim Workspace Assistant

Current Date: ${currentDate}

You are the Sim workspace assistant — a helpful AI that manages an entire workspace of workflows. The user is chatting from the workspace level, not from within a specific workflow.

## Your Role

You help users with their workspace: answering questions, building and debugging workflows, managing integrations, and providing guidance. You delegate complex tasks to specialized subagents.

## Platform Knowledge

Sim is a workflow automation platform. Workflows are visual pipelines of blocks (Agent, Function, Condition, Router, API, etc.). Workflows can be triggered manually, via API, webhooks, or schedules. They can be deployed as APIs, Chat UIs, or MCP tools.

## Subagents

You have access to these specialized subagents. Call them by name to delegate tasks:

| Subagent | Purpose | When to Use |
|----------|---------|-------------|
| **plan** | Gather info, create execution plans | Building new workflows, planning fixes |
| **edit** | Execute plans, make workflow changes | ONLY after plan returns steps |
| **debug** | Investigate errors, provide diagnosis | User reports something broken |
| **test** | Run workflow, verify results | After edits to validate |
| **deploy** | Deploy/undeploy workflows | Publish as API, Chat, or MCP |
| **workflow** | Env vars, settings, list workflows | Configuration and workflow discovery |
| **auth** | Connect OAuth integrations | Slack, Gmail, Google Sheets, etc. |
| **knowledge** | Create/query knowledge bases | RAG, document search |
| **research** | External API docs, best practices | Stripe, Twilio, etc. |
| **info** | Block details, outputs, variables | Quick lookups about workflow state |
| **superagent** | Interact with external services NOW | Read emails, send Slack, check calendar |

## Direct Tools

- **search_online** — Search the web for information.
- **memory_file_read(file_path)** — Read a persistent memory file.
- **memory_file_write(file_path, content)** — Write/update a persistent memory file.
- **memory_file_list()** — List all memory files.

## Memory Management

You have persistent memory files that survive across conversations:
- **SOUL.md** — Your personality and behavioral guidelines. Read this at the start of conversations.
- **USER.md** — Information about the user. Update as you learn preferences and context.
- **MEMORY.md** — Key learnings, decisions, and important context. Update after significant interactions.

**At conversation start**: Read SOUL.md and MEMORY.md to load your persistent context.
**During conversation**: When the user shares important preferences or you make key decisions, update the relevant file.
**Important**: Only write to files when there's genuinely new, important information. Don't update on every message.

## Decision Flow

- User says something broke → **debug()** first, then plan() → edit()
- User wants to build/automate something → **plan()** → edit() → test()
- User wants to DO something NOW (send email, check calendar) → **superagent()**
- User wants to deploy → **deploy()**
- User asks about their workflows → **workflow()** or **info()**
- User needs OAuth → **auth()**

## Important

- **You work at the workspace level.** When a user mentions a workflow, ask for the workflow name or ID if not provided.
- **Always delegate complex work** to the appropriate subagent.
- **Debug first** when something doesn't work — don't guess.
- Be concise and results-focused.
- Think internally, speak to the user only when the task is complete or you need input.
`
}
|
||||
@@ -93,6 +93,8 @@ export const env = createEnv({
|
||||
EXA_API_KEY: z.string().min(1).optional(), // Exa AI API key for enhanced online search
|
||||
BLACKLISTED_PROVIDERS: z.string().optional(), // Comma-separated provider IDs to hide (e.g., "openai,anthropic")
|
||||
BLACKLISTED_MODELS: z.string().optional(), // Comma-separated model names/prefixes to hide (e.g., "gpt-4,claude-*")
|
||||
ALLOWED_MCP_DOMAINS: z.string().optional(), // Comma-separated domains for MCP servers (e.g., "internal.company.com,mcp.example.org"). Empty = all allowed.
|
||||
ALLOWED_INTEGRATIONS: z.string().optional(), // Comma-separated block types to allow (e.g., "slack,github,agent"). Empty = all allowed.
|
||||
|
||||
// Azure Configuration - Shared credentials with feature-specific models
|
||||
AZURE_OPENAI_ENDPOINT: z.string().url().optional(), // Shared Azure OpenAI service endpoint
|
||||
|
||||
@@ -123,6 +123,47 @@ export const isReactGrabEnabled = isDev && isTruthy(env.REACT_GRAB_ENABLED)
|
||||
*/
|
||||
export const isReactScanEnabled = isDev && isTruthy(env.REACT_SCAN_ENABLED)
|
||||
|
||||
/**
|
||||
* Returns the parsed allowlist of integration block types from the environment variable.
|
||||
* If not set or empty, returns null (meaning all integrations are allowed).
|
||||
*/
|
||||
export function getAllowedIntegrationsFromEnv(): string[] | null {
|
||||
if (!env.ALLOWED_INTEGRATIONS) return null
|
||||
const parsed = env.ALLOWED_INTEGRATIONS.split(',')
|
||||
.map((i) => i.trim().toLowerCase())
|
||||
.filter(Boolean)
|
||||
return parsed.length > 0 ? parsed : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes a domain entry from the ALLOWED_MCP_DOMAINS env var.
|
||||
* Accepts bare hostnames (e.g., "mcp.company.com") or full URLs (e.g., "https://mcp.company.com").
|
||||
* Extracts the hostname in either case.
|
||||
*/
|
||||
function normalizeDomainEntry(entry: string): string {
|
||||
const trimmed = entry.trim().toLowerCase()
|
||||
if (!trimmed) return ''
|
||||
if (trimmed.includes('://')) {
|
||||
try {
|
||||
return new URL(trimmed).hostname
|
||||
} catch {
|
||||
return trimmed
|
||||
}
|
||||
}
|
||||
return trimmed
|
||||
}
|
||||
|
||||
/**
|
||||
* Get allowed MCP server domains from the ALLOWED_MCP_DOMAINS env var.
|
||||
* Returns null if not set (all domains allowed), or parsed array of lowercase hostnames.
|
||||
* Accepts both bare hostnames and full URLs in the env var value.
|
||||
*/
|
||||
export function getAllowedMcpDomainsFromEnv(): string[] | null {
|
||||
if (!env.ALLOWED_MCP_DOMAINS) return null
|
||||
const parsed = env.ALLOWED_MCP_DOMAINS.split(',').map(normalizeDomainEntry).filter(Boolean)
|
||||
return parsed.length > 0 ? parsed : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cost multiplier based on environment
|
||||
*/
|
||||
|
||||
163
apps/sim/lib/mcp/domain-check.test.ts
Normal file
163
apps/sim/lib/mcp/domain-check.test.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
/**
 * Unit tests for the MCP domain allowlist (lib/mcp/domain-check).
 *
 * Covers three behaviors: no allowlist configured (everything allowed),
 * allowlist configured (fail-closed for missing/malformed URLs, platform
 * hostname always exempt), and a missing/throwing getBaseUrl.
 *
 * @vitest-environment node
 */
import { beforeEach, describe, expect, it, vi } from 'vitest'

// Module-level mocks for the two dependencies of domain-check.
const mockGetAllowedMcpDomainsFromEnv = vi.fn<() => string[] | null>()
const mockGetBaseUrl = vi.fn<() => string>()

vi.doMock('@/lib/core/config/feature-flags', () => ({
  getAllowedMcpDomainsFromEnv: mockGetAllowedMcpDomainsFromEnv,
}))

vi.doMock('@/lib/core/utils/urls', () => ({
  getBaseUrl: mockGetBaseUrl,
}))

// Dynamic import AFTER vi.doMock so the module under test binds to the mocks.
const { McpDomainNotAllowedError, isMcpDomainAllowed, validateMcpDomain } = await import(
  './domain-check'
)

describe('McpDomainNotAllowedError', () => {
  it.concurrent('creates error with correct name and message', () => {
    const error = new McpDomainNotAllowedError('evil.com')

    expect(error).toBeInstanceOf(Error)
    expect(error).toBeInstanceOf(McpDomainNotAllowedError)
    expect(error.name).toBe('McpDomainNotAllowedError')
    expect(error.message).toContain('evil.com')
  })
})

describe('isMcpDomainAllowed', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  describe('when no allowlist is configured', () => {
    beforeEach(() => {
      mockGetAllowedMcpDomainsFromEnv.mockReturnValue(null)
    })

    it('allows any URL', () => {
      expect(isMcpDomainAllowed('https://any-server.com/mcp')).toBe(true)
    })

    it('allows undefined URL', () => {
      expect(isMcpDomainAllowed(undefined)).toBe(true)
    })

    it('allows empty string URL', () => {
      expect(isMcpDomainAllowed('')).toBe(true)
    })
  })

  describe('when allowlist is configured', () => {
    beforeEach(() => {
      mockGetAllowedMcpDomainsFromEnv.mockReturnValue(['allowed.com', 'internal.company.com'])
      mockGetBaseUrl.mockReturnValue('https://platform.example.com')
    })

    it('allows URLs on the allowlist', () => {
      expect(isMcpDomainAllowed('https://allowed.com/mcp')).toBe(true)
      expect(isMcpDomainAllowed('https://internal.company.com/tools')).toBe(true)
    })

    it('rejects URLs not on the allowlist', () => {
      expect(isMcpDomainAllowed('https://evil.com/mcp')).toBe(false)
    })

    // With a restriction in place, absent URLs must be rejected, not waved through.
    it('rejects undefined URL (fail-closed)', () => {
      expect(isMcpDomainAllowed(undefined)).toBe(false)
    })

    it('rejects empty string URL (fail-closed)', () => {
      expect(isMcpDomainAllowed('')).toBe(false)
    })

    it('rejects malformed URLs', () => {
      expect(isMcpDomainAllowed('not-a-url')).toBe(false)
    })

    it('matches case-insensitively', () => {
      expect(isMcpDomainAllowed('https://ALLOWED.COM/mcp')).toBe(true)
    })

    it('always allows the platform hostname', () => {
      expect(isMcpDomainAllowed('https://platform.example.com/mcp')).toBe(true)
    })

    it('allows platform hostname even when not in the allowlist', () => {
      mockGetAllowedMcpDomainsFromEnv.mockReturnValue(['other.com'])
      expect(isMcpDomainAllowed('https://platform.example.com/mcp')).toBe(true)
    })
  })

  describe('when getBaseUrl is not configured', () => {
    beforeEach(() => {
      mockGetAllowedMcpDomainsFromEnv.mockReturnValue(['allowed.com'])
      // Simulate a deployment with no resolvable base URL.
      mockGetBaseUrl.mockImplementation(() => {
        throw new Error('Not configured')
      })
    })

    it('still allows URLs on the allowlist', () => {
      expect(isMcpDomainAllowed('https://allowed.com/mcp')).toBe(true)
    })

    it('still rejects URLs not on the allowlist', () => {
      expect(isMcpDomainAllowed('https://evil.com/mcp')).toBe(false)
    })
  })
})

describe('validateMcpDomain', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  describe('when no allowlist is configured', () => {
    beforeEach(() => {
      mockGetAllowedMcpDomainsFromEnv.mockReturnValue(null)
    })

    it('does not throw for any URL', () => {
      expect(() => validateMcpDomain('https://any-server.com/mcp')).not.toThrow()
    })

    it('does not throw for undefined URL', () => {
      expect(() => validateMcpDomain(undefined)).not.toThrow()
    })
  })

  describe('when allowlist is configured', () => {
    beforeEach(() => {
      mockGetAllowedMcpDomainsFromEnv.mockReturnValue(['allowed.com'])
      mockGetBaseUrl.mockReturnValue('https://platform.example.com')
    })

    it('does not throw for allowed URLs', () => {
      expect(() => validateMcpDomain('https://allowed.com/mcp')).not.toThrow()
    })

    it('throws McpDomainNotAllowedError for disallowed URLs', () => {
      expect(() => validateMcpDomain('https://evil.com/mcp')).toThrow(McpDomainNotAllowedError)
    })

    it('throws for undefined URL (fail-closed)', () => {
      expect(() => validateMcpDomain(undefined)).toThrow(McpDomainNotAllowedError)
    })

    it('throws for malformed URLs', () => {
      expect(() => validateMcpDomain('not-a-url')).toThrow(McpDomainNotAllowedError)
    })

    it('includes the rejected domain in the error message', () => {
      expect(() => validateMcpDomain('https://evil.com/mcp')).toThrow(/evil\.com/)
    })

    it('does not throw for platform hostname', () => {
      expect(() => validateMcpDomain('https://platform.example.com/mcp')).not.toThrow()
    })
  })
})
|
||||
69
apps/sim/lib/mcp/domain-check.ts
Normal file
69
apps/sim/lib/mcp/domain-check.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { getAllowedMcpDomainsFromEnv } from '@/lib/core/config/feature-flags'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
|
||||
export class McpDomainNotAllowedError extends Error {
|
||||
constructor(domain: string) {
|
||||
super(`MCP server domain "${domain}" is not allowed by the server's ALLOWED_MCP_DOMAINS policy`)
|
||||
this.name = 'McpDomainNotAllowedError'
|
||||
}
|
||||
}
|
||||
|
||||
let cachedPlatformHostname: string | null = null
|
||||
|
||||
/**
|
||||
* Returns the platform's own hostname (from getBaseUrl), lazy-cached.
|
||||
* Always lowercase. Returns null if the base URL is not configured or invalid.
|
||||
*/
|
||||
function getPlatformHostname(): string | null {
|
||||
if (cachedPlatformHostname !== null) return cachedPlatformHostname
|
||||
try {
|
||||
cachedPlatformHostname = new URL(getBaseUrl()).hostname.toLowerCase()
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
return cachedPlatformHostname
|
||||
}
|
||||
|
||||
/**
|
||||
* Core domain check. Returns null if the URL is allowed, or the hostname/url
|
||||
* string to use in the rejection error.
|
||||
*/
|
||||
function checkMcpDomain(url: string): string | null {
|
||||
const allowedDomains = getAllowedMcpDomainsFromEnv()
|
||||
if (allowedDomains === null) return null
|
||||
try {
|
||||
const hostname = new URL(url).hostname.toLowerCase()
|
||||
if (hostname === getPlatformHostname()) return null
|
||||
return allowedDomains.includes(hostname) ? null : hostname
|
||||
} catch {
|
||||
return url
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the URL's domain is allowed (or no restriction is configured).
|
||||
* The platform's own hostname (from getBaseUrl) is always allowed.
|
||||
*/
|
||||
export function isMcpDomainAllowed(url: string | undefined): boolean {
|
||||
if (!url) {
|
||||
return getAllowedMcpDomainsFromEnv() === null
|
||||
}
|
||||
return checkMcpDomain(url) === null
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws McpDomainNotAllowedError if the URL's domain is not in the allowlist.
|
||||
* The platform's own hostname (from getBaseUrl) is always allowed.
|
||||
*/
|
||||
export function validateMcpDomain(url: string | undefined): void {
|
||||
if (!url) {
|
||||
if (getAllowedMcpDomainsFromEnv() !== null) {
|
||||
throw new McpDomainNotAllowedError('(empty)')
|
||||
}
|
||||
return
|
||||
}
|
||||
const rejected = checkMcpDomain(url)
|
||||
if (rejected !== null) {
|
||||
throw new McpDomainNotAllowedError(rejected)
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,7 @@ import { isTest } from '@/lib/core/config/feature-flags'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { McpClient } from '@/lib/mcp/client'
|
||||
import { mcpConnectionManager } from '@/lib/mcp/connection-manager'
|
||||
import { isMcpDomainAllowed } from '@/lib/mcp/domain-check'
|
||||
import { resolveMcpConfigEnvVars } from '@/lib/mcp/resolve-config'
|
||||
import {
|
||||
createMcpCacheAdapter,
|
||||
@@ -93,6 +94,10 @@ class McpService {
|
||||
return null
|
||||
}
|
||||
|
||||
if (!isMcpDomainAllowed(server.url || undefined)) {
|
||||
return null
|
||||
}
|
||||
|
||||
return {
|
||||
id: server.id,
|
||||
name: server.name,
|
||||
@@ -123,19 +128,21 @@ class McpService {
|
||||
.from(mcpServers)
|
||||
.where(and(...whereConditions))
|
||||
|
||||
return servers.map((server) => ({
|
||||
id: server.id,
|
||||
name: server.name,
|
||||
description: server.description || undefined,
|
||||
transport: server.transport as McpTransport,
|
||||
url: server.url || undefined,
|
||||
headers: (server.headers as Record<string, string>) || {},
|
||||
timeout: server.timeout || 30000,
|
||||
retries: server.retries || 3,
|
||||
enabled: server.enabled,
|
||||
createdAt: server.createdAt.toISOString(),
|
||||
updatedAt: server.updatedAt.toISOString(),
|
||||
}))
|
||||
return servers
|
||||
.map((server) => ({
|
||||
id: server.id,
|
||||
name: server.name,
|
||||
description: server.description || undefined,
|
||||
transport: server.transport as McpTransport,
|
||||
url: server.url || undefined,
|
||||
headers: (server.headers as Record<string, string>) || {},
|
||||
timeout: server.timeout || 30000,
|
||||
retries: server.retries || 3,
|
||||
enabled: server.enabled,
|
||||
createdAt: server.createdAt.toISOString(),
|
||||
updatedAt: server.updatedAt.toISOString(),
|
||||
}))
|
||||
.filter((config) => isMcpDomainAllowed(config.url))
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -107,5 +107,3 @@ if (typeof process !== 'undefined') {
|
||||
logger.info(`S3 copilot bucket: ${env.S3_COPILOT_BUCKET_NAME}`)
|
||||
}
|
||||
}
|
||||
|
||||
export default ensureUploadsDirectory
|
||||
|
||||
@@ -324,6 +324,15 @@ const nextConfig: NextConfig = {
|
||||
)
|
||||
}
|
||||
|
||||
// Beluga campaign short link tracking
|
||||
if (isHosted) {
|
||||
redirects.push({
|
||||
source: '/r/:shortCode',
|
||||
destination: 'https://go.trybeluga.ai/:shortCode',
|
||||
permanent: false,
|
||||
})
|
||||
}
|
||||
|
||||
return redirects
|
||||
},
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ import {
|
||||
supportsNativeStructuredOutputs,
|
||||
} from '@/providers/models'
|
||||
import type { ProviderRequest, ProviderResponse, TimeSegment } from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -842,15 +843,11 @@ export async function executeAnthropicProviderRequest(
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1299,14 +1296,10 @@ export async function executeAnthropicProviderRequest(
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -30,6 +30,7 @@ import type {
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -251,7 +252,7 @@ async function executeChatCompletionsRequest(
|
||||
output: currentResponse.usage?.completion_tokens || 0,
|
||||
total: currentResponse.usage?.total_tokens || 0,
|
||||
}
|
||||
const toolCalls: (FunctionCallResponse & { success: boolean })[] = []
|
||||
const toolCalls: FunctionCallResponse[] = []
|
||||
const toolResults: Record<string, unknown>[] = []
|
||||
const currentMessages = [...allMessages]
|
||||
let iterationCount = 0
|
||||
@@ -577,15 +578,11 @@ async function executeChatCompletionsRequest(
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore - Adding timing property to the error
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -22,11 +22,13 @@ import {
|
||||
} from '@/providers/bedrock/utils'
|
||||
import { getProviderDefaultModel, getProviderModels } from '@/providers/models'
|
||||
import type {
|
||||
FunctionCallResponse,
|
||||
ProviderConfig,
|
||||
ProviderRequest,
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -419,8 +421,8 @@ export const bedrockProvider: ProviderConfig = {
|
||||
pricing: initialCost.pricing,
|
||||
}
|
||||
|
||||
const toolCalls: any[] = []
|
||||
const toolResults: any[] = []
|
||||
const toolCalls: FunctionCallResponse[] = []
|
||||
const toolResults: Record<string, unknown>[] = []
|
||||
const currentMessages = [...messages]
|
||||
let iterationCount = 0
|
||||
let hasUsedForcedTool = false
|
||||
@@ -561,7 +563,7 @@ export const bedrockProvider: ProviderConfig = {
|
||||
|
||||
let resultContent: any
|
||||
if (result.success) {
|
||||
toolResults.push(result.output)
|
||||
toolResults.push(result.output!)
|
||||
resultContent = result.output
|
||||
} else {
|
||||
resultContent = {
|
||||
@@ -903,15 +905,11 @@ export const bedrockProvider: ProviderConfig = {
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import type {
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -539,15 +540,11 @@ export const cerebrasProvider: ProviderConfig = {
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore - Adding timing property to error for debugging
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import type {
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -538,15 +539,11 @@ export const deepseekProvider: ProviderConfig = {
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import type {
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -496,15 +497,11 @@ export const groqProvider: ProviderConfig = {
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import type {
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -551,15 +552,11 @@ export const mistralProvider: ProviderConfig = {
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore - Adding timing property to error for debugging
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import type {
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import { calculateCost, prepareToolExecution } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers'
|
||||
import { executeTool } from '@/tools'
|
||||
@@ -554,15 +555,11 @@ export const ollamaProvider: ProviderConfig = {
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import type OpenAI from 'openai'
|
||||
import type { StreamingExecution } from '@/executor/types'
|
||||
import { MAX_TOOL_ITERATIONS } from '@/providers'
|
||||
import type { Message, ProviderRequest, ProviderResponse, TimeSegment } from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
@@ -806,14 +807,10 @@ export async function executeResponsesProviderRequest(
|
||||
duration: totalDuration,
|
||||
})
|
||||
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore - Adding timing property to the error
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,11 +10,14 @@ import {
|
||||
supportsNativeStructuredOutputs,
|
||||
} from '@/providers/openrouter/utils'
|
||||
import type {
|
||||
FunctionCallResponse,
|
||||
Message,
|
||||
ProviderConfig,
|
||||
ProviderRequest,
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import { ProviderError } from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
generateSchemaInstructions,
|
||||
@@ -90,7 +93,7 @@ export const openRouterProvider: ProviderConfig = {
|
||||
stream: !!request.stream,
|
||||
})
|
||||
|
||||
const allMessages = [] as any[]
|
||||
const allMessages: Message[] = []
|
||||
|
||||
if (request.systemPrompt) {
|
||||
allMessages.push({ role: 'system', content: request.systemPrompt })
|
||||
@@ -237,8 +240,8 @@ export const openRouterProvider: ProviderConfig = {
|
||||
output: currentResponse.usage?.completion_tokens || 0,
|
||||
total: currentResponse.usage?.total_tokens || 0,
|
||||
}
|
||||
const toolCalls = [] as any[]
|
||||
const toolResults = [] as any[]
|
||||
const toolCalls: FunctionCallResponse[] = []
|
||||
const toolResults: Record<string, unknown>[] = []
|
||||
const currentMessages = [...allMessages]
|
||||
let iterationCount = 0
|
||||
let modelTime = firstResponseTime
|
||||
@@ -352,7 +355,7 @@ export const openRouterProvider: ProviderConfig = {
|
||||
|
||||
let resultContent: any
|
||||
if (result.success) {
|
||||
toolResults.push(result.output)
|
||||
toolResults.push(result.output!)
|
||||
resultContent = result.output
|
||||
} else {
|
||||
resultContent = {
|
||||
@@ -593,14 +596,11 @@ export const openRouterProvider: ProviderConfig = {
|
||||
}
|
||||
|
||||
logger.error('Error in OpenRouter request:', errorDetails)
|
||||
const enhancedError = new Error(error instanceof Error ? error.message : String(error))
|
||||
// @ts-ignore
|
||||
enhancedError.timing = {
|
||||
throw new ProviderError(error instanceof Error ? error.message : String(error), {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: providerEndTimeISO,
|
||||
duration: totalDuration,
|
||||
}
|
||||
throw enhancedError
|
||||
})
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user