mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-12 23:45:07 -05:00
Compare commits
12 Commits
feat/atlas
...
feat/smart
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d12830abfe | ||
|
|
546c9c3c8a | ||
|
|
17789c1df6 | ||
|
|
541665e41a | ||
|
|
3d5336994b | ||
|
|
311c4d38f3 | ||
|
|
e7abcd34df | ||
|
|
433552019e | ||
|
|
f733b8dd88 | ||
|
|
76bd405293 | ||
|
|
c22bd2caaa | ||
|
|
462aa15341 |
@@ -88,8 +88,7 @@ Update a Confluence page using the Confluence API.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of update |
|
||||||
| `success` | boolean | Operation success status |
|
|
||||||
| `pageId` | string | Confluence page ID |
|
| `pageId` | string | Confluence page ID |
|
||||||
| `title` | string | Updated page title |
|
| `title` | string | Updated page title |
|
||||||
| `status` | string | Page status |
|
| `status` | string | Page status |
|
||||||
@@ -111,6 +110,7 @@ Update a Confluence page using the Confluence API.
|
|||||||
| ↳ `authorId` | string | Account ID of the version author |
|
| ↳ `authorId` | string | Account ID of the version author |
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
||||||
| `url` | string | URL to view the page in Confluence |
|
| `url` | string | URL to view the page in Confluence |
|
||||||
|
| `success` | boolean | Update operation success status |
|
||||||
|
|
||||||
### `confluence_create_page`
|
### `confluence_create_page`
|
||||||
|
|
||||||
@@ -131,7 +131,7 @@ Create a new page in a Confluence space.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of creation |
|
||||||
| `pageId` | string | Created page ID |
|
| `pageId` | string | Created page ID |
|
||||||
| `title` | string | Page title |
|
| `title` | string | Page title |
|
||||||
| `status` | string | Page status |
|
| `status` | string | Page status |
|
||||||
@@ -172,9 +172,9 @@ Delete a Confluence page. By default moves to trash; use purge=true to permanent
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of deletion |
|
||||||
| `deleted` | boolean | Deletion status |
|
|
||||||
| `pageId` | string | Deleted page ID |
|
| `pageId` | string | Deleted page ID |
|
||||||
|
| `deleted` | boolean | Deletion status |
|
||||||
|
|
||||||
### `confluence_list_pages_in_space`
|
### `confluence_list_pages_in_space`
|
||||||
|
|
||||||
@@ -358,10 +358,10 @@ List all custom properties (metadata) attached to a Confluence page.
|
|||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `pageId` | string | ID of the page |
|
| `pageId` | string | ID of the page |
|
||||||
| `properties` | array | Array of content properties |
|
| `properties` | array | Array of content properties |
|
||||||
| ↳ `id` | string | Unique property identifier |
|
| ↳ `id` | string | Property ID |
|
||||||
| ↳ `key` | string | Property key/name |
|
| ↳ `key` | string | Property key |
|
||||||
| ↳ `value` | json | Property value \(can be any JSON\) |
|
| ↳ `value` | json | Property value \(can be any JSON\) |
|
||||||
| ↳ `version` | object | Property version information |
|
| ↳ `version` | object | Version information |
|
||||||
| ↳ `number` | number | Version number |
|
| ↳ `number` | number | Version number |
|
||||||
| ↳ `message` | string | Version message |
|
| ↳ `message` | string | Version message |
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
||||||
@@ -388,50 +388,16 @@ Create a new custom property (metadata) on a Confluence page.
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `id` | string | Unique property identifier |
|
|
||||||
| `key` | string | Property key/name |
|
|
||||||
| `value` | json | Property value \(can be any JSON\) |
|
|
||||||
| `version` | object | Property version information |
|
|
||||||
| ↳ `number` | number | Version number |
|
|
||||||
| ↳ `message` | string | Version message |
|
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
|
||||||
| ↳ `authorId` | string | Account ID of the version author |
|
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
|
||||||
| `pageId` | string | ID of the page |
|
| `pageId` | string | ID of the page |
|
||||||
| `propertyId` | string | ID of the created property |
|
| `propertyId` | string | ID of the created property |
|
||||||
|
| `key` | string | Property key |
|
||||||
### `confluence_update_page_property`
|
| `value` | json | Property value |
|
||||||
|
| `version` | object | Version information |
|
||||||
Update an existing content property on a Confluence page.
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `pageId` | string | Yes | The ID of the page containing the property |
|
|
||||||
| `propertyId` | string | Yes | The ID of the property to update |
|
|
||||||
| `key` | string | Yes | The key/name of the property |
|
|
||||||
| `value` | json | Yes | The new value for the property \(can be any JSON value\) |
|
|
||||||
| `versionNumber` | number | Yes | The current version number of the property \(for conflict prevention\) |
|
|
||||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
|
||||||
| `id` | string | Unique property identifier |
|
|
||||||
| `key` | string | Property key/name |
|
|
||||||
| `value` | json | Property value \(can be any JSON\) |
|
|
||||||
| `version` | object | Property version information |
|
|
||||||
| ↳ `number` | number | Version number |
|
| ↳ `number` | number | Version number |
|
||||||
| ↳ `message` | string | Version message |
|
| ↳ `message` | string | Version message |
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
||||||
| ↳ `authorId` | string | Account ID of the version author |
|
| ↳ `authorId` | string | Account ID of the version author |
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
||||||
| `pageId` | string | ID of the page |
|
|
||||||
| `propertyId` | string | ID of the updated property |
|
|
||||||
|
|
||||||
### `confluence_delete_page_property`
|
### `confluence_delete_page_property`
|
||||||
|
|
||||||
@@ -472,7 +438,7 @@ Search for content across Confluence pages, blog posts, and other content.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of search |
|
||||||
| `results` | array | Array of search results |
|
| `results` | array | Array of search results |
|
||||||
| ↳ `id` | string | Unique content identifier |
|
| ↳ `id` | string | Unique content identifier |
|
||||||
| ↳ `title` | string | Content title |
|
| ↳ `title` | string | Content title |
|
||||||
@@ -546,29 +512,19 @@ List all blog posts across all accessible Confluence spaces.
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `blogPosts` | array | Array of blog posts |
|
| `blogPosts` | array | Array of blog posts |
|
||||||
| ↳ `id` | string | Unique blog post identifier |
|
| ↳ `id` | string | Blog post ID |
|
||||||
| ↳ `title` | string | Blog post title |
|
| ↳ `title` | string | Blog post title |
|
||||||
| ↳ `status` | string | Blog post status \(e.g., current, draft\) |
|
| ↳ `status` | string | Blog post status |
|
||||||
| ↳ `spaceId` | string | ID of the space containing the blog post |
|
| ↳ `spaceId` | string | Space ID |
|
||||||
| ↳ `authorId` | string | Account ID of the blog post author |
|
| ↳ `authorId` | string | Author account ID |
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp when the blog post was created |
|
| ↳ `createdAt` | string | Creation timestamp |
|
||||||
| ↳ `version` | object | Blog post version information |
|
| ↳ `version` | object | Version information |
|
||||||
| ↳ `number` | number | Version number |
|
| ↳ `number` | number | Version number |
|
||||||
| ↳ `message` | string | Version message |
|
| ↳ `message` | string | Version message |
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
||||||
| ↳ `authorId` | string | Account ID of the version author |
|
| ↳ `authorId` | string | Account ID of the version author |
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
||||||
| ↳ `body` | object | Blog post body content |
|
| ↳ `webUrl` | string | URL to view the blog post |
|
||||||
| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
|
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
|
||||||
| ↳ `representation` | string | Content representation type |
|
|
||||||
| ↳ `view` | object | Body in view format \(rendered HTML\) |
|
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
|
||||||
| ↳ `representation` | string | Content representation type |
|
|
||||||
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
|
||||||
| ↳ `representation` | string | Content representation type |
|
|
||||||
| ↳ `webUrl` | string | URL to view the blog post in Confluence |
|
|
||||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||||
|
|
||||||
### `confluence_get_blogpost`
|
### `confluence_get_blogpost`
|
||||||
@@ -589,19 +545,19 @@ Get a specific Confluence blog post by ID, including its content.
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `id` | string | Unique blog post identifier |
|
| `id` | string | Blog post ID |
|
||||||
| `title` | string | Blog post title |
|
| `title` | string | Blog post title |
|
||||||
| `status` | string | Blog post status \(e.g., current, draft\) |
|
| `status` | string | Blog post status |
|
||||||
| `spaceId` | string | ID of the space containing the blog post |
|
| `spaceId` | string | Space ID |
|
||||||
| `authorId` | string | Account ID of the blog post author |
|
| `authorId` | string | Author account ID |
|
||||||
| `createdAt` | string | ISO 8601 timestamp when the blog post was created |
|
| `createdAt` | string | Creation timestamp |
|
||||||
| `version` | object | Blog post version information |
|
| `version` | object | Version information |
|
||||||
| ↳ `number` | number | Version number |
|
| ↳ `number` | number | Version number |
|
||||||
| ↳ `message` | string | Version message |
|
| ↳ `message` | string | Version message |
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
||||||
| ↳ `authorId` | string | Account ID of the version author |
|
| ↳ `authorId` | string | Account ID of the version author |
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
||||||
| `body` | object | Blog post body content |
|
| `body` | object | Blog post body content in requested format\(s\) |
|
||||||
| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
|
| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
| ↳ `value` | string | The content value in the specified format |
|
||||||
| ↳ `representation` | string | Content representation type |
|
| ↳ `representation` | string | Content representation type |
|
||||||
@@ -611,7 +567,7 @@ Get a specific Confluence blog post by ID, including its content.
|
|||||||
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
| ↳ `value` | string | The content value in the specified format |
|
||||||
| ↳ `representation` | string | Content representation type |
|
| ↳ `representation` | string | Content representation type |
|
||||||
| `webUrl` | string | URL to view the blog post in Confluence |
|
| `webUrl` | string | URL to view the blog post |
|
||||||
|
|
||||||
### `confluence_create_blogpost`
|
### `confluence_create_blogpost`
|
||||||
|
|
||||||
@@ -633,18 +589,11 @@ Create a new blog post in a Confluence space.
|
|||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `id` | string | Unique blog post identifier |
|
| `id` | string | Created blog post ID |
|
||||||
| `title` | string | Blog post title |
|
| `title` | string | Blog post title |
|
||||||
| `status` | string | Blog post status \(e.g., current, draft\) |
|
| `status` | string | Blog post status |
|
||||||
| `spaceId` | string | ID of the space containing the blog post |
|
| `spaceId` | string | Space ID |
|
||||||
| `authorId` | string | Account ID of the blog post author |
|
| `authorId` | string | Author account ID |
|
||||||
| `createdAt` | string | ISO 8601 timestamp when the blog post was created |
|
|
||||||
| `version` | object | Blog post version information |
|
|
||||||
| ↳ `number` | number | Version number |
|
|
||||||
| ↳ `message` | string | Version message |
|
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
|
||||||
| ↳ `authorId` | string | Account ID of the version author |
|
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
|
||||||
| `body` | object | Blog post body content |
|
| `body` | object | Blog post body content |
|
||||||
| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
|
| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
| ↳ `value` | string | The content value in the specified format |
|
||||||
@@ -655,71 +604,13 @@ Create a new blog post in a Confluence space.
|
|||||||
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
| ↳ `value` | string | The content value in the specified format |
|
||||||
| ↳ `representation` | string | Content representation type |
|
| ↳ `representation` | string | Content representation type |
|
||||||
| `webUrl` | string | URL to view the blog post in Confluence |
|
|
||||||
|
|
||||||
### `confluence_update_blogpost`
|
|
||||||
|
|
||||||
Update an existing Confluence blog post title, content, or status.
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `blogPostId` | string | Yes | The ID of the blog post to update |
|
|
||||||
| `title` | string | No | New title for the blog post |
|
|
||||||
| `content` | string | No | New content for the blog post in Confluence storage format |
|
|
||||||
| `status` | string | No | Blog post status: current or draft |
|
|
||||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
|
||||||
| `id` | string | Unique blog post identifier |
|
|
||||||
| `title` | string | Blog post title |
|
|
||||||
| `status` | string | Blog post status \(e.g., current, draft\) |
|
|
||||||
| `spaceId` | string | ID of the space containing the blog post |
|
|
||||||
| `authorId` | string | Account ID of the blog post author |
|
|
||||||
| `createdAt` | string | ISO 8601 timestamp when the blog post was created |
|
|
||||||
| `version` | object | Blog post version information |
|
| `version` | object | Blog post version information |
|
||||||
| ↳ `number` | number | Version number |
|
| ↳ `number` | number | Version number |
|
||||||
| ↳ `message` | string | Version message |
|
| ↳ `message` | string | Version message |
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
||||||
| ↳ `authorId` | string | Account ID of the version author |
|
| ↳ `authorId` | string | Account ID of the version author |
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
|
||||||
| `body` | object | Blog post body content |
|
| `webUrl` | string | URL to view the blog post |
|
||||||
| ↳ `storage` | object | Body in storage format \(Confluence markup\) |
|
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
|
||||||
| ↳ `representation` | string | Content representation type |
|
|
||||||
| ↳ `view` | object | Body in view format \(rendered HTML\) |
|
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
|
||||||
| ↳ `representation` | string | Content representation type |
|
|
||||||
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
|
||||||
| ↳ `representation` | string | Content representation type |
|
|
||||||
| `webUrl` | string | URL to view the blog post in Confluence |
|
|
||||||
|
|
||||||
### `confluence_delete_blogpost`
|
|
||||||
|
|
||||||
Delete a Confluence blog post.
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `blogPostId` | string | Yes | The ID of the blog post to delete |
|
|
||||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
|
||||||
| `deleted` | boolean | Deletion status |
|
|
||||||
| `blogPostId` | string | Deleted blog post ID |
|
|
||||||
|
|
||||||
### `confluence_list_blogposts_in_space`
|
### `confluence_list_blogposts_in_space`
|
||||||
|
|
||||||
@@ -743,13 +634,13 @@ List all blog posts within a specific Confluence space.
|
|||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||||
| `blogPosts` | array | Array of blog posts in the space |
|
| `blogPosts` | array | Array of blog posts in the space |
|
||||||
| ↳ `id` | string | Unique blog post identifier |
|
| ↳ `id` | string | Blog post ID |
|
||||||
| ↳ `title` | string | Blog post title |
|
| ↳ `title` | string | Blog post title |
|
||||||
| ↳ `status` | string | Blog post status \(e.g., current, draft\) |
|
| ↳ `status` | string | Blog post status |
|
||||||
| ↳ `spaceId` | string | ID of the space containing the blog post |
|
| ↳ `spaceId` | string | Space ID |
|
||||||
| ↳ `authorId` | string | Account ID of the blog post author |
|
| ↳ `authorId` | string | Author account ID |
|
||||||
| ↳ `createdAt` | string | ISO 8601 timestamp when the blog post was created |
|
| ↳ `createdAt` | string | Creation timestamp |
|
||||||
| ↳ `version` | object | Blog post version information |
|
| ↳ `version` | object | Version information |
|
||||||
| ↳ `number` | number | Version number |
|
| ↳ `number` | number | Version number |
|
||||||
| ↳ `message` | string | Version message |
|
| ↳ `message` | string | Version message |
|
||||||
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
|
||||||
@@ -765,7 +656,7 @@ List all blog posts within a specific Confluence space.
|
|||||||
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
| ↳ `atlas_doc_format` | object | Body in Atlassian Document Format \(ADF\) |
|
||||||
| ↳ `value` | string | The content value in the specified format |
|
| ↳ `value` | string | The content value in the specified format |
|
||||||
| ↳ `representation` | string | Content representation type |
|
| ↳ `representation` | string | Content representation type |
|
||||||
| ↳ `webUrl` | string | URL to view the blog post in Confluence |
|
| ↳ `webUrl` | string | URL to view the blog post |
|
||||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||||
|
|
||||||
### `confluence_create_comment`
|
### `confluence_create_comment`
|
||||||
@@ -785,7 +676,7 @@ Add a comment to a Confluence page.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of creation |
|
||||||
| `commentId` | string | Created comment ID |
|
| `commentId` | string | Created comment ID |
|
||||||
| `pageId` | string | Page ID |
|
| `pageId` | string | Page ID |
|
||||||
|
|
||||||
@@ -846,9 +737,9 @@ Update an existing comment on a Confluence page.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of update |
|
||||||
| `updated` | boolean | Update status |
|
|
||||||
| `commentId` | string | Updated comment ID |
|
| `commentId` | string | Updated comment ID |
|
||||||
|
| `updated` | boolean | Update status |
|
||||||
|
|
||||||
### `confluence_delete_comment`
|
### `confluence_delete_comment`
|
||||||
|
|
||||||
@@ -866,9 +757,9 @@ Delete a comment from a Confluence page.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of deletion |
|
||||||
| `deleted` | boolean | Deletion status |
|
|
||||||
| `commentId` | string | Deleted comment ID |
|
| `commentId` | string | Deleted comment ID |
|
||||||
|
| `deleted` | boolean | Deletion status |
|
||||||
|
|
||||||
### `confluence_upload_attachment`
|
### `confluence_upload_attachment`
|
||||||
|
|
||||||
@@ -889,7 +780,7 @@ Upload a file as an attachment to a Confluence page.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of upload |
|
||||||
| `attachmentId` | string | Uploaded attachment ID |
|
| `attachmentId` | string | Uploaded attachment ID |
|
||||||
| `title` | string | Attachment file name |
|
| `title` | string | Attachment file name |
|
||||||
| `fileSize` | number | File size in bytes |
|
| `fileSize` | number | File size in bytes |
|
||||||
@@ -951,9 +842,9 @@ Delete an attachment from a Confluence page (moves to trash).
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of deletion |
|
||||||
| `deleted` | boolean | Deletion status |
|
|
||||||
| `attachmentId` | string | Deleted attachment ID |
|
| `attachmentId` | string | Deleted attachment ID |
|
||||||
|
| `deleted` | boolean | Deletion status |
|
||||||
|
|
||||||
### `confluence_list_labels`
|
### `confluence_list_labels`
|
||||||
|
|
||||||
@@ -973,7 +864,7 @@ List all labels on a Confluence page.
|
|||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
| `ts` | string | Timestamp of retrieval |
|
||||||
| `labels` | array | Array of labels on the page |
|
| `labels` | array | Array of labels on the page |
|
||||||
| ↳ `id` | string | Unique label identifier |
|
| ↳ `id` | string | Unique label identifier |
|
||||||
| ↳ `name` | string | Label name |
|
| ↳ `name` | string | Label name |
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ With Sim’s Jira Service Management integration, you can create, monitor, and u
|
|||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Integrate with Jira Service Management for IT service management. Create and manage service requests, handle customers and organizations, track SLAs, and manage queues. Can also trigger workflows based on Jira Service Management webhook events.
|
Integrate with Jira Service Management for IT service management. Create and manage service requests, handle customers and organizations, track SLAs, and manage queues.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -66,31 +66,6 @@ Get all service desks from Jira Service Management
|
|||||||
| `total` | number | Total number of service desks |
|
| `total` | number | Total number of service desks |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
### `jsm_get_service_desk`
|
|
||||||
|
|
||||||
Get a specific service desk by ID in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `id` | string | Service desk ID |
|
|
||||||
| `projectId` | string | Associated Jira project ID |
|
|
||||||
| `projectName` | string | Associated project name |
|
|
||||||
| `projectKey` | string | Associated project key |
|
|
||||||
| `name` | string | Service desk name |
|
|
||||||
| `description` | string | Service desk description |
|
|
||||||
| `leadDisplayName` | string | Project lead display name |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
|
|
||||||
### `jsm_get_request_types`
|
### `jsm_get_request_types`
|
||||||
|
|
||||||
Get request types for a service desk in Jira Service Management
|
Get request types for a service desk in Jira Service Management
|
||||||
@@ -126,39 +101,6 @@ Get request types for a service desk in Jira Service Management
|
|||||||
| `total` | number | Total number of request types |
|
| `total` | number | Total number of request types |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
### `jsm_get_request_type_fields`
|
|
||||||
|
|
||||||
Get the fields required to create a request of a specific type in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
|
||||||
| `requestTypeId` | string | Yes | Request Type ID \(e.g., "10", "15"\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `serviceDeskId` | string | Service desk ID |
|
|
||||||
| `requestTypeId` | string | Request type ID |
|
|
||||||
| `canAddRequestParticipants` | boolean | Whether participants can be added to requests of this type |
|
|
||||||
| `canRaiseOnBehalfOf` | boolean | Whether requests can be raised on behalf of another user |
|
|
||||||
| `requestTypeFields` | array | List of fields for this request type |
|
|
||||||
| ↳ `fieldId` | string | Field identifier \(e.g., summary, description, customfield_10010\) |
|
|
||||||
| ↳ `name` | string | Human-readable field name |
|
|
||||||
| ↳ `description` | string | Help text for the field |
|
|
||||||
| ↳ `required` | boolean | Whether the field is required |
|
|
||||||
| ↳ `visible` | boolean | Whether the field is visible |
|
|
||||||
| ↳ `validValues` | json | Allowed values for select fields |
|
|
||||||
| ↳ `presetValues` | json | Pre-populated values |
|
|
||||||
| ↳ `defaultValues` | json | Default values for the field |
|
|
||||||
| ↳ `jiraSchema` | json | Jira field schema with type, system, custom, customId |
|
|
||||||
|
|
||||||
### `jsm_create_request`
|
### `jsm_create_request`
|
||||||
|
|
||||||
Create a new service request in Jira Service Management
|
Create a new service request in Jira Service Management
|
||||||
@@ -280,59 +222,6 @@ Get multiple service requests from Jira Service Management
|
|||||||
| `total` | number | Total number of requests in current page |
|
| `total` | number | Total number of requests in current page |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
### `jsm_get_request_status`
|
|
||||||
|
|
||||||
Get status history for a service request in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `statuses` | array | Status history entries |
|
|
||||||
| ↳ `status` | string | Status name |
|
|
||||||
| ↳ `statusCategory` | string | Status category \(NEW, INDETERMINATE, DONE\) |
|
|
||||||
| ↳ `statusDate` | json | Status change date with iso8601, friendly, epochMillis |
|
|
||||||
| `total` | number | Total number of status entries |
|
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
|
||||||
|
|
||||||
### `jsm_get_request_attachments`
|
|
||||||
|
|
||||||
Get attachments for a service request in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
| `includeAttachments` | boolean | No | Download attachment file contents and include them as files in the output |
|
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `attachments` | array | List of attachments |
|
|
||||||
| `total` | number | Total number of attachments |
|
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
|
||||||
| `files` | file[] | Downloaded attachment files \(only when includeAttachments is true\) |
|
|
||||||
|
|
||||||
### `jsm_add_comment`
|
### `jsm_add_comment`
|
||||||
|
|
||||||
Add a comment (public or internal) to a service request in Jira Service Management
|
Add a comment (public or internal) to a service request in Jira Service Management
|
||||||
@@ -452,53 +341,6 @@ Add customers to a service desk in Jira Service Management
|
|||||||
| `serviceDeskId` | string | Service desk ID |
|
| `serviceDeskId` | string | Service desk ID |
|
||||||
| `success` | boolean | Whether customers were added successfully |
|
| `success` | boolean | Whether customers were added successfully |
|
||||||
|
|
||||||
### `jsm_remove_customer`
|
|
||||||
|
|
||||||
Remove customers from a service desk in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
|
||||||
| `accountIds` | string | No | Comma-separated Atlassian account IDs to remove |
|
|
||||||
| `emails` | string | No | Comma-separated email addresses to remove |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `serviceDeskId` | string | Service desk ID |
|
|
||||||
| `success` | boolean | Whether customers were removed successfully |
|
|
||||||
|
|
||||||
### `jsm_create_customer`
|
|
||||||
|
|
||||||
Create a new customer in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `email` | string | Yes | Email address for the new customer |
|
|
||||||
| `displayName` | string | Yes | Display name for the new customer |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `accountId` | string | Account ID of the created customer |
|
|
||||||
| `displayName` | string | Display name of the created customer |
|
|
||||||
| `emailAddress` | string | Email address of the created customer |
|
|
||||||
| `active` | boolean | Whether the customer account is active |
|
|
||||||
| `timeZone` | string | Customer timezone |
|
|
||||||
| `success` | boolean | Whether the customer was created successfully |
|
|
||||||
|
|
||||||
### `jsm_get_organizations`
|
### `jsm_get_organizations`
|
||||||
|
|
||||||
Get organizations for a service desk in Jira Service Management
|
Get organizations for a service desk in Jira Service Management
|
||||||
@@ -524,26 +366,6 @@ Get organizations for a service desk in Jira Service Management
|
|||||||
| `total` | number | Total number of organizations |
|
| `total` | number | Total number of organizations |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
### `jsm_get_organization`
|
|
||||||
|
|
||||||
Get a specific organization by ID in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `organizationId` | string | Yes | Organization ID to retrieve |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `id` | string | Organization ID |
|
|
||||||
| `name` | string | Organization name |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
|
|
||||||
### `jsm_create_organization`
|
### `jsm_create_organization`
|
||||||
|
|
||||||
Create a new organization in Jira Service Management
|
Create a new organization in Jira Service Management
|
||||||
@@ -587,119 +409,6 @@ Add an organization to a service desk in Jira Service Management
|
|||||||
| `organizationId` | string | Organization ID added |
|
| `organizationId` | string | Organization ID added |
|
||||||
| `success` | boolean | Whether the operation succeeded |
|
| `success` | boolean | Whether the operation succeeded |
|
||||||
|
|
||||||
### `jsm_remove_organization`
|
|
||||||
|
|
||||||
Remove an organization from a service desk in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
|
||||||
| `organizationId` | string | Yes | Organization ID to remove from the service desk |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `serviceDeskId` | string | Service Desk ID |
|
|
||||||
| `organizationId` | string | Organization ID removed |
|
|
||||||
| `success` | boolean | Whether the operation succeeded |
|
|
||||||
|
|
||||||
### `jsm_delete_organization`
|
|
||||||
|
|
||||||
Delete an organization in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `organizationId` | string | Yes | Organization ID to delete |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `organizationId` | string | ID of the deleted organization |
|
|
||||||
| `success` | boolean | Whether the organization was deleted |
|
|
||||||
|
|
||||||
### `jsm_get_organization_users`
|
|
||||||
|
|
||||||
Get users in an organization in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `organizationId` | string | Yes | Organization ID to get users from |
|
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `organizationId` | string | Organization ID |
|
|
||||||
| `users` | array | List of users in the organization |
|
|
||||||
| ↳ `accountId` | string | Atlassian account ID |
|
|
||||||
| ↳ `displayName` | string | Display name |
|
|
||||||
| ↳ `emailAddress` | string | Email address |
|
|
||||||
| ↳ `active` | boolean | Whether the account is active |
|
|
||||||
| ↳ `timeZone` | string | User timezone |
|
|
||||||
| `total` | number | Total number of users |
|
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
|
||||||
|
|
||||||
### `jsm_add_organization_users`
|
|
||||||
|
|
||||||
Add users to an organization in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `organizationId` | string | Yes | Organization ID to add users to |
|
|
||||||
| `accountIds` | string | Yes | Comma-separated account IDs to add to the organization |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `organizationId` | string | Organization ID |
|
|
||||||
| `success` | boolean | Whether users were added successfully |
|
|
||||||
|
|
||||||
### `jsm_remove_organization_users`
|
|
||||||
|
|
||||||
Remove users from an organization in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `organizationId` | string | Yes | Organization ID to remove users from |
|
|
||||||
| `accountIds` | string | Yes | Comma-separated account IDs to remove from the organization |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `organizationId` | string | Organization ID |
|
|
||||||
| `success` | boolean | Whether users were removed successfully |
|
|
||||||
|
|
||||||
### `jsm_get_queues`
|
### `jsm_get_queues`
|
||||||
|
|
||||||
Get queues for a service desk in Jira Service Management
|
Get queues for a service desk in Jira Service Management
|
||||||
@@ -729,51 +438,6 @@ Get queues for a service desk in Jira Service Management
|
|||||||
| `total` | number | Total number of queues |
|
| `total` | number | Total number of queues |
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
| `isLastPage` | boolean | Whether this is the last page |
|
||||||
|
|
||||||
### `jsm_get_queue_issues`
|
|
||||||
|
|
||||||
Get issues in a specific queue for a service desk in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
|
||||||
| `queueId` | string | Yes | Queue ID to get issues from |
|
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `serviceDeskId` | string | Service desk ID |
|
|
||||||
| `queueId` | string | Queue ID |
|
|
||||||
| `issues` | array | List of issues in the queue |
|
|
||||||
| ↳ `issueId` | string | Jira issue ID |
|
|
||||||
| ↳ `issueKey` | string | Issue key \(e.g., SD-123\) |
|
|
||||||
| ↳ `requestTypeId` | string | Request type ID |
|
|
||||||
| ↳ `serviceDeskId` | string | Service desk ID |
|
|
||||||
| ↳ `createdDate` | json | Creation date with iso8601, friendly, epochMillis |
|
|
||||||
| ↳ `currentStatus` | object | Current request status |
|
|
||||||
| ↳ `status` | string | Status name |
|
|
||||||
| ↳ `statusCategory` | string | Status category \(NEW, INDETERMINATE, DONE\) |
|
|
||||||
| ↳ `statusDate` | json | Status change date with iso8601, friendly, epochMillis |
|
|
||||||
| ↳ `reporter` | object | Reporter user details |
|
|
||||||
| ↳ `accountId` | string | Atlassian account ID |
|
|
||||||
| ↳ `displayName` | string | User display name |
|
|
||||||
| ↳ `emailAddress` | string | User email address |
|
|
||||||
| ↳ `active` | boolean | Whether the account is active |
|
|
||||||
| ↳ `requestFieldValues` | array | Request field values |
|
|
||||||
| ↳ `fieldId` | string | Field identifier |
|
|
||||||
| ↳ `label` | string | Human-readable field label |
|
|
||||||
| ↳ `value` | json | Field value |
|
|
||||||
| ↳ `renderedValue` | json | HTML-rendered field value |
|
|
||||||
| `total` | number | Total number of issues in the queue |
|
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
|
||||||
|
|
||||||
### `jsm_get_sla`
|
### `jsm_get_sla`
|
||||||
|
|
||||||
Get SLA information for a service request in Jira Service Management
|
Get SLA information for a service request in Jira Service Management
|
||||||
@@ -905,32 +569,6 @@ Add participants to a request in Jira Service Management
|
|||||||
| ↳ `active` | boolean | Whether the account is active |
|
| ↳ `active` | boolean | Whether the account is active |
|
||||||
| `success` | boolean | Whether the operation succeeded |
|
| `success` | boolean | Whether the operation succeeded |
|
||||||
|
|
||||||
### `jsm_remove_participants`
|
|
||||||
|
|
||||||
Remove participants from a request in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
| `accountIds` | string | Yes | Comma-separated account IDs to remove as participants |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `participants` | array | Remaining participants after removal |
|
|
||||||
| ↳ `accountId` | string | Atlassian account ID |
|
|
||||||
| ↳ `displayName` | string | Display name |
|
|
||||||
| ↳ `emailAddress` | string | Email address |
|
|
||||||
| ↳ `active` | boolean | Whether the account is active |
|
|
||||||
| `success` | boolean | Whether the operation succeeded |
|
|
||||||
|
|
||||||
### `jsm_get_approvals`
|
### `jsm_get_approvals`
|
||||||
|
|
||||||
Get approvals for a request in Jira Service Management
|
Get approvals for a request in Jira Service Management
|
||||||
@@ -1006,9 +644,9 @@ Approve or decline an approval request in Jira Service Management
|
|||||||
| `approval` | json | The approval object |
|
| `approval` | json | The approval object |
|
||||||
| `success` | boolean | Whether the operation succeeded |
|
| `success` | boolean | Whether the operation succeeded |
|
||||||
|
|
||||||
### `jsm_get_feedback`
|
### `jsm_get_request_type_fields`
|
||||||
|
|
||||||
Get CSAT feedback for a service request in Jira Service Management
|
Get the fields required to create a request of a specific type in Jira Service Management
|
||||||
|
|
||||||
#### Input
|
#### Input
|
||||||
|
|
||||||
@@ -1016,152 +654,27 @@ Get CSAT feedback for a service request in Jira Service Management
|
|||||||
| --------- | ---- | -------- | ----------- |
|
| --------- | ---- | -------- | ----------- |
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
| `serviceDeskId` | string | Yes | Service Desk ID \(e.g., "1", "2"\) |
|
||||||
|
| `requestTypeId` | string | Yes | Request Type ID \(e.g., "10", "15"\) |
|
||||||
|
|
||||||
#### Output
|
#### Output
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
| Parameter | Type | Description |
|
||||||
| --------- | ---- | ----------- |
|
| --------- | ---- | ----------- |
|
||||||
| `ts` | string | Timestamp of the operation |
|
| `ts` | string | Timestamp of the operation |
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
| `serviceDeskId` | string | Service desk ID |
|
||||||
| `rating` | number | CSAT rating \(1-5\) |
|
| `requestTypeId` | string | Request type ID |
|
||||||
| `comment` | string | Feedback comment |
|
| `canAddRequestParticipants` | boolean | Whether participants can be added to requests of this type |
|
||||||
| `type` | string | Feedback type \(e.g., csat\) |
|
| `canRaiseOnBehalfOf` | boolean | Whether requests can be raised on behalf of another user |
|
||||||
|
| `requestTypeFields` | array | List of fields for this request type |
|
||||||
### `jsm_add_feedback`
|
| ↳ `fieldId` | string | Field identifier \(e.g., summary, description, customfield_10010\) |
|
||||||
|
| ↳ `name` | string | Human-readable field name |
|
||||||
Add CSAT feedback to a service request in Jira Service Management
|
| ↳ `description` | string | Help text for the field |
|
||||||
|
| ↳ `required` | boolean | Whether the field is required |
|
||||||
#### Input
|
| ↳ `visible` | boolean | Whether the field is visible |
|
||||||
|
| ↳ `validValues` | json | Allowed values for select fields |
|
||||||
| Parameter | Type | Required | Description |
|
| ↳ `presetValues` | json | Pre-populated values |
|
||||||
| --------- | ---- | -------- | ----------- |
|
| ↳ `defaultValues` | json | Default values for the field |
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
| ↳ `jiraSchema` | json | Jira field schema with type, system, custom, customId |
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
| `rating` | number | Yes | CSAT rating \(1-5\) |
|
|
||||||
| `comment` | string | No | Optional feedback comment |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `rating` | number | CSAT rating submitted |
|
|
||||||
| `comment` | string | Feedback comment |
|
|
||||||
| `type` | string | Feedback type |
|
|
||||||
| `success` | boolean | Whether feedback was submitted successfully |
|
|
||||||
|
|
||||||
### `jsm_delete_feedback`
|
|
||||||
|
|
||||||
Delete CSAT feedback from a service request in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `success` | boolean | Whether feedback was deleted |
|
|
||||||
|
|
||||||
### `jsm_get_notification`
|
|
||||||
|
|
||||||
Get notification subscription status for a request in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `subscribed` | boolean | Whether currently subscribed to notifications |
|
|
||||||
|
|
||||||
### `jsm_subscribe_notification`
|
|
||||||
|
|
||||||
Subscribe to notifications for a request in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `success` | boolean | Whether subscription was successful |
|
|
||||||
|
|
||||||
### `jsm_unsubscribe_notification`
|
|
||||||
|
|
||||||
Unsubscribe from notifications for a request in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `issueIdOrKey` | string | Yes | Issue ID or key \(e.g., SD-123\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `issueIdOrKey` | string | Issue ID or key |
|
|
||||||
| `success` | boolean | Whether unsubscription was successful |
|
|
||||||
|
|
||||||
### `jsm_search_knowledge_base`
|
|
||||||
|
|
||||||
Search knowledge base articles in Jira Service Management
|
|
||||||
|
|
||||||
#### Input
|
|
||||||
|
|
||||||
| Parameter | Type | Required | Description |
|
|
||||||
| --------- | ---- | -------- | ----------- |
|
|
||||||
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
|
|
||||||
| `cloudId` | string | No | Jira Cloud ID for the instance |
|
|
||||||
| `serviceDeskId` | string | No | Service Desk ID to search within \(optional, searches globally if omitted\) |
|
|
||||||
| `query` | string | Yes | Search query for knowledge base articles |
|
|
||||||
| `highlight` | boolean | No | Whether to highlight matching text in results |
|
|
||||||
| `start` | number | No | Start index for pagination \(e.g., 0, 50, 100\) |
|
|
||||||
| `limit` | number | No | Maximum results to return \(e.g., 10, 25, 50\) |
|
|
||||||
|
|
||||||
#### Output
|
|
||||||
|
|
||||||
| Parameter | Type | Description |
|
|
||||||
| --------- | ---- | ----------- |
|
|
||||||
| `ts` | string | Timestamp of the operation |
|
|
||||||
| `articles` | array | List of knowledge base articles |
|
|
||||||
| ↳ `title` | string | Article title |
|
|
||||||
| ↳ `excerpt` | string | Article excerpt/summary |
|
|
||||||
| ↳ `sourceType` | string | Source type \(e.g., confluence\) |
|
|
||||||
| ↳ `sourcePageId` | string | Source page ID |
|
|
||||||
| ↳ `sourceSpaceKey` | string | Source space key |
|
|
||||||
| ↳ `contentUrl` | string | URL to rendered content |
|
|
||||||
| `total` | number | Total number of articles found |
|
|
||||||
| `isLastPage` | boolean | Whether this is the last page |
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,145 +1,81 @@
|
|||||||
import { db } from '@sim/db'
|
|
||||||
import { settings } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { getSession } from '@/lib/auth'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
|
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
const logger = createLogger('CopilotAutoAllowedToolsAPI')
|
const logger = createLogger('CopilotAutoAllowedToolsAPI')
|
||||||
|
|
||||||
/**
|
function copilotHeaders(): HeadersInit {
|
||||||
* GET - Fetch user's auto-allowed integration tools
|
const headers: Record<string, string> = {
|
||||||
*/
|
'Content-Type': 'application/json',
|
||||||
export async function GET() {
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
|
|
||||||
const [userSettings] = await db
|
|
||||||
.select()
|
|
||||||
.from(settings)
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (userSettings) {
|
|
||||||
const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
|
|
||||||
return NextResponse.json({ autoAllowedTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
await db.insert(settings).values({
|
|
||||||
id: userId,
|
|
||||||
userId,
|
|
||||||
copilotAutoAllowedTools: [],
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({ autoAllowedTools: [] })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to fetch auto-allowed tools', { error })
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
}
|
||||||
|
if (env.COPILOT_API_KEY) {
|
||||||
|
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||||
|
}
|
||||||
|
return headers
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* POST - Add a tool to the auto-allowed list
|
|
||||||
*/
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
if (!body.toolId || typeof body.toolId !== 'string') {
|
|
||||||
return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const toolId = body.toolId
|
|
||||||
|
|
||||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
|
||||||
|
|
||||||
if (!currentTools.includes(toolId)) {
|
|
||||||
const updatedTools = [...currentTools, toolId]
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({
|
|
||||||
copilotAutoAllowedTools: updatedTools,
|
|
||||||
updatedAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
|
|
||||||
logger.info('Added tool to auto-allowed list', { userId, toolId })
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: currentTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
await db.insert(settings).values({
|
|
||||||
id: userId,
|
|
||||||
userId,
|
|
||||||
copilotAutoAllowedTools: [toolId],
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to add auto-allowed tool', { error })
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* DELETE - Remove a tool from the auto-allowed list
|
|
||||||
*/
|
|
||||||
export async function DELETE(request: NextRequest) {
|
export async function DELETE(request: NextRequest) {
|
||||||
|
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||||
|
if (!isAuthenticated || !userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const toolIdFromQuery = new URL(request.url).searchParams.get('toolId') || undefined
|
||||||
|
const toolIdFromBody = await request
|
||||||
|
.json()
|
||||||
|
.then((body) => (typeof body?.toolId === 'string' ? body.toolId : undefined))
|
||||||
|
.catch(() => undefined)
|
||||||
|
const toolId = toolIdFromBody || toolIdFromQuery
|
||||||
|
if (!toolId) {
|
||||||
|
return NextResponse.json({ error: 'toolId is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const session = await getSession()
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||||
|
method: 'DELETE',
|
||||||
|
headers: copilotHeaders(),
|
||||||
|
body: JSON.stringify({
|
||||||
|
userId,
|
||||||
|
toolId,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
|
||||||
if (!session?.user?.id) {
|
const payload = await res.json().catch(() => ({}))
|
||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
if (!res.ok) {
|
||||||
|
logger.warn('Failed to remove auto-allowed tool via copilot backend', {
|
||||||
|
status: res.status,
|
||||||
|
userId,
|
||||||
|
toolId,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: payload?.error || 'Failed to remove auto-allowed tool',
|
||||||
|
autoAllowedTools: [],
|
||||||
|
},
|
||||||
|
{ status: res.status }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const userId = session.user.id
|
return NextResponse.json({
|
||||||
const { searchParams } = new URL(request.url)
|
success: true,
|
||||||
const toolId = searchParams.get('toolId')
|
autoAllowedTools: Array.isArray(payload?.autoAllowedTools) ? payload.autoAllowedTools : [],
|
||||||
|
})
|
||||||
if (!toolId) {
|
|
||||||
return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
|
||||||
const updatedTools = currentTools.filter((t) => t !== toolId)
|
|
||||||
|
|
||||||
await db
|
|
||||||
.update(settings)
|
|
||||||
.set({
|
|
||||||
copilotAutoAllowedTools: updatedTools,
|
|
||||||
updatedAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(settings.userId, userId))
|
|
||||||
|
|
||||||
logger.info('Removed tool from auto-allowed list', { userId, toolId })
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ success: true, autoAllowedTools: [] })
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to remove auto-allowed tool', { error })
|
logger.error('Error removing auto-allowed tool', {
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
userId,
|
||||||
|
toolId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to remove auto-allowed tool',
|
||||||
|
autoAllowedTools: [],
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,13 +28,24 @@ import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
|
|||||||
|
|
||||||
const logger = createLogger('CopilotChatAPI')
|
const logger = createLogger('CopilotChatAPI')
|
||||||
|
|
||||||
|
function truncateForLog(value: string, maxLength = 120): string {
|
||||||
|
if (!value || maxLength <= 0) return ''
|
||||||
|
return value.length <= maxLength ? value : `${value.slice(0, maxLength)}...`
|
||||||
|
}
|
||||||
|
|
||||||
async function requestChatTitleFromCopilot(params: {
|
async function requestChatTitleFromCopilot(params: {
|
||||||
message: string
|
message: string
|
||||||
model: string
|
model: string
|
||||||
provider?: string
|
provider?: string
|
||||||
}): Promise<string | null> {
|
}): Promise<string | null> {
|
||||||
const { message, model, provider } = params
|
const { message, model, provider } = params
|
||||||
if (!message || !model) return null
|
if (!message || !model) {
|
||||||
|
logger.warn('Skipping chat title request because message/model is missing', {
|
||||||
|
hasMessage: !!message,
|
||||||
|
hasModel: !!model,
|
||||||
|
})
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
const headers: Record<string, string> = {
|
const headers: Record<string, string> = {
|
||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
@@ -44,6 +55,13 @@ async function requestChatTitleFromCopilot(params: {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
logger.info('Requesting chat title from copilot backend', {
|
||||||
|
model,
|
||||||
|
provider: provider || null,
|
||||||
|
messageLength: message.length,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
})
|
||||||
|
|
||||||
const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
|
const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers,
|
headers,
|
||||||
@@ -63,10 +81,32 @@ async function requestChatTitleFromCopilot(params: {
|
|||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
|
|
||||||
const title = typeof payload?.title === 'string' ? payload.title.trim() : ''
|
const rawTitle = typeof payload?.title === 'string' ? payload.title : ''
|
||||||
|
const title = rawTitle.trim()
|
||||||
|
logger.info('Received chat title response from copilot backend', {
|
||||||
|
status: response.status,
|
||||||
|
hasRawTitle: !!rawTitle,
|
||||||
|
rawTitle,
|
||||||
|
normalizedTitle: title,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!title) {
|
||||||
|
logger.warn('Copilot backend returned empty chat title', {
|
||||||
|
payload,
|
||||||
|
model,
|
||||||
|
provider: provider || null,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
return title || null
|
return title || null
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Error generating chat title:', error)
|
logger.error('Error generating chat title:', {
|
||||||
|
error,
|
||||||
|
model,
|
||||||
|
provider: provider || null,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
})
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -238,6 +278,7 @@ export async function POST(req: NextRequest) {
|
|||||||
let currentChat: any = null
|
let currentChat: any = null
|
||||||
let conversationHistory: any[] = []
|
let conversationHistory: any[] = []
|
||||||
let actualChatId = chatId
|
let actualChatId = chatId
|
||||||
|
let chatWasCreatedForRequest = false
|
||||||
const selectedModel = model || 'claude-opus-4-6'
|
const selectedModel = model || 'claude-opus-4-6'
|
||||||
|
|
||||||
if (chatId || createNewChat) {
|
if (chatId || createNewChat) {
|
||||||
@@ -249,6 +290,7 @@ export async function POST(req: NextRequest) {
|
|||||||
})
|
})
|
||||||
currentChat = chatResult.chat
|
currentChat = chatResult.chat
|
||||||
actualChatId = chatResult.chatId || chatId
|
actualChatId = chatResult.chatId || chatId
|
||||||
|
chatWasCreatedForRequest = chatResult.isNew
|
||||||
const history = buildConversationHistory(
|
const history = buildConversationHistory(
|
||||||
chatResult.conversationHistory,
|
chatResult.conversationHistory,
|
||||||
(chatResult.chat?.conversationId as string | undefined) || conversationId
|
(chatResult.chat?.conversationId as string | undefined) || conversationId
|
||||||
@@ -256,6 +298,18 @@ export async function POST(req: NextRequest) {
|
|||||||
conversationHistory = history.history
|
conversationHistory = history.history
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const shouldGenerateTitleForRequest =
|
||||||
|
!!actualChatId &&
|
||||||
|
chatWasCreatedForRequest &&
|
||||||
|
!currentChat?.title &&
|
||||||
|
conversationHistory.length === 0
|
||||||
|
|
||||||
|
const titleGenerationParams = {
|
||||||
|
message,
|
||||||
|
model: selectedModel,
|
||||||
|
provider,
|
||||||
|
}
|
||||||
|
|
||||||
const effectiveMode = mode === 'agent' ? 'build' : mode
|
const effectiveMode = mode === 'agent' ? 'build' : mode
|
||||||
const effectiveConversationId =
|
const effectiveConversationId =
|
||||||
(currentChat?.conversationId as string | undefined) || conversationId
|
(currentChat?.conversationId as string | undefined) || conversationId
|
||||||
@@ -348,10 +402,22 @@ export async function POST(req: NextRequest) {
|
|||||||
await pushEvent({ type: 'chat_id', chatId: actualChatId })
|
await pushEvent({ type: 'chat_id', chatId: actualChatId })
|
||||||
}
|
}
|
||||||
|
|
||||||
if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
|
if (shouldGenerateTitleForRequest) {
|
||||||
requestChatTitleFromCopilot({ message, model: selectedModel, provider })
|
logger.info(`[${tracker.requestId}] Starting title generation for streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: titleGenerationParams.model,
|
||||||
|
provider: provider || null,
|
||||||
|
messageLength: message.length,
|
||||||
|
messagePreview: truncateForLog(message),
|
||||||
|
chatWasCreatedForRequest,
|
||||||
|
})
|
||||||
|
requestChatTitleFromCopilot(titleGenerationParams)
|
||||||
.then(async (title) => {
|
.then(async (title) => {
|
||||||
if (title) {
|
if (title) {
|
||||||
|
logger.info(`[${tracker.requestId}] Generated title for streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
title,
|
||||||
|
})
|
||||||
await db
|
await db
|
||||||
.update(copilotChats)
|
.update(copilotChats)
|
||||||
.set({
|
.set({
|
||||||
@@ -359,12 +425,30 @@ export async function POST(req: NextRequest) {
|
|||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
})
|
})
|
||||||
.where(eq(copilotChats.id, actualChatId!))
|
.where(eq(copilotChats.id, actualChatId!))
|
||||||
await pushEvent({ type: 'title_updated', title })
|
await pushEvent({ type: 'title_updated', title, chatId: actualChatId })
|
||||||
|
logger.info(`[${tracker.requestId}] Emitted title_updated SSE event`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
title,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
logger.warn(`[${tracker.requestId}] No title returned for streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: selectedModel,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
||||||
})
|
})
|
||||||
|
} else if (actualChatId && !chatWasCreatedForRequest) {
|
||||||
|
logger.info(
|
||||||
|
`[${tracker.requestId}] Skipping title generation because chat already exists`,
|
||||||
|
{
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: titleGenerationParams.model,
|
||||||
|
provider: provider || null,
|
||||||
|
}
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -479,9 +563,9 @@ export async function POST(req: NextRequest) {
|
|||||||
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
|
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
|
||||||
|
|
||||||
// Start title generation in parallel if this is first message (non-streaming)
|
// Start title generation in parallel if this is first message (non-streaming)
|
||||||
if (actualChatId && !currentChat.title && conversationHistory.length === 0) {
|
if (shouldGenerateTitleForRequest) {
|
||||||
logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`)
|
logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`)
|
||||||
requestChatTitleFromCopilot({ message, model: selectedModel, provider })
|
requestChatTitleFromCopilot(titleGenerationParams)
|
||||||
.then(async (title) => {
|
.then(async (title) => {
|
||||||
if (title) {
|
if (title) {
|
||||||
await db
|
await db
|
||||||
@@ -492,11 +576,22 @@ export async function POST(req: NextRequest) {
|
|||||||
})
|
})
|
||||||
.where(eq(copilotChats.id, actualChatId!))
|
.where(eq(copilotChats.id, actualChatId!))
|
||||||
logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
|
logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
|
||||||
|
} else {
|
||||||
|
logger.warn(`[${tracker.requestId}] No title returned for non-streaming response`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: selectedModel,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
||||||
})
|
})
|
||||||
|
} else if (actualChatId && !chatWasCreatedForRequest) {
|
||||||
|
logger.info(`[${tracker.requestId}] Skipping title generation because chat already exists`, {
|
||||||
|
chatId: actualChatId,
|
||||||
|
model: titleGenerationParams.model,
|
||||||
|
provider: provider || null,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update chat in database immediately (without blocking for title)
|
// Update chat in database immediately (without blocking for title)
|
||||||
|
|||||||
@@ -1,7 +1,11 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
|
import {
|
||||||
|
REDIS_TOOL_CALL_PREFIX,
|
||||||
|
REDIS_TOOL_CALL_TTL_SECONDS,
|
||||||
|
SIM_AGENT_API_URL,
|
||||||
|
} from '@/lib/copilot/constants'
|
||||||
import {
|
import {
|
||||||
authenticateCopilotRequestSessionOnly,
|
authenticateCopilotRequestSessionOnly,
|
||||||
createBadRequestResponse,
|
createBadRequestResponse,
|
||||||
@@ -10,6 +14,7 @@ import {
|
|||||||
createUnauthorizedResponse,
|
createUnauthorizedResponse,
|
||||||
type NotificationStatus,
|
type NotificationStatus,
|
||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
import { getRedisClient } from '@/lib/core/config/redis'
|
import { getRedisClient } from '@/lib/core/config/redis'
|
||||||
|
|
||||||
const logger = createLogger('CopilotConfirmAPI')
|
const logger = createLogger('CopilotConfirmAPI')
|
||||||
@@ -21,6 +26,8 @@ const ConfirmationSchema = z.object({
|
|||||||
errorMap: () => ({ message: 'Invalid notification status' }),
|
errorMap: () => ({ message: 'Invalid notification status' }),
|
||||||
}),
|
}),
|
||||||
message: z.string().optional(), // Optional message for background moves or additional context
|
message: z.string().optional(), // Optional message for background moves or additional context
|
||||||
|
toolName: z.string().optional(),
|
||||||
|
remember: z.boolean().optional(),
|
||||||
})
|
})
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -57,6 +64,44 @@ async function updateToolCallStatus(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function saveAutoAllowedToolPreference(userId: string, toolName: string): Promise<boolean> {
|
||||||
|
const headers: Record<string, string> = {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
}
|
||||||
|
if (env.COPILOT_API_KEY) {
|
||||||
|
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers,
|
||||||
|
body: JSON.stringify({
|
||||||
|
userId,
|
||||||
|
toolId: toolName,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
logger.warn('Failed to persist auto-allowed tool preference', {
|
||||||
|
userId,
|
||||||
|
toolName,
|
||||||
|
status: response.status,
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error persisting auto-allowed tool preference', {
|
||||||
|
userId,
|
||||||
|
toolName,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* POST /api/copilot/confirm
|
* POST /api/copilot/confirm
|
||||||
* Update tool call status (Accept/Reject)
|
* Update tool call status (Accept/Reject)
|
||||||
@@ -74,7 +119,7 @@ export async function POST(req: NextRequest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const body = await req.json()
|
const body = await req.json()
|
||||||
const { toolCallId, status, message } = ConfirmationSchema.parse(body)
|
const { toolCallId, status, message, toolName, remember } = ConfirmationSchema.parse(body)
|
||||||
|
|
||||||
// Update the tool call status in Redis
|
// Update the tool call status in Redis
|
||||||
const updated = await updateToolCallStatus(toolCallId, status, message)
|
const updated = await updateToolCallStatus(toolCallId, status, message)
|
||||||
@@ -90,14 +135,22 @@ export async function POST(req: NextRequest) {
|
|||||||
return createBadRequestResponse('Failed to update tool call status or tool call not found')
|
return createBadRequestResponse('Failed to update tool call status or tool call not found')
|
||||||
}
|
}
|
||||||
|
|
||||||
const duration = tracker.getDuration()
|
let rememberSaved = false
|
||||||
|
if (status === 'accepted' && remember === true && toolName && authenticatedUserId) {
|
||||||
|
rememberSaved = await saveAutoAllowedToolPreference(authenticatedUserId, toolName)
|
||||||
|
}
|
||||||
|
|
||||||
return NextResponse.json({
|
const response: Record<string, unknown> = {
|
||||||
success: true,
|
success: true,
|
||||||
message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
|
message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
|
||||||
toolCallId,
|
toolCallId,
|
||||||
status,
|
status,
|
||||||
})
|
}
|
||||||
|
if (remember === true) {
|
||||||
|
response.rememberSaved = rememberSaved
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json(response)
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const duration = tracker.getDuration()
|
const duration = tracker.getDuration()
|
||||||
|
|
||||||
|
|||||||
89
apps/sim/app/api/mcp/copilot/route.test.ts
Normal file
89
apps/sim/app/api/mcp/copilot/route.test.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
/**
|
||||||
|
* @vitest-environment node
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
|
describe('mcp copilot route manifest contract', () => {
|
||||||
|
const previousInternalSecret = process.env.INTERNAL_API_SECRET
|
||||||
|
const previousAgentUrl = process.env.SIM_AGENT_API_URL
|
||||||
|
const previousFetch = global.fetch
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.resetModules()
|
||||||
|
process.env.INTERNAL_API_SECRET = 'x'.repeat(32)
|
||||||
|
process.env.SIM_AGENT_API_URL = 'https://copilot.sim.ai'
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks()
|
||||||
|
global.fetch = previousFetch
|
||||||
|
if (previousInternalSecret === undefined) {
|
||||||
|
delete process.env.INTERNAL_API_SECRET
|
||||||
|
} else {
|
||||||
|
process.env.INTERNAL_API_SECRET = previousInternalSecret
|
||||||
|
}
|
||||||
|
if (previousAgentUrl === undefined) {
|
||||||
|
delete process.env.SIM_AGENT_API_URL
|
||||||
|
} else {
|
||||||
|
process.env.SIM_AGENT_API_URL = previousAgentUrl
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it('loads and caches tool manifest from copilot backend', async () => {
|
||||||
|
const payload = {
|
||||||
|
directTools: [
|
||||||
|
{
|
||||||
|
name: 'list_workspaces',
|
||||||
|
description: 'List workspaces',
|
||||||
|
inputSchema: { type: 'object', properties: {} },
|
||||||
|
toolId: 'list_user_workspaces',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
subagentTools: [
|
||||||
|
{
|
||||||
|
name: 'sim_build',
|
||||||
|
description: 'Build workflows',
|
||||||
|
inputSchema: { type: 'object', properties: {} },
|
||||||
|
agentId: 'build',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
generatedAt: '2026-02-12T00:00:00Z',
|
||||||
|
}
|
||||||
|
|
||||||
|
const fetchSpy = vi.spyOn(global, 'fetch').mockResolvedValue(
|
||||||
|
new Response(JSON.stringify(payload), {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
const mod = await import('./route')
|
||||||
|
mod.clearMcpToolManifestCacheForTests()
|
||||||
|
|
||||||
|
const first = await mod.getMcpToolManifest()
|
||||||
|
const second = await mod.getMcpToolManifest()
|
||||||
|
|
||||||
|
expect(first).toEqual(payload)
|
||||||
|
expect(second).toEqual(payload)
|
||||||
|
expect(fetchSpy).toHaveBeenCalledTimes(1)
|
||||||
|
expect(fetchSpy.mock.calls[0]?.[0]).toBe('https://copilot.sim.ai/api/mcp/tools/manifest')
|
||||||
|
})
|
||||||
|
|
||||||
|
it('rejects invalid manifest payloads from copilot backend', async () => {
|
||||||
|
const fetchSpy = vi.spyOn(global, 'fetch').mockResolvedValue(
|
||||||
|
new Response(JSON.stringify({ tools: [] }), {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
const mod = await import('./route')
|
||||||
|
mod.clearMcpToolManifestCacheForTests()
|
||||||
|
|
||||||
|
await expect(mod.fetchMcpToolManifestFromCopilot()).rejects.toThrow(
|
||||||
|
'invalid manifest payload from copilot'
|
||||||
|
)
|
||||||
|
expect(fetchSpy).toHaveBeenCalledTimes(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -28,7 +28,6 @@ import {
|
|||||||
executeToolServerSide,
|
executeToolServerSide,
|
||||||
prepareExecutionContext,
|
prepareExecutionContext,
|
||||||
} from '@/lib/copilot/orchestrator/tool-executor'
|
} from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
|
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||||
import {
|
import {
|
||||||
@@ -39,6 +38,32 @@ import {
|
|||||||
const logger = createLogger('CopilotMcpAPI')
|
const logger = createLogger('CopilotMcpAPI')
|
||||||
const mcpRateLimiter = new RateLimiter()
|
const mcpRateLimiter = new RateLimiter()
|
||||||
const DEFAULT_COPILOT_MODEL = 'claude-opus-4-6'
|
const DEFAULT_COPILOT_MODEL = 'claude-opus-4-6'
|
||||||
|
const MCP_TOOL_MANIFEST_CACHE_TTL_MS = 60_000
|
||||||
|
|
||||||
|
type McpDirectToolDef = {
|
||||||
|
name: string
|
||||||
|
description: string
|
||||||
|
inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
|
||||||
|
toolId: string
|
||||||
|
}
|
||||||
|
|
||||||
|
type McpSubagentToolDef = {
|
||||||
|
name: string
|
||||||
|
description: string
|
||||||
|
inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
|
||||||
|
agentId: string
|
||||||
|
}
|
||||||
|
|
||||||
|
type McpToolManifest = {
|
||||||
|
directTools: McpDirectToolDef[]
|
||||||
|
subagentTools: McpSubagentToolDef[]
|
||||||
|
generatedAt?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
let cachedMcpToolManifest: {
|
||||||
|
value: McpToolManifest
|
||||||
|
expiresAt: number
|
||||||
|
} | null = null
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
export const runtime = 'nodejs'
|
export const runtime = 'nodejs'
|
||||||
@@ -112,6 +137,58 @@ async function authenticateCopilotApiKey(apiKey: string): Promise<CopilotKeyAuth
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function isMcpToolManifest(value: unknown): value is McpToolManifest {
|
||||||
|
if (!value || typeof value !== 'object') return false
|
||||||
|
const payload = value as Record<string, unknown>
|
||||||
|
return Array.isArray(payload.directTools) && Array.isArray(payload.subagentTools)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchMcpToolManifestFromCopilot(): Promise<McpToolManifest> {
|
||||||
|
const internalSecret = env.INTERNAL_API_SECRET
|
||||||
|
if (!internalSecret) {
|
||||||
|
throw new Error('INTERNAL_API_SECRET not configured')
|
||||||
|
}
|
||||||
|
|
||||||
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/mcp/tools/manifest`, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'x-api-key': internalSecret,
|
||||||
|
},
|
||||||
|
signal: AbortSignal.timeout(10_000),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
const bodyText = await res.text().catch(() => '')
|
||||||
|
throw new Error(`manifest fetch failed (${res.status}): ${bodyText || res.statusText}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload: unknown = await res.json()
|
||||||
|
if (!isMcpToolManifest(payload)) {
|
||||||
|
throw new Error('invalid manifest payload from copilot')
|
||||||
|
}
|
||||||
|
|
||||||
|
return payload
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getMcpToolManifest(): Promise<McpToolManifest> {
|
||||||
|
const now = Date.now()
|
||||||
|
if (cachedMcpToolManifest && cachedMcpToolManifest.expiresAt > now) {
|
||||||
|
return cachedMcpToolManifest.value
|
||||||
|
}
|
||||||
|
|
||||||
|
const manifest = await fetchMcpToolManifestFromCopilot()
|
||||||
|
cachedMcpToolManifest = {
|
||||||
|
value: manifest,
|
||||||
|
expiresAt: now + MCP_TOOL_MANIFEST_CACHE_TTL_MS,
|
||||||
|
}
|
||||||
|
return manifest
|
||||||
|
}
|
||||||
|
|
||||||
|
export function clearMcpToolManifestCacheForTests(): void {
|
||||||
|
cachedMcpToolManifest = null
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
|
* MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
|
||||||
* This is included in the initialize response to help external LLMs understand
|
* This is included in the initialize response to help external LLMs understand
|
||||||
@@ -380,13 +457,15 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
|
|||||||
)
|
)
|
||||||
|
|
||||||
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
||||||
const directTools = DIRECT_TOOL_DEFS.map((tool) => ({
|
const manifest = await getMcpToolManifest()
|
||||||
|
|
||||||
|
const directTools = manifest.directTools.map((tool) => ({
|
||||||
name: tool.name,
|
name: tool.name,
|
||||||
description: tool.description,
|
description: tool.description,
|
||||||
inputSchema: tool.inputSchema,
|
inputSchema: tool.inputSchema,
|
||||||
}))
|
}))
|
||||||
|
|
||||||
const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
|
const subagentTools = manifest.subagentTools.map((tool) => ({
|
||||||
name: tool.name,
|
name: tool.name,
|
||||||
description: tool.description,
|
description: tool.description,
|
||||||
inputSchema: tool.inputSchema,
|
inputSchema: tool.inputSchema,
|
||||||
@@ -455,12 +534,15 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
|
|||||||
throw new McpError(ErrorCode.InvalidParams, 'Tool name required')
|
throw new McpError(ErrorCode.InvalidParams, 'Tool name required')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const manifest = await getMcpToolManifest()
|
||||||
|
|
||||||
const result = await handleToolsCall(
|
const result = await handleToolsCall(
|
||||||
{
|
{
|
||||||
name: params.name,
|
name: params.name,
|
||||||
arguments: params.arguments,
|
arguments: params.arguments,
|
||||||
},
|
},
|
||||||
authResult.userId,
|
authResult.userId,
|
||||||
|
manifest,
|
||||||
abortSignal
|
abortSignal
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -556,16 +638,17 @@ function trackMcpCopilotCall(userId: string): void {
|
|||||||
async function handleToolsCall(
|
async function handleToolsCall(
|
||||||
params: { name: string; arguments?: Record<string, unknown> },
|
params: { name: string; arguments?: Record<string, unknown> },
|
||||||
userId: string,
|
userId: string,
|
||||||
|
manifest: McpToolManifest,
|
||||||
abortSignal?: AbortSignal
|
abortSignal?: AbortSignal
|
||||||
): Promise<CallToolResult> {
|
): Promise<CallToolResult> {
|
||||||
const args = params.arguments || {}
|
const args = params.arguments || {}
|
||||||
|
|
||||||
const directTool = DIRECT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
const directTool = manifest.directTools.find((tool) => tool.name === params.name)
|
||||||
if (directTool) {
|
if (directTool) {
|
||||||
return handleDirectToolCall(directTool, args, userId)
|
return handleDirectToolCall(directTool, args, userId)
|
||||||
}
|
}
|
||||||
|
|
||||||
const subagentTool = SUBAGENT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
const subagentTool = manifest.subagentTools.find((tool) => tool.name === params.name)
|
||||||
if (subagentTool) {
|
if (subagentTool) {
|
||||||
return handleSubagentToolCall(subagentTool, args, userId, abortSignal)
|
return handleSubagentToolCall(subagentTool, args, userId, abortSignal)
|
||||||
}
|
}
|
||||||
@@ -574,7 +657,7 @@ async function handleToolsCall(
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function handleDirectToolCall(
|
async function handleDirectToolCall(
|
||||||
toolDef: (typeof DIRECT_TOOL_DEFS)[number],
|
toolDef: McpDirectToolDef,
|
||||||
args: Record<string, unknown>,
|
args: Record<string, unknown>,
|
||||||
userId: string
|
userId: string
|
||||||
): Promise<CallToolResult> {
|
): Promise<CallToolResult> {
|
||||||
@@ -711,7 +794,7 @@ async function handleBuildToolCall(
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function handleSubagentToolCall(
|
async function handleSubagentToolCall(
|
||||||
toolDef: (typeof SUBAGENT_TOOL_DEFS)[number],
|
toolDef: McpSubagentToolDef,
|
||||||
args: Record<string, unknown>,
|
args: Record<string, unknown>,
|
||||||
userId: string,
|
userId: string,
|
||||||
abortSignal?: AbortSignal
|
abortSignal?: AbortSignal
|
||||||
|
|||||||
@@ -38,45 +38,6 @@ const createBlogPostSchema = z.object({
|
|||||||
status: z.enum(['current', 'draft']).optional(),
|
status: z.enum(['current', 'draft']).optional(),
|
||||||
})
|
})
|
||||||
|
|
||||||
const updateBlogPostSchema = z
|
|
||||||
.object({
|
|
||||||
domain: z.string().min(1, 'Domain is required'),
|
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
|
||||||
cloudId: z.string().optional(),
|
|
||||||
blogPostId: z.string().min(1, 'Blog post ID is required'),
|
|
||||||
title: z.string().optional(),
|
|
||||||
content: z.string().optional(),
|
|
||||||
status: z.enum(['current', 'draft']).optional(),
|
|
||||||
})
|
|
||||||
.refine(
|
|
||||||
(data) => {
|
|
||||||
const validation = validateAlphanumericId(data.blogPostId, 'blogPostId', 255)
|
|
||||||
return validation.isValid
|
|
||||||
},
|
|
||||||
(data) => {
|
|
||||||
const validation = validateAlphanumericId(data.blogPostId, 'blogPostId', 255)
|
|
||||||
return { message: validation.error || 'Invalid blog post ID', path: ['blogPostId'] }
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
const deleteBlogPostSchema = z
|
|
||||||
.object({
|
|
||||||
domain: z.string().min(1, 'Domain is required'),
|
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
|
||||||
cloudId: z.string().optional(),
|
|
||||||
blogPostId: z.string().min(1, 'Blog post ID is required'),
|
|
||||||
})
|
|
||||||
.refine(
|
|
||||||
(data) => {
|
|
||||||
const validation = validateAlphanumericId(data.blogPostId, 'blogPostId', 255)
|
|
||||||
return validation.isValid
|
|
||||||
},
|
|
||||||
(data) => {
|
|
||||||
const validation = validateAlphanumericId(data.blogPostId, 'blogPostId', 255)
|
|
||||||
return { message: validation.error || 'Invalid blog post ID', path: ['blogPostId'] }
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* List all blog posts or get a specific blog post
|
* List all blog posts or get a specific blog post
|
||||||
*/
|
*/
|
||||||
@@ -322,174 +283,3 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Update a blog post
|
|
||||||
*/
|
|
||||||
export async function PUT(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
const validation = updateBlogPostSchema.safeParse(body)
|
|
||||||
if (!validation.success) {
|
|
||||||
const firstError = validation.error.errors[0]
|
|
||||||
return NextResponse.json({ error: firstError.message }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: providedCloudId,
|
|
||||||
blogPostId,
|
|
||||||
title,
|
|
||||||
content,
|
|
||||||
status,
|
|
||||||
} = validation.data
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const blogPostUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/blogposts/${blogPostId}`
|
|
||||||
|
|
||||||
const currentResponse = await fetch(blogPostUrl, {
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!currentResponse.ok) {
|
|
||||||
const errorData = await currentResponse.json().catch(() => null)
|
|
||||||
const errorMessage =
|
|
||||||
errorData?.message || `Failed to fetch blog post for update (${currentResponse.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: currentResponse.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const currentPost = await currentResponse.json()
|
|
||||||
const currentVersion = currentPost.version.number
|
|
||||||
|
|
||||||
const updateBody: Record<string, unknown> = {
|
|
||||||
id: blogPostId,
|
|
||||||
version: {
|
|
||||||
number: currentVersion + 1,
|
|
||||||
message: 'Updated via Sim',
|
|
||||||
},
|
|
||||||
status: status || currentPost.status || 'current',
|
|
||||||
title: title || currentPost.title,
|
|
||||||
}
|
|
||||||
|
|
||||||
if (content) {
|
|
||||||
updateBody.body = {
|
|
||||||
representation: 'storage',
|
|
||||||
value: content,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await fetch(blogPostUrl, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
body: JSON.stringify(updateBody),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage = errorData?.message || `Failed to update blog post (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
return NextResponse.json({
|
|
||||||
id: data.id,
|
|
||||||
title: data.title,
|
|
||||||
status: data.status ?? null,
|
|
||||||
spaceId: data.spaceId ?? null,
|
|
||||||
authorId: data.authorId ?? null,
|
|
||||||
createdAt: data.createdAt ?? null,
|
|
||||||
version: data.version ?? null,
|
|
||||||
body: data.body ?? null,
|
|
||||||
webUrl: data._links?.webui ?? null,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error updating blog post:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Delete a blog post
|
|
||||||
*/
|
|
||||||
export async function DELETE(request: NextRequest) {
|
|
||||||
try {
|
|
||||||
const auth = await checkSessionOrInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await request.json()
|
|
||||||
|
|
||||||
const validation = deleteBlogPostSchema.safeParse(body)
|
|
||||||
if (!validation.success) {
|
|
||||||
const firstError = validation.error.errors[0]
|
|
||||||
return NextResponse.json({ error: firstError.message }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const { domain, accessToken, cloudId: providedCloudId, blogPostId } = validation.data
|
|
||||||
|
|
||||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/blogposts/${blogPostId}`
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => null)
|
|
||||||
logger.error('Confluence API error response:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: JSON.stringify(errorData, null, 2),
|
|
||||||
})
|
|
||||||
const errorMessage = errorData?.message || `Failed to delete blog post (${response.status})`
|
|
||||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ blogPostId, deleted: true })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error deleting blog post:', error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: (error as Error).message || 'Internal server error' },
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,152 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
|
||||||
import {
|
|
||||||
downloadJsmAttachments,
|
|
||||||
getJiraCloudId,
|
|
||||||
getJsmApiBaseUrl,
|
|
||||||
getJsmHeaders,
|
|
||||||
} from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmAttachmentsAPI')
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: cloudIdParam,
|
|
||||||
issueIdOrKey,
|
|
||||||
includeAttachments,
|
|
||||||
start,
|
|
||||||
limit,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!issueIdOrKey) {
|
|
||||||
logger.error('Missing issueIdOrKey in request')
|
|
||||||
return NextResponse.json({ error: 'Issue ID or key is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const issueIdOrKeyValidation = validateJiraIssueKey(issueIdOrKey, 'issueIdOrKey')
|
|
||||||
if (!issueIdOrKeyValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const params = new URLSearchParams()
|
|
||||||
if (start) params.append('start', start)
|
|
||||||
if (limit) params.append('limit', limit)
|
|
||||||
|
|
||||||
const url = `${baseUrl}/request/${issueIdOrKey}/attachment${params.toString() ? `?${params.toString()}` : ''}`
|
|
||||||
|
|
||||||
logger.info('Fetching request attachments from:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
const rawAttachments = data.values || []
|
|
||||||
|
|
||||||
const attachments = rawAttachments.map((att: Record<string, unknown>) => ({
|
|
||||||
filename: att.filename ?? '',
|
|
||||||
author: att.author
|
|
||||||
? {
|
|
||||||
accountId: (att.author as Record<string, unknown>).accountId ?? '',
|
|
||||||
displayName: (att.author as Record<string, unknown>).displayName ?? '',
|
|
||||||
active: (att.author as Record<string, unknown>).active ?? true,
|
|
||||||
}
|
|
||||||
: null,
|
|
||||||
created: att.created ?? null,
|
|
||||||
size: att.size ?? 0,
|
|
||||||
mimeType: att.mimeType ?? '',
|
|
||||||
}))
|
|
||||||
|
|
||||||
let files: Array<{ name: string; mimeType: string; data: string; size: number }> | undefined
|
|
||||||
|
|
||||||
if (includeAttachments && rawAttachments.length > 0) {
|
|
||||||
const downloadable = rawAttachments
|
|
||||||
.filter((att: Record<string, unknown>) => {
|
|
||||||
const links = att._links as Record<string, string> | undefined
|
|
||||||
return links?.content
|
|
||||||
})
|
|
||||||
.map((att: Record<string, unknown>) => ({
|
|
||||||
contentUrl: (att._links as Record<string, string>).content as string,
|
|
||||||
filename: (att.filename as string) ?? '',
|
|
||||||
mimeType: (att.mimeType as string) ?? '',
|
|
||||||
size: (att.size as number) ?? 0,
|
|
||||||
}))
|
|
||||||
|
|
||||||
if (downloadable.length > 0) {
|
|
||||||
files = await downloadJsmAttachments(downloadable, accessToken)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
attachments,
|
|
||||||
total: data.size || 0,
|
|
||||||
isLastPage: data.isLastPage ?? true,
|
|
||||||
...(files && files.length > 0 ? { files } : {}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error fetching attachments:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,101 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmCustomerAPI')
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const { domain, accessToken, cloudId: cloudIdParam, email, displayName } = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!email) {
|
|
||||||
logger.error('Missing email in request')
|
|
||||||
return NextResponse.json({ error: 'Email is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!displayName) {
|
|
||||||
logger.error('Missing displayName in request')
|
|
||||||
return NextResponse.json({ error: 'Display name is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const url = `${baseUrl}/customer`
|
|
||||||
|
|
||||||
logger.info('Creating customer:', { email, displayName })
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
body: JSON.stringify({ email, displayName }),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
accountId: data.accountId ?? '',
|
|
||||||
displayName: data.displayName ?? '',
|
|
||||||
emailAddress: data.emailAddress ?? '',
|
|
||||||
active: data.active ?? true,
|
|
||||||
timeZone: data.timeZone ?? null,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error creating customer:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -57,8 +57,6 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
const { action: customerAction } = body
|
|
||||||
|
|
||||||
const rawIds = accountIds || emails
|
const rawIds = accountIds || emails
|
||||||
const parsedAccountIds = rawIds
|
const parsedAccountIds = rawIds
|
||||||
? typeof rawIds === 'string'
|
? typeof rawIds === 'string'
|
||||||
@@ -71,50 +69,7 @@ export async function POST(request: NextRequest) {
|
|||||||
: []
|
: []
|
||||||
: []
|
: []
|
||||||
|
|
||||||
const isRemoveOperation = customerAction === 'remove'
|
const isAddOperation = parsedAccountIds.length > 0
|
||||||
const isAddOperation = !isRemoveOperation && parsedAccountIds.length > 0
|
|
||||||
|
|
||||||
if (isRemoveOperation) {
|
|
||||||
if (parsedAccountIds.length === 0) {
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: 'Account IDs or emails are required for removal' },
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}/customer`
|
|
||||||
|
|
||||||
logger.info('Removing customers from:', url, { accountIds: parsedAccountIds })
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
body: JSON.stringify({ accountIds: parsedAccountIds }),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (response.status === 204 || response.ok) {
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
serviceDeskId,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isAddOperation) {
|
if (isAddOperation) {
|
||||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}/customer`
|
const url = `${baseUrl}/servicedesk/${serviceDeskId}/customer`
|
||||||
|
|||||||
@@ -1,219 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import {
|
|
||||||
validateEnum,
|
|
||||||
validateJiraCloudId,
|
|
||||||
validateJiraIssueKey,
|
|
||||||
} from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmFeedbackAPI')
|
|
||||||
|
|
||||||
const VALID_ACTIONS = ['get', 'add', 'delete'] as const
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: cloudIdParam,
|
|
||||||
action,
|
|
||||||
issueIdOrKey,
|
|
||||||
rating,
|
|
||||||
comment,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!issueIdOrKey) {
|
|
||||||
logger.error('Missing issueIdOrKey in request')
|
|
||||||
return NextResponse.json({ error: 'Issue ID or key is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!action) {
|
|
||||||
logger.error('Missing action in request')
|
|
||||||
return NextResponse.json({ error: 'Action is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const actionValidation = validateEnum(action, VALID_ACTIONS, 'action')
|
|
||||||
if (!actionValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: actionValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const issueIdOrKeyValidation = validateJiraIssueKey(issueIdOrKey, 'issueIdOrKey')
|
|
||||||
if (!issueIdOrKeyValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const url = `${baseUrl}/request/${issueIdOrKey}/feedback`
|
|
||||||
|
|
||||||
if (action === 'get') {
|
|
||||||
logger.info('Fetching feedback from:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
rating: data.rating ?? null,
|
|
||||||
comment: data.comment?.body ?? null,
|
|
||||||
type: data.type ?? null,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (action === 'add') {
|
|
||||||
if (rating === undefined || rating === null) {
|
|
||||||
logger.error('Missing rating in request')
|
|
||||||
return NextResponse.json({ error: 'Rating is required (1-5)' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('Adding feedback to:', url, { rating })
|
|
||||||
|
|
||||||
const feedbackBody: Record<string, unknown> = {
|
|
||||||
rating: Number(rating),
|
|
||||||
type: 'csat',
|
|
||||||
}
|
|
||||||
|
|
||||||
if (comment) {
|
|
||||||
feedbackBody.comment = { body: comment }
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
body: JSON.stringify(feedbackBody),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
rating: data.rating ?? Number(rating),
|
|
||||||
comment: data.comment?.body ?? comment ?? null,
|
|
||||||
type: data.type ?? 'csat',
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (action === 'delete') {
|
|
||||||
logger.info('Deleting feedback from:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (response.status === 204 || response.ok) {
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error in feedback operation:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,127 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmKnowledgeBaseAPI')
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: cloudIdParam,
|
|
||||||
serviceDeskId,
|
|
||||||
query,
|
|
||||||
highlight,
|
|
||||||
start,
|
|
||||||
limit,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!query) {
|
|
||||||
logger.error('Missing query in request')
|
|
||||||
return NextResponse.json({ error: 'Search query is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (serviceDeskId) {
|
|
||||||
const serviceDeskIdValidation = validateAlphanumericId(serviceDeskId, 'serviceDeskId')
|
|
||||||
if (!serviceDeskIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: serviceDeskIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const params = new URLSearchParams()
|
|
||||||
params.append('query', query)
|
|
||||||
if (highlight !== undefined) params.append('highlight', String(highlight))
|
|
||||||
if (start) params.append('start', start)
|
|
||||||
if (limit) params.append('limit', limit)
|
|
||||||
|
|
||||||
const basePath = serviceDeskId
|
|
||||||
? `${baseUrl}/servicedesk/${serviceDeskId}/knowledgebase/article`
|
|
||||||
: `${baseUrl}/knowledgebase/article`
|
|
||||||
|
|
||||||
const url = `${basePath}?${params.toString()}`
|
|
||||||
|
|
||||||
logger.info('Searching knowledge base:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
const articles = (data.values || []).map((article: Record<string, unknown>) => ({
|
|
||||||
title: (article.title as string) ?? '',
|
|
||||||
excerpt: (article.excerpt as string) ?? '',
|
|
||||||
sourceType: (article.source as Record<string, unknown>)?.type ?? '',
|
|
||||||
sourcePageId: (article.source as Record<string, unknown>)?.pageId ?? null,
|
|
||||||
sourceSpaceKey: (article.source as Record<string, unknown>)?.spaceKey ?? null,
|
|
||||||
contentUrl: (article.content as Record<string, unknown>)?.iframeSrc ?? null,
|
|
||||||
}))
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
articles,
|
|
||||||
total: data.size || 0,
|
|
||||||
isLastPage: data.isLastPage ?? true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error searching knowledge base:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,189 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import {
|
|
||||||
validateEnum,
|
|
||||||
validateJiraCloudId,
|
|
||||||
validateJiraIssueKey,
|
|
||||||
} from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmNotificationAPI')
|
|
||||||
|
|
||||||
const VALID_ACTIONS = ['get', 'subscribe', 'unsubscribe'] as const
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const { domain, accessToken, cloudId: cloudIdParam, action, issueIdOrKey } = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!issueIdOrKey) {
|
|
||||||
logger.error('Missing issueIdOrKey in request')
|
|
||||||
return NextResponse.json({ error: 'Issue ID or key is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!action) {
|
|
||||||
logger.error('Missing action in request')
|
|
||||||
return NextResponse.json({ error: 'Action is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const actionValidation = validateEnum(action, VALID_ACTIONS, 'action')
|
|
||||||
if (!actionValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: actionValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const issueIdOrKeyValidation = validateJiraIssueKey(issueIdOrKey, 'issueIdOrKey')
|
|
||||||
if (!issueIdOrKeyValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const url = `${baseUrl}/request/${issueIdOrKey}/notification`
|
|
||||||
|
|
||||||
if (action === 'get') {
|
|
||||||
logger.info('Fetching notification status from:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
subscribed: data.subscribed ?? false,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (action === 'subscribe') {
|
|
||||||
logger.info('Subscribing to notifications:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (response.status === 204 || response.ok) {
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (action === 'unsubscribe') {
|
|
||||||
logger.info('Unsubscribing from notifications:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (response.status === 204 || response.ok) {
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error in notification operation:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -12,13 +12,7 @@ export const dynamic = 'force-dynamic'
|
|||||||
|
|
||||||
const logger = createLogger('JsmOrganizationAPI')
|
const logger = createLogger('JsmOrganizationAPI')
|
||||||
|
|
||||||
const VALID_ACTIONS = [
|
const VALID_ACTIONS = ['create', 'add_to_service_desk'] as const
|
||||||
'create',
|
|
||||||
'add_to_service_desk',
|
|
||||||
'remove_from_service_desk',
|
|
||||||
'delete',
|
|
||||||
'get',
|
|
||||||
] as const
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
const auth = await checkInternalAuth(request)
|
const auth = await checkInternalAuth(request)
|
||||||
@@ -165,152 +159,6 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (action === 'remove_from_service_desk') {
|
|
||||||
if (!serviceDeskId) {
|
|
||||||
logger.error('Missing serviceDeskId in request')
|
|
||||||
return NextResponse.json({ error: 'Service Desk ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!organizationId) {
|
|
||||||
logger.error('Missing organizationId in request')
|
|
||||||
return NextResponse.json({ error: 'Organization ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const serviceDeskIdValidation = validateAlphanumericId(serviceDeskId, 'serviceDeskId')
|
|
||||||
if (!serviceDeskIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: serviceDeskIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const organizationIdValidation = validateAlphanumericId(organizationId, 'organizationId')
|
|
||||||
if (!organizationIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: organizationIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}/organization`
|
|
||||||
|
|
||||||
logger.info('Removing organization from service desk:', { serviceDeskId, organizationId })
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
body: JSON.stringify({ organizationId: Number.parseInt(organizationId, 10) }),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (response.status === 204 || response.ok) {
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
serviceDeskId,
|
|
||||||
organizationId,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (action === 'delete') {
|
|
||||||
if (!organizationId) {
|
|
||||||
logger.error('Missing organizationId in request')
|
|
||||||
return NextResponse.json({ error: 'Organization ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const organizationIdValidation = validateAlphanumericId(organizationId, 'organizationId')
|
|
||||||
if (!organizationIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: organizationIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `${baseUrl}/organization/${organizationId}`
|
|
||||||
|
|
||||||
logger.info('Deleting organization:', { organizationId })
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (response.status === 204 || response.ok) {
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
organizationId,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (action === 'get') {
|
|
||||||
if (!organizationId) {
|
|
||||||
logger.error('Missing organizationId in request')
|
|
||||||
return NextResponse.json({ error: 'Organization ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const organizationIdValidation = validateAlphanumericId(organizationId, 'organizationId')
|
|
||||||
if (!organizationIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: organizationIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `${baseUrl}/organization/${organizationId}`
|
|
||||||
|
|
||||||
logger.info('Fetching organization:', { organizationId })
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
id: data.id ?? '',
|
|
||||||
name: data.name ?? '',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
|
return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Error in organization operation:', {
|
logger.error('Error in organization operation:', {
|
||||||
|
|||||||
@@ -1,190 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import {
|
|
||||||
validateAlphanumericId,
|
|
||||||
validateEnum,
|
|
||||||
validateJiraCloudId,
|
|
||||||
} from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmOrganizationUsersAPI')
|
|
||||||
|
|
||||||
const VALID_ACTIONS = ['get', 'add', 'remove'] as const
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: cloudIdParam,
|
|
||||||
action,
|
|
||||||
organizationId,
|
|
||||||
accountIds,
|
|
||||||
start,
|
|
||||||
limit,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!organizationId) {
|
|
||||||
logger.error('Missing organizationId in request')
|
|
||||||
return NextResponse.json({ error: 'Organization ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!action) {
|
|
||||||
logger.error('Missing action in request')
|
|
||||||
return NextResponse.json({ error: 'Action is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const actionValidation = validateEnum(action, VALID_ACTIONS, 'action')
|
|
||||||
if (!actionValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: actionValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const organizationIdValidation = validateAlphanumericId(organizationId, 'organizationId')
|
|
||||||
if (!organizationIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: organizationIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const url = `${baseUrl}/organization/${organizationId}/user`
|
|
||||||
|
|
||||||
if (action === 'get') {
|
|
||||||
const params = new URLSearchParams()
|
|
||||||
if (start) params.append('start', start)
|
|
||||||
if (limit) params.append('limit', limit)
|
|
||||||
|
|
||||||
const getUrl = `${url}${params.toString() ? `?${params.toString()}` : ''}`
|
|
||||||
|
|
||||||
logger.info('Fetching organization users from:', getUrl)
|
|
||||||
|
|
||||||
const response = await fetch(getUrl, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
organizationId,
|
|
||||||
users: data.values || [],
|
|
||||||
total: data.size || 0,
|
|
||||||
isLastPage: data.isLastPage ?? true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (action === 'add' || action === 'remove') {
|
|
||||||
if (!accountIds) {
|
|
||||||
logger.error('Missing accountIds in request')
|
|
||||||
return NextResponse.json({ error: 'Account IDs are required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const parsedAccountIds =
|
|
||||||
typeof accountIds === 'string'
|
|
||||||
? accountIds
|
|
||||||
.split(',')
|
|
||||||
.map((id: string) => id.trim())
|
|
||||||
.filter((id: string) => id)
|
|
||||||
: accountIds
|
|
||||||
|
|
||||||
logger.info(`${action === 'add' ? 'Adding' : 'Removing'} organization users:`, {
|
|
||||||
organizationId,
|
|
||||||
accountIds: parsedAccountIds,
|
|
||||||
})
|
|
||||||
|
|
||||||
const method = action === 'add' ? 'POST' : 'DELETE'
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method,
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
body: JSON.stringify({ accountIds: parsedAccountIds }),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (response.status === 204 || response.ok) {
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
organizationId,
|
|
||||||
success: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: `JSM API error: ${response.status} ${response.statusText}`,
|
|
||||||
details: errorText,
|
|
||||||
},
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({ error: 'Invalid action' }, { status: 400 })
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error in organization users operation:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -12,7 +12,7 @@ export const dynamic = 'force-dynamic'
|
|||||||
|
|
||||||
const logger = createLogger('JsmParticipantsAPI')
|
const logger = createLogger('JsmParticipantsAPI')
|
||||||
|
|
||||||
const VALID_ACTIONS = ['get', 'add', 'remove'] as const
|
const VALID_ACTIONS = ['get', 'add'] as const
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
const auth = await checkInternalAuth(request)
|
const auth = await checkInternalAuth(request)
|
||||||
@@ -113,7 +113,7 @@ export async function POST(request: NextRequest) {
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
if (action === 'add' || action === 'remove') {
|
if (action === 'add') {
|
||||||
if (!accountIds) {
|
if (!accountIds) {
|
||||||
logger.error('Missing accountIds in request')
|
logger.error('Missing accountIds in request')
|
||||||
return NextResponse.json({ error: 'Account IDs are required' }, { status: 400 })
|
return NextResponse.json({ error: 'Account IDs are required' }, { status: 400 })
|
||||||
@@ -128,19 +128,16 @@ export async function POST(request: NextRequest) {
|
|||||||
: accountIds
|
: accountIds
|
||||||
|
|
||||||
const url = `${baseUrl}/request/${issueIdOrKey}/participant`
|
const url = `${baseUrl}/request/${issueIdOrKey}/participant`
|
||||||
const method = action === 'add' ? 'POST' : 'DELETE'
|
|
||||||
|
|
||||||
logger.info(`${action === 'add' ? 'Adding' : 'Removing'} participants:`, url, {
|
logger.info('Adding participants to:', url, { accountIds: parsedAccountIds })
|
||||||
accountIds: parsedAccountIds,
|
|
||||||
})
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method,
|
method: 'POST',
|
||||||
headers: getJsmHeaders(accessToken),
|
headers: getJsmHeaders(accessToken),
|
||||||
body: JSON.stringify({ accountIds: parsedAccountIds }),
|
body: JSON.stringify({ accountIds: parsedAccountIds }),
|
||||||
})
|
})
|
||||||
|
|
||||||
if (!response.ok && response.status !== 204) {
|
if (!response.ok) {
|
||||||
const errorText = await response.text()
|
const errorText = await response.text()
|
||||||
logger.error('JSM API error:', {
|
logger.error('JSM API error:', {
|
||||||
status: response.status,
|
status: response.status,
|
||||||
@@ -154,22 +151,14 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
let participants: unknown[] = []
|
const data = await response.json()
|
||||||
if (response.status !== 204) {
|
|
||||||
try {
|
|
||||||
const data = await response.json()
|
|
||||||
participants = data.values || []
|
|
||||||
} catch {
|
|
||||||
// DELETE may return empty body
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
success: true,
|
success: true,
|
||||||
output: {
|
output: {
|
||||||
ts: new Date().toISOString(),
|
ts: new Date().toISOString(),
|
||||||
issueIdOrKey,
|
issueIdOrKey,
|
||||||
participants,
|
participants: data.values || [],
|
||||||
success: true,
|
success: true,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,121 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmQueueIssuesAPI')
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const {
|
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: cloudIdParam,
|
|
||||||
serviceDeskId,
|
|
||||||
queueId,
|
|
||||||
start,
|
|
||||||
limit,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!serviceDeskId) {
|
|
||||||
logger.error('Missing serviceDeskId in request')
|
|
||||||
return NextResponse.json({ error: 'Service Desk ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!queueId) {
|
|
||||||
logger.error('Missing queueId in request')
|
|
||||||
return NextResponse.json({ error: 'Queue ID is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const serviceDeskIdValidation = validateAlphanumericId(serviceDeskId, 'serviceDeskId')
|
|
||||||
if (!serviceDeskIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: serviceDeskIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const queueIdValidation = validateAlphanumericId(queueId, 'queueId')
|
|
||||||
if (!queueIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: queueIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const params = new URLSearchParams()
|
|
||||||
if (start) params.append('start', start)
|
|
||||||
if (limit) params.append('limit', limit)
|
|
||||||
|
|
||||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}/queue/${queueId}/issue${params.toString() ? `?${params.toString()}` : ''}`
|
|
||||||
|
|
||||||
logger.info('Fetching queue issues from:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
serviceDeskId,
|
|
||||||
queueId,
|
|
||||||
issues: data.values || [],
|
|
||||||
total: data.size || 0,
|
|
||||||
isLastPage: data.isLastPage ?? true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error fetching queue issues:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,102 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
|
||||||
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
|
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
|
||||||
|
|
||||||
const logger = createLogger('JsmRequestStatusAPI')
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
|
||||||
const auth = await checkInternalAuth(request)
|
|
||||||
if (!auth.success || !auth.userId) {
|
|
||||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const body = await request.json()
|
|
||||||
const { domain, accessToken, cloudId: cloudIdParam, issueIdOrKey, start, limit } = body
|
|
||||||
|
|
||||||
if (!domain) {
|
|
||||||
logger.error('Missing domain in request')
|
|
||||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!accessToken) {
|
|
||||||
logger.error('Missing access token in request')
|
|
||||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!issueIdOrKey) {
|
|
||||||
logger.error('Missing issueIdOrKey in request')
|
|
||||||
return NextResponse.json({ error: 'Issue ID or key is required' }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const issueIdOrKeyValidation = validateJiraIssueKey(issueIdOrKey, 'issueIdOrKey')
|
|
||||||
if (!issueIdOrKeyValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: issueIdOrKeyValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const cloudId = cloudIdParam || (await getJiraCloudId(domain, accessToken))
|
|
||||||
|
|
||||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
|
||||||
if (!cloudIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
|
||||||
const params = new URLSearchParams()
|
|
||||||
if (start) params.append('start', start)
|
|
||||||
if (limit) params.append('limit', limit)
|
|
||||||
|
|
||||||
const url = `${baseUrl}/request/${issueIdOrKey}/status${params.toString() ? `?${params.toString()}` : ''}`
|
|
||||||
|
|
||||||
logger.info('Fetching request status history from:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
issueIdOrKey,
|
|
||||||
statuses: data.values || [],
|
|
||||||
total: data.size || 0,
|
|
||||||
isLastPage: data.isLastPage ?? true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error fetching request status:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error: error instanceof Error ? error.message : 'Internal server error',
|
|
||||||
success: false,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
import { validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||||
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -16,16 +16,7 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const {
|
const { domain, accessToken, cloudId: cloudIdParam, expand, start, limit } = body
|
||||||
domain,
|
|
||||||
accessToken,
|
|
||||||
cloudId: cloudIdParam,
|
|
||||||
expand,
|
|
||||||
start,
|
|
||||||
limit,
|
|
||||||
serviceDeskId,
|
|
||||||
action,
|
|
||||||
} = body
|
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
logger.error('Missing domain in request')
|
logger.error('Missing domain in request')
|
||||||
@@ -46,52 +37,6 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const baseUrl = getJsmApiBaseUrl(cloudId)
|
const baseUrl = getJsmApiBaseUrl(cloudId)
|
||||||
|
|
||||||
if (action === 'get' && serviceDeskId) {
|
|
||||||
const serviceDeskIdValidation = validateAlphanumericId(serviceDeskId, 'serviceDeskId')
|
|
||||||
if (!serviceDeskIdValidation.isValid) {
|
|
||||||
return NextResponse.json({ error: serviceDeskIdValidation.error }, { status: 400 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `${baseUrl}/servicedesk/${serviceDeskId}`
|
|
||||||
|
|
||||||
logger.info('Fetching service desk:', url)
|
|
||||||
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'GET',
|
|
||||||
headers: getJsmHeaders(accessToken),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorText = await response.text()
|
|
||||||
logger.error('JSM API error:', {
|
|
||||||
status: response.status,
|
|
||||||
statusText: response.statusText,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `JSM API error: ${response.status} ${response.statusText}`, details: errorText },
|
|
||||||
{ status: response.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
id: data.id ?? '',
|
|
||||||
projectId: data.projectId ?? '',
|
|
||||||
projectName: data.projectName ?? '',
|
|
||||||
projectKey: data.projectKey ?? '',
|
|
||||||
name: data.projectName ?? '',
|
|
||||||
description: data.description ?? null,
|
|
||||||
leadDisplayName: data.leadDisplayName ?? null,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const params = new URLSearchParams()
|
const params = new URLSearchParams()
|
||||||
if (expand) params.append('expand', expand)
|
if (expand) params.append('expand', expand)
|
||||||
if (start) params.append('start', start)
|
if (start) params.append('start', start)
|
||||||
|
|||||||
@@ -14,6 +14,14 @@ const logger = createLogger('DiffControls')
|
|||||||
const NOTIFICATION_WIDTH = 240
|
const NOTIFICATION_WIDTH = 240
|
||||||
const NOTIFICATION_GAP = 16
|
const NOTIFICATION_GAP = 16
|
||||||
|
|
||||||
|
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
||||||
|
if (name !== 'workflow_change') return false
|
||||||
|
|
||||||
|
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||||
|
if (mode === 'apply') return true
|
||||||
|
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||||
|
}
|
||||||
|
|
||||||
export const DiffControls = memo(function DiffControls() {
|
export const DiffControls = memo(function DiffControls() {
|
||||||
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
|
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
|
||||||
const isPanelResizing = usePanelStore((state) => state.isResizing)
|
const isPanelResizing = usePanelStore((state) => state.isResizing)
|
||||||
@@ -64,7 +72,7 @@ export const DiffControls = memo(function DiffControls() {
|
|||||||
const b = blocks[bi]
|
const b = blocks[bi]
|
||||||
if (b?.type === 'tool_call') {
|
if (b?.type === 'tool_call') {
|
||||||
const tn = b.toolCall?.name
|
const tn = b.toolCall?.name
|
||||||
if (tn === 'edit_workflow') {
|
if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
|
||||||
id = b.toolCall?.id
|
id = b.toolCall?.id
|
||||||
break outer
|
break outer
|
||||||
}
|
}
|
||||||
@@ -72,7 +80,9 @@ export const DiffControls = memo(function DiffControls() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!id) {
|
if (!id) {
|
||||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
const candidates = Object.values(toolCallsById).filter((t) =>
|
||||||
|
isWorkflowEditToolCall(t.name, t.params)
|
||||||
|
)
|
||||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||||
}
|
}
|
||||||
if (id) updatePreviewToolCallState('accepted', id)
|
if (id) updatePreviewToolCallState('accepted', id)
|
||||||
@@ -102,7 +112,7 @@ export const DiffControls = memo(function DiffControls() {
|
|||||||
const b = blocks[bi]
|
const b = blocks[bi]
|
||||||
if (b?.type === 'tool_call') {
|
if (b?.type === 'tool_call') {
|
||||||
const tn = b.toolCall?.name
|
const tn = b.toolCall?.name
|
||||||
if (tn === 'edit_workflow') {
|
if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
|
||||||
id = b.toolCall?.id
|
id = b.toolCall?.id
|
||||||
break outer
|
break outer
|
||||||
}
|
}
|
||||||
@@ -110,7 +120,9 @@ export const DiffControls = memo(function DiffControls() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!id) {
|
if (!id) {
|
||||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
const candidates = Object.values(toolCallsById).filter((t) =>
|
||||||
|
isWorkflowEditToolCall(t.name, t.params)
|
||||||
|
)
|
||||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||||
}
|
}
|
||||||
if (id) updatePreviewToolCallState('rejected', id)
|
if (id) updatePreviewToolCallState('rejected', id)
|
||||||
|
|||||||
@@ -47,6 +47,27 @@ interface ParsedTags {
|
|||||||
cleanContent: string
|
cleanContent: string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getToolCallParams(toolCall?: CopilotToolCall): Record<string, unknown> {
|
||||||
|
const candidate = ((toolCall as any)?.parameters ||
|
||||||
|
(toolCall as any)?.input ||
|
||||||
|
(toolCall as any)?.params ||
|
||||||
|
{}) as Record<string, unknown>
|
||||||
|
return candidate && typeof candidate === 'object' ? candidate : {}
|
||||||
|
}
|
||||||
|
|
||||||
|
function isWorkflowChangeApplyMode(toolCall?: CopilotToolCall): boolean {
|
||||||
|
if (!toolCall || toolCall.name !== 'workflow_change') return false
|
||||||
|
const params = getToolCallParams(toolCall)
|
||||||
|
const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||||
|
if (mode === 'apply') return true
|
||||||
|
return typeof params.proposalId === 'string' && params.proposalId.length > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
function isWorkflowEditSummaryTool(toolCall?: CopilotToolCall): boolean {
|
||||||
|
if (!toolCall) return false
|
||||||
|
return isWorkflowChangeApplyMode(toolCall)
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extracts plan steps from plan_respond tool calls in subagent blocks.
|
* Extracts plan steps from plan_respond tool calls in subagent blocks.
|
||||||
* @param blocks - The subagent content blocks to search
|
* @param blocks - The subagent content blocks to search
|
||||||
@@ -871,7 +892,10 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
if (segment.type === 'tool' && segment.block.toolCall) {
|
if (segment.type === 'tool' && segment.block.toolCall) {
|
||||||
if (toolCall.name === 'edit' && segment.block.toolCall.name === 'edit_workflow') {
|
if (
|
||||||
|
(toolCall.name === 'edit' || toolCall.name === 'build') &&
|
||||||
|
isWorkflowEditSummaryTool(segment.block.toolCall)
|
||||||
|
) {
|
||||||
return (
|
return (
|
||||||
<div key={`tool-${segment.block.toolCall.id || index}`}>
|
<div key={`tool-${segment.block.toolCall.id || index}`}>
|
||||||
<WorkflowEditSummary toolCall={segment.block.toolCall} />
|
<WorkflowEditSummary toolCall={segment.block.toolCall} />
|
||||||
@@ -968,12 +992,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
|||||||
}
|
}
|
||||||
}, [blocks])
|
}, [blocks])
|
||||||
|
|
||||||
if (toolCall.name !== 'edit_workflow') {
|
if (!isWorkflowEditSummaryTool(toolCall)) {
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
|
|
||||||
const params =
|
const params = getToolCallParams(toolCall)
|
||||||
(toolCall as any).parameters || (toolCall as any).input || (toolCall as any).params || {}
|
|
||||||
let operations = Array.isArray(params.operations) ? params.operations : []
|
let operations = Array.isArray(params.operations) ? params.operations : []
|
||||||
|
|
||||||
if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
|
if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
|
||||||
@@ -1219,11 +1242,6 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
|||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
/** Checks if a tool is server-side executed (not a client tool) */
|
|
||||||
function isIntegrationTool(toolName: string): boolean {
|
|
||||||
return !TOOL_DISPLAY_REGISTRY[toolName]
|
|
||||||
}
|
|
||||||
|
|
||||||
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||||
if (!toolCall.name || toolCall.name === 'unknown_tool') {
|
if (!toolCall.name || toolCall.name === 'unknown_tool') {
|
||||||
return false
|
return false
|
||||||
@@ -1233,59 +1251,96 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// Never show buttons for tools the user has marked as always-allowed
|
if (toolCall.ui?.showInterrupt !== true) {
|
||||||
if (useCopilotStore.getState().isToolAutoAllowed(toolCall.name)) {
|
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
const hasInterrupt = !!TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt
|
return true
|
||||||
if (hasInterrupt) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Integration tools (user-installed) always require approval
|
|
||||||
if (isIntegrationTool(toolCall.name)) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
return false
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const toolCallLogger = createLogger('CopilotToolCall')
|
const toolCallLogger = createLogger('CopilotToolCall')
|
||||||
|
|
||||||
async function sendToolDecision(
|
async function sendToolDecision(
|
||||||
toolCallId: string,
|
toolCallId: string,
|
||||||
status: 'accepted' | 'rejected' | 'background'
|
status: 'accepted' | 'rejected' | 'background',
|
||||||
|
options?: {
|
||||||
|
toolName?: string
|
||||||
|
remember?: boolean
|
||||||
|
}
|
||||||
) {
|
) {
|
||||||
try {
|
try {
|
||||||
await fetch('/api/copilot/confirm', {
|
await fetch('/api/copilot/confirm', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
body: JSON.stringify({ toolCallId, status }),
|
body: JSON.stringify({
|
||||||
|
toolCallId,
|
||||||
|
status,
|
||||||
|
...(options?.toolName ? { toolName: options.toolName } : {}),
|
||||||
|
...(options?.remember ? { remember: true } : {}),
|
||||||
|
}),
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toolCallLogger.warn('Failed to send tool decision', {
|
toolCallLogger.warn('Failed to send tool decision', {
|
||||||
toolCallId,
|
toolCallId,
|
||||||
status,
|
status,
|
||||||
|
remember: options?.remember === true,
|
||||||
|
toolName: options?.toolName,
|
||||||
error: error instanceof Error ? error.message : String(error),
|
error: error instanceof Error ? error.message : String(error),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function removeAutoAllowedToolPreference(toolName: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolName)}`, {
|
||||||
|
method: 'DELETE',
|
||||||
|
})
|
||||||
|
return response.ok
|
||||||
|
} catch (error) {
|
||||||
|
toolCallLogger.warn('Failed to remove auto-allowed tool preference', {
|
||||||
|
toolName,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type ToolUiAction = NonNullable<NonNullable<CopilotToolCall['ui']>['actions']>[number]
|
||||||
|
|
||||||
|
function actionDecision(action: ToolUiAction): 'accepted' | 'rejected' | 'background' {
|
||||||
|
const id = action.id.toLowerCase()
|
||||||
|
if (id.includes('background')) return 'background'
|
||||||
|
if (action.kind === 'reject') return 'rejected'
|
||||||
|
return 'accepted'
|
||||||
|
}
|
||||||
|
|
||||||
|
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
|
||||||
|
if (toolCall.execution?.target === 'sim_client_capability') {
|
||||||
|
return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
|
||||||
|
}
|
||||||
|
return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
|
||||||
|
}
|
||||||
|
|
||||||
async function handleRun(
|
async function handleRun(
|
||||||
toolCall: CopilotToolCall,
|
toolCall: CopilotToolCall,
|
||||||
setToolCallState: any,
|
setToolCallState: any,
|
||||||
onStateChange?: any,
|
onStateChange?: any,
|
||||||
editedParams?: any
|
editedParams?: any,
|
||||||
|
options?: {
|
||||||
|
remember?: boolean
|
||||||
|
}
|
||||||
) {
|
) {
|
||||||
setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
|
setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
|
||||||
onStateChange?.('executing')
|
onStateChange?.('executing')
|
||||||
await sendToolDecision(toolCall.id, 'accepted')
|
await sendToolDecision(toolCall.id, 'accepted', {
|
||||||
|
toolName: toolCall.name,
|
||||||
|
remember: options?.remember === true,
|
||||||
|
})
|
||||||
|
|
||||||
// Client-executable run tools: execute on the client for real-time feedback
|
// Client-executable run tools: execute on the client for real-time feedback
|
||||||
// (block pulsing, console logs, stop button). The server defers execution
|
// (block pulsing, console logs, stop button). The server defers execution
|
||||||
// for these tools; the client reports back via mark-complete.
|
// for these tools; the client reports back via mark-complete.
|
||||||
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)) {
|
if (isClientRunCapability(toolCall)) {
|
||||||
const params = editedParams || toolCall.params || {}
|
const params = editedParams || toolCall.params || {}
|
||||||
executeRunToolOnClient(toolCall.id, toolCall.name, params)
|
executeRunToolOnClient(toolCall.id, toolCall.name, params)
|
||||||
}
|
}
|
||||||
@@ -1298,6 +1353,9 @@ async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onSt
|
|||||||
}
|
}
|
||||||
|
|
||||||
function getDisplayName(toolCall: CopilotToolCall): string {
|
function getDisplayName(toolCall: CopilotToolCall): string {
|
||||||
|
if (toolCall.ui?.phaseLabel) return toolCall.ui.phaseLabel
|
||||||
|
if (toolCall.ui?.title) return `${getStateVerb(toolCall.state)} ${toolCall.ui.title}`
|
||||||
|
|
||||||
const fromStore = (toolCall as any).display?.text
|
const fromStore = (toolCall as any).display?.text
|
||||||
if (fromStore) return fromStore
|
if (fromStore) return fromStore
|
||||||
const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
|
const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||||
@@ -1342,53 +1400,37 @@ function RunSkipButtons({
|
|||||||
toolCall,
|
toolCall,
|
||||||
onStateChange,
|
onStateChange,
|
||||||
editedParams,
|
editedParams,
|
||||||
|
actions,
|
||||||
}: {
|
}: {
|
||||||
toolCall: CopilotToolCall
|
toolCall: CopilotToolCall
|
||||||
onStateChange?: (state: any) => void
|
onStateChange?: (state: any) => void
|
||||||
editedParams?: any
|
editedParams?: any
|
||||||
|
actions: ToolUiAction[]
|
||||||
}) {
|
}) {
|
||||||
const [isProcessing, setIsProcessing] = useState(false)
|
const [isProcessing, setIsProcessing] = useState(false)
|
||||||
const [buttonsHidden, setButtonsHidden] = useState(false)
|
const [buttonsHidden, setButtonsHidden] = useState(false)
|
||||||
const actionInProgressRef = useRef(false)
|
const actionInProgressRef = useRef(false)
|
||||||
const { setToolCallState, addAutoAllowedTool } = useCopilotStore()
|
const { setToolCallState } = useCopilotStore()
|
||||||
|
|
||||||
const onRun = async () => {
|
const onAction = async (action: ToolUiAction) => {
|
||||||
// Prevent race condition - check ref synchronously
|
// Prevent race condition - check ref synchronously
|
||||||
if (actionInProgressRef.current) return
|
if (actionInProgressRef.current) return
|
||||||
actionInProgressRef.current = true
|
actionInProgressRef.current = true
|
||||||
setIsProcessing(true)
|
setIsProcessing(true)
|
||||||
setButtonsHidden(true)
|
setButtonsHidden(true)
|
||||||
try {
|
try {
|
||||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
const decision = actionDecision(action)
|
||||||
} finally {
|
if (decision === 'accepted') {
|
||||||
setIsProcessing(false)
|
await handleRun(toolCall, setToolCallState, onStateChange, editedParams, {
|
||||||
actionInProgressRef.current = false
|
remember: action.remember === true,
|
||||||
}
|
})
|
||||||
}
|
} else if (decision === 'rejected') {
|
||||||
|
await handleSkip(toolCall, setToolCallState, onStateChange)
|
||||||
const onAlwaysAllow = async () => {
|
} else {
|
||||||
// Prevent race condition - check ref synchronously
|
setToolCallState(toolCall, ClientToolCallState.background)
|
||||||
if (actionInProgressRef.current) return
|
onStateChange?.('background')
|
||||||
actionInProgressRef.current = true
|
await sendToolDecision(toolCall.id, 'background')
|
||||||
setIsProcessing(true)
|
}
|
||||||
setButtonsHidden(true)
|
|
||||||
try {
|
|
||||||
await addAutoAllowedTool(toolCall.name)
|
|
||||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
|
||||||
} finally {
|
|
||||||
setIsProcessing(false)
|
|
||||||
actionInProgressRef.current = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const onSkip = async () => {
|
|
||||||
// Prevent race condition - check ref synchronously
|
|
||||||
if (actionInProgressRef.current) return
|
|
||||||
actionInProgressRef.current = true
|
|
||||||
setIsProcessing(true)
|
|
||||||
setButtonsHidden(true)
|
|
||||||
try {
|
|
||||||
await handleSkip(toolCall, setToolCallState, onStateChange)
|
|
||||||
} finally {
|
} finally {
|
||||||
setIsProcessing(false)
|
setIsProcessing(false)
|
||||||
actionInProgressRef.current = false
|
actionInProgressRef.current = false
|
||||||
@@ -1397,23 +1439,22 @@ function RunSkipButtons({
|
|||||||
|
|
||||||
if (buttonsHidden) return null
|
if (buttonsHidden) return null
|
||||||
|
|
||||||
// Show "Always Allow" for all tools that require confirmation
|
|
||||||
const showAlwaysAllow = true
|
|
||||||
|
|
||||||
// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
|
|
||||||
return (
|
return (
|
||||||
<div className='mt-[10px] flex gap-[6px]'>
|
<div className='mt-[10px] flex gap-[6px]'>
|
||||||
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
|
{actions.map((action, index) => {
|
||||||
{isProcessing ? 'Allowing...' : 'Allow'}
|
const variant =
|
||||||
</Button>
|
action.kind === 'reject' ? 'default' : action.remember ? 'default' : 'tertiary'
|
||||||
{showAlwaysAllow && (
|
return (
|
||||||
<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
|
<Button
|
||||||
{isProcessing ? 'Allowing...' : 'Always Allow'}
|
key={action.id}
|
||||||
</Button>
|
onClick={() => onAction(action)}
|
||||||
)}
|
disabled={isProcessing}
|
||||||
<Button onClick={onSkip} disabled={isProcessing} variant='default'>
|
variant={variant}
|
||||||
Skip
|
>
|
||||||
</Button>
|
{isProcessing && index === 0 ? 'Working...' : action.label}
|
||||||
|
</Button>
|
||||||
|
)
|
||||||
|
})}
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -1430,10 +1471,16 @@ export function ToolCall({
|
|||||||
const liveToolCall = useCopilotStore((s) =>
|
const liveToolCall = useCopilotStore((s) =>
|
||||||
effectiveId ? s.toolCallsById[effectiveId] : undefined
|
effectiveId ? s.toolCallsById[effectiveId] : undefined
|
||||||
)
|
)
|
||||||
const toolCall = liveToolCall || toolCallProp
|
const rawToolCall = liveToolCall || toolCallProp
|
||||||
|
const hasRealToolCall = !!rawToolCall
|
||||||
// Guard: nothing to render without a toolCall
|
const toolCall: CopilotToolCall =
|
||||||
if (!toolCall) return null
|
rawToolCall ||
|
||||||
|
({
|
||||||
|
id: effectiveId || '',
|
||||||
|
name: '',
|
||||||
|
state: ClientToolCallState.generating,
|
||||||
|
params: {},
|
||||||
|
} as CopilotToolCall)
|
||||||
|
|
||||||
const isExpandablePending =
|
const isExpandablePending =
|
||||||
toolCall?.state === 'pending' &&
|
toolCall?.state === 'pending' &&
|
||||||
@@ -1441,17 +1488,15 @@ export function ToolCall({
|
|||||||
|
|
||||||
const [expanded, setExpanded] = useState(isExpandablePending)
|
const [expanded, setExpanded] = useState(isExpandablePending)
|
||||||
const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
|
const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
|
||||||
|
const [autoAllowRemovedForCall, setAutoAllowRemovedForCall] = useState(false)
|
||||||
|
|
||||||
// State for editable parameters
|
// State for editable parameters
|
||||||
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
|
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
|
||||||
const [editedParams, setEditedParams] = useState(params)
|
const [editedParams, setEditedParams] = useState(params)
|
||||||
const paramsRef = useRef(params)
|
const paramsRef = useRef(params)
|
||||||
|
|
||||||
// Check if this integration tool is auto-allowed
|
const { setToolCallState } = useCopilotStore()
|
||||||
const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
|
const isAutoAllowed = toolCall.ui?.autoAllowed === true && !autoAllowRemovedForCall
|
||||||
const isAutoAllowed = useCopilotStore(
|
|
||||||
(s) => isIntegrationTool(toolCall.name) && s.isToolAutoAllowed(toolCall.name)
|
|
||||||
)
|
|
||||||
|
|
||||||
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -1461,6 +1506,14 @@ export function ToolCall({
|
|||||||
}
|
}
|
||||||
}, [params])
|
}, [params])
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
setAutoAllowRemovedForCall(false)
|
||||||
|
setShowRemoveAutoAllow(false)
|
||||||
|
}, [toolCall.id])
|
||||||
|
|
||||||
|
// Guard: nothing to render without a toolCall
|
||||||
|
if (!hasRealToolCall) return null
|
||||||
|
|
||||||
// Skip rendering some internal tools
|
// Skip rendering some internal tools
|
||||||
if (
|
if (
|
||||||
toolCall.name === 'checkoff_todo' ||
|
toolCall.name === 'checkoff_todo' ||
|
||||||
@@ -1472,7 +1525,9 @@ export function ToolCall({
|
|||||||
return null
|
return null
|
||||||
|
|
||||||
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
||||||
const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
const isSubagentTool =
|
||||||
|
toolCall.execution?.target === 'go_subagent' ||
|
||||||
|
TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
||||||
|
|
||||||
// For ALL subagent tools, don't show anything until we have blocks with content
|
// For ALL subagent tools, don't show anything until we have blocks with content
|
||||||
if (isSubagentTool) {
|
if (isSubagentTool) {
|
||||||
@@ -1499,28 +1554,6 @@ export function ToolCall({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get current mode from store to determine if we should render integration tools
|
|
||||||
const mode = useCopilotStore.getState().mode
|
|
||||||
|
|
||||||
// Check if this is a completed/historical tool call (not pending/executing)
|
|
||||||
// Use string comparison to handle both enum values and string values from DB
|
|
||||||
const stateStr = String(toolCall.state)
|
|
||||||
const isCompletedToolCall =
|
|
||||||
stateStr === 'success' ||
|
|
||||||
stateStr === 'error' ||
|
|
||||||
stateStr === 'rejected' ||
|
|
||||||
stateStr === 'aborted'
|
|
||||||
|
|
||||||
// Allow rendering if:
|
|
||||||
// 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
|
|
||||||
// 2. We're in build mode (integration tools are executed server-side), OR
|
|
||||||
// 3. Tool call is already completed (historical - should always render)
|
|
||||||
const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
|
|
||||||
const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
|
|
||||||
|
|
||||||
if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
|
const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
|
||||||
// Check if tool has params table config (meaning it's expandable)
|
// Check if tool has params table config (meaning it's expandable)
|
||||||
const hasParamsTable = !!toolUIConfig?.paramsTable
|
const hasParamsTable = !!toolUIConfig?.paramsTable
|
||||||
@@ -1530,6 +1563,14 @@ export function ToolCall({
|
|||||||
toolCall.name === 'make_api_request' ||
|
toolCall.name === 'make_api_request' ||
|
||||||
toolCall.name === 'set_global_workflow_variables'
|
toolCall.name === 'set_global_workflow_variables'
|
||||||
|
|
||||||
|
const interruptActions =
|
||||||
|
(toolCall.ui?.actions && toolCall.ui.actions.length > 0
|
||||||
|
? toolCall.ui.actions
|
||||||
|
: [
|
||||||
|
{ id: 'allow_once', label: 'Allow', kind: 'accept' as const },
|
||||||
|
{ id: 'allow_always', label: 'Always Allow', kind: 'accept' as const, remember: true },
|
||||||
|
{ id: 'reject', label: 'Skip', kind: 'reject' as const },
|
||||||
|
]) as ToolUiAction[]
|
||||||
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
||||||
|
|
||||||
// Check UI config for secondary action - only show for current message tool calls
|
// Check UI config for secondary action - only show for current message tool calls
|
||||||
@@ -1987,9 +2028,12 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
await removeAutoAllowedTool(toolCall.name)
|
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
||||||
setShowRemoveAutoAllow(false)
|
if (removed) {
|
||||||
forceUpdate({})
|
setAutoAllowRemovedForCall(true)
|
||||||
|
setShowRemoveAutoAllow(false)
|
||||||
|
forceUpdate({})
|
||||||
|
}
|
||||||
}}
|
}}
|
||||||
variant='default'
|
variant='default'
|
||||||
className='text-xs'
|
className='text-xs'
|
||||||
@@ -2003,6 +2047,7 @@ export function ToolCall({
|
|||||||
toolCall={toolCall}
|
toolCall={toolCall}
|
||||||
onStateChange={handleStateChange}
|
onStateChange={handleStateChange}
|
||||||
editedParams={editedParams}
|
editedParams={editedParams}
|
||||||
|
actions={interruptActions}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
{/* Render subagent content as thinking text */}
|
{/* Render subagent content as thinking text */}
|
||||||
@@ -2048,9 +2093,12 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
await removeAutoAllowedTool(toolCall.name)
|
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
||||||
setShowRemoveAutoAllow(false)
|
if (removed) {
|
||||||
forceUpdate({})
|
setAutoAllowRemovedForCall(true)
|
||||||
|
setShowRemoveAutoAllow(false)
|
||||||
|
forceUpdate({})
|
||||||
|
}
|
||||||
}}
|
}}
|
||||||
variant='default'
|
variant='default'
|
||||||
className='text-xs'
|
className='text-xs'
|
||||||
@@ -2064,6 +2112,7 @@ export function ToolCall({
|
|||||||
toolCall={toolCall}
|
toolCall={toolCall}
|
||||||
onStateChange={handleStateChange}
|
onStateChange={handleStateChange}
|
||||||
editedParams={editedParams}
|
editedParams={editedParams}
|
||||||
|
actions={interruptActions}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
{/* Render subagent content as thinking text */}
|
{/* Render subagent content as thinking text */}
|
||||||
@@ -2087,7 +2136,7 @@ export function ToolCall({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const isEditWorkflow = toolCall.name === 'edit_workflow'
|
const isEditWorkflow = isWorkflowEditSummaryTool(toolCall)
|
||||||
const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
|
const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
|
||||||
const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
||||||
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
||||||
@@ -2109,9 +2158,12 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
await removeAutoAllowedTool(toolCall.name)
|
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
||||||
setShowRemoveAutoAllow(false)
|
if (removed) {
|
||||||
forceUpdate({})
|
setAutoAllowRemovedForCall(true)
|
||||||
|
setShowRemoveAutoAllow(false)
|
||||||
|
forceUpdate({})
|
||||||
|
}
|
||||||
}}
|
}}
|
||||||
variant='default'
|
variant='default'
|
||||||
className='text-xs'
|
className='text-xs'
|
||||||
@@ -2125,6 +2177,7 @@ export function ToolCall({
|
|||||||
toolCall={toolCall}
|
toolCall={toolCall}
|
||||||
onStateChange={handleStateChange}
|
onStateChange={handleStateChange}
|
||||||
editedParams={editedParams}
|
editedParams={editedParams}
|
||||||
|
actions={interruptActions}
|
||||||
/>
|
/>
|
||||||
) : showMoveToBackground ? (
|
) : showMoveToBackground ? (
|
||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
@@ -2155,7 +2208,7 @@ export function ToolCall({
|
|||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
) : null}
|
) : null}
|
||||||
{/* Workflow edit summary - shows block changes after edit_workflow completes */}
|
{/* Workflow edit summary - shows block changes after workflow_change(apply) */}
|
||||||
<WorkflowEditSummary toolCall={toolCall} />
|
<WorkflowEditSummary toolCall={toolCall} />
|
||||||
|
|
||||||
{/* Render subagent content as thinking text */}
|
{/* Render subagent content as thinking text */}
|
||||||
|
|||||||
@@ -113,7 +113,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
clearPlanArtifact,
|
clearPlanArtifact,
|
||||||
savePlanArtifact,
|
savePlanArtifact,
|
||||||
loadAvailableModels,
|
loadAvailableModels,
|
||||||
loadAutoAllowedTools,
|
|
||||||
resumeActiveStream,
|
resumeActiveStream,
|
||||||
} = useCopilotStore()
|
} = useCopilotStore()
|
||||||
|
|
||||||
@@ -125,8 +124,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
setCopilotWorkflowId,
|
setCopilotWorkflowId,
|
||||||
loadChats,
|
loadChats,
|
||||||
loadAvailableModels,
|
loadAvailableModels,
|
||||||
loadAutoAllowedTools,
|
|
||||||
currentChat,
|
|
||||||
isSendingMessage,
|
isSendingMessage,
|
||||||
resumeActiveStream,
|
resumeActiveStream,
|
||||||
})
|
})
|
||||||
@@ -154,6 +151,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
planTodos,
|
planTodos,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
const renderedChatTitle = currentChat?.title || 'New Chat'
|
||||||
|
|
||||||
/** Gets markdown content for design document section (available in all modes once created) */
|
/** Gets markdown content for design document section (available in all modes once created) */
|
||||||
const designDocumentContent = useMemo(() => {
|
const designDocumentContent = useMemo(() => {
|
||||||
if (streamingPlanContent) {
|
if (streamingPlanContent) {
|
||||||
@@ -166,6 +165,14 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
return ''
|
return ''
|
||||||
}, [streamingPlanContent])
|
}, [streamingPlanContent])
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
logger.info('[TitleRender] Copilot header title changed', {
|
||||||
|
currentChatId: currentChat?.id || null,
|
||||||
|
currentChatTitle: currentChat?.title || null,
|
||||||
|
renderedTitle: renderedChatTitle,
|
||||||
|
})
|
||||||
|
}, [currentChat?.id, currentChat?.title, renderedChatTitle])
|
||||||
|
|
||||||
/** Focuses the copilot input */
|
/** Focuses the copilot input */
|
||||||
const focusInput = useCallback(() => {
|
const focusInput = useCallback(() => {
|
||||||
userInputRef.current?.focus()
|
userInputRef.current?.focus()
|
||||||
@@ -348,7 +355,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
{/* Header */}
|
{/* Header */}
|
||||||
<div className='mx-[-1px] flex flex-shrink-0 items-center justify-between gap-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] px-[12px] py-[6px]'>
|
<div className='mx-[-1px] flex flex-shrink-0 items-center justify-between gap-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] px-[12px] py-[6px]'>
|
||||||
<h2 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
|
<h2 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
|
||||||
{currentChat?.title || 'New Chat'}
|
{renderedChatTitle}
|
||||||
</h2>
|
</h2>
|
||||||
<div className='flex items-center gap-[8px]'>
|
<div className='flex items-center gap-[8px]'>
|
||||||
<Button variant='ghost' className='p-0' onClick={handleStartNewChat}>
|
<Button variant='ghost' className='p-0' onClick={handleStartNewChat}>
|
||||||
|
|||||||
@@ -12,8 +12,6 @@ interface UseCopilotInitializationProps {
|
|||||||
setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
|
setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
|
||||||
loadChats: (forceRefresh?: boolean) => Promise<void>
|
loadChats: (forceRefresh?: boolean) => Promise<void>
|
||||||
loadAvailableModels: () => Promise<void>
|
loadAvailableModels: () => Promise<void>
|
||||||
loadAutoAllowedTools: () => Promise<void>
|
|
||||||
currentChat: any
|
|
||||||
isSendingMessage: boolean
|
isSendingMessage: boolean
|
||||||
resumeActiveStream: () => Promise<boolean>
|
resumeActiveStream: () => Promise<boolean>
|
||||||
}
|
}
|
||||||
@@ -32,8 +30,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
|||||||
setCopilotWorkflowId,
|
setCopilotWorkflowId,
|
||||||
loadChats,
|
loadChats,
|
||||||
loadAvailableModels,
|
loadAvailableModels,
|
||||||
loadAutoAllowedTools,
|
|
||||||
currentChat,
|
|
||||||
isSendingMessage,
|
isSendingMessage,
|
||||||
resumeActiveStream,
|
resumeActiveStream,
|
||||||
} = props
|
} = props
|
||||||
@@ -120,17 +116,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
|||||||
})
|
})
|
||||||
}, [isSendingMessage, resumeActiveStream])
|
}, [isSendingMessage, resumeActiveStream])
|
||||||
|
|
||||||
/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
|
|
||||||
const hasLoadedAutoAllowedToolsRef = useRef(false)
|
|
||||||
useEffect(() => {
|
|
||||||
if (!hasLoadedAutoAllowedToolsRef.current) {
|
|
||||||
hasLoadedAutoAllowedToolsRef.current = true
|
|
||||||
loadAutoAllowedTools().catch((err) => {
|
|
||||||
logger.warn('[Copilot] Failed to load auto-allowed tools', err)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}, [loadAutoAllowedTools])
|
|
||||||
|
|
||||||
/** Load available models once on mount */
|
/** Load available models once on mount */
|
||||||
const hasLoadedModelsRef = useRef(false)
|
const hasLoadedModelsRef = useRef(false)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
|
|||||||
@@ -57,21 +57,6 @@ export function useChangeDetection({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (block.triggerMode) {
|
|
||||||
const triggerConfigValue = blockSubValues?.triggerConfig
|
|
||||||
if (
|
|
||||||
triggerConfigValue &&
|
|
||||||
typeof triggerConfigValue === 'object' &&
|
|
||||||
!subBlocks.triggerConfig
|
|
||||||
) {
|
|
||||||
subBlocks.triggerConfig = {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: triggerConfigValue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
blocksWithSubBlocks[blockId] = {
|
blocksWithSubBlocks[blockId] = {
|
||||||
...block,
|
...block,
|
||||||
subBlocks,
|
subBlocks,
|
||||||
|
|||||||
@@ -139,46 +139,6 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
|||||||
'delete:issue-worklog:jira': 'Delete worklog entries from Jira issues',
|
'delete:issue-worklog:jira': 'Delete worklog entries from Jira issues',
|
||||||
'write:issue-link:jira': 'Create links between Jira issues',
|
'write:issue-link:jira': 'Create links between Jira issues',
|
||||||
'delete:issue-link:jira': 'Delete links between Jira issues',
|
'delete:issue-link:jira': 'Delete links between Jira issues',
|
||||||
'manage:jira-project': 'Manage Jira project components and versions',
|
|
||||||
'read:board-scope:jira-software': 'View Jira boards',
|
|
||||||
'write:board-scope:jira-software': 'Manage Jira boards and backlog',
|
|
||||||
'read:sprint:jira-software': 'View Jira sprints',
|
|
||||||
'write:sprint:jira-software': 'Create and manage Jira sprints',
|
|
||||||
'delete:sprint:jira-software': 'Delete Jira sprints',
|
|
||||||
'read:servicedesk:jira-service-management': 'View JSM service desks',
|
|
||||||
'read:requesttype:jira-service-management': 'View JSM request types',
|
|
||||||
'read:request:jira-service-management': 'View JSM service requests',
|
|
||||||
'write:request:jira-service-management': 'Create and update JSM service requests',
|
|
||||||
'read:request.comment:jira-service-management': 'View comments on JSM requests',
|
|
||||||
'write:request.comment:jira-service-management': 'Add comments to JSM requests',
|
|
||||||
'read:customer:jira-service-management': 'View JSM customers',
|
|
||||||
'write:customer:jira-service-management': 'Create and manage JSM customers',
|
|
||||||
'read:servicedesk.customer:jira-service-management': 'View service desk customers',
|
|
||||||
'write:servicedesk.customer:jira-service-management': 'Add customers to service desks',
|
|
||||||
'delete:servicedesk.customer:jira-service-management': 'Remove customers from service desks',
|
|
||||||
'read:organization:jira-service-management': 'View JSM organizations',
|
|
||||||
'write:organization:jira-service-management': 'Create and manage JSM organizations',
|
|
||||||
'delete:organization:jira-service-management': 'Delete JSM organizations',
|
|
||||||
'read:servicedesk.organization:jira-service-management': 'View service desk organizations',
|
|
||||||
'write:servicedesk.organization:jira-service-management': 'Add organizations to service desks',
|
|
||||||
'read:organization.user:jira-service-management': 'View organization users',
|
|
||||||
'write:organization.user:jira-service-management': 'Add users to organizations',
|
|
||||||
'read:queue:jira-service-management': 'View JSM queues and queue issues',
|
|
||||||
'read:request.sla:jira-service-management': 'View request SLA information',
|
|
||||||
'read:request.status:jira-service-management': 'View request status history',
|
|
||||||
'write:request.status:jira-service-management': 'Transition request status',
|
|
||||||
'read:request.participant:jira-service-management': 'View request participants',
|
|
||||||
'write:request.participant:jira-service-management': 'Add request participants',
|
|
||||||
'read:request.approval:jira-service-management': 'View request approvals',
|
|
||||||
'write:request.approval:jira-service-management': 'Respond to request approvals',
|
|
||||||
'read:request.feedback:jira-service-management': 'View request feedback',
|
|
||||||
'write:request.feedback:jira-service-management': 'Add request feedback',
|
|
||||||
'delete:request.feedback:jira-service-management': 'Delete request feedback',
|
|
||||||
'read:request.notification:jira-service-management': 'View request notification status',
|
|
||||||
'write:request.notification:jira-service-management': 'Subscribe to request notifications',
|
|
||||||
'delete:request.notification:jira-service-management': 'Unsubscribe from request notifications',
|
|
||||||
'read:request.attachment:jira-service-management': 'View request attachments',
|
|
||||||
'read:knowledgebase:jira-service-management': 'Search knowledge base articles',
|
|
||||||
'User.Read': 'Read Microsoft user',
|
'User.Read': 'Read Microsoft user',
|
||||||
'Chat.Read': 'Read Microsoft chats',
|
'Chat.Read': 'Read Microsoft chats',
|
||||||
'Chat.ReadWrite': 'Write to Microsoft chats',
|
'Chat.ReadWrite': 'Write to Microsoft chats',
|
||||||
|
|||||||
@@ -3,7 +3,6 @@ import type { BlockConfig } from '@/blocks/types'
|
|||||||
import { AuthMode } from '@/blocks/types'
|
import { AuthMode } from '@/blocks/types'
|
||||||
import { normalizeFileInput } from '@/blocks/utils'
|
import { normalizeFileInput } from '@/blocks/utils'
|
||||||
import type { ConfluenceResponse } from '@/tools/confluence/types'
|
import type { ConfluenceResponse } from '@/tools/confluence/types'
|
||||||
import { getTrigger } from '@/triggers'
|
|
||||||
|
|
||||||
export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
|
export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
|
||||||
type: 'confluence',
|
type: 'confluence',
|
||||||
@@ -395,7 +394,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
// Page Property Operations
|
// Page Property Operations
|
||||||
{ label: 'List Page Properties', id: 'list_page_properties' },
|
{ label: 'List Page Properties', id: 'list_page_properties' },
|
||||||
{ label: 'Create Page Property', id: 'create_page_property' },
|
{ label: 'Create Page Property', id: 'create_page_property' },
|
||||||
{ label: 'Update Page Property', id: 'update_page_property' },
|
|
||||||
{ label: 'Delete Page Property', id: 'delete_page_property' },
|
{ label: 'Delete Page Property', id: 'delete_page_property' },
|
||||||
// Search Operations
|
// Search Operations
|
||||||
{ label: 'Search Content', id: 'search' },
|
{ label: 'Search Content', id: 'search' },
|
||||||
@@ -404,8 +402,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
{ label: 'List Blog Posts', id: 'list_blogposts' },
|
{ label: 'List Blog Posts', id: 'list_blogposts' },
|
||||||
{ label: 'Get Blog Post', id: 'get_blogpost' },
|
{ label: 'Get Blog Post', id: 'get_blogpost' },
|
||||||
{ label: 'Create Blog Post', id: 'create_blogpost' },
|
{ label: 'Create Blog Post', id: 'create_blogpost' },
|
||||||
{ label: 'Update Blog Post', id: 'update_blogpost' },
|
|
||||||
{ label: 'Delete Blog Post', id: 'delete_blogpost' },
|
|
||||||
{ label: 'List Blog Posts in Space', id: 'list_blogposts_in_space' },
|
{ label: 'List Blog Posts in Space', id: 'list_blogposts_in_space' },
|
||||||
// Comment Operations
|
// Comment Operations
|
||||||
{ label: 'Create Comment', id: 'create_comment' },
|
{ label: 'Create Comment', id: 'create_comment' },
|
||||||
@@ -488,9 +484,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'list_pages_in_space',
|
'list_pages_in_space',
|
||||||
'list_blogposts',
|
'list_blogposts',
|
||||||
'get_blogpost',
|
'get_blogpost',
|
||||||
'create_blogpost',
|
|
||||||
'update_blogpost',
|
|
||||||
'delete_blogpost',
|
|
||||||
'list_blogposts_in_space',
|
'list_blogposts_in_space',
|
||||||
'search',
|
'search',
|
||||||
'search_in_space',
|
'search_in_space',
|
||||||
@@ -515,7 +508,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'add_label',
|
'add_label',
|
||||||
'delete_label',
|
'delete_label',
|
||||||
'delete_page_property',
|
'delete_page_property',
|
||||||
'update_page_property',
|
|
||||||
'get_page_children',
|
'get_page_children',
|
||||||
'get_page_ancestors',
|
'get_page_ancestors',
|
||||||
'list_page_versions',
|
'list_page_versions',
|
||||||
@@ -538,9 +530,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'list_pages_in_space',
|
'list_pages_in_space',
|
||||||
'list_blogposts',
|
'list_blogposts',
|
||||||
'get_blogpost',
|
'get_blogpost',
|
||||||
'create_blogpost',
|
|
||||||
'update_blogpost',
|
|
||||||
'delete_blogpost',
|
|
||||||
'list_blogposts_in_space',
|
'list_blogposts_in_space',
|
||||||
'search',
|
'search',
|
||||||
'search_in_space',
|
'search_in_space',
|
||||||
@@ -565,7 +554,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'add_label',
|
'add_label',
|
||||||
'delete_label',
|
'delete_label',
|
||||||
'delete_page_property',
|
'delete_page_property',
|
||||||
'update_page_property',
|
|
||||||
'get_page_children',
|
'get_page_children',
|
||||||
'get_page_ancestors',
|
'get_page_ancestors',
|
||||||
'list_page_versions',
|
'list_page_versions',
|
||||||
@@ -600,10 +588,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter blog post ID',
|
placeholder: 'Enter blog post ID',
|
||||||
required: true,
|
required: true,
|
||||||
condition: {
|
condition: { field: 'operation', value: 'get_blogpost' },
|
||||||
field: 'operation',
|
|
||||||
value: ['get_blogpost', 'update_blogpost', 'delete_blogpost'],
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'versionNumber',
|
id: 'versionNumber',
|
||||||
@@ -619,7 +604,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter property key/name',
|
placeholder: 'Enter property key/name',
|
||||||
required: true,
|
required: true,
|
||||||
condition: { field: 'operation', value: ['create_page_property', 'update_page_property'] },
|
condition: { field: 'operation', value: 'create_page_property' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'propertyValue',
|
id: 'propertyValue',
|
||||||
@@ -627,46 +612,29 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
type: 'long-input',
|
type: 'long-input',
|
||||||
placeholder: 'Enter property value (JSON supported)',
|
placeholder: 'Enter property value (JSON supported)',
|
||||||
required: true,
|
required: true,
|
||||||
condition: { field: 'operation', value: ['create_page_property', 'update_page_property'] },
|
condition: { field: 'operation', value: 'create_page_property' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'propertyId',
|
id: 'propertyId',
|
||||||
title: 'Property ID',
|
title: 'Property ID',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter property ID',
|
placeholder: 'Enter property ID to delete',
|
||||||
required: true,
|
required: true,
|
||||||
condition: {
|
condition: { field: 'operation', value: 'delete_page_property' },
|
||||||
field: 'operation',
|
|
||||||
value: ['delete_page_property', 'update_page_property'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'propertyVersionNumber',
|
|
||||||
title: 'Property Version Number',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter current version number of the property',
|
|
||||||
required: true,
|
|
||||||
condition: { field: 'operation', value: 'update_page_property' },
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'title',
|
id: 'title',
|
||||||
title: 'Title',
|
title: 'Title',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter title',
|
placeholder: 'Enter title',
|
||||||
condition: {
|
condition: { field: 'operation', value: ['create', 'update', 'create_blogpost'] },
|
||||||
field: 'operation',
|
|
||||||
value: ['create', 'update', 'create_blogpost', 'update_blogpost'],
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'content',
|
id: 'content',
|
||||||
title: 'Content',
|
title: 'Content',
|
||||||
type: 'long-input',
|
type: 'long-input',
|
||||||
placeholder: 'Enter content',
|
placeholder: 'Enter content',
|
||||||
condition: {
|
condition: { field: 'operation', value: ['create', 'update', 'create_blogpost'] },
|
||||||
field: 'operation',
|
|
||||||
value: ['create', 'update', 'create_blogpost', 'update_blogpost'],
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'parentId',
|
id: 'parentId',
|
||||||
@@ -779,7 +747,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
{ label: 'Draft', id: 'draft' },
|
{ label: 'Draft', id: 'draft' },
|
||||||
],
|
],
|
||||||
value: () => 'current',
|
value: () => 'current',
|
||||||
condition: { field: 'operation', value: ['create_blogpost', 'update_blogpost'] },
|
condition: { field: 'operation', value: 'create_blogpost' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'purge',
|
id: 'purge',
|
||||||
@@ -848,46 +816,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
// Trigger subBlocks
|
|
||||||
...getTrigger('confluence_page_created').subBlocks,
|
|
||||||
...getTrigger('confluence_page_updated').subBlocks,
|
|
||||||
...getTrigger('confluence_page_removed').subBlocks,
|
|
||||||
...getTrigger('confluence_page_moved').subBlocks,
|
|
||||||
...getTrigger('confluence_comment_created').subBlocks,
|
|
||||||
...getTrigger('confluence_comment_removed').subBlocks,
|
|
||||||
...getTrigger('confluence_blog_created').subBlocks,
|
|
||||||
...getTrigger('confluence_blog_updated').subBlocks,
|
|
||||||
...getTrigger('confluence_blog_removed').subBlocks,
|
|
||||||
...getTrigger('confluence_attachment_created').subBlocks,
|
|
||||||
...getTrigger('confluence_attachment_removed').subBlocks,
|
|
||||||
...getTrigger('confluence_space_created').subBlocks,
|
|
||||||
...getTrigger('confluence_space_updated').subBlocks,
|
|
||||||
...getTrigger('confluence_label_added').subBlocks,
|
|
||||||
...getTrigger('confluence_label_removed').subBlocks,
|
|
||||||
...getTrigger('confluence_webhook').subBlocks,
|
|
||||||
],
|
],
|
||||||
triggers: {
|
|
||||||
enabled: true,
|
|
||||||
available: [
|
|
||||||
'confluence_page_created',
|
|
||||||
'confluence_page_updated',
|
|
||||||
'confluence_page_removed',
|
|
||||||
'confluence_page_moved',
|
|
||||||
'confluence_comment_created',
|
|
||||||
'confluence_comment_removed',
|
|
||||||
'confluence_blog_created',
|
|
||||||
'confluence_blog_updated',
|
|
||||||
'confluence_blog_removed',
|
|
||||||
'confluence_attachment_created',
|
|
||||||
'confluence_attachment_removed',
|
|
||||||
'confluence_space_created',
|
|
||||||
'confluence_space_updated',
|
|
||||||
'confluence_label_added',
|
|
||||||
'confluence_label_removed',
|
|
||||||
'confluence_webhook',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
tools: {
|
tools: {
|
||||||
access: [
|
access: [
|
||||||
// Page Tools
|
// Page Tools
|
||||||
@@ -904,7 +833,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
// Property Tools
|
// Property Tools
|
||||||
'confluence_list_page_properties',
|
'confluence_list_page_properties',
|
||||||
'confluence_create_page_property',
|
'confluence_create_page_property',
|
||||||
'confluence_update_page_property',
|
|
||||||
'confluence_delete_page_property',
|
'confluence_delete_page_property',
|
||||||
// Search Tools
|
// Search Tools
|
||||||
'confluence_search',
|
'confluence_search',
|
||||||
@@ -913,8 +841,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'confluence_list_blogposts',
|
'confluence_list_blogposts',
|
||||||
'confluence_get_blogpost',
|
'confluence_get_blogpost',
|
||||||
'confluence_create_blogpost',
|
'confluence_create_blogpost',
|
||||||
'confluence_update_blogpost',
|
|
||||||
'confluence_delete_blogpost',
|
|
||||||
'confluence_list_blogposts_in_space',
|
'confluence_list_blogposts_in_space',
|
||||||
// Comment Tools
|
// Comment Tools
|
||||||
'confluence_create_comment',
|
'confluence_create_comment',
|
||||||
@@ -963,8 +889,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
return 'confluence_list_page_properties'
|
return 'confluence_list_page_properties'
|
||||||
case 'create_page_property':
|
case 'create_page_property':
|
||||||
return 'confluence_create_page_property'
|
return 'confluence_create_page_property'
|
||||||
case 'update_page_property':
|
|
||||||
return 'confluence_update_page_property'
|
|
||||||
case 'delete_page_property':
|
case 'delete_page_property':
|
||||||
return 'confluence_delete_page_property'
|
return 'confluence_delete_page_property'
|
||||||
// Search Operations
|
// Search Operations
|
||||||
@@ -979,10 +903,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
return 'confluence_get_blogpost'
|
return 'confluence_get_blogpost'
|
||||||
case 'create_blogpost':
|
case 'create_blogpost':
|
||||||
return 'confluence_create_blogpost'
|
return 'confluence_create_blogpost'
|
||||||
case 'update_blogpost':
|
|
||||||
return 'confluence_update_blogpost'
|
|
||||||
case 'delete_blogpost':
|
|
||||||
return 'confluence_delete_blogpost'
|
|
||||||
case 'list_blogposts_in_space':
|
case 'list_blogposts_in_space':
|
||||||
return 'confluence_list_blogposts_in_space'
|
return 'confluence_list_blogposts_in_space'
|
||||||
// Comment Operations
|
// Comment Operations
|
||||||
@@ -1034,7 +954,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
propertyKey,
|
propertyKey,
|
||||||
propertyValue,
|
propertyValue,
|
||||||
propertyId,
|
propertyId,
|
||||||
propertyVersionNumber,
|
|
||||||
labelPrefix,
|
labelPrefix,
|
||||||
labelId,
|
labelId,
|
||||||
blogPostStatus,
|
blogPostStatus,
|
||||||
@@ -1066,25 +985,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (operation === 'update_blogpost') {
|
|
||||||
return {
|
|
||||||
credential,
|
|
||||||
operation,
|
|
||||||
blogPostId,
|
|
||||||
status: blogPostStatus || undefined,
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'delete_blogpost') {
|
|
||||||
return {
|
|
||||||
credential,
|
|
||||||
operation,
|
|
||||||
blogPostId,
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'delete') {
|
if (operation === 'delete') {
|
||||||
return {
|
return {
|
||||||
credential,
|
credential,
|
||||||
@@ -1145,24 +1045,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (operation === 'update_page_property') {
|
|
||||||
if (!propertyKey) {
|
|
||||||
throw new Error('Property key is required for this operation.')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
credential,
|
|
||||||
pageId: effectivePageId,
|
|
||||||
operation,
|
|
||||||
propertyId,
|
|
||||||
key: propertyKey,
|
|
||||||
value: propertyValue,
|
|
||||||
versionNumber: propertyVersionNumber
|
|
||||||
? Number.parseInt(String(propertyVersionNumber), 10)
|
|
||||||
: undefined,
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'delete_page_property') {
|
if (operation === 'delete_page_property') {
|
||||||
return {
|
return {
|
||||||
credential,
|
credential,
|
||||||
@@ -1243,10 +1125,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
labelId: { type: 'string', description: 'Label identifier' },
|
labelId: { type: 'string', description: 'Label identifier' },
|
||||||
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
|
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
|
||||||
propertyId: { type: 'string', description: 'Property identifier' },
|
propertyId: { type: 'string', description: 'Property identifier' },
|
||||||
propertyVersionNumber: {
|
|
||||||
type: 'number',
|
|
||||||
description: 'Current version number of the property',
|
|
||||||
},
|
|
||||||
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
|
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
|
||||||
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
|
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
|
||||||
bodyFormat: { type: 'string', description: 'Body format for comments' },
|
bodyFormat: { type: 'string', description: 'Body format for comments' },
|
||||||
|
|||||||
@@ -93,12 +93,6 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
'delete:issue-worklog:jira',
|
'delete:issue-worklog:jira',
|
||||||
'write:issue-link:jira',
|
'write:issue-link:jira',
|
||||||
'delete:issue-link:jira',
|
'delete:issue-link:jira',
|
||||||
'manage:jira-project',
|
|
||||||
'read:board-scope:jira-software',
|
|
||||||
'write:board-scope:jira-software',
|
|
||||||
'read:sprint:jira-software',
|
|
||||||
'write:sprint:jira-software',
|
|
||||||
'delete:sprint:jira-software',
|
|
||||||
],
|
],
|
||||||
placeholder: 'Select Jira account',
|
placeholder: 'Select Jira account',
|
||||||
},
|
},
|
||||||
@@ -701,32 +695,7 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
...getTrigger('jira_issue_updated').subBlocks,
|
...getTrigger('jira_issue_updated').subBlocks,
|
||||||
...getTrigger('jira_issue_deleted').subBlocks,
|
...getTrigger('jira_issue_deleted').subBlocks,
|
||||||
...getTrigger('jira_issue_commented').subBlocks,
|
...getTrigger('jira_issue_commented').subBlocks,
|
||||||
...getTrigger('jira_comment_updated').subBlocks,
|
|
||||||
...getTrigger('jira_comment_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_worklog_created').subBlocks,
|
...getTrigger('jira_worklog_created').subBlocks,
|
||||||
...getTrigger('jira_worklog_updated').subBlocks,
|
|
||||||
...getTrigger('jira_worklog_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_sprint_created').subBlocks,
|
|
||||||
...getTrigger('jira_sprint_started').subBlocks,
|
|
||||||
...getTrigger('jira_sprint_closed').subBlocks,
|
|
||||||
...getTrigger('jira_sprint_updated').subBlocks,
|
|
||||||
...getTrigger('jira_sprint_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_project_created').subBlocks,
|
|
||||||
...getTrigger('jira_project_updated').subBlocks,
|
|
||||||
...getTrigger('jira_project_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_version_created').subBlocks,
|
|
||||||
...getTrigger('jira_version_released').subBlocks,
|
|
||||||
...getTrigger('jira_version_unreleased').subBlocks,
|
|
||||||
...getTrigger('jira_version_updated').subBlocks,
|
|
||||||
...getTrigger('jira_version_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_board_created').subBlocks,
|
|
||||||
...getTrigger('jira_board_updated').subBlocks,
|
|
||||||
...getTrigger('jira_board_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_board_config_changed').subBlocks,
|
|
||||||
...getTrigger('jira_attachment_created').subBlocks,
|
|
||||||
...getTrigger('jira_attachment_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_issuelink_created').subBlocks,
|
|
||||||
...getTrigger('jira_issuelink_deleted').subBlocks,
|
|
||||||
...getTrigger('jira_webhook').subBlocks,
|
...getTrigger('jira_webhook').subBlocks,
|
||||||
],
|
],
|
||||||
tools: {
|
tools: {
|
||||||
@@ -1271,32 +1240,7 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
'jira_issue_updated',
|
'jira_issue_updated',
|
||||||
'jira_issue_deleted',
|
'jira_issue_deleted',
|
||||||
'jira_issue_commented',
|
'jira_issue_commented',
|
||||||
'jira_comment_updated',
|
|
||||||
'jira_comment_deleted',
|
|
||||||
'jira_worklog_created',
|
'jira_worklog_created',
|
||||||
'jira_worklog_updated',
|
|
||||||
'jira_worklog_deleted',
|
|
||||||
'jira_sprint_created',
|
|
||||||
'jira_sprint_started',
|
|
||||||
'jira_sprint_closed',
|
|
||||||
'jira_sprint_updated',
|
|
||||||
'jira_sprint_deleted',
|
|
||||||
'jira_project_created',
|
|
||||||
'jira_project_updated',
|
|
||||||
'jira_project_deleted',
|
|
||||||
'jira_version_created',
|
|
||||||
'jira_version_released',
|
|
||||||
'jira_version_unreleased',
|
|
||||||
'jira_version_updated',
|
|
||||||
'jira_version_deleted',
|
|
||||||
'jira_board_created',
|
|
||||||
'jira_board_updated',
|
|
||||||
'jira_board_deleted',
|
|
||||||
'jira_board_config_changed',
|
|
||||||
'jira_attachment_created',
|
|
||||||
'jira_attachment_deleted',
|
|
||||||
'jira_issuelink_created',
|
|
||||||
'jira_issuelink_deleted',
|
|
||||||
'jira_webhook',
|
'jira_webhook',
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -2,16 +2,14 @@ import { JiraServiceManagementIcon } from '@/components/icons'
|
|||||||
import type { BlockConfig } from '@/blocks/types'
|
import type { BlockConfig } from '@/blocks/types'
|
||||||
import { AuthMode } from '@/blocks/types'
|
import { AuthMode } from '@/blocks/types'
|
||||||
import type { JsmResponse } from '@/tools/jsm/types'
|
import type { JsmResponse } from '@/tools/jsm/types'
|
||||||
import { getTrigger } from '@/triggers'
|
|
||||||
|
|
||||||
export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
||||||
type: 'jira_service_management',
|
type: 'jira_service_management',
|
||||||
name: 'Jira Service Management',
|
name: 'Jira Service Management',
|
||||||
description: 'Interact with Jira Service Management',
|
description: 'Interact with Jira Service Management',
|
||||||
authMode: AuthMode.OAuth,
|
authMode: AuthMode.OAuth,
|
||||||
triggerAllowed: true,
|
|
||||||
longDescription:
|
longDescription:
|
||||||
'Integrate with Jira Service Management for IT service management. Create and manage service requests, handle customers and organizations, track SLAs, and manage queues. Can also trigger workflows based on Jira Service Management webhook events.',
|
'Integrate with Jira Service Management for IT service management. Create and manage service requests, handle customers and organizations, track SLAs, and manage queues.',
|
||||||
docsLink: 'https://docs.sim.ai/tools/jira-service-management',
|
docsLink: 'https://docs.sim.ai/tools/jira-service-management',
|
||||||
category: 'tools',
|
category: 'tools',
|
||||||
bgColor: '#E0E0E0',
|
bgColor: '#E0E0E0',
|
||||||
@@ -23,46 +21,26 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
|||||||
type: 'dropdown',
|
type: 'dropdown',
|
||||||
options: [
|
options: [
|
||||||
{ label: 'Get Service Desks', id: 'get_service_desks' },
|
{ label: 'Get Service Desks', id: 'get_service_desks' },
|
||||||
{ label: 'Get Service Desk', id: 'get_service_desk' },
|
|
||||||
{ label: 'Get Request Types', id: 'get_request_types' },
|
{ label: 'Get Request Types', id: 'get_request_types' },
|
||||||
{ label: 'Get Request Type Fields', id: 'get_request_type_fields' },
|
|
||||||
{ label: 'Create Request', id: 'create_request' },
|
{ label: 'Create Request', id: 'create_request' },
|
||||||
{ label: 'Get Request', id: 'get_request' },
|
{ label: 'Get Request', id: 'get_request' },
|
||||||
{ label: 'Get Requests', id: 'get_requests' },
|
{ label: 'Get Requests', id: 'get_requests' },
|
||||||
{ label: 'Get Request Status', id: 'get_request_status' },
|
|
||||||
{ label: 'Get Request Attachments', id: 'get_request_attachments' },
|
|
||||||
{ label: 'Add Comment', id: 'add_comment' },
|
{ label: 'Add Comment', id: 'add_comment' },
|
||||||
{ label: 'Get Comments', id: 'get_comments' },
|
{ label: 'Get Comments', id: 'get_comments' },
|
||||||
{ label: 'Get Customers', id: 'get_customers' },
|
{ label: 'Get Customers', id: 'get_customers' },
|
||||||
{ label: 'Add Customer', id: 'add_customer' },
|
{ label: 'Add Customer', id: 'add_customer' },
|
||||||
{ label: 'Remove Customer', id: 'remove_customer' },
|
|
||||||
{ label: 'Create Customer', id: 'create_customer' },
|
|
||||||
{ label: 'Get Organizations', id: 'get_organizations' },
|
{ label: 'Get Organizations', id: 'get_organizations' },
|
||||||
{ label: 'Get Organization', id: 'get_organization' },
|
|
||||||
{ label: 'Create Organization', id: 'create_organization' },
|
{ label: 'Create Organization', id: 'create_organization' },
|
||||||
{ label: 'Add Organization', id: 'add_organization' },
|
{ label: 'Add Organization', id: 'add_organization' },
|
||||||
{ label: 'Remove Organization', id: 'remove_organization' },
|
|
||||||
{ label: 'Delete Organization', id: 'delete_organization' },
|
|
||||||
{ label: 'Get Organization Users', id: 'get_organization_users' },
|
|
||||||
{ label: 'Add Organization Users', id: 'add_organization_users' },
|
|
||||||
{ label: 'Remove Organization Users', id: 'remove_organization_users' },
|
|
||||||
{ label: 'Get Queues', id: 'get_queues' },
|
{ label: 'Get Queues', id: 'get_queues' },
|
||||||
{ label: 'Get Queue Issues', id: 'get_queue_issues' },
|
|
||||||
{ label: 'Get SLA', id: 'get_sla' },
|
{ label: 'Get SLA', id: 'get_sla' },
|
||||||
{ label: 'Get Transitions', id: 'get_transitions' },
|
{ label: 'Get Transitions', id: 'get_transitions' },
|
||||||
{ label: 'Transition Request', id: 'transition_request' },
|
{ label: 'Transition Request', id: 'transition_request' },
|
||||||
{ label: 'Get Participants', id: 'get_participants' },
|
{ label: 'Get Participants', id: 'get_participants' },
|
||||||
{ label: 'Add Participants', id: 'add_participants' },
|
{ label: 'Add Participants', id: 'add_participants' },
|
||||||
{ label: 'Remove Participants', id: 'remove_participants' },
|
|
||||||
{ label: 'Get Approvals', id: 'get_approvals' },
|
{ label: 'Get Approvals', id: 'get_approvals' },
|
||||||
{ label: 'Answer Approval', id: 'answer_approval' },
|
{ label: 'Answer Approval', id: 'answer_approval' },
|
||||||
{ label: 'Get Feedback', id: 'get_feedback' },
|
{ label: 'Get Request Type Fields', id: 'get_request_type_fields' },
|
||||||
{ label: 'Add Feedback', id: 'add_feedback' },
|
|
||||||
{ label: 'Delete Feedback', id: 'delete_feedback' },
|
|
||||||
{ label: 'Get Notification', id: 'get_notification' },
|
|
||||||
{ label: 'Subscribe Notification', id: 'subscribe_notification' },
|
|
||||||
{ label: 'Unsubscribe Notification', id: 'unsubscribe_notification' },
|
|
||||||
{ label: 'Search Knowledge Base', id: 'search_knowledge_base' },
|
|
||||||
],
|
],
|
||||||
value: () => 'get_service_desks',
|
value: () => 'get_service_desks',
|
||||||
},
|
},
|
||||||
@@ -114,18 +92,6 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
|||||||
'write:request.participant:jira-service-management',
|
'write:request.participant:jira-service-management',
|
||||||
'read:request.approval:jira-service-management',
|
'read:request.approval:jira-service-management',
|
||||||
'write:request.approval:jira-service-management',
|
'write:request.approval:jira-service-management',
|
||||||
'read:request.feedback:jira-service-management',
|
|
||||||
'write:request.feedback:jira-service-management',
|
|
||||||
'delete:request.feedback:jira-service-management',
|
|
||||||
'read:request.notification:jira-service-management',
|
|
||||||
'write:request.notification:jira-service-management',
|
|
||||||
'delete:request.notification:jira-service-management',
|
|
||||||
'read:request.attachment:jira-service-management',
|
|
||||||
'read:knowledgebase:jira-service-management',
|
|
||||||
'read:organization.user:jira-service-management',
|
|
||||||
'write:organization.user:jira-service-management',
|
|
||||||
'delete:organization:jira-service-management',
|
|
||||||
'delete:servicedesk.customer:jira-service-management',
|
|
||||||
],
|
],
|
||||||
placeholder: 'Select Jira account',
|
placeholder: 'Select Jira account',
|
||||||
},
|
},
|
||||||
@@ -137,20 +103,15 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
|||||||
condition: {
|
condition: {
|
||||||
field: 'operation',
|
field: 'operation',
|
||||||
value: [
|
value: [
|
||||||
'get_service_desk',
|
|
||||||
'get_request_types',
|
'get_request_types',
|
||||||
'create_request',
|
'create_request',
|
||||||
'get_customers',
|
'get_customers',
|
||||||
'add_customer',
|
'add_customer',
|
||||||
'remove_customer',
|
|
||||||
'get_organizations',
|
'get_organizations',
|
||||||
'add_organization',
|
'add_organization',
|
||||||
'remove_organization',
|
|
||||||
'get_queues',
|
'get_queues',
|
||||||
'get_queue_issues',
|
|
||||||
'get_requests',
|
'get_requests',
|
||||||
'get_request_type_fields',
|
'get_request_type_fields',
|
||||||
'search_knowledge_base',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -172,8 +133,6 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
|||||||
field: 'operation',
|
field: 'operation',
|
||||||
value: [
|
value: [
|
||||||
'get_request',
|
'get_request',
|
||||||
'get_request_status',
|
|
||||||
'get_request_attachments',
|
|
||||||
'add_comment',
|
'add_comment',
|
||||||
'get_comments',
|
'get_comments',
|
||||||
'get_sla',
|
'get_sla',
|
||||||
@@ -181,15 +140,8 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
|||||||
'transition_request',
|
'transition_request',
|
||||||
'get_participants',
|
'get_participants',
|
||||||
'add_participants',
|
'add_participants',
|
||||||
'remove_participants',
|
|
||||||
'get_approvals',
|
'get_approvals',
|
||||||
'answer_approval',
|
'answer_approval',
|
||||||
'get_feedback',
|
|
||||||
'add_feedback',
|
|
||||||
'delete_feedback',
|
|
||||||
'get_notification',
|
|
||||||
'subscribe_notification',
|
|
||||||
'unsubscribe_notification',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -321,15 +273,7 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
required: true,
|
required: true,
|
||||||
placeholder: 'Comma-separated Atlassian account IDs',
|
placeholder: 'Comma-separated Atlassian account IDs',
|
||||||
condition: {
|
condition: { field: 'operation', value: 'add_customer' },
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'add_customer',
|
|
||||||
'remove_customer',
|
|
||||||
'add_organization_users',
|
|
||||||
'remove_organization_users',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'customerQuery',
|
id: 'customerQuery',
|
||||||
@@ -422,18 +366,7 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
required: true,
|
required: true,
|
||||||
placeholder: 'Enter organization ID',
|
placeholder: 'Enter organization ID',
|
||||||
condition: {
|
condition: { field: 'operation', value: 'add_organization' },
|
||||||
field: 'operation',
|
|
||||||
value: [
|
|
||||||
'add_organization',
|
|
||||||
'remove_organization',
|
|
||||||
'delete_organization',
|
|
||||||
'get_organization',
|
|
||||||
'get_organization_users',
|
|
||||||
'add_organization_users',
|
|
||||||
'remove_organization_users',
|
|
||||||
],
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'participantAccountIds',
|
id: 'participantAccountIds',
|
||||||
@@ -441,7 +374,7 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
required: true,
|
required: true,
|
||||||
placeholder: 'Comma-separated account IDs',
|
placeholder: 'Comma-separated account IDs',
|
||||||
condition: { field: 'operation', value: ['add_participants', 'remove_participants'] },
|
condition: { field: 'operation', value: 'add_participants' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'approvalId',
|
id: 'approvalId',
|
||||||
@@ -473,165 +406,55 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
'get_service_desks',
|
'get_service_desks',
|
||||||
'get_request_types',
|
'get_request_types',
|
||||||
'get_requests',
|
'get_requests',
|
||||||
'get_request_status',
|
|
||||||
'get_request_attachments',
|
|
||||||
'get_comments',
|
'get_comments',
|
||||||
'get_customers',
|
'get_customers',
|
||||||
'get_organizations',
|
'get_organizations',
|
||||||
'get_organization_users',
|
|
||||||
'get_queues',
|
'get_queues',
|
||||||
'get_queue_issues',
|
|
||||||
'get_sla',
|
'get_sla',
|
||||||
'get_transitions',
|
'get_transitions',
|
||||||
'get_participants',
|
'get_participants',
|
||||||
'get_approvals',
|
'get_approvals',
|
||||||
'search_knowledge_base',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
|
||||||
id: 'queueId',
|
|
||||||
title: 'Queue ID',
|
|
||||||
type: 'short-input',
|
|
||||||
required: true,
|
|
||||||
placeholder: 'Enter queue ID',
|
|
||||||
condition: { field: 'operation', value: 'get_queue_issues' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'customerEmail',
|
|
||||||
title: 'Customer Email',
|
|
||||||
type: 'short-input',
|
|
||||||
required: true,
|
|
||||||
placeholder: 'Enter customer email address',
|
|
||||||
condition: { field: 'operation', value: 'create_customer' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'customerDisplayName',
|
|
||||||
title: 'Display Name',
|
|
||||||
type: 'short-input',
|
|
||||||
required: true,
|
|
||||||
placeholder: 'Enter customer display name',
|
|
||||||
condition: { field: 'operation', value: 'create_customer' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'knowledgeBaseQuery',
|
|
||||||
title: 'Search Query',
|
|
||||||
type: 'short-input',
|
|
||||||
required: true,
|
|
||||||
placeholder: 'Search knowledge base articles',
|
|
||||||
condition: { field: 'operation', value: 'search_knowledge_base' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'feedbackRating',
|
|
||||||
title: 'Rating',
|
|
||||||
type: 'dropdown',
|
|
||||||
options: [
|
|
||||||
{ label: '1 - Very Unsatisfied', id: '1' },
|
|
||||||
{ label: '2 - Unsatisfied', id: '2' },
|
|
||||||
{ label: '3 - Neutral', id: '3' },
|
|
||||||
{ label: '4 - Satisfied', id: '4' },
|
|
||||||
{ label: '5 - Very Satisfied', id: '5' },
|
|
||||||
],
|
|
||||||
value: () => '5',
|
|
||||||
condition: { field: 'operation', value: 'add_feedback' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'feedbackComment',
|
|
||||||
title: 'Feedback Comment',
|
|
||||||
type: 'long-input',
|
|
||||||
placeholder: 'Optional feedback comment',
|
|
||||||
condition: { field: 'operation', value: 'add_feedback' },
|
|
||||||
},
|
|
||||||
{
|
|
||||||
id: 'includeAttachments',
|
|
||||||
title: 'Include File Content',
|
|
||||||
type: 'dropdown',
|
|
||||||
options: [
|
|
||||||
{ label: 'No', id: 'false' },
|
|
||||||
{ label: 'Yes', id: 'true' },
|
|
||||||
],
|
|
||||||
value: () => 'false',
|
|
||||||
condition: { field: 'operation', value: 'get_request_attachments' },
|
|
||||||
},
|
|
||||||
// Trigger SubBlocks
|
|
||||||
...getTrigger('jsm_request_created').subBlocks,
|
|
||||||
...getTrigger('jsm_request_updated').subBlocks,
|
|
||||||
...getTrigger('jsm_request_deleted').subBlocks,
|
|
||||||
...getTrigger('jsm_request_commented').subBlocks,
|
|
||||||
...getTrigger('jsm_comment_updated').subBlocks,
|
|
||||||
...getTrigger('jsm_comment_deleted').subBlocks,
|
|
||||||
...getTrigger('jsm_worklog_created').subBlocks,
|
|
||||||
...getTrigger('jsm_worklog_updated').subBlocks,
|
|
||||||
...getTrigger('jsm_worklog_deleted').subBlocks,
|
|
||||||
...getTrigger('jsm_attachment_created').subBlocks,
|
|
||||||
...getTrigger('jsm_attachment_deleted').subBlocks,
|
|
||||||
...getTrigger('jsm_webhook').subBlocks,
|
|
||||||
],
|
],
|
||||||
tools: {
|
tools: {
|
||||||
access: [
|
access: [
|
||||||
'jsm_get_service_desks',
|
'jsm_get_service_desks',
|
||||||
'jsm_get_service_desk',
|
|
||||||
'jsm_get_request_types',
|
'jsm_get_request_types',
|
||||||
'jsm_get_request_type_fields',
|
|
||||||
'jsm_create_request',
|
'jsm_create_request',
|
||||||
'jsm_get_request',
|
'jsm_get_request',
|
||||||
'jsm_get_requests',
|
'jsm_get_requests',
|
||||||
'jsm_get_request_status',
|
|
||||||
'jsm_get_request_attachments',
|
|
||||||
'jsm_add_comment',
|
'jsm_add_comment',
|
||||||
'jsm_get_comments',
|
'jsm_get_comments',
|
||||||
'jsm_get_customers',
|
'jsm_get_customers',
|
||||||
'jsm_add_customer',
|
'jsm_add_customer',
|
||||||
'jsm_remove_customer',
|
|
||||||
'jsm_create_customer',
|
|
||||||
'jsm_get_organizations',
|
'jsm_get_organizations',
|
||||||
'jsm_get_organization',
|
|
||||||
'jsm_create_organization',
|
'jsm_create_organization',
|
||||||
'jsm_add_organization',
|
'jsm_add_organization',
|
||||||
'jsm_remove_organization',
|
|
||||||
'jsm_delete_organization',
|
|
||||||
'jsm_get_organization_users',
|
|
||||||
'jsm_add_organization_users',
|
|
||||||
'jsm_remove_organization_users',
|
|
||||||
'jsm_get_queues',
|
'jsm_get_queues',
|
||||||
'jsm_get_queue_issues',
|
|
||||||
'jsm_get_sla',
|
'jsm_get_sla',
|
||||||
'jsm_get_transitions',
|
'jsm_get_transitions',
|
||||||
'jsm_transition_request',
|
'jsm_transition_request',
|
||||||
'jsm_get_participants',
|
'jsm_get_participants',
|
||||||
'jsm_add_participants',
|
'jsm_add_participants',
|
||||||
'jsm_remove_participants',
|
|
||||||
'jsm_get_approvals',
|
'jsm_get_approvals',
|
||||||
'jsm_answer_approval',
|
'jsm_answer_approval',
|
||||||
'jsm_get_feedback',
|
'jsm_get_request_type_fields',
|
||||||
'jsm_add_feedback',
|
|
||||||
'jsm_delete_feedback',
|
|
||||||
'jsm_get_notification',
|
|
||||||
'jsm_subscribe_notification',
|
|
||||||
'jsm_unsubscribe_notification',
|
|
||||||
'jsm_search_knowledge_base',
|
|
||||||
],
|
],
|
||||||
config: {
|
config: {
|
||||||
tool: (params) => {
|
tool: (params) => {
|
||||||
switch (params.operation) {
|
switch (params.operation) {
|
||||||
case 'get_service_desks':
|
case 'get_service_desks':
|
||||||
return 'jsm_get_service_desks'
|
return 'jsm_get_service_desks'
|
||||||
case 'get_service_desk':
|
|
||||||
return 'jsm_get_service_desk'
|
|
||||||
case 'get_request_types':
|
case 'get_request_types':
|
||||||
return 'jsm_get_request_types'
|
return 'jsm_get_request_types'
|
||||||
case 'get_request_type_fields':
|
|
||||||
return 'jsm_get_request_type_fields'
|
|
||||||
case 'create_request':
|
case 'create_request':
|
||||||
return 'jsm_create_request'
|
return 'jsm_create_request'
|
||||||
case 'get_request':
|
case 'get_request':
|
||||||
return 'jsm_get_request'
|
return 'jsm_get_request'
|
||||||
case 'get_requests':
|
case 'get_requests':
|
||||||
return 'jsm_get_requests'
|
return 'jsm_get_requests'
|
||||||
case 'get_request_status':
|
|
||||||
return 'jsm_get_request_status'
|
|
||||||
case 'get_request_attachments':
|
|
||||||
return 'jsm_get_request_attachments'
|
|
||||||
case 'add_comment':
|
case 'add_comment':
|
||||||
return 'jsm_add_comment'
|
return 'jsm_add_comment'
|
||||||
case 'get_comments':
|
case 'get_comments':
|
||||||
@@ -640,32 +463,14 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
return 'jsm_get_customers'
|
return 'jsm_get_customers'
|
||||||
case 'add_customer':
|
case 'add_customer':
|
||||||
return 'jsm_add_customer'
|
return 'jsm_add_customer'
|
||||||
case 'remove_customer':
|
|
||||||
return 'jsm_remove_customer'
|
|
||||||
case 'create_customer':
|
|
||||||
return 'jsm_create_customer'
|
|
||||||
case 'get_organizations':
|
case 'get_organizations':
|
||||||
return 'jsm_get_organizations'
|
return 'jsm_get_organizations'
|
||||||
case 'get_organization':
|
|
||||||
return 'jsm_get_organization'
|
|
||||||
case 'create_organization':
|
case 'create_organization':
|
||||||
return 'jsm_create_organization'
|
return 'jsm_create_organization'
|
||||||
case 'add_organization':
|
case 'add_organization':
|
||||||
return 'jsm_add_organization'
|
return 'jsm_add_organization'
|
||||||
case 'remove_organization':
|
|
||||||
return 'jsm_remove_organization'
|
|
||||||
case 'delete_organization':
|
|
||||||
return 'jsm_delete_organization'
|
|
||||||
case 'get_organization_users':
|
|
||||||
return 'jsm_get_organization_users'
|
|
||||||
case 'add_organization_users':
|
|
||||||
return 'jsm_add_organization_users'
|
|
||||||
case 'remove_organization_users':
|
|
||||||
return 'jsm_remove_organization_users'
|
|
||||||
case 'get_queues':
|
case 'get_queues':
|
||||||
return 'jsm_get_queues'
|
return 'jsm_get_queues'
|
||||||
case 'get_queue_issues':
|
|
||||||
return 'jsm_get_queue_issues'
|
|
||||||
case 'get_sla':
|
case 'get_sla':
|
||||||
return 'jsm_get_sla'
|
return 'jsm_get_sla'
|
||||||
case 'get_transitions':
|
case 'get_transitions':
|
||||||
@@ -676,26 +481,12 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
return 'jsm_get_participants'
|
return 'jsm_get_participants'
|
||||||
case 'add_participants':
|
case 'add_participants':
|
||||||
return 'jsm_add_participants'
|
return 'jsm_add_participants'
|
||||||
case 'remove_participants':
|
|
||||||
return 'jsm_remove_participants'
|
|
||||||
case 'get_approvals':
|
case 'get_approvals':
|
||||||
return 'jsm_get_approvals'
|
return 'jsm_get_approvals'
|
||||||
case 'answer_approval':
|
case 'answer_approval':
|
||||||
return 'jsm_answer_approval'
|
return 'jsm_answer_approval'
|
||||||
case 'get_feedback':
|
case 'get_request_type_fields':
|
||||||
return 'jsm_get_feedback'
|
return 'jsm_get_request_type_fields'
|
||||||
case 'add_feedback':
|
|
||||||
return 'jsm_add_feedback'
|
|
||||||
case 'delete_feedback':
|
|
||||||
return 'jsm_delete_feedback'
|
|
||||||
case 'get_notification':
|
|
||||||
return 'jsm_get_notification'
|
|
||||||
case 'subscribe_notification':
|
|
||||||
return 'jsm_subscribe_notification'
|
|
||||||
case 'unsubscribe_notification':
|
|
||||||
return 'jsm_unsubscribe_notification'
|
|
||||||
case 'search_knowledge_base':
|
|
||||||
return 'jsm_search_knowledge_base'
|
|
||||||
default:
|
default:
|
||||||
return 'jsm_get_service_desks'
|
return 'jsm_get_service_desks'
|
||||||
}
|
}
|
||||||
@@ -940,204 +731,6 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
serviceDeskId: params.serviceDeskId,
|
serviceDeskId: params.serviceDeskId,
|
||||||
requestTypeId: params.requestTypeId,
|
requestTypeId: params.requestTypeId,
|
||||||
}
|
}
|
||||||
case 'get_service_desk':
|
|
||||||
if (!params.serviceDeskId) {
|
|
||||||
throw new Error('Service Desk ID is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
serviceDeskId: params.serviceDeskId,
|
|
||||||
}
|
|
||||||
case 'get_request_status':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
|
||||||
}
|
|
||||||
case 'get_request_attachments':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
includeAttachments: params.includeAttachments === 'true',
|
|
||||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
|
||||||
}
|
|
||||||
case 'remove_customer': {
|
|
||||||
if (!params.serviceDeskId) {
|
|
||||||
throw new Error('Service Desk ID is required')
|
|
||||||
}
|
|
||||||
if (!params.accountIds) {
|
|
||||||
throw new Error('Account IDs are required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
serviceDeskId: params.serviceDeskId,
|
|
||||||
accountIds: params.accountIds,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
case 'create_customer':
|
|
||||||
if (!params.customerEmail) {
|
|
||||||
throw new Error('Customer email is required')
|
|
||||||
}
|
|
||||||
if (!params.customerDisplayName) {
|
|
||||||
throw new Error('Customer display name is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
email: params.customerEmail,
|
|
||||||
displayName: params.customerDisplayName,
|
|
||||||
}
|
|
||||||
case 'get_organization':
|
|
||||||
if (!params.organizationId) {
|
|
||||||
throw new Error('Organization ID is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
organizationId: params.organizationId,
|
|
||||||
}
|
|
||||||
case 'remove_organization':
|
|
||||||
if (!params.serviceDeskId) {
|
|
||||||
throw new Error('Service Desk ID is required')
|
|
||||||
}
|
|
||||||
if (!params.organizationId) {
|
|
||||||
throw new Error('Organization ID is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
serviceDeskId: params.serviceDeskId,
|
|
||||||
organizationId: params.organizationId,
|
|
||||||
}
|
|
||||||
case 'delete_organization':
|
|
||||||
if (!params.organizationId) {
|
|
||||||
throw new Error('Organization ID is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
organizationId: params.organizationId,
|
|
||||||
}
|
|
||||||
case 'get_organization_users':
|
|
||||||
if (!params.organizationId) {
|
|
||||||
throw new Error('Organization ID is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
organizationId: params.organizationId,
|
|
||||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
|
||||||
}
|
|
||||||
case 'add_organization_users':
|
|
||||||
if (!params.organizationId) {
|
|
||||||
throw new Error('Organization ID is required')
|
|
||||||
}
|
|
||||||
if (!params.accountIds) {
|
|
||||||
throw new Error('Account IDs are required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
organizationId: params.organizationId,
|
|
||||||
accountIds: params.accountIds,
|
|
||||||
}
|
|
||||||
case 'remove_organization_users':
|
|
||||||
if (!params.organizationId) {
|
|
||||||
throw new Error('Organization ID is required')
|
|
||||||
}
|
|
||||||
if (!params.accountIds) {
|
|
||||||
throw new Error('Account IDs are required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
organizationId: params.organizationId,
|
|
||||||
accountIds: params.accountIds,
|
|
||||||
}
|
|
||||||
case 'get_queue_issues':
|
|
||||||
if (!params.serviceDeskId) {
|
|
||||||
throw new Error('Service Desk ID is required')
|
|
||||||
}
|
|
||||||
if (!params.queueId) {
|
|
||||||
throw new Error('Queue ID is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
serviceDeskId: params.serviceDeskId,
|
|
||||||
queueId: params.queueId,
|
|
||||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
|
||||||
}
|
|
||||||
case 'remove_participants':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
if (!params.participantAccountIds) {
|
|
||||||
throw new Error('Account IDs are required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
accountIds: params.participantAccountIds,
|
|
||||||
}
|
|
||||||
case 'get_feedback':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
}
|
|
||||||
case 'add_feedback':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
rating: Number.parseInt(params.feedbackRating || '5'),
|
|
||||||
comment: params.feedbackComment,
|
|
||||||
}
|
|
||||||
case 'delete_feedback':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
}
|
|
||||||
case 'get_notification':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
}
|
|
||||||
case 'subscribe_notification':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
}
|
|
||||||
case 'unsubscribe_notification':
|
|
||||||
if (!params.issueIdOrKey) {
|
|
||||||
throw new Error('Issue ID or key is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
issueIdOrKey: params.issueIdOrKey,
|
|
||||||
}
|
|
||||||
case 'search_knowledge_base':
|
|
||||||
if (!params.knowledgeBaseQuery) {
|
|
||||||
throw new Error('Search query is required')
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
...baseParams,
|
|
||||||
serviceDeskId: params.serviceDeskId,
|
|
||||||
query: params.knowledgeBaseQuery,
|
|
||||||
limit: params.maxResults ? Number.parseInt(params.maxResults) : undefined,
|
|
||||||
}
|
|
||||||
default:
|
default:
|
||||||
return baseParams
|
return baseParams
|
||||||
}
|
}
|
||||||
@@ -1186,16 +779,6 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
searchQuery: { type: 'string', description: 'Filter request types by name' },
|
searchQuery: { type: 'string', description: 'Filter request types by name' },
|
||||||
groupId: { type: 'string', description: 'Filter by request type group ID' },
|
groupId: { type: 'string', description: 'Filter by request type group ID' },
|
||||||
expand: { type: 'string', description: 'Comma-separated fields to expand' },
|
expand: { type: 'string', description: 'Comma-separated fields to expand' },
|
||||||
queueId: { type: 'string', description: 'Queue ID' },
|
|
||||||
customerEmail: { type: 'string', description: 'Customer email address' },
|
|
||||||
customerDisplayName: { type: 'string', description: 'Customer display name' },
|
|
||||||
knowledgeBaseQuery: { type: 'string', description: 'Knowledge base search query' },
|
|
||||||
feedbackRating: { type: 'string', description: 'CSAT feedback rating (1-5)' },
|
|
||||||
feedbackComment: { type: 'string', description: 'CSAT feedback comment' },
|
|
||||||
includeAttachments: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Whether to download attachment file content',
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: { type: 'string', description: 'Timestamp of the operation' },
|
ts: { type: 'string', description: 'Timestamp of the operation' },
|
||||||
@@ -1227,19 +810,6 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
total: { type: 'number', description: 'Total count' },
|
total: { type: 'number', description: 'Total count' },
|
||||||
isLastPage: { type: 'boolean', description: 'Whether this is the last page' },
|
isLastPage: { type: 'boolean', description: 'Whether this is the last page' },
|
||||||
requestTypeFields: { type: 'json', description: 'Array of request type fields' },
|
requestTypeFields: { type: 'json', description: 'Array of request type fields' },
|
||||||
rating: { type: 'number', description: 'CSAT feedback rating' },
|
|
||||||
subscribed: { type: 'boolean', description: 'Whether subscribed to notifications' },
|
|
||||||
articles: { type: 'json', description: 'Array of knowledge base articles' },
|
|
||||||
statuses: { type: 'json', description: 'Array of request status history entries' },
|
|
||||||
attachments: { type: 'json', description: 'Array of attachment metadata' },
|
|
||||||
issues: { type: 'json', description: 'Array of queue issues' },
|
|
||||||
users: { type: 'json', description: 'Array of organization users' },
|
|
||||||
id: { type: 'string', description: 'Resource ID' },
|
|
||||||
projectId: { type: 'string', description: 'Service desk project ID' },
|
|
||||||
projectName: { type: 'string', description: 'Service desk project name' },
|
|
||||||
projectKey: { type: 'string', description: 'Service desk project key' },
|
|
||||||
email: { type: 'string', description: 'Customer email address' },
|
|
||||||
displayName: { type: 'string', description: 'Customer display name' },
|
|
||||||
canAddRequestParticipants: {
|
canAddRequestParticipants: {
|
||||||
type: 'boolean',
|
type: 'boolean',
|
||||||
description: 'Whether participants can be added to this request type',
|
description: 'Whether participants can be added to this request type',
|
||||||
@@ -1248,36 +818,5 @@ Return ONLY the comment text - no explanations.`,
|
|||||||
type: 'boolean',
|
type: 'boolean',
|
||||||
description: 'Whether requests can be raised on behalf of another user',
|
description: 'Whether requests can be raised on behalf of another user',
|
||||||
},
|
},
|
||||||
// Trigger outputs (from webhook events)
|
|
||||||
webhookEvent: { type: 'string', description: 'Webhook event type' },
|
|
||||||
issue: { type: 'json', description: 'Complete issue object from webhook' },
|
|
||||||
changelog: { type: 'json', description: 'Changelog object (for update events)' },
|
|
||||||
comment: { type: 'json', description: 'Comment object (for comment events)' },
|
|
||||||
worklog: { type: 'json', description: 'Worklog object (for worklog events)' },
|
|
||||||
attachment: { type: 'json', description: 'Attachment metadata (for attachment events)' },
|
|
||||||
files: {
|
|
||||||
type: 'file[]',
|
|
||||||
description:
|
|
||||||
'Downloaded file attachments (if includeFiles is enabled and Jira credentials are provided)',
|
|
||||||
},
|
|
||||||
user: { type: 'json', description: 'User object who triggered the event' },
|
|
||||||
webhook: { type: 'json', description: 'Complete webhook payload' },
|
|
||||||
},
|
|
||||||
triggers: {
|
|
||||||
enabled: true,
|
|
||||||
available: [
|
|
||||||
'jsm_request_created',
|
|
||||||
'jsm_request_updated',
|
|
||||||
'jsm_request_deleted',
|
|
||||||
'jsm_request_commented',
|
|
||||||
'jsm_comment_updated',
|
|
||||||
'jsm_comment_deleted',
|
|
||||||
'jsm_worklog_created',
|
|
||||||
'jsm_worklog_updated',
|
|
||||||
'jsm_worklog_deleted',
|
|
||||||
'jsm_attachment_created',
|
|
||||||
'jsm_attachment_deleted',
|
|
||||||
'jsm_webhook',
|
|
||||||
],
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,22 +1,21 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { COPILOT_CONFIRM_API_PATH, STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
||||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
import {
|
import { isBackgroundState, isRejectedState, isReviewState } from '@/lib/copilot/store-utils'
|
||||||
isBackgroundState,
|
|
||||||
isRejectedState,
|
|
||||||
isReviewState,
|
|
||||||
resolveToolDisplay,
|
|
||||||
} from '@/lib/copilot/store-utils'
|
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
|
||||||
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
|
||||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
|
||||||
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
||||||
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
||||||
|
import {
|
||||||
|
extractOperationListFromResultPayload,
|
||||||
|
extractToolExecutionMetadata,
|
||||||
|
extractToolUiMetadata,
|
||||||
|
isWorkflowChangeApplyCall,
|
||||||
|
mapServerStateToClientState,
|
||||||
|
resolveDisplayFromServerUi,
|
||||||
|
} from './tool-call-helpers'
|
||||||
|
import { applyToolEffects } from './tool-effects'
|
||||||
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
||||||
|
|
||||||
const logger = createLogger('CopilotClientSseHandlers')
|
const logger = createLogger('CopilotClientSseHandlers')
|
||||||
@@ -26,21 +25,11 @@ const MAX_BATCH_INTERVAL = 50
|
|||||||
const MIN_BATCH_INTERVAL = 16
|
const MIN_BATCH_INTERVAL = 16
|
||||||
const MAX_QUEUE_SIZE = 5
|
const MAX_QUEUE_SIZE = 5
|
||||||
|
|
||||||
/**
|
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
|
||||||
* Send an auto-accept confirmation to the server for auto-allowed tools.
|
if (toolCall.execution?.target === 'sim_client_capability') {
|
||||||
* The server-side orchestrator polls Redis for this decision.
|
return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
|
||||||
*/
|
}
|
||||||
export function sendAutoAcceptConfirmation(toolCallId: string): void {
|
return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
|
||||||
fetch(COPILOT_CONFIRM_API_PATH, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolCallId, status: 'accepted' }),
|
|
||||||
}).catch((error) => {
|
|
||||||
logger.warn('Failed to send auto-accept confirmation', {
|
|
||||||
toolCallId,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
|
function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
|
||||||
@@ -230,28 +219,86 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
title_updated: (_data, _context, get, set) => {
|
title_updated: (_data, _context, get, set) => {
|
||||||
const title = _data.title
|
const title = typeof _data.title === 'string' ? _data.title.trim() : ''
|
||||||
if (!title) return
|
const eventChatId = typeof _data.chatId === 'string' ? _data.chatId : undefined
|
||||||
const { currentChat, chats } = get()
|
const { currentChat, chats } = get()
|
||||||
if (currentChat) {
|
|
||||||
set({
|
logger.info('[Title] Received title_updated SSE event', {
|
||||||
currentChat: { ...currentChat, title },
|
eventTitle: title,
|
||||||
chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)),
|
eventChatId: eventChatId || null,
|
||||||
|
currentChatId: currentChat?.id || null,
|
||||||
|
currentChatTitle: currentChat?.title || null,
|
||||||
|
chatCount: chats.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!title) {
|
||||||
|
logger.warn('[Title] Ignoring title_updated event with empty title', {
|
||||||
|
payload: _data,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!currentChat) {
|
||||||
|
logger.warn('[Title] Received title_updated event without an active currentChat', {
|
||||||
|
eventChatId: eventChatId || null,
|
||||||
|
title,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetChatId = eventChatId || currentChat.id
|
||||||
|
if (eventChatId && eventChatId !== currentChat.id) {
|
||||||
|
logger.warn('[Title] title_updated event chatId does not match currentChat', {
|
||||||
|
eventChatId,
|
||||||
|
currentChatId: currentChat.id,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
set({
|
||||||
|
currentChat:
|
||||||
|
currentChat.id === targetChatId
|
||||||
|
? {
|
||||||
|
...currentChat,
|
||||||
|
title,
|
||||||
|
}
|
||||||
|
: currentChat,
|
||||||
|
chats: chats.map((c) => (c.id === targetChatId ? { ...c, title } : c)),
|
||||||
|
})
|
||||||
|
|
||||||
|
const updatedState = get()
|
||||||
|
logger.info('[Title] Applied title_updated event to copilot store', {
|
||||||
|
targetChatId,
|
||||||
|
renderedCurrentChatId: updatedState.currentChat?.id || null,
|
||||||
|
renderedCurrentChatTitle: updatedState.currentChat?.title || null,
|
||||||
|
chatListTitle: updatedState.chats.find((c) => c.id === targetChatId)?.title || null,
|
||||||
|
})
|
||||||
},
|
},
|
||||||
tool_result: (data, context, get, set) => {
|
'copilot.tool.result': (data, context, get, set) => {
|
||||||
try {
|
try {
|
||||||
const eventData = asRecord(data?.data)
|
const eventData = asRecord(data?.data)
|
||||||
const toolCallId: string | undefined =
|
const toolCallId: string | undefined =
|
||||||
data?.toolCallId || (eventData.id as string | undefined)
|
data?.toolCallId ||
|
||||||
|
(eventData.id as string | undefined) ||
|
||||||
|
(eventData.callId as string | undefined)
|
||||||
const success: boolean | undefined = data?.success
|
const success: boolean | undefined = data?.success
|
||||||
const failedDependency: boolean = data?.failedDependency === true
|
const failedDependency: boolean = data?.failedDependency === true
|
||||||
const resultObj = asRecord(data?.result)
|
const resultObj = asRecord(data?.result)
|
||||||
const skipped: boolean = resultObj.skipped === true
|
const skipped: boolean = resultObj.skipped === true
|
||||||
if (!toolCallId) return
|
if (!toolCallId) return
|
||||||
|
const uiMetadata = extractToolUiMetadata(eventData)
|
||||||
|
const executionMetadata = extractToolExecutionMetadata(eventData)
|
||||||
|
const serverState = (eventData.state as string | undefined) || undefined
|
||||||
|
const targetState = serverState
|
||||||
|
? mapServerStateToClientState(serverState)
|
||||||
|
: success
|
||||||
|
? ClientToolCallState.success
|
||||||
|
: failedDependency || skipped
|
||||||
|
? ClientToolCallState.rejected
|
||||||
|
: ClientToolCallState.error
|
||||||
|
const resultPayload = asRecord(data?.result || eventData.result || eventData.data || data?.data)
|
||||||
const { toolCallsById } = get()
|
const { toolCallsById } = get()
|
||||||
const current = toolCallsById[toolCallId]
|
const current = toolCallsById[toolCallId]
|
||||||
|
let paramsForCurrentToolCall: Record<string, unknown> | undefined = current?.params
|
||||||
if (current) {
|
if (current) {
|
||||||
if (
|
if (
|
||||||
isRejectedState(current.state) ||
|
isRejectedState(current.state) ||
|
||||||
@@ -260,16 +307,33 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
) {
|
) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
const targetState = success
|
if (
|
||||||
? ClientToolCallState.success
|
targetState === ClientToolCallState.success &&
|
||||||
: failedDependency || skipped
|
isWorkflowChangeApplyCall(current.name, paramsForCurrentToolCall)
|
||||||
? ClientToolCallState.rejected
|
) {
|
||||||
: ClientToolCallState.error
|
const operations = extractOperationListFromResultPayload(resultPayload || {})
|
||||||
|
if (operations && operations.length > 0) {
|
||||||
|
paramsForCurrentToolCall = {
|
||||||
|
...(current.params || {}),
|
||||||
|
operations,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const updatedMap = { ...toolCallsById }
|
const updatedMap = { ...toolCallsById }
|
||||||
updatedMap[toolCallId] = {
|
updatedMap[toolCallId] = {
|
||||||
...current,
|
...current,
|
||||||
|
ui: uiMetadata || current.ui,
|
||||||
|
execution: executionMetadata || current.execution,
|
||||||
|
params: paramsForCurrentToolCall,
|
||||||
state: targetState,
|
state: targetState,
|
||||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
display: resolveDisplayFromServerUi(
|
||||||
|
current.name,
|
||||||
|
targetState,
|
||||||
|
current.id,
|
||||||
|
paramsForCurrentToolCall,
|
||||||
|
uiMetadata || current.ui
|
||||||
|
),
|
||||||
}
|
}
|
||||||
set({ toolCallsById: updatedMap })
|
set({ toolCallsById: updatedMap })
|
||||||
|
|
||||||
@@ -312,138 +376,11 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (current.name === 'edit_workflow') {
|
applyToolEffects({
|
||||||
try {
|
effectsRaw: eventData.effects,
|
||||||
const resultPayload = asRecord(
|
toolCall: updatedMap[toolCallId],
|
||||||
data?.result || eventData.result || eventData.data || data?.data
|
resultPayload,
|
||||||
)
|
})
|
||||||
const workflowState = asRecord(resultPayload?.workflowState)
|
|
||||||
const hasWorkflowState = !!resultPayload?.workflowState
|
|
||||||
logger.info('[SSE] edit_workflow result received', {
|
|
||||||
hasWorkflowState,
|
|
||||||
blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
|
|
||||||
edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
|
|
||||||
})
|
|
||||||
if (hasWorkflowState) {
|
|
||||||
const diffStore = useWorkflowDiffStore.getState()
|
|
||||||
diffStore
|
|
||||||
.setProposedChanges(resultPayload.workflowState as WorkflowState)
|
|
||||||
.catch((err) => {
|
|
||||||
logger.error('[SSE] Failed to apply edit_workflow diff', {
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.error('[SSE] edit_workflow result handling failed', {
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Deploy tools: update deployment status in workflow registry
|
|
||||||
if (
|
|
||||||
targetState === ClientToolCallState.success &&
|
|
||||||
(current.name === 'deploy_api' ||
|
|
||||||
current.name === 'deploy_chat' ||
|
|
||||||
current.name === 'deploy_mcp' ||
|
|
||||||
current.name === 'redeploy')
|
|
||||||
) {
|
|
||||||
try {
|
|
||||||
const resultPayload = asRecord(
|
|
||||||
data?.result || eventData.result || eventData.data || data?.data
|
|
||||||
)
|
|
||||||
const input = asRecord(current.params)
|
|
||||||
const workflowId =
|
|
||||||
(resultPayload?.workflowId as string) ||
|
|
||||||
(input?.workflowId as string) ||
|
|
||||||
useWorkflowRegistry.getState().activeWorkflowId
|
|
||||||
const isDeployed = resultPayload?.isDeployed !== false
|
|
||||||
if (workflowId) {
|
|
||||||
useWorkflowRegistry
|
|
||||||
.getState()
|
|
||||||
.setDeploymentStatus(workflowId, isDeployed, isDeployed ? new Date() : undefined)
|
|
||||||
logger.info('[SSE] Updated deployment status from tool result', {
|
|
||||||
toolName: current.name,
|
|
||||||
workflowId,
|
|
||||||
isDeployed,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn('[SSE] Failed to hydrate deployment status', {
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Environment variables: reload store after successful set
|
|
||||||
if (
|
|
||||||
targetState === ClientToolCallState.success &&
|
|
||||||
current.name === 'set_environment_variables'
|
|
||||||
) {
|
|
||||||
try {
|
|
||||||
useEnvironmentStore.getState().loadEnvironmentVariables()
|
|
||||||
logger.info('[SSE] Triggered environment variables reload')
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn('[SSE] Failed to reload environment variables', {
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Workflow variables: reload store after successful set
|
|
||||||
if (
|
|
||||||
targetState === ClientToolCallState.success &&
|
|
||||||
current.name === 'set_global_workflow_variables'
|
|
||||||
) {
|
|
||||||
try {
|
|
||||||
const input = asRecord(current.params)
|
|
||||||
const workflowId =
|
|
||||||
(input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
|
|
||||||
if (workflowId) {
|
|
||||||
useVariablesStore.getState().loadForWorkflow(workflowId)
|
|
||||||
logger.info('[SSE] Triggered workflow variables reload', { workflowId })
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn('[SSE] Failed to reload workflow variables', {
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate API key: update deployment status with the new key
|
|
||||||
if (targetState === ClientToolCallState.success && current.name === 'generate_api_key') {
|
|
||||||
try {
|
|
||||||
const resultPayload = asRecord(
|
|
||||||
data?.result || eventData.result || eventData.data || data?.data
|
|
||||||
)
|
|
||||||
const input = asRecord(current.params)
|
|
||||||
const workflowId =
|
|
||||||
(input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
|
|
||||||
const apiKey = (resultPayload?.apiKey || resultPayload?.key) as string | undefined
|
|
||||||
if (workflowId) {
|
|
||||||
const existingStatus = useWorkflowRegistry
|
|
||||||
.getState()
|
|
||||||
.getWorkflowDeploymentStatus(workflowId)
|
|
||||||
useWorkflowRegistry
|
|
||||||
.getState()
|
|
||||||
.setDeploymentStatus(
|
|
||||||
workflowId,
|
|
||||||
existingStatus?.isDeployed ?? false,
|
|
||||||
existingStatus?.deployedAt,
|
|
||||||
apiKey
|
|
||||||
)
|
|
||||||
logger.info('[SSE] Updated deployment status with API key', {
|
|
||||||
workflowId,
|
|
||||||
hasKey: !!apiKey,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn('[SSE] Failed to hydrate API key status', {
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
@@ -460,16 +397,24 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
: failedDependency || skipped
|
: failedDependency || skipped
|
||||||
? ClientToolCallState.rejected
|
? ClientToolCallState.rejected
|
||||||
: ClientToolCallState.error
|
: ClientToolCallState.error
|
||||||
|
const paramsForBlock =
|
||||||
|
b.toolCall?.id === toolCallId
|
||||||
|
? paramsForCurrentToolCall || b.toolCall?.params
|
||||||
|
: b.toolCall?.params
|
||||||
context.contentBlocks[i] = {
|
context.contentBlocks[i] = {
|
||||||
...b,
|
...b,
|
||||||
toolCall: {
|
toolCall: {
|
||||||
...b.toolCall,
|
...b.toolCall,
|
||||||
|
params: paramsForBlock,
|
||||||
|
ui: uiMetadata || b.toolCall?.ui,
|
||||||
|
execution: executionMetadata || b.toolCall?.execution,
|
||||||
state: targetState,
|
state: targetState,
|
||||||
display: resolveToolDisplay(
|
display: resolveDisplayFromServerUi(
|
||||||
b.toolCall?.name,
|
b.toolCall?.name,
|
||||||
targetState,
|
targetState,
|
||||||
toolCallId,
|
toolCallId,
|
||||||
b.toolCall?.params
|
paramsForBlock,
|
||||||
|
uiMetadata || b.toolCall?.ui
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -483,106 +428,29 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
tool_error: (data, context, get, set) => {
|
'copilot.tool.call': (data, context, get, set) => {
|
||||||
try {
|
|
||||||
const errorData = asRecord(data?.data)
|
|
||||||
const toolCallId: string | undefined =
|
|
||||||
data?.toolCallId || (errorData.id as string | undefined)
|
|
||||||
const failedDependency: boolean = data?.failedDependency === true
|
|
||||||
if (!toolCallId) return
|
|
||||||
const { toolCallsById } = get()
|
|
||||||
const current = toolCallsById[toolCallId]
|
|
||||||
if (current) {
|
|
||||||
if (
|
|
||||||
isRejectedState(current.state) ||
|
|
||||||
isReviewState(current.state) ||
|
|
||||||
isBackgroundState(current.state)
|
|
||||||
) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const targetState = failedDependency
|
|
||||||
? ClientToolCallState.rejected
|
|
||||||
: ClientToolCallState.error
|
|
||||||
const updatedMap = { ...toolCallsById }
|
|
||||||
updatedMap[toolCallId] = {
|
|
||||||
...current,
|
|
||||||
state: targetState,
|
|
||||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
|
||||||
}
|
|
||||||
set({ toolCallsById: updatedMap })
|
|
||||||
}
|
|
||||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
|
||||||
const b = context.contentBlocks[i]
|
|
||||||
if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
|
|
||||||
if (
|
|
||||||
isRejectedState(b.toolCall?.state) ||
|
|
||||||
isReviewState(b.toolCall?.state) ||
|
|
||||||
isBackgroundState(b.toolCall?.state)
|
|
||||||
)
|
|
||||||
break
|
|
||||||
const targetState = failedDependency
|
|
||||||
? ClientToolCallState.rejected
|
|
||||||
: ClientToolCallState.error
|
|
||||||
context.contentBlocks[i] = {
|
|
||||||
...b,
|
|
||||||
toolCall: {
|
|
||||||
...b.toolCall,
|
|
||||||
state: targetState,
|
|
||||||
display: resolveToolDisplay(
|
|
||||||
b.toolCall?.name,
|
|
||||||
targetState,
|
|
||||||
toolCallId,
|
|
||||||
b.toolCall?.params
|
|
||||||
),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
updateStreamingMessage(set, context)
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Failed to process tool_error SSE event', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
},
|
|
||||||
tool_generating: (data, context, get, set) => {
|
|
||||||
const { toolCallId, toolName } = data
|
|
||||||
if (!toolCallId || !toolName) return
|
|
||||||
const { toolCallsById } = get()
|
|
||||||
|
|
||||||
if (!toolCallsById[toolCallId]) {
|
|
||||||
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
|
||||||
const initialState = isAutoAllowed
|
|
||||||
? ClientToolCallState.executing
|
|
||||||
: ClientToolCallState.pending
|
|
||||||
const tc: CopilotToolCall = {
|
|
||||||
id: toolCallId,
|
|
||||||
name: toolName,
|
|
||||||
state: initialState,
|
|
||||||
display: resolveToolDisplay(toolName, initialState, toolCallId),
|
|
||||||
}
|
|
||||||
const updated = { ...toolCallsById, [toolCallId]: tc }
|
|
||||||
set({ toolCallsById: updated })
|
|
||||||
logger.info('[toolCallsById] map updated', updated)
|
|
||||||
|
|
||||||
upsertToolCallBlock(context, tc)
|
|
||||||
updateStreamingMessage(set, context)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
tool_call: (data, context, get, set) => {
|
|
||||||
const toolData = asRecord(data?.data)
|
const toolData = asRecord(data?.data)
|
||||||
const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
|
const id: string | undefined =
|
||||||
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
(toolData.id as string | undefined) ||
|
||||||
|
(toolData.callId as string | undefined) ||
|
||||||
|
data?.toolCallId
|
||||||
|
const name: string | undefined =
|
||||||
|
(toolData.name as string | undefined) ||
|
||||||
|
(toolData.toolName as string | undefined) ||
|
||||||
|
data?.toolName
|
||||||
if (!id) return
|
if (!id) return
|
||||||
const args = toolData.arguments as Record<string, unknown> | undefined
|
const args = toolData.arguments as Record<string, unknown> | undefined
|
||||||
const isPartial = toolData.partial === true
|
const isPartial = toolData.partial === true
|
||||||
|
const uiMetadata = extractToolUiMetadata(toolData)
|
||||||
|
const executionMetadata = extractToolExecutionMetadata(toolData)
|
||||||
|
const serverState = toolData.state
|
||||||
const { toolCallsById } = get()
|
const { toolCallsById } = get()
|
||||||
|
|
||||||
const existing = toolCallsById[id]
|
const existing = toolCallsById[id]
|
||||||
const toolName = name || existing?.name || 'unknown_tool'
|
const toolName = name || existing?.name || 'unknown_tool'
|
||||||
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
let initialState = serverState
|
||||||
let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
|
? mapServerStateToClientState(serverState)
|
||||||
|
: ClientToolCallState.pending
|
||||||
|
|
||||||
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
||||||
if (
|
if (
|
||||||
@@ -597,15 +465,25 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
...existing,
|
...existing,
|
||||||
name: toolName,
|
name: toolName,
|
||||||
state: initialState,
|
state: initialState,
|
||||||
|
ui: uiMetadata || existing.ui,
|
||||||
|
execution: executionMetadata || existing.execution,
|
||||||
...(args ? { params: args } : {}),
|
...(args ? { params: args } : {}),
|
||||||
display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
|
display: resolveDisplayFromServerUi(
|
||||||
|
toolName,
|
||||||
|
initialState,
|
||||||
|
id,
|
||||||
|
args || existing.params,
|
||||||
|
uiMetadata || existing.ui
|
||||||
|
),
|
||||||
}
|
}
|
||||||
: {
|
: {
|
||||||
id,
|
id,
|
||||||
name: toolName,
|
name: toolName,
|
||||||
state: initialState,
|
state: initialState,
|
||||||
|
ui: uiMetadata,
|
||||||
|
execution: executionMetadata,
|
||||||
...(args ? { params: args } : {}),
|
...(args ? { params: args } : {}),
|
||||||
display: resolveToolDisplay(toolName, initialState, id, args),
|
display: resolveDisplayFromServerUi(toolName, initialState, id, args, uiMetadata),
|
||||||
}
|
}
|
||||||
const updated = { ...toolCallsById, [id]: next }
|
const updated = { ...toolCallsById, [id]: next }
|
||||||
set({ toolCallsById: updated })
|
set({ toolCallsById: updated })
|
||||||
@@ -618,20 +496,12 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Auto-allowed tools: send confirmation to the server so it can proceed
|
const shouldInterrupt = next.ui?.showInterrupt === true
|
||||||
// without waiting for the user to click "Allow".
|
|
||||||
if (isAutoAllowed) {
|
|
||||||
sendAutoAcceptConfirmation(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Client-executable run tools: execute on the client for real-time feedback
|
// Client-run capability: execution is delegated to the browser.
|
||||||
// (block pulsing, console logs, stop button). The server defers execution
|
// We run immediately only when no interrupt is required.
|
||||||
// for these tools in interactive mode; the client reports back via mark-complete.
|
if (isClientRunCapability(next) && !shouldInterrupt) {
|
||||||
if (
|
executeRunToolOnClient(id, toolName, args || next.params || {})
|
||||||
CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName) &&
|
|
||||||
initialState === ClientToolCallState.executing
|
|
||||||
) {
|
|
||||||
executeRunToolOnClient(id, toolName, args || existing?.params || {})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// OAuth: dispatch event to open the OAuth connect modal
|
// OAuth: dispatch event to open the OAuth connect modal
|
||||||
@@ -661,7 +531,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
|
|
||||||
return
|
return
|
||||||
},
|
},
|
||||||
reasoning: (data, context, _get, set) => {
|
'copilot.phase.progress': (data, context, _get, set) => {
|
||||||
const phase = (data && (data.phase || data?.data?.phase)) as string | undefined
|
const phase = (data && (data.phase || data?.data?.phase)) as string | undefined
|
||||||
if (phase === 'start') {
|
if (phase === 'start') {
|
||||||
beginThinkingBlock(context)
|
beginThinkingBlock(context)
|
||||||
@@ -678,7 +548,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
appendThinkingContent(context, chunk)
|
appendThinkingContent(context, chunk)
|
||||||
updateStreamingMessage(set, context)
|
updateStreamingMessage(set, context)
|
||||||
},
|
},
|
||||||
content: (data, context, get, set) => {
|
'copilot.content': (data, context, get, set) => {
|
||||||
if (!data.data) return
|
if (!data.data) return
|
||||||
context.pendingContent += data.data
|
context.pendingContent += data.data
|
||||||
|
|
||||||
@@ -893,7 +763,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
updateStreamingMessage(set, context)
|
updateStreamingMessage(set, context)
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
done: (_data, context) => {
|
'copilot.phase.completed': (_data, context) => {
|
||||||
logger.info('[SSE] DONE EVENT RECEIVED', {
|
logger.info('[SSE] DONE EVENT RECEIVED', {
|
||||||
doneEventCount: context.doneEventCount,
|
doneEventCount: context.doneEventCount,
|
||||||
data: _data,
|
data: _data,
|
||||||
@@ -904,7 +774,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
context.streamComplete = true
|
context.streamComplete = true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
error: (data, context, _get, set) => {
|
'copilot.error': (data, context, _get, set) => {
|
||||||
logger.error('Stream error:', data.error)
|
logger.error('Stream error:', data.error)
|
||||||
set((state: CopilotStore) => ({
|
set((state: CopilotStore) => ({
|
||||||
messages: state.messages.map((msg) =>
|
messages: state.messages.map((msg) =>
|
||||||
@@ -919,6 +789,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}))
|
}))
|
||||||
context.streamComplete = true
|
context.streamComplete = true
|
||||||
},
|
},
|
||||||
|
'copilot.phase.started': () => {},
|
||||||
stream_end: (_data, context, _get, set) => {
|
stream_end: (_data, context, _get, set) => {
|
||||||
if (context.pendingContent) {
|
if (context.pendingContent) {
|
||||||
if (context.isInThinkingBlock && context.currentThinkingBlock) {
|
if (context.isInThinkingBlock && context.currentThinkingBlock) {
|
||||||
@@ -933,3 +804,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
},
|
},
|
||||||
default: () => {},
|
default: () => {},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
sseHandlers['copilot.tool.interrupt_required'] = sseHandlers['copilot.tool.call']
|
||||||
|
sseHandlers['copilot.workflow.patch'] = sseHandlers['copilot.tool.result']
|
||||||
|
sseHandlers['copilot.workflow.verify'] = sseHandlers['copilot.tool.result']
|
||||||
|
sseHandlers['copilot.tool.interrupt_resolved'] = sseHandlers['copilot.tool.result']
|
||||||
|
|||||||
@@ -15,10 +15,7 @@ const logger = createLogger('CopilotRunToolExecution')
|
|||||||
* (block pulsing, logs, stop button, etc.).
|
* (block pulsing, logs, stop button, etc.).
|
||||||
*/
|
*/
|
||||||
export const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
|
export const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
|
||||||
'run_workflow',
|
'workflow_run',
|
||||||
'run_workflow_until_block',
|
|
||||||
'run_from_block',
|
|
||||||
'run_block',
|
|
||||||
])
|
])
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -74,21 +71,44 @@ async function doExecuteRunTool(
|
|||||||
| Record<string, unknown>
|
| Record<string, unknown>
|
||||||
| undefined
|
| undefined
|
||||||
|
|
||||||
|
const runMode =
|
||||||
|
toolName === 'workflow_run' ? ((params.mode as string | undefined) || 'full').toLowerCase() : undefined
|
||||||
|
|
||||||
|
if (
|
||||||
|
toolName === 'workflow_run' &&
|
||||||
|
runMode !== 'full' &&
|
||||||
|
runMode !== 'until_block' &&
|
||||||
|
runMode !== 'from_block' &&
|
||||||
|
runMode !== 'block'
|
||||||
|
) {
|
||||||
|
const error = `Unsupported workflow_run mode: ${String(params.mode)}`
|
||||||
|
logger.warn('[RunTool] Execution prevented: unsupported workflow_run mode', {
|
||||||
|
toolCallId,
|
||||||
|
mode: params.mode,
|
||||||
|
})
|
||||||
|
setToolState(toolCallId, ClientToolCallState.error)
|
||||||
|
await reportCompletion(toolCallId, false, error)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
const stopAfterBlockId = (() => {
|
const stopAfterBlockId = (() => {
|
||||||
if (toolName === 'run_workflow_until_block')
|
if (toolName === 'workflow_run' && runMode === 'until_block') {
|
||||||
return params.stopAfterBlockId as string | undefined
|
return params.stopAfterBlockId as string | undefined
|
||||||
if (toolName === 'run_block') return params.blockId as string | undefined
|
}
|
||||||
|
if (toolName === 'workflow_run' && runMode === 'block') {
|
||||||
|
return params.blockId as string | undefined
|
||||||
|
}
|
||||||
return undefined
|
return undefined
|
||||||
})()
|
})()
|
||||||
|
|
||||||
const runFromBlock = (() => {
|
const runFromBlock = (() => {
|
||||||
if (toolName === 'run_from_block' && params.startBlockId) {
|
if (toolName === 'workflow_run' && runMode === 'from_block' && params.startBlockId) {
|
||||||
return {
|
return {
|
||||||
startBlockId: params.startBlockId as string,
|
startBlockId: params.startBlockId as string,
|
||||||
executionId: (params.executionId as string | undefined) || 'latest',
|
executionId: (params.executionId as string | undefined) || 'latest',
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (toolName === 'run_block' && params.blockId) {
|
if (toolName === 'workflow_run' && runMode === 'block' && params.blockId) {
|
||||||
return {
|
return {
|
||||||
startBlockId: params.blockId as string,
|
startBlockId: params.blockId as string,
|
||||||
executionId: (params.executionId as string | undefined) || 'latest',
|
executionId: (params.executionId as string | undefined) || 'latest',
|
||||||
|
|||||||
172
apps/sim/lib/copilot/client-sse/subagent-handlers.test.ts
Normal file
172
apps/sim/lib/copilot/client-sse/subagent-handlers.test.ts
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
/**
|
||||||
|
* @vitest-environment node
|
||||||
|
*/
|
||||||
|
import { describe, expect, it, vi } from 'vitest'
|
||||||
|
import { applySseEvent } from '@/lib/copilot/client-sse/subagent-handlers'
|
||||||
|
import type { ClientStreamingContext } from '@/lib/copilot/client-sse/types'
|
||||||
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
|
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
|
||||||
|
type StoreSet = (
|
||||||
|
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
||||||
|
) => void
|
||||||
|
|
||||||
|
function createTestStore(initialToolCalls: Record<string, CopilotToolCall>) {
|
||||||
|
const state: Partial<CopilotStore> = {
|
||||||
|
messages: [{ id: 'assistant-msg', role: 'assistant', content: '', timestamp: new Date().toISOString() }],
|
||||||
|
toolCallsById: { ...initialToolCalls },
|
||||||
|
currentChat: null,
|
||||||
|
chats: [],
|
||||||
|
activeStream: null,
|
||||||
|
updatePlanTodoStatus: vi.fn(),
|
||||||
|
handleNewChatCreation: vi.fn().mockResolvedValue(undefined),
|
||||||
|
}
|
||||||
|
|
||||||
|
const get = () => state as CopilotStore
|
||||||
|
const set: StoreSet = (partial) => {
|
||||||
|
const patch = typeof partial === 'function' ? partial(get()) : partial
|
||||||
|
Object.assign(state, patch)
|
||||||
|
}
|
||||||
|
|
||||||
|
return { get, set }
|
||||||
|
}
|
||||||
|
|
||||||
|
function createStreamingContext(): ClientStreamingContext {
|
||||||
|
return {
|
||||||
|
messageId: 'assistant-msg',
|
||||||
|
accumulatedContent: '',
|
||||||
|
contentBlocks: [],
|
||||||
|
currentTextBlock: null,
|
||||||
|
isInThinkingBlock: false,
|
||||||
|
currentThinkingBlock: null,
|
||||||
|
isInDesignWorkflowBlock: false,
|
||||||
|
designWorkflowContent: '',
|
||||||
|
pendingContent: '',
|
||||||
|
doneEventCount: 0,
|
||||||
|
streamComplete: false,
|
||||||
|
subAgentContent: {},
|
||||||
|
subAgentToolCalls: {},
|
||||||
|
subAgentBlocks: {},
|
||||||
|
suppressStreamingUpdates: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('client SSE copilot.* stream smoke', () => {
|
||||||
|
it('processes main tool call/result events with copilot.* keys', async () => {
|
||||||
|
const { get, set } = createTestStore({})
|
||||||
|
const context = createStreamingContext()
|
||||||
|
|
||||||
|
await applySseEvent(
|
||||||
|
{
|
||||||
|
type: 'copilot.tool.call',
|
||||||
|
data: { id: 'main-tool-1', name: 'get_user_workflow', state: 'executing', arguments: {} },
|
||||||
|
} as any,
|
||||||
|
context,
|
||||||
|
get,
|
||||||
|
set
|
||||||
|
)
|
||||||
|
|
||||||
|
await applySseEvent(
|
||||||
|
{
|
||||||
|
type: 'copilot.tool.result',
|
||||||
|
toolCallId: 'main-tool-1',
|
||||||
|
success: true,
|
||||||
|
result: { ok: true },
|
||||||
|
data: {
|
||||||
|
id: 'main-tool-1',
|
||||||
|
name: 'get_user_workflow',
|
||||||
|
phase: 'completed',
|
||||||
|
state: 'success',
|
||||||
|
success: true,
|
||||||
|
result: { ok: true },
|
||||||
|
},
|
||||||
|
} as any,
|
||||||
|
context,
|
||||||
|
get,
|
||||||
|
set
|
||||||
|
)
|
||||||
|
|
||||||
|
expect(get().toolCallsById['main-tool-1']).toBeDefined()
|
||||||
|
expect(get().toolCallsById['main-tool-1'].state).toBe(ClientToolCallState.success)
|
||||||
|
expect(
|
||||||
|
context.contentBlocks.some(
|
||||||
|
(block) => block.type === 'tool_call' && block.toolCall?.id === 'main-tool-1'
|
||||||
|
)
|
||||||
|
).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('processes subagent start/tool/result/end with copilot.* keys', async () => {
|
||||||
|
const parentToolCallId = 'parent-edit-tool'
|
||||||
|
const { get, set } = createTestStore({
|
||||||
|
[parentToolCallId]: {
|
||||||
|
id: parentToolCallId,
|
||||||
|
name: 'edit',
|
||||||
|
state: ClientToolCallState.executing,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
const context = createStreamingContext()
|
||||||
|
|
||||||
|
await applySseEvent(
|
||||||
|
{
|
||||||
|
type: 'copilot.subagent.started',
|
||||||
|
subagent: 'edit',
|
||||||
|
data: { tool_call_id: parentToolCallId },
|
||||||
|
} as any,
|
||||||
|
context,
|
||||||
|
get,
|
||||||
|
set
|
||||||
|
)
|
||||||
|
|
||||||
|
await applySseEvent(
|
||||||
|
{
|
||||||
|
type: 'copilot.tool.call',
|
||||||
|
subagent: 'edit',
|
||||||
|
data: {
|
||||||
|
id: 'sub-tool-1',
|
||||||
|
name: 'workflow_context_get',
|
||||||
|
state: 'executing',
|
||||||
|
arguments: { includeSchemas: false },
|
||||||
|
},
|
||||||
|
} as any,
|
||||||
|
context,
|
||||||
|
get,
|
||||||
|
set
|
||||||
|
)
|
||||||
|
|
||||||
|
await applySseEvent(
|
||||||
|
{
|
||||||
|
type: 'copilot.tool.result',
|
||||||
|
subagent: 'edit',
|
||||||
|
data: {
|
||||||
|
id: 'sub-tool-1',
|
||||||
|
name: 'workflow_context_get',
|
||||||
|
phase: 'completed',
|
||||||
|
state: 'success',
|
||||||
|
success: true,
|
||||||
|
result: { contextPackId: 'pack-1' },
|
||||||
|
},
|
||||||
|
} as any,
|
||||||
|
context,
|
||||||
|
get,
|
||||||
|
set
|
||||||
|
)
|
||||||
|
|
||||||
|
await applySseEvent(
|
||||||
|
{
|
||||||
|
type: 'copilot.subagent.completed',
|
||||||
|
subagent: 'edit',
|
||||||
|
data: {},
|
||||||
|
} as any,
|
||||||
|
context,
|
||||||
|
get,
|
||||||
|
set
|
||||||
|
)
|
||||||
|
|
||||||
|
const parentToolCall = get().toolCallsById[parentToolCallId]
|
||||||
|
expect(parentToolCall).toBeDefined()
|
||||||
|
expect(parentToolCall.subAgentStreaming).toBe(false)
|
||||||
|
expect(parentToolCall.subAgentToolCalls?.length).toBe(1)
|
||||||
|
expect(parentToolCall.subAgentToolCalls?.[0]?.id).toBe('sub-tool-1')
|
||||||
|
expect(parentToolCall.subAgentToolCalls?.[0]?.state).toBe(ClientToolCallState.success)
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -6,16 +6,23 @@ import {
|
|||||||
shouldSkipToolResultEvent,
|
shouldSkipToolResultEvent,
|
||||||
} from '@/lib/copilot/orchestrator/sse-utils'
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
import { resolveToolDisplay } from '@/lib/copilot/store-utils'
|
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
import {
|
import {
|
||||||
type SSEHandler,
|
type SSEHandler,
|
||||||
sendAutoAcceptConfirmation,
|
|
||||||
sseHandlers,
|
sseHandlers,
|
||||||
updateStreamingMessage,
|
updateStreamingMessage,
|
||||||
} from './handlers'
|
} from './handlers'
|
||||||
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
|
||||||
|
import {
|
||||||
|
extractOperationListFromResultPayload,
|
||||||
|
extractToolExecutionMetadata,
|
||||||
|
extractToolUiMetadata,
|
||||||
|
isWorkflowChangeApplyCall,
|
||||||
|
mapServerStateToClientState,
|
||||||
|
resolveDisplayFromServerUi,
|
||||||
|
} from './tool-call-helpers'
|
||||||
|
import { applyToolEffects } from './tool-effects'
|
||||||
import type { ClientStreamingContext } from './types'
|
import type { ClientStreamingContext } from './types'
|
||||||
|
|
||||||
const logger = createLogger('CopilotClientSubagentHandlers')
|
const logger = createLogger('CopilotClientSubagentHandlers')
|
||||||
@@ -24,6 +31,13 @@ type StoreSet = (
|
|||||||
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
||||||
) => void
|
) => void
|
||||||
|
|
||||||
|
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
|
||||||
|
if (toolCall.execution?.target === 'sim_client_capability') {
|
||||||
|
return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
|
||||||
|
}
|
||||||
|
return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
|
||||||
|
}
|
||||||
|
|
||||||
export function appendSubAgentContent(
|
export function appendSubAgentContent(
|
||||||
context: ClientStreamingContext,
|
context: ClientStreamingContext,
|
||||||
parentToolCallId: string,
|
parentToolCallId: string,
|
||||||
@@ -110,11 +124,11 @@ export function updateToolCallWithSubAgentData(
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||||
start: () => {
|
'copilot.phase.started': () => {
|
||||||
// Subagent start event - no action needed, parent is already tracked from subagent_start
|
// No-op: parent subagent association is handled by copilot.subagent.started.
|
||||||
},
|
},
|
||||||
|
|
||||||
content: (data, context, get, set) => {
|
'copilot.content': (data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
const contentStr = typeof data.data === 'string' ? data.data : data.content || ''
|
const contentStr = typeof data.data === 'string' ? data.data : data.content || ''
|
||||||
logger.info('[SubAgent] content event', {
|
logger.info('[SubAgent] content event', {
|
||||||
@@ -135,7 +149,7 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
},
|
},
|
||||||
|
|
||||||
reasoning: (data, context, get, set) => {
|
'copilot.phase.progress': (data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
const dataObj = asRecord(data?.data)
|
const dataObj = asRecord(data?.data)
|
||||||
const phase = data?.phase || (dataObj.phase as string | undefined)
|
const phase = data?.phase || (dataObj.phase as string | undefined)
|
||||||
@@ -151,11 +165,7 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
},
|
},
|
||||||
|
|
||||||
tool_generating: () => {
|
'copilot.tool.call': async (data, context, get, set) => {
|
||||||
// Tool generating event - no action needed, we'll handle the actual tool_call
|
|
||||||
},
|
|
||||||
|
|
||||||
tool_call: async (data, context, get, set) => {
|
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
@@ -164,6 +174,8 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||||
if (!id || !name) return
|
if (!id || !name) return
|
||||||
const isPartial = toolData.partial === true
|
const isPartial = toolData.partial === true
|
||||||
|
const uiMetadata = extractToolUiMetadata(toolData)
|
||||||
|
const executionMetadata = extractToolExecutionMetadata(toolData)
|
||||||
|
|
||||||
let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
|
let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
|
||||||
| Record<string, unknown>
|
| Record<string, unknown>
|
||||||
@@ -199,9 +211,10 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
const existingToolCall =
|
const existingToolCall =
|
||||||
existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined
|
existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined
|
||||||
|
|
||||||
// Auto-allowed tools skip pending state to avoid flashing interrupt buttons
|
const serverState = toolData.state
|
||||||
const isAutoAllowed = get().isToolAutoAllowed(name)
|
let initialState = serverState
|
||||||
let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending
|
? mapServerStateToClientState(serverState)
|
||||||
|
: ClientToolCallState.pending
|
||||||
|
|
||||||
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
// Avoid flickering back to pending on partial/duplicate events once a tool is executing.
|
||||||
if (
|
if (
|
||||||
@@ -215,8 +228,10 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
id,
|
id,
|
||||||
name,
|
name,
|
||||||
state: initialState,
|
state: initialState,
|
||||||
|
ui: uiMetadata,
|
||||||
|
execution: executionMetadata,
|
||||||
...(args ? { params: args } : {}),
|
...(args ? { params: args } : {}),
|
||||||
display: resolveToolDisplay(name, initialState, id, args),
|
display: resolveDisplayFromServerUi(name, initialState, id, args, uiMetadata),
|
||||||
}
|
}
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
@@ -241,21 +256,16 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Auto-allowed tools: send confirmation to the server so it can proceed
|
const shouldInterrupt = subAgentToolCall.ui?.showInterrupt === true
|
||||||
// without waiting for the user to click "Allow".
|
|
||||||
if (isAutoAllowed) {
|
|
||||||
sendAutoAcceptConfirmation(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Client-executable run tools: if auto-allowed, execute immediately for
|
// Client-run capability: execution is delegated to the browser.
|
||||||
// real-time feedback. For non-auto-allowed, the user must click "Allow"
|
// Execute immediately only for non-interrupting calls.
|
||||||
// first — handleRun in tool-call.tsx triggers executeRunToolOnClient.
|
if (isClientRunCapability(subAgentToolCall) && !shouldInterrupt) {
|
||||||
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(name) && isAutoAllowed) {
|
|
||||||
executeRunToolOnClient(id, name, args || {})
|
executeRunToolOnClient(id, name, args || {})
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
tool_result: (data, context, get, set) => {
|
'copilot.tool.result': (data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
@@ -275,17 +285,51 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
if (!context.subAgentToolCalls[parentToolCallId]) return
|
if (!context.subAgentToolCalls[parentToolCallId]) return
|
||||||
if (!context.subAgentBlocks[parentToolCallId]) return
|
if (!context.subAgentBlocks[parentToolCallId]) return
|
||||||
|
|
||||||
const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
|
const serverState = resultData.state
|
||||||
|
const targetState = serverState
|
||||||
|
? mapServerStateToClientState(serverState)
|
||||||
|
: success
|
||||||
|
? ClientToolCallState.success
|
||||||
|
: ClientToolCallState.error
|
||||||
|
const uiMetadata = extractToolUiMetadata(resultData)
|
||||||
|
const executionMetadata = extractToolExecutionMetadata(resultData)
|
||||||
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||||
(tc: CopilotToolCall) => tc.id === toolCallId
|
(tc: CopilotToolCall) => tc.id === toolCallId
|
||||||
)
|
)
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
||||||
|
let nextParams = existing.params
|
||||||
|
const resultPayload = asRecord(
|
||||||
|
data?.result || resultData.result || resultData.data || data?.data
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
isWorkflowChangeApplyCall(existing.name, existing.params as Record<string, unknown>) &&
|
||||||
|
resultPayload
|
||||||
|
) {
|
||||||
|
const operations = extractOperationListFromResultPayload(resultPayload)
|
||||||
|
if (operations && operations.length > 0) {
|
||||||
|
nextParams = {
|
||||||
|
...(existing.params || {}),
|
||||||
|
operations,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const updatedSubAgentToolCall = {
|
const updatedSubAgentToolCall = {
|
||||||
...existing,
|
...existing,
|
||||||
|
params: nextParams,
|
||||||
|
ui: uiMetadata || existing.ui,
|
||||||
|
execution: executionMetadata || existing.execution,
|
||||||
state: targetState,
|
state: targetState,
|
||||||
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
|
display: resolveDisplayFromServerUi(
|
||||||
|
existing.name,
|
||||||
|
targetState,
|
||||||
|
toolCallId,
|
||||||
|
nextParams,
|
||||||
|
uiMetadata || existing.ui
|
||||||
|
),
|
||||||
}
|
}
|
||||||
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
||||||
|
|
||||||
@@ -309,12 +353,18 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
state: targetState,
|
state: targetState,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
applyToolEffects({
|
||||||
|
effectsRaw: resultData.effects,
|
||||||
|
toolCall: updatedSubAgentToolCall,
|
||||||
|
resultPayload,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
},
|
},
|
||||||
|
|
||||||
done: (_data, context, get, set) => {
|
'copilot.phase.completed': (_data, context, get, set) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
@@ -322,6 +372,11 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
subAgentSSEHandlers['copilot.tool.interrupt_required'] = subAgentSSEHandlers['copilot.tool.call']
|
||||||
|
subAgentSSEHandlers['copilot.workflow.patch'] = subAgentSSEHandlers['copilot.tool.result']
|
||||||
|
subAgentSSEHandlers['copilot.workflow.verify'] = subAgentSSEHandlers['copilot.tool.result']
|
||||||
|
subAgentSSEHandlers['copilot.tool.interrupt_resolved'] = subAgentSSEHandlers['copilot.tool.result']
|
||||||
|
|
||||||
export async function applySseEvent(
|
export async function applySseEvent(
|
||||||
rawData: SSEEvent,
|
rawData: SSEEvent,
|
||||||
context: ClientStreamingContext,
|
context: ClientStreamingContext,
|
||||||
@@ -334,7 +389,7 @@ export async function applySseEvent(
|
|||||||
}
|
}
|
||||||
const data = normalizedEvent
|
const data = normalizedEvent
|
||||||
|
|
||||||
if (data.type === 'subagent_start') {
|
if (data.type === 'copilot.subagent.started') {
|
||||||
const startData = asRecord(data.data)
|
const startData = asRecord(data.data)
|
||||||
const toolCallId = startData.tool_call_id as string | undefined
|
const toolCallId = startData.tool_call_id as string | undefined
|
||||||
if (toolCallId) {
|
if (toolCallId) {
|
||||||
@@ -357,7 +412,7 @@ export async function applySseEvent(
|
|||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
if (data.type === 'subagent_end') {
|
if (data.type === 'copilot.subagent.completed') {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (parentToolCallId) {
|
if (parentToolCallId) {
|
||||||
const { toolCallsById } = get()
|
const { toolCallsById } = get()
|
||||||
|
|||||||
134
apps/sim/lib/copilot/client-sse/tool-call-helpers.ts
Normal file
134
apps/sim/lib/copilot/client-sse/tool-call-helpers.ts
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import { humanizedFallback, resolveToolDisplay } from '@/lib/copilot/store-utils'
|
||||||
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
|
import type { CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
|
||||||
|
export function mapServerStateToClientState(state: unknown): ClientToolCallState {
|
||||||
|
switch (String(state || '')) {
|
||||||
|
case 'generating':
|
||||||
|
return ClientToolCallState.generating
|
||||||
|
case 'pending':
|
||||||
|
case 'awaiting_approval':
|
||||||
|
return ClientToolCallState.pending
|
||||||
|
case 'executing':
|
||||||
|
return ClientToolCallState.executing
|
||||||
|
case 'success':
|
||||||
|
return ClientToolCallState.success
|
||||||
|
case 'rejected':
|
||||||
|
case 'skipped':
|
||||||
|
return ClientToolCallState.rejected
|
||||||
|
case 'aborted':
|
||||||
|
return ClientToolCallState.aborted
|
||||||
|
case 'error':
|
||||||
|
case 'failed':
|
||||||
|
return ClientToolCallState.error
|
||||||
|
default:
|
||||||
|
return ClientToolCallState.pending
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function extractToolUiMetadata(
|
||||||
|
data: Record<string, unknown>
|
||||||
|
): CopilotToolCall['ui'] | undefined {
|
||||||
|
const ui = asRecord(data.ui)
|
||||||
|
if (!ui || Object.keys(ui).length === 0) return undefined
|
||||||
|
const autoAllowedFromUi = ui.autoAllowed === true
|
||||||
|
const autoAllowedFromData = data.autoAllowed === true
|
||||||
|
return {
|
||||||
|
title: typeof ui.title === 'string' ? ui.title : undefined,
|
||||||
|
phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
|
||||||
|
icon: typeof ui.icon === 'string' ? ui.icon : undefined,
|
||||||
|
showInterrupt: ui.showInterrupt === true,
|
||||||
|
showRemember: ui.showRemember === true,
|
||||||
|
autoAllowed: autoAllowedFromUi || autoAllowedFromData,
|
||||||
|
actions: Array.isArray(ui.actions)
|
||||||
|
? ui.actions
|
||||||
|
.map((action) => {
|
||||||
|
const a = asRecord(action)
|
||||||
|
const id = typeof a.id === 'string' ? a.id : undefined
|
||||||
|
const label = typeof a.label === 'string' ? a.label : undefined
|
||||||
|
const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
|
||||||
|
if (!id || !label) return null
|
||||||
|
return {
|
||||||
|
id,
|
||||||
|
label,
|
||||||
|
kind,
|
||||||
|
remember: a.remember === true,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.filter((a): a is NonNullable<typeof a> => !!a)
|
||||||
|
: undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function extractToolExecutionMetadata(
|
||||||
|
data: Record<string, unknown>
|
||||||
|
): CopilotToolCall['execution'] | undefined {
|
||||||
|
const execution = asRecord(data.execution)
|
||||||
|
if (!execution || Object.keys(execution).length === 0) return undefined
|
||||||
|
return {
|
||||||
|
target: typeof execution.target === 'string' ? execution.target : undefined,
|
||||||
|
capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function displayVerb(state: ClientToolCallState): string {
|
||||||
|
switch (state) {
|
||||||
|
case ClientToolCallState.success:
|
||||||
|
return 'Completed'
|
||||||
|
case ClientToolCallState.error:
|
||||||
|
return 'Failed'
|
||||||
|
case ClientToolCallState.rejected:
|
||||||
|
return 'Skipped'
|
||||||
|
case ClientToolCallState.aborted:
|
||||||
|
return 'Aborted'
|
||||||
|
case ClientToolCallState.generating:
|
||||||
|
return 'Preparing'
|
||||||
|
case ClientToolCallState.pending:
|
||||||
|
return 'Waiting'
|
||||||
|
default:
|
||||||
|
return 'Running'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function resolveDisplayFromServerUi(
|
||||||
|
toolName: string,
|
||||||
|
state: ClientToolCallState,
|
||||||
|
toolCallId: string,
|
||||||
|
params: Record<string, unknown> | undefined,
|
||||||
|
ui?: CopilotToolCall['ui']
|
||||||
|
) {
|
||||||
|
const fallback =
|
||||||
|
resolveToolDisplay(toolName, state, toolCallId, params) ||
|
||||||
|
humanizedFallback(toolName, state)
|
||||||
|
if (!fallback) return undefined
|
||||||
|
if (ui?.phaseLabel) {
|
||||||
|
return { text: ui.phaseLabel, icon: fallback.icon }
|
||||||
|
}
|
||||||
|
if (ui?.title) {
|
||||||
|
return { text: `${displayVerb(state)} ${ui.title}`, icon: fallback.icon }
|
||||||
|
}
|
||||||
|
return fallback
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isWorkflowChangeApplyCall(
|
||||||
|
toolName?: string,
|
||||||
|
params?: Record<string, unknown>
|
||||||
|
): boolean {
|
||||||
|
if (toolName !== 'workflow_change') return false
|
||||||
|
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||||
|
if (mode === 'apply') return true
|
||||||
|
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
export function extractOperationListFromResultPayload(
|
||||||
|
resultPayload: Record<string, unknown>
|
||||||
|
): Array<Record<string, unknown>> | undefined {
|
||||||
|
const operations = resultPayload.operations
|
||||||
|
if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>
|
||||||
|
|
||||||
|
const compiled = resultPayload.compiledOperations
|
||||||
|
if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>
|
||||||
|
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
170
apps/sim/lib/copilot/client-sse/tool-effects.test.ts
Normal file
170
apps/sim/lib/copilot/client-sse/tool-effects.test.ts
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
/**
|
||||||
|
* @vitest-environment node
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { loggerMock } from '@sim/testing'
|
||||||
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
|
vi.mock('@sim/logger', () => loggerMock)
|
||||||
|
|
||||||
|
const mocked = vi.hoisted(() => ({
|
||||||
|
setProposedChanges: vi.fn().mockResolvedValue(undefined),
|
||||||
|
loadEnvironmentVariables: vi.fn(),
|
||||||
|
loadVariablesForWorkflow: vi.fn(),
|
||||||
|
getWorkflowDeploymentStatus: vi.fn().mockReturnValue(null),
|
||||||
|
setDeploymentStatus: vi.fn(),
|
||||||
|
registryState: {
|
||||||
|
activeWorkflowId: 'workflow-active',
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
|
||||||
|
vi.mock('@/stores/workflow-diff/store', () => ({
|
||||||
|
useWorkflowDiffStore: {
|
||||||
|
getState: () => ({
|
||||||
|
setProposedChanges: mocked.setProposedChanges,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
|
||||||
|
vi.mock('@/stores/settings/environment/store', () => ({
|
||||||
|
useEnvironmentStore: {
|
||||||
|
getState: () => ({
|
||||||
|
loadEnvironmentVariables: mocked.loadEnvironmentVariables,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
|
||||||
|
vi.mock('@/stores/panel/variables/store', () => ({
|
||||||
|
useVariablesStore: {
|
||||||
|
getState: () => ({
|
||||||
|
loadForWorkflow: mocked.loadVariablesForWorkflow,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
|
||||||
|
vi.mock('@/stores/workflows/registry/store', () => ({
|
||||||
|
useWorkflowRegistry: {
|
||||||
|
getState: () => ({
|
||||||
|
activeWorkflowId: mocked.registryState.activeWorkflowId,
|
||||||
|
getWorkflowDeploymentStatus: mocked.getWorkflowDeploymentStatus,
|
||||||
|
setDeploymentStatus: mocked.setDeploymentStatus,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
|
||||||
|
import { applyToolEffects } from '@/lib/copilot/client-sse/tool-effects'
|
||||||
|
|
||||||
|
describe('applyToolEffects', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks()
|
||||||
|
mocked.registryState.activeWorkflowId = 'workflow-active'
|
||||||
|
})
|
||||||
|
|
||||||
|
it('applies workflow_change fallback diff when effects are absent', () => {
|
||||||
|
const workflowState = {
|
||||||
|
blocks: {
|
||||||
|
start: { id: 'start', metadata: { id: 'start', type: 'start' }, inputs: {}, outputs: {} },
|
||||||
|
},
|
||||||
|
edges: [],
|
||||||
|
loops: {},
|
||||||
|
parallels: {},
|
||||||
|
}
|
||||||
|
|
||||||
|
applyToolEffects({
|
||||||
|
effectsRaw: [],
|
||||||
|
toolCall: {
|
||||||
|
id: 'tool-1',
|
||||||
|
name: 'workflow_change',
|
||||||
|
state: 'success',
|
||||||
|
params: { workflowId: 'workflow-123' },
|
||||||
|
} as any,
|
||||||
|
resultPayload: {
|
||||||
|
workflowState,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(mocked.setProposedChanges).toHaveBeenCalledTimes(1)
|
||||||
|
expect(mocked.setProposedChanges).toHaveBeenCalledWith(workflowState)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('applies workflow_change fallback diff from nested editResult.workflowState', () => {
|
||||||
|
const workflowState = {
|
||||||
|
blocks: {
|
||||||
|
start: { id: 'start', metadata: { id: 'start', type: 'start' }, inputs: {}, outputs: {} },
|
||||||
|
},
|
||||||
|
edges: [],
|
||||||
|
loops: {},
|
||||||
|
parallels: {},
|
||||||
|
}
|
||||||
|
|
||||||
|
applyToolEffects({
|
||||||
|
effectsRaw: [],
|
||||||
|
toolCall: {
|
||||||
|
id: 'tool-2',
|
||||||
|
name: 'workflow_change',
|
||||||
|
state: 'success',
|
||||||
|
} as any,
|
||||||
|
resultPayload: {
|
||||||
|
editResult: {
|
||||||
|
workflowState,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(mocked.setProposedChanges).toHaveBeenCalledTimes(1)
|
||||||
|
expect(mocked.setProposedChanges).toHaveBeenCalledWith(workflowState)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('applies explicit workflow.diff.proposed effect', () => {
|
||||||
|
const workflowState = {
|
||||||
|
blocks: {
|
||||||
|
start: { id: 'start', metadata: { id: 'start', type: 'start' }, inputs: {}, outputs: {} },
|
||||||
|
},
|
||||||
|
edges: [],
|
||||||
|
loops: {},
|
||||||
|
parallels: {},
|
||||||
|
}
|
||||||
|
|
||||||
|
applyToolEffects({
|
||||||
|
effectsRaw: [
|
||||||
|
{
|
||||||
|
kind: 'workflow.diff.proposed',
|
||||||
|
payload: {
|
||||||
|
workflowState,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
toolCall: {
|
||||||
|
id: 'tool-3',
|
||||||
|
name: 'workflow_change',
|
||||||
|
state: 'success',
|
||||||
|
} as any,
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(mocked.setProposedChanges).toHaveBeenCalledTimes(1)
|
||||||
|
expect(mocked.setProposedChanges).toHaveBeenCalledWith(workflowState)
|
||||||
|
})
|
||||||
|
|
||||||
|
it('does not apply fallback diff for non-workflow_change tools', () => {
|
||||||
|
const workflowState = {
|
||||||
|
blocks: {},
|
||||||
|
edges: [],
|
||||||
|
loops: {},
|
||||||
|
parallels: {},
|
||||||
|
}
|
||||||
|
|
||||||
|
applyToolEffects({
|
||||||
|
effectsRaw: [],
|
||||||
|
toolCall: {
|
||||||
|
id: 'tool-4',
|
||||||
|
name: 'list_workflows',
|
||||||
|
state: 'success',
|
||||||
|
} as any,
|
||||||
|
resultPayload: {
|
||||||
|
workflowState,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(mocked.setProposedChanges).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
180
apps/sim/lib/copilot/client-sse/tool-effects.ts
Normal file
180
apps/sim/lib/copilot/client-sse/tool-effects.ts
Normal file
@@ -0,0 +1,180 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import type { CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||||
|
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
||||||
|
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||||
|
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||||
|
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotToolEffects')
|
||||||
|
|
||||||
|
type ParsedToolEffect = {
|
||||||
|
kind: string
|
||||||
|
payload: Record<string, unknown>
|
||||||
|
}
|
||||||
|
|
||||||
|
function asNonEmptyRecord(value: unknown): Record<string, unknown> | null {
|
||||||
|
const record = asRecord(value)
|
||||||
|
return Object.keys(record).length > 0 ? record : null
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseToolEffects(raw: unknown): ParsedToolEffect[] {
|
||||||
|
if (!Array.isArray(raw)) return []
|
||||||
|
const effects: ParsedToolEffect[] = []
|
||||||
|
for (const item of raw) {
|
||||||
|
const effect = asRecord(item)
|
||||||
|
const kind = typeof effect.kind === 'string' ? effect.kind : ''
|
||||||
|
if (!kind) continue
|
||||||
|
effects.push({
|
||||||
|
kind,
|
||||||
|
payload: asRecord(effect.payload) || {},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return effects
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveWorkflowId(
|
||||||
|
payload: Record<string, unknown>,
|
||||||
|
toolCall?: CopilotToolCall
|
||||||
|
): string | undefined {
|
||||||
|
const payloadWorkflowId = typeof payload.workflowId === 'string' ? payload.workflowId : undefined
|
||||||
|
if (payloadWorkflowId) return payloadWorkflowId
|
||||||
|
|
||||||
|
const params = asRecord(toolCall?.params)
|
||||||
|
const paramWorkflowId = typeof params?.workflowId === 'string' ? params.workflowId : undefined
|
||||||
|
if (paramWorkflowId) return paramWorkflowId
|
||||||
|
|
||||||
|
return useWorkflowRegistry.getState().activeWorkflowId || undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveWorkflowState(
|
||||||
|
payload: Record<string, unknown>,
|
||||||
|
resultPayload?: Record<string, unknown>
|
||||||
|
): WorkflowState | null {
|
||||||
|
const payloadState = asNonEmptyRecord(payload.workflowState)
|
||||||
|
if (payloadState) return payloadState as unknown as WorkflowState
|
||||||
|
|
||||||
|
if (resultPayload) {
|
||||||
|
const directState = asNonEmptyRecord(resultPayload.workflowState)
|
||||||
|
if (directState) return directState as unknown as WorkflowState
|
||||||
|
const editResult = asRecord(resultPayload.editResult)
|
||||||
|
const nestedState = asNonEmptyRecord(editResult?.workflowState)
|
||||||
|
if (nestedState) return nestedState as unknown as WorkflowState
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
function applyDeploymentSyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
|
||||||
|
const workflowId = resolveWorkflowId(payload, toolCall)
|
||||||
|
if (!workflowId) return
|
||||||
|
|
||||||
|
const registry = useWorkflowRegistry.getState()
|
||||||
|
const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)
|
||||||
|
|
||||||
|
const isDeployed =
|
||||||
|
typeof payload.isDeployed === 'boolean'
|
||||||
|
? payload.isDeployed
|
||||||
|
: (existingStatus?.isDeployed ?? true)
|
||||||
|
|
||||||
|
const deployedAt = (() => {
|
||||||
|
if (typeof payload.deployedAt === 'string' && payload.deployedAt) {
|
||||||
|
const parsed = new Date(payload.deployedAt)
|
||||||
|
if (!Number.isNaN(parsed.getTime())) return parsed
|
||||||
|
}
|
||||||
|
return existingStatus?.deployedAt
|
||||||
|
})()
|
||||||
|
|
||||||
|
const apiKey =
|
||||||
|
typeof payload.apiKey === 'string' && payload.apiKey.length > 0
|
||||||
|
? payload.apiKey
|
||||||
|
: existingStatus?.apiKey
|
||||||
|
|
||||||
|
registry.setDeploymentStatus(workflowId, isDeployed, deployedAt, apiKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
function applyApiKeySyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
|
||||||
|
const workflowId = resolveWorkflowId(payload, toolCall)
|
||||||
|
if (!workflowId) return
|
||||||
|
|
||||||
|
const apiKey = typeof payload.apiKey === 'string' ? payload.apiKey : undefined
|
||||||
|
const registry = useWorkflowRegistry.getState()
|
||||||
|
const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)
|
||||||
|
registry.setDeploymentStatus(
|
||||||
|
workflowId,
|
||||||
|
existingStatus?.isDeployed ?? false,
|
||||||
|
existingStatus?.deployedAt,
|
||||||
|
apiKey || existingStatus?.apiKey
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
function applyWorkflowVariablesReload(
|
||||||
|
payload: Record<string, unknown>,
|
||||||
|
toolCall?: CopilotToolCall
|
||||||
|
): void {
|
||||||
|
const workflowId = resolveWorkflowId(payload, toolCall)
|
||||||
|
if (!workflowId) return
|
||||||
|
useVariablesStore.getState().loadForWorkflow(workflowId)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function applyToolEffects(params: {
|
||||||
|
effectsRaw: unknown
|
||||||
|
toolCall?: CopilotToolCall
|
||||||
|
resultPayload?: Record<string, unknown>
|
||||||
|
}): void {
|
||||||
|
const effects = parseToolEffects(params.effectsRaw)
|
||||||
|
if (effects.length === 0) {
|
||||||
|
if (params.toolCall?.name === 'workflow_change' && params.resultPayload) {
|
||||||
|
const workflowState = resolveWorkflowState({}, params.resultPayload)
|
||||||
|
if (!workflowState) return
|
||||||
|
useWorkflowDiffStore
|
||||||
|
.getState()
|
||||||
|
.setProposedChanges(workflowState)
|
||||||
|
.catch((error) => {
|
||||||
|
logger.error('Failed to apply fallback workflow diff from result payload', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const effect of effects) {
|
||||||
|
switch (effect.kind) {
|
||||||
|
case 'workflow.diff.proposed': {
|
||||||
|
const workflowState = resolveWorkflowState(effect.payload, params.resultPayload)
|
||||||
|
if (!workflowState) break
|
||||||
|
useWorkflowDiffStore
|
||||||
|
.getState()
|
||||||
|
.setProposedChanges(workflowState)
|
||||||
|
.catch((error) => {
|
||||||
|
logger.error('Failed to apply workflow diff effect', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'workflow.deployment.sync':
|
||||||
|
applyDeploymentSyncEffect(effect.payload, params.toolCall)
|
||||||
|
break
|
||||||
|
|
||||||
|
case 'workflow.api_key.sync':
|
||||||
|
applyApiKeySyncEffect(effect.payload, params.toolCall)
|
||||||
|
break
|
||||||
|
|
||||||
|
case 'environment.variables.reload':
|
||||||
|
useEnvironmentStore.getState().loadEnvironmentVariables()
|
||||||
|
break
|
||||||
|
|
||||||
|
case 'workflow.variables.reload':
|
||||||
|
applyWorkflowVariablesReload(effect.payload, params.toolCall)
|
||||||
|
break
|
||||||
|
|
||||||
|
default:
|
||||||
|
logger.debug('Ignoring unknown tool effect', { kind: effect.kind })
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -101,9 +101,6 @@ export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'
|
|||||||
/** POST — revert to a checkpoint. */
|
/** POST — revert to a checkpoint. */
|
||||||
export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'
|
export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'
|
||||||
|
|
||||||
/** GET/POST/DELETE — manage auto-allowed tools. */
|
|
||||||
export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
|
|
||||||
|
|
||||||
/** GET — fetch dynamically available copilot models. */
|
/** GET — fetch dynamically available copilot models. */
|
||||||
export const COPILOT_MODELS_API_PATH = '/api/copilot/models'
|
export const COPILOT_MODELS_API_PATH = '/api/copilot/models'
|
||||||
|
|
||||||
|
|||||||
@@ -1,67 +0,0 @@
|
|||||||
export const INTERRUPT_TOOL_NAMES = [
|
|
||||||
'set_global_workflow_variables',
|
|
||||||
'run_workflow',
|
|
||||||
'run_workflow_until_block',
|
|
||||||
'run_from_block',
|
|
||||||
'run_block',
|
|
||||||
'manage_mcp_tool',
|
|
||||||
'manage_custom_tool',
|
|
||||||
'deploy_mcp',
|
|
||||||
'deploy_chat',
|
|
||||||
'deploy_api',
|
|
||||||
'create_workspace_mcp_server',
|
|
||||||
'set_environment_variables',
|
|
||||||
'make_api_request',
|
|
||||||
'oauth_request_access',
|
|
||||||
'navigate_ui',
|
|
||||||
'knowledge_base',
|
|
||||||
'generate_api_key',
|
|
||||||
] as const
|
|
||||||
|
|
||||||
export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)
|
|
||||||
|
|
||||||
export const SUBAGENT_TOOL_NAMES = [
|
|
||||||
'debug',
|
|
||||||
'edit',
|
|
||||||
'build',
|
|
||||||
'plan',
|
|
||||||
'test',
|
|
||||||
'deploy',
|
|
||||||
'auth',
|
|
||||||
'research',
|
|
||||||
'knowledge',
|
|
||||||
'custom_tool',
|
|
||||||
'tour',
|
|
||||||
'info',
|
|
||||||
'workflow',
|
|
||||||
'evaluate',
|
|
||||||
'superagent',
|
|
||||||
'discovery',
|
|
||||||
] as const
|
|
||||||
|
|
||||||
export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Respond tools are internal to the copilot's subagent system.
|
|
||||||
* They're used by subagents to signal completion and should NOT be executed by the sim side.
|
|
||||||
* The copilot backend handles these internally.
|
|
||||||
*/
|
|
||||||
export const RESPOND_TOOL_NAMES = [
|
|
||||||
'plan_respond',
|
|
||||||
'edit_respond',
|
|
||||||
'build_respond',
|
|
||||||
'debug_respond',
|
|
||||||
'info_respond',
|
|
||||||
'research_respond',
|
|
||||||
'deploy_respond',
|
|
||||||
'superagent_respond',
|
|
||||||
'discovery_respond',
|
|
||||||
'tour_respond',
|
|
||||||
'auth_respond',
|
|
||||||
'workflow_respond',
|
|
||||||
'knowledge_respond',
|
|
||||||
'custom_tool_respond',
|
|
||||||
'test_respond',
|
|
||||||
] as const
|
|
||||||
|
|
||||||
export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)
|
|
||||||
@@ -54,14 +54,14 @@ describe('sse-handlers tool lifecycle', () => {
|
|||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
it('executes tool_call and emits tool_result + mark-complete', async () => {
|
it('executes copilot.tool.call and emits copilot.tool.result + mark-complete', async () => {
|
||||||
executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
|
executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
|
||||||
markToolComplete.mockResolvedValueOnce(true)
|
markToolComplete.mockResolvedValueOnce(true)
|
||||||
const onEvent = vi.fn()
|
const onEvent = vi.fn()
|
||||||
|
|
||||||
await sseHandlers.tool_call(
|
await sseHandlers['copilot.tool.call'](
|
||||||
{
|
{
|
||||||
type: 'tool_call',
|
type: 'copilot.tool.call',
|
||||||
data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
||||||
} as any,
|
} as any,
|
||||||
context,
|
context,
|
||||||
@@ -73,7 +73,7 @@ describe('sse-handlers tool lifecycle', () => {
|
|||||||
expect(markToolComplete).toHaveBeenCalledTimes(1)
|
expect(markToolComplete).toHaveBeenCalledTimes(1)
|
||||||
expect(onEvent).toHaveBeenCalledWith(
|
expect(onEvent).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
type: 'tool_result',
|
type: 'copilot.tool.result',
|
||||||
toolCallId: 'tool-1',
|
toolCallId: 'tool-1',
|
||||||
success: true,
|
success: true,
|
||||||
})
|
})
|
||||||
@@ -84,17 +84,17 @@ describe('sse-handlers tool lifecycle', () => {
|
|||||||
expect(updated?.result?.output).toEqual({ ok: true })
|
expect(updated?.result?.output).toEqual({ ok: true })
|
||||||
})
|
})
|
||||||
|
|
||||||
it('skips duplicate tool_call after result', async () => {
|
it('skips duplicate copilot.tool.call after result', async () => {
|
||||||
executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
|
executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
|
||||||
markToolComplete.mockResolvedValueOnce(true)
|
markToolComplete.mockResolvedValueOnce(true)
|
||||||
|
|
||||||
const event = {
|
const event = {
|
||||||
type: 'tool_call',
|
type: 'copilot.tool.call',
|
||||||
data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
||||||
}
|
}
|
||||||
|
|
||||||
await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
|
await sseHandlers['copilot.tool.call'](event as any, context, execContext, { interactive: false })
|
||||||
await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
|
await sseHandlers['copilot.tool.call'](event as any, context, execContext, { interactive: false })
|
||||||
|
|
||||||
expect(executeToolServerSide).toHaveBeenCalledTimes(1)
|
expect(executeToolServerSide).toHaveBeenCalledTimes(1)
|
||||||
expect(markToolComplete).toHaveBeenCalledTimes(1)
|
expect(markToolComplete).toHaveBeenCalledTimes(1)
|
||||||
|
|||||||
@@ -1,17 +1,12 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
|
import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
|
||||||
import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
|
|
||||||
import {
|
import {
|
||||||
asRecord,
|
asRecord,
|
||||||
getEventData,
|
getEventData,
|
||||||
markToolResultSeen,
|
markToolResultSeen,
|
||||||
wasToolResultSeen,
|
wasToolResultSeen,
|
||||||
} from '@/lib/copilot/orchestrator/sse-utils'
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
import {
|
import { markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
isIntegrationTool,
|
|
||||||
isToolAvailableOnSimSide,
|
|
||||||
markToolComplete,
|
|
||||||
} from '@/lib/copilot/orchestrator/tool-executor'
|
|
||||||
import type {
|
import type {
|
||||||
ContentBlock,
|
ContentBlock,
|
||||||
ExecutionContext,
|
ExecutionContext,
|
||||||
@@ -22,7 +17,6 @@ import type {
|
|||||||
} from '@/lib/copilot/orchestrator/types'
|
} from '@/lib/copilot/orchestrator/types'
|
||||||
import {
|
import {
|
||||||
executeToolAndReport,
|
executeToolAndReport,
|
||||||
isInterruptToolName,
|
|
||||||
waitForToolCompletion,
|
waitForToolCompletion,
|
||||||
waitForToolDecision,
|
waitForToolDecision,
|
||||||
} from './tool-execution'
|
} from './tool-execution'
|
||||||
@@ -35,12 +29,208 @@ const logger = createLogger('CopilotSseHandlers')
|
|||||||
* execution to the browser client instead of running executeWorkflow directly.
|
* execution to the browser client instead of running executeWorkflow directly.
|
||||||
*/
|
*/
|
||||||
const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
|
const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
|
||||||
'run_workflow',
|
'workflow_run',
|
||||||
'run_workflow_until_block',
|
|
||||||
'run_from_block',
|
|
||||||
'run_block',
|
|
||||||
])
|
])
|
||||||
|
|
||||||
|
function mapServerStateToToolStatus(state: unknown): ToolCallState['status'] {
|
||||||
|
switch (String(state || '')) {
|
||||||
|
case 'generating':
|
||||||
|
case 'pending':
|
||||||
|
case 'awaiting_approval':
|
||||||
|
return 'pending'
|
||||||
|
case 'executing':
|
||||||
|
return 'executing'
|
||||||
|
case 'success':
|
||||||
|
return 'success'
|
||||||
|
case 'rejected':
|
||||||
|
case 'skipped':
|
||||||
|
return 'rejected'
|
||||||
|
case 'aborted':
|
||||||
|
return 'skipped'
|
||||||
|
case 'error':
|
||||||
|
case 'failed':
|
||||||
|
return 'error'
|
||||||
|
default:
|
||||||
|
return 'pending'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getExecutionTarget(
|
||||||
|
toolData: Record<string, unknown>,
|
||||||
|
toolName: string
|
||||||
|
): { target: string; capabilityId?: string } {
|
||||||
|
const execution = asRecord(toolData.execution)
|
||||||
|
if (typeof execution.target === 'string' && execution.target.length > 0) {
|
||||||
|
return {
|
||||||
|
target: execution.target,
|
||||||
|
capabilityId:
|
||||||
|
typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback only when metadata is missing.
|
||||||
|
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
|
||||||
|
return { target: 'sim_client_capability', capabilityId: 'workflow.run' }
|
||||||
|
}
|
||||||
|
return { target: 'sim_server' }
|
||||||
|
}
|
||||||
|
|
||||||
|
function needsApproval(toolData: Record<string, unknown>): boolean {
|
||||||
|
const ui = asRecord(toolData.ui)
|
||||||
|
return ui.showInterrupt === true
|
||||||
|
}
|
||||||
|
|
||||||
|
async function waitForClientCapabilityAndReport(
|
||||||
|
toolCall: ToolCallState,
|
||||||
|
options: OrchestratorOptions,
|
||||||
|
logScope: string
|
||||||
|
): Promise<void> {
|
||||||
|
toolCall.status = 'executing'
|
||||||
|
const completion = await waitForToolCompletion(
|
||||||
|
toolCall.id,
|
||||||
|
options.timeout || STREAM_TIMEOUT_MS,
|
||||||
|
options.abortSignal
|
||||||
|
)
|
||||||
|
|
||||||
|
if (completion?.status === 'background') {
|
||||||
|
toolCall.status = 'skipped'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
markToolComplete(
|
||||||
|
toolCall.id,
|
||||||
|
toolCall.name,
|
||||||
|
202,
|
||||||
|
completion.message || 'Tool execution moved to background',
|
||||||
|
{ background: true }
|
||||||
|
).catch((err) => {
|
||||||
|
logger.error(`markToolComplete fire-and-forget failed (${logScope} background)`, {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (completion?.status === 'rejected') {
|
||||||
|
toolCall.status = 'rejected'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
markToolComplete(toolCall.id, toolCall.name, 400, completion.message || 'Tool execution rejected')
|
||||||
|
.catch((err) => {
|
||||||
|
logger.error(`markToolComplete fire-and-forget failed (${logScope} rejected)`, {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const success = completion?.status === 'success'
|
||||||
|
toolCall.status = success ? 'success' : 'error'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
|
||||||
|
markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
|
||||||
|
logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
toolName: toolCall.name,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
function markToolCallAndNotify(
|
||||||
|
toolCall: ToolCallState,
|
||||||
|
statusCode: number,
|
||||||
|
message: string,
|
||||||
|
data: Record<string, unknown> | undefined,
|
||||||
|
logScope: string
|
||||||
|
): void {
|
||||||
|
markToolComplete(toolCall.id, toolCall.name, statusCode, message, data).catch((err) => {
|
||||||
|
logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function executeToolCallWithPolicy(
|
||||||
|
toolCall: ToolCallState,
|
||||||
|
toolName: string,
|
||||||
|
toolData: Record<string, unknown>,
|
||||||
|
context: StreamingContext,
|
||||||
|
execContext: ExecutionContext,
|
||||||
|
options: OrchestratorOptions,
|
||||||
|
logScope: string
|
||||||
|
): Promise<void> {
|
||||||
|
const execution = getExecutionTarget(toolData, toolName)
|
||||||
|
const isInteractive = options.interactive === true
|
||||||
|
const requiresApproval = isInteractive && needsApproval(toolData)
|
||||||
|
|
||||||
|
if (toolData.state) {
|
||||||
|
toolCall.status = mapServerStateToToolStatus(toolData.state)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (requiresApproval) {
|
||||||
|
const decision = await waitForToolDecision(
|
||||||
|
toolCall.id,
|
||||||
|
options.timeout || STREAM_TIMEOUT_MS,
|
||||||
|
options.abortSignal
|
||||||
|
)
|
||||||
|
|
||||||
|
if (decision?.status === 'accepted' || decision?.status === 'success') {
|
||||||
|
// Continue below into normal execution path.
|
||||||
|
} else if (decision?.status === 'rejected' || decision?.status === 'error') {
|
||||||
|
toolCall.status = 'rejected'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
markToolCallAndNotify(
|
||||||
|
toolCall,
|
||||||
|
400,
|
||||||
|
decision.message || 'Tool execution rejected',
|
||||||
|
{ skipped: true, reason: 'user_rejected' },
|
||||||
|
`${logScope} rejected`
|
||||||
|
)
|
||||||
|
return
|
||||||
|
} else if (decision?.status === 'background') {
|
||||||
|
toolCall.status = 'skipped'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
markToolCallAndNotify(
|
||||||
|
toolCall,
|
||||||
|
202,
|
||||||
|
decision.message || 'Tool execution moved to background',
|
||||||
|
{ background: true },
|
||||||
|
`${logScope} background`
|
||||||
|
)
|
||||||
|
return
|
||||||
|
} else {
|
||||||
|
// Decision was null (timeout/abort).
|
||||||
|
toolCall.status = 'rejected'
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
markToolCallAndNotify(
|
||||||
|
toolCall,
|
||||||
|
408,
|
||||||
|
'Tool approval timed out',
|
||||||
|
{ skipped: true, reason: 'timeout' },
|
||||||
|
`${logScope} timeout`
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (execution.target === 'sim_client_capability' && isInteractive) {
|
||||||
|
await waitForClientCapabilityAndReport(toolCall, options, logScope)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
(execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
|
||||||
|
options.autoExecuteTools !== false
|
||||||
|
) {
|
||||||
|
await executeToolAndReport(toolCall.id, context, execContext, options)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Normalization + dedupe helpers live in sse-utils to keep server/client in sync.
|
// Normalization + dedupe helpers live in sse-utils to keep server/client in sync.
|
||||||
|
|
||||||
function inferToolSuccess(data: Record<string, unknown> | undefined): {
|
function inferToolSuccess(data: Record<string, unknown> | undefined): {
|
||||||
@@ -76,7 +266,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
context.chatId = asRecord(event.data).chatId as string | undefined
|
context.chatId = asRecord(event.data).chatId as string | undefined
|
||||||
},
|
},
|
||||||
title_updated: () => {},
|
title_updated: () => {},
|
||||||
tool_result: (event, context) => {
|
'copilot.tool.result': (event, context) => {
|
||||||
const data = getEventData(event)
|
const data = getEventData(event)
|
||||||
const toolCallId = event.toolCallId || (data?.id as string | undefined)
|
const toolCallId = event.toolCallId || (data?.id as string | undefined)
|
||||||
if (!toolCallId) return
|
if (!toolCallId) return
|
||||||
@@ -85,7 +275,11 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
|
|
||||||
const { success, hasResultData, hasError } = inferToolSuccess(data)
|
const { success, hasResultData, hasError } = inferToolSuccess(data)
|
||||||
|
|
||||||
current.status = success ? 'success' : 'error'
|
current.status = data?.state
|
||||||
|
? mapServerStateToToolStatus(data.state)
|
||||||
|
: success
|
||||||
|
? 'success'
|
||||||
|
: 'error'
|
||||||
current.endTime = Date.now()
|
current.endTime = Date.now()
|
||||||
if (hasResultData) {
|
if (hasResultData) {
|
||||||
current.result = {
|
current.result = {
|
||||||
@@ -98,35 +292,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
current.error = (data?.error || resultObj.error) as string | undefined
|
current.error = (data?.error || resultObj.error) as string | undefined
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
tool_error: (event, context) => {
|
'copilot.tool.call': async (event, context, execContext, options) => {
|
||||||
const data = getEventData(event)
|
|
||||||
const toolCallId = event.toolCallId || (data?.id as string | undefined)
|
|
||||||
if (!toolCallId) return
|
|
||||||
const current = context.toolCalls.get(toolCallId)
|
|
||||||
if (!current) return
|
|
||||||
current.status = 'error'
|
|
||||||
current.error = (data?.error as string | undefined) || 'Tool execution failed'
|
|
||||||
current.endTime = Date.now()
|
|
||||||
},
|
|
||||||
tool_generating: (event, context) => {
|
|
||||||
const data = getEventData(event)
|
|
||||||
const toolCallId =
|
|
||||||
event.toolCallId ||
|
|
||||||
(data?.toolCallId as string | undefined) ||
|
|
||||||
(data?.id as string | undefined)
|
|
||||||
const toolName =
|
|
||||||
event.toolName || (data?.toolName as string | undefined) || (data?.name as string | undefined)
|
|
||||||
if (!toolCallId || !toolName) return
|
|
||||||
if (!context.toolCalls.has(toolCallId)) {
|
|
||||||
context.toolCalls.set(toolCallId, {
|
|
||||||
id: toolCallId,
|
|
||||||
name: toolName,
|
|
||||||
status: 'pending',
|
|
||||||
startTime: Date.now(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
},
|
|
||||||
tool_call: async (event, context, execContext, options) => {
|
|
||||||
const toolData = getEventData(event) || ({} as Record<string, unknown>)
|
const toolData = getEventData(event) || ({} as Record<string, unknown>)
|
||||||
const toolCallId = (toolData.id as string | undefined) || event.toolCallId
|
const toolCallId = (toolData.id as string | undefined) || event.toolCallId
|
||||||
const toolName = (toolData.name as string | undefined) || event.toolName
|
const toolName = (toolData.name as string | undefined) || event.toolName
|
||||||
@@ -156,7 +322,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
context.toolCalls.set(toolCallId, {
|
context.toolCalls.set(toolCallId, {
|
||||||
id: toolCallId,
|
id: toolCallId,
|
||||||
name: toolName,
|
name: toolName,
|
||||||
status: 'pending',
|
status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
|
||||||
params: args,
|
params: args,
|
||||||
startTime: Date.now(),
|
startTime: Date.now(),
|
||||||
})
|
})
|
||||||
@@ -170,149 +336,17 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
const toolCall = context.toolCalls.get(toolCallId)
|
const toolCall = context.toolCalls.get(toolCallId)
|
||||||
if (!toolCall) return
|
if (!toolCall) return
|
||||||
|
|
||||||
// Subagent tools are executed by the copilot backend, not sim side.
|
await executeToolCallWithPolicy(
|
||||||
if (SUBAGENT_TOOL_SET.has(toolName)) {
|
toolCall,
|
||||||
return
|
toolName,
|
||||||
}
|
toolData,
|
||||||
|
context,
|
||||||
// Respond tools are internal to copilot's subagent system - skip execution.
|
execContext,
|
||||||
// The copilot backend handles these internally to signal subagent completion.
|
options,
|
||||||
if (RESPOND_TOOL_SET.has(toolName)) {
|
'run tool'
|
||||||
toolCall.status = 'success'
|
)
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
toolCall.result = {
|
|
||||||
success: true,
|
|
||||||
output: 'Internal respond tool - handled by copilot backend',
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const isInterruptTool = isInterruptToolName(toolName)
|
|
||||||
const isInteractive = options.interactive === true
|
|
||||||
// Integration tools (user-installed) also require approval in interactive mode
|
|
||||||
const needsApproval = isInterruptTool || isIntegrationTool(toolName)
|
|
||||||
|
|
||||||
if (needsApproval && isInteractive) {
|
|
||||||
const decision = await waitForToolDecision(
|
|
||||||
toolCallId,
|
|
||||||
options.timeout || STREAM_TIMEOUT_MS,
|
|
||||||
options.abortSignal
|
|
||||||
)
|
|
||||||
if (decision?.status === 'accepted' || decision?.status === 'success') {
|
|
||||||
// Client-executable run tools: defer execution to the browser client.
|
|
||||||
// The client calls executeWorkflowWithFullLogging for real-time feedback
|
|
||||||
// (block pulsing, logs, stop button) and reports completion via
|
|
||||||
// /api/copilot/confirm with status success/error. We poll Redis for
|
|
||||||
// that completion signal, then fire-and-forget markToolComplete to Go.
|
|
||||||
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
|
|
||||||
toolCall.status = 'executing'
|
|
||||||
const completion = await waitForToolCompletion(
|
|
||||||
toolCallId,
|
|
||||||
options.timeout || STREAM_TIMEOUT_MS,
|
|
||||||
options.abortSignal
|
|
||||||
)
|
|
||||||
if (completion?.status === 'background') {
|
|
||||||
toolCall.status = 'skipped'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
markToolComplete(
|
|
||||||
toolCall.id,
|
|
||||||
toolCall.name,
|
|
||||||
202,
|
|
||||||
completion.message || 'Tool execution moved to background',
|
|
||||||
{ background: true }
|
|
||||||
).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (run tool background)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCallId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const success = completion?.status === 'success'
|
|
||||||
toolCall.status = success ? 'success' : 'error'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
const msg =
|
|
||||||
completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
|
|
||||||
// Fire-and-forget: tell Go backend the tool is done
|
|
||||||
// (must NOT await — see deadlock note in executeToolAndReport)
|
|
||||||
markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (run tool)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
toolName: toolCall.name,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCallId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (decision?.status === 'rejected' || decision?.status === 'error') {
|
|
||||||
toolCall.status = 'rejected'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
// Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
|
|
||||||
markToolComplete(
|
|
||||||
toolCall.id,
|
|
||||||
toolCall.name,
|
|
||||||
400,
|
|
||||||
decision.message || 'Tool execution rejected',
|
|
||||||
{ skipped: true, reason: 'user_rejected' }
|
|
||||||
).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (rejected)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCall.id)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (decision?.status === 'background') {
|
|
||||||
toolCall.status = 'skipped'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
// Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
|
|
||||||
markToolComplete(
|
|
||||||
toolCall.id,
|
|
||||||
toolCall.name,
|
|
||||||
202,
|
|
||||||
decision.message || 'Tool execution moved to background',
|
|
||||||
{ background: true }
|
|
||||||
).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (background)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCall.id)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decision was null — timed out or aborted.
|
|
||||||
// Do NOT fall through to auto-execute. Mark the tool as timed out
|
|
||||||
// and notify Go so it can unblock waitForExternalTool.
|
|
||||||
toolCall.status = 'rejected'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
|
|
||||||
skipped: true,
|
|
||||||
reason: 'timeout',
|
|
||||||
}).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (timeout)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCall.id)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (options.autoExecuteTools !== false) {
|
|
||||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
reasoning: (event, context) => {
|
'copilot.phase.progress': (event, context) => {
|
||||||
const d = asRecord(event.data)
|
const d = asRecord(event.data)
|
||||||
const phase = d.phase || asRecord(d.data).phase
|
const phase = d.phase || asRecord(d.data).phase
|
||||||
if (phase === 'start') {
|
if (phase === 'start') {
|
||||||
@@ -336,7 +370,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
if (!chunk || !context.currentThinkingBlock) return
|
if (!chunk || !context.currentThinkingBlock) return
|
||||||
context.currentThinkingBlock.content = `${context.currentThinkingBlock.content || ''}${chunk}`
|
context.currentThinkingBlock.content = `${context.currentThinkingBlock.content || ''}${chunk}`
|
||||||
},
|
},
|
||||||
content: (event, context) => {
|
'copilot.content': (event, context) => {
|
||||||
// Go backend sends content as a plain string in event.data, not wrapped in an object.
|
// Go backend sends content as a plain string in event.data, not wrapped in an object.
|
||||||
let chunk: string | undefined
|
let chunk: string | undefined
|
||||||
if (typeof event.data === 'string') {
|
if (typeof event.data === 'string') {
|
||||||
@@ -349,20 +383,20 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
context.accumulatedContent += chunk
|
context.accumulatedContent += chunk
|
||||||
addContentBlock(context, { type: 'text', content: chunk })
|
addContentBlock(context, { type: 'text', content: chunk })
|
||||||
},
|
},
|
||||||
done: (event, context) => {
|
'copilot.phase.completed': (event, context) => {
|
||||||
const d = asRecord(event.data)
|
const d = asRecord(event.data)
|
||||||
if (d.responseId) {
|
if (d.responseId) {
|
||||||
context.conversationId = d.responseId as string
|
context.conversationId = d.responseId as string
|
||||||
}
|
}
|
||||||
context.streamComplete = true
|
context.streamComplete = true
|
||||||
},
|
},
|
||||||
start: (event, context) => {
|
'copilot.phase.started': (event, context) => {
|
||||||
const d = asRecord(event.data)
|
const d = asRecord(event.data)
|
||||||
if (d.responseId) {
|
if (d.responseId) {
|
||||||
context.conversationId = d.responseId as string
|
context.conversationId = d.responseId as string
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
error: (event, context) => {
|
'copilot.error': (event, context) => {
|
||||||
const d = asRecord(event.data)
|
const d = asRecord(event.data)
|
||||||
const message = (d.message || d.error || event.error) as string | undefined
|
const message = (d.message || d.error || event.error) as string | undefined
|
||||||
if (message) {
|
if (message) {
|
||||||
@@ -373,7 +407,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const subAgentHandlers: Record<string, SSEHandler> = {
|
export const subAgentHandlers: Record<string, SSEHandler> = {
|
||||||
content: (event, context) => {
|
'copilot.content': (event, context) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId || !event.data) return
|
if (!parentToolCallId || !event.data) return
|
||||||
// Go backend sends content as a plain string in event.data
|
// Go backend sends content as a plain string in event.data
|
||||||
@@ -389,7 +423,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
|||||||
(context.subAgentContent[parentToolCallId] || '') + chunk
|
(context.subAgentContent[parentToolCallId] || '') + chunk
|
||||||
addContentBlock(context, { type: 'subagent_text', content: chunk })
|
addContentBlock(context, { type: 'subagent_text', content: chunk })
|
||||||
},
|
},
|
||||||
tool_call: async (event, context, execContext, options) => {
|
'copilot.tool.call': async (event, context, execContext, options) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
const toolData = getEventData(event) || ({} as Record<string, unknown>)
|
const toolData = getEventData(event) || ({} as Record<string, unknown>)
|
||||||
@@ -410,7 +444,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
|||||||
const toolCall: ToolCallState = {
|
const toolCall: ToolCallState = {
|
||||||
id: toolCallId,
|
id: toolCallId,
|
||||||
name: toolName,
|
name: toolName,
|
||||||
status: 'pending',
|
status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
|
||||||
params: args,
|
params: args,
|
||||||
startTime: Date.now(),
|
startTime: Date.now(),
|
||||||
}
|
}
|
||||||
@@ -428,159 +462,17 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
|||||||
|
|
||||||
if (isPartial) return
|
if (isPartial) return
|
||||||
|
|
||||||
// Respond tools are internal to copilot's subagent system - skip execution.
|
await executeToolCallWithPolicy(
|
||||||
if (RESPOND_TOOL_SET.has(toolName)) {
|
toolCall,
|
||||||
toolCall.status = 'success'
|
toolName,
|
||||||
toolCall.endTime = Date.now()
|
toolData,
|
||||||
toolCall.result = {
|
context,
|
||||||
success: true,
|
execContext,
|
||||||
output: 'Internal respond tool - handled by copilot backend',
|
options,
|
||||||
}
|
'subagent run tool'
|
||||||
return
|
)
|
||||||
}
|
|
||||||
|
|
||||||
// Tools that only exist on the Go backend (e.g. search_patterns,
|
|
||||||
// search_errors, remember_debug) should NOT be re-executed on the Sim side.
|
|
||||||
// The Go backend already executed them and will send its own tool_result
|
|
||||||
// SSE event with the real outcome. Trying to execute them here would fail
|
|
||||||
// with "Tool not found" and incorrectly mark the tool as failed.
|
|
||||||
if (!isToolAvailableOnSimSide(toolName)) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Interrupt tools and integration tools (user-installed) require approval
|
|
||||||
// in interactive mode, same as top-level handler.
|
|
||||||
const needsSubagentApproval = isInterruptToolName(toolName) || isIntegrationTool(toolName)
|
|
||||||
if (options.interactive === true && needsSubagentApproval) {
|
|
||||||
const decision = await waitForToolDecision(
|
|
||||||
toolCallId,
|
|
||||||
options.timeout || STREAM_TIMEOUT_MS,
|
|
||||||
options.abortSignal
|
|
||||||
)
|
|
||||||
if (decision?.status === 'accepted' || decision?.status === 'success') {
|
|
||||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (decision?.status === 'rejected' || decision?.status === 'error') {
|
|
||||||
toolCall.status = 'rejected'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
// Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
|
|
||||||
markToolComplete(
|
|
||||||
toolCall.id,
|
|
||||||
toolCall.name,
|
|
||||||
400,
|
|
||||||
decision.message || 'Tool execution rejected',
|
|
||||||
{ skipped: true, reason: 'user_rejected' }
|
|
||||||
).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (subagent rejected)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCall.id)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (decision?.status === 'background') {
|
|
||||||
toolCall.status = 'skipped'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
// Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
|
|
||||||
markToolComplete(
|
|
||||||
toolCall.id,
|
|
||||||
toolCall.name,
|
|
||||||
202,
|
|
||||||
decision.message || 'Tool execution moved to background',
|
|
||||||
{ background: true }
|
|
||||||
).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (subagent background)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCall.id)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decision was null — timed out or aborted.
|
|
||||||
// Do NOT fall through to auto-execute.
|
|
||||||
toolCall.status = 'rejected'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
|
|
||||||
skipped: true,
|
|
||||||
reason: 'timeout',
|
|
||||||
}).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (subagent timeout)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCall.id)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Client-executable run tools in interactive mode: defer to client.
|
|
||||||
// Same pattern as main handler: wait for client completion, then tell Go.
|
|
||||||
if (options.interactive === true && CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
|
|
||||||
toolCall.status = 'executing'
|
|
||||||
const completion = await waitForToolCompletion(
|
|
||||||
toolCallId,
|
|
||||||
options.timeout || STREAM_TIMEOUT_MS,
|
|
||||||
options.abortSignal
|
|
||||||
)
|
|
||||||
if (completion?.status === 'rejected') {
|
|
||||||
toolCall.status = 'rejected'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
markToolComplete(
|
|
||||||
toolCall.id,
|
|
||||||
toolCall.name,
|
|
||||||
400,
|
|
||||||
completion.message || 'Tool execution rejected'
|
|
||||||
).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (subagent run tool rejected)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCallId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (completion?.status === 'background') {
|
|
||||||
toolCall.status = 'skipped'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
markToolComplete(
|
|
||||||
toolCall.id,
|
|
||||||
toolCall.name,
|
|
||||||
202,
|
|
||||||
completion.message || 'Tool execution moved to background',
|
|
||||||
{ background: true }
|
|
||||||
).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (subagent run tool background)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCallId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const success = completion?.status === 'success'
|
|
||||||
toolCall.status = success ? 'success' : 'error'
|
|
||||||
toolCall.endTime = Date.now()
|
|
||||||
const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
|
|
||||||
markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
|
|
||||||
logger.error('markToolComplete fire-and-forget failed (subagent run tool)', {
|
|
||||||
toolCallId: toolCall.id,
|
|
||||||
toolName: toolCall.name,
|
|
||||||
error: err instanceof Error ? err.message : String(err),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
markToolResultSeen(toolCallId)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (options.autoExecuteTools !== false) {
|
|
||||||
await executeToolAndReport(toolCallId, context, execContext, options)
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
tool_result: (event, context) => {
|
'copilot.tool.result': (event, context) => {
|
||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
const data = getEventData(event)
|
const data = getEventData(event)
|
||||||
@@ -596,7 +488,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
|||||||
|
|
||||||
const { success, hasResultData, hasError } = inferToolSuccess(data)
|
const { success, hasResultData, hasError } = inferToolSuccess(data)
|
||||||
|
|
||||||
const status = success ? 'success' : 'error'
|
const status = data?.state ? mapServerStateToToolStatus(data.state) : success ? 'success' : 'error'
|
||||||
const endTime = Date.now()
|
const endTime = Date.now()
|
||||||
const result = hasResultData ? { success, output: data?.result || data?.data } : undefined
|
const result = hasResultData ? { success, output: data?.result || data?.data } : undefined
|
||||||
|
|
||||||
@@ -620,8 +512,22 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
'copilot.phase.progress': () => {
|
||||||
|
// Subagent reasoning chunks are surfaced via copilot.content.
|
||||||
|
},
|
||||||
|
'copilot.phase.completed': () => {},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
sseHandlers['copilot.tool.interrupt_required'] = sseHandlers['copilot.tool.call']
|
||||||
|
sseHandlers['copilot.workflow.patch'] = sseHandlers['copilot.tool.result']
|
||||||
|
sseHandlers['copilot.workflow.verify'] = sseHandlers['copilot.tool.result']
|
||||||
|
sseHandlers['copilot.tool.interrupt_resolved'] = sseHandlers['copilot.tool.result']
|
||||||
|
|
||||||
|
subAgentHandlers['copilot.tool.interrupt_required'] = subAgentHandlers['copilot.tool.call']
|
||||||
|
subAgentHandlers['copilot.workflow.patch'] = subAgentHandlers['copilot.tool.result']
|
||||||
|
subAgentHandlers['copilot.workflow.verify'] = subAgentHandlers['copilot.tool.result']
|
||||||
|
subAgentHandlers['copilot.tool.interrupt_resolved'] = subAgentHandlers['copilot.tool.result']
|
||||||
|
|
||||||
export function handleSubagentRouting(event: SSEEvent, context: StreamingContext): boolean {
|
export function handleSubagentRouting(event: SSEEvent, context: StreamingContext): boolean {
|
||||||
if (!event.subagent) return false
|
if (!event.subagent) return false
|
||||||
if (!context.subAgentParentToolCallId) {
|
if (!context.subAgentParentToolCallId) {
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import {
|
|||||||
TOOL_DECISION_MAX_POLL_MS,
|
TOOL_DECISION_MAX_POLL_MS,
|
||||||
TOOL_DECISION_POLL_BACKOFF,
|
TOOL_DECISION_POLL_BACKOFF,
|
||||||
} from '@/lib/copilot/constants'
|
} from '@/lib/copilot/constants'
|
||||||
import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
|
|
||||||
import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
|
import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
|
||||||
import {
|
import {
|
||||||
asRecord,
|
asRecord,
|
||||||
@@ -21,10 +20,6 @@ import type {
|
|||||||
|
|
||||||
const logger = createLogger('CopilotSseToolExecution')
|
const logger = createLogger('CopilotSseToolExecution')
|
||||||
|
|
||||||
export function isInterruptToolName(toolName: string): boolean {
|
|
||||||
return INTERRUPT_TOOL_SET.has(toolName)
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function executeToolAndReport(
|
export async function executeToolAndReport(
|
||||||
toolCallId: string,
|
toolCallId: string,
|
||||||
context: StreamingContext,
|
context: StreamingContext,
|
||||||
@@ -34,9 +29,11 @@ export async function executeToolAndReport(
|
|||||||
const toolCall = context.toolCalls.get(toolCallId)
|
const toolCall = context.toolCalls.get(toolCallId)
|
||||||
if (!toolCall) return
|
if (!toolCall) return
|
||||||
|
|
||||||
if (toolCall.status === 'executing') return
|
const lockable = toolCall as typeof toolCall & { __simExecuting?: boolean }
|
||||||
|
if (lockable.__simExecuting) return
|
||||||
if (wasToolResultSeen(toolCall.id)) return
|
if (wasToolResultSeen(toolCall.id)) return
|
||||||
|
|
||||||
|
lockable.__simExecuting = true
|
||||||
toolCall.status = 'executing'
|
toolCall.status = 'executing'
|
||||||
try {
|
try {
|
||||||
const result = await executeToolServerSide(toolCall, execContext)
|
const result = await executeToolServerSide(toolCall, execContext)
|
||||||
@@ -83,7 +80,7 @@ export async function executeToolAndReport(
|
|||||||
})
|
})
|
||||||
|
|
||||||
const resultEvent: SSEEvent = {
|
const resultEvent: SSEEvent = {
|
||||||
type: 'tool_result',
|
type: 'copilot.tool.result',
|
||||||
toolCallId: toolCall.id,
|
toolCallId: toolCall.id,
|
||||||
toolName: toolCall.name,
|
toolName: toolCall.name,
|
||||||
success: result.success,
|
success: result.success,
|
||||||
@@ -91,6 +88,8 @@ export async function executeToolAndReport(
|
|||||||
data: {
|
data: {
|
||||||
id: toolCall.id,
|
id: toolCall.id,
|
||||||
name: toolCall.name,
|
name: toolCall.name,
|
||||||
|
phase: 'completed',
|
||||||
|
state: result.success ? 'success' : 'error',
|
||||||
success: result.success,
|
success: result.success,
|
||||||
result: result.output,
|
result: result.output,
|
||||||
},
|
},
|
||||||
@@ -113,15 +112,22 @@ export async function executeToolAndReport(
|
|||||||
})
|
})
|
||||||
|
|
||||||
const errorEvent: SSEEvent = {
|
const errorEvent: SSEEvent = {
|
||||||
type: 'tool_error',
|
type: 'copilot.tool.result',
|
||||||
toolCallId: toolCall.id,
|
toolCallId: toolCall.id,
|
||||||
|
toolName: toolCall.name,
|
||||||
|
success: false,
|
||||||
data: {
|
data: {
|
||||||
id: toolCall.id,
|
id: toolCall.id,
|
||||||
name: toolCall.name,
|
name: toolCall.name,
|
||||||
|
phase: 'completed',
|
||||||
|
state: 'error',
|
||||||
|
success: false,
|
||||||
error: toolCall.error,
|
error: toolCall.error,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
await options?.onEvent?.(errorEvent)
|
await options?.onEvent?.(errorEvent)
|
||||||
|
} finally {
|
||||||
|
delete lockable.__simExecuting
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -11,10 +11,10 @@ import {
|
|||||||
describe('sse-utils', () => {
|
describe('sse-utils', () => {
|
||||||
it.concurrent('normalizes tool fields from string data', () => {
|
it.concurrent('normalizes tool fields from string data', () => {
|
||||||
const event = {
|
const event = {
|
||||||
type: 'tool_result',
|
type: 'copilot.tool.result',
|
||||||
data: JSON.stringify({
|
data: JSON.stringify({
|
||||||
id: 'tool_1',
|
id: 'tool_1',
|
||||||
name: 'edit_workflow',
|
name: 'workflow_change',
|
||||||
success: true,
|
success: true,
|
||||||
result: { ok: true },
|
result: { ok: true },
|
||||||
}),
|
}),
|
||||||
@@ -22,21 +22,62 @@ describe('sse-utils', () => {
|
|||||||
|
|
||||||
const normalized = normalizeSseEvent(event as any)
|
const normalized = normalizeSseEvent(event as any)
|
||||||
|
|
||||||
|
expect(normalized.type).toBe('copilot.tool.result')
|
||||||
expect(normalized.toolCallId).toBe('tool_1')
|
expect(normalized.toolCallId).toBe('tool_1')
|
||||||
expect(normalized.toolName).toBe('edit_workflow')
|
expect(normalized.toolName).toBe('workflow_change')
|
||||||
expect(normalized.success).toBe(true)
|
expect(normalized.success).toBe(true)
|
||||||
expect(normalized.result).toEqual({ ok: true })
|
expect(normalized.result).toEqual({ ok: true })
|
||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('dedupes tool_call events', () => {
|
it.concurrent('maps copilot tool event aliases and preserves tool metadata', () => {
|
||||||
const event = { type: 'tool_call', data: { id: 'tool_call_1', name: 'plan' } }
|
const event = {
|
||||||
|
type: 'copilot.tool.interrupt_required',
|
||||||
|
data: {
|
||||||
|
id: 'tool_legacy_1',
|
||||||
|
name: 'workflow_run',
|
||||||
|
state: 'pending',
|
||||||
|
ui: { showInterrupt: true },
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = normalizeSseEvent(event as any)
|
||||||
|
|
||||||
|
expect(normalized.type).toBe('copilot.tool.interrupt_required')
|
||||||
|
expect(normalized.toolCallId).toBe('tool_legacy_1')
|
||||||
|
expect(normalized.toolName).toBe('workflow_run')
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('keeps copilot content event type when payload is plain string', () => {
|
||||||
|
const event = {
|
||||||
|
type: 'copilot.content',
|
||||||
|
data: 'hello world',
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = normalizeSseEvent(event as any)
|
||||||
|
|
||||||
|
expect(normalized.type).toBe('copilot.content')
|
||||||
|
expect(normalized.data).toBe('hello world')
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('dedupes copilot tool call events', () => {
|
||||||
|
const event = { type: 'copilot.tool.call', data: { id: 'tool_call_1', name: 'plan' } }
|
||||||
expect(shouldSkipToolCallEvent(event as any)).toBe(false)
|
expect(shouldSkipToolCallEvent(event as any)).toBe(false)
|
||||||
expect(shouldSkipToolCallEvent(event as any)).toBe(true)
|
expect(shouldSkipToolCallEvent(event as any)).toBe(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('dedupes tool_result events', () => {
|
it.concurrent('dedupes copilot tool result events', () => {
|
||||||
const event = { type: 'tool_result', data: { id: 'tool_result_1', name: 'plan' } }
|
const event = { type: 'copilot.tool.result', data: { id: 'tool_result_1', name: 'plan' } }
|
||||||
expect(shouldSkipToolResultEvent(event as any)).toBe(false)
|
expect(shouldSkipToolResultEvent(event as any)).toBe(false)
|
||||||
expect(shouldSkipToolResultEvent(event as any)).toBe(true)
|
expect(shouldSkipToolResultEvent(event as any)).toBe(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
it.concurrent('dedupes copilot workflow patch result events', () => {
|
||||||
|
const normalized = normalizeSseEvent({
|
||||||
|
type: 'copilot.workflow.patch',
|
||||||
|
data: { id: 'tool_result_aliased_1', name: 'workflow_change' },
|
||||||
|
} as any)
|
||||||
|
|
||||||
|
expect(shouldSkipToolResultEvent(normalized as any)).toBe(false)
|
||||||
|
expect(shouldSkipToolResultEvent(normalized as any)).toBe(true)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -101,8 +101,21 @@ export function wasToolResultSeen(toolCallId: string): boolean {
|
|||||||
return seenToolResults.has(toolCallId)
|
return seenToolResults.has(toolCallId)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isToolCallEventType(type: string): boolean {
|
||||||
|
return type === 'copilot.tool.call' || type === 'copilot.tool.interrupt_required'
|
||||||
|
}
|
||||||
|
|
||||||
|
function isToolResultEventType(type: string): boolean {
|
||||||
|
return (
|
||||||
|
type === 'copilot.tool.result' ||
|
||||||
|
type === 'copilot.workflow.patch' ||
|
||||||
|
type === 'copilot.workflow.verify' ||
|
||||||
|
type === 'copilot.tool.interrupt_resolved'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
export function shouldSkipToolCallEvent(event: SSEEvent): boolean {
|
export function shouldSkipToolCallEvent(event: SSEEvent): boolean {
|
||||||
if (event.type !== 'tool_call') return false
|
if (!isToolCallEventType(String(event.type || ''))) return false
|
||||||
const toolCallId = getToolCallIdFromEvent(event)
|
const toolCallId = getToolCallIdFromEvent(event)
|
||||||
if (!toolCallId) return false
|
if (!toolCallId) return false
|
||||||
const eventData = getEventData(event)
|
const eventData = getEventData(event)
|
||||||
@@ -115,7 +128,7 @@ export function shouldSkipToolCallEvent(event: SSEEvent): boolean {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function shouldSkipToolResultEvent(event: SSEEvent): boolean {
|
export function shouldSkipToolResultEvent(event: SSEEvent): boolean {
|
||||||
if (event.type !== 'tool_result') return false
|
if (!isToolResultEventType(String(event.type || ''))) return false
|
||||||
const toolCallId = getToolCallIdFromEvent(event)
|
const toolCallId = getToolCallIdFromEvent(event)
|
||||||
if (!toolCallId) return false
|
if (!toolCallId) return false
|
||||||
if (wasToolResultSeen(toolCallId)) return true
|
if (wasToolResultSeen(toolCallId)) return true
|
||||||
|
|||||||
@@ -97,8 +97,8 @@ describe('stream-buffer', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it.concurrent('replays events after a given event id', async () => {
|
it.concurrent('replays events after a given event id', async () => {
|
||||||
await appendStreamEvent('stream-1', { type: 'content', data: 'hello' })
|
await appendStreamEvent('stream-1', { type: 'copilot.content', data: 'hello' })
|
||||||
await appendStreamEvent('stream-1', { type: 'content', data: 'world' })
|
await appendStreamEvent('stream-1', { type: 'copilot.content', data: 'world' })
|
||||||
|
|
||||||
const allEvents = await readStreamEvents('stream-1', 0)
|
const allEvents = await readStreamEvents('stream-1', 0)
|
||||||
expect(allEvents.map((entry) => entry.event.data)).toEqual(['hello', 'world'])
|
expect(allEvents.map((entry) => entry.event.data)).toEqual(['hello', 'world'])
|
||||||
@@ -109,8 +109,8 @@ describe('stream-buffer', () => {
|
|||||||
|
|
||||||
it.concurrent('flushes buffered events for resume', async () => {
|
it.concurrent('flushes buffered events for resume', async () => {
|
||||||
const writer = createStreamEventWriter('stream-2')
|
const writer = createStreamEventWriter('stream-2')
|
||||||
await writer.write({ type: 'content', data: 'a' })
|
await writer.write({ type: 'copilot.content', data: 'a' })
|
||||||
await writer.write({ type: 'content', data: 'b' })
|
await writer.write({ type: 'copilot.content', data: 'b' })
|
||||||
await writer.flush()
|
await writer.flush()
|
||||||
|
|
||||||
const events = await readStreamEvents('stream-2', 0)
|
const events = await readStreamEvents('stream-2', 0)
|
||||||
|
|||||||
@@ -127,7 +127,7 @@ export async function runStreamLoop(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Standard subagent start/end handling.
|
// Standard subagent start/end handling.
|
||||||
if (normalizedEvent.type === 'subagent_start') {
|
if (normalizedEvent.type === 'copilot.subagent.started') {
|
||||||
const eventData = normalizedEvent.data as Record<string, unknown> | undefined
|
const eventData = normalizedEvent.data as Record<string, unknown> | undefined
|
||||||
const toolCallId = eventData?.tool_call_id as string | undefined
|
const toolCallId = eventData?.tool_call_id as string | undefined
|
||||||
if (toolCallId) {
|
if (toolCallId) {
|
||||||
@@ -138,7 +138,7 @@ export async function runStreamLoop(
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if (normalizedEvent.type === 'subagent_end') {
|
if (normalizedEvent.type === 'copilot.subagent.completed') {
|
||||||
context.subAgentParentToolCallId = undefined
|
context.subAgentParentToolCallId = undefined
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ export async function orchestrateSubagentStream(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// For direct subagent calls, events may have the subagent field set
|
// For direct subagent calls, events may have the subagent field set
|
||||||
// but no subagent_start because this IS the top-level agent.
|
// but no copilot.subagent.started because this IS the top-level agent.
|
||||||
// Skip subagent routing for events where the subagent field matches
|
// Skip subagent routing for events where the subagent field matches
|
||||||
// the current agentId - these are top-level events.
|
// the current agentId - these are top-level events.
|
||||||
if (event.subagent === agentId && !ctx.subAgentParentToolCallId) {
|
if (event.subagent === agentId && !ctx.subAgentParentToolCallId) {
|
||||||
|
|||||||
@@ -220,7 +220,8 @@ export async function executeDeployMcp(
|
|||||||
if (!workflowRecord.isDeployed) {
|
if (!workflowRecord.isDeployed) {
|
||||||
return {
|
return {
|
||||||
success: false,
|
success: false,
|
||||||
error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
|
error:
|
||||||
|
'Workflow must be deployed before adding as an MCP tool. Use workflow_deploy(mode: "api") first.',
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -50,6 +50,8 @@ import type {
|
|||||||
RunWorkflowParams,
|
RunWorkflowParams,
|
||||||
RunWorkflowUntilBlockParams,
|
RunWorkflowUntilBlockParams,
|
||||||
SetGlobalWorkflowVariablesParams,
|
SetGlobalWorkflowVariablesParams,
|
||||||
|
WorkflowDeployParams,
|
||||||
|
WorkflowRunParams,
|
||||||
} from './param-types'
|
} from './param-types'
|
||||||
import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
|
import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
|
||||||
import {
|
import {
|
||||||
@@ -318,13 +320,91 @@ async function executeManageCustomTool(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function executeWorkflowRunUnified(
|
||||||
|
rawParams: Record<string, unknown>,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
const params = rawParams as WorkflowRunParams
|
||||||
|
const mode = params.mode || 'full'
|
||||||
|
|
||||||
|
switch (mode) {
|
||||||
|
case 'full':
|
||||||
|
return executeRunWorkflow(params as RunWorkflowParams, context)
|
||||||
|
case 'until_block':
|
||||||
|
if (!params.stopAfterBlockId) {
|
||||||
|
return { success: false, error: 'stopAfterBlockId is required for mode=until_block' }
|
||||||
|
}
|
||||||
|
return executeRunWorkflowUntilBlock(params as RunWorkflowUntilBlockParams, context)
|
||||||
|
case 'from_block':
|
||||||
|
if (!params.startBlockId) {
|
||||||
|
return { success: false, error: 'startBlockId is required for mode=from_block' }
|
||||||
|
}
|
||||||
|
return executeRunFromBlock(params as RunFromBlockParams, context)
|
||||||
|
case 'block':
|
||||||
|
if (!params.blockId) {
|
||||||
|
return { success: false, error: 'blockId is required for mode=block' }
|
||||||
|
}
|
||||||
|
return executeRunBlock(params as RunBlockParams, context)
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Unsupported workflow_run mode: ${String(mode)}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function executeWorkflowDeployUnified(
|
||||||
|
rawParams: Record<string, unknown>,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
const params = rawParams as unknown as WorkflowDeployParams
|
||||||
|
const mode = params.mode
|
||||||
|
|
||||||
|
if (!mode) {
|
||||||
|
return { success: false, error: 'mode is required for workflow_deploy' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const scopedContext =
|
||||||
|
params.workflowId && params.workflowId !== context.workflowId
|
||||||
|
? { ...context, workflowId: params.workflowId }
|
||||||
|
: context
|
||||||
|
|
||||||
|
switch (mode) {
|
||||||
|
case 'status':
|
||||||
|
return executeCheckDeploymentStatus(params as CheckDeploymentStatusParams, scopedContext)
|
||||||
|
case 'redeploy':
|
||||||
|
return executeRedeploy(scopedContext)
|
||||||
|
case 'api':
|
||||||
|
return executeDeployApi(params as DeployApiParams, scopedContext)
|
||||||
|
case 'chat':
|
||||||
|
return executeDeployChat(params as DeployChatParams, scopedContext)
|
||||||
|
case 'mcp':
|
||||||
|
return executeDeployMcp(params as DeployMcpParams, scopedContext)
|
||||||
|
case 'list_mcp_servers':
|
||||||
|
return executeListWorkspaceMcpServers(params as ListWorkspaceMcpServersParams, scopedContext)
|
||||||
|
case 'create_mcp_server':
|
||||||
|
return executeCreateWorkspaceMcpServer(
|
||||||
|
params as CreateWorkspaceMcpServerParams,
|
||||||
|
scopedContext
|
||||||
|
)
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Unsupported workflow_deploy mode: ${String(mode)}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const SERVER_TOOLS = new Set<string>([
|
const SERVER_TOOLS = new Set<string>([
|
||||||
'get_blocks_and_tools',
|
'get_blocks_and_tools',
|
||||||
'get_blocks_metadata',
|
'get_blocks_metadata',
|
||||||
'get_block_options',
|
'get_block_options',
|
||||||
'get_block_config',
|
'get_block_config',
|
||||||
'get_trigger_blocks',
|
'get_trigger_blocks',
|
||||||
'edit_workflow',
|
'workflow_context_get',
|
||||||
|
'workflow_context_expand',
|
||||||
|
'workflow_change',
|
||||||
|
'workflow_verify',
|
||||||
'get_workflow_console',
|
'get_workflow_console',
|
||||||
'search_documentation',
|
'search_documentation',
|
||||||
'search_online',
|
'search_online',
|
||||||
@@ -352,11 +432,7 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
|||||||
get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
|
get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
|
||||||
get_block_upstream_references: (p, c) =>
|
get_block_upstream_references: (p, c) =>
|
||||||
executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
|
executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
|
||||||
run_workflow: (p, c) => executeRunWorkflow(p as RunWorkflowParams, c),
|
workflow_run: (p, c) => executeWorkflowRunUnified(p, c),
|
||||||
run_workflow_until_block: (p, c) =>
|
|
||||||
executeRunWorkflowUntilBlock(p as unknown as RunWorkflowUntilBlockParams, c),
|
|
||||||
run_from_block: (p, c) => executeRunFromBlock(p as unknown as RunFromBlockParams, c),
|
|
||||||
run_block: (p, c) => executeRunBlock(p as unknown as RunBlockParams, c),
|
|
||||||
get_deployed_workflow_state: (p, c) =>
|
get_deployed_workflow_state: (p, c) =>
|
||||||
executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
|
executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
|
||||||
generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
|
generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
|
||||||
@@ -367,10 +443,7 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
|||||||
}),
|
}),
|
||||||
set_global_workflow_variables: (p, c) =>
|
set_global_workflow_variables: (p, c) =>
|
||||||
executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
|
executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
|
||||||
deploy_api: (p, c) => executeDeployApi(p as DeployApiParams, c),
|
workflow_deploy: (p, c) => executeWorkflowDeployUnified(p, c),
|
||||||
deploy_chat: (p, c) => executeDeployChat(p as DeployChatParams, c),
|
|
||||||
deploy_mcp: (p, c) => executeDeployMcp(p as DeployMcpParams, c),
|
|
||||||
redeploy: (_p, c) => executeRedeploy(c),
|
|
||||||
check_deployment_status: (p, c) =>
|
check_deployment_status: (p, c) =>
|
||||||
executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
|
executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
|
||||||
list_workspace_mcp_servers: (p, c) =>
|
list_workspace_mcp_servers: (p, c) =>
|
||||||
|
|||||||
@@ -93,6 +93,18 @@ export interface RunBlockParams {
|
|||||||
useDeployedState?: boolean
|
useDeployedState?: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface WorkflowRunParams {
|
||||||
|
mode?: 'full' | 'until_block' | 'from_block' | 'block'
|
||||||
|
workflowId?: string
|
||||||
|
workflow_input?: unknown
|
||||||
|
input?: unknown
|
||||||
|
useDeployedState?: boolean
|
||||||
|
stopAfterBlockId?: string
|
||||||
|
startBlockId?: string
|
||||||
|
blockId?: string
|
||||||
|
executionId?: string
|
||||||
|
}
|
||||||
|
|
||||||
export interface GetDeployedWorkflowStateParams {
|
export interface GetDeployedWorkflowStateParams {
|
||||||
workflowId?: string
|
workflowId?: string
|
||||||
}
|
}
|
||||||
@@ -169,6 +181,39 @@ export interface CreateWorkspaceMcpServerParams {
|
|||||||
workflowIds?: string[]
|
workflowIds?: string[]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface WorkflowDeployParams {
|
||||||
|
mode:
|
||||||
|
| 'status'
|
||||||
|
| 'redeploy'
|
||||||
|
| 'api'
|
||||||
|
| 'chat'
|
||||||
|
| 'mcp'
|
||||||
|
| 'list_mcp_servers'
|
||||||
|
| 'create_mcp_server'
|
||||||
|
workflowId?: string
|
||||||
|
action?: 'deploy' | 'undeploy'
|
||||||
|
identifier?: string
|
||||||
|
title?: string
|
||||||
|
description?: string
|
||||||
|
customizations?: {
|
||||||
|
primaryColor?: string
|
||||||
|
secondaryColor?: string
|
||||||
|
welcomeMessage?: string
|
||||||
|
iconUrl?: string
|
||||||
|
}
|
||||||
|
authType?: 'none' | 'password' | 'public' | 'email' | 'sso'
|
||||||
|
password?: string
|
||||||
|
allowedEmails?: string[]
|
||||||
|
outputConfigs?: unknown[]
|
||||||
|
serverId?: string
|
||||||
|
toolName?: string
|
||||||
|
toolDescription?: string
|
||||||
|
parameterSchema?: Record<string, unknown>
|
||||||
|
name?: string
|
||||||
|
isPublic?: boolean
|
||||||
|
workflowIds?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
// === Workflow Organization Params ===
|
// === Workflow Organization Params ===
|
||||||
|
|
||||||
export interface RenameWorkflowParams {
|
export interface RenameWorkflowParams {
|
||||||
|
|||||||
@@ -1,19 +1,22 @@
|
|||||||
export type SSEEventType =
|
export type SSEEventType =
|
||||||
| 'chat_id'
|
| 'chat_id'
|
||||||
| 'title_updated'
|
| 'title_updated'
|
||||||
| 'content'
|
|
||||||
| 'reasoning'
|
|
||||||
| 'tool_call'
|
|
||||||
| 'tool_generating'
|
|
||||||
| 'tool_result'
|
|
||||||
| 'tool_error'
|
|
||||||
| 'subagent_start'
|
|
||||||
| 'subagent_end'
|
|
||||||
| 'structured_result'
|
| 'structured_result'
|
||||||
| 'subagent_result'
|
| 'subagent_result'
|
||||||
| 'done'
|
| 'stream_end'
|
||||||
| 'error'
|
| 'copilot.phase.started'
|
||||||
| 'start'
|
| 'copilot.phase.progress'
|
||||||
|
| 'copilot.phase.completed'
|
||||||
|
| 'copilot.tool.call'
|
||||||
|
| 'copilot.tool.result'
|
||||||
|
| 'copilot.tool.interrupt_required'
|
||||||
|
| 'copilot.tool.interrupt_resolved'
|
||||||
|
| 'copilot.workflow.patch'
|
||||||
|
| 'copilot.workflow.verify'
|
||||||
|
| 'copilot.subagent.started'
|
||||||
|
| 'copilot.subagent.completed'
|
||||||
|
| 'copilot.content'
|
||||||
|
| 'copilot.error'
|
||||||
|
|
||||||
export interface SSEEvent {
|
export interface SSEEvent {
|
||||||
type: SSEEventType
|
type: SSEEventType
|
||||||
|
|||||||
@@ -592,16 +592,40 @@ const META_edit: ToolMetadata = {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
const META_edit_workflow: ToolMetadata = {
|
const META_workflow_change: ToolMetadata = {
|
||||||
displayNames: {
|
displayNames: {
|
||||||
[ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
|
[ClientToolCallState.generating]: { text: 'Planning workflow changes', icon: Loader2 },
|
||||||
[ClientToolCallState.executing]: { text: 'Editing your workflow', icon: Loader2 },
|
[ClientToolCallState.executing]: { text: 'Applying workflow changes', icon: Loader2 },
|
||||||
[ClientToolCallState.success]: { text: 'Edited your workflow', icon: Grid2x2Check },
|
[ClientToolCallState.success]: { text: 'Updated your workflow', icon: Grid2x2Check },
|
||||||
[ClientToolCallState.error]: { text: 'Failed to edit your workflow', icon: XCircle },
|
[ClientToolCallState.error]: { text: 'Failed to update your workflow', icon: XCircle },
|
||||||
[ClientToolCallState.review]: { text: 'Review your workflow changes', icon: Grid2x2 },
|
[ClientToolCallState.review]: { text: 'Review your workflow changes', icon: Grid2x2 },
|
||||||
[ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
|
[ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted editing your workflow', icon: MinusCircle },
|
[ClientToolCallState.aborted]: { text: 'Aborted workflow changes', icon: MinusCircle },
|
||||||
[ClientToolCallState.pending]: { text: 'Editing your workflow', icon: Loader2 },
|
[ClientToolCallState.pending]: { text: 'Planning workflow changes', icon: Loader2 },
|
||||||
|
},
|
||||||
|
getDynamicText: (params, state) => {
|
||||||
|
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||||
|
if (mode === 'dry_run') {
|
||||||
|
switch (state) {
|
||||||
|
case ClientToolCallState.success:
|
||||||
|
return 'Planned workflow changes'
|
||||||
|
case ClientToolCallState.executing:
|
||||||
|
case ClientToolCallState.generating:
|
||||||
|
case ClientToolCallState.pending:
|
||||||
|
return 'Planning workflow changes'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (mode === 'apply' || typeof params?.proposalId === 'string') {
|
||||||
|
switch (state) {
|
||||||
|
case ClientToolCallState.success:
|
||||||
|
return 'Applied workflow changes'
|
||||||
|
case ClientToolCallState.executing:
|
||||||
|
case ClientToolCallState.generating:
|
||||||
|
case ClientToolCallState.pending:
|
||||||
|
return 'Applying workflow changes'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return undefined
|
||||||
},
|
},
|
||||||
uiConfig: {
|
uiConfig: {
|
||||||
isSpecial: true,
|
isSpecial: true,
|
||||||
@@ -609,6 +633,42 @@ const META_edit_workflow: ToolMetadata = {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const META_workflow_context_get: ToolMetadata = {
|
||||||
|
displayNames: {
|
||||||
|
[ClientToolCallState.generating]: { text: 'Gathering workflow context', icon: Loader2 },
|
||||||
|
[ClientToolCallState.pending]: { text: 'Gathering workflow context', icon: Loader2 },
|
||||||
|
[ClientToolCallState.executing]: { text: 'Gathering workflow context', icon: Loader2 },
|
||||||
|
[ClientToolCallState.success]: { text: 'Gathered workflow context', icon: FileText },
|
||||||
|
[ClientToolCallState.error]: { text: 'Failed to gather workflow context', icon: XCircle },
|
||||||
|
[ClientToolCallState.rejected]: { text: 'Skipped workflow context', icon: MinusCircle },
|
||||||
|
[ClientToolCallState.aborted]: { text: 'Aborted workflow context', icon: MinusCircle },
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const META_workflow_context_expand: ToolMetadata = {
|
||||||
|
displayNames: {
|
||||||
|
[ClientToolCallState.generating]: { text: 'Expanding workflow schemas', icon: Loader2 },
|
||||||
|
[ClientToolCallState.pending]: { text: 'Expanding workflow schemas', icon: Loader2 },
|
||||||
|
[ClientToolCallState.executing]: { text: 'Expanding workflow schemas', icon: Loader2 },
|
||||||
|
[ClientToolCallState.success]: { text: 'Expanded workflow schemas', icon: FileText },
|
||||||
|
[ClientToolCallState.error]: { text: 'Failed to expand workflow schemas', icon: XCircle },
|
||||||
|
[ClientToolCallState.rejected]: { text: 'Skipped schema expansion', icon: MinusCircle },
|
||||||
|
[ClientToolCallState.aborted]: { text: 'Aborted schema expansion', icon: MinusCircle },
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
const META_workflow_verify: ToolMetadata = {
|
||||||
|
displayNames: {
|
||||||
|
[ClientToolCallState.generating]: { text: 'Verifying workflow', icon: Loader2 },
|
||||||
|
[ClientToolCallState.pending]: { text: 'Verifying workflow', icon: Loader2 },
|
||||||
|
[ClientToolCallState.executing]: { text: 'Verifying workflow', icon: Loader2 },
|
||||||
|
[ClientToolCallState.success]: { text: 'Verified workflow', icon: CheckCircle2 },
|
||||||
|
[ClientToolCallState.error]: { text: 'Workflow verification failed', icon: XCircle },
|
||||||
|
[ClientToolCallState.rejected]: { text: 'Skipped workflow verification', icon: MinusCircle },
|
||||||
|
[ClientToolCallState.aborted]: { text: 'Aborted workflow verification', icon: MinusCircle },
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
const META_evaluate: ToolMetadata = {
|
const META_evaluate: ToolMetadata = {
|
||||||
displayNames: {
|
displayNames: {
|
||||||
[ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
|
[ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
|
||||||
@@ -2541,7 +2601,12 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
|||||||
deploy_chat: META_deploy_chat,
|
deploy_chat: META_deploy_chat,
|
||||||
deploy_mcp: META_deploy_mcp,
|
deploy_mcp: META_deploy_mcp,
|
||||||
edit: META_edit,
|
edit: META_edit,
|
||||||
edit_workflow: META_edit_workflow,
|
workflow_context_get: META_workflow_context_get,
|
||||||
|
workflow_context_expand: META_workflow_context_expand,
|
||||||
|
workflow_change: META_workflow_change,
|
||||||
|
workflow_verify: META_workflow_verify,
|
||||||
|
workflow_run: META_run_workflow,
|
||||||
|
workflow_deploy: META_deploy_api,
|
||||||
evaluate: META_evaluate,
|
evaluate: META_evaluate,
|
||||||
get_block_config: META_get_block_config,
|
get_block_config: META_get_block_config,
|
||||||
get_block_options: META_get_block_options,
|
get_block_options: META_get_block_options,
|
||||||
|
|||||||
@@ -1,680 +0,0 @@
|
|||||||
export type DirectToolDef = {
|
|
||||||
name: string
|
|
||||||
description: string
|
|
||||||
inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
|
|
||||||
toolId: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export type SubagentToolDef = {
|
|
||||||
name: string
|
|
||||||
description: string
|
|
||||||
inputSchema: { type: 'object'; properties?: Record<string, unknown>; required?: string[] }
|
|
||||||
agentId: string
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Direct tools that execute immediately without LLM orchestration.
|
|
||||||
* These are fast database queries that don't need AI reasoning.
|
|
||||||
*/
|
|
||||||
export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
|
||||||
{
|
|
||||||
name: 'list_workspaces',
|
|
||||||
toolId: 'list_user_workspaces',
|
|
||||||
description:
|
|
||||||
'List all workspaces the user has access to. Returns workspace IDs, names, and roles. Use this first to determine which workspace to operate in.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'list_workflows',
|
|
||||||
toolId: 'list_user_workflows',
|
|
||||||
description:
|
|
||||||
'List all workflows the user has access to. Returns workflow IDs, names, workspace, and folder info. Use workspaceId/folderId to scope results.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workspaceId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Optional workspace ID to filter workflows.',
|
|
||||||
},
|
|
||||||
folderId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Optional folder ID to filter workflows.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'list_folders',
|
|
||||||
toolId: 'list_folders',
|
|
||||||
description:
|
|
||||||
'List all folders in a workspace. Returns folder IDs, names, and parent relationships for organizing workflows.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workspaceId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Workspace ID to list folders from.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workspaceId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'get_workflow',
|
|
||||||
toolId: 'get_user_workflow',
|
|
||||||
description:
|
|
||||||
'Get a workflow by ID. Returns the full workflow definition including all blocks, connections, and configuration.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Workflow ID to retrieve.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'create_workflow',
|
|
||||||
toolId: 'create_workflow',
|
|
||||||
description:
|
|
||||||
'Create a new empty workflow. Returns the new workflow ID. Always call this FIRST before sim_build for new workflows. Use workspaceId to place it in a specific workspace.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
name: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Name for the new workflow.',
|
|
||||||
},
|
|
||||||
workspaceId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Optional workspace ID. Uses default workspace if not provided.',
|
|
||||||
},
|
|
||||||
folderId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Optional folder ID to place the workflow in.',
|
|
||||||
},
|
|
||||||
description: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Optional description for the workflow.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['name'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'create_folder',
|
|
||||||
toolId: 'create_folder',
|
|
||||||
description:
|
|
||||||
'Create a new folder for organizing workflows. Use parentId to create nested folder hierarchies.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
name: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Name for the new folder.',
|
|
||||||
},
|
|
||||||
workspaceId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Optional workspace ID. Uses default workspace if not provided.',
|
|
||||||
},
|
|
||||||
parentId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Optional parent folder ID for nested folders.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['name'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'rename_workflow',
|
|
||||||
toolId: 'rename_workflow',
|
|
||||||
description: 'Rename an existing workflow.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'The workflow ID to rename.',
|
|
||||||
},
|
|
||||||
name: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'The new name for the workflow.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId', 'name'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'move_workflow',
|
|
||||||
toolId: 'move_workflow',
|
|
||||||
description:
|
|
||||||
'Move a workflow into a different folder. Omit folderId or pass empty string to move to workspace root.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'The workflow ID to move.',
|
|
||||||
},
|
|
||||||
folderId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Target folder ID. Omit or pass empty string to move to workspace root.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'move_folder',
|
|
||||||
toolId: 'move_folder',
|
|
||||||
description:
|
|
||||||
'Move a folder into another folder. Omit parentId or pass empty string to move to workspace root.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
folderId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'The folder ID to move.',
|
|
||||||
},
|
|
||||||
parentId: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'Target parent folder ID. Omit or pass empty string to move to workspace root.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['folderId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'run_workflow',
|
|
||||||
toolId: 'run_workflow',
|
|
||||||
description:
|
|
||||||
'Run a workflow and return its output. Works on both draft and deployed states. By default runs the draft (live) state.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The workflow ID to run.',
|
|
||||||
},
|
|
||||||
workflow_input: {
|
|
||||||
type: 'object',
|
|
||||||
description:
|
|
||||||
'JSON object with input values. Keys should match the workflow start block input field names.',
|
|
||||||
},
|
|
||||||
useDeployedState: {
|
|
||||||
type: 'boolean',
|
|
||||||
description: 'When true, runs the deployed version instead of the draft. Default: false.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'run_workflow_until_block',
|
|
||||||
toolId: 'run_workflow_until_block',
|
|
||||||
description:
|
|
||||||
'Run a workflow and stop after a specific block completes. Useful for testing partial execution or debugging specific blocks.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The workflow ID to run.',
|
|
||||||
},
|
|
||||||
stopAfterBlockId: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'REQUIRED. The block ID to stop after. Execution halts once this block completes.',
|
|
||||||
},
|
|
||||||
workflow_input: {
|
|
||||||
type: 'object',
|
|
||||||
description: 'JSON object with input values for the workflow.',
|
|
||||||
},
|
|
||||||
useDeployedState: {
|
|
||||||
type: 'boolean',
|
|
||||||
description: 'When true, runs the deployed version instead of the draft. Default: false.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId', 'stopAfterBlockId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'run_from_block',
|
|
||||||
toolId: 'run_from_block',
|
|
||||||
description:
|
|
||||||
'Run a workflow starting from a specific block, using cached outputs from a prior execution for upstream blocks. The workflow must have been run at least once first.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The workflow ID to run.',
|
|
||||||
},
|
|
||||||
startBlockId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The block ID to start execution from.',
|
|
||||||
},
|
|
||||||
executionId: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
|
|
||||||
},
|
|
||||||
workflow_input: {
|
|
||||||
type: 'object',
|
|
||||||
description: 'Optional input values for the workflow.',
|
|
||||||
},
|
|
||||||
useDeployedState: {
|
|
||||||
type: 'boolean',
|
|
||||||
description: 'When true, runs the deployed version instead of the draft. Default: false.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId', 'startBlockId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'run_block',
|
|
||||||
toolId: 'run_block',
|
|
||||||
description:
|
|
||||||
'Run a single block in isolation using cached outputs from a prior execution. Only the specified block executes — nothing upstream or downstream. The workflow must have been run at least once first.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The workflow ID.',
|
|
||||||
},
|
|
||||||
blockId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The block ID to run in isolation.',
|
|
||||||
},
|
|
||||||
executionId: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
|
|
||||||
},
|
|
||||||
workflow_input: {
|
|
||||||
type: 'object',
|
|
||||||
description: 'Optional input values for the workflow.',
|
|
||||||
},
|
|
||||||
useDeployedState: {
|
|
||||||
type: 'boolean',
|
|
||||||
description: 'When true, runs the deployed version instead of the draft. Default: false.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId', 'blockId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'get_deployed_workflow_state',
|
|
||||||
toolId: 'get_deployed_workflow_state',
|
|
||||||
description:
|
|
||||||
'Get the deployed (production) state of a workflow. Returns the full workflow definition as deployed, or indicates if the workflow is not yet deployed.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The workflow ID to get the deployed state for.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'generate_api_key',
|
|
||||||
toolId: 'generate_api_key',
|
|
||||||
description:
|
|
||||||
'Generate a new workspace API key for calling workflow API endpoints. The key is only shown once — tell the user to save it immediately.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
name: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'A descriptive name for the API key (e.g., "production-key", "dev-testing").',
|
|
||||||
},
|
|
||||||
workspaceId: {
|
|
||||||
type: 'string',
|
|
||||||
description: "Optional workspace ID. Defaults to user's default workspace.",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['name'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
export const SUBAGENT_TOOL_DEFS: SubagentToolDef[] = [
|
|
||||||
{
|
|
||||||
name: 'sim_build',
|
|
||||||
agentId: 'build',
|
|
||||||
description: `Build a workflow end-to-end in a single step. This is the fast mode equivalent for headless/MCP usage.
|
|
||||||
|
|
||||||
USE THIS WHEN:
|
|
||||||
- Building a new workflow from scratch
|
|
||||||
- Modifying an existing workflow
|
|
||||||
- You want to gather information and build in one pass without separate plan→edit steps
|
|
||||||
|
|
||||||
WORKFLOW ID (REQUIRED):
|
|
||||||
- For NEW workflows: First call create_workflow to get a workflowId, then pass it here
|
|
||||||
- For EXISTING workflows: Always pass the workflowId parameter
|
|
||||||
|
|
||||||
CAN DO:
|
|
||||||
- Gather information about blocks, credentials, patterns
|
|
||||||
- Search documentation and patterns for best practices
|
|
||||||
- Add, modify, or remove blocks
|
|
||||||
- Configure block settings and connections
|
|
||||||
- Set environment variables and workflow variables
|
|
||||||
|
|
||||||
CANNOT DO:
|
|
||||||
- Run or test workflows (use sim_test separately)
|
|
||||||
- Deploy workflows (use sim_deploy separately)
|
|
||||||
|
|
||||||
WORKFLOW:
|
|
||||||
1. Call create_workflow to get a workflowId (for new workflows)
|
|
||||||
2. Call sim_build with the request and workflowId
|
|
||||||
3. Build agent gathers info and builds in one pass
|
|
||||||
4. Call sim_test to verify it works
|
|
||||||
5. Optionally call sim_deploy to make it externally accessible`,
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'What you want to build or modify in the workflow.',
|
|
||||||
},
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'REQUIRED. The workflow ID. For new workflows, call create_workflow first to get this.',
|
|
||||||
},
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request', 'workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_discovery',
|
|
||||||
agentId: 'discovery',
|
|
||||||
description: `Find workflows by their contents or functionality when the user doesn't know the exact name or ID.
|
|
||||||
|
|
||||||
USE THIS WHEN:
|
|
||||||
- User describes a workflow by what it does: "the one that sends emails", "my Slack notification workflow"
|
|
||||||
- User refers to workflow contents: "the workflow with the OpenAI block"
|
|
||||||
- User needs to search/match workflows by functionality or description
|
|
||||||
|
|
||||||
DO NOT USE (use direct tools instead):
|
|
||||||
- User knows the workflow name → use get_workflow
|
|
||||||
- User wants to list all workflows → use list_workflows
|
|
||||||
- User wants to list workspaces → use list_workspaces
|
|
||||||
- User wants to list folders → use list_folders`,
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
workspaceId: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_plan',
|
|
||||||
agentId: 'plan',
|
|
||||||
description: `Plan workflow changes by gathering required information. For most cases, prefer sim_build which combines planning and editing in one step.
|
|
||||||
|
|
||||||
USE THIS WHEN:
|
|
||||||
- You need fine-grained control over the build process
|
|
||||||
- You want to inspect the plan before executing it
|
|
||||||
|
|
||||||
WORKFLOW ID (REQUIRED):
|
|
||||||
- For NEW workflows: First call create_workflow to get a workflowId, then pass it here
|
|
||||||
- For EXISTING workflows: Always pass the workflowId parameter
|
|
||||||
|
|
||||||
This tool gathers information about available blocks, credentials, and the current workflow state.
|
|
||||||
|
|
||||||
RETURNS: A plan object containing block configurations, connections, and technical details.
|
|
||||||
IMPORTANT: Pass the returned plan EXACTLY to sim_edit - do not modify or summarize it.`,
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'What you want to build or modify in the workflow.',
|
|
||||||
},
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'REQUIRED. The workflow ID. For new workflows, call create_workflow first to get this.',
|
|
||||||
},
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request', 'workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_edit',
|
|
||||||
agentId: 'edit',
|
|
||||||
description: `Execute a workflow plan from sim_plan. For most cases, prefer sim_build which combines planning and editing in one step.
|
|
||||||
|
|
||||||
WORKFLOW ID (REQUIRED):
|
|
||||||
- You MUST provide the workflowId parameter
|
|
||||||
|
|
||||||
PLAN (REQUIRED):
|
|
||||||
- Pass the EXACT plan object from sim_plan in the context.plan field
|
|
||||||
- Do NOT modify, summarize, or interpret the plan - pass it verbatim
|
|
||||||
|
|
||||||
After sim_edit completes, you can test immediately with sim_test, or deploy with sim_deploy to make it accessible externally.`,
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
message: { type: 'string', description: 'Optional additional instructions for the edit.' },
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'REQUIRED. The workflow ID to edit. Get this from create_workflow for new workflows.',
|
|
||||||
},
|
|
||||||
plan: {
|
|
||||||
type: 'object',
|
|
||||||
description: 'The plan object from sim_plan. Pass it EXACTLY as returned, do not modify.',
|
|
||||||
},
|
|
||||||
context: {
|
|
||||||
type: 'object',
|
|
||||||
description:
|
|
||||||
'Additional context. Put the plan in context.plan if not using the plan field directly.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_deploy',
|
|
||||||
agentId: 'deploy',
|
|
||||||
description: `Deploy a workflow to make it accessible externally. Workflows can be tested without deploying, but deployment is needed for API access, chat UIs, or MCP exposure.
|
|
||||||
|
|
||||||
DEPLOYMENT TYPES:
|
|
||||||
- "deploy as api" - REST API endpoint for programmatic access
|
|
||||||
- "deploy as chat" - Managed chat UI with auth options
|
|
||||||
- "deploy as mcp" - Expose as MCP tool on an MCP server for AI agents to call
|
|
||||||
|
|
||||||
MCP DEPLOYMENT FLOW:
|
|
||||||
The deploy subagent will automatically: list available MCP servers → create one if needed → deploy the workflow as an MCP tool to that server. You can specify server name, tool name, and tool description.
|
|
||||||
|
|
||||||
ALSO CAN:
|
|
||||||
- Get the deployed (production) state to compare with draft
|
|
||||||
- Generate workspace API keys for calling deployed workflows
|
|
||||||
- List and create MCP servers in the workspace`,
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'The deployment request, e.g. "deploy as api" or "deploy as chat"',
|
|
||||||
},
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The workflow ID to deploy.',
|
|
||||||
},
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request', 'workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_test',
|
|
||||||
agentId: 'test',
|
|
||||||
description: `Run a workflow and verify its outputs. Works on both deployed and undeployed (draft) workflows. Use after building to verify correctness.
|
|
||||||
|
|
||||||
Supports full and partial execution:
|
|
||||||
- Full run with test inputs
|
|
||||||
- Stop after a specific block (run_workflow_until_block)
|
|
||||||
- Run a single block in isolation (run_block)
|
|
||||||
- Resume from a specific block (run_from_block)`,
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
workflowId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'REQUIRED. The workflow ID to test.',
|
|
||||||
},
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request', 'workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_debug',
|
|
||||||
agentId: 'debug',
|
|
||||||
description:
|
|
||||||
'Diagnose errors or unexpected workflow behavior. Provide the error message and workflowId. Returns root cause analysis and fix suggestions.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
error: { type: 'string', description: 'The error message or description of the issue.' },
|
|
||||||
workflowId: { type: 'string', description: 'REQUIRED. The workflow ID to debug.' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['error', 'workflowId'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_auth',
|
|
||||||
agentId: 'auth',
|
|
||||||
description:
|
|
||||||
'Check OAuth connection status, list connected services, and initiate new OAuth connections. Use when a workflow needs third-party service access (Google, Slack, GitHub, etc.). In MCP/headless mode, returns an authorization URL the user must open in their browser to complete the OAuth flow.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_knowledge',
|
|
||||||
agentId: 'knowledge',
|
|
||||||
description:
|
|
||||||
'Manage knowledge bases for RAG-powered document retrieval. Supports listing, creating, updating, and deleting knowledge bases. Knowledge bases can be attached to agent blocks for context-aware responses.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_custom_tool',
|
|
||||||
agentId: 'custom_tool',
|
|
||||||
description:
|
|
||||||
'Manage custom tools (reusable API integrations). Supports listing, creating, updating, and deleting custom tools. Custom tools can be added to agent blocks as callable functions.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_info',
|
|
||||||
agentId: 'info',
|
|
||||||
description:
|
|
||||||
"Inspect a workflow's blocks, connections, outputs, variables, and metadata. Use for questions about the Sim platform itself — how blocks work, what integrations are available, platform concepts, etc. Always provide workflowId to scope results to a specific workflow.",
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
workflowId: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_workflow',
|
|
||||||
agentId: 'workflow',
|
|
||||||
description:
|
|
||||||
'Manage workflow-level configuration: environment variables, settings, scheduling, and deployment status. Use for any data about a specific workflow — its settings, credentials, variables, or deployment state.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
workflowId: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_research',
|
|
||||||
agentId: 'research',
|
|
||||||
description:
|
|
||||||
'Research external APIs and documentation. Use when you need to understand third-party services, external APIs, authentication flows, or data formats OUTSIDE of Sim. For questions about Sim itself, use sim_info instead.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_superagent',
|
|
||||||
agentId: 'superagent',
|
|
||||||
description:
|
|
||||||
'Execute direct actions NOW: send an email, post to Slack, make an API call, etc. Use when the user wants to DO something immediately rather than build a workflow for it.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'sim_platform',
|
|
||||||
agentId: 'tour',
|
|
||||||
description:
|
|
||||||
'Get help with Sim platform navigation, keyboard shortcuts, and UI actions. Use when the user asks "how do I..." about the Sim editor, wants keyboard shortcuts, or needs to know what actions are available in the UI.',
|
|
||||||
inputSchema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
request: { type: 'string' },
|
|
||||||
context: { type: 'object' },
|
|
||||||
},
|
|
||||||
required: ['request'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]
|
|
||||||
@@ -109,7 +109,7 @@ function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
|
|||||||
return undefined
|
return undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return the actual option ID/value that edit_workflow expects, not the display label
|
// Return canonical option IDs/values expected by workflow_change compilation and apply
|
||||||
return rawOptions
|
return rawOptions
|
||||||
.map((opt: any) => {
|
.map((opt: any) => {
|
||||||
if (!opt) return undefined
|
if (!opt) return undefined
|
||||||
|
|||||||
@@ -11,8 +11,13 @@ import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-
|
|||||||
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
|
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
|
||||||
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
||||||
import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
|
import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
|
||||||
import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
|
|
||||||
import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
|
import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
|
||||||
|
import { workflowChangeServerTool } from '@/lib/copilot/tools/server/workflow/workflow-change'
|
||||||
|
import {
|
||||||
|
workflowContextExpandServerTool,
|
||||||
|
workflowContextGetServerTool,
|
||||||
|
} from '@/lib/copilot/tools/server/workflow/workflow-context'
|
||||||
|
import { workflowVerifyServerTool } from '@/lib/copilot/tools/server/workflow/workflow-verify'
|
||||||
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||||
|
|
||||||
export { ExecuteResponseSuccessSchema }
|
export { ExecuteResponseSuccessSchema }
|
||||||
@@ -27,7 +32,6 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
|
|||||||
[getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
|
[getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
|
||||||
[getBlockConfigServerTool.name]: getBlockConfigServerTool,
|
[getBlockConfigServerTool.name]: getBlockConfigServerTool,
|
||||||
[getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
|
[getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
|
||||||
[editWorkflowServerTool.name]: editWorkflowServerTool,
|
|
||||||
[getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
|
[getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
|
||||||
[searchDocumentationServerTool.name]: searchDocumentationServerTool,
|
[searchDocumentationServerTool.name]: searchDocumentationServerTool,
|
||||||
[searchOnlineServerTool.name]: searchOnlineServerTool,
|
[searchOnlineServerTool.name]: searchOnlineServerTool,
|
||||||
@@ -35,6 +39,10 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
|
|||||||
[getCredentialsServerTool.name]: getCredentialsServerTool,
|
[getCredentialsServerTool.name]: getCredentialsServerTool,
|
||||||
[makeApiRequestServerTool.name]: makeApiRequestServerTool,
|
[makeApiRequestServerTool.name]: makeApiRequestServerTool,
|
||||||
[knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
|
[knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
|
||||||
|
[workflowContextGetServerTool.name]: workflowContextGetServerTool,
|
||||||
|
[workflowContextExpandServerTool.name]: workflowContextExpandServerTool,
|
||||||
|
[workflowChangeServerTool.name]: workflowChangeServerTool,
|
||||||
|
[workflowVerifyServerTool.name]: workflowVerifyServerTool,
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
225
apps/sim/lib/copilot/tools/server/workflow/change-store.ts
Normal file
225
apps/sim/lib/copilot/tools/server/workflow/change-store.ts
Normal file
@@ -0,0 +1,225 @@
|
|||||||
|
import crypto from 'crypto'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { getRedisClient } from '@/lib/core/config/redis'
|
||||||
|
|
||||||
|
type StoreEntry<T> = {
|
||||||
|
value: T
|
||||||
|
expiresAt: number
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEFAULT_TTL_MS = 30 * 60 * 1000
|
||||||
|
const MAX_ENTRIES = 500
|
||||||
|
const DEFAULT_TTL_SECONDS = Math.floor(DEFAULT_TTL_MS / 1000)
|
||||||
|
const CONTEXT_PREFIX = 'copilot:workflow_change:context'
|
||||||
|
const PROPOSAL_PREFIX = 'copilot:workflow_change:proposal'
|
||||||
|
|
||||||
|
const logger = createLogger('WorkflowChangeStore')
|
||||||
|
|
||||||
|
class TTLStore<T> {
|
||||||
|
private readonly data = new Map<string, StoreEntry<T>>()
|
||||||
|
|
||||||
|
constructor(private readonly ttlMs = DEFAULT_TTL_MS) {}
|
||||||
|
|
||||||
|
set(value: T): string {
|
||||||
|
this.gc()
|
||||||
|
if (this.data.size >= MAX_ENTRIES) {
|
||||||
|
const firstKey = this.data.keys().next().value as string | undefined
|
||||||
|
if (firstKey) {
|
||||||
|
this.data.delete(firstKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const id = crypto.randomUUID()
|
||||||
|
this.data.set(id, {
|
||||||
|
value,
|
||||||
|
expiresAt: Date.now() + this.ttlMs,
|
||||||
|
})
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
get(id: string): T | null {
|
||||||
|
const entry = this.data.get(id)
|
||||||
|
if (!entry) return null
|
||||||
|
if (entry.expiresAt <= Date.now()) {
|
||||||
|
this.data.delete(id)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return entry.value
|
||||||
|
}
|
||||||
|
|
||||||
|
upsert(id: string, value: T): void {
|
||||||
|
this.gc()
|
||||||
|
this.data.set(id, {
|
||||||
|
value,
|
||||||
|
expiresAt: Date.now() + this.ttlMs,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private gc(): void {
|
||||||
|
const now = Date.now()
|
||||||
|
for (const [key, entry] of this.data.entries()) {
|
||||||
|
if (entry.expiresAt <= now) {
|
||||||
|
this.data.delete(key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export type WorkflowContextPack = {
|
||||||
|
workflowId: string
|
||||||
|
snapshotHash: string
|
||||||
|
workflowState: {
|
||||||
|
blocks: Record<string, any>
|
||||||
|
edges: Array<Record<string, any>>
|
||||||
|
loops: Record<string, any>
|
||||||
|
parallels: Record<string, any>
|
||||||
|
}
|
||||||
|
schemasByType: Record<string, any>
|
||||||
|
schemaRefsByType: Record<string, string>
|
||||||
|
summary: Record<string, any>
|
||||||
|
}
|
||||||
|
|
||||||
|
export type WorkflowChangeProposal = {
|
||||||
|
workflowId: string
|
||||||
|
baseSnapshotHash: string
|
||||||
|
compiledOperations: Array<Record<string, any>>
|
||||||
|
diffSummary: Record<string, any>
|
||||||
|
warnings: string[]
|
||||||
|
diagnostics: string[]
|
||||||
|
touchedBlocks: string[]
|
||||||
|
resolvedIds?: Record<string, string>
|
||||||
|
acceptanceAssertions: string[]
|
||||||
|
postApply?: {
|
||||||
|
verify?: boolean
|
||||||
|
run?: Record<string, any>
|
||||||
|
evaluator?: Record<string, any>
|
||||||
|
}
|
||||||
|
handoff?: {
|
||||||
|
objective?: string
|
||||||
|
constraints?: string[]
|
||||||
|
resolvedIds?: Record<string, string>
|
||||||
|
assumptions?: string[]
|
||||||
|
unresolvedRisks?: string[]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const contextPackStore = new TTLStore<WorkflowContextPack>()
|
||||||
|
const proposalStore = new TTLStore<WorkflowChangeProposal>()
|
||||||
|
|
||||||
|
function getContextRedisKey(id: string): string {
|
||||||
|
return `${CONTEXT_PREFIX}:${id}`
|
||||||
|
}
|
||||||
|
|
||||||
|
function getProposalRedisKey(id: string): string {
|
||||||
|
return `${PROPOSAL_PREFIX}:${id}`
|
||||||
|
}
|
||||||
|
|
||||||
|
async function writeRedisJson(key: string, value: unknown): Promise<void> {
|
||||||
|
const redis = getRedisClient()!
|
||||||
|
await redis.set(key, JSON.stringify(value), 'EX', DEFAULT_TTL_SECONDS)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function readRedisJson<T>(key: string): Promise<T | null> {
|
||||||
|
const redis = getRedisClient()!
|
||||||
|
|
||||||
|
const raw = await redis.get(key)
|
||||||
|
if (!raw) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return JSON.parse(raw) as T
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed parsing workflow change store JSON payload', { key, error })
|
||||||
|
await redis.del(key).catch(() => {})
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveContextPack(pack: WorkflowContextPack): Promise<string> {
|
||||||
|
if (!getRedisClient()) {
|
||||||
|
return contextPackStore.set(pack)
|
||||||
|
}
|
||||||
|
const id = crypto.randomUUID()
|
||||||
|
try {
|
||||||
|
await writeRedisJson(getContextRedisKey(id), pack)
|
||||||
|
return id
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Redis write failed for workflow context pack, using memory fallback', { error })
|
||||||
|
return contextPackStore.set(pack)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getContextPack(id: string): Promise<WorkflowContextPack | null> {
|
||||||
|
if (!getRedisClient()) {
|
||||||
|
return contextPackStore.get(id)
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
const redisPayload = await readRedisJson<WorkflowContextPack>(getContextRedisKey(id))
|
||||||
|
if (redisPayload) {
|
||||||
|
return redisPayload
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Redis read failed for workflow context pack, using memory fallback', { error })
|
||||||
|
}
|
||||||
|
return contextPackStore.get(id)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function updateContextPack(
|
||||||
|
id: string,
|
||||||
|
patch: Partial<WorkflowContextPack>
|
||||||
|
): Promise<WorkflowContextPack | null> {
|
||||||
|
const existing = await getContextPack(id)
|
||||||
|
if (!existing) return null
|
||||||
|
const merged: WorkflowContextPack = {
|
||||||
|
...existing,
|
||||||
|
...patch,
|
||||||
|
workflowState: patch.workflowState || existing.workflowState,
|
||||||
|
schemasByType: patch.schemasByType || existing.schemasByType,
|
||||||
|
schemaRefsByType: patch.schemaRefsByType || existing.schemaRefsByType,
|
||||||
|
summary: patch.summary || existing.summary,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!getRedisClient()) {
|
||||||
|
contextPackStore.upsert(id, merged)
|
||||||
|
return merged
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await writeRedisJson(getContextRedisKey(id), merged)
|
||||||
|
contextPackStore.upsert(id, merged)
|
||||||
|
return merged
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Redis update failed for workflow context pack, using memory fallback', { error })
|
||||||
|
contextPackStore.upsert(id, merged)
|
||||||
|
return merged
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveProposal(proposal: WorkflowChangeProposal): Promise<string> {
|
||||||
|
if (!getRedisClient()) {
|
||||||
|
return proposalStore.set(proposal)
|
||||||
|
}
|
||||||
|
const id = crypto.randomUUID()
|
||||||
|
try {
|
||||||
|
await writeRedisJson(getProposalRedisKey(id), proposal)
|
||||||
|
return id
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Redis write failed for workflow proposal, using memory fallback', { error })
|
||||||
|
return proposalStore.set(proposal)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getProposal(id: string): Promise<WorkflowChangeProposal | null> {
|
||||||
|
if (!getRedisClient()) {
|
||||||
|
return proposalStore.get(id)
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
const redisPayload = await readRedisJson<WorkflowChangeProposal>(getProposalRedisKey(id))
|
||||||
|
if (redisPayload) {
|
||||||
|
return redisPayload
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Redis read failed for workflow proposal, using memory fallback', { error })
|
||||||
|
}
|
||||||
|
return proposalStore.get(id)
|
||||||
|
}
|
||||||
@@ -1,298 +0,0 @@
|
|||||||
import { db } from '@sim/db'
|
|
||||||
import { workflow as workflowTable } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
|
||||||
import { applyAutoLayout } from '@/lib/workflows/autolayout'
|
|
||||||
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
|
|
||||||
import {
|
|
||||||
loadWorkflowFromNormalizedTables,
|
|
||||||
saveWorkflowToNormalizedTables,
|
|
||||||
} from '@/lib/workflows/persistence/utils'
|
|
||||||
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
|
||||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
|
||||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
|
||||||
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
|
||||||
import { applyOperationsToWorkflowState } from './engine'
|
|
||||||
import type { EditWorkflowParams, ValidationError } from './types'
|
|
||||||
import { preValidateCredentialInputs, validateWorkflowSelectorIds } from './validation'
|
|
||||||
|
|
||||||
async function getCurrentWorkflowStateFromDb(
|
|
||||||
workflowId: string
|
|
||||||
): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
|
|
||||||
const logger = createLogger('EditWorkflowServerTool')
|
|
||||||
const [workflowRecord] = await db
|
|
||||||
.select()
|
|
||||||
.from(workflowTable)
|
|
||||||
.where(eq(workflowTable.id, workflowId))
|
|
||||||
.limit(1)
|
|
||||||
if (!workflowRecord) throw new Error(`Workflow ${workflowId} not found in database`)
|
|
||||||
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
|
||||||
if (!normalized) throw new Error('Workflow has no normalized data')
|
|
||||||
|
|
||||||
// Validate and fix blocks without types
|
|
||||||
const blocks = { ...normalized.blocks }
|
|
||||||
const invalidBlocks: string[] = []
|
|
||||||
|
|
||||||
Object.entries(blocks).forEach(([id, block]: [string, any]) => {
|
|
||||||
if (!block.type) {
|
|
||||||
logger.warn(`Block ${id} loaded without type from database`, {
|
|
||||||
blockKeys: Object.keys(block),
|
|
||||||
blockName: block.name,
|
|
||||||
})
|
|
||||||
invalidBlocks.push(id)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
// Remove invalid blocks
|
|
||||||
invalidBlocks.forEach((id) => delete blocks[id])
|
|
||||||
|
|
||||||
// Remove edges connected to invalid blocks
|
|
||||||
const edges = normalized.edges.filter(
|
|
||||||
(edge: any) => !invalidBlocks.includes(edge.source) && !invalidBlocks.includes(edge.target)
|
|
||||||
)
|
|
||||||
|
|
||||||
const workflowState: any = {
|
|
||||||
blocks,
|
|
||||||
edges,
|
|
||||||
loops: normalized.loops || {},
|
|
||||||
parallels: normalized.parallels || {},
|
|
||||||
}
|
|
||||||
const subBlockValues: Record<string, Record<string, any>> = {}
|
|
||||||
Object.entries(normalized.blocks).forEach(([blockId, block]) => {
|
|
||||||
subBlockValues[blockId] = {}
|
|
||||||
Object.entries((block as any).subBlocks || {}).forEach(([subId, sub]) => {
|
|
||||||
if ((sub as any).value !== undefined) subBlockValues[blockId][subId] = (sub as any).value
|
|
||||||
})
|
|
||||||
})
|
|
||||||
return { workflowState, subBlockValues }
|
|
||||||
}
|
|
||||||
|
|
||||||
export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
|
|
||||||
name: 'edit_workflow',
|
|
||||||
async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<unknown> {
|
|
||||||
const logger = createLogger('EditWorkflowServerTool')
|
|
||||||
const { operations, workflowId, currentUserWorkflow } = params
|
|
||||||
if (!Array.isArray(operations) || operations.length === 0) {
|
|
||||||
throw new Error('operations are required and must be an array')
|
|
||||||
}
|
|
||||||
if (!workflowId) throw new Error('workflowId is required')
|
|
||||||
if (!context?.userId) {
|
|
||||||
throw new Error('Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
|
||||||
workflowId,
|
|
||||||
userId: context.userId,
|
|
||||||
action: 'write',
|
|
||||||
})
|
|
||||||
if (!authorization.allowed) {
|
|
||||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('Executing edit_workflow', {
|
|
||||||
operationCount: operations.length,
|
|
||||||
workflowId,
|
|
||||||
hasCurrentUserWorkflow: !!currentUserWorkflow,
|
|
||||||
})
|
|
||||||
|
|
||||||
// Get current workflow state
|
|
||||||
let workflowState: any
|
|
||||||
if (currentUserWorkflow) {
|
|
||||||
try {
|
|
||||||
workflowState = JSON.parse(currentUserWorkflow)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to parse currentUserWorkflow', error)
|
|
||||||
throw new Error('Invalid currentUserWorkflow format')
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const fromDb = await getCurrentWorkflowStateFromDb(workflowId)
|
|
||||||
workflowState = fromDb.workflowState
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get permission config for the user
|
|
||||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
|
||||||
|
|
||||||
// Pre-validate credential and apiKey inputs before applying operations
|
|
||||||
// This filters out invalid credentials and apiKeys for hosted models
|
|
||||||
let operationsToApply = operations
|
|
||||||
const credentialErrors: ValidationError[] = []
|
|
||||||
if (context?.userId) {
|
|
||||||
const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
|
|
||||||
operations,
|
|
||||||
{ userId: context.userId },
|
|
||||||
workflowState
|
|
||||||
)
|
|
||||||
operationsToApply = filteredOperations
|
|
||||||
credentialErrors.push(...credErrors)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply operations directly to the workflow state
|
|
||||||
const {
|
|
||||||
state: modifiedWorkflowState,
|
|
||||||
validationErrors,
|
|
||||||
skippedItems,
|
|
||||||
} = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)
|
|
||||||
|
|
||||||
// Add credential validation errors
|
|
||||||
validationErrors.push(...credentialErrors)
|
|
||||||
|
|
||||||
// Get workspaceId for selector validation
|
|
||||||
let workspaceId: string | undefined
|
|
||||||
try {
|
|
||||||
const [workflowRecord] = await db
|
|
||||||
.select({ workspaceId: workflowTable.workspaceId })
|
|
||||||
.from(workflowTable)
|
|
||||||
.where(eq(workflowTable.id, workflowId))
|
|
||||||
.limit(1)
|
|
||||||
workspaceId = workflowRecord?.workspaceId ?? undefined
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Failed to get workspaceId for selector validation', { error, workflowId })
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate selector IDs exist in the database
|
|
||||||
if (context?.userId) {
|
|
||||||
try {
|
|
||||||
const selectorErrors = await validateWorkflowSelectorIds(modifiedWorkflowState, {
|
|
||||||
userId: context.userId,
|
|
||||||
workspaceId,
|
|
||||||
})
|
|
||||||
validationErrors.push(...selectorErrors)
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Selector ID validation failed', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate the workflow state
|
|
||||||
const validation = validateWorkflowState(modifiedWorkflowState, { sanitize: true })
|
|
||||||
|
|
||||||
if (!validation.valid) {
|
|
||||||
logger.error('Edited workflow state is invalid', {
|
|
||||||
errors: validation.errors,
|
|
||||||
warnings: validation.warnings,
|
|
||||||
})
|
|
||||||
throw new Error(`Invalid edited workflow: ${validation.errors.join('; ')}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (validation.warnings.length > 0) {
|
|
||||||
logger.warn('Edited workflow validation warnings', {
|
|
||||||
warnings: validation.warnings,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract and persist custom tools to database (reuse workspaceId from selector validation)
|
|
||||||
if (context?.userId && workspaceId) {
|
|
||||||
try {
|
|
||||||
const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState
|
|
||||||
const { saved, errors } = await extractAndPersistCustomTools(
|
|
||||||
finalWorkflowState,
|
|
||||||
workspaceId,
|
|
||||||
context.userId
|
|
||||||
)
|
|
||||||
|
|
||||||
if (saved > 0) {
|
|
||||||
logger.info(`Persisted ${saved} custom tool(s) to database`, { workflowId })
|
|
||||||
}
|
|
||||||
|
|
||||||
if (errors.length > 0) {
|
|
||||||
logger.warn('Some custom tools failed to persist', { errors, workflowId })
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Failed to persist custom tools', { error, workflowId })
|
|
||||||
}
|
|
||||||
} else if (context?.userId && !workspaceId) {
|
|
||||||
logger.warn('Workflow has no workspaceId, skipping custom tools persistence', {
|
|
||||||
workflowId,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
logger.warn('No userId in context - skipping custom tools persistence', { workflowId })
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('edit_workflow successfully applied operations', {
|
|
||||||
operationCount: operations.length,
|
|
||||||
blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
|
|
||||||
edgesCount: modifiedWorkflowState.edges.length,
|
|
||||||
inputValidationErrors: validationErrors.length,
|
|
||||||
skippedItemsCount: skippedItems.length,
|
|
||||||
schemaValidationErrors: validation.errors.length,
|
|
||||||
validationWarnings: validation.warnings.length,
|
|
||||||
})
|
|
||||||
|
|
||||||
// Format validation errors for LLM feedback
|
|
||||||
const inputErrors =
|
|
||||||
validationErrors.length > 0
|
|
||||||
? validationErrors.map((e) => `Block "${e.blockId}" (${e.blockType}): ${e.error}`)
|
|
||||||
: undefined
|
|
||||||
|
|
||||||
// Format skipped items for LLM feedback
|
|
||||||
const skippedMessages =
|
|
||||||
skippedItems.length > 0 ? skippedItems.map((item) => item.reason) : undefined
|
|
||||||
|
|
||||||
// Persist the workflow state to the database
|
|
||||||
const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState
|
|
||||||
|
|
||||||
// Apply autolayout to position blocks properly
|
|
||||||
const layoutResult = applyAutoLayout(finalWorkflowState.blocks, finalWorkflowState.edges, {
|
|
||||||
horizontalSpacing: 250,
|
|
||||||
verticalSpacing: 100,
|
|
||||||
padding: { x: 100, y: 100 },
|
|
||||||
})
|
|
||||||
|
|
||||||
const layoutedBlocks =
|
|
||||||
layoutResult.success && layoutResult.blocks ? layoutResult.blocks : finalWorkflowState.blocks
|
|
||||||
|
|
||||||
if (!layoutResult.success) {
|
|
||||||
logger.warn('Autolayout failed, using default positions', {
|
|
||||||
workflowId,
|
|
||||||
error: layoutResult.error,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const workflowStateForDb = {
|
|
||||||
blocks: layoutedBlocks,
|
|
||||||
edges: finalWorkflowState.edges,
|
|
||||||
loops: generateLoopBlocks(layoutedBlocks as any),
|
|
||||||
parallels: generateParallelBlocks(layoutedBlocks as any),
|
|
||||||
lastSaved: Date.now(),
|
|
||||||
isDeployed: false,
|
|
||||||
}
|
|
||||||
|
|
||||||
const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowStateForDb as any)
|
|
||||||
if (!saveResult.success) {
|
|
||||||
logger.error('Failed to persist workflow state to database', {
|
|
||||||
workflowId,
|
|
||||||
error: saveResult.error,
|
|
||||||
})
|
|
||||||
throw new Error(`Failed to save workflow: ${saveResult.error}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update workflow's lastSynced timestamp
|
|
||||||
await db
|
|
||||||
.update(workflowTable)
|
|
||||||
.set({
|
|
||||||
lastSynced: new Date(),
|
|
||||||
updatedAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(workflowTable.id, workflowId))
|
|
||||||
|
|
||||||
logger.info('Workflow state persisted to database', { workflowId })
|
|
||||||
|
|
||||||
// Return the modified workflow state with autolayout applied
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
workflowState: { ...finalWorkflowState, blocks: layoutedBlocks },
|
|
||||||
// Include input validation errors so the LLM can see what was rejected
|
|
||||||
...(inputErrors && {
|
|
||||||
inputValidationErrors: inputErrors,
|
|
||||||
inputValidationMessage: `${inputErrors.length} input(s) were rejected due to validation errors. The workflow was still updated with valid inputs only. Errors: ${inputErrors.join('; ')}`,
|
|
||||||
}),
|
|
||||||
// Include skipped items so the LLM can see what operations were skipped
|
|
||||||
...(skippedMessages && {
|
|
||||||
skippedItems: skippedMessages,
|
|
||||||
skippedItemsMessage: `${skippedItems.length} operation(s) were skipped due to invalid references. Details: ${skippedMessages.join('; ')}`,
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
2456
apps/sim/lib/copilot/tools/server/workflow/workflow-change.ts
Normal file
2456
apps/sim/lib/copilot/tools/server/workflow/workflow-change.ts
Normal file
File diff suppressed because it is too large
Load Diff
272
apps/sim/lib/copilot/tools/server/workflow/workflow-context.ts
Normal file
272
apps/sim/lib/copilot/tools/server/workflow/workflow-context.ts
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
import { getBlock } from '@/blocks/registry'
|
||||||
|
import { getContextPack, saveContextPack, updateContextPack } from './change-store'
|
||||||
|
import {
|
||||||
|
buildSchemasByType,
|
||||||
|
getAllKnownBlockTypes,
|
||||||
|
hashWorkflowState,
|
||||||
|
loadWorkflowStateFromDb,
|
||||||
|
summarizeWorkflowState,
|
||||||
|
} from './workflow-state'
|
||||||
|
|
||||||
|
// Logger scoped to the workflow-context server tools defined in this module.
const logger = createLogger('WorkflowContextServerTool')

// Input for `workflow_context_get`.
// - objective: optional free-text goal, stored with the pack summary.
// - includeBlockTypes: block types whose schemas should be included.
// - includeAllSchemas: legacy override; when true, forces schemaMode 'all'.
// - schemaMode: 'minimal' (requested types only), 'workflow' (workflow + requested), or 'all'.
const WorkflowContextGetInputSchema = z.object({
  workflowId: z.string(),
  objective: z.string().optional(),
  includeBlockTypes: z.array(z.string()).optional(),
  includeAllSchemas: z.boolean().optional(),
  schemaMode: z.enum(['minimal', 'workflow', 'all']).optional(),
})

type WorkflowContextGetParams = z.infer<typeof WorkflowContextGetInputSchema>

// Input for `workflow_context_expand`.
// - blockTypes: block types to add to an existing context pack.
// - schemaRefs: refs of the form `<blockType>@sha256:<hash>`; only the type part is used.
const WorkflowContextExpandInputSchema = z.object({
  contextPackId: z.string(),
  blockTypes: z.array(z.string()).optional(),
  schemaRefs: z.array(z.string()).optional(),
})

type WorkflowContextExpandParams = z.infer<typeof WorkflowContextExpandInputSchema>

// Maps normalized aliases/shorthands (see normalizeToken) to canonical block type IDs,
// consulted before the registry-derived index when resolving requested block types.
const BLOCK_TYPE_ALIAS_MAP: Record<string, string> = {
  start: 'start_trigger',
  starttrigger: 'start_trigger',
  starter: 'start_trigger',
  trigger: 'start_trigger',
  loop: 'loop',
  parallel: 'parallel',
  parallelai: 'parallel',
  hitl: 'human_in_the_loop',
  humanintheloop: 'human_in_the_loop',
  routerv2: 'router_v2',
}
|
||||||
|
|
||||||
|
function normalizeToken(value: string): string {
|
||||||
|
return value
|
||||||
|
.trim()
|
||||||
|
.toLowerCase()
|
||||||
|
.replace(/[^a-z0-9]+/g, '')
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildBlockTypeIndex(knownTypes: string[]): Map<string, string> {
|
||||||
|
const index = new Map<string, string>()
|
||||||
|
for (const blockType of knownTypes) {
|
||||||
|
const canonicalType = String(blockType || '').trim()
|
||||||
|
if (!canonicalType) continue
|
||||||
|
|
||||||
|
const normalizedType = normalizeToken(canonicalType)
|
||||||
|
if (normalizedType && !index.has(normalizedType)) {
|
||||||
|
index.set(normalizedType, canonicalType)
|
||||||
|
}
|
||||||
|
|
||||||
|
const blockConfig = getBlock(canonicalType)
|
||||||
|
const displayName = String(blockConfig?.name || '').trim()
|
||||||
|
const normalizedDisplayName = normalizeToken(displayName)
|
||||||
|
if (normalizedDisplayName && !index.has(normalizedDisplayName)) {
|
||||||
|
index.set(normalizedDisplayName, canonicalType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return index
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveBlockTypes(
|
||||||
|
requestedBlockTypes: string[],
|
||||||
|
knownTypes: string[]
|
||||||
|
): { resolved: string[]; unresolved: string[] } {
|
||||||
|
const index = buildBlockTypeIndex(knownTypes)
|
||||||
|
const resolved = new Set<string>()
|
||||||
|
const unresolved = new Set<string>()
|
||||||
|
|
||||||
|
for (const rawType of requestedBlockTypes) {
|
||||||
|
const normalized = normalizeToken(String(rawType || ''))
|
||||||
|
if (!normalized) continue
|
||||||
|
|
||||||
|
const aliasResolved = BLOCK_TYPE_ALIAS_MAP[normalized]
|
||||||
|
if (aliasResolved) {
|
||||||
|
resolved.add(aliasResolved)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const direct = index.get(normalized)
|
||||||
|
if (direct) {
|
||||||
|
resolved.add(direct)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
unresolved.add(String(rawType))
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
resolved: [...resolved],
|
||||||
|
unresolved: [...unresolved],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseSchemaRefToBlockType(schemaRef: string): string | null {
|
||||||
|
if (!schemaRef) return null
|
||||||
|
const [blockType] = schemaRef.split('@')
|
||||||
|
return blockType || null
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildAvailableBlockCatalog(
|
||||||
|
schemaRefsByType: Record<string, string>
|
||||||
|
): Array<Record<string, any>> {
|
||||||
|
return Object.entries(schemaRefsByType)
|
||||||
|
.sort((a, b) => a[0].localeCompare(b[0]))
|
||||||
|
.map(([blockType, schemaRef]) => ({
|
||||||
|
blockType,
|
||||||
|
schemaRef,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Server tool `workflow_context_get`.
 *
 * Builds a persisted "context pack" for a workflow: a snapshot of the
 * workflow state plus the serialized block schemas needed to reason about it.
 * Returns the pack ID along with a summary, schema refs, and the schemas
 * selected by `schemaMode`:
 *  - 'all' (or includeAllSchemas === true): every known block type
 *  - 'workflow': types present in the workflow plus any explicitly requested
 *  - 'minimal' (default): only the explicitly requested types
 *
 * Throws when the caller is missing or lacks read permission on the workflow.
 */
export const workflowContextGetServerTool: BaseServerTool<WorkflowContextGetParams, any> = {
  name: 'workflow_context_get',
  inputSchema: WorkflowContextGetInputSchema,
  async execute(params: WorkflowContextGetParams, context?: { userId: string }): Promise<any> {
    // A user identity is mandatory; this tool never runs anonymously.
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    // Enforce workspace-level read permission on the target workflow.
    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: params.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    // Load the current state and fingerprint it so later calls can detect staleness.
    const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
    const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)

    // Resolve both the block types already used by the workflow and those requested.
    const knownTypes = getAllKnownBlockTypes()
    const blockTypesInWorkflowRaw = Object.values(workflowState.blocks || {}).map((block: any) =>
      String(block?.type || '')
    )
    const requestedTypesRaw = params.includeBlockTypes || []
    const resolvedWorkflowTypes = resolveBlockTypes(blockTypesInWorkflowRaw, knownTypes).resolved
    const resolvedRequestedTypes = resolveBlockTypes(requestedTypesRaw, knownTypes)

    // includeAllSchemas is a legacy flag that forces 'all' mode.
    const schemaMode =
      params.includeAllSchemas === true ? 'all' : (params.schemaMode || 'minimal')
    const candidateTypes =
      schemaMode === 'all'
        ? knownTypes
        : schemaMode === 'workflow'
          ? [...resolvedWorkflowTypes, ...resolvedRequestedTypes.resolved]
          : [...resolvedRequestedTypes.resolved]
    const { schemasByType, schemaRefsByType } = buildSchemasByType(candidateTypes)
    // Types already present in the workflow are the natural candidates to expand next.
    const suggestedSchemaTypes = [...new Set(resolvedWorkflowTypes.filter(Boolean))]

    // Persist the pack so follow-up calls (workflow_context_expand) can extend it.
    const summary = summarizeWorkflowState(workflowState)
    const packId = await saveContextPack({
      workflowId: params.workflowId,
      snapshotHash,
      workflowState,
      schemasByType,
      schemaRefsByType,
      summary: {
        ...summary,
        objective: params.objective || null,
      },
    })

    logger.info('Generated workflow context pack', {
      workflowId: params.workflowId,
      contextPackId: packId,
      schemaCount: Object.keys(schemaRefsByType).length,
    })

    return {
      success: true,
      contextPackId: packId,
      workflowId: params.workflowId,
      snapshotHash,
      schemaMode,
      summary: {
        ...summary,
        objective: params.objective || null,
      },
      schemaRefsByType,
      availableBlockCatalog: buildAvailableBlockCatalog(schemaRefsByType),
      suggestedSchemaTypes,
      // Echo back anything we could not resolve so the caller can correct it.
      unresolvedRequestedBlockTypes: resolvedRequestedTypes.unresolved,
      knownBlockTypes: knownTypes,
      inScopeSchemas: schemasByType,
    }
  },
}
|
||||||
|
|
||||||
|
/**
 * Server tool `workflow_context_expand`.
 *
 * Adds more block schemas to an existing context pack. Requested types may be
 * given directly (`blockTypes`) or implied by schema refs (`schemaRefs`,
 * format `<blockType>@sha256:<hash>`). Newly built schemas are merged into
 * the stored pack; unknown types produce warnings rather than failures.
 *
 * Throws when the caller is missing, the pack is gone/expired, or the caller
 * lacks read permission on the pack's workflow.
 */
export const workflowContextExpandServerTool: BaseServerTool<WorkflowContextExpandParams, any> = {
  name: 'workflow_context_expand',
  inputSchema: WorkflowContextExpandInputSchema,
  async execute(params: WorkflowContextExpandParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    // Fetch the pack first so authorization can target its workflow.
    const contextPack = await getContextPack(params.contextPackId)
    if (!contextPack) {
      throw new Error(`Context pack not found or expired: ${params.contextPackId}`)
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: contextPack.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    // Collect requested types from both inputs, deduplicated.
    const knownTypes = getAllKnownBlockTypes()
    const requestedBlockTypesRaw = new Set<string>()
    for (const blockType of params.blockTypes || []) {
      if (blockType) requestedBlockTypesRaw.add(String(blockType))
    }
    for (const schemaRef of params.schemaRefs || []) {
      const blockType = parseSchemaRefToBlockType(schemaRef)
      if (blockType) requestedBlockTypesRaw.add(blockType)
    }

    const resolvedTypes = resolveBlockTypes([...requestedBlockTypesRaw], knownTypes)
    const typesToExpand = resolvedTypes.resolved
    const { schemasByType, schemaRefsByType } = buildSchemasByType(typesToExpand)
    // Merge the new schemas over whatever the pack already holds; new entries win.
    const mergedSchemasByType = {
      ...(contextPack.schemasByType || {}),
      ...schemasByType,
    }
    const mergedSchemaRefsByType = {
      ...(contextPack.schemaRefsByType || {}),
      ...schemaRefsByType,
    }
    const updatedContextPack = await updateContextPack(params.contextPackId, {
      schemasByType: mergedSchemasByType,
      schemaRefsByType: mergedSchemaRefsByType,
    })
    const warnings =
      resolvedTypes.unresolved.length > 0
        ? [
            `Unknown block type(s): ${resolvedTypes.unresolved.join(', ')}. ` +
              'Use known block type IDs from knownBlockTypes.',
          ]
        : []

    return {
      success: true,
      contextPackId: params.contextPackId,
      workflowId: contextPack.workflowId,
      snapshotHash: contextPack.snapshotHash,
      // Only the newly expanded schemas; loadedSchemaTypes reflects the full pack.
      schemasByType,
      schemaRefsByType,
      loadedSchemaTypes: Object.keys(updatedContextPack?.schemasByType || mergedSchemasByType).sort(),
      resolvedBlockTypes: resolvedTypes.resolved,
      unresolvedBlockTypes: resolvedTypes.unresolved,
      knownBlockTypes: knownTypes,
      warnings,
    }
  },
}
|
||||||
@@ -0,0 +1,286 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { workflow as workflowTable } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import { applyAutoLayout } from '@/lib/workflows/autolayout'
|
||||||
|
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
|
||||||
|
import {
|
||||||
|
loadWorkflowFromNormalizedTables,
|
||||||
|
saveWorkflowToNormalizedTables,
|
||||||
|
} from '@/lib/workflows/persistence/utils'
|
||||||
|
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
||||||
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||||
|
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
||||||
|
import { applyOperationsToWorkflowState } from './engine'
|
||||||
|
import type { EditWorkflowOperation, ValidationError } from './types'
|
||||||
|
import { preValidateCredentialInputs, validateWorkflowSelectorIds } from './validation'
|
||||||
|
|
||||||
|
// Parameters for applyWorkflowOperations.
type ApplyWorkflowOperationsParams = {
  // Ordered edit operations to apply to the workflow.
  operations: EditWorkflowOperation[]
  // Target workflow ID.
  workflowId: string
  // Acting user; must hold write permission on the workflow's workspace.
  userId: string
  // Optional JSON-serialized client-side workflow state; when present it is
  // parsed and used instead of the state loaded from the database.
  currentUserWorkflow?: string
}
|
||||||
|
|
||||||
|
/**
 * Loads the current workflow state from the normalized database tables.
 *
 * Any block persisted without a `type` is dropped (along with edges touching
 * it) so downstream operation application never sees malformed blocks. Also
 * returns the flat map of subblock values per block.
 *
 * @throws when the workflow row or its normalized data is missing.
 */
async function getCurrentWorkflowStateFromDb(
  workflowId: string
): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
  const logger = createLogger('WorkflowOperationApply')
  // Confirm the workflow row exists before touching the normalized tables.
  const [workflowRecord] = await db
    .select()
    .from(workflowTable)
    .where(eq(workflowTable.id, workflowId))
    .limit(1)
  if (!workflowRecord) throw new Error(`Workflow ${workflowId} not found in database`)
  const normalized = await loadWorkflowFromNormalizedTables(workflowId)
  if (!normalized) throw new Error('Workflow has no normalized data')

  // Validate and fix blocks without types
  const blocks = { ...normalized.blocks }
  const invalidBlocks: string[] = []

  Object.entries(blocks).forEach(([id, block]: [string, any]) => {
    if (!block.type) {
      logger.warn(`Block ${id} loaded without type from database`, {
        blockKeys: Object.keys(block),
        blockName: block.name,
      })
      invalidBlocks.push(id)
    }
  })

  // Remove invalid blocks
  invalidBlocks.forEach((id) => delete blocks[id])

  // Remove edges connected to invalid blocks
  const edges = normalized.edges.filter(
    (edge: any) => !invalidBlocks.includes(edge.source) && !invalidBlocks.includes(edge.target)
  )

  const workflowState: any = {
    blocks,
    edges,
    loops: normalized.loops || {},
    parallels: normalized.parallels || {},
  }
  // Flatten subblock values: blockId -> subBlockId -> value (defined values only).
  // NOTE(review): built from normalized.blocks, so it can still contain entries
  // for blocks removed above as invalid — confirm whether that is intended.
  const subBlockValues: Record<string, Record<string, any>> = {}
  Object.entries(normalized.blocks).forEach(([blockId, block]) => {
    subBlockValues[blockId] = {}
    Object.entries((block as any).subBlocks || {}).forEach(([subId, sub]) => {
      if ((sub as any).value !== undefined) subBlockValues[blockId][subId] = (sub as any).value
    })
  })
  return { workflowState, subBlockValues }
}
|
||||||
|
|
||||||
|
/**
 * Applies a batch of edit operations to a workflow and persists the result.
 *
 * Pipeline: authorize (write) -> load state (client-provided JSON or DB) ->
 * pre-validate credential/apiKey inputs (invalid ones are filtered, not fatal)
 * -> apply operations -> validate selector IDs and overall state (fatal on
 * schema errors) -> persist custom tools (best effort) -> autolayout ->
 * save to normalized tables -> touch lastSynced/updatedAt.
 *
 * Returns the updated workflow state plus, when applicable, lists of rejected
 * inputs and skipped operations formatted for LLM feedback.
 *
 * @throws on missing/invalid arguments, failed authorization, unparseable
 *   currentUserWorkflow, invalid edited state, or a failed save.
 */
export async function applyWorkflowOperations(params: ApplyWorkflowOperationsParams): Promise<any> {
  const logger = createLogger('WorkflowOperationApply')
  const { operations, workflowId, currentUserWorkflow, userId } = params
  if (!Array.isArray(operations) || operations.length === 0) {
    throw new Error('operations are required and must be an array')
  }
  if (!workflowId) throw new Error('workflowId is required')
  if (!userId) throw new Error('Unauthorized workflow access')

  // Write permission is required; read-only users cannot apply operations.
  const authorization = await authorizeWorkflowByWorkspacePermission({
    workflowId,
    userId,
    action: 'write',
  })
  if (!authorization.allowed) {
    throw new Error(authorization.message || 'Unauthorized workflow access')
  }

  logger.info('Executing workflow operation apply', {
    operationCount: operations.length,
    workflowId,
    hasCurrentUserWorkflow: !!currentUserWorkflow,
  })

  // Get current workflow state: prefer the client-provided serialized state,
  // otherwise fall back to the database.
  let workflowState: any
  if (currentUserWorkflow) {
    try {
      workflowState = JSON.parse(currentUserWorkflow)
    } catch (error) {
      logger.error('Failed to parse currentUserWorkflow', error)
      throw new Error('Invalid currentUserWorkflow format')
    }
  } else {
    const fromDb = await getCurrentWorkflowStateFromDb(workflowId)
    workflowState = fromDb.workflowState
  }

  // Get permission config for the user
  const permissionConfig = await getUserPermissionConfig(userId)

  // Pre-validate credential and apiKey inputs before applying operations
  // This filters out invalid credentials and apiKeys for hosted models
  let operationsToApply = operations
  const credentialErrors: ValidationError[] = []
  const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
    operations,
    { userId },
    workflowState
  )
  operationsToApply = filteredOperations
  credentialErrors.push(...credErrors)

  // Apply operations directly to the workflow state
  const {
    state: modifiedWorkflowState,
    validationErrors,
    skippedItems,
  } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)

  // Add credential validation errors
  validationErrors.push(...credentialErrors)

  // Get workspaceId for selector validation (best effort; also reused for
  // custom-tools persistence below).
  let workspaceId: string | undefined
  try {
    const [workflowRecord] = await db
      .select({ workspaceId: workflowTable.workspaceId })
      .from(workflowTable)
      .where(eq(workflowTable.id, workflowId))
      .limit(1)
    workspaceId = workflowRecord?.workspaceId ?? undefined
  } catch (error) {
    logger.warn('Failed to get workspaceId for selector validation', { error, workflowId })
  }

  // Validate selector IDs exist in the database (non-fatal: failures only log).
  try {
    const selectorErrors = await validateWorkflowSelectorIds(modifiedWorkflowState, {
      userId,
      workspaceId,
    })
    validationErrors.push(...selectorErrors)
  } catch (error) {
    logger.warn('Selector ID validation failed', {
      error: error instanceof Error ? error.message : String(error),
    })
  }

  // Validate the workflow state; schema errors here are fatal.
  const validation = validateWorkflowState(modifiedWorkflowState, { sanitize: true })

  if (!validation.valid) {
    logger.error('Edited workflow state is invalid', {
      errors: validation.errors,
      warnings: validation.warnings,
    })
    throw new Error(`Invalid edited workflow: ${validation.errors.join('; ')}`)
  }

  if (validation.warnings.length > 0) {
    logger.warn('Edited workflow validation warnings', {
      warnings: validation.warnings,
    })
  }

  // Extract and persist custom tools to database (reuse workspaceId from selector validation)
  if (workspaceId) {
    try {
      const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState
      const { saved, errors } = await extractAndPersistCustomTools(finalWorkflowState, workspaceId, userId)

      if (saved > 0) {
        logger.info(`Persisted ${saved} custom tool(s) to database`, { workflowId })
      }

      if (errors.length > 0) {
        logger.warn('Some custom tools failed to persist', { errors, workflowId })
      }
    } catch (error) {
      logger.error('Failed to persist custom tools', { error, workflowId })
    }
  } else {
    logger.warn('Workflow has no workspaceId, skipping custom tools persistence', {
      workflowId,
    })
  }

  logger.info('Workflow operation apply succeeded', {
    operationCount: operations.length,
    blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
    edgesCount: modifiedWorkflowState.edges.length,
    inputValidationErrors: validationErrors.length,
    skippedItemsCount: skippedItems.length,
    schemaValidationErrors: validation.errors.length,
    validationWarnings: validation.warnings.length,
  })

  // Format validation errors for LLM feedback
  const inputErrors =
    validationErrors.length > 0
      ? validationErrors.map((e) => `Block "${e.blockId}" (${e.blockType}): ${e.error}`)
      : undefined

  // Format skipped items for LLM feedback
  const skippedMessages =
    skippedItems.length > 0 ? skippedItems.map((item) => item.reason) : undefined

  // Persist the workflow state to the database
  const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState

  // Apply autolayout to position blocks properly
  const layoutResult = applyAutoLayout(finalWorkflowState.blocks, finalWorkflowState.edges, {
    horizontalSpacing: 250,
    verticalSpacing: 100,
    padding: { x: 100, y: 100 },
  })

  // Layout failure is non-fatal: keep the pre-layout positions.
  const layoutedBlocks =
    layoutResult.success && layoutResult.blocks ? layoutResult.blocks : finalWorkflowState.blocks

  if (!layoutResult.success) {
    logger.warn('Autolayout failed, using default positions', {
      workflowId,
      error: layoutResult.error,
    })
  }

  // Loops/parallels are regenerated from the laid-out blocks rather than
  // carried over from the edited state.
  const workflowStateForDb = {
    blocks: layoutedBlocks,
    edges: finalWorkflowState.edges,
    loops: generateLoopBlocks(layoutedBlocks as any),
    parallels: generateParallelBlocks(layoutedBlocks as any),
    lastSaved: Date.now(),
    isDeployed: false,
  }

  const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowStateForDb as any)
  if (!saveResult.success) {
    logger.error('Failed to persist workflow state to database', {
      workflowId,
      error: saveResult.error,
    })
    throw new Error(`Failed to save workflow: ${saveResult.error}`)
  }

  // Update workflow's lastSynced timestamp
  await db
    .update(workflowTable)
    .set({
      lastSynced: new Date(),
      updatedAt: new Date(),
    })
    .where(eq(workflowTable.id, workflowId))

  logger.info('Workflow state persisted to database', { workflowId })

  // Return the modified workflow state with autolayout applied
  return {
    success: true,
    workflowState: { ...finalWorkflowState, blocks: layoutedBlocks },
    // Include input validation errors so the LLM can see what was rejected
    ...(inputErrors && {
      inputValidationErrors: inputErrors,
      inputValidationMessage: `${inputErrors.length} input(s) were rejected due to validation errors. The workflow was still updated with valid inputs only. Errors: ${inputErrors.join('; ')}`,
    }),
    // Include skipped items so the LLM can see what operations were skipped
    ...(skippedMessages && {
      skippedItems: skippedMessages,
      skippedItemsMessage: `${skippedItems.length} operation(s) were skipped due to invalid references. Details: ${skippedMessages.join('; ')}`,
    }),
  }
}
|
||||||
497
apps/sim/lib/copilot/tools/server/workflow/workflow-state.ts
Normal file
497
apps/sim/lib/copilot/tools/server/workflow/workflow-state.ts
Normal file
@@ -0,0 +1,497 @@
|
|||||||
|
import crypto from 'crypto'
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { workflow as workflowTable } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||||
|
import { getAllBlockTypes, getBlock } from '@/blocks/registry'
|
||||||
|
import type { SubBlockConfig } from '@/blocks/types'
|
||||||
|
|
||||||
|
// Module logger for workflow-state helpers.
const logger = createLogger('WorkflowContextState')
// Block types that act as containers for nested blocks.
// NOTE(review): neither `logger` nor this constant is referenced in the
// visible portion of this module — confirm usage elsewhere in the file.
const CONTAINER_BLOCK_TYPES = ['loop', 'parallel'] as const
|
||||||
|
|
||||||
|
function stableSortValue(value: any): any {
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value.map(stableSortValue)
|
||||||
|
}
|
||||||
|
if (value && typeof value === 'object') {
|
||||||
|
const sorted: Record<string, any> = {}
|
||||||
|
for (const key of Object.keys(value).sort()) {
|
||||||
|
sorted[key] = stableSortValue(value[key])
|
||||||
|
}
|
||||||
|
return sorted
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
export function hashWorkflowState(state: Record<string, unknown>): string {
|
||||||
|
const stable = stableSortValue(state)
|
||||||
|
const payload = JSON.stringify(stable)
|
||||||
|
return `sha256:${crypto.createHash('sha256').update(payload).digest('hex')}`
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeOptions(options: unknown): string[] | null {
|
||||||
|
if (!Array.isArray(options)) return null
|
||||||
|
const normalized = options
|
||||||
|
.map((option) => {
|
||||||
|
if (option == null) return null
|
||||||
|
if (typeof option === 'object') {
|
||||||
|
const optionRecord = option as Record<string, unknown>
|
||||||
|
const id = optionRecord.id
|
||||||
|
if (typeof id === 'string') return id
|
||||||
|
const label = optionRecord.label
|
||||||
|
if (typeof label === 'string') return label
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return String(option)
|
||||||
|
})
|
||||||
|
.filter((value): value is string => Boolean(value))
|
||||||
|
return normalized.length > 0 ? normalized : null
|
||||||
|
}
|
||||||
|
|
||||||
|
function serializeRequired(required: SubBlockConfig['required']): boolean | Record<string, any> {
|
||||||
|
if (typeof required === 'boolean') return required
|
||||||
|
if (!required) return false
|
||||||
|
if (typeof required === 'object') {
|
||||||
|
const out: Record<string, any> = {}
|
||||||
|
const record = required as Record<string, unknown>
|
||||||
|
for (const key of ['field', 'operator', 'value']) {
|
||||||
|
if (record[key] !== undefined) {
|
||||||
|
out[key] = record[key]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
function serializeSubBlock(subBlock: SubBlockConfig): Record<string, unknown> {
|
||||||
|
const staticOptions =
|
||||||
|
typeof subBlock.options === 'function' ? null : normalizeOptions(subBlock.options)
|
||||||
|
return {
|
||||||
|
id: subBlock.id,
|
||||||
|
type: subBlock.type,
|
||||||
|
title: subBlock.title,
|
||||||
|
description: subBlock.description || null,
|
||||||
|
mode: subBlock.mode || null,
|
||||||
|
placeholder: subBlock.placeholder || null,
|
||||||
|
hidden: Boolean(subBlock.hidden),
|
||||||
|
multiSelect: Boolean(subBlock.multiSelect),
|
||||||
|
required: serializeRequired(subBlock.required),
|
||||||
|
hasDynamicOptions: typeof subBlock.options === 'function',
|
||||||
|
options: staticOptions,
|
||||||
|
defaultValue: subBlock.defaultValue ?? null,
|
||||||
|
min: subBlock.min ?? null,
|
||||||
|
max: subBlock.max ?? null,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Serializes a block type's configuration schema into plain JSON.
 *
 * 'loop' and 'parallel' are container types with no registry entry, so their
 * schemas are hand-written here; every other type is read from the block
 * registry via getBlock. Returns null for unknown types.
 */
function serializeBlockSchema(blockType: string): Record<string, unknown> | null {
  // Hand-written schema for the loop container.
  if (blockType === 'loop') {
    return {
      blockType: 'loop',
      blockName: 'Loop',
      category: 'blocks',
      triggerAllowed: false,
      hasTriggersConfig: false,
      subBlocks: [
        {
          id: 'loopType',
          type: 'dropdown',
          title: 'Loop Type',
          description: 'Loop mode: for, forEach, while, doWhile',
          mode: null,
          placeholder: null,
          hidden: false,
          multiSelect: false,
          required: false,
          hasDynamicOptions: false,
          options: ['for', 'forEach', 'while', 'doWhile'],
          defaultValue: 'for',
          min: null,
          max: null,
        },
        {
          id: 'iterations',
          type: 'short-input',
          title: 'Iterations',
          description: 'Iteration count for for-loops',
          mode: null,
          placeholder: null,
          hidden: false,
          multiSelect: false,
          required: false,
          hasDynamicOptions: false,
          options: null,
          defaultValue: 1,
          min: 1,
          max: null,
        },
        {
          id: 'collection',
          type: 'long-input',
          title: 'Collection',
          description: 'Collection expression for forEach loops',
          mode: null,
          placeholder: null,
          hidden: false,
          multiSelect: false,
          required: false,
          hasDynamicOptions: false,
          options: null,
          defaultValue: null,
          min: null,
          max: null,
        },
        {
          id: 'condition',
          type: 'long-input',
          title: 'Condition',
          description: 'Condition expression for while/doWhile loops',
          mode: null,
          placeholder: null,
          hidden: false,
          multiSelect: false,
          required: false,
          hasDynamicOptions: false,
          options: null,
          defaultValue: null,
          min: null,
          max: null,
        },
      ],
      outputKeys: ['index', 'item', 'items'],
      longDescription: null,
    }
  }

  // Hand-written schema for the parallel container.
  if (blockType === 'parallel') {
    return {
      blockType: 'parallel',
      blockName: 'Parallel',
      category: 'blocks',
      triggerAllowed: false,
      hasTriggersConfig: false,
      subBlocks: [
        {
          id: 'parallelType',
          type: 'dropdown',
          title: 'Parallel Type',
          description: 'Parallel mode: count or collection',
          mode: null,
          placeholder: null,
          hidden: false,
          multiSelect: false,
          required: false,
          hasDynamicOptions: false,
          options: ['count', 'collection'],
          defaultValue: 'count',
          min: null,
          max: null,
        },
        {
          id: 'count',
          type: 'short-input',
          title: 'Count',
          description: 'Branch count when parallelType is count',
          mode: null,
          placeholder: null,
          hidden: false,
          multiSelect: false,
          required: false,
          hasDynamicOptions: false,
          options: null,
          defaultValue: 1,
          min: 1,
          max: null,
        },
        {
          id: 'collection',
          type: 'long-input',
          title: 'Collection',
          description: 'Collection expression when parallelType is collection',
          mode: null,
          placeholder: null,
          hidden: false,
          multiSelect: false,
          required: false,
          hasDynamicOptions: false,
          options: null,
          defaultValue: null,
          min: null,
          max: null,
        },
      ],
      outputKeys: ['index', 'currentItem', 'items'],
      longDescription: null,
    }
  }

  // All other types come from the registry; unknown types yield null.
  const blockConfig = getBlock(blockType)
  if (!blockConfig) return null

  const subBlocks = Array.isArray(blockConfig.subBlocks)
    ? blockConfig.subBlocks.map(serializeSubBlock)
    : []
  const outputs = blockConfig.outputs || {}
  const outputKeys = Object.keys(outputs)

  return {
    blockType,
    blockName: blockConfig.name || blockType,
    category: blockConfig.category,
    triggerAllowed: Boolean(blockConfig.triggerAllowed || blockConfig.triggers?.enabled),
    hasTriggersConfig: Boolean(blockConfig.triggers?.enabled),
    subBlocks,
    outputKeys,
    longDescription: blockConfig.longDescription || null,
  }
}
|
||||||
|
|
||||||
|
export function buildSchemasByType(blockTypes: string[]): {
|
||||||
|
schemasByType: Record<string, any>
|
||||||
|
schemaRefsByType: Record<string, string>
|
||||||
|
} {
|
||||||
|
const schemasByType: Record<string, any> = {}
|
||||||
|
const schemaRefsByType: Record<string, string> = {}
|
||||||
|
|
||||||
|
const uniqueTypes = [...new Set(blockTypes.filter(Boolean))]
|
||||||
|
for (const blockType of uniqueTypes) {
|
||||||
|
const schema = serializeBlockSchema(blockType)
|
||||||
|
if (!schema) continue
|
||||||
|
const stableSchema = stableSortValue(schema)
|
||||||
|
const schemaHash = crypto
|
||||||
|
.createHash('sha256')
|
||||||
|
.update(JSON.stringify(stableSchema))
|
||||||
|
.digest('hex')
|
||||||
|
schemasByType[blockType] = stableSchema
|
||||||
|
schemaRefsByType[blockType] = `${blockType}@sha256:${schemaHash}`
|
||||||
|
}
|
||||||
|
|
||||||
|
return { schemasByType, schemaRefsByType }
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function loadWorkflowStateFromDb(workflowId: string): Promise<{
|
||||||
|
workflowState: {
|
||||||
|
blocks: Record<string, any>
|
||||||
|
edges: Array<Record<string, any>>
|
||||||
|
loops: Record<string, any>
|
||||||
|
parallels: Record<string, any>
|
||||||
|
}
|
||||||
|
workspaceId?: string
|
||||||
|
}> {
|
||||||
|
const [workflowRecord] = await db
|
||||||
|
.select({ workspaceId: workflowTable.workspaceId })
|
||||||
|
.from(workflowTable)
|
||||||
|
.where(eq(workflowTable.id, workflowId))
|
||||||
|
.limit(1)
|
||||||
|
if (!workflowRecord) {
|
||||||
|
throw new Error(`Workflow ${workflowId} not found`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||||
|
if (!normalized) {
|
||||||
|
throw new Error(`Workflow ${workflowId} has no normalized data`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const blocks = { ...normalized.blocks }
|
||||||
|
const invalidBlockIds: string[] = []
|
||||||
|
for (const [blockId, block] of Object.entries(blocks)) {
|
||||||
|
if (!(block as { type?: unknown })?.type) {
|
||||||
|
invalidBlockIds.push(blockId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const blockId of invalidBlockIds) {
|
||||||
|
delete blocks[blockId]
|
||||||
|
}
|
||||||
|
|
||||||
|
const invalidSet = new Set(invalidBlockIds)
|
||||||
|
const edges = (normalized.edges || []).filter(
|
||||||
|
(edge: any) => !invalidSet.has(edge.source) && !invalidSet.has(edge.target)
|
||||||
|
)
|
||||||
|
|
||||||
|
if (invalidBlockIds.length > 0) {
|
||||||
|
logger.warn('Dropped blocks without type while loading workflow state', {
|
||||||
|
workflowId,
|
||||||
|
dropped: invalidBlockIds,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
workflowState: {
|
||||||
|
blocks,
|
||||||
|
edges,
|
||||||
|
loops: normalized.loops || {},
|
||||||
|
parallels: normalized.parallels || {},
|
||||||
|
},
|
||||||
|
workspaceId: workflowRecord.workspaceId || undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Produces a compact, deterministic summary of a workflow state for the copilot:
 * counts per block type, trigger blocks, capped block/edge inventories (sorted
 * by name so output is stable), duplicate-name detection, subflow parent→child
 * mapping, and a reference guide showing `<token.outputKey>` examples per block.
 */
export function summarizeWorkflowState(workflowState: {
  blocks: Record<string, any>
  edges: Array<Record<string, any>>
  loops: Record<string, any>
  parallels: Record<string, any>
}): Record<string, unknown> {
  // Caps keep the summary bounded for prompt-sized consumers; the
  // `…Truncated` flags below report when a cap was hit.
  const MAX_BLOCK_INVENTORY = 160
  const MAX_EDGE_INVENTORY = 240
  const blocks = workflowState.blocks || {}
  const edges = workflowState.edges || []
  // Count of blocks per type, e.g. { agent: 3, condition: 1 }.
  const blockTypes: Record<string, number> = {}
  // Blocks explicitly running in trigger mode.
  const triggerBlocks: Array<{ id: string; name: string; type: string }> = []
  // Unsorted, uncapped inventory; sorted/capped views are derived from it.
  const blockInventoryRaw: Array<{
    id: string
    name: string
    type: string
    parentId: string | null
    triggerMode: boolean
    enabled: boolean
  }> = []

  // Lowercased alphanumeric form of a name, used both for duplicate detection
  // and as the `<token.key>` reference token.
  // NOTE(review): the trailing .trim() is a no-op after non-alphanumerics are
  // stripped — harmless, but redundant.
  const normalizeReferenceToken = (value: string): string =>
    value
      .toLowerCase()
      .replace(/[^a-z0-9]+/g, '')
      .trim()

  const dedupeStrings = (values: string[]): string[] => [...new Set(values.filter(Boolean))]
  // Extra output keys always advertised for 'start' blocks, on top of the schema's.
  const startOutputKeys = ['input', 'files', 'conversationId']
  // normalized name -> first-seen display name + all block ids sharing it.
  const duplicateNameIndex = new Map<string, { name: string; blockIds: string[] }>()

  for (const [blockId, block] of Object.entries(blocks)) {
    const blockRecord = block as Record<string, unknown>
    const dataRecord = (blockRecord.data as Record<string, unknown> | undefined) || undefined
    const blockType = String(blockRecord.type || 'unknown')
    // Fall back to the type when a block has no name.
    const blockName = String(blockRecord.name || blockType)
    // Empty/whitespace-only parent ids collapse to null (top-level block).
    const parentId = String(dataRecord?.parentId || '').trim() || null
    const normalizedName = normalizeReferenceToken(blockName)

    blockTypes[blockType] = (blockTypes[blockType] || 0) + 1
    if (blockRecord.triggerMode === true) {
      triggerBlocks.push({
        id: blockId,
        name: blockName,
        type: blockType,
      })
    }

    blockInventoryRaw.push({
      id: blockId,
      name: blockName,
      type: blockType,
      parentId,
      triggerMode: blockRecord.triggerMode === true,
      // enabled unless explicitly set to false
      enabled: blockRecord.enabled !== false,
    })

    if (normalizedName) {
      const existing = duplicateNameIndex.get(normalizedName)
      if (existing) {
        existing.blockIds.push(blockId)
      } else {
        duplicateNameIndex.set(normalizedName, { name: blockName, blockIds: [blockId] })
      }
    }
  }

  // Stable ordering: name first, then id as tie-breaker; then cap.
  const blockInventory = [...blockInventoryRaw]
    .sort((a, b) => a.name.localeCompare(b.name) || a.id.localeCompare(b.id))
    .slice(0, MAX_BLOCK_INVENTORY)
  const blockInventoryTruncated = blockInventoryRaw.length > MAX_BLOCK_INVENTORY

  // Resolve edge endpoints to display names (falling back to the raw id).
  const blockNameById = new Map(blockInventoryRaw.map((entry) => [entry.id, entry.name]))
  const edgeInventoryRaw = edges.map((edge: any) => {
    const source = String(edge.source || '')
    const target = String(edge.target || '')
    const sourceHandle = String(edge.sourceHandle || '').trim() || null
    const targetHandle = String(edge.targetHandle || '').trim() || null
    return {
      source,
      sourceName: blockNameById.get(source) || source,
      sourceHandle,
      target,
      targetName: blockNameById.get(target) || target,
      targetHandle,
    }
  })
  // Stable ordering: source name, target name, then source id; then cap.
  const edgeInventory = edgeInventoryRaw
    .sort((a, b) => {
      const bySource = a.sourceName.localeCompare(b.sourceName)
      if (bySource !== 0) return bySource
      const byTarget = a.targetName.localeCompare(b.targetName)
      if (byTarget !== 0) return byTarget
      return a.source.localeCompare(b.source)
    })
    .slice(0, MAX_EDGE_INVENTORY)
  const edgeInventoryTruncated = edgeInventoryRaw.length > MAX_EDGE_INVENTORY

  // Names shared by more than one block, most-duplicated first.
  const duplicateBlockNames = [...duplicateNameIndex.values()]
    .filter((entry) => entry.blockIds.length > 1)
    .map((entry) => ({
      name: entry.name,
      count: entry.blockIds.length,
      blockIds: entry.blockIds.sort(),
    }))
    .sort((a, b) => b.count - a.count || a.name.localeCompare(b.name))

  // parentId -> child block ids, describing loop/parallel containment.
  const subflowChildrenMap = new Map<string, string[]>()
  for (const block of blockInventoryRaw) {
    if (!block.parentId) continue
    const existing = subflowChildrenMap.get(block.parentId) || []
    existing.push(block.id)
    subflowChildrenMap.set(block.parentId, existing)
  }
  const subflowChildren = [...subflowChildrenMap.entries()]
    .map(([subflowId, childBlockIds]) => {
      const subflowBlock = blockInventoryRaw.find((block) => block.id === subflowId)
      return {
        subflowId,
        subflowName: subflowBlock?.name || subflowId,
        subflowType: subflowBlock?.type || 'unknown',
        childBlockIds: childBlockIds.sort(),
      }
    })
    .sort((a, b) => a.subflowName.localeCompare(b.subflowName))

  // How to reference each block's outputs from other blocks, with examples
  // like `<blocktoken.outputKey>`. Built from the capped inventory only.
  const referenceGuide = blockInventory.map((entry) => {
    const blockSchema = getBlock(entry.type)
    const schemaOutputKeys = Object.keys(blockSchema?.outputs || {})
    const outputKeys =
      entry.type === 'start'
        ? dedupeStrings([...schemaOutputKeys, ...startOutputKeys])
        : dedupeStrings(schemaOutputKeys)
    // Prefer name-derived token, then type-derived, then the raw id.
    const referenceToken =
      normalizeReferenceToken(entry.name) || normalizeReferenceToken(entry.type) || entry.id
    return {
      blockId: entry.id,
      blockName: entry.name,
      blockType: entry.type,
      parentId: entry.parentId,
      referenceToken,
      outputKeys,
      // At most four worked examples per block.
      examples: outputKeys.slice(0, 4).map((key) => `<${referenceToken}.${key}>`),
    }
  })

  return {
    blockCount: Object.keys(blocks).length,
    edgeCount: edges.length,
    loopCount: Object.keys(workflowState.loops || {}).length,
    parallelCount: Object.keys(workflowState.parallels || {}).length,
    blockTypes,
    triggerBlocks,
    blockInventory,
    blockInventoryTruncated,
    edgeInventory,
    edgeInventoryTruncated,
    duplicateBlockNames,
    subflowChildren,
    referenceGuide,
  }
}
|
||||||
|
|
||||||
|
export function getAllKnownBlockTypes(): string[] {
|
||||||
|
return [...new Set([...getAllBlockTypes(), ...CONTAINER_BLOCK_TYPES])]
|
||||||
|
}
|
||||||
230
apps/sim/lib/copilot/tools/server/workflow/workflow-verify.ts
Normal file
230
apps/sim/lib/copilot/tools/server/workflow/workflow-verify.ts
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
||||||
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'
|
||||||
|
|
||||||
|
const logger = createLogger('WorkflowVerifyServerTool')
|
||||||
|
|
||||||
|
/**
 * One acceptance item: either a bare assertion string (e.g. "block_exists:Start")
 * or an object carrying the assertion plus an optional `kind` tag.
 */
const AcceptanceItemSchema = z.union([
  z.string(),
  z.object({
    kind: z.string().optional(),
    assert: z.string(),
  }),
])

// Input contract for the workflow_verify tool; `.strict()` rejects unknown keys.
const WorkflowVerifyInputSchema = z
  .object({
    workflowId: z.string(),
    // Structural assertions to evaluate against the workflow, if any.
    acceptance: z.array(AcceptanceItemSchema).optional(),
    // When provided, verification short-circuits if the current state hash differs.
    baseSnapshotHash: z.string().optional(),
  })
  .strict()

type WorkflowVerifyParams = z.infer<typeof WorkflowVerifyInputSchema>
|
||||||
|
|
||||||
|
function normalizeName(value: string): string {
|
||||||
|
return value.trim().toLowerCase()
|
||||||
|
}
|
||||||
|
|
||||||
|
function canonicalizeToken(value: string): string {
|
||||||
|
return normalizeName(value).replace(/[^a-z0-9]/g, '')
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveBlockToken(
|
||||||
|
workflowState: { blocks: Record<string, any> },
|
||||||
|
token: string
|
||||||
|
): string | null {
|
||||||
|
if (!token) return null
|
||||||
|
if (workflowState.blocks[token]) return token
|
||||||
|
const normalized = normalizeName(token)
|
||||||
|
const canonical = canonicalizeToken(token)
|
||||||
|
for (const [blockId, block] of Object.entries(workflowState.blocks || {})) {
|
||||||
|
const blockName = normalizeName(String((block as Record<string, unknown>).name || ''))
|
||||||
|
if (blockName === normalized) return blockId
|
||||||
|
if (canonicalizeToken(blockName) === canonical) return blockId
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveBlocksByType(
|
||||||
|
workflowState: { blocks: Record<string, any> },
|
||||||
|
token: string
|
||||||
|
): string[] {
|
||||||
|
const normalized = normalizeName(token)
|
||||||
|
const canonical = canonicalizeToken(token)
|
||||||
|
const matches: string[] = []
|
||||||
|
for (const [blockId, block] of Object.entries(workflowState.blocks || {})) {
|
||||||
|
const blockType = normalizeName(String((block as Record<string, unknown>).type || ''))
|
||||||
|
if (!blockType) continue
|
||||||
|
if (blockType === normalized || canonicalizeToken(blockType) === canonical) {
|
||||||
|
matches.push(blockId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return matches
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasPath(
|
||||||
|
workflowState: { edges: Array<Record<string, any>> },
|
||||||
|
blockPath: string[]
|
||||||
|
): boolean {
|
||||||
|
if (blockPath.length < 2) return true
|
||||||
|
const adjacency = new Map<string, string[]>()
|
||||||
|
for (const edge of workflowState.edges || []) {
|
||||||
|
const source = String(edge.source || '')
|
||||||
|
const target = String(edge.target || '')
|
||||||
|
if (!source || !target) continue
|
||||||
|
const existing = adjacency.get(source) || []
|
||||||
|
existing.push(target)
|
||||||
|
adjacency.set(source, existing)
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < blockPath.length - 1; i++) {
|
||||||
|
const from = blockPath[i]
|
||||||
|
const to = blockPath[i + 1]
|
||||||
|
const next = adjacency.get(from) || []
|
||||||
|
if (!next.includes(to)) return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Evaluates structural acceptance assertions against a workflow state.
 *
 * Supported assertion prefixes:
 * - `block_exists:<token>`      — token resolves to a block (by id or name)
 * - `block_type_exists:<type>`  — at least one block of that type exists
 * - `trigger_exists:<type>`     — a block of that type has triggerMode === true
 * - `path_exists:<a -> b -> c>` — every consecutive pair is connected by a direct edge
 *
 * Unknown assertion formats are recorded as warnings (not failures), so they
 * never fail structural verification. Each assertion produces one entry in
 * `checks` with per-kind metadata; failed known assertions add a
 * `Assertion failed: <assertion>` message to `failures`.
 */
function evaluateAssertions(params: {
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
  }
  assertions: string[]
}): { failures: string[]; warnings: string[]; checks: Array<Record<string, any>> } {
  const failures: string[] = []
  const warnings: string[] = []
  const checks: Array<Record<string, any>> = []

  for (const assertion of params.assertions) {
    // block_exists: resolve the token by id or (canonicalized) name.
    if (assertion.startsWith('block_exists:')) {
      const token = assertion.slice('block_exists:'.length).trim()
      const blockId = resolveBlockToken(params.workflowState, token)
      const passed = Boolean(blockId)
      checks.push({ assert: assertion, passed, resolvedBlockId: blockId || null })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    // block_type_exists: at least one block of the given type.
    if (assertion.startsWith('block_type_exists:')) {
      const token = assertion.slice('block_type_exists:'.length).trim()
      const matchedBlockIds = resolveBlocksByType(params.workflowState, token)
      const passed = matchedBlockIds.length > 0
      checks.push({ assert: assertion, passed, matchedBlockIds })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    // trigger_exists: a block of the given type currently in trigger mode.
    if (assertion.startsWith('trigger_exists:')) {
      const triggerType = normalizeName(assertion.slice('trigger_exists:'.length))
      const triggerBlock = Object.values(params.workflowState.blocks || {}).find((block: any) => {
        if (block?.triggerMode !== true) return false
        return normalizeName(String(block?.type || '')) === triggerType
      })
      const passed = Boolean(triggerBlock)
      checks.push({ assert: assertion, passed })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    // path_exists: tokens separated by '->'; all must resolve AND each
    // consecutive pair must be linked by a direct edge (see hasPath).
    if (assertion.startsWith('path_exists:')) {
      const rawPath = assertion.slice('path_exists:'.length).trim()
      const tokens = rawPath
        .split('->')
        .map((token) => token.trim())
        .filter(Boolean)
      const resolvedPath = tokens
        .map((token) => resolveBlockToken(params.workflowState, token))
        .filter((value): value is string => Boolean(value))

      // If any token failed to resolve, the assertion fails outright.
      const resolvedAll = resolvedPath.length === tokens.length
      const passed = resolvedAll && hasPath(params.workflowState, resolvedPath)
      checks.push({
        assert: assertion,
        passed,
        resolvedPath,
      })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    // Unknown assertion format should not fail structural verification.
    // Keep explicit visibility via warnings/check metadata.
    checks.push({ assert: assertion, passed: false, reason: 'unknown_assertion_type' })
    warnings.push(`Unknown assertion format: ${assertion}`)
  }

  return { failures, warnings, checks }
}
|
||||||
|
|
||||||
|
/**
 * Server tool `workflow_verify`: verifies a workflow's structural validity and
 * optional acceptance assertions.
 *
 * Flow: require an authenticated user → authorize 'read' access via workspace
 * permission → load the workflow state from the DB → (optionally) compare its
 * hash against `baseSnapshotHash`, returning a `snapshot_mismatch` result on
 * divergence → run state validation (without sanitizing) and the acceptance
 * assertions → report `verified` only when validation passed with zero errors
 * and zero assertion failures.
 *
 * @throws Error on missing user context or failed authorization.
 */
export const workflowVerifyServerTool: BaseServerTool<WorkflowVerifyParams, any> = {
  name: 'workflow_verify',
  inputSchema: WorkflowVerifyInputSchema,
  async execute(params: WorkflowVerifyParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    // Read-level workspace permission is sufficient: verification never mutates.
    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: params.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
    const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
    // If the caller pinned a snapshot and the state has changed since,
    // bail out instead of verifying a state the caller did not intend.
    if (params.baseSnapshotHash && params.baseSnapshotHash !== snapshotHash) {
      return {
        success: false,
        verified: false,
        reason: 'snapshot_mismatch',
        expected: params.baseSnapshotHash,
        current: snapshotHash,
      }
    }

    // Validate as-is; sanitize:false so reported issues reflect the stored state.
    const validation = validateWorkflowState(workflowState as any, { sanitize: false })

    // Acceptance items may be bare strings or { kind?, assert } objects.
    const assertions = (params.acceptance || []).map((item) =>
      typeof item === 'string' ? item : item.assert
    )
    const assertionResults = evaluateAssertions({
      workflowState,
      assertions,
    })

    const verified =
      validation.valid && assertionResults.failures.length === 0 && validation.errors.length === 0

    logger.info('Workflow verification complete', {
      workflowId: params.workflowId,
      verified,
      errorCount: validation.errors.length,
      warningCount: validation.warnings.length,
      assertionFailures: assertionResults.failures.length,
      assertionWarnings: assertionResults.warnings.length,
    })

    return {
      success: true,
      verified,
      snapshotHash,
      validation: {
        valid: validation.valid,
        errors: validation.errors,
        warnings: validation.warnings,
      },
      assertions: assertionResults.checks,
      failures: assertionResults.failures,
      warnings: assertionResults.warnings,
    }
  },
}
|
||||||
@@ -312,12 +312,6 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
|
|||||||
'read:attachment:confluence',
|
'read:attachment:confluence',
|
||||||
'write:attachment:confluence',
|
'write:attachment:confluence',
|
||||||
'search:confluence',
|
'search:confluence',
|
||||||
'read:blogpost:confluence',
|
|
||||||
'write:blogpost:confluence',
|
|
||||||
'read:content.property:confluence',
|
|
||||||
'write:content.property:confluence',
|
|
||||||
'read:hierarchical-content:confluence',
|
|
||||||
'read:content.metadata:confluence',
|
|
||||||
'read:me',
|
'read:me',
|
||||||
'offline_access',
|
'offline_access',
|
||||||
],
|
],
|
||||||
@@ -374,14 +368,6 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
|
|||||||
'read:comment.property:jira',
|
'read:comment.property:jira',
|
||||||
'read:jql:jira',
|
'read:jql:jira',
|
||||||
'read:field:jira',
|
'read:field:jira',
|
||||||
// Project management (components, versions)
|
|
||||||
'manage:jira-project',
|
|
||||||
// Jira Software / Agile scopes (no classic equivalent)
|
|
||||||
'read:board-scope:jira-software',
|
|
||||||
'write:board-scope:jira-software',
|
|
||||||
'read:sprint:jira-software',
|
|
||||||
'write:sprint:jira-software',
|
|
||||||
'delete:sprint:jira-software',
|
|
||||||
// Jira Service Management scopes
|
// Jira Service Management scopes
|
||||||
'read:servicedesk:jira-service-management',
|
'read:servicedesk:jira-service-management',
|
||||||
'read:requesttype:jira-service-management',
|
'read:requesttype:jira-service-management',
|
||||||
@@ -411,16 +397,6 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
|
|||||||
'write:request.participant:jira-service-management',
|
'write:request.participant:jira-service-management',
|
||||||
'read:request.approval:jira-service-management',
|
'read:request.approval:jira-service-management',
|
||||||
'write:request.approval:jira-service-management',
|
'write:request.approval:jira-service-management',
|
||||||
'read:request.feedback:jira-service-management',
|
|
||||||
'write:request.feedback:jira-service-management',
|
|
||||||
'delete:request.feedback:jira-service-management',
|
|
||||||
'read:request.notification:jira-service-management',
|
|
||||||
'write:request.notification:jira-service-management',
|
|
||||||
'delete:request.notification:jira-service-management',
|
|
||||||
'read:request.attachment:jira-service-management',
|
|
||||||
'read:knowledgebase:jira-service-management',
|
|
||||||
'delete:organization:jira-service-management',
|
|
||||||
'delete:servicedesk.customer:jira-service-management',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -27,11 +27,9 @@ import {
|
|||||||
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
||||||
import { executeWebhookJob } from '@/background/webhook-execution'
|
import { executeWebhookJob } from '@/background/webhook-execution'
|
||||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||||
import { isConfluenceEventMatch } from '@/triggers/confluence/utils'
|
|
||||||
import { isGitHubEventMatch } from '@/triggers/github/utils'
|
import { isGitHubEventMatch } from '@/triggers/github/utils'
|
||||||
import { isHubSpotContactEventMatch } from '@/triggers/hubspot/utils'
|
import { isHubSpotContactEventMatch } from '@/triggers/hubspot/utils'
|
||||||
import { isJiraEventMatch } from '@/triggers/jira/utils'
|
import { isJiraEventMatch } from '@/triggers/jira/utils'
|
||||||
import { isJsmEventMatch } from '@/triggers/jsm/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('WebhookProcessor')
|
const logger = createLogger('WebhookProcessor')
|
||||||
|
|
||||||
@@ -683,7 +681,7 @@ export async function verifyProviderAuth(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (foundWebhook.provider === 'jira' || foundWebhook.provider === 'jira_service_management') {
|
if (foundWebhook.provider === 'jira') {
|
||||||
const secret = providerConfig.secret as string | undefined
|
const secret = providerConfig.secret as string | undefined
|
||||||
|
|
||||||
if (secret) {
|
if (secret) {
|
||||||
@@ -708,31 +706,6 @@ export async function verifyProviderAuth(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (foundWebhook.provider === 'confluence') {
|
|
||||||
const secret = providerConfig.secret as string | undefined
|
|
||||||
|
|
||||||
if (secret) {
|
|
||||||
const signature = request.headers.get('X-Hub-Signature')
|
|
||||||
|
|
||||||
if (!signature) {
|
|
||||||
logger.warn(`[${requestId}] Confluence webhook missing signature header`)
|
|
||||||
return new NextResponse('Unauthorized - Missing Confluence signature', { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
const isValidSignature = validateJiraSignature(secret, signature, rawBody)
|
|
||||||
|
|
||||||
if (!isValidSignature) {
|
|
||||||
logger.warn(`[${requestId}] Confluence signature verification failed`, {
|
|
||||||
signatureLength: signature.length,
|
|
||||||
secretLength: secret.length,
|
|
||||||
})
|
|
||||||
return new NextResponse('Unauthorized - Invalid Confluence signature', { status: 401 })
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.debug(`[${requestId}] Confluence signature verified successfully`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (foundWebhook.provider === 'github') {
|
if (foundWebhook.provider === 'github') {
|
||||||
const secret = providerConfig.secret as string | undefined
|
const secret = providerConfig.secret as string | undefined
|
||||||
|
|
||||||
@@ -956,60 +929,6 @@ export async function queueWebhookExecution(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// JSM event filtering for event-specific triggers
|
|
||||||
if (foundWebhook.provider === 'jira_service_management') {
|
|
||||||
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
|
||||||
const triggerId = providerConfig.triggerId as string | undefined
|
|
||||||
|
|
||||||
if (triggerId && triggerId !== 'jsm_webhook') {
|
|
||||||
const webhookEvent = body.webhookEvent as string | undefined
|
|
||||||
|
|
||||||
if (!isJsmEventMatch(triggerId, webhookEvent || '', body)) {
|
|
||||||
logger.debug(
|
|
||||||
`[${options.requestId}] JSM event mismatch for trigger ${triggerId}. Event: ${webhookEvent}. Skipping execution.`,
|
|
||||||
{
|
|
||||||
webhookId: foundWebhook.id,
|
|
||||||
workflowId: foundWorkflow.id,
|
|
||||||
triggerId,
|
|
||||||
receivedEvent: webhookEvent,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
// Return 200 OK to prevent Jira from retrying
|
|
||||||
return NextResponse.json({
|
|
||||||
message: 'Event type does not match trigger configuration. Ignoring.',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Confluence event filtering for event-specific triggers
|
|
||||||
if (foundWebhook.provider === 'confluence') {
|
|
||||||
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
|
||||||
const triggerId = providerConfig.triggerId as string | undefined
|
|
||||||
|
|
||||||
if (triggerId && triggerId !== 'confluence_webhook') {
|
|
||||||
const event = body.event as string | undefined
|
|
||||||
|
|
||||||
if (!isConfluenceEventMatch(triggerId, event || '')) {
|
|
||||||
logger.debug(
|
|
||||||
`[${options.requestId}] Confluence event mismatch for trigger ${triggerId}. Event: ${event}. Skipping execution.`,
|
|
||||||
{
|
|
||||||
webhookId: foundWebhook.id,
|
|
||||||
workflowId: foundWorkflow.id,
|
|
||||||
triggerId,
|
|
||||||
receivedEvent: event,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
// Return 200 OK to prevent Confluence from retrying
|
|
||||||
return NextResponse.json({
|
|
||||||
message: 'Event type does not match trigger configuration. Ignoring.',
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (foundWebhook.provider === 'hubspot') {
|
if (foundWebhook.provider === 'hubspot') {
|
||||||
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
||||||
const triggerId = providerConfig.triggerId as string | undefined
|
const triggerId = providerConfig.triggerId as string | undefined
|
||||||
|
|||||||
@@ -78,7 +78,6 @@ const PROVIDER_EXTRACTORS: Record<string, (body: any) => string | null> = {
|
|||||||
hubspot: extractHubSpotIdentifier,
|
hubspot: extractHubSpotIdentifier,
|
||||||
linear: extractLinearIdentifier,
|
linear: extractLinearIdentifier,
|
||||||
jira: extractJiraIdentifier,
|
jira: extractJiraIdentifier,
|
||||||
jira_service_management: extractJiraIdentifier,
|
|
||||||
'microsoft-teams': extractMicrosoftTeamsIdentifier,
|
'microsoft-teams': extractMicrosoftTeamsIdentifier,
|
||||||
airtable: extractAirtableIdentifier,
|
airtable: extractAirtableIdentifier,
|
||||||
grain: extractGrainIdentifier,
|
grain: extractGrainIdentifier,
|
||||||
|
|||||||
@@ -530,9 +530,6 @@ export async function validateTwilioSignature(
|
|||||||
const SLACK_MAX_FILE_SIZE = 50 * 1024 * 1024 // 50 MB
|
const SLACK_MAX_FILE_SIZE = 50 * 1024 * 1024 // 50 MB
|
||||||
const SLACK_MAX_FILES = 15
|
const SLACK_MAX_FILES = 15
|
||||||
|
|
||||||
const JIRA_MAX_FILE_SIZE = 50 * 1024 * 1024 // 50 MB
|
|
||||||
const CONFLUENCE_MAX_FILE_SIZE = 50 * 1024 * 1024 // 50 MB
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Resolves the full file object from the Slack API when the event payload
|
* Resolves the full file object from the Slack API when the event payload
|
||||||
* only contains a partial file (e.g. missing url_private due to file_access restrictions).
|
* only contains a partial file (e.g. missing url_private due to file_access restrictions).
|
||||||
@@ -682,169 +679,6 @@ async function downloadSlackFiles(
|
|||||||
return downloaded
|
return downloaded
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
 * Downloads a Jira attachment file using Basic auth (email + API token).
 * Returns the file data in the format expected by WebhookAttachmentProcessor.
 *
 * Safety measures, in order:
 * - skips attachments with no content URL or a reported size over JIRA_MAX_FILE_SIZE
 * - validates the URL via DNS resolution and fetches with the resolved IP pinned
 *   (SSRF mitigation — the hostname cannot re-resolve between check and fetch)
 * - re-checks the actual downloaded size against the same limit
 *
 * All failure modes are logged and return null (best-effort: a bad attachment
 * never fails the webhook).
 *
 * @param attachment Jira attachment payload fields (content URL, filename, mimeType, size)
 * @param apiEmail   Atlassian account email for Basic auth
 * @param apiToken   Atlassian API token for Basic auth
 * @returns base64-encoded file data with metadata, or null when skipped/failed
 */
async function downloadJiraAttachment(
  attachment: { content?: string; filename?: string; mimeType?: string; size?: number },
  apiEmail: string,
  apiToken: string
): Promise<{ name: string; data: string; mimeType: string; size: number } | null> {
  const contentUrl = attachment.content
  if (!contentUrl) {
    logger.warn('Jira attachment has no content URL, skipping download')
    return null
  }

  // First gate on the size Jira reports, before spending any network I/O.
  const reportedSize = Number(attachment.size) || 0
  if (reportedSize > JIRA_MAX_FILE_SIZE) {
    logger.warn('Jira attachment exceeds size limit, skipping', {
      filename: attachment.filename,
      size: reportedSize,
      limit: JIRA_MAX_FILE_SIZE,
    })
    return null
  }

  try {
    const urlValidation = await validateUrlWithDNS(contentUrl, 'attachment_content')
    if (!urlValidation.isValid) {
      logger.warn('Jira attachment URL failed DNS validation, skipping', {
        filename: attachment.filename,
        error: urlValidation.error,
      })
      return null
    }

    const authHeader = Buffer.from(`${apiEmail}:${apiToken}`).toString('base64')

    // Fetch against the IP resolved during validation to prevent DNS rebinding.
    const response = await secureFetchWithPinnedIP(contentUrl, urlValidation.resolvedIP!, {
      headers: {
        Authorization: `Basic ${authHeader}`,
        Accept: '*/*',
      },
    })

    if (!response.ok) {
      logger.warn('Failed to download Jira attachment', {
        filename: attachment.filename,
        status: response.status,
      })
      return null
    }

    const arrayBuffer = await response.arrayBuffer()
    const buffer = Buffer.from(arrayBuffer)

    // Re-check with the real byte count — the reported size is untrusted input.
    if (buffer.length > JIRA_MAX_FILE_SIZE) {
      logger.warn('Downloaded Jira attachment exceeds size limit, skipping', {
        filename: attachment.filename,
        actualSize: buffer.length,
        limit: JIRA_MAX_FILE_SIZE,
      })
      return null
    }

    return {
      name: attachment.filename || 'attachment',
      data: buffer.toString('base64'),
      mimeType: attachment.mimeType || 'application/octet-stream',
      size: buffer.length,
    }
  } catch (error) {
    logger.error('Error downloading Jira attachment', {
      filename: attachment.filename,
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Downloads a Confluence attachment file using Atlassian Basic Auth.
|
|
||||||
* Constructs the download URL from the domain and attachment download path.
|
|
||||||
*/
|
|
||||||
async function downloadConfluenceAttachment(
|
|
||||||
attachment: Record<string, any>,
|
|
||||||
domain: string,
|
|
||||||
apiEmail: string,
|
|
||||||
apiToken: string
|
|
||||||
): Promise<{ name: string; data: string; mimeType: string; size: number } | null> {
|
|
||||||
// Confluence webhook payload includes _links.download for the attachment
|
|
||||||
const downloadPath = attachment?._links?.download || attachment?._expandable?.download || null
|
|
||||||
const attachmentId = attachment?.id
|
|
||||||
|
|
||||||
if (!downloadPath && !attachmentId) {
|
|
||||||
logger.warn('Confluence attachment has no download path or ID, skipping download')
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
const reportedSize = Number(attachment?.extensions?.fileSize || attachment?.fileSize || 0)
|
|
||||||
if (reportedSize > CONFLUENCE_MAX_FILE_SIZE) {
|
|
||||||
logger.warn('Confluence attachment exceeds size limit, skipping', {
|
|
||||||
title: attachment?.title,
|
|
||||||
size: reportedSize,
|
|
||||||
limit: CONFLUENCE_MAX_FILE_SIZE,
|
|
||||||
})
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build the download URL
|
|
||||||
const cleanDomain = domain.replace(/\/+$/, '')
|
|
||||||
const baseUrl = cleanDomain.startsWith('http') ? cleanDomain : `https://${cleanDomain}`
|
|
||||||
const downloadUrl = downloadPath
|
|
||||||
? `${baseUrl}/wiki${downloadPath}`
|
|
||||||
: `${baseUrl}/wiki/rest/api/content/${attachmentId}/download`
|
|
||||||
|
|
||||||
try {
|
|
||||||
const authHeader = Buffer.from(`${apiEmail}:${apiToken}`).toString('base64')
|
|
||||||
|
|
||||||
const response = await fetch(downloadUrl, {
|
|
||||||
headers: {
|
|
||||||
Authorization: `Basic ${authHeader}`,
|
|
||||||
Accept: '*/*',
|
|
||||||
'X-Atlassian-Token': 'no-check',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
logger.warn('Failed to download Confluence attachment', {
|
|
||||||
title: attachment?.title,
|
|
||||||
status: response.status,
|
|
||||||
url: sanitizeUrlForLog(downloadUrl),
|
|
||||||
})
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
const arrayBuffer = await response.arrayBuffer()
|
|
||||||
const buffer = Buffer.from(arrayBuffer)
|
|
||||||
|
|
||||||
if (buffer.length > CONFLUENCE_MAX_FILE_SIZE) {
|
|
||||||
logger.warn('Downloaded Confluence attachment exceeds size limit, skipping', {
|
|
||||||
title: attachment?.title,
|
|
||||||
actualSize: buffer.length,
|
|
||||||
limit: CONFLUENCE_MAX_FILE_SIZE,
|
|
||||||
})
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
name: attachment?.title || 'attachment',
|
|
||||||
data: buffer.toString('base64'),
|
|
||||||
mimeType:
|
|
||||||
attachment?.extensions?.mediaType || attachment?.mediaType || 'application/octet-stream',
|
|
||||||
size: buffer.length,
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error downloading Confluence attachment', {
|
|
||||||
title: attachment?.title,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Format webhook input based on provider
|
* Format webhook input based on provider
|
||||||
*/
|
*/
|
||||||
@@ -1269,156 +1103,22 @@ export async function formatWebhookInput(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (foundWebhook.provider === 'jira') {
|
if (foundWebhook.provider === 'jira') {
|
||||||
const {
|
const { extractIssueData, extractCommentData, extractWorklogData } = await import(
|
||||||
extractIssueData,
|
'@/triggers/jira/utils'
|
||||||
extractCommentData,
|
)
|
||||||
extractWorklogData,
|
|
||||||
extractAttachmentData,
|
|
||||||
extractSprintData,
|
|
||||||
extractProjectData,
|
|
||||||
extractVersionData,
|
|
||||||
extractBoardData,
|
|
||||||
extractIssueLinkData,
|
|
||||||
} = await import('@/triggers/jira/utils')
|
|
||||||
|
|
||||||
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
||||||
const triggerId = providerConfig.triggerId as string | undefined
|
const triggerId = providerConfig.triggerId as string | undefined
|
||||||
|
|
||||||
if (
|
if (triggerId === 'jira_issue_commented') {
|
||||||
triggerId === 'jira_issue_commented' ||
|
|
||||||
triggerId === 'jira_comment_updated' ||
|
|
||||||
triggerId === 'jira_comment_deleted'
|
|
||||||
) {
|
|
||||||
return extractCommentData(body)
|
return extractCommentData(body)
|
||||||
}
|
}
|
||||||
if (
|
if (triggerId === 'jira_worklog_created') {
|
||||||
triggerId === 'jira_worklog_created' ||
|
|
||||||
triggerId === 'jira_worklog_updated' ||
|
|
||||||
triggerId === 'jira_worklog_deleted'
|
|
||||||
) {
|
|
||||||
return extractWorklogData(body)
|
return extractWorklogData(body)
|
||||||
}
|
}
|
||||||
if (triggerId === 'jira_attachment_created' || triggerId === 'jira_attachment_deleted') {
|
|
||||||
const result = extractAttachmentData(body)
|
|
||||||
|
|
||||||
// Download the attachment file if configured
|
|
||||||
if (triggerId === 'jira_attachment_created') {
|
|
||||||
const apiEmail = providerConfig.apiEmail as string | undefined
|
|
||||||
const apiToken = providerConfig.apiToken as string | undefined
|
|
||||||
const includeAttachments = Boolean(providerConfig.includeAttachments)
|
|
||||||
|
|
||||||
if (includeAttachments && apiEmail && apiToken && result.attachment?.content) {
|
|
||||||
const downloaded = await downloadJiraAttachment(result.attachment, apiEmail, apiToken)
|
|
||||||
if (downloaded) {
|
|
||||||
result.attachments = [downloaded]
|
|
||||||
}
|
|
||||||
} else if (includeAttachments && (!apiEmail || !apiToken)) {
|
|
||||||
logger.warn(
|
|
||||||
'Jira attachment trigger has includeAttachments enabled but missing API credentials'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
if (triggerId?.startsWith('jira_sprint_')) {
|
|
||||||
return extractSprintData(body)
|
|
||||||
}
|
|
||||||
if (triggerId?.startsWith('jira_project_')) {
|
|
||||||
return extractProjectData(body)
|
|
||||||
}
|
|
||||||
if (triggerId?.startsWith('jira_version_')) {
|
|
||||||
return extractVersionData(body)
|
|
||||||
}
|
|
||||||
if (triggerId?.startsWith('jira_board_')) {
|
|
||||||
return extractBoardData(body)
|
|
||||||
}
|
|
||||||
if (triggerId?.startsWith('jira_issuelink_')) {
|
|
||||||
return extractIssueLinkData(body)
|
|
||||||
}
|
|
||||||
return extractIssueData(body)
|
return extractIssueData(body)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (foundWebhook.provider === 'jira_service_management') {
|
|
||||||
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
|
||||||
const triggerId = providerConfig.triggerId as string | undefined
|
|
||||||
const includeFiles = Boolean(providerConfig.includeFiles)
|
|
||||||
const jiraEmail = providerConfig.jiraEmail as string | undefined
|
|
||||||
const jiraApiToken = providerConfig.jiraApiToken as string | undefined
|
|
||||||
|
|
||||||
const webhookEvent = body.webhookEvent || ''
|
|
||||||
|
|
||||||
// Base data common to all JSM events
|
|
||||||
const baseData: Record<string, any> = {
|
|
||||||
webhookEvent,
|
|
||||||
timestamp: body.timestamp,
|
|
||||||
issue: body.issue || {},
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle attachment events
|
|
||||||
if (
|
|
||||||
triggerId === 'jsm_attachment_created' ||
|
|
||||||
triggerId === 'jsm_attachment_deleted' ||
|
|
||||||
webhookEvent.includes('attachment')
|
|
||||||
) {
|
|
||||||
const attachment = body.attachment || {}
|
|
||||||
baseData.attachment = attachment
|
|
||||||
|
|
||||||
let files: Array<{ name: string; data: string; mimeType: string; size: number }> = []
|
|
||||||
|
|
||||||
if (
|
|
||||||
webhookEvent.includes('attachment_created') &&
|
|
||||||
includeFiles &&
|
|
||||||
jiraEmail &&
|
|
||||||
jiraApiToken &&
|
|
||||||
attachment.content
|
|
||||||
) {
|
|
||||||
const downloaded = await downloadJiraAttachment(attachment, jiraEmail, jiraApiToken)
|
|
||||||
if (downloaded) {
|
|
||||||
files = [downloaded]
|
|
||||||
}
|
|
||||||
} else if (
|
|
||||||
webhookEvent.includes('attachment_created') &&
|
|
||||||
includeFiles &&
|
|
||||||
(!jiraEmail || !jiraApiToken)
|
|
||||||
) {
|
|
||||||
logger.warn(
|
|
||||||
'JSM attachment trigger has includeFiles enabled but missing Jira API credentials'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
baseData.files = files
|
|
||||||
return baseData
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle comment events
|
|
||||||
if (
|
|
||||||
triggerId === 'jsm_request_commented' ||
|
|
||||||
triggerId === 'jsm_comment_updated' ||
|
|
||||||
triggerId === 'jsm_comment_deleted' ||
|
|
||||||
webhookEvent.includes('comment')
|
|
||||||
) {
|
|
||||||
baseData.comment = body.comment || {}
|
|
||||||
return baseData
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle worklog events
|
|
||||||
if (
|
|
||||||
triggerId === 'jsm_worklog_created' ||
|
|
||||||
triggerId === 'jsm_worklog_updated' ||
|
|
||||||
triggerId === 'jsm_worklog_deleted' ||
|
|
||||||
webhookEvent.includes('worklog')
|
|
||||||
) {
|
|
||||||
baseData.worklog = body.worklog || {}
|
|
||||||
return baseData
|
|
||||||
}
|
|
||||||
|
|
||||||
// Default: request events (created/updated/deleted) and generic webhook
|
|
||||||
baseData.issue_event_type_name = body.issue_event_type_name
|
|
||||||
baseData.changelog = body.changelog
|
|
||||||
return baseData
|
|
||||||
}
|
|
||||||
|
|
||||||
if (foundWebhook.provider === 'stripe') {
|
if (foundWebhook.provider === 'stripe') {
|
||||||
return body
|
return body
|
||||||
}
|
}
|
||||||
@@ -1467,70 +1167,6 @@ export async function formatWebhookInput(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (foundWebhook.provider === 'confluence') {
|
|
||||||
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
|
|
||||||
const event = body.event as string | undefined
|
|
||||||
const result: Record<string, unknown> = {
|
|
||||||
event: event || '',
|
|
||||||
timestamp: body.timestamp,
|
|
||||||
userAccountId: body.userAccountId || '',
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.page) {
|
|
||||||
result.page = body.page
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.comment) {
|
|
||||||
result.comment = body.comment
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.blog || body.blogpost) {
|
|
||||||
result.blog = body.blog || body.blogpost
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.attachment) {
|
|
||||||
result.attachment = body.attachment
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.space) {
|
|
||||||
result.space = body.space
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.label) {
|
|
||||||
result.label = body.label
|
|
||||||
}
|
|
||||||
|
|
||||||
if (body.content) {
|
|
||||||
result.content = body.content
|
|
||||||
}
|
|
||||||
|
|
||||||
// Download attachment file content when configured
|
|
||||||
const includeFileContent = Boolean(providerConfig.includeFileContent)
|
|
||||||
const confluenceEmail = providerConfig.confluenceEmail as string | undefined
|
|
||||||
const confluenceApiToken = providerConfig.confluenceApiToken as string | undefined
|
|
||||||
const confluenceDomain = providerConfig.confluenceDomain as string | undefined
|
|
||||||
|
|
||||||
if (body.attachment && includeFileContent) {
|
|
||||||
if (confluenceEmail && confluenceApiToken && confluenceDomain) {
|
|
||||||
const downloaded = await downloadConfluenceAttachment(
|
|
||||||
body.attachment,
|
|
||||||
confluenceDomain,
|
|
||||||
confluenceEmail,
|
|
||||||
confluenceApiToken
|
|
||||||
)
|
|
||||||
if (downloaded) {
|
|
||||||
result.files = [downloaded]
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.warn(
|
|
||||||
'Confluence attachment trigger has includeFileContent enabled but missing credentials (email, API token, or domain)'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
return body
|
return body
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -2364,261 +2364,6 @@ describe('hasWorkflowChanged', () => {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('Trigger Config Normalization (False Positive Prevention)', () => {
|
|
||||||
it.concurrent(
|
|
||||||
'should not detect change when deployed has null fields but current has values from triggerConfig',
|
|
||||||
() => {
|
|
||||||
// Core scenario: deployed state has null individual fields, current state has
|
|
||||||
// values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should detect change when user edits a trigger field to a different value',
|
|
||||||
() => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'old-secret' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'old-secret' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent('should not detect change when both sides have no triggerConfig', () => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should not detect change when deployed has empty fields and triggerConfig populates them',
|
|
||||||
() => {
|
|
||||||
// Empty string is also treated as "empty" by normalizeTriggerConfigValues
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent('should not detect change when triggerId differs', () => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
triggerId: { value: null },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
triggerId: { value: 'slack_webhook' },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
|
|
||||||
() => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
samplePayload_slack_webhook: { value: 'old payload' },
|
|
||||||
triggerInstructions_slack_webhook: { value: 'old instructions' },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
model: { value: 'gpt-4' },
|
|
||||||
samplePayload_slack_webhook: { value: 'new payload' },
|
|
||||||
triggerInstructions_slack_webhook: { value: 'new instructions' },
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should handle mixed scenario: some fields from triggerConfig, some user-edited',
|
|
||||||
() => {
|
|
||||||
const deployedState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
includeFiles: { id: 'includeFiles', type: 'switch', value: false },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
const currentState = createWorkflowState({
|
|
||||||
blocks: {
|
|
||||||
block1: createBlock('block1', {
|
|
||||||
type: 'starter',
|
|
||||||
subBlocks: {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
|
||||||
includeFiles: { id: 'includeFiles', type: 'switch', value: true },
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
// includeFiles changed from false to true — this IS a real change
|
|
||||||
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
|
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
|
||||||
it.concurrent('should not detect change when webhookId differs', () => {
|
it.concurrent('should not detect change when webhookId differs', () => {
|
||||||
const deployedState = createWorkflowState({
|
const deployedState = createWorkflowState({
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ import {
|
|||||||
normalizeLoop,
|
normalizeLoop,
|
||||||
normalizeParallel,
|
normalizeParallel,
|
||||||
normalizeSubBlockValue,
|
normalizeSubBlockValue,
|
||||||
normalizeTriggerConfigValues,
|
|
||||||
normalizeValue,
|
normalizeValue,
|
||||||
normalizeVariables,
|
normalizeVariables,
|
||||||
sanitizeVariable,
|
sanitizeVariable,
|
||||||
@@ -173,18 +172,14 @@ export function generateWorkflowDiffSummary(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Normalize trigger config values for both states before comparison
|
|
||||||
const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
|
|
||||||
const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
|
|
||||||
|
|
||||||
// Compare subBlocks using shared helper for filtering (single source of truth)
|
// Compare subBlocks using shared helper for filtering (single source of truth)
|
||||||
const allSubBlockIds = filterSubBlockIds([
|
const allSubBlockIds = filterSubBlockIds([
|
||||||
...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
|
...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
|
||||||
])
|
])
|
||||||
|
|
||||||
for (const subId of allSubBlockIds) {
|
for (const subId of allSubBlockIds) {
|
||||||
const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
|
const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
|
||||||
const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined
|
const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
|
||||||
|
|
||||||
if (!currentSub || !previousSub) {
|
if (!currentSub || !previousSub) {
|
||||||
changes.push({
|
changes.push({
|
||||||
|
|||||||
@@ -4,12 +4,10 @@
|
|||||||
import { describe, expect, it } from 'vitest'
|
import { describe, expect, it } from 'vitest'
|
||||||
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||||
import {
|
import {
|
||||||
filterSubBlockIds,
|
|
||||||
normalizedStringify,
|
normalizedStringify,
|
||||||
normalizeEdge,
|
normalizeEdge,
|
||||||
normalizeLoop,
|
normalizeLoop,
|
||||||
normalizeParallel,
|
normalizeParallel,
|
||||||
normalizeTriggerConfigValues,
|
|
||||||
normalizeValue,
|
normalizeValue,
|
||||||
sanitizeInputFormat,
|
sanitizeInputFormat,
|
||||||
sanitizeTools,
|
sanitizeTools,
|
||||||
@@ -586,214 +584,4 @@ describe('Workflow Normalization Utilities', () => {
|
|||||||
expect(result2).toBe(result3)
|
expect(result2).toBe(result3)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('filterSubBlockIds', () => {
|
|
||||||
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
|
|
||||||
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['botToken', 'signingSecret'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
|
|
||||||
const ids = [
|
|
||||||
'signingSecret',
|
|
||||||
'samplePayload_slack_webhook',
|
|
||||||
'triggerInstructions_slack_webhook',
|
|
||||||
'webhookUrlDisplay_slack_webhook',
|
|
||||||
'botToken',
|
|
||||||
]
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['botToken', 'signingSecret'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
|
|
||||||
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['signingSecret'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
|
|
||||||
const ids = ['mySamplePayload', 'notSamplePayload']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should return sorted results', () => {
|
|
||||||
const ids = ['zebra', 'alpha', 'middle']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['alpha', 'middle', 'zebra'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should handle empty array', () => {
|
|
||||||
expect(filterSubBlockIds([])).toEqual([])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should handle all IDs being excluded', () => {
|
|
||||||
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual([])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
|
|
||||||
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['realField'])
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
|
|
||||||
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
|
|
||||||
const result = filterSubBlockIds(ids)
|
|
||||||
expect(result).toEqual(['signingSecret'])
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
describe('normalizeTriggerConfigValues', () => {
|
|
||||||
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result).toEqual(subBlocks)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result).toEqual(subBlocks)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent(
|
|
||||||
'should return subBlocks unchanged when triggerConfig value is not an object',
|
|
||||||
() => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result).toEqual(subBlocks)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
it.concurrent('should populate null individual fields from triggerConfig', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123', botToken: 'token456' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
|
||||||
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'old-secret' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: null, botToken: undefined },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
botToken: { id: 'botToken', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
|
||||||
expect((result.botToken as Record<string, unknown>).value).toBe(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { nonExistentField: 'value123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
expect(result.nonExistentField).toBeUndefined()
|
|
||||||
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should not mutate the original subBlocks object', () => {
|
|
||||||
const original = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
|
||||||
}
|
|
||||||
normalizeTriggerConfigValues(original)
|
|
||||||
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
|
|
||||||
})
|
|
||||||
|
|
||||||
it.concurrent('should preserve other subBlock properties when populating value', () => {
|
|
||||||
const subBlocks = {
|
|
||||||
triggerConfig: {
|
|
||||||
id: 'triggerConfig',
|
|
||||||
type: 'short-input',
|
|
||||||
value: { signingSecret: 'secret123' },
|
|
||||||
},
|
|
||||||
signingSecret: {
|
|
||||||
id: 'signingSecret',
|
|
||||||
type: 'short-input',
|
|
||||||
value: null,
|
|
||||||
placeholder: 'Enter signing secret',
|
|
||||||
},
|
|
||||||
}
|
|
||||||
const result = normalizeTriggerConfigValues(subBlocks)
|
|
||||||
const normalized = result.signingSecret as Record<string, unknown>
|
|
||||||
expect(normalized.value).toBe('secret123')
|
|
||||||
expect(normalized.id).toBe('signingSecret')
|
|
||||||
expect(normalized.type).toBe('short-input')
|
|
||||||
expect(normalized.placeholder).toBe('Enter signing secret')
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -418,48 +418,10 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
|
|||||||
*/
|
*/
|
||||||
export function filterSubBlockIds(subBlockIds: string[]): string[] {
|
export function filterSubBlockIds(subBlockIds: string[]): string[] {
|
||||||
return subBlockIds
|
return subBlockIds
|
||||||
.filter((id) => {
|
.filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
|
||||||
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
|
|
||||||
if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
|
|
||||||
return false
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
.sort()
|
.sort()
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Normalizes trigger block subBlocks by populating null/empty individual fields
|
|
||||||
* from the triggerConfig aggregate subBlock. This compensates for the runtime
|
|
||||||
* population done by populateTriggerFieldsFromConfig, ensuring consistent
|
|
||||||
* comparison between client state (with populated values) and deployed state
|
|
||||||
* (with null values from DB).
|
|
||||||
*/
|
|
||||||
export function normalizeTriggerConfigValues(
|
|
||||||
subBlocks: Record<string, unknown>
|
|
||||||
): Record<string, unknown> {
|
|
||||||
const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
|
|
||||||
const triggerConfigValue = triggerConfigSub?.value
|
|
||||||
if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
|
|
||||||
return subBlocks
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = { ...subBlocks }
|
|
||||||
for (const [fieldId, configValue] of Object.entries(
|
|
||||||
triggerConfigValue as Record<string, unknown>
|
|
||||||
)) {
|
|
||||||
if (configValue === null || configValue === undefined) continue
|
|
||||||
const existingSub = result[fieldId] as Record<string, unknown> | undefined
|
|
||||||
if (
|
|
||||||
existingSub &&
|
|
||||||
(existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
|
|
||||||
) {
|
|
||||||
result[fieldId] = { ...existingSub, value: configValue }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Normalizes a subBlock value with sanitization for specific subBlock types.
|
* Normalizes a subBlock value with sanitization for specific subBlock types.
|
||||||
* Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)
|
* Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)
|
||||||
|
|||||||
@@ -18,7 +18,6 @@ import {
|
|||||||
import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers'
|
import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers'
|
||||||
import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types'
|
import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types'
|
||||||
import {
|
import {
|
||||||
COPILOT_AUTO_ALLOWED_TOOLS_API_PATH,
|
|
||||||
COPILOT_CHAT_API_PATH,
|
COPILOT_CHAT_API_PATH,
|
||||||
COPILOT_CHAT_STREAM_API_PATH,
|
COPILOT_CHAT_STREAM_API_PATH,
|
||||||
COPILOT_CHECKPOINTS_API_PATH,
|
COPILOT_CHECKPOINTS_API_PATH,
|
||||||
@@ -84,6 +83,14 @@ function isPageUnloading(): boolean {
|
|||||||
return _isPageUnloading
|
return _isPageUnloading
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
||||||
|
if (name !== 'workflow_change') return false
|
||||||
|
|
||||||
|
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||||
|
if (mode === 'apply') return true
|
||||||
|
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||||
|
}
|
||||||
|
|
||||||
function readActiveStreamFromStorage(): CopilotStreamInfo | null {
|
function readActiveStreamFromStorage(): CopilotStreamInfo | null {
|
||||||
if (typeof window === 'undefined') return null
|
if (typeof window === 'undefined') return null
|
||||||
try {
|
try {
|
||||||
@@ -140,41 +147,6 @@ function updateActiveStreamEventId(
|
|||||||
writeActiveStreamToStorage(next)
|
writeActiveStreamToStorage(next)
|
||||||
}
|
}
|
||||||
|
|
||||||
const AUTO_ALLOWED_TOOLS_STORAGE_KEY = 'copilot_auto_allowed_tools'
|
|
||||||
|
|
||||||
function readAutoAllowedToolsFromStorage(): string[] | null {
|
|
||||||
if (typeof window === 'undefined') return null
|
|
||||||
try {
|
|
||||||
const raw = window.localStorage.getItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY)
|
|
||||||
if (!raw) return null
|
|
||||||
const parsed = JSON.parse(raw)
|
|
||||||
if (!Array.isArray(parsed)) return null
|
|
||||||
return parsed.filter((item): item is string => typeof item === 'string')
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('[AutoAllowedTools] Failed to read local cache', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function writeAutoAllowedToolsToStorage(tools: string[]): void {
|
|
||||||
if (typeof window === 'undefined') return
|
|
||||||
try {
|
|
||||||
window.localStorage.setItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY, JSON.stringify(tools))
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('[AutoAllowedTools] Failed to write local cache', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function isToolAutoAllowedByList(toolId: string, autoAllowedTools: string[]): boolean {
|
|
||||||
if (!toolId) return false
|
|
||||||
const normalizedTarget = toolId.trim()
|
|
||||||
return autoAllowedTools.some((allowed) => allowed?.trim() === normalizedTarget)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clear any lingering diff preview from a previous session.
|
* Clear any lingering diff preview from a previous session.
|
||||||
* Called lazily when the store is first activated (setWorkflowId).
|
* Called lazily when the store is first activated (setWorkflowId).
|
||||||
@@ -480,11 +452,6 @@ function prepareSendContext(
|
|||||||
.catch((err) => {
|
.catch((err) => {
|
||||||
logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
|
logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
|
||||||
})
|
})
|
||||||
get()
|
|
||||||
.loadAutoAllowedTools()
|
|
||||||
.catch((err) => {
|
|
||||||
logger.warn('[Copilot] Failed to load auto-allowed tools', err)
|
|
||||||
})
|
|
||||||
|
|
||||||
let newMessages: CopilotMessage[]
|
let newMessages: CopilotMessage[]
|
||||||
if (revertState) {
|
if (revertState) {
|
||||||
@@ -1037,8 +1004,6 @@ async function resumeFromLiveStream(
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
const cachedAutoAllowedTools = readAutoAllowedToolsFromStorage()
|
|
||||||
|
|
||||||
// Initial state (subset required for UI/streaming)
|
// Initial state (subset required for UI/streaming)
|
||||||
const initialState = {
|
const initialState = {
|
||||||
mode: 'build' as const,
|
mode: 'build' as const,
|
||||||
@@ -1073,8 +1038,6 @@ const initialState = {
|
|||||||
streamingPlanContent: '',
|
streamingPlanContent: '',
|
||||||
toolCallsById: {} as Record<string, CopilotToolCall>,
|
toolCallsById: {} as Record<string, CopilotToolCall>,
|
||||||
suppressAutoSelect: false,
|
suppressAutoSelect: false,
|
||||||
autoAllowedTools: cachedAutoAllowedTools ?? ([] as string[]),
|
|
||||||
autoAllowedToolsLoaded: cachedAutoAllowedTools !== null,
|
|
||||||
activeStream: null as CopilotStreamInfo | null,
|
activeStream: null as CopilotStreamInfo | null,
|
||||||
messageQueue: [] as import('./types').QueuedMessage[],
|
messageQueue: [] as import('./types').QueuedMessage[],
|
||||||
suppressAbortContinueOption: false,
|
suppressAbortContinueOption: false,
|
||||||
@@ -1113,8 +1076,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
agentPrefetch: get().agentPrefetch,
|
agentPrefetch: get().agentPrefetch,
|
||||||
availableModels: get().availableModels,
|
availableModels: get().availableModels,
|
||||||
isLoadingModels: get().isLoadingModels,
|
isLoadingModels: get().isLoadingModels,
|
||||||
autoAllowedTools: get().autoAllowedTools,
|
|
||||||
autoAllowedToolsLoaded: get().autoAllowedToolsLoaded,
|
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -1429,16 +1390,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
|
|
||||||
// Send a message (streaming only)
|
// Send a message (streaming only)
|
||||||
sendMessage: async (message: string, options = {}) => {
|
sendMessage: async (message: string, options = {}) => {
|
||||||
if (!get().autoAllowedToolsLoaded) {
|
|
||||||
try {
|
|
||||||
await get().loadAutoAllowedTools()
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('[Copilot] Failed to preload auto-allowed tools before send', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput)
|
const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput)
|
||||||
if (!prepared) return
|
if (!prepared) return
|
||||||
|
|
||||||
@@ -1705,7 +1656,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
const b = blocks[bi]
|
const b = blocks[bi]
|
||||||
if (b?.type === 'tool_call') {
|
if (b?.type === 'tool_call') {
|
||||||
const tn = b.toolCall?.name
|
const tn = b.toolCall?.name
|
||||||
if (tn === 'edit_workflow') {
|
if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
|
||||||
id = b.toolCall?.id
|
id = b.toolCall?.id
|
||||||
break outer
|
break outer
|
||||||
}
|
}
|
||||||
@@ -1714,7 +1665,9 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
}
|
}
|
||||||
// Fallback to map if not found in messages
|
// Fallback to map if not found in messages
|
||||||
if (!id) {
|
if (!id) {
|
||||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
const candidates = Object.values(toolCallsById).filter((t) =>
|
||||||
|
isWorkflowEditToolCall(t.name, t.params)
|
||||||
|
)
|
||||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2009,7 +1962,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!context.wasAborted && sseHandlers.stream_end) {
|
if (!context.wasAborted && sseHandlers.stream_end) {
|
||||||
sseHandlers.stream_end({ type: 'done' }, context, get, set)
|
sseHandlers.stream_end({ type: 'copilot.phase.completed' }, context, get, set)
|
||||||
}
|
}
|
||||||
|
|
||||||
stopStreamingUpdates()
|
stopStreamingUpdates()
|
||||||
@@ -2407,74 +2360,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
loadAutoAllowedTools: async () => {
|
|
||||||
try {
|
|
||||||
logger.debug('[AutoAllowedTools] Loading from API...')
|
|
||||||
const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH)
|
|
||||||
logger.debug('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
|
|
||||||
if (res.ok) {
|
|
||||||
const data = await res.json()
|
|
||||||
const tools = data.autoAllowedTools ?? []
|
|
||||||
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
|
||||||
writeAutoAllowedToolsToStorage(tools)
|
|
||||||
logger.debug('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
|
|
||||||
} else {
|
|
||||||
set({ autoAllowedToolsLoaded: true })
|
|
||||||
logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status })
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
set({ autoAllowedToolsLoaded: true })
|
|
||||||
logger.error('[AutoAllowedTools] Failed to load', { error: err })
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
addAutoAllowedTool: async (toolId: string) => {
|
|
||||||
try {
|
|
||||||
logger.debug('[AutoAllowedTools] Adding tool...', { toolId })
|
|
||||||
const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolId }),
|
|
||||||
})
|
|
||||||
logger.debug('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
|
|
||||||
if (res.ok) {
|
|
||||||
const data = await res.json()
|
|
||||||
logger.debug('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
|
|
||||||
const tools = data.autoAllowedTools ?? []
|
|
||||||
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
|
||||||
writeAutoAllowedToolsToStorage(tools)
|
|
||||||
logger.debug('[AutoAllowedTools] Added tool to store', { toolId })
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err })
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
removeAutoAllowedTool: async (toolId: string) => {
|
|
||||||
try {
|
|
||||||
const res = await fetch(
|
|
||||||
`${COPILOT_AUTO_ALLOWED_TOOLS_API_PATH}?toolId=${encodeURIComponent(toolId)}`,
|
|
||||||
{
|
|
||||||
method: 'DELETE',
|
|
||||||
}
|
|
||||||
)
|
|
||||||
if (res.ok) {
|
|
||||||
const data = await res.json()
|
|
||||||
const tools = data.autoAllowedTools ?? []
|
|
||||||
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
|
||||||
writeAutoAllowedToolsToStorage(tools)
|
|
||||||
logger.debug('[AutoAllowedTools] Removed tool', { toolId })
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.error('[AutoAllowedTools] Failed to remove tool', { toolId, error: err })
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
isToolAutoAllowed: (toolId: string) => {
|
|
||||||
const { autoAllowedTools } = get()
|
|
||||||
return isToolAutoAllowedByList(toolId, autoAllowedTools)
|
|
||||||
},
|
|
||||||
|
|
||||||
// Credential masking
|
// Credential masking
|
||||||
loadSensitiveCredentialIds: async () => {
|
loadSensitiveCredentialIds: async () => {
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -26,6 +26,26 @@ export interface CopilotToolCall {
|
|||||||
params?: Record<string, unknown>
|
params?: Record<string, unknown>
|
||||||
input?: Record<string, unknown>
|
input?: Record<string, unknown>
|
||||||
display?: ClientToolDisplay
|
display?: ClientToolDisplay
|
||||||
|
/** Server-provided UI contract for this tool call phase */
|
||||||
|
ui?: {
|
||||||
|
title?: string
|
||||||
|
phaseLabel?: string
|
||||||
|
icon?: string
|
||||||
|
showInterrupt?: boolean
|
||||||
|
showRemember?: boolean
|
||||||
|
autoAllowed?: boolean
|
||||||
|
actions?: Array<{
|
||||||
|
id: string
|
||||||
|
label: string
|
||||||
|
kind: 'accept' | 'reject'
|
||||||
|
remember?: boolean
|
||||||
|
}>
|
||||||
|
}
|
||||||
|
/** Server-provided execution routing contract */
|
||||||
|
execution?: {
|
||||||
|
target?: 'go' | 'go_subagent' | 'sim_server' | 'sim_client_capability' | string
|
||||||
|
capabilityId?: string
|
||||||
|
}
|
||||||
/** Content streamed from a subagent (e.g., debug agent) */
|
/** Content streamed from a subagent (e.g., debug agent) */
|
||||||
subAgentContent?: string
|
subAgentContent?: string
|
||||||
/** Tool calls made by the subagent */
|
/** Tool calls made by the subagent */
|
||||||
@@ -167,10 +187,6 @@ export interface CopilotState {
|
|||||||
|
|
||||||
// Per-message metadata captured at send-time for reliable stats
|
// Per-message metadata captured at send-time for reliable stats
|
||||||
|
|
||||||
// Auto-allowed integration tools (tools that can run without confirmation)
|
|
||||||
autoAllowedTools: string[]
|
|
||||||
autoAllowedToolsLoaded: boolean
|
|
||||||
|
|
||||||
// Active stream metadata for reconnect/replay
|
// Active stream metadata for reconnect/replay
|
||||||
activeStream: CopilotStreamInfo | null
|
activeStream: CopilotStreamInfo | null
|
||||||
|
|
||||||
@@ -247,11 +263,6 @@ export interface CopilotActions {
|
|||||||
abortSignal?: AbortSignal
|
abortSignal?: AbortSignal
|
||||||
) => Promise<void>
|
) => Promise<void>
|
||||||
handleNewChatCreation: (newChatId: string) => Promise<void>
|
handleNewChatCreation: (newChatId: string) => Promise<void>
|
||||||
loadAutoAllowedTools: () => Promise<void>
|
|
||||||
addAutoAllowedTool: (toolId: string) => Promise<void>
|
|
||||||
removeAutoAllowedTool: (toolId: string) => Promise<void>
|
|
||||||
isToolAutoAllowed: (toolId: string) => boolean
|
|
||||||
|
|
||||||
// Credential masking
|
// Credential masking
|
||||||
loadSensitiveCredentialIds: () => Promise<void>
|
loadSensitiveCredentialIds: () => Promise<void>
|
||||||
maskCredentialValue: (value: string) => string
|
maskCredentialValue: (value: string) => string
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ import {
|
|||||||
captureBaselineSnapshot,
|
captureBaselineSnapshot,
|
||||||
cloneWorkflowState,
|
cloneWorkflowState,
|
||||||
createBatchedUpdater,
|
createBatchedUpdater,
|
||||||
findLatestEditWorkflowToolCallId,
|
findLatestWorkflowEditToolCallId,
|
||||||
getLatestUserMessageId,
|
getLatestUserMessageId,
|
||||||
persistWorkflowStateToServer,
|
persistWorkflowStateToServer,
|
||||||
} from './utils'
|
} from './utils'
|
||||||
@@ -334,7 +334,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
findLatestEditWorkflowToolCallId().then((toolCallId) => {
|
findLatestWorkflowEditToolCallId().then((toolCallId) => {
|
||||||
if (toolCallId) {
|
if (toolCallId) {
|
||||||
import('@/stores/panel/copilot/store')
|
import('@/stores/panel/copilot/store')
|
||||||
.then(({ useCopilotStore }) => {
|
.then(({ useCopilotStore }) => {
|
||||||
@@ -439,7 +439,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
findLatestEditWorkflowToolCallId().then((toolCallId) => {
|
findLatestWorkflowEditToolCallId().then((toolCallId) => {
|
||||||
if (toolCallId) {
|
if (toolCallId) {
|
||||||
import('@/stores/panel/copilot/store')
|
import('@/stores/panel/copilot/store')
|
||||||
.then(({ useCopilotStore }) => {
|
.then(({ useCopilotStore }) => {
|
||||||
|
|||||||
@@ -126,6 +126,20 @@ export async function getLatestUserMessageId(): Promise<string | null> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export async function findLatestEditWorkflowToolCallId(): Promise<string | undefined> {
|
export async function findLatestEditWorkflowToolCallId(): Promise<string | undefined> {
|
||||||
|
return findLatestWorkflowEditToolCallId()
|
||||||
|
}
|
||||||
|
|
||||||
|
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
||||||
|
if (name !== 'workflow_change') return false
|
||||||
|
|
||||||
|
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
||||||
|
if (mode === 'apply') return true
|
||||||
|
|
||||||
|
// Be permissive for incomplete events: apply calls always include proposalId.
|
||||||
|
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function findLatestWorkflowEditToolCallId(): Promise<string | undefined> {
|
||||||
try {
|
try {
|
||||||
const { useCopilotStore } = await import('@/stores/panel/copilot/store')
|
const { useCopilotStore } = await import('@/stores/panel/copilot/store')
|
||||||
const { messages, toolCallsById } = useCopilotStore.getState()
|
const { messages, toolCallsById } = useCopilotStore.getState()
|
||||||
@@ -134,17 +148,22 @@ export async function findLatestEditWorkflowToolCallId(): Promise<string | undef
|
|||||||
const message = messages[mi]
|
const message = messages[mi]
|
||||||
if (message.role !== 'assistant' || !message.contentBlocks) continue
|
if (message.role !== 'assistant' || !message.contentBlocks) continue
|
||||||
for (const block of message.contentBlocks) {
|
for (const block of message.contentBlocks) {
|
||||||
if (block?.type === 'tool_call' && block.toolCall?.name === 'edit_workflow') {
|
if (
|
||||||
|
block?.type === 'tool_call' &&
|
||||||
|
isWorkflowEditToolCall(block.toolCall?.name, block.toolCall?.params)
|
||||||
|
) {
|
||||||
return block.toolCall?.id
|
return block.toolCall?.id
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const fallback = Object.values(toolCallsById).filter((call) => call.name === 'edit_workflow')
|
const fallback = Object.values(toolCallsById).filter((call) =>
|
||||||
|
isWorkflowEditToolCall(call.name, call.params)
|
||||||
|
)
|
||||||
|
|
||||||
return fallback.length ? fallback[fallback.length - 1].id : undefined
|
return fallback.length ? fallback[fallback.length - 1].id : undefined
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.warn('Failed to resolve edit_workflow tool call id', { error })
|
logger.warn('Failed to resolve workflow edit tool call id', { error })
|
||||||
return undefined
|
return undefined
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,8 @@
|
|||||||
import { BLOGPOST_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
import {
|
||||||
|
CONTENT_BODY_OUTPUT_PROPERTIES,
|
||||||
|
TIMESTAMP_OUTPUT,
|
||||||
|
VERSION_OUTPUT_PROPERTIES,
|
||||||
|
} from '@/tools/confluence/types'
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceCreateBlogPostParams {
|
export interface ConfluenceCreateBlogPostParams {
|
||||||
@@ -125,6 +129,23 @@ export const confluenceCreateBlogPostTool: ToolConfig<
|
|||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: TIMESTAMP_OUTPUT,
|
||||||
...BLOGPOST_ITEM_PROPERTIES,
|
id: { type: 'string', description: 'Created blog post ID' },
|
||||||
|
title: { type: 'string', description: 'Blog post title' },
|
||||||
|
status: { type: 'string', description: 'Blog post status', optional: true },
|
||||||
|
spaceId: { type: 'string', description: 'Space ID' },
|
||||||
|
authorId: { type: 'string', description: 'Author account ID', optional: true },
|
||||||
|
body: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Blog post body content',
|
||||||
|
properties: CONTENT_BODY_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
version: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Blog post version information',
|
||||||
|
properties: VERSION_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
webUrl: { type: 'string', description: 'URL to view the blog post', optional: true },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceCreateCommentParams {
|
export interface ConfluenceCreateCommentParams {
|
||||||
@@ -100,7 +99,7 @@ export const confluenceCreateCommentTool: ToolConfig<
|
|||||||
},
|
},
|
||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: { type: 'string', description: 'Timestamp of creation' },
|
||||||
commentId: { type: 'string', description: 'Created comment ID' },
|
commentId: { type: 'string', description: 'Created comment ID' },
|
||||||
pageId: { type: 'string', description: 'Page ID' },
|
pageId: { type: 'string', description: 'Page ID' },
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,8 +1,4 @@
|
|||||||
import {
|
import { CONTENT_BODY_OUTPUT_PROPERTIES, VERSION_OUTPUT_PROPERTIES } from '@/tools/confluence/types'
|
||||||
CONTENT_BODY_OUTPUT_PROPERTIES,
|
|
||||||
TIMESTAMP_OUTPUT,
|
|
||||||
VERSION_OUTPUT_PROPERTIES,
|
|
||||||
} from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceCreatePageParams {
|
export interface ConfluenceCreatePageParams {
|
||||||
@@ -132,7 +128,7 @@ export const confluenceCreatePageTool: ToolConfig<
|
|||||||
},
|
},
|
||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: { type: 'string', description: 'Timestamp of creation' },
|
||||||
pageId: { type: 'string', description: 'Created page ID' },
|
pageId: { type: 'string', description: 'Created page ID' },
|
||||||
title: { type: 'string', description: 'Page title' },
|
title: { type: 'string', description: 'Page title' },
|
||||||
status: { type: 'string', description: 'Page status', optional: true },
|
status: { type: 'string', description: 'Page status', optional: true },
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { PAGE_PROPERTY_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
import { TIMESTAMP_OUTPUT, VERSION_OUTPUT_PROPERTIES } from '@/tools/confluence/types'
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceCreatePagePropertyParams {
|
export interface ConfluenceCreatePagePropertyParams {
|
||||||
@@ -115,6 +115,13 @@ export const confluenceCreatePagePropertyTool: ToolConfig<
|
|||||||
ts: TIMESTAMP_OUTPUT,
|
ts: TIMESTAMP_OUTPUT,
|
||||||
pageId: { type: 'string', description: 'ID of the page' },
|
pageId: { type: 'string', description: 'ID of the page' },
|
||||||
propertyId: { type: 'string', description: 'ID of the created property' },
|
propertyId: { type: 'string', description: 'ID of the created property' },
|
||||||
...PAGE_PROPERTY_ITEM_PROPERTIES,
|
key: { type: 'string', description: 'Property key' },
|
||||||
|
value: { type: 'json', description: 'Property value' },
|
||||||
|
version: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Version information',
|
||||||
|
properties: VERSION_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,123 +0,0 @@
|
|||||||
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceCreateSpaceParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
name: string
|
|
||||||
key: string
|
|
||||||
description?: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceCreateSpaceResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
id: string
|
|
||||||
key: string
|
|
||||||
name: string
|
|
||||||
type: string
|
|
||||||
status: string
|
|
||||||
homepageId: string | null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceCreateSpaceTool: ToolConfig<
|
|
||||||
ConfluenceCreateSpaceParams,
|
|
||||||
ConfluenceCreateSpaceResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_create_space',
|
|
||||||
name: 'Confluence Create Space',
|
|
||||||
description: 'Create a new Confluence space.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
name: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Name for the new space',
|
|
||||||
},
|
|
||||||
key: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Unique key for the space (short identifier used in URLs)',
|
|
||||||
},
|
|
||||||
description: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Description for the space',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/spaces',
|
|
||||||
method: 'POST',
|
|
||||||
headers: (params: ConfluenceCreateSpaceParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceCreateSpaceParams) => ({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
name: params.name,
|
|
||||||
key: params.key,
|
|
||||||
description: params.description,
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
id: data.id ?? '',
|
|
||||||
key: data.key ?? '',
|
|
||||||
name: data.name ?? '',
|
|
||||||
type: data.type ?? '',
|
|
||||||
status: data.status ?? '',
|
|
||||||
homepageId: data.homepageId ?? null,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
id: { type: 'string', description: 'ID of the created space' },
|
|
||||||
key: { type: 'string', description: 'Key of the created space' },
|
|
||||||
name: { type: 'string', description: 'Name of the created space' },
|
|
||||||
type: { type: 'string', description: 'Type of the space' },
|
|
||||||
status: { type: 'string', description: 'Status of the space' },
|
|
||||||
homepageId: { type: 'string', description: 'ID of the space homepage', optional: true },
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,118 +0,0 @@
|
|||||||
import { SPACE_PROPERTY_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceCreateSpacePropertyParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
spaceId: string
|
|
||||||
key: string
|
|
||||||
value: unknown
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceCreateSpacePropertyResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
spaceId: string
|
|
||||||
propertyId: string
|
|
||||||
key: string
|
|
||||||
value: unknown
|
|
||||||
version: { number: number } | null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceCreateSpacePropertyTool: ToolConfig<
|
|
||||||
ConfluenceCreateSpacePropertyParams,
|
|
||||||
ConfluenceCreateSpacePropertyResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_create_space_property',
|
|
||||||
name: 'Confluence Create Space Property',
|
|
||||||
description: 'Create a new content property on a Confluence space.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
spaceId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the space to add the property to',
|
|
||||||
},
|
|
||||||
key: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The key/name for the property',
|
|
||||||
},
|
|
||||||
value: {
|
|
||||||
type: 'json',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The value for the property (can be any JSON value)',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/space-properties',
|
|
||||||
method: 'POST',
|
|
||||||
headers: (params: ConfluenceCreateSpacePropertyParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceCreateSpacePropertyParams) => ({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
spaceId: params.spaceId?.trim(),
|
|
||||||
key: params.key,
|
|
||||||
value: params.value,
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
spaceId: data.spaceId ?? '',
|
|
||||||
propertyId: data.id ?? '',
|
|
||||||
key: data.key ?? '',
|
|
||||||
value: data.value ?? null,
|
|
||||||
version: data.version ?? null,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
spaceId: { type: 'string', description: 'ID of the space' },
|
|
||||||
propertyId: { type: 'string', description: 'ID of the created property' },
|
|
||||||
...SPACE_PROPERTY_ITEM_PROPERTIES,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,127 +0,0 @@
|
|||||||
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceCreateWhiteboardParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
spaceId: string
|
|
||||||
title: string
|
|
||||||
parentId?: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceCreateWhiteboardResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
id: string
|
|
||||||
title: string
|
|
||||||
spaceId: string
|
|
||||||
parentId: string | null
|
|
||||||
parentType: string | null
|
|
||||||
authorId: string | null
|
|
||||||
createdAt: string | null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceCreateWhiteboardTool: ToolConfig<
|
|
||||||
ConfluenceCreateWhiteboardParams,
|
|
||||||
ConfluenceCreateWhiteboardResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_create_whiteboard',
|
|
||||||
name: 'Confluence Create Whiteboard',
|
|
||||||
description: 'Create a new whiteboard in a Confluence space.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
spaceId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the space to create the whiteboard in',
|
|
||||||
},
|
|
||||||
title: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Title for the whiteboard',
|
|
||||||
},
|
|
||||||
parentId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'ID of the parent content (optional)',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/whiteboards',
|
|
||||||
method: 'POST',
|
|
||||||
headers: (params: ConfluenceCreateWhiteboardParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceCreateWhiteboardParams) => ({
|
|
||||||
action: 'create',
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
spaceId: params.spaceId?.trim(),
|
|
||||||
title: params.title,
|
|
||||||
parentId: params.parentId?.trim(),
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
id: data.id ?? '',
|
|
||||||
title: data.title ?? '',
|
|
||||||
spaceId: data.spaceId ?? '',
|
|
||||||
parentId: data.parentId ?? null,
|
|
||||||
parentType: data.parentType ?? null,
|
|
||||||
authorId: data.authorId ?? null,
|
|
||||||
createdAt: data.createdAt ?? null,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
id: { type: 'string', description: 'ID of the created whiteboard' },
|
|
||||||
title: { type: 'string', description: 'Title of the whiteboard' },
|
|
||||||
spaceId: { type: 'string', description: 'ID of the space' },
|
|
||||||
parentId: { type: 'string', description: 'ID of the parent content', optional: true },
|
|
||||||
parentType: { type: 'string', description: 'Type of the parent content', optional: true },
|
|
||||||
authorId: { type: 'string', description: 'Author account ID', optional: true },
|
|
||||||
createdAt: { type: 'string', description: 'Creation timestamp', optional: true },
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
import { DELETED_OUTPUT, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceDeleteAttachmentParams {
|
export interface ConfluenceDeleteAttachmentParams {
|
||||||
@@ -91,8 +90,8 @@ export const confluenceDeleteAttachmentTool: ToolConfig<
|
|||||||
},
|
},
|
||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: { type: 'string', description: 'Timestamp of deletion' },
|
||||||
attachmentId: { type: 'string', description: 'Deleted attachment ID' },
|
attachmentId: { type: 'string', description: 'Deleted attachment ID' },
|
||||||
deleted: DELETED_OUTPUT,
|
deleted: { type: 'boolean', description: 'Deletion status' },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,82 +0,0 @@
|
|||||||
import type {
|
|
||||||
ConfluenceDeleteBlogPostParams,
|
|
||||||
ConfluenceDeleteBlogPostResponse,
|
|
||||||
} from '@/tools/confluence/types'
|
|
||||||
import { DELETED_OUTPUT, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export const confluenceDeleteBlogPostTool: ToolConfig<
|
|
||||||
ConfluenceDeleteBlogPostParams,
|
|
||||||
ConfluenceDeleteBlogPostResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_delete_blogpost',
|
|
||||||
name: 'Confluence Delete Blog Post',
|
|
||||||
description: 'Delete a Confluence blog post.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
blogPostId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the blog post to delete',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/blogposts',
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: (params: ConfluenceDeleteBlogPostParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceDeleteBlogPostParams) => ({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
blogPostId: params.blogPostId?.trim(),
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
blogPostId: data.blogPostId ?? '',
|
|
||||||
deleted: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
blogPostId: { type: 'string', description: 'Deleted blog post ID' },
|
|
||||||
deleted: DELETED_OUTPUT,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
import { DELETED_OUTPUT, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceDeleteCommentParams {
|
export interface ConfluenceDeleteCommentParams {
|
||||||
@@ -91,8 +90,8 @@ export const confluenceDeleteCommentTool: ToolConfig<
|
|||||||
},
|
},
|
||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: { type: 'string', description: 'Timestamp of deletion' },
|
||||||
commentId: { type: 'string', description: 'Deleted comment ID' },
|
commentId: { type: 'string', description: 'Deleted comment ID' },
|
||||||
deleted: DELETED_OUTPUT,
|
deleted: { type: 'boolean', description: 'Deletion status' },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import { DELETED_OUTPUT, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceDeletePageParams {
|
export interface ConfluenceDeletePageParams {
|
||||||
@@ -101,8 +100,8 @@ export const confluenceDeletePageTool: ToolConfig<
|
|||||||
},
|
},
|
||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: { type: 'string', description: 'Timestamp of deletion' },
|
||||||
pageId: { type: 'string', description: 'Deleted page ID' },
|
pageId: { type: 'string', description: 'Deleted page ID' },
|
||||||
deleted: DELETED_OUTPUT,
|
deleted: { type: 'boolean', description: 'Deletion status' },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,94 +0,0 @@
|
|||||||
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceDeleteSpaceParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
spaceId: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceDeleteSpaceResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
spaceId: string
|
|
||||||
deleted: boolean
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceDeleteSpaceTool: ToolConfig<
|
|
||||||
ConfluenceDeleteSpaceParams,
|
|
||||||
ConfluenceDeleteSpaceResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_delete_space',
|
|
||||||
name: 'Confluence Delete Space',
|
|
||||||
description: 'Delete a Confluence space by its ID.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
spaceId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the space to delete',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/spaces',
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: (params: ConfluenceDeleteSpaceParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceDeleteSpaceParams) => ({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
spaceId: params.spaceId?.trim(),
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
spaceId: data.spaceId ?? '',
|
|
||||||
deleted: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
spaceId: { type: 'string', description: 'ID of the deleted space' },
|
|
||||||
deleted: { type: 'boolean', description: 'Deletion status' },
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,8 @@
|
|||||||
import { BLOGPOST_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
import {
|
||||||
|
CONTENT_BODY_OUTPUT_PROPERTIES,
|
||||||
|
TIMESTAMP_OUTPUT,
|
||||||
|
VERSION_OUTPUT_PROPERTIES,
|
||||||
|
} from '@/tools/confluence/types'
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceGetBlogPostParams {
|
export interface ConfluenceGetBlogPostParams {
|
||||||
@@ -117,6 +121,24 @@ export const confluenceGetBlogPostTool: ToolConfig<
|
|||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: TIMESTAMP_OUTPUT,
|
||||||
...BLOGPOST_ITEM_PROPERTIES,
|
id: { type: 'string', description: 'Blog post ID' },
|
||||||
|
title: { type: 'string', description: 'Blog post title' },
|
||||||
|
status: { type: 'string', description: 'Blog post status', optional: true },
|
||||||
|
spaceId: { type: 'string', description: 'Space ID', optional: true },
|
||||||
|
authorId: { type: 'string', description: 'Author account ID', optional: true },
|
||||||
|
createdAt: { type: 'string', description: 'Creation timestamp', optional: true },
|
||||||
|
version: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Version information',
|
||||||
|
properties: VERSION_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
body: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Blog post body content in requested format(s)',
|
||||||
|
properties: CONTENT_BODY_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
webUrl: { type: 'string', description: 'URL to view the blog post', optional: true },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,111 +0,0 @@
|
|||||||
import { SPACE_PROPERTY_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceGetSpacePropertyParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
spaceId: string
|
|
||||||
propertyId: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceGetSpacePropertyResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
spaceId: string
|
|
||||||
id: string
|
|
||||||
key: string
|
|
||||||
value: unknown
|
|
||||||
version: { number: number } | null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceGetSpacePropertyTool: ToolConfig<
|
|
||||||
ConfluenceGetSpacePropertyParams,
|
|
||||||
ConfluenceGetSpacePropertyResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_get_space_property',
|
|
||||||
name: 'Confluence Get Space Property',
|
|
||||||
description: 'Get a specific content property from a Confluence space by its ID.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
spaceId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the space containing the property',
|
|
||||||
},
|
|
||||||
propertyId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the property to retrieve',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: (params: ConfluenceGetSpacePropertyParams) => {
|
|
||||||
const query = new URLSearchParams({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
spaceId: params.spaceId,
|
|
||||||
propertyId: params.propertyId,
|
|
||||||
action: 'get',
|
|
||||||
})
|
|
||||||
if (params.cloudId) query.set('cloudId', params.cloudId)
|
|
||||||
return `/api/tools/confluence/space-properties?${query.toString()}`
|
|
||||||
},
|
|
||||||
method: 'GET',
|
|
||||||
headers: (params: ConfluenceGetSpacePropertyParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
spaceId: data.spaceId ?? '',
|
|
||||||
id: data.id ?? '',
|
|
||||||
key: data.key ?? '',
|
|
||||||
value: data.value ?? null,
|
|
||||||
version: data.version ?? null,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
spaceId: { type: 'string', description: 'ID of the space' },
|
|
||||||
...SPACE_PROPERTY_ITEM_PROPERTIES,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,117 +0,0 @@
|
|||||||
import { TASK_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceGetTaskParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
taskId: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceGetTaskResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
id: string
|
|
||||||
localId: string
|
|
||||||
spaceId: string | null
|
|
||||||
pageId: string | null
|
|
||||||
blogPostId: string | null
|
|
||||||
status: string
|
|
||||||
body: Record<string, unknown> | null
|
|
||||||
createdBy: string | null
|
|
||||||
assignedTo: string | null
|
|
||||||
completedBy: string | null
|
|
||||||
createdAt: string | null
|
|
||||||
updatedAt: string | null
|
|
||||||
dueAt: string | null
|
|
||||||
completedAt: string | null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceGetTaskTool: ToolConfig<ConfluenceGetTaskParams, ConfluenceGetTaskResponse> =
|
|
||||||
{
|
|
||||||
id: 'confluence_get_task',
|
|
||||||
name: 'Confluence Get Task',
|
|
||||||
description: 'Get a specific task by its ID from Confluence.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
taskId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the task to retrieve',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/tasks',
|
|
||||||
method: 'POST',
|
|
||||||
headers: (params: ConfluenceGetTaskParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceGetTaskParams) => ({
|
|
||||||
action: 'get',
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
taskId: params.taskId?.trim(),
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
id: data.id ?? '',
|
|
||||||
localId: data.localId ?? '',
|
|
||||||
spaceId: data.spaceId ?? null,
|
|
||||||
pageId: data.pageId ?? null,
|
|
||||||
blogPostId: data.blogPostId ?? null,
|
|
||||||
status: data.status ?? '',
|
|
||||||
body: data.body ?? null,
|
|
||||||
createdBy: data.createdBy ?? null,
|
|
||||||
assignedTo: data.assignedTo ?? null,
|
|
||||||
completedBy: data.completedBy ?? null,
|
|
||||||
createdAt: data.createdAt ?? null,
|
|
||||||
updatedAt: data.updatedAt ?? null,
|
|
||||||
dueAt: data.dueAt ?? null,
|
|
||||||
completedAt: data.completedAt ?? null,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
...TASK_ITEM_PROPERTIES,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -4,7 +4,6 @@ import { confluenceCreateCommentTool } from '@/tools/confluence/create_comment'
|
|||||||
import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
|
import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
|
||||||
import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
|
import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
|
||||||
import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
|
import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
|
||||||
import { confluenceDeleteBlogPostTool } from '@/tools/confluence/delete_blogpost'
|
|
||||||
import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
|
import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
|
||||||
import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
|
import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
|
||||||
import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
|
import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
|
||||||
@@ -32,9 +31,6 @@ import {
|
|||||||
ATTACHMENT_ITEM_PROPERTIES,
|
ATTACHMENT_ITEM_PROPERTIES,
|
||||||
ATTACHMENT_OUTPUT,
|
ATTACHMENT_OUTPUT,
|
||||||
ATTACHMENTS_OUTPUT,
|
ATTACHMENTS_OUTPUT,
|
||||||
BLOGPOST_ITEM_PROPERTIES,
|
|
||||||
BLOGPOST_OUTPUT,
|
|
||||||
BLOGPOSTS_OUTPUT,
|
|
||||||
BODY_FORMAT_PROPERTIES,
|
BODY_FORMAT_PROPERTIES,
|
||||||
COMMENT_BODY_OUTPUT_PROPERTIES,
|
COMMENT_BODY_OUTPUT_PROPERTIES,
|
||||||
COMMENT_ITEM_PROPERTIES,
|
COMMENT_ITEM_PROPERTIES,
|
||||||
@@ -51,7 +47,6 @@ import {
|
|||||||
PAGE_ID_OUTPUT,
|
PAGE_ID_OUTPUT,
|
||||||
PAGE_ITEM_PROPERTIES,
|
PAGE_ITEM_PROPERTIES,
|
||||||
PAGE_OUTPUT,
|
PAGE_OUTPUT,
|
||||||
PAGE_PROPERTY_ITEM_PROPERTIES,
|
|
||||||
PAGES_OUTPUT,
|
PAGES_OUTPUT,
|
||||||
PAGINATION_LINKS_PROPERTIES,
|
PAGINATION_LINKS_PROPERTIES,
|
||||||
SEARCH_RESULT_ITEM_PROPERTIES,
|
SEARCH_RESULT_ITEM_PROPERTIES,
|
||||||
@@ -64,15 +59,12 @@ import {
|
|||||||
SPACES_OUTPUT,
|
SPACES_OUTPUT,
|
||||||
SUCCESS_OUTPUT,
|
SUCCESS_OUTPUT,
|
||||||
TIMESTAMP_OUTPUT,
|
TIMESTAMP_OUTPUT,
|
||||||
UPDATED_OUTPUT,
|
|
||||||
URL_OUTPUT,
|
URL_OUTPUT,
|
||||||
VERSION_OUTPUT,
|
VERSION_OUTPUT,
|
||||||
VERSION_OUTPUT_PROPERTIES,
|
VERSION_OUTPUT_PROPERTIES,
|
||||||
} from '@/tools/confluence/types'
|
} from '@/tools/confluence/types'
|
||||||
import { confluenceUpdateTool } from '@/tools/confluence/update'
|
import { confluenceUpdateTool } from '@/tools/confluence/update'
|
||||||
import { confluenceUpdateBlogPostTool } from '@/tools/confluence/update_blogpost'
|
|
||||||
import { confluenceUpdateCommentTool } from '@/tools/confluence/update_comment'
|
import { confluenceUpdateCommentTool } from '@/tools/confluence/update_comment'
|
||||||
import { confluenceUpdatePagePropertyTool } from '@/tools/confluence/update_page_property'
|
|
||||||
import { confluenceUploadAttachmentTool } from '@/tools/confluence/upload_attachment'
|
import { confluenceUploadAttachmentTool } from '@/tools/confluence/upload_attachment'
|
||||||
|
|
||||||
export {
|
export {
|
||||||
@@ -90,14 +82,11 @@ export {
|
|||||||
// Page Properties Tools
|
// Page Properties Tools
|
||||||
confluenceListPagePropertiesTool,
|
confluenceListPagePropertiesTool,
|
||||||
confluenceCreatePagePropertyTool,
|
confluenceCreatePagePropertyTool,
|
||||||
confluenceUpdatePagePropertyTool,
|
|
||||||
confluenceDeletePagePropertyTool,
|
confluenceDeletePagePropertyTool,
|
||||||
// Blog Post Tools
|
// Blog Post Tools
|
||||||
confluenceListBlogPostsTool,
|
confluenceListBlogPostsTool,
|
||||||
confluenceGetBlogPostTool,
|
confluenceGetBlogPostTool,
|
||||||
confluenceCreateBlogPostTool,
|
confluenceCreateBlogPostTool,
|
||||||
confluenceUpdateBlogPostTool,
|
|
||||||
confluenceDeleteBlogPostTool,
|
|
||||||
confluenceListBlogPostsInSpaceTool,
|
confluenceListBlogPostsInSpaceTool,
|
||||||
// Search Tools
|
// Search Tools
|
||||||
confluenceSearchTool,
|
confluenceSearchTool,
|
||||||
@@ -122,11 +111,9 @@ export {
|
|||||||
confluenceListSpacesTool,
|
confluenceListSpacesTool,
|
||||||
// Item property constants (for use in outputs)
|
// Item property constants (for use in outputs)
|
||||||
ATTACHMENT_ITEM_PROPERTIES,
|
ATTACHMENT_ITEM_PROPERTIES,
|
||||||
BLOGPOST_ITEM_PROPERTIES,
|
|
||||||
COMMENT_ITEM_PROPERTIES,
|
COMMENT_ITEM_PROPERTIES,
|
||||||
LABEL_ITEM_PROPERTIES,
|
LABEL_ITEM_PROPERTIES,
|
||||||
PAGE_ITEM_PROPERTIES,
|
PAGE_ITEM_PROPERTIES,
|
||||||
PAGE_PROPERTY_ITEM_PROPERTIES,
|
|
||||||
SEARCH_RESULT_ITEM_PROPERTIES,
|
SEARCH_RESULT_ITEM_PROPERTIES,
|
||||||
SPACE_ITEM_PROPERTIES,
|
SPACE_ITEM_PROPERTIES,
|
||||||
VERSION_OUTPUT_PROPERTIES,
|
VERSION_OUTPUT_PROPERTIES,
|
||||||
@@ -140,8 +127,6 @@ export {
|
|||||||
// Complete output definitions (for use in outputs)
|
// Complete output definitions (for use in outputs)
|
||||||
ATTACHMENT_OUTPUT,
|
ATTACHMENT_OUTPUT,
|
||||||
ATTACHMENTS_OUTPUT,
|
ATTACHMENTS_OUTPUT,
|
||||||
BLOGPOST_OUTPUT,
|
|
||||||
BLOGPOSTS_OUTPUT,
|
|
||||||
COMMENT_OUTPUT,
|
COMMENT_OUTPUT,
|
||||||
COMMENTS_OUTPUT,
|
COMMENTS_OUTPUT,
|
||||||
CONTENT_BODY_OUTPUT,
|
CONTENT_BODY_OUTPUT,
|
||||||
@@ -160,6 +145,5 @@ export {
|
|||||||
PAGE_ID_OUTPUT,
|
PAGE_ID_OUTPUT,
|
||||||
SUCCESS_OUTPUT,
|
SUCCESS_OUTPUT,
|
||||||
DELETED_OUTPUT,
|
DELETED_OUTPUT,
|
||||||
UPDATED_OUTPUT,
|
|
||||||
URL_OUTPUT,
|
URL_OUTPUT,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { BLOGPOST_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
import { TIMESTAMP_OUTPUT, VERSION_OUTPUT_PROPERTIES } from '@/tools/confluence/types'
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceListBlogPostsParams {
|
export interface ConfluenceListBlogPostsParams {
|
||||||
@@ -141,7 +141,21 @@ export const confluenceListBlogPostsTool: ToolConfig<
|
|||||||
description: 'Array of blog posts',
|
description: 'Array of blog posts',
|
||||||
items: {
|
items: {
|
||||||
type: 'object',
|
type: 'object',
|
||||||
properties: BLOGPOST_ITEM_PROPERTIES,
|
properties: {
|
||||||
|
id: { type: 'string', description: 'Blog post ID' },
|
||||||
|
title: { type: 'string', description: 'Blog post title' },
|
||||||
|
status: { type: 'string', description: 'Blog post status', optional: true },
|
||||||
|
spaceId: { type: 'string', description: 'Space ID', optional: true },
|
||||||
|
authorId: { type: 'string', description: 'Author account ID', optional: true },
|
||||||
|
createdAt: { type: 'string', description: 'Creation timestamp', optional: true },
|
||||||
|
version: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Version information',
|
||||||
|
properties: VERSION_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
webUrl: { type: 'string', description: 'URL to view the blog post', optional: true },
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
nextCursor: {
|
nextCursor: {
|
||||||
|
|||||||
@@ -1,4 +1,8 @@
|
|||||||
import { BLOGPOST_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
import {
|
||||||
|
CONTENT_BODY_OUTPUT_PROPERTIES,
|
||||||
|
TIMESTAMP_OUTPUT,
|
||||||
|
VERSION_OUTPUT_PROPERTIES,
|
||||||
|
} from '@/tools/confluence/types'
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceListBlogPostsInSpaceParams {
|
export interface ConfluenceListBlogPostsInSpaceParams {
|
||||||
@@ -142,7 +146,27 @@ export const confluenceListBlogPostsInSpaceTool: ToolConfig<
|
|||||||
description: 'Array of blog posts in the space',
|
description: 'Array of blog posts in the space',
|
||||||
items: {
|
items: {
|
||||||
type: 'object',
|
type: 'object',
|
||||||
properties: BLOGPOST_ITEM_PROPERTIES,
|
properties: {
|
||||||
|
id: { type: 'string', description: 'Blog post ID' },
|
||||||
|
title: { type: 'string', description: 'Blog post title' },
|
||||||
|
status: { type: 'string', description: 'Blog post status', optional: true },
|
||||||
|
spaceId: { type: 'string', description: 'Space ID', optional: true },
|
||||||
|
authorId: { type: 'string', description: 'Author account ID', optional: true },
|
||||||
|
createdAt: { type: 'string', description: 'Creation timestamp', optional: true },
|
||||||
|
version: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Version information',
|
||||||
|
properties: VERSION_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
body: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Blog post body content',
|
||||||
|
properties: CONTENT_BODY_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
webUrl: { type: 'string', description: 'URL to view the blog post', optional: true },
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
nextCursor: {
|
nextCursor: {
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
import { LABEL_ITEM_PROPERTIES } from '@/tools/confluence/types'
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceListLabelsParams {
|
export interface ConfluenceListLabelsParams {
|
||||||
@@ -115,7 +115,7 @@ export const confluenceListLabelsTool: ToolConfig<
|
|||||||
},
|
},
|
||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
ts: TIMESTAMP_OUTPUT,
|
ts: { type: 'string', description: 'Timestamp of retrieval' },
|
||||||
labels: {
|
labels: {
|
||||||
type: 'array',
|
type: 'array',
|
||||||
description: 'Array of labels on the page',
|
description: 'Array of labels on the page',
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { PAGE_PROPERTY_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
import { TIMESTAMP_OUTPUT, VERSION_OUTPUT_PROPERTIES } from '@/tools/confluence/types'
|
||||||
import type { ToolConfig } from '@/tools/types'
|
import type { ToolConfig } from '@/tools/types'
|
||||||
|
|
||||||
export interface ConfluenceListPagePropertiesParams {
|
export interface ConfluenceListPagePropertiesParams {
|
||||||
@@ -127,7 +127,17 @@ export const confluenceListPagePropertiesTool: ToolConfig<
|
|||||||
description: 'Array of content properties',
|
description: 'Array of content properties',
|
||||||
items: {
|
items: {
|
||||||
type: 'object',
|
type: 'object',
|
||||||
properties: PAGE_PROPERTY_ITEM_PROPERTIES,
|
properties: {
|
||||||
|
id: { type: 'string', description: 'Property ID' },
|
||||||
|
key: { type: 'string', description: 'Property key' },
|
||||||
|
value: { type: 'json', description: 'Property value (can be any JSON)' },
|
||||||
|
version: {
|
||||||
|
type: 'object',
|
||||||
|
description: 'Version information',
|
||||||
|
properties: VERSION_OUTPUT_PROPERTIES,
|
||||||
|
optional: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
nextCursor: {
|
nextCursor: {
|
||||||
|
|||||||
@@ -1,126 +0,0 @@
|
|||||||
import { SPACE_PROPERTY_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceListSpacePropertiesParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
spaceId: string
|
|
||||||
limit?: number
|
|
||||||
cursor?: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceListSpacePropertiesResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
spaceId: string
|
|
||||||
properties: Array<Record<string, unknown>>
|
|
||||||
nextCursor: string | null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceListSpacePropertiesTool: ToolConfig<
|
|
||||||
ConfluenceListSpacePropertiesParams,
|
|
||||||
ConfluenceListSpacePropertiesResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_list_space_properties',
|
|
||||||
name: 'Confluence List Space Properties',
|
|
||||||
description: 'List all content properties on a Confluence space.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
spaceId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the space to list properties from',
|
|
||||||
},
|
|
||||||
limit: {
|
|
||||||
type: 'number',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Maximum number of properties to return (default: 50, max: 250)',
|
|
||||||
},
|
|
||||||
cursor: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Pagination cursor from previous response',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: (params: ConfluenceListSpacePropertiesParams) => {
|
|
||||||
const query = new URLSearchParams({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
spaceId: params.spaceId,
|
|
||||||
limit: String(params.limit || 50),
|
|
||||||
})
|
|
||||||
if (params.cursor) query.set('cursor', params.cursor)
|
|
||||||
if (params.cloudId) query.set('cloudId', params.cloudId)
|
|
||||||
return `/api/tools/confluence/space-properties?${query.toString()}`
|
|
||||||
},
|
|
||||||
method: 'GET',
|
|
||||||
headers: (params: ConfluenceListSpacePropertiesParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
spaceId: data.spaceId ?? '',
|
|
||||||
properties: data.properties ?? [],
|
|
||||||
nextCursor: data.nextCursor ?? null,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
spaceId: { type: 'string', description: 'ID of the space' },
|
|
||||||
properties: {
|
|
||||||
type: 'array',
|
|
||||||
description: 'Array of space properties',
|
|
||||||
items: {
|
|
||||||
type: 'object',
|
|
||||||
properties: SPACE_PROPERTY_ITEM_PROPERTIES,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
nextCursor: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Cursor for fetching the next page of results',
|
|
||||||
optional: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user